# these metrics measure how bogged down we are in to-do type items.

REPORT_FILE="$HOME/cloud/overload_history.txt"

# given a path, this will find how many items are under it, ignoring svn and git files, plus
# other patterns we happen to notice are not useful.
function calculate_depth()
{
  local dir="$1"; shift
  depth=$(find "$dir" -type f -exec echo \"{}\" ';' 2>/dev/null | grep -v "\.svn" | grep -v "\.git" | grep -v "\.basket" | grep -v "\.version" | grep -v "\.keep" | wc -l | tr -d ' ')
  if [ -z "$depth" ]; then echo 0; else echo "$depth"; fi
}

# calculates the size in bytes of all the note files in a hierarchy (the report line
# converts this to kilobytes later). this is just a raw statistic for how much content
# all those notes make up. since we have not separated out all the to-dos in some files
# (most notably the metaverse backlogs and to-do lists), it's good to also know what
# kind of girth the notes have.
function calculate_weight()
{
  local dir="$1"; shift
  weight=$(find "$dir" -type f -exec echo \"{}\" ';' 2>/dev/null | grep -v "\.svn" | grep -v "\.git" | grep -v "\.basket" | grep -v "\.version" | grep -v "\.keep" | xargs ls -al | awk '{ print $5 }' | paste -sd+ | bc 2>/dev/null)
  if [ -z "$weight" ]; then echo 0; else echo "$weight"; fi
}

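# a quick usage sketch (the path is the real one used further below in the report):
#   calculate_depth ~/cloud/grunty_notes    # prints the count of note files found
#   calculate_weight ~/cloud/grunty_notes   # prints their combined size in bytes
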
# produces a report line in our format.
function format_report_line()
{
  local depth="$1"; shift
  local weight="$1"; shift
  weight=$((weight / 1024))
  echo " $depth\t${weight}kb\t$*\n"
}

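# for example (values are illustrative only), format_report_line 12 4096 "reading list"
# emits " 12\t4kb\treading list\n"; the \t and \n stay literal in the string until the
# final "echo -e" at the bottom of the script expands them.
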
# two parameters are needed: the directory to sum up and the label to use for it in the report.
# this will calculate the depth and weight for a hierarchy of notes, and then produce a
# line of reporting for those.
function analyze_hierarchy_and_report()
{
  local dir="$1"; shift
  local label="$1"; shift
  local depth=$(calculate_depth "$dir")
  total_overload=$(($depth + $total_overload))
  local weight=$(calculate_weight "$dir")
  total_weight=$(($total_weight + $weight))
  full_report+=$(format_report_line "$depth" "$weight" "$label")
}

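# usage mirrors the calls further down, e.g.:
#   analyze_hierarchy_and_report ~/cloud/grunty_notes "grunty notes"
# note that it accumulates into the globals total_overload, total_weight and
# full_report rather than returning anything.
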
# scans through items in the notes folder that begin with a pattern.
# each of those is treated as an aggregable portion of the report.
# first parameter is the title in the report, second and so on are
# a list of directory patterns to scan and aggregate.
function analyze_by_dir_patterns()
{
  local title="$1"; shift
  local hier_depth=0
  local hier_weight=0
  for i in "$@"; do
    temp_depth=$(calculate_depth "$i")
    hier_depth=$(($hier_depth + $temp_depth))
    temp_weight=$(calculate_weight "$i")
    hier_weight=$(($hier_weight + $temp_weight))
  done
  total_overload=$(($hier_depth + $total_overload))
  total_weight=$(($total_weight + $hier_weight))
  full_report+=$(format_report_line "$hier_depth" "$hier_weight" "$title")
}

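# the shell expands the glob patterns before the function runs, so "$@" already holds
# the matching directories; e.g. the "project files" call below arrives as something like:
#   analyze_by_dir_patterns "project files" ~/cloud/some_project ~/cloud/other_research
# (those two directory names are illustrative only.)
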
# reset these before we add anything...
total_overload=0
total_weight=0

# start out the report with a header.
full_report="\
\n\
Current information overload consists of:\n\
\n\
"

# notes are individual files of tasks, usually, although some are combined.
analyze_hierarchy_and_report ~/cloud/grunty_notes "grunty notes"

#hmmm: make an html todo scanning function from this.
# scan web documents for to-do lists. individual items are marked with <li>.
# we do this one a bit differently since we have different criteria for html to-do items.
html_item_depth=$(find ~/cloud/grunty_notes/ -type f -iname "*.html" -exec grep "<li" "{}" ';' | wc -l | tr -d ' ')
total_overload=$(($html_item_depth + $total_overload))
html_item_weight=$(find ~/cloud/grunty_notes/ -type f -iname "*.html" -exec grep "<li" "{}" ';' | wc -c | tr -d ' ')
total_weight=$(($total_weight + $html_item_weight))
full_report+="$(format_report_line "$html_item_depth" "$html_item_weight" "to-do notes in html")"

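# a possible shape for the function the hmmm note above asks for; just a sketch, not
# called anywhere yet, and the name analyze_html_items_and_report plus its parameters
# are invented here. it wraps the same find/grep pipeline so other html trees could be
# counted the same way.
function analyze_html_items_and_report()
{
  local dir="$1"; shift
  local label="$1"; shift
  local depth=$(find "$dir" -type f -iname "*.html" -exec grep "<li" "{}" ';' | wc -l | tr -d ' ')
  local weight=$(find "$dir" -type f -iname "*.html" -exec grep "<li" "{}" ';' | wc -c | tr -d ' ')
  total_overload=$(($depth + $total_overload))
  total_weight=$(($total_weight + $weight))
  full_report+="$(format_report_line "$depth" "$weight" "$label")"
}
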
# scan all the items declared as active projects.
analyze_by_dir_patterns "active items" ~/cloud/*active*

# scan across all appropriately named project or research folders that live in the "cloud".
analyze_by_dir_patterns "project files" ~/cloud/*project* ~/cloud/*research*

# scan all the trivial project folders.
analyze_by_dir_patterns "trivial items" ~/cloud/*trivia*

# source examples need to be sucked into other places, other codebases. they are not
# supposed to pile up here.
analyze_hierarchy_and_report ~/cloud/example_source "source examples"

# and then count up the things that we think will be cleaned soon, but one thing we have
# learned is that unsorted files haven't been categorized yet.
analyze_hierarchy_and_report ~/cloud/unsorted "unsorted files"

# we now consider the backlog of things to read to be a relevant fact. this is going to hose
# up our weight accounting considerably.
analyze_hierarchy_and_report ~/cloud/reading "reading list"

full_report+="\n\
=====================================\n\
"
full_report+="$(format_report_line "$total_overload" "$total_weight" "Total Overload")"
full_report+="\n\
[gathered on $(date)]\n\n\
"

echo -e "$full_report" | tee -a "$REPORT_FILE"
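
# each run prints the report and appends it to overload_history.txt; the block looks
# roughly like this (numbers are made up for illustration):
#
#   Current information overload consists of:
#
#    120	843kb	grunty notes
#    ...
#    9	51kb	reading list
#    =====================================
#    3500	12000kb	Total Overload
#   [gathered on <date output>]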