# these metrics are how bogged down we are in to-do type items.

# logged historical file where we append our latest report.
REPORT_FILE="$CLOUD_BASE/stats/overload_history.txt"

#hmmm: check path validity?
# given a path, this will find how many items are under it, ignoring svn and git files, plus
# other patterns we happen to notice are not useful.
function calculate_count()
{
  local dir="$1"; shift
  # count regular files, skipping revision-control and bookkeeping debris.
  # find's own errors (e.g. a missing path) are suppressed so we degrade to zero
  # instead of spewing noise into the report.
  local count=$(find "$dir" -type f 2>/dev/null \
    | grep -v -e '\.svn' -e '\.git' -e '\.basket' -e '\.version' -e '\.keep' \
    | wc -l | tr -d ' ')
  if [ -z "$count" ]; then echo 0; else echo "$count"; fi
}
# calculates the size in kilobytes of all the note files in a hierarchy.
# this is just a raw statistic for how much content all those notes make up. since
# we have not separated out all the to-dos in some files (most notably the metaverse
# backlogs and to-do lists), it's good to also know what kind of girth the notes have.
function calculate_weight()
{
  local dir="$1"; shift
  # sum the size column of a long listing for every relevant file.
  # NUL-delimited plumbing keeps filenames with spaces intact, and xargs -r
  # avoids running "ls -al" with no arguments (which would list the current
  # directory and silently produce a bogus weight).
  local weight=$(find "$dir" -type f -print0 2>/dev/null \
    | grep -zv -e '\.svn' -e '\.git' -e '\.basket' -e '\.version' -e '\.keep' \
    | xargs -0 -r ls -al 2>/dev/null \
    | awk '{ total += $5 } END { if (NR) printf "%d\n", total }')
  if [ -z "$weight" ]; then echo 0; else echo "$weight"; fi
}
# calculate_complexity gets a very simple metric of how many directory components are
# present at the target location and below.
function calculate_complexity()
{
  local dir="$1"; shift
  # suppress find's errors (matching the sibling calculators) so a missing
  # hierarchy degrades to zero instead of printing noise.
  local complexity=$(find "$dir" -type d 2>/dev/null | wc -l | tr -d ' ')
  if [ -z "$complexity" ]; then echo 0; else echo "$complexity"; fi
}
# produces a report line in our format.
# parameters: count, weight (in bytes), complexity, and the remaining args are the label.
function format_report_line()
{
  local count="$1"; shift
  local weight="$1"; shift
  # weight arrives in bytes; the report shows kilobytes.
  weight=$((weight / 1024))
  local complexity="$1"; shift
  # intentionally plain echo (no -e): the literal \t and \n escapes must survive
  # inside full_report until the final "echo -e" renders the whole report.
  echo "$count\t${complexity}\t\t${weight}\t\t$*\n"
}
# two parameters are needed: the directory to sum up and the label to use for it in the report.
# this will calculate the count and weight for a hierarchy of notes, and then produce a
# line of reporting for those.
# NOTE(review): reads and updates the globals total_overload, total_weight,
# total_complexity and full_report.
function analyze_hierarchy_and_report()
{
  local dir="$1"; shift
  local label="$1"; shift

  # initial values are all zero; a missing hierarchy still gets a report line.
  local count=0
  local weight=0
  local complexity=0

  if [ -d "$dir" ]; then
    count=$(calculate_count "$dir")
    total_overload=$(($count + $total_overload))
    weight=$(calculate_weight "$dir")
    total_weight=$(($total_weight + $weight))
    complexity=$(calculate_complexity "$dir")
    total_complexity=$(($total_complexity + $complexity))
  fi

  full_report+=$(format_report_line "$count" "$weight" "$complexity" "$label")
}
# scans through items in the notes folder that begin with a pattern.
# each of those is treated as an aggregatable portion of the report.
# first parameter is the title in the report, second and so on are
# a list of directory patterns to scan and aggregate.
# NOTE(review): reads and updates the globals total_overload, total_weight,
# total_complexity and full_report.
function analyze_by_dir_patterns()
{
  local title="$1"; shift
  local hier_count=0
  local hier_weight=0
  local hier_complexity=0
  local temp_count temp_weight temp_complexity folder

  for folder in "$@"; do
    if [ -d "$folder" ]; then
      # quote the folder so paths with spaces survive the calls.
      temp_count=$(calculate_count "$folder")
      hier_count=$(($hier_count + $temp_count))
      temp_weight=$(calculate_weight "$folder")
      hier_weight=$(($hier_weight + $temp_weight))
      temp_complexity=$(calculate_complexity "$folder")
      hier_complexity=$(($hier_complexity + $temp_complexity))
    fi
  done

  total_overload=$(($hier_count + $total_overload))
  total_weight=$(($total_weight + $hier_weight))
  total_complexity=$(($total_complexity + $hier_complexity))
  full_report+=$(format_report_line "$hier_count" "$hier_weight" "$hier_complexity" "$title")
}
# reset these before we add anything...
total_overload=0
total_weight=0
total_complexity=0
full_report=""

# start out the report with a header.
full_report+="\
current information overload consists of:\n\
\n"
full_report+="count\tcomplexity\tweight (kb)\tcategory\n\
================================================================\n\
\n"
# this group is for hierarchies which we expect to have a stable name.

#hmmm: don't fail if the hierarchy doesn't exist.

# high priority stuff would be called urgent.
analyze_hierarchy_and_report "$CLOUD_BASE/aaa_priority" "high priority (aieeee!)"

# notes are individual files of tasks, usually, although some are combined.
#hmmm: this glob may expand to several directories, which would garble the label;
#      consider analyze_by_dir_patterns instead.
analyze_hierarchy_and_report $CLOUD_BASE/grunty* "grunty (external facing) notes"

# web site development tasks.
analyze_hierarchy_and_report "$CLOUD_BASE/webular" "web design (ideas and tasks)"

# feisty notes are about feisty meow(r) concerns ltd codebase development.
analyze_hierarchy_and_report "$CLOUD_BASE/feisty_notes" "feisty meow notes (mondo coding)"

# metaverse notes are about our ongoing simulator development and LSL scripting.
analyze_hierarchy_and_report "$CLOUD_BASE/metaverse" "metaverse in cyberspace design and scripting"

# home notes are a new top-level category; used to be under the grunty.
analyze_hierarchy_and_report "$CLOUD_BASE/branch_road" "hearth and home notes (branch road)"

# and then count up the things that we think will be cleaned soon, but one thing we have learned
# unsorted files haven't been categorized yet.
analyze_hierarchy_and_report "$CLOUD_BASE/disordered" "disordered and maybe deranged files"

# we now consider the backlog of things to read to be a relevant fact. this is going to hose
# up our weight accounting considerably.
analyze_hierarchy_and_report "$CLOUD_BASE/reading" "reading list (for a quiet afternoon)"

# bluesky is our brainstorming and wunderthinking area for new things.
analyze_hierarchy_and_report "$CLOUD_BASE/blue_sky" "blue sky is the limit ideas"

# this group is found based on matching a pattern, since we may not use a single
# standardized name for it, or maybe there are prefixes for similar items (like project-).
# the globs below are deliberately unquoted so the shell expands them into the
# directory list that analyze_by_dir_patterns aggregates.

# scan all the items declared as active projects.
analyze_by_dir_patterns "active issues" $CLOUD_BASE/*active*

# vocation is a prefix for anything i consider part of my life's work.
analyze_by_dir_patterns "life's work and other oddities" $CLOUD_BASE/vocation*

# scan across all appropriately named project or research folders.
analyze_by_dir_patterns "running projects" $CLOUD_BASE/*project* $CLOUD_BASE/*research*

# look for our mad scientist style efforts.
analyze_by_dir_patterns "lab experiments" $CLOUD_BASE/*experiment*

# we also are running some businesses. get those notes in.
analyze_by_dir_patterns "business ops" $CLOUD_BASE/*business*

# find any charity actions that need to be taken.
analyze_by_dir_patterns "charitable" $CLOUD_BASE/*charit*

# rub alongside all the travel notes to see if any have interesting burrs.
analyze_by_dir_patterns "travel plans" $CLOUD_BASE/*travel*

# snag any work related items for that category.
analyze_by_dir_patterns "jobby work tasks" $CLOUD_BASE/job*

# scan all the trivial project folders.
analyze_by_dir_patterns "trivialities and back burner items" $CLOUD_BASE/trivia* $CLOUD_BASE/backburn*

# okay, fudge. if there are game tasks, then count them too. what are they, nothing? not so.
analyze_by_dir_patterns "play time and games" $CLOUD_BASE/*gaming* $CLOUD_BASE/*game*
# close out the report with the totals and a timestamp.
full_report+="================================================================\n\
\n"
full_report+="$(format_report_line "$total_overload" "$total_weight" "$total_complexity" "total overload")"
full_report+="\n\
[gathered on $(date)]\n\n\
\n"

# render the accumulated \t/\n escapes, show the report, and append it to the log.
echo -e "$full_report" | tee -a "$REPORT_FILE"