2 # these metrics are how bogged down we are in to-do type items.
4 REPORT_FILE="$HOME/cloud/fred_stats/overload_history.txt"
# given a path, this will find how many items are under it, ignoring svn and git files, plus
# other patterns we happen to notice are not useful.
#   $1 - the directory hierarchy to scan.
# prints the file count (0 if the path is missing or empty) on stdout.
function calculate_count()
{
  local dir="$1"; shift
  local count
  # one extended regex replaces the old chain of grep -v filters, and grep -c
  # counts matches directly instead of piping through wc and tr.  the old
  # per-file '-exec echo \"{}\"' trick (one process per file, literal quote
  # characters in the stream) is gone; find's own output is used as-is.
  count=$(find "$dir" -type f 2>/dev/null \
    | grep -c -E -v '\.(svn|git|basket|version|keep)')
  # guarantee a numeric answer even if the pipeline produced nothing.
  if [ -z "$count" ]; then echo 0; else echo "$count"; fi
}
# calculates the size in bytes of all the note files in a hierarchy (the report
# converts this to kilobytes later, in format_report_line).
# this is just a raw statistic for how much content all those notes make up. since
# we have not separated out all the to-dos in some files (most notably the metaverse
# backlogs and to-do lists), it's good to also know what kind of girth the notes have.
#   $1 - the directory hierarchy to scan.
# prints the total byte count (0 if the path is missing or empty) on stdout.
function calculate_weight()
{
  local dir="$1"; shift
  local weight
  # sum the size column (5) of a long listing for every matching file.
  # xargs -d '\n' handles paths containing spaces without any quote
  # gymnastics, and -r skips ls entirely on empty input, so we never
  # accidentally list the current directory and sum the wrong sizes.
  # awk replaces the old 'paste -sd+ | bc' summing pipeline.
  weight=$(find "$dir" -type f 2>/dev/null \
    | grep -E -v '\.(svn|git|basket|version|keep)' \
    | xargs -r -d '\n' ls -ld -- 2>/dev/null \
    | awk '{ total += $5 } END { printf "%d\n", total }')
  if [ -z "$weight" ]; then echo 0; else echo "$weight"; fi
}
# calculate_complexity gets a very simple metric of how many directory components are
# present at the target location and below.
#   $1 - the directory hierarchy to scan.
# prints the directory count (0 if the path is missing) on stdout.
function calculate_complexity()
{
  local dir="$1"; shift
  local complexity
  # suppress permission errors so stray noise cannot pollute the count, and
  # trim the padding some wc implementations emit (consistent with siblings).
  complexity=$(find "$dir" -type d 2>/dev/null | wc -l | tr -d ' ')
  if [ -z "$complexity" ]; then echo 0; else echo "$complexity"; fi
}
# produces a report line in our format.
#   $1 - item count, $2 - weight in bytes, $3 - complexity, $4.. - the label.
# prints one tab-separated line; the \t and \n sequences are emitted literally
# here on purpose, since the final 'echo -e' of the whole report expands them.
function format_report_line()
{
  local count="$1"; shift
  local weight="$1"; shift
  # the report shows kilobytes, not the raw byte total.
  weight=$((weight / 1024))
  local complexity="$1"; shift
  # printf '%s\n' is immune to labels that start with '-' or contain
  # backslashes, unlike a bare echo of the assembled string.
  printf '%s\n' "$count\t${complexity}\t\t${weight}\t\t$*\n"
}
# two parameters are needed: the directory to sum up and the label to use for it in the report.
# this will calculate the count and weight for a hierarchy of notes, and then produce a
# line of reporting for those.
# side effects: adds into the globals total_overload, total_weight,
# total_complexity and appends the formatted line to full_report.
function analyze_hierarchy_and_report()
{
  local dir="$1"; shift
  local label="$1"; shift
  # declarations are separated from the command substitutions so a helper's
  # exit status is not masked by 'local'.
  local count weight complexity
  count=$(calculate_count "$dir")
  total_overload=$(($count + $total_overload))
  weight=$(calculate_weight "$dir")
  total_weight=$(($total_weight + $weight))
  complexity=$(calculate_complexity "$dir")
  total_complexity=$(($total_complexity + $complexity))
  full_report+=$(format_report_line "$count" "$weight" "$complexity" "$label")
}
# scans through items in the notes folder that begin with a pattern.
# each of those is treated as an aggregable portion of the report.
# first parameter is the title in the report, second and so on are
# a list of directory patterns to scan and aggregate.
# side effects: adds into the globals total_overload, total_weight,
# total_complexity and appends the formatted line to full_report.
function analyze_by_dir_patterns()
{
  local title="$1"; shift
  local hier_count=0
  local hier_weight=0
  local hier_complexity=0
  local folder temp_count temp_weight temp_complexity
  # the caller's shell already expanded the glob patterns, so each remaining
  # argument is one concrete folder; quote it to survive spaces in paths.
  for folder in "$@"; do
    temp_count=$(calculate_count "$folder")
    hier_count=$(($hier_count + $temp_count))
    temp_weight=$(calculate_weight "$folder")
    hier_weight=$(($hier_weight + $temp_weight))
    temp_complexity=$(calculate_complexity "$folder")
    hier_complexity=$(($hier_complexity + $temp_complexity))
  done
  total_overload=$(($hier_count + $total_overload))
  total_weight=$(($total_weight + $hier_weight))
  total_complexity=$(($total_complexity + $hier_complexity))
  full_report+=$(format_report_line "$hier_count" "$hier_weight" "$hier_complexity" "$title")
}
# reset these before we add anything...
total_overload=0
total_weight=0
total_complexity=0
full_report=""

# start out the report with a header.
full_report+="\n\
current information overload consists of:\n\
\n\
"
full_report+="count\tcomplexity\tweight (kb)\tcategory\n\
================================================================\n\
"

analyze_hierarchy_and_report ~/cloud/urgent "high priority"

# notes are individual files of tasks, usually, although some are combined.
analyze_hierarchy_and_report ~/cloud/grunty_notes "grunty notes"

# feisty notes are about feisty meow(r) concerns ltd codebase development.
analyze_hierarchy_and_report ~/cloud/feisty_notes "feisty meow notes"

# home notes are a new top-level category; used to be under the grunty.
analyze_hierarchy_and_report ~/cloud/branch_road "hearth and home notes"

# scan all the items declared as active projects.
analyze_by_dir_patterns "active items" ~/cloud/active*

# scan across all appropriately named project or research folders that live in the "cloud".
analyze_by_dir_patterns "project files" ~/cloud/project* ~/cloud/research*

# snag any work related items for that category.
analyze_by_dir_patterns "job and work tasks" ~/cloud/job*

# scan all the trivial project folders.
analyze_by_dir_patterns "trivial items" ~/cloud/trivia*

# some source code that needs to be sucked into other places, other codebases. they are not
# supposed to pile up here. but they have, so we track them.
analyze_hierarchy_and_report ~/cloud/scavenging_source "source scavenging"

# and then count up the things that we think will be cleaned soon, but one thing we have
# learned is that unsorted files just don't get categorized very quickly at all.
analyze_hierarchy_and_report ~/cloud/disordered "unsorted files"

# we now consider the backlog of things to read to be a relevant fact. this is going to hose
# up our weight accounting considerably.
analyze_hierarchy_and_report ~/cloud/reading "reading list"

full_report+="================================================================\n\
"
full_report+="$(format_report_line "$total_overload" "$total_weight" "$total_complexity" "total overload")"
full_report+="\n\
[gathered on $(date)]\n\n\
"

# make sure the report's folder exists before tee tries to append to a file there.
mkdir -p "$(dirname "$REPORT_FILE")"

echo -e "$full_report" | tee -a "$REPORT_FILE"