X-Git-Url: https://feistymeow.org/gitweb/?a=blobdiff_plain;f=scripts%2Fnotes%2Finfo_overload_report.sh;h=05fee6f30eb7370d82ba9175310187a5a2a4853a;hb=7cb361cd12aea2938534e7d4166ba2139ab4d382;hp=c7fb9f9358058a778cc5e15f16bb457c042b17ef;hpb=f9713a722581c57f00d06c530476279ddbd9de7a;p=feisty_meow.git

diff --git a/scripts/notes/info_overload_report.sh b/scripts/notes/info_overload_report.sh
index c7fb9f93..05fee6f3 100644
--- a/scripts/notes/info_overload_report.sh
+++ b/scripts/notes/info_overload_report.sh
@@ -1,14 +1,14 @@
 # these metrics are how bogged down we are in to-do type items.
 
-REPORT_FILE="$HOME/cloud/overload_history.txt"
+REPORT_FILE="$HOME/cloud/fred_stats/overload_history.txt"
 
 
 # given a path, this will find how many items are under it, ignoring svn and git files, plus
 # other patterns we happen to notice are not useful.
 function calculate_count()
 {
   local dir="$1"; shift
-  count=$(find "$dir" -type f -exec echo \"{}\" ';' 2>/dev/null | grep -v "\.svn" | grep -v "\.git"| grep -v "\.basket" | grep -v "\.version" | grep -v "\.keep" | wc -l | tr -d ' ')
+  local count=$(find "$dir" -type f -exec echo \"{}\" ';' 2>/dev/null | grep -v "\.svn" | grep -v "\.git"| grep -v "\.basket" | grep -v "\.version" | grep -v "\.keep" | wc -l | tr -d ' ')
   if [ -z "$count" ]; then echo 0; else echo "$count"; fi
 }
 
@@ -19,7 +19,7 @@ function calculate_count()
 function calculate_weight()
 {
   local dir="$1"; shift
-  weight=$(find "$dir" -type f -exec echo \"{}\" ';' 2>/dev/null | grep -v "\.svn" | grep -v "\.git"| grep -v "\.basket" | grep -v "\.version" | grep -v "\.keep" | xargs ls -al | awk '{ print $5 }' | paste -sd+ | bc 2>/dev/null)
+  local weight=$(find "$dir" -type f -exec echo \"{}\" ';' 2>/dev/null | grep -v "\.svn" | grep -v "\.git"| grep -v "\.basket" | grep -v "\.version" | grep -v "\.keep" | xargs ls -al | awk '{ print $5 }' | paste -sd+ | bc 2>/dev/null)
   if [ -z "$weight" ]; then echo 0; else echo "$weight"; fi
 }
 
@@ -28,7 +28,7 @@ function calculate_weight()
 function calculate_complexity()
 {
   local dir="$1"; shift
-  complexity=$(find "$dir" -type d | wc -l)
+  local complexity=$(find "$dir" -type d | wc -l)
   if [ -z "$complexity" ]; then echo 0; else echo "$complexity"; fi
 }
 
@@ -39,7 +39,7 @@ function format_report_line()
   local weight="$1"; shift
   weight=$((weight / 1024))
   local complexity="$1"; shift
-  echo "$count\t${weight}\t\t${complexity}\t\t$*\n"
+  echo "$count\t${complexity}\t\t${weight}\t\t$*\n"
 }
 
 # two parameters are needed: the directory to sum up and the label to use for it in the report.
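
A note on the counting pipelines in the hunks above: each one filters find's output through a chain of "grep -v" calls, and the commit scopes the result variables with "local var=$(command)". One caveat with that form is that it returns the exit status of local rather than of the command. Below is a minimal sketch, not part of this commit, of the same count computed with find's own exclusion predicates (assuming the five patterns are the same ones the grep chain targets), with the local declaration split from the assignment:

  function calculate_count()
  {
    local dir="$1"; shift
    local count
    # let find skip version-control and metadata paths itself instead of
    # piping its output through a chain of "grep -v" filters.
    count=$(find "$dir" -type f \
        ! -path '*.svn*' ! -path '*.git*' \
        ! -path '*.basket*' ! -path '*.version*' ! -path '*.keep*' \
        | wc -l | tr -d ' ')
    echo "${count:-0}"
  }

Splitting the declaration from the assignment also keeps the pipeline's exit status observable in $?, which the one-line "local count=$(...)" form discards.
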
@@ -67,13 +67,13 @@ function analyze_by_dir_patterns()
   local title="$1"; shift
   local hier_count=0
   local hier_weight=0
-# full_report+=$(format_report_line "$hier_count" "$hier_weight" "$hier_complexity" "$title")
-  for i in $@; do
-    temp_count=$(calculate_count $i)
+  local hier_complexity=0
+  for folder in $@; do
+    temp_count=$(calculate_count $folder)
     hier_count=$(($hier_count + $temp_count))
-    temp_weight=$(calculate_weight $i)
+    temp_weight=$(calculate_weight $folder)
     hier_weight=$(($hier_weight + $temp_weight))
-    temp_complexity=$(calculate_complexity $i)
+    temp_complexity=$(calculate_complexity $folder)
     hier_complexity=$(($hier_complexity + $temp_complexity))
   done
   total_overload=$(($hier_count + $total_overload))
@@ -94,32 +94,40 @@ full_report="\
 current information overload consists of:\n\
 \n\
 "
-full_report+="count\tweight (kb)\tcomplexity\tcategory\n\
+full_report+="count\tcomplexity\tweight (kb)\tcategory\n\
 ================================================================\n\
 "
 
+analyze_hierarchy_and_report ~/cloud/urgent "high priority"
+
 # notes are individual files of tasks, usually, although some are combined.
 analyze_hierarchy_and_report ~/cloud/grunty_notes "grunty notes"
 
 # feisty notes are about feisty meow(r) concerns ltd codebase development.
 analyze_hierarchy_and_report ~/cloud/feisty_notes "feisty meow notes"
 
+# home notes are a new top-level category; used to be under the grunty.
+analyze_hierarchy_and_report ~/cloud/branch_road "hearth and home notes"
+
 # scan all the items declared as active projects.
-analyze_by_dir_patterns "active items" ~/cloud/*active*
+analyze_by_dir_patterns "active items" ~/cloud/active*
 
 # scan across all appropriately named project or research folders that live in the "cloud".
-analyze_by_dir_patterns "project files" ~/cloud/*project* ~/cloud/*research*
+analyze_by_dir_patterns "project files" ~/cloud/project* ~/cloud/research*
+
+# snag any work related items for that category.
+analyze_by_dir_patterns "job and work tasks" ~/cloud/job*
 
 # scan all the trivial project folders.
-analyze_by_dir_patterns "trivial items" ~/cloud/*trivia*
+analyze_by_dir_patterns "trivial items" ~/cloud/trivia*
 
-# source examples need to be sucked into other places, other codebases. they are not
-# supposed to pile up here.
-analyze_hierarchy_and_report ~/cloud/example_source "source examples"
+# some source code that needs to be sucked into other places, other codebases. they are not
+# supposed to pile up here. but they have, so we track them.
+analyze_hierarchy_and_report ~/cloud/scavenging_source "source scavenging"
 
 # and then count up the things that we think will be cleaned soon, but one thing we have learned
 # unsorted files haven't been categorized yet.
-analyze_hierarchy_and_report ~/cloud/unsorted "unsorted files"
+analyze_hierarchy_and_report ~/cloud/disordered "unsorted files"
 
 # we now consider the backlog of things to read to be a relevant fact. this is going to hose
 # up our weight accounting considerably.
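
The reordered report header in the last hunk (count, then complexity, then weight) has to stay in step with format_report_line, which the earlier hunk changed to emit fields in that same order. The line itself embeds literal \t and \n escapes, so it only renders as columns if the accumulated report is eventually printed by something that expands them, presumably "echo -e". A sketch of the same formatting done with printf, which expands the escapes itself; this is an alternative under that assumption, not the commit's approach:

  function format_report_line()
  {
    local count="$1"; shift
    local weight="$1"; shift
    # convert bytes to kilobytes to match the "weight (kb)" column header.
    weight=$((weight / 1024))
    local complexity="$1"; shift
    # emit count, complexity, weight, then the label, matching the column
    # order of the new report header.
    printf '%s\t%s\t\t%s\t\t%s\n' "$count" "$complexity" "$weight" "$*"
  }

With printf doing the expansion, each report line is already rendered when it is appended, so the final print of the report does not need to rely on echo's -e behavior.
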