# counts the note files in a hierarchy ("depth" of the note pile), skipping
# version-control and bookkeeping paths.
# Arguments: $1 - root directory to scan
# Outputs:   the file count on stdout (always a number; 0 for an empty tree)
function calculate_depth()
{
  local dir="$1"
  local depth
  # one line per plain file; drop svn/git/basket/version/keep paths, count the rest.
  depth=$(find "$dir" -type f 2>/dev/null \
    | grep -Ev '\.(svn|git|basket|version|keep)' \
    | wc -l | tr -d ' ')
  # wc always prints a number, but guard anyway so callers never see an empty value.
  echo "${depth:-0}"
}
# calculates the total size in bytes of all the note files in a hierarchy
# (ls -al field 5 is bytes, not kilobytes), skipping version-control and
# bookkeeping paths.
# Arguments: $1 - root directory to scan
# Outputs:   the summed byte size on stdout (always a number; 0 for an empty tree)
function calculate_weight()
{
  local dir="$1"
  local weight
  # quote each path so xargs keeps names with spaces intact; xargs -r avoids
  # running "ls -al" with no arguments (which would list the current directory
  # and corrupt the total).  awk sums field 5 and prints 0 on empty input,
  # replacing the old paste|bc pipeline that printed nothing.
  weight=$(find "$dir" -type f -exec echo \"{}\" ';' 2>/dev/null \
    | grep -Ev '\.(svn|git|basket|version|keep)' \
    | xargs -r ls -al 2>/dev/null \
    | awk '{ total += $5 } END { print total + 0 }')
  echo "${weight:-0}"
}
# produces a report line in our format.
# notes are individual files of tasks, usually, although some are combined.
analyze_hierarchy_and_report ~/cloud/grunty_notes "grunty notes"

####
#hmmm: make an html todo scanning function from this.
# scan web documents for to-do lists. individual items are marked with <li>.
# we do this one a bit differently since we have different criteria for html to-do items.
# NOTE(review): html_item_depth is read below but never assigned in this section —
# confirm it is computed elsewhere before this runs.
html_item_weight=$(find ~/cloud/grunty_notes/ -type f -iname "*.html" -exec grep "<li" "{}" ';' | wc -c | tr -d ' ')
total_weight=$(($total_weight + $html_item_weight))
full_report+="$(format_report_line "$html_item_depth" "$html_item_weight" "to-do notes in html")"
####

# scan all the items declared as active projects.
analyze_by_dir_patterns "active items" ~/cloud/*active*
# scan across all appropriately named project or research folders that live in the "cloud".
analyze_by_dir_patterns "project files" ~/cloud/*project* ~/cloud/*research*
# scan all the trivial project folders.
analyze_by_dir_patterns "trivial items" ~/cloud/*trivia*

# source examples need to be sucked into other places, other codebases. they are not
# supposed to pile up here.
analyze_hierarchy_and_report ~/cloud/example_source "source examples"

# unsorted files haven't been categorized yet; count them up, since we expect
# them to get cleaned out before long.
analyze_hierarchy_and_report ~/cloud/unsorted "unsorted files"
# we now consider the backlog of things to read to be a relevant fact. this is going to hose
# up our weight accounting considerably.
analyze_hierarchy_and_report ~/cloud/reading "reading list"

full_report+="\n\
=====================================\n\
"