removed another bunch of useless crap - swerc - anselm's simpler werc fork
       git clone git://git.suckless.org/swerc
       ---
       commit ee293798c5af8461f2ff9f35f278092ed1038425
       parent 43b7ac0606570b80f16420ea5ab217f2dc9c309a
       Author: anselm@garbe.us <unknown>
       Date:   Sun, 12 Feb 2012 18:14:45 +0100
       
       removed another bunch of useless crap
       Diffstat:
         M bin/cgilib.rc                       |     148 -------------------------------
         M bin/corehandlers.rc                 |      52 +------------------------------
         M bin/werc.rc                         |      10 ++--------
         D bin/werc_errlog_wrap.rc             |       5 -----
         M bin/wercconf.rc                     |      12 ------------
         M bin/werclib.rc                      |     120 -------------------------------
         M etc/initrc                          |      27 +++------------------------
         D etc/initrc.local                    |       4 ----
         D tpl/_debug.tpl                      |      29 -----------------------------
         M tpl/sitemap.tpl                     |       3 ---
       
       10 files changed, 6 insertions(+), 404 deletions(-)
       ---
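
        The clone URL at the top is all that is needed to inspect this commit with
        stock git tooling; a minimal sketch (run from any working directory):

            git clone git://git.suckless.org/swerc
            cd swerc
            git show --stat ee293798c5af8461f2ff9f35f278092ed1038425
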
       diff --git a/bin/cgilib.rc
       @@ -1,9 +1,6 @@
        # Useful CGI stuff
        
        fn dprint { echo $* >[1=2] }
       -fn dprintv { { for(v in $*) { echo -n $v^'#'^$#$v^'=' $$v '; '  }; echo } >[1=2] }
       -
       -fn escape_html { sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g' $* }
        
        fn http_redirect {
            if(~ $1 http://* https://*)
       @@ -19,38 +16,6 @@ Location: '^$t^'
            exit
        }
        fn perm_redirect { http_redirect $1 '301 Moved Permanantly' }
       -fn post_redirect { http_redirect $1 '303 See Other' }
       -
       -
       -# Note: should check if content type is application/x-www-form-urlencoded?
       -# Should compare with http://www.shelldorado.com/scripts/cmds/urlgetopt.txt
       -fn load_post_args {
       -    if(~ $REQUEST_METHOD POST && ~ $#post_args 0) {
       -        ifs='&
       -'       for(pair in `{cat}) {
       -            ifs='=' { pair=`{echo -n $pair} }
       -            n='post_arg_'^`{echo $pair(1)|tr -cd 'a-zA-Z0-9_'}
       -            post_args=( $post_args $n )
       -            ifs=() { $n=`{echo -n $pair(2)|urldecode|tr -d ''} }
       -        }
       -        pair=()
       -    }
       -    if not
       -        status='No POST or post args already loaded'
       -}
       -# Status is () if at least one arg is found. DEPRECATED: access vars directly.
       -fn get_post_args {
       -    load_post_args
       -    _status='No post arg matches'
       -    for(n in $*) {
       -        v=post_arg_$n
       -        if(! ~ $#$v 0) {
       -            $n=$$v
       -            _status=()
       -        }
       -    }
       -    status=$_status
       -}
        
        # This seems slightly improve performance, but might depend on httpd buffering behavior.
        fn awk_buffer {
       @@ -64,97 +29,7 @@ fn awk_buffer {
            END { printf "%s", buf }'
        }
        
       -fn urldecode {
       -awk '
       -BEGIN {
       -    hextab ["0"] = 0; hextab ["8"] = 8;
       -    hextab ["1"] = 1; hextab ["9"] = 9;
       -    hextab ["2"] = 2; hextab ["A"] = hextab ["a"] = 10
       -    hextab ["3"] = 3; hextab ["B"] = hextab ["b"] = 11;
       -    hextab ["4"] = 4; hextab ["C"] = hextab ["c"] = 12;
       -    hextab ["5"] = 5; hextab ["D"] = hextab ["d"] = 13;
       -    hextab ["6"] = 6; hextab ["E"] = hextab ["e"] = 14;
       -    hextab ["7"] = 7; hextab ["F"] = hextab ["f"] = 15;
       -}
       -{
       -    decoded = ""
       -    i = 1
       -    len = length ($0)
       -    while ( i <= len ) {
       -        c = substr ($0, i, 1)
       -        if ( c == "%" ) {
       -            if ( i+2 <= len ) {
       -                c1 = substr ($0, i+1, 1)
       -                c2 = substr ($0, i+2, 1)
       -                if ( hextab [c1] == "" || hextab [c2] == "" ) {
       -                    print "WARNING: invalid hex encoding: %" c1 c2 | "cat >&2"
       -                } else {
       -                    code = 0 + hextab [c1] * 16 + hextab [c2] + 0
       -                    c = sprintf ("%c", code)
       -                    i = i + 2
       -                }
       -            } else {
       -                print "WARNING: invalid % encoding: " substr ($0, i, len - i)
       -            }
       -        } else if ( c == "+" ) {
       -            c = " "
       -        }
       -        decoded = decoded c
       -        ++i
       -    }
       -    printf "%s", decoded
       -}
       -'
       -}
       -
       -fn url_encode {
       -    awk '
       -    BEGIN {
       -    # We assume an awk implementation that is just plain dumb.
       -    # We will convert an character to its ASCII value with the
       -    # table ord[], and produce two-digit hexadecimal output
       -    # without the printf("%02X") feature.
       -
       -    EOL = "%0A"     # "end of line" string (encoded)
       -    split ("1 2 3 4 5 6 7 8 9 A B C D E F", hextab, " ")
       -    hextab [0] = 0
       -    for ( i=1; i<=255; ++i ) ord [ sprintf ("%c", i) "" ] = i + 0
       -    if ("'^$"EncodeEOL^'" == "yes") EncodeEOL = 1; else EncodeEOL = 0
       -    }
       -    {
       -    encoded = ""
       -    for ( i=1; i<=length ($0); ++i ) {
       -        c = substr ($0, i, 1)
       -        if ( c ~ /[a-zA-Z0-9.-]/ ) {
       -        encoded = encoded c     # safe character
       -        } else if ( c == " " ) {
       -        encoded = encoded "+"   # special handling
       -        } else {
       -        # unsafe character, encode it as a two-digit hex-number
       -        lo = ord [c] % 16
       -        hi = int (ord [c] / 16);
       -        encoded = encoded "%" hextab [hi] hextab [lo]
       -        }
       -    }
       -    if ( EncodeEOL ) {
       -        printf ("%s", encoded EOL)
       -    } else {
       -        print encoded
       -    }
       -    }
       -    END {
       -        #if ( EncodeEOL ) print ""
       -    }
       -' $* 
       -}
       -
        # Cookies
       -fn set_cookie {
       -    # TODO: should check input values more carefully
       -    name=$1
       -    val=$2
       -    extraHttpHeaders=( $extraHttpHeaders 'Set-cookie: '^$"name^'='^$"val^'; path=/;' )
       -}
        fn get_cookie {
            ifs=';' { co=`{echo $HTTP_COOKIE} }
        
       @@ -207,26 +82,3 @@ fn ll_addh {
        
        NEW_LINE='
        '
       -
       -# crop_text [max_lenght [ellipsis]] 
       -# TODO: Option to crop only at word-delimiters.
       -fn crop_text {
       -    m=512
       -    e='...'
       -    if(! ~ $#1 0)
       -        m=$1
       -    if(! ~ $#2 0)
       -        e=$2
       -
       -    awk -v 'max='^$"m -v 'ellipsis='$e '
       -    {
       -        nc += 1 + length;
       -        if(nc > max) {
       -            print substr($0, 1, nc - max) " " ellipsis
       -            exit
       -        }
       -        print
       -    }' 
       -}
       -
       -
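
        Most of the helpers deleted above (escape_html, urldecode, url_encode, the
        post_arg_* loaders, set_cookie, crop_text) also lose their remaining in-tree
        callers elsewhere in this commit (werc.rc, werclib.rc, tpl/_debug.tpl). A
        site that still wants HTML escaping could carry its own copy of the dropped
        one-liner in a locally maintained rc file; this is a sketch, not something
        the fork provides:

            # local re-add of the helper removed from bin/cgilib.rc
            fn escape_html { sed 's/&/\&amp;/g; s/</\&lt;/g; s/>/\&gt;/g' $* }
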
       diff --git a/bin/corehandlers.rc
       @@ -12,7 +12,7 @@ fn nav_tree {
            # /./ to deal with p9p's ls failure to follow dir symlinks otherwise
            ls -F $sitedir/./$req_paths_list >[2]/dev/null \
                | { 
       -            sed $dirfilter'/\/[^_.\/][^\/]*(\.(md|txt|html|1)|\/)$/!d; s!^'$sitedir'!!; '$dirclean 
       +            sed $dirfilter'/\/[^_.\/][^\/]*(\.(md|1)|\/)$/!d; s!^'$sitedir'!!; '$dirclean 
                    if(! ~ $#synth_paths 0) echo $synth_paths | tr ' ' $NEW_LINE 
                } | sort -u | awk -F/ ' 
            function p(x, y, s) { for(i=0; i < x-y; i+=1) print s }
       @@ -44,45 +44,16 @@ fn nav_tree {
            echo '</ul>'
        }
        
       -fn link_bar {
       -    if(~ $1 -t) {
       -        echo '<p class="sideBarTitle">'$2'</p>'
       -        shift; shift
       -    }
       -    echo '<ul>'
       -    while(! ~ $#* 0) {
       -        echo '<li><a href="'$2'">- '$1'</a></li>'
       -        shift; shift
       -    }
       -    echo '</ul>'
       -}
       -
        fn md_handler { $formatter $1 }
        
        fn tpl_handler { template $* }
        
       -fn html_handler {
       -    # body states: 0 = no <body> found, 2 = after <body>, 1 = after <body></body>, -1 = after </body>
       -    awk 'gsub(".*<[Bb][Oo][Dd][Yy][^>]*>", "") > 0 {body=2}
       -        gsub("</ *[Bb][Oo][Dd][Yy][^>]*>.*", "") > 0 {print; body=body-1}
       -        body==2 {print}
       -        body==0 {buf=buf "\n" $0}
       -        END {if(body<=0) {print buf}}' < $1
       -}
       -
        fn man_handler {
            echo '<pre>'
            $man_formatter $1
            echo '</pre>'
        }
        
       -fn txt_handler {
       -    # Note: Words are not broken, even if they are way beyond 82 chars long
       -    echo '<pre>'
       -    sed 's/</\&lt;/g; s/>/\&gt;/g' < $1 | fmt -l 82 -j
       -    echo '</pre>'
       -}
       -
        fn dir_listing_handler {
            d=`{basename -d $1}
            if(~ $#d 0)
       @@ -93,14 +64,7 @@ fn dir_listing_handler {
            echo '</ul>'
        }
        
       -fn notices_handler {
       -    for(type in notify_errors notify_notes notify_success)
       -        for(n in $$type)
       -            echo '<div class="'$type'"><b>'$"n'</b></div>'
       -}
       -
        fn setup_handlers {
       -
            if(test -f $local_path.md) {
                local_file=$local_path.md
                handler_body_main=(md_handler $local_file)
       @@ -113,26 +77,13 @@ fn setup_handlers {
                local_file=$local_path.tpl
                handler_body_main=(tpl_handler $local_file)
            }
       -    if not if(test -f $local_path.html) {
       -        local_file=$local_path.html
       -        handler_body_main=(html_handler $local_file)
       -    }
       -    # Global tpl (eg sitemap.tpl), should take precedence over txt handler!
            if not if(test -f tpl^$req_path^.tpl)
       -        # XXX Should we set $local_file for global .tpls?
                handler_body_main=(tpl_handler tpl^$req_path^.tpl)
       -    if not if(test -f $local_path.txt) {
       -        local_file=$local_path.txt
       -        handler_body_main=(txt_handler $local_file)
       -    }
            if(! ~ $#handler_body_main 0)
                { } # We are done
            # Dir listing
            if not if(~ $local_path */index)
                handler_body_main=(dir_listing_handler $req_path)
       -    # Canonize explicit .html urls, the web server might handle this first!
       -    if not if(~ $local_path *.html && test -f $local_path)
       -        perm_redirect `{ echo $req_path|sed 's/.html$//' }
            # Fallback static file handler
            if not if(test -f $local_path)
                static_file $local_path
       @@ -153,4 +104,3 @@ fn setup_404_handler {
        fn run_handlers { for(h in $*) run_handler $$h }
        fn run_handler { $*(1) $*(2-) }
        
       -
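
        With html_handler and txt_handler gone, setup_handlers only dispatches .md
        files, .tpl files, directory listings and plain static files. The calling
        convention is unchanged: a handler is a list of (function args ...) executed
        by run_handler. A deployment that still serves .txt pages could restore the
        dropped handler in its own copy of corehandlers.rc, along the lines of the
        deleted code, plus a matching test -f $local_path.txt branch in
        setup_handlers; a sketch:

            fn txt_handler {
                # escape < and > and wrap the text in <pre>, as the removed handler did
                echo '<pre>'
                sed 's/</\&lt;/g; s/>/\&gt;/g' < $1 | fmt -l 82 -j
                echo '</pre>'
            }
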
       diff --git a/bin/werc.rc
       @@ -11,8 +11,8 @@ difs=$ifs # Used to restore default ifs when needed
        
        # Expected input: ls -F style, $sitedir/path/to/files/
        #          <ls -F+x><symlink hack><Useless?><hiden files  >
       -dirfilter='s/\*$//; s,/+\./+,/,g; s,^\./,,; /\/[._][^\/]/d; /'$forbidden_uri_chars'/d; /\/sitemap\.xml$/d; /\/index\.(md|html|txt|tpl)$/d; /\/(robots|sitemap)\.txt$/d; /_werc\/?$/d; '
       -dirclean=' s/\.(md|html|txt|1)$//; '
       +dirfilter='s/\*$//; s,/+\./+,/,g; s,^\./,,; /\/[._][^\/]/d; /'$forbidden_uri_chars'/d; /\/sitemap\.xml$/d; /\/index\.(md|tpl)$/d; /\/(robots|sitemap)\.txt$/d; /_werc\/?$/d; '
       +dirclean=' s/\.(md|1)$//; '
        
        # Careful, the proper p9p path might not be set until initrc.local is sourced
        path=(. $PLAN9/bin ./bin /bin /usr/bin) 
       @@ -45,12 +45,6 @@ fn werc_exec_request {
            local_file=''
            ifs='/' { args=`{echo -n $req_path} }
        
       -    # Preload post args for templates where cgi's stdin is not accessible
       -    if(~ $REQUEST_METHOD POST) {
       -        load_post_args
       -        login_user
       -    }
       -
            if(~ $req_path */index)
                perm_redirect `{echo $req_path | sed 's,/index$,/,'}
        
       diff --git a/bin/werc_errlog_wrap.rc
       @@ -1,5 +0,0 @@
       -#!/usr/local/plan9/bin/rc
       -
       -# This is a wrapper script for broken http servers like recent lighttpd versions which throw away cgi's stderr.
       -
       -./werc.rc >>[2]/tmp/wlog.txt
       diff --git a/bin/wercconf.rc
       @@ -5,15 +5,3 @@ fn conf_perm_redirect {
            if not
                ll_addh perm_redir_patterns $1 $2
        }
       -
       -fn conf_hide_paths {
       -    for(i in $*)
       -       dirfilter=$dirfilter^'/'^`{echo $sitedir$conf_wd$i|sed 's!/+!\\/!g'}^'/d; '
       -}
       -
       -# Usually will be called from within conf_enable_foo 
       -fn conf_enable_app {
       -    # Note: maybe we should add test -d apps/$1/?
       -    if(! ~ $1 $enabled_apps)
       -        enabled_apps=( $enabled_apps $1 )
       -}
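
        conf_hide_paths and conf_enable_app are dropped, but conf_perm_redirect
        survives, so per-site or per-directory config files can still register
        permanent redirects. A hypothetical config line (both paths are made up,
        and the exact matching semantics are whatever werc.rc does with the
        perm_redir_patterns list shown above):

            conf_perm_redirect '/old_page' '/new_page'
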
       diff --git a/bin/werclib.rc
       @@ -7,136 +7,16 @@ fn get_tpl_file {
        
        fn template { awk -f bin/template.awk $* | rc $rcargs }
        
       -# Auth code
       -# TODO: check http://cookies.lcs.mit.edu/pubs/webauth:tr.pdf
       -allowed_user_chars='[a-zA-Z0-9_]'
       -# Cookie format: WERC_USER: name:timestamp:hash(name.timestamp.password)
       -# login_user can't be used from a template because it sets a cookie 
       -fn login_user {
       -    # Note: we set the cookie even if it is already there.
       -    if(get_user $*)
       -        set_cookie werc_user $"logged_user^':0:'^$"logged_password
       -}
       -
       -# Check login status, if called with group arg we check membership too
       -fn check_user {
       -    get_user
       -    g=($* admin)
       -    _status=$status
       -    if(! ~ $"_status '')
       -        _status=(Not logged in: $"_status)
       -    if not if(! ~ $#* 0 && ! ~ $logged_user $* && ! grep -s '^'^$logged_user^'$' $werc_root/etc/users/$g/members >[2]/dev/null)
       -        _status=(User $logged_user not in: $*)
       -    status=$_status
       -}
       -
       -# If not logged in, try to get user login info from POST or from cookie
       -fn get_user {
       -    if(~ $#logged_user 0) {
       -        if(~ $#* 2) {
       -            user_name=$1 
       -            user_password=$2
       -        }
       -        if not if(~ $REQUEST_METHOD POST)
       -            get_post_args user_name user_password
       -
       -        if(~ $#user_name 0) { 
       -            ifs=':' { cu=`{ifs=$difs {get_cookie werc_user} | tr -d $NEW_LINE} }
       -            if(! ~ $#cu 0) {
       -                user_name=$cu(1) 
       -                user_password=$cu(3)
       -            }
       -        }
       -        auth_user $user_name $user_password
       -    }
       -    if not
       -        status=()
       -}
       -
       -# Check if user_name and user_password represent a valid user account
       -# If valid, 'log in' by setting logged_user
       -fn auth_user {
       -    user_name=$1
       -    user_password=$2
       -
       -    pfile=$werc_root/etc/users/$"user_name/password
       -    if(~ $#user_name 0 || ~ $#user_password 0)
       -        status=('Auth: missing user name or pass: '^$"user_name^' / '^$"user_password)
       -    if not if(! test -f $pfile)
       -        status=('Auth: cant find '^$pfile)
       -    if not if(! test -s $pfile || ! ~ $user_password `{cat $pfile})
       -        status=('Auth: Pass '$user_password' doesnt match '^`{cat $pfile})
       -    if not {
       -        logged_user=$user_name
       -        logged_password=$user_password
       -        dprint Auth: success
       -        status=()
       -    }
       -}
       -
       -fn user_controls {
       -    echo User: $"logged_user
       -}
       -
       -
       -# .md '(meta-)data' extract
       -fn get_md_file_attr {
       -    sed -n '/^\* '$2': /p; /^\* '$2': /q; /^$/q' < $1
       -}
       -
       -
        # File title extraction
        fn get_md_title {
       -    #sed 's/^(................................................................[^ ]*).*$/\1/g; 1q' < $1 
            sed -n -e '1N; /^.*\n===*$/N; /.*\n===*\n *$/!b' -e 's/\n==*\n//p' < $1
        }
        
       -fn get_html_title {
       -    t=`{sed -n '32q; s/^.*<[Tt][Ii][Tt][Ll][Ee]> *([^<]+) *(<\/[Tt][Ii][Tt][Ll][Ee]>.*)?$/\1/p' < $1}
       -
       -    # As a backup we might want to pick the first 'non-tag' text in the file with:
       -    if(~ $"t '')
       -        t=`{sed -n -e 's/^[         ]*(<[^>]+>)*([^<]+).*/\2/p; 32q' < $1 | sed 1q}
       -
       -    echo $t
       -}
       -
        fn get_file_title {
            if (~ $1 *.md)
                get_md_title $1
       -    if not if(~ $1 *.html)
       -        get_html_title $1
            if not if(~ $1 */) {
                if(test -f $1/index.md)
                    get_md_title $1/index.md
       -        if not if(test -f $1/index.html)
       -            get_html_title $1/index.html
            }
        }
       -
       -
       -##########################################################################
       -##########################################################################
       -#app_blog_methods = ( _post index.rss )
       -#fn app_blog__post {
       -#    echo
       -#}
       -#
       -#app_blog___default {
       -#    if (~ $blog)
       -#    call_app blogpost
       -#}
       -#
       -## --
       -#app_blogpost_methods = ( comment  _edit )
       -#
       -#fn app_blogpost_comment {
       -#    call_app comments
       -#}
       -#
       -## --
       -#app_comments_methods = ( _post _edit )
       -#
       -#fn app_comments___default {
       -#
       -#}
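
        With get_html_title gone, page titles now come only from get_md_title, which
        expects a Setext-style heading (title line, a line of = signs, then a blank
        line) at the top of the .md file. For example, this input yields "Some page"
        as the title:

            Some page
            =========

            The body text starts here.
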
       diff --git a/etc/initrc
       @@ -1,35 +1,14 @@
       -# This file contains the default werc settings.
       -#
       -# DO NOT EDIT, to customize copy to etc/initrc.local and edit at will.
       -#
       -# Some settings can also be set for a specific site or directory in their
       -# respective _werc/config or their $masterSite/_werc/config file.
       -
        # General options
        
       -# Location of your Plan 9 from User Space installation (usually /usr/local/plan9)
        plan9port=$PLAN9
        #plan9port=/usr/local/plan9
        
       -# If you use 9base, it should point to your 9base root, try for example:
       -#plan9port=/usr/lib/9base # This is the default 9base install path in Debian.
       -
       -# If rc is not installed as /usr/local/plan9/bin/rc you will also need to change
       -# the #! line in bin/werc.rc!
       -
        # Path, make sure the plan9port /bin directory is included before /bin
        # Keep '.' in path! It is needed.
        path=($plan9port/bin . ./bin ./bin/contrib /bin /usr/bin)
        
       -# Set this to your favorite markdown formatter, eg., markdown.pl (fltr_cache
       -# takes as an argument a filter, in the default configuration markdown.pl, that
       -# caches output) Note that some werc components assume a markdown-like
       -# formatter, but all major functionality should should be formatter agnostic.
       -formatter=(fltr_cache markdown.pl)
       +formatter=(fltr_cache markdown)
        man_formatter=(troff -N -man)
       +debug=()
        
       -# Enable debugging, to disable set to ()
       -debug=true
       -
       -# Globally enabled apps
       -enabled_apps=( )
       +sitesdir='/var/www/sites'
       diff --git a/etc/initrc.local
       @@ -1,4 +0,0 @@
       -sitesdir='/var/www/sites'
       -formatter=(fltr_cache markdown)
       -man_formatter=(troff -N -man)
       -debug=()
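
        The stock etc/initrc.local is removed and its values are folded into
        etc/initrc above. The path comment in bin/werc.rc still refers to sourcing
        an initrc.local, so, assuming that hook remains, deployment-specific
        overrides can live in a local etc/initrc.local; example values only:

            # etc/initrc.local sketch - these values are illustrative, not defaults
            sitesdir='/var/www/mysites'
            formatter=(fltr_cache markdown.pl)
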
       diff --git a/tpl/_debug.tpl
       @@ -1,29 +0,0 @@
       -% if(! ~ $#debug_shell 0) {
       -<form method="POST" name="prompt">
       -<input size="80" type="text" name="command" value="%($"post_arg_command%)">
       -<input type="submit" Value="Run">
       -</form>
       -<script language="javascript"><!--
       -document.prompt.command.focus()
       -//--></script>
       -
       -%{
       -fn evl {
       -    # Buffering is probably messing this up:
       -    #rc -c 'flag x +;{'^$post_arg_command'} |[2] awk ''{print ">> "$0}'''
       -    rc -c 'flag s +; flag x +;'^$post_arg_command
       -}
       -    if(! ~ $#post_arg_command 0 && ! ~ $#post_arg_command '') {
       -        echo '<hr><pre>'
       -        evl | escape_html |[2] awk '{print "<b>"$0"</b>"}' 
       -        echo '</pre>'
       -    }
       -%}
       -% }
       -
       -<hr><pre>
       -% env | escape_html
       -</pre><hr>
       -
       -% umask
       -
       diff --git a/tpl/sitemap.tpl
       @@ -55,9 +55,6 @@ if(test -s $tmpfilex) {
                cat $tmpfilex
                rm $tmpfilex &
                echo '</urlset>'
       -        # TODO Enable automaic search engine update notification.
       -        #hget 'http://google.com/ping?sitemap='^`{url_encode $base_url'/sitemap.gz'} > /dev/null
       -    
            } | gzip > $sitedir/sitemap.gz &
            #} > $sitedir/sitemap.xml &
        }
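
        The dropped TODO depended on the url_encode helper removed from
        bin/cgilib.rc. If search-engine notification is still wanted, it can be done
        by hand after the sitemap is regenerated, assuming the sitemap URL needs no
        escaping; a sketch using p9p's hget with a made-up site URL:

            hget 'http://google.com/ping?sitemap=http://example.com/sitemap.gz' >/dev/null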