#!/usr/bin/env zsh
#
# WebNomad, your slick and static website publisher
#
# Copyright (C) 2012-2014 Denis Roio
#
# This source code is free software; you can redistribute it and/or
# modify it under the terms of the GNU Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This source code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# Please refer to the GNU Public License for more details.
#
# You should have received a copy of the GNU Public License along with
# this source code; if not, write to:
# Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

# NOTE(review): this copy of the script is damaged — the original
# newlines were collapsed and every run of literal HTML (the bodies of
# several "cat <<EOF" heredocs, plus the <markdown> tags) was stripped
# during extraction. The code below has been re-flowed and commented;
# every stripped span is flagged with a NOTE(review). The heredoc
# bodies must be restored from version control before this can run.

VERSION=0.5
QUIET=0

# full path to webnomad's system
SYS="`pwd`/webnomad"
source $SYS/utils

# fill path to the source website root
DIR="`pwd`"

CMD="$1"

# refuse to run outside an initialized website directory
{ test -r config.zsh } || {
    error "Directory not configured for WebNomad.
First use /webnomad/init"
    exit 1
}

####################################
# Base configuration

# base to be added to all urls
# used by test to have all css and js in place
baseurl=""

# destination directory to render
# also used by test to substitute pub/
destination="pub"

# web root for all urls
WEB_ROOT=""

# prefix to all indexed files
# this can be a full path on the filesystem
INDEX_PREFIX=""

# thumbnail size
THUMB_SIZE=256

# the user's config.zsh may override any of the defaults above
source config.zsh

####################################

typeset -h dst          # full path to destination for render_ functions
typeset -alU includecss # array of css files to include
typeset -alU includejs  # array of js files to include
typeset -alU fonts      # array of available fonts

# string match case insensitive
unsetopt CASE_GLOB

notice "Rendering your website"
act "Title: $B $TITLE $r"

# setup paths for test: render into test/ with file:// urls so the
# result can be browsed locally without a webserver
{ test "$CMD" = "test" } && {
    LINK_PREFIX="file://`pwd`"
    WEB_ROOT="file://`PWD=${SYS} pwd`/test"
    baseurl="$WEB_ROOT"
    destination="$DIR/test"
    notice "Test settings for indexing"
    act "SYS = $SYS"
    act "WEB_ROOT = $WEB_ROOT"
}

# Expand every literal ${baseurl} placeholder in the given file(s) to
# the configured $baseurl value, writing the result to stdout.
# NOTE(review): $@ is unquoted, so filenames containing spaces break;
# a $baseurl containing '@' would also corrupt the sed expression.
render_file() {
    sed -e "s@\${baseurl}@${baseurl}@g" $@
}

# Emit the HTML page header on stdout: head markup with $TITLE, the
# stylesheets (site custom.css, each css in $includecss — also copied
# into $destination/css — and generated custom.fonts.css if present),
# any extra strings passed as arguments, then the user's
# tmpl/header.html run through render_file.
render_header() {
    # NOTE(review): each "cat < ... EOF" below is the residue of a
    # "cat <<EOF" heredoc whose HTML body was stripped; restore it
    # from version control.
    cat < $TITLE EOF
    { test -r "$DIR"/views/css/custom.css } && {
        cat < EOF
    }
    # add css needed for internal functions
    for c in $includecss; do
        cat < EOF
        cp "$SYS"/css/${c} "${destination}"/css/
    done
    { test -f "${destination}"/css/custom.fonts.css } && {
        cat < EOF
    }
    # add any argument string to header
    { test "$1" = "" } || { print "${@}"; print }
    # add the user configured header
    render_file "$DIR"/tmpl/header.html
}

# Emit the page footer on stdout: the user's tmpl/footer.html, any
# extra strings passed as arguments, each js file in $includejs
# (also copied into $destination/js when present in $SYS/js), the
# test footer in test mode, and the closing markup.
render_footer() {
    render_file "$DIR"/tmpl/footer.html
    # NOTE(review): stripped heredoc residue, see render_header
    cat < EOF
    # add any string argument to the footer
    { test "$1" = "" } || { print "${@}"; print }
    # insert and copy all js files
    for i in $includejs; do
        cat < EOF
        { test -r "$SYS"/js/$i } && { cp "$SYS"/js/$i "$destination"/js }
    done
    # if test mode then render the test footer
    { test "$destination" = "test" } && { render_test_footer }
    cat < EOF
}

# Render an HTML stream from stdin to stdout, expanding any inline
# <markdown>...</markdown> sections through an external markdown
# parser (maruku, markdown or multimarkdown — whichever is in PATH).
render_html() {
    #######################################
    ## we support the tag inline
    # parses the html and put all stuff contained in tags
    # inside separate files, leaving a trace of them into the main html
    # (a line starting with tmp.md$RAND)
    #
    # NOTE(review): the literal "<markdown>" tags were stripped from
    # the comments above and from the first awk pattern below — "/^/"
    # was almost certainly "/^<markdown>/" (the closing
    # "/^<\/markdown>/" pattern survived). As written, every line
    # would open a markdown block; restore before use.
    tmp="tmp.$RANDOM"
    awk 'BEGIN { srand(); markdown=0; }
    /^/ { markdown=1; out="tmp.md" rand(); print out; next }
    /^<\/markdown>/ { markdown=0; next }
    { if(markdown==1) { print $0 >out; next }
      else { print $0 } }
    ' > $tmp
    # first pass marks the markdown parts and saves them separate
    mds=(`find . -name 'tmp.md*'`)
    { test "${#mds}" = "0" } || {
        # second pass substituted saved parts with rendered markdown
        act -n "${#mds} markdown fields "
        # check which markdown parser is available in PATH
        command -v maruku > /dev/null
        if [ "$?" = "0" ]; then
            parser="maruku --html-frag"
        else
            command -v markdown > /dev/null;
            if [ "$?" = "0" ]; then
                parser=markdown
            else
                command -v multimarkdown > /dev/null
                # NOTE(review): missing space before "]" makes this
                # test a syntax error when reached; should read
                # [ "$?" = "0" ]. Left as-is in this doc-only pass.
                if [ "$?" = "0"]; then
                    parser=multimarkdown
                fi
            fi
        fi
        # parses all html and renders each markdown in the html
        for i in $mds; do
            md=`basename $i`
            newtemp="tmp.$RANDOM"
            cat $tmp | awk '
            /^'"$md"'/ { system("cat '"$md"' | '"$parser"'"); next }
            { print $0; }' > $newtemp
            rm $tmp; tmp=$newtemp
        done
    }
    cat $tmp
    # clean up from temporary files
    rm -f tmp.*
}

# Render a browsable index page for every directory found under an
# archive tree. Should be called from inside the destination directory.
#   $1 - archive: directory to index (must exist)
#   $2 - diralias: alias passed through to index_long_preview
# Helpers func/error come from $SYS/utils; index_long_preview comes
# from $SYS/index, sourced by the caller before invoking this.
recursive_index() {
    # render_header ""
    { test -d "$1" } || {
        error "cannot index directory not found: $1"; return 1 }
    archive="$1"
    dirs=`find "$archive" -type d`
    basedir="/`basename "$archive"`/"
    diralias="$2"
    dest="`pwd`"
    func "index archive $archive"
    func "index basedir: $basedir"
    func "index aliasdir: $diralias"
    # copy default icons
    mkdir -p $destination/icons
    cp $SYS/icons/$THUMB_SIZE/image-x-generic.png $destination/icons
    cp $SYS/icons/$THUMB_SIZE/symlink.png $destination/icons
    cp $SYS/icons/$THUMB_SIZE/folder.png $destination/icons
    # one index page per directory found inside the archive
    for d in ${(f)dirs}; do
        # path of d relative to the archive root
        dir="${d##*${basedir}}"
        func "actual file path: $d"
        func "relative path: $dir"
        func "destination:
${dest}/${dir}"
        mkdir -p "${dest}/${dir}"
        pushd "${dest}/${dir}"
        render_header > index${EXTENSION}
        render_file "$DIR"/tmpl/navbar.html >> index${EXTENSION}
        # NOTE(review): "cat <>" here and below is the residue of
        # "cat <<EOF >>" whose HTML body was stripped; restore it.
        cat <> index${EXTENSION}
EOF
        # takes 3 arguments: base dir, alias dir and indexed directory
        # we must check if its the parent directory
        index_long_preview "${archive}" "${diralias}" "${dir}" >> index${EXTENSION}
        # if [ "$dir" = "" ]; then
        #     index_dir "${d}" "${diralias}" "" >> index${EXTENSION}
        # else
        #     index_dir "${d%%${dir}*}" "${diralias}" "${dir}" >> index${EXTENSION}
        # fi
        cat <> index${EXTENSION}

EOF
        render_footer >> index${EXTENSION}
        popd
    done
}

# Read publishing metadata from the first three lines of stdin.
# Lines of the form "# title My page" (likewise description, keywords)
# have their words from field 3 onward assigned, via eval, to the
# shell variables $title, $description and $keywords — each value
# carries a trailing space from the awk printf loop.
# NOTE(review): eval of file-derived text — acceptable only because
# the input is the site author's own view files.
read_meta() {
    tmp=`head -n 3 | awk '
    !/^#/ { next }
    /title/ { printf "title=\""; for(i=3;i<=NF;i++) printf "%s ", $i; printf "\";" }
    /description/ { printf "description=\""; for(i=3;i<=NF;i++) printf "%s ", $i; printf "\";" }
    /keywords/ { printf "keywords=\""; for(i=3;i<=NF;i++) printf "%s ", $i; printf "\";" }
    '`
    eval "$tmp"
}

# when sourced by another script, stop here: only definitions above
{ test "$1" = "source" } && { return 0 }

{ test "$1" = "test" } && {
    act "Local test rendering inside test/"
    source $SYS/test
}

# prepare all fonts
source $SYS/fonts

# Main
mkdir -p ${destination}/css
mkdir -p ${destination}/js

# default webserver configuration for the published tree
cat << EOF > ${destination}/.htaccess
DirectoryIndex index index.html index.php
DefaultType text/html
EOF

act -n "Clean up all temp files ... "
temps=(`find "$destination" -type f -name 'temp-*'`)
for t in $temps; do rm -f $t; done
print "done"

# publish all .txt files as-is
# useful for robots.txt
txts=(`find views -maxdepth 1 -type f -name '*.txt'`)
for t in $txts; do
    txt=`basename $t`
    dst="${destination}/$txt"
    act "publishing plain text: $txt"
    cp $t ${destination}/$txt
done

# render all HTML views
htmls=(`find views -type f -name '*.html'`)
for src in $htmls; do
    # read meta commands
    cat ${src} | read_meta
    # compute destination file
    dst="${destination}/`basename ${src%.*}`${EXTENSION}"
    render_header > $dst
    # close as nothing else is needed
    # NOTE(review): "cat <>" here and below is stripped heredoc
    # residue ("cat <<EOF >> $dst"); the HTML bodies must be restored.
    cat <> $dst
EOF
    # don't forget the navbar
    render_file "$DIR"/tmpl/navbar.html >> $dst
    cat <> $dst

EOF
    # render html
    act -n "Html rendering: $B $dst $r"
    cat $src | render_html >> $dst
    cat <> $dst

EOF
    # includejs=(js/bootstrap.min.js)
    render_footer >> $dst
    act "done"
done

# render all image galleries
source $SYS/gallery
gals=(`find views -type f -name '*.gal'`)
gals+=(`find views -type f -name '*.gallery'`)
for src in $gals; do
    cat ${src} | read_meta
    dst="${destination}/`basename ${src%.*}`"
    act -n "Gallery rendering: $B $dst $r ... "
    cat $src | render_gallery > $dst
    print "done"
done

# render all directory indexes
idxs=(`find views -type f -name '*.idx'`)
idxs+=(`find views -type f -name '*.index'`)
{ test ${#idxs} = 0 } || {
    source "$SYS/index"
    for idx in $idxs; do
        dst=`basename ${idx%%.*}`
        notice "Directory index rendering to: $dst"
        dirs=`cat ${idx}`
        for d in ${(f)dirs}; do
            mkdir -p "${destination}/${dst}"
            pushd "${destination}/${dst}"
            # zsh (ws: :) subscript flag splits the line on spaces:
            # word 1 is the directory, word 2 its alias
            recursive_index "${d[(ws: :)1]}" "${d[(ws: :)2]}"
            popd
        done
    done
    # copy icons only if needed
    rsync -rlt "$SYS/icons" "${destination}/"
}

# copy to destination all subdirs in views/
for m in `find views -mindepth 1 -type d `; do
    act -n "publishing $B $m $r ... "
    rsync -r $m ${destination}/
    print "done"
done

# if the whole website is a "slideshow" (set in config.zsh) then we start with
# a full screen slideshow of all uploaded photos, cycling random every time.
# galleries are supported and can be linked in menu and pages.
{ test "$WEBSITE" = "slideshow" } && {
    notice "Site is configured as slideshow"
    # generate a list of all images (removing duplicates)
    act "Indexing all images ... "
    find pub -iname '*.jpg' | sed -e 's/^pub\///g' -e 's/^.\/pub\///g' -e "s@'@@g" \
        | sort | uniq \
        | render_gallery views/index.html > ${destination}/index
}

notice "Website refreshed."