odd-wiki-hive odd-list-wiki de-odd-Zentrum-wiki en-odd-center-wiki fr-odd-centre-wiki it-odd-centro-wiki Gespräch · talk
bavardage
all wikis all recent changes recent changes local names new page special odd-wiki-hive

maintenance

#!/bin/bash
# Nightly maintenance + backup driver for a set of Oddmuse wiki "hives".
# Environment knobs: VERBOSE (non-empty => progress messages),
# NOMAINT (non-empty => skip the per-namespace maintenance action).
PATH=/bin:/usr/bin

if [[ -n $VERBOSE ]]; then echo Verbose On; fi

# add support for multiple wikis -- mutante
# Space-separated list of wiki host basenames; the main loop below relies
# on word-splitting this string.  (A previous "declare -a" here was
# misleading: the string assignment stored everything in element 0, so
# this was never a real array.)
wiki_names="oddwiki dikiwiki kabowiki eartwiki obmwiki"

for wiki_name in $wiki_names
do
    echo "backing up $wiki_name..."

    # Source code location of this script (uploaded to the wiki below).
    SOURCE=/home/mattis/bin/maintenance

    # Where the backup archive file ends up.
    ARCHIVE=/var/www/vhosts/${wiki_name}.org/httpdocs

    # CGI script directory (odd.pl lives here) for faster maintenance.
    CGI_BIN=/var/www/vhosts/${wiki_name}.org/cgi-bin

    # Wiki data directory.
    DATA_DIR=/var/www/vhosts/${wiki_name}.org/data

    # URL of the wiki script: oddwiki serves from /odd, everything else
    # from /hive.  ([[ ]] instead of [ — '==' inside [ is a bashism.)
    if [[ $wiki_name == "oddwiki" ]]; then
	SCRIPT_NAME=http://www.${wiki_name}.org/odd
    else
	SCRIPT_NAME=http://www.${wiki_name}.org/hive
    fi

    # Extract the administrator password from the config file.  Should
    # look as follows: $AdminPass="foo";
    PW=$(sed -n 's/\$AdminPass="\(.*\)";/\1/p' < "$DATA_DIR/config")

    # Find all the namespaces in the "hive" (capitalized top-level dirs).
    NS=$(find "$DATA_DIR/" -maxdepth 1 -type d -name '[A-Z]*' | sort)

    # Maintenance action for all namespaces.  This code used to sleep
    # between calls to give other processes a chance to run. We're running
    # perl nice, now, however.
    # Guard the cd: previously a missing vhost dir meant odd.pl was run
    # from whatever directory we happened to be in.
    cd "$CGI_BIN" || { echo "cannot cd to $CGI_BIN, skipping $wiki_name" >&2; continue; }
    if [[ -z $NOMAINT ]]; then
	if [[ -n $VERBOSE ]]; then echo Starting Maintenance; fi
	MAINT=$DATA_DIR/maintenance
	mkdir -p "$MAINT"
	for f in $NS; do
	    f=$(basename "$f")
	    if [[ -n $VERBOSE ]]; then echo "$f"; fi
	    # tail -n +7 strips the HTTP/HTML header the CGI prints.
	    nice perl odd.pl action=maintain ns="$f" | tail -n +7 > "$MAINT/$f.html"
	done

	# Run maintenance on the main namespace (same as above without
	# namespace).  This will make sure that pages marked for deletion that
	# are ready to be deleted are in fact deleted.  We'll delete the wikis
	# without pages down below.
	if [[ -n $VERBOSE ]]; then echo Main; fi
	nice perl odd.pl action=maintain | tail -n +7 > "$MAINT/Main.html"
	if [[ -n $VERBOSE ]]; then echo Done; fi
    fi
    cd

    # Copy the default homepage (and its per-language variants) into
    # README files in the data directory.
    if [[ -n $VERBOSE ]]; then echo Installing DefaultHomePage; fi
    wget -O "$DATA_DIR/README" \
	-q "$SCRIPT_NAME/raw/DefaultHomePage"
    for hp in $(find "$DATA_DIR/page/" -name 'DefaultHomePage??.pg'); do
	page=$(basename "$hp" .pg)
	# Last two characters of the page name are the language code.
	lang=$(echo "$page" | sed -n 's/.*\(..\)$/\1/p' | tr '[A-Z]' '[a-z]')
	wget -O "$DATA_DIR/README.$lang" \
	    -q "$SCRIPT_NAME/raw/$page"
    done
    # overwrite the redirect in the english copy
    cp -f "$DATA_DIR/README" "$DATA_DIR/README.en"

    # Status -- find empty wikis by counting the number of page files.
    # When counting, ignore symlinks because BannedContent.pg is a symlink
    # (see above).  Also post the data collected on the Status page.  Make
    # sure that nothing in the rest of this script visits the wikis lest
    # their data directories be recreated.
    if [[ -n $VERBOSE ]]; then echo Computing status and deleting empty wikis; fi
    STATUS='Current Status:\n'
    for f in $NS; do
	PG=$( (test -d "$f/page" && find "$f/page" -name '*.pg' -type f) | wc -l)
	NM=$(basename "$f")
	if [[ $PG == '0' ]]; then
	    # Empty namespace: delete it; only report if deletion failed.
	    if rm -rf "$f"; then
		true
	    else
		STATUS="$STATUS\n* $NM had 0 pages but deletion failed"
	    fi
	else
	    LAST_FILE=$(ls -1 -t "$f"/page/*/*.pg | head -1)
	    MOD=$(date -r "$LAST_FILE" --utc '+%Y-%m-%d %H:%M')
	    STATUS="$STATUS\n* $NM:HomePage has $PG pages, last modified $MOD"
	    AGE=$(date '+%s' -r "$LAST_FILE")
	    NOW=$(date '+%s')
	    DAYS=$(( (NOW - ${AGE:-0}) / 60 / 60 / 24 ))
	    if [ "$DAYS" -gt 366 ]; then
		STATUS="$STATUS, this was more than a year ago"
	    elif [ "$DAYS" -gt 183 ]; then
		STATUS="$STATUS, this was more than half a year ago"
	    fi
	fi
    done

    # Translate \n to newlines and post status.  Quoted so that spacing
    # inside the status text is preserved verbatim.
    if [[ -n $VERBOSE ]]; then echo Posting status; fi
    STATUS=$(echo -e "$STATUS")
    curl -F "summary=Page count for all namespaces" \
	-F "title=Status" \
	-F "text=$STATUS" \
	-F "username=CronJob" \
	-F "pwd=$PW" \
	"$SCRIPT_NAME"

    # Documenting what we do
    if [[ -n $VERBOSE ]]; then echo Installing script and config files; fi

    # Upload the maintenance script as a file.
    curl -F "summary=Script update" \
	-F "title=maintenance" \
	-F "text=<$SOURCE" \
	-F "username=CronJob" \
	-F "pwd=$PW" \
	"$SCRIPT_NAME"

    # Upload config but without the password.
    CONFIG=$(sed -e "s/$PW/*secret*/" < "$DATA_DIR/config")
    curl -F "summary=Config update" \
	-F "title=config" \
	-F "text=$CONFIG" \
	-F "username=CronJob" \
	-F "pwd=$PW" \
	"$SCRIPT_NAME"

    # Write the weekly archive (year-week stamp) next to the docroot.
    if [[ -n $VERBOSE ]]; then echo Creating new archive; fi
    cd "$DATA_DIR/.." || { echo "cannot cd to $DATA_DIR/.., skipping archive" >&2; continue; }
    DIR=$(basename "$DATA_DIR")
    DATE=$(date "+%Y-%V")
    tar czf "$ARCHIVE/${wiki_name}-$DATE.tar.gz" "$DIR"
    # add a link to the current .tar.gz - mutante
    # Fixed: a stray '=' made the link point at a nonexistent
    # "${wiki_name}=-$DATE" file, and without -f the ln call failed on
    # every run after the first because the link already existed.
    ln -sf "$ARCHIVE/${wiki_name}-$DATE.tar.gz" "$ARCHIVE/${wiki_name}.tar.gz"

done

The same page on other sites:
DikiWiki:maintenance EArtWiki:maintenance KaboWiki:maintenance ObmWiki:maintenance OddWiki:maintenance