From 1caeb00a5a0b9894a582514ef385b71cab195092 Mon Sep 17 00:00:00 2001
From: Harry Fuecks
Date: Sun, 7 Aug 2005 22:13:09 +0200
Subject: command line utilities for DokuWiki

This patch adds command line tools which should help people modify wiki
pages from the command line. Usage would be something like:

$ ./dokuwiki/bin/dwpage.php checkout wiki:syntax > /home/harryf/syntax.txt
$ vi syntax.txt
$ ./dokuwiki/bin/dwpage.php -m "Document new syntax" commit syntax.txt wiki:syntax

The second script, wantedpages.php, outputs a list of wiki ids, separated
by newlines. The idea is that it gets run by a cron job at regular
intervals and has its results piped to a text file; a sample crontab entry
is sketched after the patch.

darcs-hash:20050807201309-56ad0-7c2dbb2b14f794efad1b9875f4dd0c6a15f6df3f.gz
---
 bin/wantedpages.php | 129 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 129 insertions(+)
 create mode 100755 bin/wantedpages.php

diff --git a/bin/wantedpages.php b/bin/wantedpages.php
new file mode 100755
index 000000000..caa88dc78
--- /dev/null
+++ b/bin/wantedpages.php
@@ -0,0 +1,129 @@
+#!/usr/bin/php -d short_open_tag=on
+<?php
+/**
+ * wantedpages.php - lists wiki page ids which are linked to but do not exist yet
+ */
+if ('cli' != php_sapi_name()) die();
+if(!defined('DOKU_INC')) define('DOKU_INC',realpath(dirname(__FILE__).'/../').'/');
+require_once DOKU_INC.'inc/init.php';
+require_once DOKU_INC.'inc/common.php';
+require_once DOKU_INC.'inc/parserutils.php';
+require_once DOKU_INC.'inc/search.php';
+require_once DOKU_INC.'inc/cliopts.php';
+
+#------------------------------------------------------------------------------
+function usage() {
+    print "Usage: wantedpages.php [namespace]
+
+    Prints the ids of wanted pages (pages which are linked to but do
+    not exist yet), one per line. The optional namespace argument
+    limits the search to a subdirectory of the data directory.
+        -h, --help: get help
+";
+}
+
+#------------------------------------------------------------------------------
+define ('DW_DIR_CONTINUE',1);
+define ('DW_DIR_NS',2);
+define ('DW_DIR_PAGE',3);
+
+#------------------------------------------------------------------------------
+function dw_dir_filter($entry, $basepath) {
+    if ($entry == '.' || $entry == '..' ) {
+        return DW_DIR_CONTINUE;
+    }
+    if ( is_dir($basepath . '/' . $entry) ) {
+        if ( strpos($entry, '_') === 0 ) {
+            return DW_DIR_CONTINUE;
+        }
+        return DW_DIR_NS;
+    }
+    if ( preg_match('/\.txt$/',$entry) ) {
+        return DW_DIR_PAGE;
+    }
+    return DW_DIR_CONTINUE;
+}
+
+#------------------------------------------------------------------------------
+function dw_get_pages($dir) {
+    static $trunclen = NULL;
+    if ( !$trunclen ) {
+        global $conf;
+        $trunclen = strlen($conf['datadir'].':');
+    }
+
+    if ( !is_dir($dir) ) {
+        fwrite( STDERR, "Unable to read directory $dir\n");
+        exit(1);
+    }
+
+    $pages = array();
+    $dh = opendir($dir);
+    while ( FALSE !== ( $entry = readdir($dh) ) ) {
+        $status = dw_dir_filter($entry, $dir);
+        if ( $status == DW_DIR_CONTINUE ) {
+            continue;
+        } else if ( $status == DW_DIR_NS ) {
+            $pages = array_merge($pages, dw_get_pages($dir . '/' . $entry));
+        } else {
+            $page = array(
+                'id'=>substr(pathID($dir . '/' . $entry),$trunclen),
+                'file'=>$dir . '/' . $entry,
+            );
+            $pages[] = $page;
+        }
+    }
+    closedir($dh);
+    return $pages;
+}
+
+#------------------------------------------------------------------------------
+function dw_internal_links($page) {
+    global $conf;
+    $instructions = p_get_instructions(file_get_contents($page['file']));
+    $links = array();
+    $cns = getNS($page['id']);
+    $exists = FALSE;
+    foreach($instructions as $ins){
+        if($ins[0] == 'internallink' || ($conf['camelcase'] && $ins[0] == 'camelcaselink') ){
+            $mid = $ins[1][0];
+            resolve_pageid($cns,$mid,$exists);
+            if ( !$exists ) {
+                $links[] = $mid;
+            }
+        }
+    }
+    return $links;
+}
+
+#------------------------------------------------------------------------------
+$OPTS = Doku_Cli_Opts::getOptions(__FILE__,'h',array('help'));
+
+if ( $OPTS->isError() ) {
+    fwrite( STDERR, $OPTS->getMessage() . "\n");
+    exit(1);
+}
+
+if ( $OPTS->has('h') or $OPTS->has('help') ) {
+    usage();
+    exit(0);
+}
+
+$START_DIR = $conf['datadir'];
+
+if ( $OPTS->numArgs() == 1 ) {
+    $START_DIR .= '/' . $OPTS->arg(0);
+}
+
+#------------------------------------------------------------------------------
+$WANTED_PAGES = array();
+foreach ( dw_get_pages($START_DIR) as $WIKI_PAGE ) {
+    $WANTED_PAGES = array_merge($WANTED_PAGES,dw_internal_links($WIKI_PAGE));
+}
+$WANTED_PAGES = array_unique($WANTED_PAGES);
+sort($WANTED_PAGES);
+
+foreach ( $WANTED_PAGES as $WANTED_PAGE ) {
+    print $WANTED_PAGE."\n";
+}
+exit(0);
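
A sketch of the cron setup mentioned above; the schedule, install path and
output file are placeholders rather than part of the patch, so adjust them
to the local setup:

# run nightly at 03:15 and write the list of wanted pages to a report file
15 3 * * * /path/to/dokuwiki/bin/wantedpages.php > /path/to/wantedpages.txt

To restrict the check to a single namespace, pass its data subdirectory as
the optional argument, e.g.

$ ./dokuwiki/bin/wantedpages.php wiki > wanted-in-wiki.txt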