author     Harry Fuecks <hfuecks@gmail.com>  2005-08-07 22:13:09 +0200
committer  Harry Fuecks <hfuecks@gmail.com>  2005-08-07 22:13:09 +0200
commit     1caeb00a5a0b9894a582514ef385b71cab195092 (patch)
tree       8f35f27caf669c24e7b04fb3ed7f4265c288dce6 /bin/wantedpages.php
parent     836cbdc47d7360b9c9ed544c11d7f0720439bf44 (diff)
download   rpg-1caeb00a5a0b9894a582514ef385b71cab195092.tar.gz
           rpg-1caeb00a5a0b9894a582514ef385b71cab195092.tar.bz2
command line utilities for DokuWiki
This patch adds command line tools which should help people modify wiki pages from the command line. Usage would be something like:

$ ./dokuwiki/bin/dwpage.php checkout wiki:syntax > /home/harryf/syntax.txt
$ vi syntax.txt
$ ./dokuwiki/bin/dwpage.php -m "Document new syntax" commit syntax.txt wiki:syntax

The second script, wantedpages.php, outputs a list of wiki ids, separated by newlines. The idea is that it gets run by a cron job at regular intervals and has its results piped to a text file.

darcs-hash:20050807201309-56ad0-7c2dbb2b14f794efad1b9875f4dd0c6a15f6df3f.gz
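The cron setup mentioned above is not part of the patch itself; as a rough sketch, assuming a standard crontab, a nightly schedule, and /var/www/dokuwiki as the install path (all assumptions, not taken from this commit), an entry could look like:

# hypothetical crontab entry: rebuild the wanted-pages report every night at 03:00
0 3 * * * /var/www/dokuwiki/bin/wantedpages.php > /var/www/dokuwiki/data/wantedpages.txt

The script ships with a PHP shebang and executable permissions, so it can be invoked directly; it prints one wanted page id per line, which makes the resulting text file easy to feed to other tools.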
Diffstat (limited to 'bin/wantedpages.php')
-rwxr-xr-x  bin/wantedpages.php  129
1 file changed, 129 insertions, 0 deletions
diff --git a/bin/wantedpages.php b/bin/wantedpages.php
new file mode 100755
index 000000000..caa88dc78
--- /dev/null
+++ b/bin/wantedpages.php
@@ -0,0 +1,129 @@
+#!/usr/bin/php -d short_open_tag=on
+<?php
+#------------------------------------------------------------------------------
+if(!defined('DOKU_INC')) define('DOKU_INC',realpath(dirname(__FILE__).'/../').'/');
+require_once DOKU_INC.'inc/init.php';
+require_once DOKU_INC.'inc/common.php';
+require_once DOKU_INC.'inc/search.php';
+require_once DOKU_INC.'inc/cliopts.php';
+
+#------------------------------------------------------------------------------
+function usage() {
+ print "Usage: wantedpages.php [wiki:namespace]
+
+ Outputs a list of wanted pages (pages which have
+ internal links but do not yet exist).
+
+ If the optional [wiki:namespace] is not provided,
+ the script defaults to the root wiki namespace.
+
+ OPTIONS
+ -h, --help: display this help and exit
+";
+}
+
+#------------------------------------------------------------------------------
+define ('DW_DIR_CONTINUE',1);
+define ('DW_DIR_NS',2);
+define ('DW_DIR_PAGE',3);
+
+#------------------------------------------------------------------------------
+function dw_dir_filter($entry, $basepath) {
+ if ($entry == '.' || $entry == '..' ) {
+ return DW_DIR_CONTINUE;
+ }
+ if ( is_dir($basepath . '/' . $entry) ) {
+ if ( strpos($entry, '_') === 0 ) {
+ return DW_DIR_CONTINUE;
+ }
+ return DW_DIR_NS;
+ }
+ if ( preg_match('/\.txt$/',$entry) ) {
+ return DW_DIR_PAGE;
+ }
+ return DW_DIR_CONTINUE;
+}
+
+#------------------------------------------------------------------------------
+function dw_get_pages($dir) {
+ static $trunclen = NULL;
+ if ( !$trunclen ) {
+ global $conf;
+ $trunclen = strlen($conf['datadir'].':');
+ }
+
+ if ( !is_dir($dir) ) {
+ fwrite( STDERR, "Unable to read directory $dir\n");
+ exit(1);
+ }
+
+ $pages = array();
+ $dh = opendir($dir);
+ while ( FALSE !== ( $entry = readdir($dh) ) ) {
+ $status = dw_dir_filter($entry, $dir);
+ if ( $status == DW_DIR_CONTINUE ) {
+ continue;
+ } else if ( $status == DW_DIR_NS ) {
+ $pages = array_merge($pages, dw_get_pages($dir . '/' . $entry));
+ } else {
+ $page = array(
+ 'id'=>substr(pathID($dir . '/' . $entry),$trunclen),
+ 'file'=>$dir . '/' . $entry,
+ );
+ $pages[] = $page;
+ }
+ }
+ closedir($dh);
+ return $pages;
+}
+
+#------------------------------------------------------------------------------
+function dw_internal_links($page) {
+ global $conf;
+ $instructions = p_get_instructions(file_get_contents($page['file']));
+ $links = array();
+ $cns = getNS($page['id']);
+ $exists = FALSE;
+ foreach($instructions as $ins){
+ if($ins[0] == 'internallink' || ($conf['camelcase'] && $ins[0] == 'camelcaselink') ){
+ $mid = $ins[1][0];
+ resolve_pageid($cns,$mid,$exists);
+ if ( !$exists ) {
+ $links[] = $mid;
+ }
+ }
+ }
+ return $links;
+}
+
+#------------------------------------------------------------------------------
+$OPTS = Doku_Cli_Opts::getOptions(__FILE__,'h',array('help'));
+
+if ( $OPTS->isError() ) {
+ fwrite( STDERR, $OPTS->getMessage() . "\n");
+ exit(1);
+}
+
+if ( $OPTS->has('h') or $OPTS->has('help') ) {
+ usage();
+ exit(0);
+}
+
+$START_DIR = $conf['datadir'];
+
+if ( $OPTS->numArgs() == 1 ) {
+ $START_DIR .= '/' . $OPTS->arg(0);
+}
+
+#------------------------------------------------------------------------------
+$WANTED_PAGES = array();
+foreach ( dw_get_pages($START_DIR) as $WIKI_PAGE ) {
+ $WANTED_PAGES = array_merge($WANTED_PAGES,dw_internal_links($WIKI_PAGE));
+}
+$WANTED_PAGES = array_unique($WANTED_PAGES);
+sort($WANTED_PAGES);
+
+foreach ( $WANTED_PAGES as $WANTED_PAGE ) {
+ print $WANTED_PAGE."\n";
+}
+exit(0);
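As a usage sketch for the script added above: DOKU_INC is derived from the script's own location, so it picks up the DokuWiki configuration regardless of the current directory, and an optional namespace argument restricts the scan. The namespace name "wiki" and the output file below are illustrative only, assuming such a top-level namespace exists in the install.

$ cd /var/www/dokuwiki
$ ./bin/wantedpages.php -h
$ ./bin/wantedpages.php wiki > wanted_pages.txt

The output is one missing page id per line, de-duplicated and sorted, matching the array_unique/sort calls at the end of the script.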