From f5c2808ff13d9b9dce9a6e2f2ea3989ef686c15c Mon Sep 17 00:00:00 2001
From: Ben Coburn
Date: Wed, 5 Apr 2006 04:56:21 +0200
Subject: rewrite export URLs

This patch rewrites export URLs so that robots.txt can be used to
request that some (or all) export types are ignored by robots when
indexing the wiki.

For example:

User-agent: *
Disallow: _export/

or, for example:

User-agent: *
Disallow: _export/raw/

Note: This rewriting is only done when $conf['userewrite'] is set
to '1', i.e. when the rewrite rules from '.htaccess.dist' are in use.

darcs-hash:20060405025621-05dcb-b1b5f48681f78d75d25b1e75fab79346fcc8b84e.gz
---
 inc/common.php | 37 +++++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)

diff --git a/inc/common.php b/inc/common.php
index 8af63002c..ca05bd3e7 100644
--- a/inc/common.php
+++ b/inc/common.php
@@ -314,6 +314,43 @@ function wl($id='',$more='',$abs=false,$sep='&amp;'){
   return $xlink;
 }
 
+/**
+ * This builds a link to an alternate page format
+ *
+ * Handles URL rewriting if enabled. Follows the style of wl().
+ *
+ * @author Ben Coburn
+ */
+function exportlink($id='',$format='raw',$more='',$abs=false,$sep='&amp;'){
+  global $conf;
+  if(is_array($more)){
+    $more = buildURLparams($more,$sep);
+  }else{
+    $more = str_replace(',',$sep,$more);
+  }
+
+  $format = rawurlencode($format);
+  $id = idfilter($id);
+  if($abs){
+    $xlink = DOKU_URL;
+  }else{
+    $xlink = DOKU_BASE;
+  }
+
+  if($conf['userewrite'] == 2){
+    $xlink .= DOKU_SCRIPT.'/'.$id.'?do=export_'.$format;
+    if($more) $xlink .= $sep.$more;
+  }elseif($conf['userewrite'] == 1){
+    $xlink .= '_export/'.$format.'/'.$id;
+    if($more) $xlink .= '?'.$more;
+  }else{
+    $xlink .= DOKU_SCRIPT.'?do=export_'.$format.$sep.'id='.$id;
+    if($more) $xlink .= $sep.$more;
+  }
+
+  return $xlink;
+}
+
 /**
  * Build a link to a media file
  *
-- 
cgit v1.2.3
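
For illustration, a rough sketch of the links exportlink() builds under
each $conf['userewrite'] setting. The base path '/dokuwiki/', the page
id 'wiki:syntax', and the exact id encoding (which depends on idfilter()
and $conf['useslash']) are assumptions for the example, not output taken
from this patch:

  <?php
  // Hypothetical call; assumes DOKU_BASE = '/dokuwiki/' and
  // DOKU_SCRIPT = 'doku.php'.
  $url = exportlink('wiki:syntax', 'raw');

  // $conf['userewrite'] == 0 (no rewriting):
  //   /dokuwiki/doku.php?do=export_raw&amp;id=wiki:syntax
  // $conf['userewrite'] == 1 (webserver rewriting via .htaccess.dist):
  //   /dokuwiki/_export/raw/wiki:syntax
  // $conf['userewrite'] == 2 (internal PATH_INFO rewriting):
  //   /dokuwiki/doku.php/wiki:syntax?do=export_raw

Only the userewrite == 1 form gives every export a stable '_export/...'
path prefix, which is what makes the robots.txt rules above possible.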
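
The '_export/' URLs only work because the webserver maps them back to
the normal export action. A minimal sketch of the kind of mod_rewrite
rule '.htaccess.dist' needs for this; the exact pattern below is an
assumption, not copied from the shipped file:

  # Map /_export/<format>/<id> back to doku.php's export action (sketch).
  RewriteRule ^_export/([^/]+)/(.*)  doku.php?do=export_$1&id=$2  [QSA,L]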