diff options
author | Andreas Gohr <andi@splitbrain.org> | 2006-06-15 20:48:47 +0200 |
---|---|---|
committer | Andreas Gohr <andi@splitbrain.org> | 2006-06-15 20:48:47 +0200 |
commit | 847b82981c3abfa3a352c66c3922c3c2cb89f023 (patch) | |
tree | 62c8fa4264dae86810a176a2932a22edaa413115 | |
parent | a6ef4796e22156364843a3b42bdd8f2dc78c0db5 (diff) | |
download | rpg-847b82981c3abfa3a352c66c3922c3c2cb89f023.tar.gz rpg-847b82981c3abfa3a352c66c3922c3c2cb89f023.tar.bz2 |
$conf['fetchsize'] added
This patch adds an option to configure the maximum size for files that fetch.php
will ever download. Setting this to 0 completely turns off the caching of external
media files.
Disadvantages of setting a low or zero fetchsize:
* fetch.php needs to download images to be able to resize them. When the
configured fetchsize prevents the download, the images can only be resized by
the browser, which means the browser will need to download the full-sized image first.
* If a linked external media file vanishes, it will no longer display in the
wiki because it is not cached.
Advantages of setting a low or zero fetchsize:
* Without a limit, fetch.php could be used for a denial of service attack by
requesting many big external files.
* Without a limit, the created cache files may take a lot of space on the server.
I recommend leaving the setting at 2 MB for internal and private wikis and lowering
the setting to about 200 to 500 KB for bigger public wikis.
Note: the caching of files uploaded through the media manager is not affected by
this setting.
darcs-hash:20060615184847-7ad00-04fc39928f7d72e56f5c5e271013ef265436e6c9.gz
-rw-r--r-- | conf/dokuwiki.php | 1 | ||||
-rw-r--r-- | inc/HTTPClient.php | 10 | ||||
-rw-r--r-- | inc/io.php | 4 | ||||
-rw-r--r-- | lib/exe/fetch.php | 7 |
4 files changed, 16 insertions, 6 deletions
diff --git a/conf/dokuwiki.php b/conf/dokuwiki.php index 5d8616903..e92171322 100644 --- a/conf/dokuwiki.php +++ b/conf/dokuwiki.php @@ -74,6 +74,7 @@ $conf['usegzip'] = 1; //gzip old revisions? $conf['cachetime'] = 60*60*24; //maximum age for cachefile in seconds (defaults to a day) $conf['purgeonadd'] = 1; //purge cache when a new file is added (needed for up to date links) $conf['locktime'] = 15*60; //maximum age for lockfiles (defaults to 15 minutes) +$conf['fetchsize'] = 2*1024*1024; //maximum size (bytes) fetch.php may download from extern $conf['notify'] = ''; //send change info to this email (leave blank for nobody) $conf['mailfrom'] = ''; //use this email when sending mails $conf['gzip_output'] = 0; //use gzip content encodeing for the output xhtml (if allowed by browser) diff --git a/inc/HTTPClient.php b/inc/HTTPClient.php index 69a384487..1612f1211 100644 --- a/inc/HTTPClient.php +++ b/inc/HTTPClient.php @@ -227,9 +227,17 @@ class HTTPClient { $r_headers .= fread($socket,1); #FIXME read full lines here? 
}while(!preg_match('/\r\n\r\n$/',$r_headers)); + // check if expected body size exceeds allowance + if($this->max_bodysize && preg_match('/\r\nContent-Length:\s*(\d+)\r\n/i',$r_header,$match)){ + if($match[1] > $this->max_bodysize){ + $this->error = 'Reported content length exceeds allowed response size'; + return false; + } + } + //read body (with chunked encoding if needed) $r_body = ''; - if(preg_match('/transfer\-(en)?coding:\s+chunked\r\n/i',$r_header)){ + if(preg_match('/transfer\-(en)?coding:\s*chunked\r\n/i',$r_header)){ do { unset($chunk_size); do { diff --git a/inc/io.php b/inc/io.php index ea20502bb..aed27d88e 100644 --- a/inc/io.php +++ b/inc/io.php @@ -292,10 +292,10 @@ function io_mkdir_ftp($dir){ * @author Andreas Gohr <andi@splitbrain.org> * @author Chris Smith <chris@jalakai.co.uk> */ -function io_download($url,$file,$useAttachment=false,$defaultName=''){ +function io_download($url,$file,$useAttachment=false,$defaultName='',$maxSize=2097152){ global $conf; $http = new DokuHTTPClient(); - $http->max_bodysize = 2*1024*1024; //max. 2MB + $http->max_bodysize = $maxSize; $http->timeout = 25; //max. 25 sec $data = $http->get($url); diff --git a/lib/exe/fetch.php b/lib/exe/fetch.php index f1cf9c7b4..e45c27e67 100644 --- a/lib/exe/fetch.php +++ b/lib/exe/fetch.php @@ -224,8 +224,9 @@ function calc_cache($cache){ function get_from_URL($url,$ext,$cache){ global $conf; - // if 'nocache' just redirect - if ($cache==0) { return false; } + // if no cache or fetchsize just redirect + if ($cache==0) return false; + if (!$conf['fetchsize']) return false; $local = getCacheName(strtolower($url),".media.$ext"); $mtime = @filemtime($local); // 0 if not exists @@ -234,7 +235,7 @@ function get_from_URL($url,$ext,$cache){ if( ($mtime == 0) || // cache does not exist ($cache != -1 && $mtime < time()-$cache) // 'recache' and cache has expired ){ - if(io_download($url,$local)){ + if(io_download($url,$local,false,'',$conf['fetchsize'])){ return $local; }else{ return false; |