summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--conf/dokuwiki.php1
-rw-r--r--inc/HTTPClient.php10
-rw-r--r--inc/io.php4
-rw-r--r--lib/exe/fetch.php7
4 files changed, 16 insertions, 6 deletions
diff --git a/conf/dokuwiki.php b/conf/dokuwiki.php
index 5d8616903..e92171322 100644
--- a/conf/dokuwiki.php
+++ b/conf/dokuwiki.php
@@ -74,6 +74,7 @@ $conf['usegzip'] = 1; //gzip old revisions?
$conf['cachetime'] = 60*60*24; //maximum age for cachefile in seconds (defaults to a day)
$conf['purgeonadd'] = 1; //purge cache when a new file is added (needed for up to date links)
$conf['locktime'] = 15*60; //maximum age for lockfiles (defaults to 15 minutes)
+$conf['fetchsize'] = 2*1024*1024; //maximum size (bytes) fetch.php may download from extern
$conf['notify'] = ''; //send change info to this email (leave blank for nobody)
$conf['mailfrom'] = ''; //use this email when sending mails
$conf['gzip_output'] = 0; //use gzip content encodeing for the output xhtml (if allowed by browser)
diff --git a/inc/HTTPClient.php b/inc/HTTPClient.php
index 69a384487..1612f1211 100644
--- a/inc/HTTPClient.php
+++ b/inc/HTTPClient.php
@@ -227,9 +227,17 @@ class HTTPClient {
$r_headers .= fread($socket,1); #FIXME read full lines here?
}while(!preg_match('/\r\n\r\n$/',$r_headers));
+ // check if expected body size exceeds allowance
+ if($this->max_bodysize && preg_match('/\r\nContent-Length:\s*(\d+)\r\n/i',$r_header,$match)){
+ if($match[1] > $this->max_bodysize){
+ $this->error = 'Reported content length exceeds allowed response size';
+ return false;
+ }
+ }
+
//read body (with chunked encoding if needed)
$r_body = '';
- if(preg_match('/transfer\-(en)?coding:\s+chunked\r\n/i',$r_header)){
+ if(preg_match('/transfer\-(en)?coding:\s*chunked\r\n/i',$r_header)){
do {
unset($chunk_size);
do {
diff --git a/inc/io.php b/inc/io.php
index ea20502bb..aed27d88e 100644
--- a/inc/io.php
+++ b/inc/io.php
@@ -292,10 +292,10 @@ function io_mkdir_ftp($dir){
* @author Andreas Gohr <andi@splitbrain.org>
* @author Chris Smith <chris@jalakai.co.uk>
*/
-function io_download($url,$file,$useAttachment=false,$defaultName=''){
+function io_download($url,$file,$useAttachment=false,$defaultName='',$maxSize=2097152){
global $conf;
$http = new DokuHTTPClient();
- $http->max_bodysize = 2*1024*1024; //max. 2MB
+ $http->max_bodysize = $maxSize;
$http->timeout = 25; //max. 25 sec
$data = $http->get($url);
diff --git a/lib/exe/fetch.php b/lib/exe/fetch.php
index f1cf9c7b4..e45c27e67 100644
--- a/lib/exe/fetch.php
+++ b/lib/exe/fetch.php
@@ -224,8 +224,9 @@ function calc_cache($cache){
function get_from_URL($url,$ext,$cache){
global $conf;
- // if 'nocache' just redirect
- if ($cache==0) { return false; }
+ // if no cache or fetchsize just redirect
+ if ($cache==0) return false;
+ if (!$conf['fetchsize']) return false;
$local = getCacheName(strtolower($url),".media.$ext");
$mtime = @filemtime($local); // 0 if not exists
@@ -234,7 +235,7 @@ function get_from_URL($url,$ext,$cache){
if( ($mtime == 0) || // cache does not exist
($cache != -1 && $mtime < time()-$cache) // 'recache' and cache has expired
){
- if(io_download($url,$local)){
+ if(io_download($url,$local,false,'',$conf['fetchsize'])){
return $local;
}else{
return false;