From 1c07b9e622d139fa815c955c89569f96342475fb Mon Sep 17 00:00:00 2001
From: Tom N Harris
Date: Tue, 16 Nov 2010 18:09:53 -0500
Subject: Use external program to split pages into words

An external tokenizer inserts extra spaces to mark words in the input text.
The text is sent through STDIN and STDOUT file handles.

A good choice for Chinese and Japanese is MeCab.
http://sourceforge.net/projects/mecab/
With the command line 'mecab -O wakati'
---
 conf/dokuwiki.php | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/conf/dokuwiki.php b/conf/dokuwiki.php
index 2405494e0..f10c70e58 100644
--- a/conf/dokuwiki.php
+++ b/conf/dokuwiki.php
@@ -133,6 +133,8 @@ $conf['broken_iua'] = 0; //Platform with broken ignore_user_abor
 $conf['xsendfile'] = 0; //Use X-Sendfile (1 = lighttpd, 2 = standard)
 $conf['renderer_xhtml'] = 'xhtml'; //renderer to use for main page generation
 $conf['rememberme'] = 1; //Enable/disable remember me on login
+$conf['external_tokenizer'] = 0; //Use an external program to split pages into words for indexing
+$conf['tokenizer_cmd'] = '/usr/bin/mecab -O wakati';

 //Set target to use when creating links - leave empty for same window
 $conf['target']['wiki'] = '';
--
cgit v1.2.3
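
Note: this patch only adds the new config defaults; the indexer changes that actually call the
tokenizer are not shown here. As a rough illustration of the STDIN/STDOUT hand-off described in
the commit message, the sketch below pipes page text through a command such as 'mecab -O wakati'
using PHP's proc_open(). The function name, signature, and fallback behavior are assumptions made
for illustration, not DokuWiki's actual indexer code.

<?php
// Illustrative sketch only -- not part of this patch.
// Pipes $text to an external tokenizer over STDIN and reads the
// space-separated result back from STDOUT.
function external_tokenize($text, $cmd = '/usr/bin/mecab -O wakati') {
    $spec = array(
        0 => array('pipe', 'r'),   // child STDIN  <- we write the page text
        1 => array('pipe', 'w'),   // child STDOUT -> we read the split words
        2 => array('pipe', 'w'),   // child STDERR (ignored here)
    );
    $proc = proc_open($cmd, $spec, $pipes);
    if (!is_resource($proc)) {
        return $text;              // fall back to the untokenized text
    }
    fwrite($pipes[0], $text);
    fclose($pipes[0]);
    $output = stream_get_contents($pipes[1]);
    fclose($pipes[1]);
    fclose($pipes[2]);
    proc_close($proc);
    // 'mecab -O wakati' returns the input with spaces inserted between words
    return $output !== '' ? $output : $text;
}

In this sketch the command would only be run when $conf['external_tokenizer'] is enabled, with
$conf['tokenizer_cmd'] supplying the command line.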