r22616 MediaWiki - Code Review archive

Repository: MediaWiki
Revision: < r22615 | r22616 | r22617 >
Date: 01:35, 1 June 2007
Author: tstarling
Status: old
Tags:
Comment:
* Hack to work around regex that breaks in PHP 5.2
* Fixed broken uri_exists()
* Fixed typo in expand_template()
* Disabled save_url()
* Disabled template expansion by default
* Use 81px logo by default
* Fix usage of DIRECTORY_SEPARATOR in parser
* Applied automatic indenting to a couple of files
Modified paths:
  • /trunk/wap/config.php (modified)
  • /trunk/wap/hawiki/hawiki_cfg.inc (modified)
  • /trunk/wap/hawiki/hawiki_parser_hawpedia.inc (modified)
  • /trunk/wap/hawpedia.php (modified)

Diff

Index: trunk/wap/hawpedia.php
@@ -12,136 +12,136 @@
1313 {
1414 $sessionDeck = new HAW_deck();
1515 $sessionDeck->enable_session();
16 - ini_set('session.use_cookies', 0); // do not use cookies
17 - ini_set('session.use_trans_sid', 1); // use transient sid support
 16+ ini_set('session.use_cookies', 0); // do not use cookies
 17+ ini_set('session.use_trans_sid', 1); // use transient sid support
1818
19 - if ($sessionDeck->ml == HAW_HTML) {
20 - // remove form entry - see http://bugs.php.net/bug.php?id=13472
21 - ini_set('url_rewriter.tags', 'a=href');
22 - }
 19+ if ($sessionDeck->ml == HAW_HTML) {
 20+ // remove form entry - see http://bugs.php.net/bug.php?id=13472
 21+ ini_set('url_rewriter.tags', 'a=href');
 22+ }
2323
24 - session_start();
25 -
26 - determine_settings();
 24+ session_start();
 25+
 26+ determine_settings();
2727 }
2828
2929 function set_deck_properties(&$deck)
3030 {
3131 $deck->set_charset("UTF-8");
32 - $deck->set_width(HAWIKI_DISP_WIDTH);
33 - $deck->set_height(HAWIKI_DISP_HEIGHT);
34 - $deck->set_disp_bgcolor(HAWIKI_DISP_BGCOLOR);
35 - $deck->use_simulator(HAWIKI_SKIN);
 32+ $deck->set_width(HAWIKI_DISP_WIDTH);
 33+ $deck->set_height(HAWIKI_DISP_HEIGHT);
 34+ $deck->set_disp_bgcolor(HAWIKI_DISP_BGCOLOR);
 35+ $deck->use_simulator(HAWIKI_SKIN);
3636 }
3737
3838 function hawpedia_error($error_msg)
3939 {
40 - $error_deck = new HAW_deck(HAWIKI_TITLE);
41 - set_deck_properties($error_deck);
42 - $error_text = new HAW_text($error_msg);
43 - $error_deck->add_text($error_text);
44 -
45 - $rule = new HAW_rule();
46 - $error_deck->add_rule($rule);
47 -
48 - $homelink = new HAW_link(hawtra("Home"), "index.php");
49 - $error_deck->add_link($homelink);
50 -
51 - $error_deck->create_page();
52 - exit();
 40+ $error_deck = new HAW_deck(HAWIKI_TITLE);
 41+ set_deck_properties($error_deck);
 42+ $error_text = new HAW_text($error_msg);
 43+ $error_deck->add_text($error_text);
 44+
 45+ $rule = new HAW_rule();
 46+ $error_deck->add_rule($rule);
 47+
 48+ $homelink = new HAW_link(hawtra("Home"), "index.php");
 49+ $error_deck->add_link($homelink);
 50+
 51+ $error_deck->create_page();
 52+ exit();
5353 }
5454
5555 function determine_settings()
5656 {
5757 global $supportedLanguages;
5858
59 - if (isset($_GET['lang']) &&
60 - isset($supportedLanguages[$_GET['lang']]) &&
61 - ($supportedLanguages[$_GET['lang']] == 1)) {
62 - // language explicitely requested in url parameter
63 - $_SESSION['language'] = $_GET['lang']; // overwrite session info
64 - }
65 - else if (!isset($_SESSION['language']))
66 - {
67 - // no language info stored in session
68 - if(isset($_SERVER['HTTP_ACCEPT_LANGUAGE']) &&
69 - isset($supportedLanguages[$_SERVER['HTTP_ACCEPT_LANGUAGE']]) &&
70 - ($supportedLanguages[$_SERVER['HTTP_ACCEPT_LANGUAGE']] == 1) &&
71 - (!defined('FORCE_DEFAULT_LANGUAGE') || !FORCE_DEFAULT_LANGUAGE))
 59+ if (isset($_GET['lang']) &&
 60+ isset($supportedLanguages[$_GET['lang']]) &&
 61+ ($supportedLanguages[$_GET['lang']] == 1)) {
 62+ // language explicitely requested in url parameter
 63+ $_SESSION['language'] = $_GET['lang']; // overwrite session info
 64+ }
 65+ else if (!isset($_SESSION['language']))
7266 {
73 - // store browser's preference in session
74 - $_SESSION['language'] = $_SERVER['HTTP_ACCEPT_LANGUAGE'];
 67+ // no language info stored in session
 68+ if(isset($_SERVER['HTTP_ACCEPT_LANGUAGE']) &&
 69+ isset($supportedLanguages[$_SERVER['HTTP_ACCEPT_LANGUAGE']]) &&
 70+ ($supportedLanguages[$_SERVER['HTTP_ACCEPT_LANGUAGE']] == 1) &&
 71+ (!defined('FORCE_DEFAULT_LANGUAGE') || !FORCE_DEFAULT_LANGUAGE))
 72+ {
 73+ // store browser's preference in session
 74+ $_SESSION['language'] = $_SERVER['HTTP_ACCEPT_LANGUAGE'];
 75+ }
 76+ elseif(isset($_SERVER['HTTP_HOST']) &&
 77+ ($dot = strpos($_SERVER['HTTP_HOST'], '.')) &&
 78+ ($domlang = substr($_SERVER['HTTP_HOST'], 0, $dot)) &&
 79+ isset($supportedLanguages[$domlang]) &&
 80+ ($supportedLanguages[$domlang] == 1) &&
 81+ (defined('FORCE_DEFAULT_LANGUAGE') && ('subdomain'==FORCE_DEFAULT_LANGUAGE)))
 82+ {
 83+ // store language subdomain in session
 84+ $_SESSION['language'] = $domlang;
 85+ }
 86+ else
 87+ {
 88+ // store default language in session
 89+ $_SESSION['language'] = DEFAULT_LANGUAGE;
 90+ }
7591 }
76 - elseif(isset($_SERVER['HTTP_HOST']) &&
77 - ($dot = strpos($_SERVER['HTTP_HOST'], '.')) &&
78 - ($domlang = substr($_SERVER['HTTP_HOST'], 0, $dot)) &&
79 - isset($supportedLanguages[$domlang]) &&
80 - ($supportedLanguages[$domlang] == 1) &&
81 - (defined('FORCE_DEFAULT_LANGUAGE') && ('subdomain'==FORCE_DEFAULT_LANGUAGE)))
82 - {
83 - // store language subdomain in session
84 - $_SESSION['language'] = $domlang;
 92+
 93+ require('lang/' . $_SESSION['language'] . '/phonenumbers.php');
 94+
 95+ if (isset($_GET['tel']) &&
 96+ isset($phonenumbers[$_GET['tel']])) {
 97+ // phonenumber explicitely requested in url parameter
 98+ $_SESSION['tel'] = $_GET['tel']; // overwrite session info
 99+ }
 100+ else if (!isset($_SESSION['tel'])) {
 101+ // no telephone number info stored in session
 102+ if (count($phonenumbers) > 0) {
 103+ // store key of 1st entry in session
 104+ $_SESSION['tel'] = array_shift(array_keys($phonenumbers));
 105+ }
 106+ else {
 107+ // deactivate feature
 108+ unset($_SESSION['tel']);
 109+ }
85110 }
86 - else
87 - {
88 - // store default language in session
89 - $_SESSION['language'] = DEFAULT_LANGUAGE;
90 - }
91 - }
92 -
93 - require('lang/' . $_SESSION['language'] . '/phonenumbers.php');
94 -
95 - if (isset($_GET['tel']) &&
96 - isset($phonenumbers[$_GET['tel']])) {
97 - // phonenumber explicitely requested in url parameter
98 - $_SESSION['tel'] = $_GET['tel']; // overwrite session info
99 - }
100 - else if (!isset($_SESSION['tel'])) {
101 - // no telephone number info stored in session
102 - if (count($phonenumbers) > 0) {
103 - // store key of 1st entry in session
104 - $_SESSION['tel'] = array_shift(array_keys($phonenumbers));
105 - }
106 - else {
107 - // deactivate feature
108 - unset($_SESSION['tel']);
109 - }
110 - }
111111 }
112112
113113 function hawtra($text)
114114 {
115 - // translate given text
 115+ // translate given text
116116
117 - $translationFile = "lang/" . $_SESSION['language'] . "/translations.php";
118 -
119 - if (!file_exists($translationFile))
120 - return($text); // no translation possible
121 -
122 - require($translationFile);
123 -
124 - if (isset($translation[$text]))
125 - return $translation[$text];
126 - else
127 - return $text; // no translation available
 117+ $translationFile = "lang/" . $_SESSION['language'] . "/translations.php";
 118+
 119+ if (!file_exists($translationFile))
 120+ return($text); // no translation possible
 121+
 122+ require($translationFile);
 123+
 124+ if (isset($translation[$text]))
 125+ return $translation[$text];
 126+ else
 127+ return $text; // no translation available
128128 }
129129
130130 function translate_wikipedia_keyword($keyword) {
131131
132 - // translate language-specific wikipedia keyword
133 -
134 - if ($_SESSION['language'] == 'en')
135 - return $keyword; // no translation needed
 132+ // translate language-specific wikipedia keyword
136133
137 - $languageFile = "lang/" . $_SESSION['language'] . "/keywords.php";
138 - if (!file_exists($languageFile))
139 - die("file not found: " . $filename);
140 - require($languageFile);
 134+ if ($_SESSION['language'] == 'en')
 135+ return $keyword; // no translation needed
141136
 137+ $languageFile = "lang/" . $_SESSION['language'] . "/keywords.php";
 138+ if (!file_exists($languageFile))
 139+ die("file not found: " . $filename);
 140+ require($languageFile);
 141+
142142 if (!isset($keywords[$keyword]))
143 - die("unknown keyword: " . $keyword);
144 -
145 - return $keywords[$keyword];
 143+ die("unknown keyword: " . $keyword);
 144+
 145+ return $keywords[$keyword];
146146 }
147147
148148 function export_wikipedia($searchTerm)
@@ -149,9 +149,9 @@
150150 $result = array();
151151
152152 $export_keyword = translate_wikipedia_keyword('Special:Export');
153 -
 153+
154154 $searchTerm = str_replace(" ", "_", $searchTerm); // blanks must become underscores
155 -
 155+
156156 // get wikipedia xml file
157157 $ch = curl_init();
158158 $url = "http://" . $_SESSION['language'] . ".wikipedia.org/wiki/" . $export_keyword . "/" . $searchTerm;
@@ -160,19 +160,19 @@
161161 curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
162162 $curlResultString = curl_exec($ch);
163163 if (!is_string($curlResultString))
164 - hawpedia_error(hawtra("Wikipedia currently not available")); // exits internally
 164+ hawpedia_error(hawtra("Wikipedia currently not available")); // exits internally
165165
166166 curl_close($ch);
167167
168168 // determine page title
169169 if (!preg_match("%<title>(.*)</title>%", $curlResultString, $matches))
170 - return false; // search term not found
171 -
 170+ return false; // search term not found
 171+
172172 $result['title'] = $matches[1];
173 -
 173+
174174 // determine wiki text
175175 if (!preg_match("/(<text [^>]*>)/", $curlResultString, $matches))
176 - hawpedia_error(hawtra("wikipedia export error")); // exits internally
 176+ hawpedia_error(hawtra("wikipedia export error")); // exits internally
177177 $textStart = strpos($curlResultString, $matches[1]) + strlen($matches[1]);
178178 $textEnd = strpos($curlResultString, "</text>");
179179 $result['wikitext'] = substr($curlResultString, $textStart, $textEnd - $textStart);
@@ -182,33 +182,33 @@
183183
184184 function expand_template($Term, $Page)
185185 {
186 -# echo("\n".'<br />'.__LINE__.': expantemplate: Term='.$Term.', Page='.$Page.', ');
 186+ # echo("\n".'<br />'.__LINE__.': expantemplate: Term='.$Term.', Page='.$Page.', ');
187187 $result = ('');
188188
189189 $export_keyword = translate_wikipedia_keyword('Special:ExpandTemplates');
190 -
 190+
191191 // get wikipedia xml file
192192 $ch = curl_init();
193193 $url = ("http://" . $_SESSION['language'] . ".wikipedia.org/wiki/" . urlencode($export_keyword)
194 - . '?' . 'input='.urlencode($Term)
195 - . '&' . 'contexttitle='.urlencode($Page)
196 - . '&removecomments=1');
 194+ . '?' . 'input='.urlencode($Term)
 195+ . '&' . 'contexttitle='.urlencode($Page)
 196+ . '&removecomments=1');
197197 curl_setopt($ch, CURLOPT_URL, $url);
198 - curl_setopt($ch, CURLOPT_HEADER, FLASE);
 198+ curl_setopt($ch, CURLOPT_HEADER, FALSE);
199199 curl_setopt($ch, CURLOPT_RETURNTRANSFER, TRUE);
200200 $curlResultString = curl_exec($ch);
201201 if (!is_string($curlResultString))
202 - hawpedia_error(hawtra("Wikipedia Special:ExpandTemplates currently not available")); // exits internally
 202+ hawpedia_error(hawtra("Wikipedia Special:ExpandTemplates currently not available")); // exits internally
203203
204204 curl_close($ch);
205205
206206 // determine wiki text
207207 if (!preg_match('_(<textarea [^>]* readonly="readonly">)_', $curlResultString, $matches))
208 - hawpedia_error(hawtra("wikipedia export error")); // exits internally
 208+ hawpedia_error(hawtra("wikipedia export error")); // exits internally
209209 $textStart = strpos($curlResultString, $matches[1]) + strlen($matches[1]);
210210 $textEnd = strpos($curlResultString, "</textarea>", $textStart);
211211 $result = substr($curlResultString, $textStart, $textEnd - $textStart);
212 -# echo("\n".'<br />'.__LINE__.': expantemplate: result='.$result.',, ');
 212+ # echo("\n".'<br />'.__LINE__.': expantemplate: result='.$result.',, ');
213213 return $result;
214214 }
215215
@@ -238,46 +238,46 @@
239239 $level = 0; // nesting level
240240 foreach($pos as $index => $what)
241241 {
242 -# echo("\n".'<br />'.__LINE__.': index='.$index.', what='.$what.' level='.$level);
 242+ # echo("\n".'<br />'.__LINE__.': index='.$index.', what='.$what.' level='.$level);
243243 switch($what)
244244 {
245 - case -1 :
246 - if(0 == ($level++))
247 - {
248 - $start = $index;
249 - }
250 - break;
251 - case -2 :
252 - if(0 == (--$level))
253 - {
254 - $sec[$start] = $index;
255 - }
256 - elseif($level < 0)
257 - {
258 - $level = 0;
259 - }
260 - break;
261 - default :
262 - die('Internal Error in replace_sections');
 245+ case -1 :
 246+ if(0 == ($level++))
 247+ {
 248+ $start = $index;
 249+ }
 250+ break;
 251+ case -2 :
 252+ if(0 == (--$level))
 253+ {
 254+ $sec[$start] = $index;
 255+ }
 256+ elseif($level < 0)
 257+ {
 258+ $level = 0;
 259+ }
 260+ break;
 261+ default :
 262+ die('Internal Error in replace_sections');
263263 }
264 - }
 264+ }
265265 krsort($sec); //sort reverse so as to allow expansion to take place in the data itself.
266266 //replace sections - from rear to beginning which maintains start/end positions for undone ones.
267267 foreach($sec as $start => $end)
268268 {
269 -#echo("\n".'<br /><br />'.__LINE__.': start='.$start.', end='.$end.' wikitext='.$wikitext);
270 -#echo "\n".'<br /><br />'.__LINE__.': beginning part='.
 269+ #echo("\n".'<br /><br />'.__LINE__.': start='.$start.', end='.$end.' wikitext='.$wikitext);
 270+ #echo "\n".'<br /><br />'.__LINE__.': beginning part='.
271271 $begining_part = substr($wikitext, 0, $start);
272 -#echo "\n".'<br /><br />'.__LINE__.': template call='.
 272+ #echo "\n".'<br /><br />'.__LINE__.': template call='.
273273 $template_call = substr($wikitext, $start, $end-$start);
274 -#echo "\n".'<br /><br />'.__LINE__.': final part='.
 274+ #echo "\n".'<br /><br />'.__LINE__.': final part='.
275275 $final_part = substr($wikitext, $end);
276 -#echo "\n".'<br /><br />'.__LINE__.': replacement='.
 276+ #echo "\n".'<br /><br />'.__LINE__.': replacement='.
277277 $replacement = $replacementfunction($template_call, $title);
278278
279279 $wikitext = ($begining_part.$replacement.$final_part);
280280 }
281 -#echo "\n".'<br /><br />'.__LINE__.': wikitext='.$wikitext;
 281+ #echo "\n".'<br /><br />'.__LINE__.': wikitext='.$wikitext;
282282 return($wikitext);
283283 }
284284
@@ -291,21 +291,20 @@
292292 curl_setopt($ch, CURLOPT_NOBODY, TRUE);
293293 curl_setopt($ch, CURLOPT_FAILONERROR, TRUE);
294294 curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
295 - $curlResultString = curl_exec($ch);
 295+ curl_exec($ch);
 296+ $status = curl_getinfo( $ch, CURLINFO_HTTP_CODE );
296297 curl_close($ch);
297 - $result = ($curlResultString);
298 -# echo('<br />'.__LINE__.': uri='.$uri.' result='.($result?('TRUE: '.$curlResultString):'FALSE').'<br />');
299 - return $result;
 298+ return $status == 200;
300299 }
301300
302301 function search_articles($article)
303302 {
304303 // search related articles (after export has failed)
305 -
 304+
306305 $result = array();
307306
308307 $search_keyword = translate_wikipedia_keyword('Special:Search');
309 -
 308+
310309 $article = str_replace(" ", "_", $article); // blanks must become underscores
311310
312311 // get wikipedia search result (in html format)
@@ -316,76 +315,76 @@
317316 curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
318317 $curlResultString = curl_exec($ch);
319318 if (!is_string($curlResultString))
320 - hawpedia_error(hawtra("Wikipedia currently not available")); // exits internally
 319+ hawpedia_error(hawtra("Wikipedia currently not available")); // exits internally
321320
322321 curl_close($ch);
323322
324 - // extract article links from html
325 - preg_match_all("%1em;\"><a href=\"/wiki/([^?\"]*)%", $curlResultString, $matches);
326 -
327 - for ($i=0; $i < count($matches[1]); $i++) {
328 - // iterate over found articles (no category links!)
329 - if (!strstr($matches[1][$i], ":"))
330 - $result[] = $matches[1][$i];
331 -
332 - if (count($result) >= 10)
333 - break; // consider not more than 10 links
334 - }
 323+ // extract article links from html
 324+ preg_match_all("%1em;\"><a href=\"/wiki/([^?\"]*)%", $curlResultString, $matches);
335325
336 - if (count($result) == 0)
337 - return 0; // nothing found
338 - else
339 - return $result;
 326+ for ($i=0; $i < count($matches[1]); $i++) {
 327+ // iterate over found articles (no category links!)
 328+ if (!strstr($matches[1][$i], ":"))
 329+ $result[] = $matches[1][$i];
 330+
 331+ if (count($result) >= 10)
 332+ break; // consider not more than 10 links
 333+ }
 334+
 335+ if (count($result) == 0)
 336+ return 0; // nothing found
 337+ else
 338+ return $result;
340339 }
341340
342341 function show_related_articles($articles, $searchterm)
343342 {
344 - $search_deck = new HAW_deck(HAWIKI_TITLE);
345 - set_deck_properties($search_deck);
346 -
347 - // tell what this deck is about
348 - $intro = new HAW_text(hawtra("Found articles for:") . " " . $searchterm);
349 - $search_deck->add_text($intro);
 343+ $search_deck = new HAW_deck(HAWIKI_TITLE);
 344+ set_deck_properties($search_deck);
350345
351 - // separate intro from link section
352 - $rule = new HAW_rule();
353 - $search_deck->add_rule($rule);
354 -
355 - // create one link for each article
356 - foreach ($articles as $article) {
357 - $article_link = new HAW_link(urldecode($article), "transcode.php?go=" . $article);
358 - $search_deck->add_link($article_link);
359 - }
360 -
361 - // add home link
362 - $search_deck->add_rule($rule);
363 - $homelink = new HAW_link(hawtra("Home"), "index.php");
364 - $search_deck->add_link($homelink);
 346+ // tell what this deck is about
 347+ $intro = new HAW_text(hawtra("Found articles for:") . " " . $searchterm);
 348+ $search_deck->add_text($intro);
365349
366 - $search_deck->create_page();
367 - exit();
 350+ // separate intro from link section
 351+ $rule = new HAW_rule();
 352+ $search_deck->add_rule($rule);
 353+
 354+ // create one link for each article
 355+ foreach ($articles as $article) {
 356+ $article_link = new HAW_link(urldecode($article), "transcode.php?go=" . $article);
 357+ $search_deck->add_link($article_link);
 358+ }
 359+
 360+ // add home link
 361+ $search_deck->add_rule($rule);
 362+ $homelink = new HAW_link(hawtra("Home"), "index.php");
 363+ $search_deck->add_link($homelink);
 364+
 365+ $search_deck->create_page();
 366+ exit();
368367 }
369368
370369 function extract_chapter($wikitext, $chapter)
371370 {
372371 if (!preg_match("/\n(==+)(\s?" . $chapter . "\s?==+)/", $wikitext, $matches))
373 - return("invalid chapter"); // should never happen
374 -
 372+ return("invalid chapter"); // should never happen
 373+
375374 $chapterStart = strpos($wikitext, $matches[1] . $matches[2]);
376375
377 - // search end of chapter
378 - $chapterEnd = $chapterStart + strlen($chapter);
379 - do {
380 - // number of '=' characters must match exactly
381 - $chapterEnd = strpos($wikitext, "\n" . $matches[1], $chapterEnd + 1);
382 - }
383 - while (($chapterEnd !== false) && (substr($wikitext, $chapterEnd + 1 + strlen($matches[1]), 1) == "="));
384 -
385 - if ($chapterEnd !== false)
386 - $wikitext = substr($wikitext, $chapterStart, $chapterEnd - $chapterStart);
387 - else
388 - $wikitext = substr($wikitext, $chapterStart);
389 -
 376+ // search end of chapter
 377+ $chapterEnd = $chapterStart + strlen($chapter);
 378+ do {
 379+ // number of '=' characters must match exactly
 380+ $chapterEnd = strpos($wikitext, "\n" . $matches[1], $chapterEnd + 1);
 381+ }
 382+ while (($chapterEnd !== false) && (substr($wikitext, $chapterEnd + 1 + strlen($matches[1]), 1) == "="));
 383+
 384+ if ($chapterEnd !== false)
 385+ $wikitext = substr($wikitext, $chapterStart, $chapterEnd - $chapterStart);
 386+ else
 387+ $wikitext = substr($wikitext, $chapterStart);
 388+
390389 return($wikitext);
391390 }
392391
@@ -394,59 +393,59 @@
395394 // remove all text within startStr and endStr (incl. limiters)
396395 // preg_replace can cause problems as described here: http://bugs.php.net/bug.php?id=24460
397396 while (true) {
398 - $secStart = strpos($wikitext, $startStr);
399 - if ($secStart === false)
400 - break;
401 -
402 - $secEnd = strpos($wikitext, $endStr, $secStart);
403 - if ($secEnd === false)
404 - break;
405 -
406 - $nestedStart = strpos($wikitext, $startStr, $secStart + strlen($startStr));
407 - if (($nestedStart !== false) && ($nestedStart < $secEnd)) {
408 - // nested section found
409 - // algorithm does work for one nested section only! (Sufficient???)
410 - $secEnd = strpos($wikitext, $endStr, $secEnd + strlen($endStr));
411 - if ($secEnd === false)
412 - break;
413 - }
414 -
415 - //remove section
416 - $wikitext = substr($wikitext, 0, $secStart) . substr($wikitext, $secEnd + strlen($endStr));
 397+ $secStart = strpos($wikitext, $startStr);
 398+ if ($secStart === false)
 399+ break;
 400+
 401+ $secEnd = strpos($wikitext, $endStr, $secStart);
 402+ if ($secEnd === false)
 403+ break;
 404+
 405+ $nestedStart = strpos($wikitext, $startStr, $secStart + strlen($startStr));
 406+ if (($nestedStart !== false) && ($nestedStart < $secEnd)) {
 407+ // nested section found
 408+ // algorithm does work for one nested section only! (Sufficient???)
 409+ $secEnd = strpos($wikitext, $endStr, $secEnd + strlen($endStr));
 410+ if ($secEnd === false)
 411+ break;
 412+ }
 413+
 414+ //remove section
 415+ $wikitext = substr($wikitext, 0, $secStart) . substr($wikitext, $secEnd + strlen($endStr));
417416 }
418 -
 417+
419418 return($wikitext);
420419 }
421420
422421 function remove_controls($wikitext)
423422 {
424 - // remove some mediawiki control elements
425 - $wikitext = str_replace("__NOTOC__", "", $wikitext);
426 - $wikitext = str_replace("__FORCETOC__", "", $wikitext);
427 - $wikitext = str_replace("__TOC__", "", $wikitext);
428 - $wikitext = str_replace("__NOEDITSECTION__", "", $wikitext);
429 - $wikitext = str_replace("__NEWSECTIONLINK__", "", $wikitext);
430 - $wikitext = str_replace("__NOCONTENTCONVERT__", "", $wikitext);
431 - $wikitext = str_replace("__NOCC__", "", $wikitext);
432 - $wikitext = str_replace("__NOGALLERY__", "", $wikitext);
433 - $wikitext = str_replace("__NOTITLECONVERT__", "", $wikitext);
434 - $wikitext = str_replace("__NOTC__", "", $wikitext);
435 - $wikitext = str_replace("__END__", "", $wikitext);
436 - $wikitext = str_replace("__START__", "", $wikitext);
437 -
438 - return($wikitext);
 423+ // remove some mediawiki control elements
 424+ $wikitext = str_replace("__NOTOC__", "", $wikitext);
 425+ $wikitext = str_replace("__FORCETOC__", "", $wikitext);
 426+ $wikitext = str_replace("__TOC__", "", $wikitext);
 427+ $wikitext = str_replace("__NOEDITSECTION__", "", $wikitext);
 428+ $wikitext = str_replace("__NEWSECTIONLINK__", "", $wikitext);
 429+ $wikitext = str_replace("__NOCONTENTCONVERT__", "", $wikitext);
 430+ $wikitext = str_replace("__NOCC__", "", $wikitext);
 431+ $wikitext = str_replace("__NOGALLERY__", "", $wikitext);
 432+ $wikitext = str_replace("__NOTITLECONVERT__", "", $wikitext);
 433+ $wikitext = str_replace("__NOTC__", "", $wikitext);
 434+ $wikitext = str_replace("__END__", "", $wikitext);
 435+ $wikitext = str_replace("__START__", "", $wikitext);
 436+
 437+ return($wikitext);
439438 }
440439
441440 function links2text($wikitext)
442441 {
443442 // make [[wikilinks]] to wikilinks
444 - $wikitext = preg_replace('/\[\[([^:\]]*\|)?([^:\]]*)\]\]/','${2}', $wikitext);
445 -
 443+ $wikitext = preg_replace('/\[\[([^:\]]*\|)?([^:\]]*)\]\]/','${2}', $wikitext);
 444+
446445 // disable detection of http links
447 - $wikitext = preg_replace('/http/','h t t p ', $wikitext);
448 - //$wikitext = preg_replace('@\[?http://\S*(.*?)\]?@','${1}', $wikitext);
449 - //$wikitext = preg_replace('@\[?http://(\S*)\]?@','${1}', $wikitext);
450 -
 446+ $wikitext = preg_replace('/http/','h t t p ', $wikitext);
 447+ //$wikitext = preg_replace('@\[?http://\S*(.*?)\]?@','${1}', $wikitext);
 448+ //$wikitext = preg_replace('@\[?http://(\S*)\]?@','${1}', $wikitext);
 449+
451450 //echo $wikitext;
452451 return($wikitext);
453452 }
@@ -455,48 +454,50 @@
456455 {
457456 $result = array(); // init empty array
458457
459 - while(true) {
460 -
461 - $seg = substr($wikitext, 0, $segLength); // determine maximum segment
462 -
463 - if (strlen($seg) < $segLength) {
464 - // end of text
465 - $result[] = $seg; // add last array element
466 - break;
467 - }
468 -
469 - $crPos = strrpos($seg, "\n"); // find previous new line
 458+ while(true) {
470459
471 - if ($crPos === false) {
472 - // no newline found in segment, find next new line
473 - $crPos = strpos($wikitext, "\n", $segLength);
474 - }
475 -
476 - if (($crPos === false) || ($crPos == 0)) {
477 - // no newline in whole text
478 - $crPos = strlen($wikitext); // consider whole text
479 - }
 460+ $seg = substr($wikitext, 0, $segLength); // determine maximum segment
480461
481 - $seg = substr($wikitext, 0, $crPos + 1); // determine segment
482 - $result[] = $seg; // add array element
483 - $wikitext = substr($wikitext, $crPos + 1); // continue with new line
484 -
485 - if ($crPos == 0)
486 - exit;
487 - }
488 -
489 - return $result;
 462+ if (strlen($seg) < $segLength) {
 463+ // end of text
 464+ $result[] = $seg; // add last array element
 465+ break;
 466+ }
 467+
 468+ $crPos = strrpos($seg, "\n"); // find previous new line
 469+
 470+ if ($crPos === false) {
 471+ // no newline found in segment, find next new line
 472+ $crPos = strpos($wikitext, "\n", $segLength);
 473+ }
 474+
 475+ if (($crPos === false) || ($crPos == 0)) {
 476+ // no newline in whole text
 477+ $crPos = strlen($wikitext); // consider whole text
 478+ }
 479+
 480+ $seg = substr($wikitext, 0, $crPos + 1); // determine segment
 481+ $result[] = $seg; // add array element
 482+ $wikitext = substr($wikitext, $crPos + 1); // continue with new line
 483+
 484+ if ($crPos == 0)
 485+ exit;
 486+ }
 487+
 488+ return $result;
490489 }
491490
492491 function save_url()
493492 {
 493+ /*
494494 // write location parameter to temporary file
495495 $fp = fopen(HAWPEDIA_VXML_TMP_FILE, "w");
496496 if (!$fp)
497497 return; // unsuccessful ...
498 -
 498+
499499 fputs($fp, $_SERVER["REQUEST_URI"]);
500500 fclose($fp);
 501+ */
501502 }
502 -
 503+
503504 ?>
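
Note on the uri_exists() hunk above: the previous implementation returned the raw result of a header-only curl_exec(), which is not a reliable indicator of whether the resource exists. After this revision the function asks cURL for the HTTP response code and treats only 200 as success. Reassembled as a sketch, with the function signature and the cURL setup lines that sit before the hunk assumed rather than shown in the diff:

  function uri_exists($uri)
  {
      // header-only request; existence is judged by the HTTP status code
      $ch = curl_init();
      curl_setopt($ch, CURLOPT_URL, $uri);   // assumed: set in the unchanged context above the hunk
      curl_setopt($ch, CURLOPT_NOBODY, TRUE);
      curl_setopt($ch, CURLOPT_FAILONERROR, TRUE);
      curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
      curl_exec($ch);
      $status = curl_getinfo( $ch, CURLINFO_HTTP_CODE );
      curl_close($ch);
      return $status == 200;                 // only a 200 counts as "exists"
  }

The parser relies on this check to decide whether an image lives on the language wiki or has to be fetched from Commons (see the uri_exists($img_w_raw_url) call in hawiki_parser_hawpedia.inc below).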
Index: trunk/wap/config.php
@@ -16,7 +16,7 @@
1717 define('DISABLE_IMAGES', FALSE); // do not output images, if set & true.
1818 define('LINKS_HAVE_NO_BRACKETS', TRUE); // do not output brackets surrounding links, if set & true.
1919 define('IMG_MAX_WIDTH', 128); // pixels maximum width for images (may be further reduced by displa size)
20 -define('EXPAND_TEMPLATES', TRUE); // expand {{...}} syntax, if set & true, else remove it.
 20+define('EXPAND_TEMPLATES', false); // expand {{...}} syntax, if set & true, else remove it.
2121
2222 define("SEGLENGTH_WML", 600);
2323 define("SEGLENGTH_HDML", 600);
Index: trunk/wap/hawiki/hawiki_parser_hawpedia.inc
@@ -20,303 +20,304 @@
2121
2222 function HAWIKI_parser(&$input, $isFirstColumn)
2323 {
24 - // lexical analyser for wikipedia pages
 24+ // lexical analyser for wikipedia pages
2525
26 - $po = array(); // array for parser output
 26+ $po = array(); // array for parser output
2727
28 - $po["text"] = ""; // init text output
 28+ $po["text"] = ""; // init text output
2929
30 - if (strlen($input) == 0)
31 - return(0); // end of input reached
 30+ if (strlen($input) == 0)
 31+ return(0); // end of input reached
3232
33 - if (preg_match("/^([\r\n]+)/", $input, $matches))
34 - {
35 - // new line(s)
36 - $po["count"] = substr_count($matches[1], "\n");
37 - $input = substr($input, strlen($matches[1]));
38 - $po["token"] = HAWIKI_TOKEN_NEWLINE;
39 - return($po);
40 - }
 33+ if (preg_match("/^([\r\n]+)/", $input, $matches))
 34+ {
 35+ // new line(s)
 36+ $po["count"] = substr_count($matches[1], "\n");
 37+ $input = substr($input, strlen($matches[1]));
 38+ $po["token"] = HAWIKI_TOKEN_NEWLINE;
 39+ return($po);
 40+ }
4141
42 - if (preg_match("/^('{3,})/", $input, $matches))
43 - {
44 - // format bold
45 - $input = substr($input, strlen($matches[1]));
46 - $po["token"] = HAWIKI_TOKEN_BOLD;
47 - return($po);
48 - }
 42+ if (preg_match("/^('{3,})/", $input, $matches))
 43+ {
 44+ // format bold
 45+ $input = substr($input, strlen($matches[1]));
 46+ $po["token"] = HAWIKI_TOKEN_BOLD;
 47+ return($po);
 48+ }
4949
50 - if (preg_match("/^''/", $input, $matches))
51 - {
52 - // format italic
53 - $input = substr($input, 2);
54 - $po["token"] = HAWIKI_TOKEN_ITALIC;
55 - return($po);
56 - }
 50+ if (preg_match("/^''/", $input, $matches))
 51+ {
 52+ // format italic
 53+ $input = substr($input, 2);
 54+ $po["token"] = HAWIKI_TOKEN_ITALIC;
 55+ return($po);
 56+ }
5757
58 - if (preg_match("/^(==+)/", $input, $matches))
59 - {
60 - // heading
61 - $po["level"] = strlen($matches[1]) - 1;
62 - $input = substr($input, strlen($matches[1]));
63 - $po["token"] = HAWIKI_TOKEN_HEADING;
64 - return($po);
65 - }
 58+ if (preg_match("/^(==+)/", $input, $matches))
 59+ {
 60+ // heading
 61+ $po["level"] = strlen($matches[1]) - 1;
 62+ $input = substr($input, strlen($matches[1]));
 63+ $po["token"] = HAWIKI_TOKEN_HEADING;
 64+ return($po);
 65+ }
6666
67 - if ($isFirstColumn && preg_match("/^(\*+)/", $input, $matches))
68 - {
69 - // bullet list
70 - $po["level"] = strlen($matches[1]);
71 - $input = substr($input, $po["level"]);
72 - $po["token"] = HAWIKI_TOKEN_BULLIST;
73 - return($po);
74 - }
 67+ if ($isFirstColumn && preg_match("/^(\*+)/", $input, $matches))
 68+ {
 69+ // bullet list
 70+ $po["level"] = strlen($matches[1]);
 71+ $input = substr($input, $po["level"]);
 72+ $po["token"] = HAWIKI_TOKEN_BULLIST;
 73+ return($po);
 74+ }
7575
76 - if ($isFirstColumn && preg_match("/^(#+)/", $input, $matches))
77 - {
78 - // numbered list
79 - $po["level"] = strlen($matches[1]);
80 - $input = substr($input, $po["level"]);
81 - $po["token"] = HAWIKI_TOKEN_NUMLIST;
82 - return($po);
83 - }
 76+ if ($isFirstColumn && preg_match("/^(#+)/", $input, $matches))
 77+ {
 78+ // numbered list
 79+ $po["level"] = strlen($matches[1]);
 80+ $input = substr($input, $po["level"]);
 81+ $po["token"] = HAWIKI_TOKEN_NUMLIST;
 82+ return($po);
 83+ }
8484
85 - if ($isFirstColumn && preg_match("/^(----*\r?\n)/", $input, $matches))
86 - {
87 - // horizontal rule
88 - $input = substr($input, strlen($matches[1]));
89 - $po["token"] = HAWIKI_TOKEN_RULE;
90 - return($po);
91 - }
 85+ if ($isFirstColumn && preg_match("/^(----*\r?\n)/", $input, $matches))
 86+ {
 87+ // horizontal rule
 88+ $input = substr($input, strlen($matches[1]));
 89+ $po["token"] = HAWIKI_TOKEN_RULE;
 90+ return($po);
 91+ }
9292
93 - if (preg_match("/^\[\[(([^\[\]]+)|(\[[^\[\]]*\])*|(\[\[[^\[\]]*\]\])*)+\]\]/", $input, $matches))
94 - {
95 - // Forced WikiName ((a_page_with_any_name))
 93+ if (preg_match("/^\[\[([^\[\]]+)\]\]/", $input, $matches))
 94+ #if (preg_match("/^\[\[(([^\[\]]+)|(\[[^\[\]]*\])*|(\[\[[^\[\]]*\]\])*)+\]\]/", $input, $matches))
 95+ {
 96+ // Forced WikiName ((a_page_with_any_name))
9697
97 - // images can be nested like this: [[Image:...|[[link]]...]]
98 - // or this: [[Image:...|[link]...]]
99 -# echo('<blockquote>'); print_r($matches); echo('</blockquote>');
100 - $input = substr($input, strlen($matches[0]));
 98+ // images can be nested like this: [[Image:...|[[link]]...]]
 99+ // or this: [[Image:...|[link]...]]
 100+ # echo('<blockquote>'); print_r($matches); echo('</blockquote>');
 101+ $input = substr($input, strlen($matches[0]));
101102
102 - $wikipage = substr($matches[0], 2, -2);
 103+ $wikipage = substr($matches[0], 2, -2);
103104
104 - $po["extwiki"] = ""; // hawpedia supports no external wikis
 105+ $po["extwiki"] = ""; // hawpedia supports no external wikis
105106
106 - $link = explode("|", $wikipage); // test for alternative link label
107 - if (count($link) > 1)
108 - {
109 - $po["page"] = $link[0];
110 - $po["label"] = $link[1];
111 - }
112 - else
113 - {
114 - $po["page"] = $link[0]; // no extra labelling of link required
115 - $po["label"] = $link[0];
116 - }
 107+ $link = explode("|", $wikipage); // test for alternative link label
 108+ if (count($link) > 1)
 109+ {
 110+ $po["page"] = $link[0];
 111+ $po["label"] = $link[1];
 112+ }
 113+ else
 114+ {
 115+ $po["page"] = $link[0]; // no extra labelling of link required
 116+ $po["label"] = $link[0];
 117+ }
117118
118 - if (strpos($po["page"], ':') != false)
119 - {
120 - // category link
 119+ if (strpos($po["page"], ':') != false)
 120+ {
 121+ // category link
121122
122 - if (preg_match("/^[Ww]ikipedia:/", $po["page"])) {
123 - // follow wikipedia category link
124 - $po["token"] = HAWIKI_TOKEN_WIKINAME;
125 - return($po);
126 - }
 123+ if (preg_match("/^[Ww]ikipedia:/", $po["page"])) {
 124+ // follow wikipedia category link
 125+ $po["token"] = HAWIKI_TOKEN_WIKINAME;
 126+ return($po);
 127+ }
127128
128 - if (preg_match("/^[Ww]iktionary:/", $po["page"])) {
129 - // display wiktionary links as plain text
130 - $po["text"] = $po["label"];
131 - $po["token"] = HAWIKI_TOKEN_TEXT;
132 - return($po);
133 - }
 129+ if (preg_match("/^[Ww]iktionary:/", $po["page"])) {
 130+ // display wiktionary links as plain text
 131+ $po["text"] = $po["label"];
 132+ $po["token"] = HAWIKI_TOKEN_TEXT;
 133+ return($po);
 134+ }
134135
135 - // Image Link - added localization of property strings missing (not needed for lang = en, es)
 136+ // Image Link - added localization of property strings missing (not needed for lang = en, es)
136137
137 - if (!defined('DISABLE_IMAGES') || !DISABLE_IMAGES) {
138 - $temp = namespace_regex('Image:','/');
139 - if (preg_match($temp, $po['page'])) {
140 -# // image source
141 -# $imagename = preg_replace('/^[^\\:]+\\:/', '', $link[0]);
142 -# $temp = md5($imagename);
143 -##http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/ISS002-E-5456_2.JPG/180px-ISS002-E-5456_2.JPG
144 -# $po['src'] = (translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/').
145 -# $_SESSION['language'] . DIRECTORY_SEPARATOR.
146 -# translate_wikipedia_keyword('thumb') . DIRECTORY_SEPARATOR.
147 -# substr($temp,0,1) . DIRECTORY_SEPARATOR.
148 -# substr($temp,0,2) . DIRECTORY_SEPARATOR.
149 -# $imagename . DIRECTORY_SEPARATOR.
150 -# HAWIKI_DISP_WIDTH.'px-'.$imagename);
151 - // isolate image description.
152 - $temp = substr($wikipage, strlen($link[0]));
153 - $ltemp = -1;
154 - while($ltemp != strlen($temp)) {
155 - $ltemp = strlen($temp);
156 - $temp = preg_replace(array('/^\|/', '/\|$/',
157 - '/^(left|right|center|[0-9]+px|thumb)\|/',
158 - '/\|(left|right|center|[0-9]+px|thumb)$/',
159 - '/^(left|right|center|[0-9]+px|thumb)$/'), '', $temp);
160 - }
161 - $po['subscript'] = $temp;
162 - // convert wikilinks to plain text in alt tag.
163 - $po['alt'] = preg_replace(
164 - array('/\[\[([^|\]]+\|)([^\]]*)\]\]/', '/\[\[([^|\]]+)\]\]/'),
165 - array('$2', '$1'), $temp);
166 - // raw image source
167 - // - try wiki, if not there, it has to be on commons
168 - // - URL without scaling is e.g. http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/xyz.jpg
169 - $imagename = preg_replace('/^[^\\:]+\\:/', '', $link[0]);
170 - $imagetype = strtolower(preg_replace('/^.*\./', '.', $imagename));
171 - $temp = md5($imagename);
172 -# $img_url_tail = ( DIRECTORY_SEPARATOR.
173 -# translate_wikipedia_keyword('thumb') . DIRECTORY_SEPARATOR.
174 - $img_url_tail = ( DIRECTORY_SEPARATOR.
175 - substr($temp,0,1) . DIRECTORY_SEPARATOR.
176 - substr($temp,0,2) . DIRECTORY_SEPARATOR.
177 - $imagename );
178 - $img_url_head = ( translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/' ));
179 -# echo '<br />'.
180 - $img_w_url_head = ( $img_url_head . $_SESSION['language'] );
181 - $img_c_url_head = ( $img_url_head . 'commons' );
182 - $img_w_raw_url = ( $img_w_url_head . $img_url_tail );
183 - $img_c_raw_url = ( $img_c_url_head . $img_url_tail );
184 - if(uri_exists($img_w_raw_url))
185 - {
186 - $img_raw_url =& $img_w_raw_url;
187 - $img_url_head =& $img_w_url_head;
188 - }
189 - else
190 - //FIXME: image not on commons, then ? (we safe bandwith ignoring this possibility here)
191 - # if(uri_exists($img_c_raw_url))
192 - {
193 - $img_raw_url =& $img_c_raw_url;
194 - $img_url_head =& $img_c_url_head;
195 - }
196 - # else
197 - # {
198 - # $img_raw_uwl = FALSE;
199 - # }
200 - // image width
201 - // - if width given in wiki code, use it,
202 - // - else get it from the image.
203 - // - if width is less than both IMG_MAX_WIDTH and HAWIKI_DISP_WIDTH, use it,
204 - // - else use the minimum of IMG_MAX_WIDTH and HAWIKI_DISP_WIDTH.
205 - // - don't request scaling to original size (unless wiki code gives a width)
206 - $width = (($imagetype == '.svg') ? (1+HAWIKI_DISP_WIDTH) : 0 ); // force thumbnail conversion for .svg
207 - $temp = substr($wikipage, strlen($link[0]));
208 - $temp = preg_replace('/\[\[[^\]]*\]\]/', '_', $temp); // get rid of links
209 - $temp = explode("|", $temp); // isolate elements
210 - foreach($temp as $tmp)
211 - {
212 - if(preg_match('/^[0-9]+px$/', trim($tmp)))
213 - $width = 0+preg_replace('/[^0-9]+/', '', $tmp); //keep last.
214 - }
215 - if(0 >= $width)
216 - {
217 - $width = (($imagetype == '.svg') ? (1+HAWIKI_DISP_WIDTH) : 0 ); // force thumbnail conversion for .svg
218 - $temp = @(getimagesize($img_raw_url));
219 - $temp = ( isset($temp[0]) ? $temp[0] : $width );
220 - if(($temp > IMG_MAX_WIDTH) || ($temp > HAWIKI_DISP_WIDTH))
221 - {
222 - $width = $temp; // thumbnail only if known and too wide.
223 - }
224 - }
225 - $width = min($width, IMG_MAX_WIDTH, HAWIKI_DISP_WIDTH);
226 - // image source
227 -#http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/ISS002-E-5456_2.JPG/180px-ISS002-E-5456_2.JPG
228 -# $po['src'] = ((($imagetype == '.svg' || $imagetype == '.png')?('/image.php?'):''). // convert image locally
229 -# translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/'). // else load from wmf server
230 -# $_SESSION['language'] . DIRECTORY_SEPARATOR.
231 -# translate_wikipedia_keyword('thumb') . DIRECTORY_SEPARATOR.
232 -# substr($temp,0,1) . DIRECTORY_SEPARATOR.
233 -# substr($temp,0,2) . DIRECTORY_SEPARATOR.
234 -# $imagename .
235 -# ($width ? (DIRECTORY_SEPARATOR.$width.'px-'.$imagename) : ''));
236 -# $po["token"] = HAWIKI_TOKEN_IMAGE;
237 -#http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/ISS002-E-5456_2.JPG/180px-ISS002-E-5456_2.JPG
238 -# $img_url_tail = ( DIRECTORY_SEPARATOR.
239 -# substr($temp,0,1) . DIRECTORY_SEPARATOR.
240 -# substr($temp,0,2) . DIRECTORY_SEPARATOR.
241 -# $imagename );
242 -# $img_url_head = ( translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/' ));
243 -
244 -# = ( DIRECTORY_SEPARATOR.
245 -# $imagename .
246 -# ($width ? (DIRECTORY_SEPARATOR.$width.'px-'.$imagename) : ''));
247 -# echo '<br />'.
248 - $po['src'] = ((($imagetype == '.svg' || $imagetype == '.png')?('/image.php?'):''). // convert image locally
249 - $img_url_head . // else load from wmf server, be it the wiki or commons
250 - ($width ? (DIRECTORY_SEPARATOR . translate_wikipedia_keyword('thumb')) : '' ).
251 - $img_url_tail .
252 - ($width ? (DIRECTORY_SEPARATOR.$width.'px-'.$imagename) : ''));
253 - $po["token"] = HAWIKI_TOKEN_IMAGE;
254 - return($po);
255 - }
256 - }
 138+ if (!defined('DISABLE_IMAGES') || !DISABLE_IMAGES) {
 139+ $temp = namespace_regex('Image:','/');
 140+ if (preg_match($temp, $po['page'])) {
 141+ # // image source
 142+ # $imagename = preg_replace('/^[^\\:]+\\:/', '', $link[0]);
 143+ # $temp = md5($imagename);
 144+ ##http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/ISS002-E-5456_2.JPG/180px-ISS002-E-5456_2.JPG
 145+ # $po['src'] = (translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/').
 146+ # $_SESSION['language'] . DIRECTORY_SEPARATOR.
 147+ # translate_wikipedia_keyword('thumb') . DIRECTORY_SEPARATOR.
 148+ # substr($temp,0,1) . DIRECTORY_SEPARATOR.
 149+ # substr($temp,0,2) . DIRECTORY_SEPARATOR.
 150+ # $imagename . DIRECTORY_SEPARATOR.
 151+ # HAWIKI_DISP_WIDTH.'px-'.$imagename);
 152+ // isolate image description.
 153+ $temp = substr($wikipage, strlen($link[0]));
 154+ $ltemp = -1;
 155+ while($ltemp != strlen($temp)) {
 156+ $ltemp = strlen($temp);
 157+ $temp = preg_replace(array('/^\|/', '/\|$/',
 158+ '/^(left|right|center|[0-9]+px|thumb)\|/',
 159+ '/\|(left|right|center|[0-9]+px|thumb)$/',
 160+ '/^(left|right|center|[0-9]+px|thumb)$/'), '', $temp);
 161+ }
 162+ $po['subscript'] = $temp;
 163+ // convert wikilinks to plain text in alt tag.
 164+ $po['alt'] = preg_replace(
 165+ array('/\[\[([^|\]]+\|)([^\]]*)\]\]/', '/\[\[([^|\]]+)\]\]/'),
 166+ array('$2', '$1'), $temp);
 167+ // raw image source
 168+ // - try wiki, if not there, it has to be on commons
 169+ // - URL without scaling is e.g. http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/xyz.jpg
 170+ $imagename = preg_replace('/^[^\\:]+\\:/', '', $link[0]);
 171+ $imagetype = strtolower(preg_replace('/^.*\./', '.', $imagename));
 172+ $temp = md5($imagename);
 173+ # $img_url_tail = ( DIRECTORY_SEPARATOR.
 174+ # translate_wikipedia_keyword('thumb') . DIRECTORY_SEPARATOR.
 175+ $img_url_tail = ( '/'.
 176+ substr($temp,0,1) . '/'.
 177+ substr($temp,0,2) . '/'.
 178+ $imagename );
 179+ $img_url_head = ( translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/' ));
 180+ # echo '<br />'.
 181+ $img_w_url_head = ( $img_url_head . $_SESSION['language'] );
 182+ $img_c_url_head = ( $img_url_head . 'commons' );
 183+ $img_w_raw_url = ( $img_w_url_head . $img_url_tail );
 184+ $img_c_raw_url = ( $img_c_url_head . $img_url_tail );
 185+ if(uri_exists($img_w_raw_url))
 186+ {
 187+ $img_raw_url =& $img_w_raw_url;
 188+ $img_url_head =& $img_w_url_head;
 189+ }
 190+ else
 191+ //FIXME: image not on commons, then ? (we safe bandwith ignoring this possibility here)
 192+ # if(uri_exists($img_c_raw_url))
 193+ {
 194+ $img_raw_url =& $img_c_raw_url;
 195+ $img_url_head =& $img_c_url_head;
 196+ }
 197+ # else
 198+ # {
 199+ # $img_raw_uwl = FALSE;
 200+ # }
 201+ // image width
 202+ // - if width given in wiki code, use it,
 203+ // - else get it from the image.
 204+ // - if width is less than both IMG_MAX_WIDTH and HAWIKI_DISP_WIDTH, use it,
 205+ // - else use the minimum of IMG_MAX_WIDTH and HAWIKI_DISP_WIDTH.
 206+ // - don't request scaling to original size (unless wiki code gives a width)
 207+ $width = (($imagetype == '.svg') ? (1+HAWIKI_DISP_WIDTH) : 0 ); // force thumbnail conversion for .svg
 208+ $temp = substr($wikipage, strlen($link[0]));
 209+ $temp = preg_replace('/\[\[[^\]]*\]\]/', '_', $temp); // get rid of links
 210+ $temp = explode("|", $temp); // isolate elements
 211+ foreach($temp as $tmp)
 212+ {
 213+ if(preg_match('/^[0-9]+px$/', trim($tmp)))
 214+ $width = 0+preg_replace('/[^0-9]+/', '', $tmp); //keep last.
 215+ }
 216+ if(0 >= $width)
 217+ {
 218+ $width = (($imagetype == '.svg') ? (1+HAWIKI_DISP_WIDTH) : 0 ); // force thumbnail conversion for .svg
 219+ $temp = @(getimagesize($img_raw_url));
 220+ $temp = ( isset($temp[0]) ? $temp[0] : $width );
 221+ if(($temp > IMG_MAX_WIDTH) || ($temp > HAWIKI_DISP_WIDTH))
 222+ {
 223+ $width = $temp; // thumbnail only if known and too wide.
 224+ }
 225+ }
 226+ $width = min($width, IMG_MAX_WIDTH, HAWIKI_DISP_WIDTH);
 227+ // image source
 228+ #http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/ISS002-E-5456_2.JPG/180px-ISS002-E-5456_2.JPG
 229+ # $po['src'] = ((($imagetype == '.svg' || $imagetype == '.png')?('/image.php?'):''). // convert image locally
 230+ # translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/'). // else load from wmf server
 231+ # $_SESSION['language'] . DIRECTORY_SEPARATOR.
 232+ # translate_wikipedia_keyword('thumb') . DIRECTORY_SEPARATOR.
 233+ # substr($temp,0,1) . DIRECTORY_SEPARATOR.
 234+ # substr($temp,0,2) . DIRECTORY_SEPARATOR.
 235+ # $imagename .
 236+ # ($width ? (DIRECTORY_SEPARATOR.$width.'px-'.$imagename) : ''));
 237+ # $po["token"] = HAWIKI_TOKEN_IMAGE;
 238+ #http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/ISS002-E-5456_2.JPG/180px-ISS002-E-5456_2.JPG
 239+ # $img_url_tail = ( DIRECTORY_SEPARATOR.
 240+ # substr($temp,0,1) . DIRECTORY_SEPARATOR.
 241+ # substr($temp,0,2) . DIRECTORY_SEPARATOR.
 242+ # $imagename );
 243+ # $img_url_head = ( translate_wikipedia_keyword('http://upload.wikimedia.org/wikipedia/' ));
257244
258 - // other category links are not supported
259 - $po["token"] = HAWIKI_TOKEN_DUMMY;
260 - if (preg_match("/^([\r\n]+)/", $input, $matches))
261 - {
262 - // skip whole line
263 - $input = substr($input, strlen($matches[1]));
264 - $po["firstColumn"] = true;
265 - }
266 - else
267 - $po["firstColumn"] = false;
 245+ # = ( DIRECTORY_SEPARATOR.
 246+ # $imagename .
 247+ # ($width ? (DIRECTORY_SEPARATOR.$width.'px-'.$imagename) : ''));
 248+ # echo '<br />'.
 249+ $po['src'] = ((($imagetype == '.svg' || $imagetype == '.png')?('/image.php?'):''). // convert image locally
 250+ $img_url_head . // else load from wmf server, be it the wiki or commons
 251+ ($width ? ('/' . translate_wikipedia_keyword('thumb')) : '' ).
 252+ $img_url_tail .
 253+ ($width ? ("/{$width}px-{$imagename}") : ''));
 254+ $po["token"] = HAWIKI_TOKEN_IMAGE;
 255+ return($po);
 256+ }
 257+ }
268258
269 - return($po);
270 - }
 259+ // other category links are not supported
 260+ $po["token"] = HAWIKI_TOKEN_DUMMY;
 261+ if (preg_match("/^([\r\n]+)/", $input, $matches))
 262+ {
 263+ // skip whole line
 264+ $input = substr($input, strlen($matches[1]));
 265+ $po["firstColumn"] = true;
 266+ }
 267+ else
 268+ $po["firstColumn"] = false;
271269
272 - $po["token"] = HAWIKI_TOKEN_WIKINAME;
273 - return($po);
274 - }
 270+ return($po);
 271+ }
275272
276 - if (preg_match("/^(\[(https?:\/\/.*?)\])/", $input, $matches) ||
277 - preg_match("/^((https?:\/\/.*?)[ \r\n])/", $input, $matches))
278 - {
279 - // link
280 - $input = substr($input, strlen($matches[1]));
 273+ $po["token"] = HAWIKI_TOKEN_WIKINAME;
 274+ return($po);
 275+ }
281276
282 - $seperator_pos = strpos($matches[2], ' ');
283 - if ($seperator_pos == true) // alternative link label available
284 - {
285 - $po["url"] = substr($matches[2], 0, $seperator_pos);
286 - $po["label"] = substr($matches[2], $seperator_pos + 1);
287 - }
288 - else
289 - {
290 - $po["url"] = $matches[2]; // no extra labelling of link required
291 - $po["label"] = $matches[2];
292 - }
 277+ if (preg_match("/^(\[(https?:\/\/.*?)\])/", $input, $matches) ||
 278+ preg_match("/^((https?:\/\/.*?)[ \r\n])/", $input, $matches))
 279+ {
 280+ // link
 281+ $input = substr($input, strlen($matches[1]));
293282
294 - $po["ext"] = true;
295 - $po["extlink_text"] = hawtra("external link");
 283+ $seperator_pos = strpos($matches[2], ' ');
 284+ if ($seperator_pos == true) // alternative link label available
 285+ {
 286+ $po["url"] = substr($matches[2], 0, $seperator_pos);
 287+ $po["label"] = substr($matches[2], $seperator_pos + 1);
 288+ }
 289+ else
 290+ {
 291+ $po["url"] = $matches[2]; // no extra labelling of link required
 292+ $po["label"] = $matches[2];
 293+ }
296294
297 - $po["token"] = HAWIKI_TOKEN_LINK;
298 - return($po);
299 - }
 295+ $po["ext"] = true;
 296+ $po["extlink_text"] = hawtra("external link");
300297
301 - // plain text - search next syntax element resp. eof
302 - //if (preg_match("/^(.*?)(~np~|__|''|===|-=|\^|::|~~|\)\)|\(\(|\[|\|\||\{img |\(:|[A-Z][a-z0-9_]+[A-Z][a-z0-9_]+[\|\w]*|[\r\n]|\{[A-Z]+\([^\)]*\)\}|<[aA]|$)/",
303 - // $input, $matches))
304 - if (preg_match("/^(.*?)(''|==|\[\[|\[?https?:\/\/|[\r\n]|$)/",
305 - $input, $matches))
306 - {
307 - // plain text
308 - $po["text"] = $matches[1];
309 - $input = substr($input, strlen($po["text"]));
 298+ $po["token"] = HAWIKI_TOKEN_LINK;
 299+ return($po);
 300+ }
310301
311 - if (strlen($po["text"]) > 0)
312 - $po["token"] = HAWIKI_TOKEN_TEXT;
313 - else {
314 - //echo "ERROR! input:<br />" . $input;
315 - return(0); // can happen in case of syntax errors
316 - }
317 - return($po);
318 - }
 302+ // plain text - search next syntax element resp. eof
 303+ //if (preg_match("/^(.*?)(~np~|__|''|===|-=|\^|::|~~|\)\)|\(\(|\[|\|\||\{img |\(:|[A-Z][a-z0-9_]+[A-Z][a-z0-9_]+[\|\w]*|[\r\n]|\{[A-Z]+\([^\)]*\)\}|<[aA]|$)/",
 304+ // $input, $matches))
 305+ if (preg_match("/^(.*?)(''|==|\[\[|\[?https?:\/\/|[\r\n]|$)/",
 306+ $input, $matches))
 307+ {
 308+ // plain text
 309+ $po["text"] = $matches[1];
 310+ $input = substr($input, strlen($po["text"]));
319311
320 - return(0);
 312+ if (strlen($po["text"]) > 0)
 313+ $po["token"] = HAWIKI_TOKEN_TEXT;
 314+ else {
 315+ //echo "ERROR! input:<br />" . $input;
 316+ return(0); // can happen in case of syntax errors
 317+ }
 318+ return($po);
 319+ }
 320+
 321+ return(0);
321322 }
322323
323324 ?>
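
Two of the commit-message items land in this file. The PHP 5.2 regex workaround replaces the nested-bracket pattern for [[...]] links with the simpler /^\[\[([^\[\]]+)\]\]/, keeping the original pattern as a comment directly beneath it. The DIRECTORY_SEPARATOR fix swaps DIRECTORY_SEPARATOR for a literal '/' when building upload.wikimedia.org image URLs: DIRECTORY_SEPARATOR is '\' on Windows, so URLs built with it only worked on Unix-like hosts. A standalone sketch of the URL construction as it reads after this change, using the example filename quoted in the source comments and a literal 'thumb' in place of translate_wikipedia_keyword('thumb'); the sample width is assumed:

  // URL components always use '/', never the host OS path separator.
  $imagename    = 'ISS002-E-5456_2.JPG';                    // example from the source comments
  $img_url_head = 'http://upload.wikimedia.org/wikipedia/commons';
  $width        = 180;                                      // assumed thumbnail width

  $hash = md5($imagename);                                  // uploads are sharded by filename hash
  $img_url_tail = '/' . substr($hash, 0, 1) . '/' . substr($hash, 0, 2) . '/' . $imagename;

  $src = $img_url_head
       . ($width ? '/thumb' : '')
       . $img_url_tail
       . ($width ? "/{$width}px-{$imagename}" : '');
  // cf. the example URL in the source comments:
  // http://upload.wikimedia.org/wikipedia/commons/thumb/f/fa/ISS002-E-5456_2.JPG/180px-ISS002-E-5456_2.JPG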
Index: trunk/wap/hawiki/hawiki_cfg.inc
@@ -12,7 +12,7 @@
1313 define("HAWIKI_DATETIME_SHORT", "d/m/Y [H:i]"); // for modifications see PHP function date()
1414 define("HAWIKI_DATETIME_LONG", "l, F jS Y, g:i A");
1515 define("HAWIKI_EXTLINK_ICON", "images/external_link"); // must exist as .gif and .wbmp version
16 -define("HAWIKI_WIKIPEDIA_ICON", "images/32px-Wikipedia-logo"); // must exist as .gif and .wbmp version, we have 16, 32, 58, 81.
 16+define("HAWIKI_WIKIPEDIA_ICON", "images/81px-Wikipedia-logo"); // must exist as .gif and .wbmp version, we have 16, 32, 58, 81.
1717 define("HAWIKI_SKIN", "hawhaw/skin/hawpedia_phone/skin.css"); // comment out to disable skin
1818
1919 ?>