r109403 MediaWiki - Code Review archive

Repository: MediaWiki
Revision: < r109402 | r109403 | r109404 >
Date: 17:16, 18 January 2012
Author: oren
Status: deferred
Tags:
Comment:
Maven project layout - moved the old webinterface from /webinterface to main/webapp
Modified paths:
  • /trunk/lucene-search-3/src/main/webapp/webinterface (added)

Diff

Index: trunk/lucene-search-3/src/main/webapp/webinterface/lsweb.py
@@ -0,0 +1,496 @@
 2+import string, cgi, time, urlparse, urllib2, urllib, copy
 3+import re, math
 4+from htmlentitydefs import name2codepoint
 5+from os import curdir, sep
 6+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
 7+from urllib2 import URLError, HTTPError
 8+
 9+#search_host = { 'enwiki' : "srv79:8123", '<default>': 'srv79:8123' }
 10+search_host = {'<default>' : 'srv79:8123',
 11+ 'jawiki' : "localhost:8123",
 12+ 'frwiki' : "localhost:8123",
 13+ 'dewiki' : "localhost:8123",
 14+ 'itwiki' : "localhost:8123",
 15+ 'jawikiquote' : "localhost:8123",
 16+ 'wikilucene' : 'localhost:8123' }
 17+#search_host = {'<default>' : 'localhost:8123'}
 18+
 19+canon_namespaces = { 0 : '', 1: 'Talk', 2: 'User', 3: 'User_talk',
 20+ 4 : 'Project', 5 : 'Project_talk', 6 : 'Image', 7 : 'Image_talk',
 21+ 8 : 'MediaWiki', 9: 'MediaWiki_talk', 10: 'Template', 11: 'Template_talk',
 22+ 12 : 'Help', 13: 'Help_talk', 14: 'Category', 15: 'Category_talk',
 23+ 100: 'Portal', 101: 'Portal_talk', 102: 'Extension', 103: 'Extension_talk',
 24+ 104: 'Index', 105: 'Index_talk', 112: 'Portal', 113: 'Portal_talk'}
 25+prefix_aliases = { 'm': 0, 'mt' : 1, 'u' : 2, 'ut' : 3, 'p': 4, 'pt':5, 'i':6, 'it':7,
 26+ 'mw':8, 'mwt':9, 't':10, 'tt':11, 'h':12, 'ht':13, 'c':14, 'ct': 15}
 27+
 28+snippet_separator = " <b>...</b> ";
 29+
 30+def make_link(params,offset,method='search',query=None):
 31+ ''' Duplicate existing query (denoted by params), but with a different offset '''
 32+ dupl = copy.copy(params)
 33+ dupl['offset'] = [offset]
 34+ if query != None:
 35+ dupl['query'] = [query]
 36+ return '/%s?%s' % (method,urllib.urlencode(dupl,True))
 37+
 38+
 39+def rewrite_callback(match):
 40+ namespaces = []
 41+ for prefix in match.group(1).split(','):
 42+ # check for canonical namespace names
 43+ iscanonical = False
 44+ for ns,name in canon_namespaces.iteritems():
 45+ if name.lower() == prefix.lower():
 46+ iscanonical = True # is there a way to continue outer loop in python?
 47+ namespaces.append(str(ns))
 48+ break
 49+ if iscanonical:
 50+ continue
 51+ # check aliases
 52+ if prefix_aliases.has_key(prefix):
 53+ namespaces.append(str(prefix_aliases[prefix]))
 54+ continue
 55+
 56+ if namespaces!=[]:
 57+ return '[%s]:' % ','.join(namespaces)
 58+ else:
 59+ return match.group()
 60+
 61+
 62+def rewrite_query(query):
 63+ '''Rewrite query prefixes, port of php version in LuceneSearch extension'''
 64+ query = query.decode('utf-8')
 65+ prefix_re = re.compile('([a-zA-Z0-9_,]+):') # we will parse only canonical namespaces here
 66+
 67+ return prefix_re.sub(rewrite_callback,query)
 68+
 69+def make_wiki_link(line,dbname,caption=''):
 70+ parts = line.split(' ')
 71+ score = float(parts[0])
 72+ title = ''
 73+ if len(parts) == 3:
 74+ ns = canon_namespaces[int(parts[1])]
 75+ if ns != '':
 76+ ns = ns +":"
 77+ title = ns+parts[2]
 78+ else:
 79+ iw = parts[1]
 80+ ns = canon_namespaces[int(parts[2])]
 81+ if ns != '':
 82+ ns = ns +":"
 83+ title = iw+':'+ns+parts[3]
 84+
 85+ if dbname == 'mediawikiwiki':
 86+ link= 'http://www.mediawiki.org/wiki/%s' % (title)
 87+ elif dbname == 'metawiki':
 88+ link = 'http://meta.wikimedia.org/wiki/%s' % (title)
 89+ elif dbname.endswith('wiktionary'):
 90+ link = 'http://%s.wiktionary.org/wiki/%s' % (dbname[0:2],title)
 91+ else:
 92+ link = 'http://%s.wikipedia.org/wiki/%s' % (dbname[0:2],title)
 93+ decoded = urllib.unquote(title.replace('_',' '))
 94+ if caption !='':
 95+ caption = ns+urllib.unquote(caption.replace('_',' '))
 96+ else:
 97+ caption = decoded
 98+ return ['%1.2f -- <a href="%s">%s</a>' % (score,link,caption),title]
 99+
 100+def make_title_link(line,dbname,caption=''):
 101+ interwiki={'w':'wikipedia', 'wikt':'wiktionary', 's':'wikisource', 'b': 'wikibooks', 'n':'wikinews', 'v':'wikiversity', 'q':'wikiquote',
 102+ 'mw': 'mediawiki', 'meta': 'meta', 'wikinews': 'wikinews'};
 103+ parts = line.split(' ')
 104+ score = float(parts[0])
 105+ title = ''
 106+ iw = parts[1]
 107+ # ns = canon_namespaces[int(parts[2])]
 108+ ns = urllib.unquote(parts[3])
 109+ if ns != '':
 110+ ns = ns +":"
 111+ title = iw+':'+ns+parts[4]
 112+ titleText = ns+parts[4]
 113+
 114+ if dbname == 'mediawikiwiki':
 115+ link= 'http://www.mediawiki.org/wiki/%s' % (title)
 116+ elif dbname == 'metawiki':
 117+ link = 'http://meta.wikimedia.org/wiki/%s' % (title)
 118+ elif dbname.endswith('wiktionary'):
 119+ link = 'http://%s.wiktionary.org/wiki/%s' % (dbname[0:2],title)
 120+ else:
 121+ link = 'http://%s.wikipedia.org/wiki/%s' % (dbname[0:2],title)
 122+ decoded = urllib.unquote(titleText.replace('_',' '))
 123+ if caption!='':
 124+ caption = ns+urllib.unquote(caption.replace('_',' '))
 125+ else:
 126+ caption = decoded
 127+ return ['%s : (%1.2f) <a href="%s">%s</a>' % (interwiki[iw],score,link,caption),title]
 128+
 129+def extract_snippet(line,final_separator=True,originalIsKey=False):
 130+ parts = line.split(' ')
 131+ type = parts[0]
 132+ splits = de_bracket_split(parts[1])
 133+ highlight = de_bracket_split(parts[2])
 134+ suffix = urllib.unquote_plus(de_bracket(parts[3]))
 135+ text = urllib.unquote_plus(parts[4].strip())
 136+ original = None
 137+ if len(parts) > 5:
 138+ original = urllib.unquote_plus(parts[5].strip())
 139+
 140+ splits.append(len(text))
 141+ start = 0
 142+ snippet = ""
 143+ hi = 0
 144+ for sp in splits:
 145+ sp = int(sp)
 146+ while hi < len(highlight) and int(highlight[hi]) < sp:
 147+ s = int(highlight[hi])
 148+ e = int(highlight[hi+1])
 149+ snippet += text[start:s] + "<b>" + text[s:e] + "</b>"
 150+ start = e
 151+ hi += 2
 152+ snippet += text[start:sp]
 153+ if sp == len(text) and suffix != '':
 154+ snippet += suffix
 155+ elif final_separator:
 156+ snippet += snippet_separator
 157+ start = sp;
 158+ if originalIsKey:
 159+ origParts = original.split(":")
 160+ origNs = canon_namespaces[int(origParts[0])]
 161+ if origNs != '':
 162+ origNs = origNs +":"
 163+ original = origNs+origParts[1]
 164+ snippet = origNs+snippet;
 165+
 166+ return [snippet,original]
 167+
 168+def extract_suggest(line):
 169+ parts = line.split(' ')
 170+ type = parts[0]
 171+ highlight = de_bracket_split(parts[1])
 172+ text = urllib.unquote_plus(parts[2].strip())
 173+
 174+ start = 0
 175+ snippet = ""
 176+ hi = 0
 177+ while hi < len(highlight):
 178+ s = int(highlight[hi])
 179+ e = int(highlight[hi+1])
 180+ snippet += text[start:s] + "<i>" + text[s:e] + "</i>"
 181+ start = e
 182+ hi += 2
 183+ if start < len(text):
 184+ snippet += text[start:len(text)]
 185+
 186+ for key,val in canon_namespaces.iteritems():
 187+ snippet = snippet.replace('[%d]' % key, val)
 188+
 189+ return [snippet,text]
 190+
 191+
 192+def de_bracket(s):
 193+ return s[1:len(s)-1]
 194+
 195+def de_bracket_split(s):
 196+ if s == '[]':
 197+ return []
 198+ else:
 199+ return de_bracket(s).split(',')
 200+
 201+class MyHandler(BaseHTTPRequestHandler):
 202+ def do_GET(self):
 203+ try:
 204+ s = urlparse.urlparse(self.path)
 205+ if s[2] == '/search' or s[2] == '/related':
 206+ method = s[2][1:]
 207+ start_time = time.time()
 208+ params = {}
 209+ # parse key1=val1&key2=val2 syntax
 210+ params = cgi.parse_qs(s[4])
 211+
 212+ # defaults
 213+ limit = 20
 214+ offset = 0
 215+ namespaces = []
 216+ case = "ignore"
 217+
 218+ # parameters
 219+ for key,val in params.iteritems():
 220+ if key == 'dbname':
 221+ dbname = val[0]
 222+ elif key == 'query':
 223+ query = val[0]
 224+ elif key == 'limit':
 225+ limit = int(val[0])
 226+ elif key == 'offset':
 227+ offset = int(val[0])
 228+ elif key.startswith('ns'):
 229+ namespaces.append(key[2:])
 230+
 231+ rewritten = rewrite_query(query)
 232+
 233+ if search_host.has_key(dbname):
 234+ host = search_host[dbname]
 235+ else:
 236+ host = search_host['<default>']
 237+
 238+ if dbname.endswith("-exact"):
 239+ case = "exact"
 240+ dbname = dbname[0:-6]
 241+
 242+ # make search url for ls2
 243+ search_url = 'http://%s/%s/%s/%s' % (host,method,dbname,urllib.quote(rewritten.encode('utf-8')))
 244+ search_params = urllib.urlencode({'limit' : limit, 'offset' : offset, 'namespaces' : ','.join(namespaces), "case" : case}, True)
 245+
 246+ # process search results
 247+ try:
 248+ results = urllib2.urlopen(search_url+"?"+search_params)
 249+ numhits = int(results.readline())
 250+ lasthit = min(offset+limit,numhits)
 251+ # info
 252+ infoLine = results.readline()
 253+ # suggestions
 254+ suggest = results.readline()
 255+ suggestHl = ""
 256+ if suggest.startswith("#suggest "):
 257+ [suggestHl,suggest] = extract_suggest(suggest)
 258+ else:
 259+ suggest = ""
 260+ # interwiki
 261+ interwiki_count = results.readline();
 262+ interwiki_count = int(interwiki_count.split(' ')[1])
 263+ i = 0
 264+ interwiki = []
 265+ line = results.readline()
 266+ nextLine = ''
 267+ while not line.startswith("#results"):
 268+ if not line.startswith('#'):
 269+ titleHl = ''
 270+ redirectHl = ''
 271+ redirectLink = None
 272+ nextLine = results.readline()
 273+ if nextLine.startswith('#h.title'):
 274+ [titleHl, orig] = extract_snippet(nextLine,False)
 275+ nextLine = results.readline()
 276+ if nextLine.startswith('#h.redirect'):
 277+ [redirectHl, redirectLink] = extract_snippet(nextLine,False);
 278+ if redirectLink != None:
 279+ redirectLink = 'http://%s.wikipedia.org/wiki/%s' % (dbname[0:2],redirectLink)
 280+ elif nextLine.startswith('#h.redirect'):
 281+ [redirectHl, redirectLink] = extract_snippet(nextLine,False);
 282+ if redirectLink != None:
 283+ redirectLink = 'http://%s.wikipedia.org/wiki/%s' % (dbname[0:2],redirectLink)
 284+
 285+ interwikiHtml = make_title_link(line,dbname,titleHl)[0]
 286+ if redirectLink != None:
 287+ interwikiHtml += '<small> (redirect <a href="%s">%s</a>)</small>' % (redirectLink.strip(), redirectHl)
 288+ interwiki.append(interwikiHtml)
 289+
 290+ if nextLine == '':
 291+ line = results.readline()
 292+ else:
 293+ line = nextLine
 294+ nextLine = ''
 295+ if line.startswith('#h.date'):
 296+ line = results.readline() # just skip
 297+ if line.startswith('#h.wordcount'):
 298+ line = results.readline() # just skip
 299+
 300+ # html headers
 301+ self.send_response(200)
 302+ self.send_header('Cache-Control','no-cache')
 303+ self.send_header('Content-type','text/html')
 304+ self.end_headers()
 305+ self.wfile.write('<html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8" /><title>LS2 search: %s</title></head>' % query)
 306+ if method == 'related':
 307+ self.wfile.write('<body>Articles related to article: %s <br>' % query)
 308+ else:
 309+ self.wfile.write('<body>Query: %s <br>' % query)
 310+ if suggest != "":
 311+ sparams = params.copy()
 312+ sparams['query'] = suggest;
 313+ slink = make_link(sparams,0,method)
 314+ self.wfile.write('Did you mean: <a href="%s">%s</a><br>' % (slink,suggestHl))
 315+
 316+ # generate next/prev searchbar
 317+ if offset != 0:
 318+ link = make_link(params,max(offset-limit,0),method)
 319+ prev = '<a href="%s">&lt; Previous %s</a>' % (link,limit)
 320+ else:
 321+ prev = "&lt; Previous"
 322+ if numhits > lasthit:
 323+ link = make_link(params,offset+limit,method)
 324+ next = '<a href="%s">Next %s &gt;</a>' % (link,limit)
 325+ else:
 326+ next = "Next &gt;"
 327+ searchbar = '<a href="https://www.mediawiki.org/">New search</a> | %s -- %s | Total results: %d' % (prev, next, numhits)
 328+
 329+
 330+ # show upper search bar
 331+ self.wfile.write(searchbar)
 332+ self.wfile.write('<hr>Showing results %d - %d<br>' % (offset,lasthit))
 333+
 334+ # begin the main results table
 335+ self.wfile.write('<table><tr><td>')
 336+
 337+ # show results
 338+ self.wfile.write('Score / Article<br>')
 339+ lines = []
 340+ for line in results:
 341+ lines.append(line)
 342+ i = 0
 343+ while i < len(lines):
 344+ scoreLine = lines[i];
 345+ # decode highlight info
 346+ textHl = ''
 347+ redirectHl = ''
 348+ redirectLink = None
 349+ sectionHl = ''
 350+ sectionLink = None
 351+ titleHl = ''
 352+ date = None;
 353+ wordcount = None;
 354+ [link,title] = make_wiki_link(scoreLine,dbname)
 355+ while i+1 < len(lines):
 356+ extra = lines[i+1]
 357+ if extra.startswith('#h.text'):
 358+ [newtext, orig] = extract_snippet(extra)
 359+ textHl += newtext
 360+ elif extra.startswith('#h.title'):
 361+ [titleHl, orig] = extract_snippet(extra,False)
 362+ [link,title] = make_wiki_link(scoreLine,dbname,titleHl)
 363+ elif extra.startswith('#h.redirect'):
 364+ [redirectHl, redirectLink] = extract_snippet(extra,False,True);
 365+ if redirectLink != None:
 366+ redirectLink = 'http://%s.wikipedia.org/wiki/%s' % (dbname[0:2],redirectLink)
 367+ elif extra.startswith('#h.section'):
 368+ [sectionHl, sectionLink] = extract_snippet(extra,False);
 369+ if sectionLink != None:
 370+ sectionLink = 'http://%s.wikipedia.org/wiki/%s#%s' % (dbname[0:2],title,sectionLink)
 371+ elif extra.startswith('#h.date'):
 372+ date = extra.split(' ')[1]
 373+ elif extra.startswith('#h.wordcount'):
 374+ wordcount = extra.split(' ')[1]
 375+ elif not extra.startswith('#h'):
 376+ break
 377+ i+=1
 378+
 379+ self.wfile.write(link) # title link
 380+ if redirectLink != None:
 381+ self.wfile.write('<small> (redirect <a href="%s">%s</a>)</small>' % (redirectLink.strip(), redirectHl))
 382+ if sectionLink != None:
 383+ self.wfile.write('<small> (section <a href="%s">%s</a>)</small>' % (sectionLink.strip(), sectionHl))
 384+ self.wfile.write('<br>');
 385+ if textHl != '':
 386+ textHl = textHl
 387+ self.wfile.write('<div style="width:500px"><font size="-1">%s</font></div>' % textHl)
 388+ if date != None:
 389+ self.wfile.write('<font size="-1"><i>Date: %s</i></font>' % date)
 390+ if wordcount != None:
 391+ dateprefix = ''
 392+ if date != None:
 393+ dateprefix = ' -- '
 394+ self.wfile.write('<font size="-1">%s<i>%s words</i></font>' % (dateprefix,wordcount))
 395+ if date != None or wordcount != None:
 396+ self.wfile.write(' -- ')
 397+ self.wfile.write('<font size="-1"><a href="%s">Related</a></font><br/>' % make_link(params,0,'related',urllib.unquote(title.replace('_',' '))))
 398+ i += 1
 399+
 400+ # write the grouped titles stuff
 401+ self.wfile.write('</td><td width=35% valign=top>')
 402+ if interwiki != []:
 403+ self.wfile.write('From sister projects:<br/>')
 404+ self.wfile.write('<font size="-1">')
 405+ for iw in interwiki:
 406+ self.wfile.write(iw+'<br/>')
 407+ self.wfile.write('</font>')
 408+ self.wfile.write('</td></tr></table>')
 409+ self.wfile.write('<hr>')
 410+ # show lower search bar
 411+ self.wfile.write(searchbar)
 412+ self.wfile.write('</body></html>')
 413+ except HTTPError:
 414+ self.send_error(400,'Bad request')
 415+ self.wfile.write("<div>Error in query</div>")
 416+ except URLError:
 417+ self.send_error(500,'Internal Server Error')
 418+ self.wfile.write("<div>Cannot connect to lucene search 2 daemon</div>")
 419+ delta_time = time.time() - start_time
 420+ print '[%s] Processed query %s in %d ms' %(time.strftime("%Y-%m-%d %H:%M:%S"),self.path,int(delta_time*1000))
 421+ elif s[2] == '/':
 422+ # show the search form
 423+ f = open(curdir + sep + "searchForm.html")
 424+ search_form = f.read()
 425+ f.close()
 426+ self.send_response(200)
 427+ self.send_header('Cache-Control','no-cache')
 428+ self.send_header('Content-type','text/html')
 429+ self.end_headers()
 430+ self.wfile.write(search_form)
 431+ elif s[2] == '/prefixQuery':
 432+ # prefix search wrapper
 433+ params = {}
 434+ # parse key1=val1&key2=val2 syntax
 435+ params = cgi.parse_qs(s[4])
 436+ query = ''
 437+ dbname = ''
 438+ namespaces = ''
 439+ for key,val in params.iteritems():
 440+ if key == 'dbname':
 441+ dbname = val[0]
 442+ elif key == 'query':
 443+ query = val[0]
 444+ elif key == 'namespaces':
 445+ namespaces = val[0]
 446+
 447+ if search_host.has_key(dbname):
 448+ host = search_host[dbname]
 449+ else:
 450+ host = search_host['<default>']
 451+
 452+ search_url = 'http://%s/prefix/%s/%s?format=json' % (host,dbname,urllib.quote(query))
 453+ if namespaces != '':
 454+ search_url += '&namespaces=%s' % namespaces
 455+
 456+ print(search_url)
 457+
 458+ # forward json text
 459+ try:
 460+ results = urllib2.urlopen(search_url)
 461+ self.send_response(200)
 462+ self.send_header('Cache-Control','no-cache')
 463+ self.send_header('Content-type','text/html')
 464+ self.end_headers()
 465+ for line in results:
 466+ self.wfile.write(line)
 467+ except HTTPError:
 468+ self.send_error(400,'Bad request')
 469+ self.wfile.write("Error in query")
 470+ except URLError:
 471+ self.send_error(500,'Internal Server Error')
 472+ self.wfile.write("Cannot connect to lucene search 2 daemon")
 473+ else:
 474+ # showfile
 475+ f = open(curdir + s[2])
 476+ file = f.read()
 477+ f.close()
 478+ self.send_response(200)
 479+ self.send_header('Cache-Control','no-cache')
 480+ self.send_header('Content-type','text/html')
 481+ self.end_headers()
 482+ self.wfile.write(file)
 483+ return
 484+ except IOError:
 485+ self.send_error(500,'Internal Server Error')
 486+ self.wfile.write('<a href="https://www.mediawiki.org/">Back</a>')
 487+
 488+
 489+
 490+try:
 491+ server = HTTPServer(('', 8080), MyHandler)
 492+ print 'Started web interface on port 8080...'
 493+ server.serve_forever()
 494+except KeyboardInterrupt:
 495+ print '^C received, shutting down server'
 496+ server.socket.close()
 497+
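The handler above answers GET requests of the form /search?dbname=...&query=...&limit=...&offset=...&ns<N>=1 on port 8080 and proxies them to the lucene-search daemon. A minimal sketch of driving it from Python 2, assuming the wrapper is running locally; the database name and query are illustrative, not part of this revision:

import urllib, urllib2

params = urllib.urlencode({
    'dbname': 'wikilucene',   # assumed test index, as offered by searchForm.html
    'query': 'help:contents', # 'help:' is rewritten to '[12]:' by rewrite_query()
    'limit': 20,
    'offset': 0,
    'ns0': 1,                 # namespace checkboxes arrive as ns<N>=1
})
page = urllib2.urlopen('http://localhost:8080/search?' + params).read()
print page[:200]
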
Index: trunk/lucene-search-3/src/main/webapp/webinterface/searchForm.html
@@ -0,0 +1,104 @@
 2+<html>
 3+<head><meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
 4+<link rel="stylesheet" type="text/css" media="screen,projection" href="https://www.mediawiki.org/lucenesearch.css" />
 5+<script type="text/javascript">
 6+var skin = "monobook";
 7+var stylepath = "/wiki-lucene/phase3/skins";
 8+var wgArticlePath = "/wiki-lucene/phase3/index.php/$1";
 9+var wgScriptPath = "/wiki-lucene/phase3";
 10+var wgScript = "/wiki-lucene/phase3/index.php";
 11+var wgServer = "http://localhost";
 12+var wgCanonicalNamespace = "";
 13+var wgCanonicalSpecialPageName = false;
 14+var wgNamespaceNumber = 0;
 15+var wgPageName = "Main_Page";
 16+var wgTitle = "Main Page";
 17+var wgAction = "view";
 18+var wgRestrictionEdit = [];
 19+var wgRestrictionMove = [];
 20+var wgArticleId = "1";
 21+var wgIsArticle = true;
 22+var wgUserName = null;
 23+var wgUserGroups = null;
 24+var wgUserLanguage = "en";
 25+var wgContentLanguage = "en";
 26+var wgBreakFrames = false;
 27+var wgCurRevisionId = "26709";
 28+</script>
 29+<script type="text/javascript" src="/wikibits.js"></script>
 30+<script type="text/javascript" src="/ajax.js"></script>
 31+<script type="text/javascript">
 32+ var wgLuceneAjaxSuggestWrapper="/prefixQuery";
 33+ var wgDBname="enwiki";
 34+</script>
 35+<script type="text/javascript" src="/luceneajaxsuggest.js"></script>
 36+</head>
 37+<body>
 38+
 39+<h2> Test web interface for lucene-search 2.1 </h2>
 40+
 41+<p>Send comments to: Robert Stojnić (rainmansr@gmail.com)
 42+</p>
 43+
 44+<p>
 45+<strong>Status:</strong> Super-slow (doing some testing)
 46+</p>
 47+<strong>Search:</strong>
 48+<hr>
 49+<div id="globalWrapper">
 50+<form name="search" id="search" method="get" action="/search">
 51+
 52+Wiki:
 53+ <select name="dbname" onchange="wgDBname=window.document.search.dbname.options[selectedIndex].text;">
 54+ <option value="wikilucene">wikilucene</option>
 55+ <option value="enwiki">enwiki</option>
 56+ <option value="dewiki">dewiki</option>
 57+ <option value="frwiki">frwiki</option>
 58+ <option value="jawiki">jawiki</option>
 59+ <option value="itwiki">itwiki</option>
 60+ <option value="srwiki">srwiki</option>
 61+ <option value="ruwiki">ruwiki</option>
 62+ <option value="enwiktionary">enwiktionary</option>
 63+ <option value="enwikinews">enwikinews</option>
 64+ <option value="enwikisource">enwikisource</option>
 65+ <option value="enwikiquote">enwikiquote</option>
 66+ <option value="enwikibooks">enwikibooks</option>
 67+ <option value="enwikiversity">enwikiversity</option>
 68+ <option value="enwiktionary-exact">enwiktionary-exact</option>
 69+ <!-- <option value="jawikiquote">jawikiquote</option>
 70+ <option value="wikilucene">wikilucene</option>
 71+ <option value="wikidev">wikidev</option> -->
 72+ </select>
 73+
 74+Search for <input type='text' name="query" value="" size="30" id="lsearchbox" />
 75+ <input type="submit" value="Search" />
 76+ <hr>
 77+
 78+Default namespaces:
 79+
 80+ <label><input type='checkbox' value="1" name="ns0" checked="checked" />(Main)</label>
 81+ <label><input type='checkbox' value="1" name="ns1" />Talk</label>
 82+ <label><input type='checkbox' value="1" name="ns2" />User</label>
 83+ <label><input type='checkbox' value="1" name="ns3" />User talk</label>
 84+ <label><input type='checkbox' value="1" name="ns4" />Wikipedia</label>
 85+
 86+ <label><input type='checkbox' value="1" name="ns5" />Wikipedia talk</label>
 87+ <label><input type='checkbox' value="1" name="ns6" />Image</label>
 88+ <label><input type='checkbox' value="1" name="ns7" />Image talk</label>
 89+ <label><input type='checkbox' value="1" name="ns8" />MediaWiki</label>
 90+ <label><input type='checkbox' value="1" name="ns9" />MediaWiki talk</label>
 91+ <label><input type='checkbox' value="1" name="ns10" />Template</label>
 92+
 93+ <label><input type='checkbox' value="1" name="ns11" />Template talk</label>
 94+ <label><input type='checkbox' value="1" name="ns12" />Help</label>
 95+ <label><input type='checkbox' value="1" name="ns13" />Help talk</label>
 96+ <label><input type='checkbox' value="1" name="ns14" />Category</label>
 97+ <label><input type='checkbox' value="1" name="ns15" />Category talk</label>
 98+ <label><input type='checkbox' value="1" name="ns100" />Portal</label>
 99+ <label><input type='checkbox' value="1" name="ns101" />Portal talk</label>
 100+
 101+</form>
 102+<br><br><br><br><br><br><br><br><br><br><br>
 103+</div>
 104+</body>
 105+</html>
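The form above submits its dbname selection, the query box, and the ns<N> checkboxes to the /search handler in lsweb.py, which folds the ns<N> keys into one comma-separated namespaces parameter and forwards the request to the lucene-search daemon. A minimal sketch of the daemon URL that results, assuming a local daemon; the host, database, and query values are illustrative:

import urllib

host = 'localhost:8123'        # normally looked up in search_host by dbname
dbname = 'wikilucene'          # assumed test index
query = 'lucene search'        # already passed through rewrite_query()
namespaces = ['0', '4']        # from the ns0 and ns4 checkboxes

search_url = 'http://%s/search/%s/%s' % (host, dbname, urllib.quote(query))
search_params = urllib.urlencode({'limit': 20, 'offset': 0,
                                  'namespaces': ','.join(namespaces),
                                  'case': 'ignore'}, True)
print search_url + '?' + search_params
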
Property changes on: trunk/lucene-search-3/src/main/webapp/webinterface
___________________________________________________________________
Added: svn:ignore
   + *.js
   + *.css

Status & tagging log