r9089 MediaWiki - Code Review archive

Repository: MediaWiki
Revision: < r9088 | r9089 | r9090 >
Date: 00:36, 18 May 2005
Author: vibber
Status: old
Tags:
Comment: Patches to Lucene.Net
Modified paths:
  • /trunk/mwsearch/Lucene.Net-patches.diff (added)

Diff

Index: trunk/mwsearch/Lucene.Net-patches.diff
@@ -0,0 +1,562 @@
 2+Fixes for Lucene.Net:
 3+* make GermanAnalyzer work
 4+* Makefile for Mono's MCS compiler (but it's buggy... build with CSC for now)
 5+* Work around some (but not all) MCS compiler bugs
 6+* backport http://svn.apache.org/viewcvs.cgi?rev=150581&view=rev
 7+
 8+
 9+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/Analysis/DE/GermanAnalyzer.cs Lucene.Net-work/Lucene.Net/Analysis/DE/GermanAnalyzer.cs
 10+--- Lucene.Net-1.4.3.final-002/Lucene.Net/Analysis/DE/GermanAnalyzer.cs 2004-11-02 12:50:46.000000000 -0800
 11+@@ -43,8 +43,8 @@
 12+ "das", "dass", "daß", "du", "er", "sie", "es", "was", "wer",
 13+ "wie", "wir", "und", "oder", "ohne", "mit", "am", "im", "in",
 14+ "aus", "auf", "ist", "sein", "war", "wird", "ihr", "ihre",
 15+- "ihres", "als", "für", "von", "mit", "dich", "dir", "mich",
 16+- "mir", "mein", "sein", "kein", "durch", "wegen", "wird"
 17++ "ihres", "als", "für", "von", "dich", "dir", "mich",
 18++ "mir", "mein", "kein", "durch", "wegen"
 19+ };
 20+
 21+ /// <summary> Contains the stopwords used with the StopFilter.</summary>
 22+
 23+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/Analysis/DE/GermanStemmer.cs Lucene.Net-work/Lucene.Net/Analysis/DE/GermanStemmer.cs
 24+--- Lucene.Net-1.4.3.final-002/Lucene.Net/Analysis/DE/GermanStemmer.cs 2004-11-24 20:18:50.000000000 -0800
 25+@@ -85,17 +85,17 @@
 26+ bool doMore = true;
 27+ while (doMore && buffer.Length > 3)
 28+ {
 29+- if ((buffer.Length + substCount > 5) && buffer.ToString(buffer.Length - 2, buffer.Length).Equals("nd"))
 30++ if ((buffer.Length + substCount > 5) && buffer.ToString(buffer.Length - 2, 2).Equals("nd"))
 31+ {
 32+- buffer.Remove(buffer.Length - 2, buffer.Length - (buffer.Length - 2));
 33++ buffer.Remove(buffer.Length - 2, 2);
 34+ }
 35+- else if ((buffer.Length + substCount > 4) && buffer.ToString(buffer.Length - 2, buffer.Length).Equals("em"))
 36++ else if ((buffer.Length + substCount > 4) && buffer.ToString(buffer.Length - 2, 2).Equals("em"))
 37+ {
 38+- buffer.Remove(buffer.Length - 2, buffer.Length - (buffer.Length - 2));
 39++ buffer.Remove(buffer.Length - 2, 2);
 40+ }
 41+- else if ((buffer.Length + substCount > 4) && buffer.ToString(buffer.Length - 2, buffer.Length).Equals("er"))
 42++ else if ((buffer.Length + substCount > 4) && buffer.ToString(buffer.Length - 2, 2).Equals("er"))
 43+ {
 44+- buffer.Remove(buffer.Length - 2, buffer.Length - (buffer.Length - 2));
 45++ buffer.Remove(buffer.Length - 2, 2);
 46+ }
 47+ else if (buffer[buffer.Length - 1] == 'e')
 48+ {
 49+@@ -127,7 +127,7 @@
 50+ private void Optimize(System.Text.StringBuilder buffer)
 51+ {
 52+ // Additional step for female plurals of professions and inhabitants.
 53+- if (buffer.Length > 5 && buffer.ToString(buffer.Length - 5, buffer.Length).Equals("erin*"))
 54++ if (buffer.Length > 5 && buffer.ToString(buffer.Length - 5, 5).Equals("erin*"))
 55+ {
 56+ buffer.Remove(buffer.Length - 1, 1);
 57+ Strip(buffer);
 58+@@ -146,9 +146,9 @@
 59+ {
 60+ for (int c = 0; c < buffer.Length - 3; c++)
 61+ {
 62+- if (buffer.ToString(c, c + 4).Equals("gege"))
 63++ if (buffer.ToString(c, 4).Equals("gege"))
 64+ {
 65+- buffer.Remove(c, c + 2 - c);
 66++ buffer.Remove(c, 2);
 67+ return ;
 68+ }
 69+ }
 70+@@ -204,7 +204,7 @@
 71+ else if ((c < buffer.Length - 2) && buffer[c] == 's' && buffer[c + 1] == 'c' && buffer[c + 2] == 'h')
 72+ {
 73+ buffer[c] = '$';
 74+- buffer.Remove(c + 1, c + 3 - (c + 1));
 75++ buffer.Remove(c + 1, 2);
 76+ substCount = + 2;
 77+ }
 78+ else if (buffer[c] == 'c' && buffer[c + 1] == 'h')
 79+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/Analysis/Standard/StandardTokenizer.cs Lucene.Net-work/Lucene.Net/Analysis/Standard/StandardTokenizer.cs
 80+--- Lucene.Net-1.4.3.final-002/Lucene.Net/Analysis/Standard/StandardTokenizer.cs 2004-12-27 17:15:00.000000000 -0800
 81+@@ -157,8 +157,8 @@
 82+
 83+ private Token Jj_consume_token(int kind)
 84+ {
 85+- Token oldToken;
 86+- if ((oldToken = token).next != null)
 87++ Token oldToken = token;
 88++ if (oldToken.next != null)
 89+ token = token.next;
 90+ else
 91+ token = token.next = token_source.GetNextToken();
 92+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/Index/SegmentTermEnum.cs Lucene.Net-work/Lucene.Net/Index/SegmentTermEnum.cs
 93+--- Lucene.Net-1.4.3.final-002/Lucene.Net/Index/SegmentTermEnum.cs 2004-11-02 13:03:18.000000000 -0800
 94+@@ -25,7 +25,10 @@
 95+ internal long size;
 96+ internal long position = - 1;
 97+
 98+- private Term term = new Term("", "");
 99++ private TermBuffer termBuffer = new TermBuffer();
 100++ private TermBuffer prevBuffer = new TermBuffer();
 101++ private TermBuffer scratch; // used for scanning
 102++
 103+ private TermInfo termInfo = new TermInfo();
 104+
 105+ private int format;
 106+@@ -34,9 +37,6 @@
 107+ internal int indexInterval;
 108+ internal int skipInterval;
 109+ private int formatM1SkipInterval;
 110+- internal Term prev;
 111+-
 112+- private char[] buffer = new char[]{};
 113+
 114+ internal SegmentTermEnum(InputStream i, FieldInfos fis, bool isi)
 115+ {
 116+@@ -98,8 +98,10 @@
 117+
 118+ clone.input = (InputStream) input.Clone();
 119+ clone.termInfo = new TermInfo(termInfo);
 120+- if (term != null)
 121+- clone.GrowBuffer(term.text.Length);
 122++
 123++ clone.termBuffer = (TermBuffer)termBuffer.Clone();
 124++ clone.prevBuffer = (TermBuffer)prevBuffer.Clone();
 125++ clone.scratch = null;
 126+
 127+ return clone;
 128+ }
 129+@@ -108,10 +110,9 @@
 130+ {
 131+ input.Seek(pointer);
 132+ position = p;
 133+- term = t;
 134+- prev = null;
 135++ termBuffer.Set(t);
 136++ prevBuffer.Reset();
 137+ termInfo.Set(ti);
 138+- GrowBuffer(term.text.Length); // copy term text into buffer
 139+ }
 140+
 141+ /// <summary>Increments the enumeration to the next element. True if one exists.</summary>
 142+@@ -119,12 +120,12 @@
 143+ {
 144+ if (position++ >= size - 1)
 145+ {
 146+- term = null;
 147++ termBuffer.Reset();
 148+ return false;
 149+ }
 150+
 151+- prev = term;
 152+- term = ReadTerm();
 153++ prevBuffer.Set(termBuffer);
 154++ termBuffer.Read(input, fieldInfos);
 155+
 156+ termInfo.docFreq = input.ReadVInt(); // read doc freq
 157+ termInfo.freqPointer += input.ReadVLong(); // read freq pointer
 158+@@ -154,24 +155,13 @@
 159+ return true;
 160+ }
 161+
 162+- private Term ReadTerm()
 163+- {
 164+- int start = input.ReadVInt();
 165+- int length = input.ReadVInt();
 166+- int totalLength = start + length;
 167+- if (buffer.Length < totalLength)
 168+- GrowBuffer(totalLength);
 169+-
 170+- input.ReadChars(buffer, start, length);
 171+- return new Term(fieldInfos.FieldName(input.ReadVInt()), new System.String(buffer, 0, totalLength), false);
 172+- }
 173+-
 174+- private void GrowBuffer(int length)
 175++ /** Optimized scan, without allocating new terms. */
 176++ internal void ScanTo(Term term)
 177+ {
 178+- buffer = new char[length];
 179+- for (int i = 0; i < term.text.Length; i++)
 180+- // copy contents
 181+- buffer[i] = term.text[i];
 182++ if (scratch == null)
 183++ scratch = new TermBuffer();
 184++ scratch.Set(term);
 185++ while (scratch.CompareTo(termBuffer) > 0 && Next()) {}
 186+ }
 187+
 188+ /// <summary>Returns the current Term in the enumeration.
 189+@@ -179,7 +169,13 @@
 190+ /// </summary>
 191+ public override Term Term()
 192+ {
 193+- return term;
 194++ return termBuffer.ToTerm();
 195++ }
 196++
 197++ /** Returns the previous Term enumerated. Initially null.*/
 198++ public Term Prev()
 199++ {
 200++ return prevBuffer.ToTerm();
 201+ }
 202+
 203+ /// <summary>Returns the current TermInfo in the enumeration.
 204+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/Index/TermBuffer.cs Lucene.Net-work/Lucene.Net/Index/TermBuffer.cs
 205+--- Lucene.Net-1.4.3.final-002/Lucene.Net/Index/TermBuffer.cs 1969-12-31 16:00:00.000000000 -0800
 206+@@ -0,0 +1,131 @@
 207++/**
 208++ * Copyright 2004 The Apache Software Foundation
 209++ *
 210++ * Licensed under the Apache License, Version 2.0 (the "License");
 211++ * you may not use this file except in compliance with the License.
 212++ * You may obtain a copy of the License at
 213++ *
 214++ * http://www.apache.org/licenses/LICENSE-2.0
 215++ *
 216++ * Unless required by applicable law or agreed to in writing, software
 217++ * distributed under the License is distributed on an "AS IS" BASIS,
 218++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 219++ * See the License for the specific language governing permissions and
 220++ * limitations under the License.
 221++ */
 222++
 223++using System;
 224++using Lucene.Net.Store;
 225++namespace Lucene.Net.Index
 226++{
 227++
 228++//import java.io.IOException;
 229++//import org.apache.lucene.store.IndexInput;
 230++
 231++ sealed class TermBuffer : ICloneable
 232++ {
 233++ private static readonly char[] NO_CHARS = new char[0];
 234++
 235++ private string field;
 236++ private char[] text = NO_CHARS;
 237++ private int textLength;
 238++ private Term term; // cached
 239++
 240++ public int CompareTo(TermBuffer other)
 241++ {
 242++ if (field == other.field) // fields are interned
 243++ return CompareChars(text, textLength, other.text, other.textLength);
 244++ else
 245++ return field.CompareTo(other.field);
 246++ }
 247++
 248++ private static int CompareChars(char[] v1, int len1, char[] v2, int len2)
 249++ {
 250++ int end = Math.Min(len1, len2);
 251++ for (int k = 0; k < end; k++) {
 252++ char c1 = v1[k];
 253++ char c2 = v2[k];
 254++ if (c1 != c2) {
 255++ return c1 - c2;
 256++ }
 257++ }
 258++ return len1 - len2;
 259++ }
 260++
 261++ private void SetTextLength(int newLength)
 262++ {
 263++ if (text.Length < newLength) {
 264++ char[] newText = new char[newLength];
 265++ Array.Copy(text, 0, newText, 0, textLength);
 266++ text = newText;
 267++ }
 268++ textLength = newLength;
 269++ }
 270++
 271++ public void Read(InputStream input, FieldInfos fieldInfos)
 272++ {
 273++ this.term = null; // invalidate cache
 274++ int start = input.ReadVInt();
 275++ int length = input.ReadVInt();
 276++ int totalLength = start + length;
 277++ SetTextLength(totalLength);
 278++ input.ReadChars(this.text, start, length);
 279++ this.field = fieldInfos.FieldName(input.ReadVInt());
 280++ }
 281++
 282++ public void Set(Term term)
 283++ {
 284++ if (term == null) {
 285++ Reset();
 286++ return;
 287++ }
 288++
 289++ // copy text into the buffer
 290++ SetTextLength(term.Text().Length);
 291++ term.Text().CopyTo(0, text, 0, term.Text().Length);
 292++
 293++ this.field = term.Field();
 294++ this.term = term;
 295++ }
 296++
 297++ public void Set(TermBuffer other)
 298++ {
 299++ SetTextLength(other.textLength);
 300++ Array.Copy(other.text, 0, text, 0, textLength);
 301++
 302++ this.field = other.field;
 303++ this.term = other.term;
 304++ }
 305++
 306++ public void Reset()
 307++ {
 308++ this.field = null;
 309++ this.textLength = 0;
 310++ this.term = null;
 311++ }
 312++
 313++ public Term ToTerm()
 314++ {
 315++ if (field == null) // unset
 316++ return null;
 317++
 318++ if (term == null)
 319++ term = new Term(field, new string(text, 0, textLength), false);
 320++
 321++ return term;
 322++ }
 323++
 324++ public object Clone()
 325++ {
 326++ TermBuffer clone = null;
 327++ try {
 328++ clone = (TermBuffer)base.MemberwiseClone();
 329++ } catch (System.Exception e) {}
 330++
 331++ clone.text = new char[text.Length];
 332++ Array.Copy(text, 0, clone.text, 0, textLength);
 333++
 334++ return clone;
 335++ }
 336++ }
 337++}
 338+
 339+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/Index/TermInfosReader.cs Lucene.Net-work/Lucene.Net/Index/TermInfosReader.cs
 340+--- Lucene.Net-1.4.3.final-002/Lucene.Net/Index/TermInfosReader.cs 2004-11-02 13:03:18.000000000 -0800
 341+@@ -133,7 +133,7 @@
 342+
 343+ // optimize sequential access: first try scanning cached enum w/o seeking
 344+ SegmentTermEnum enumerator = GetEnum();
 345+- if (enumerator.Term() != null && ((enumerator.prev != null && term.CompareTo(enumerator.prev) > 0) || term.CompareTo(enumerator.Term()) >= 0))
 346++ if (enumerator.Term() != null && ((enumerator.Prev() != null && term.CompareTo(enumerator.Prev()) > 0) || term.CompareTo(enumerator.Term()) >= 0))
 347+ {
 348+ int enumOffset = (int) (enumerator.position / enumerator.indexInterval) + 1;
 349+ if (indexTerms.Length == enumOffset || term.CompareTo(indexTerms[enumOffset]) < 0)
 350+@@ -149,9 +149,7 @@
 351+ private TermInfo ScanEnum(Term term)
 352+ {
 353+ SegmentTermEnum enumerator = GetEnum();
 354+- while (term.CompareTo(enumerator.Term()) > 0 && enumerator.Next())
 355+- {
 356+- }
 357++ enumerator.ScanTo(term);
 358+ if (enumerator.Term() != null && term.CompareTo(enumerator.Term()) == 0)
 359+ return enumerator.TermInfo();
 360+ else
 361+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/Makefile Lucene.Net-work/Lucene.Net/Makefile
 362+--- Lucene.Net-1.4.3.final-002/Lucene.Net/Makefile 1969-12-31 16:00:00.000000000 -0800
 363+@@ -0,0 +1,178 @@
 364++# Quickie makefile for building Lucene.Net.dll with Mono
 365++
 366++.PHONY : all clean
 367++
 368++MCS ?= mcs
 369++
 370++LUCENE_SOURCES=\
 371++Analysis/Analyzer.cs \
 372++Analysis/CharTokenizer.cs \
 373++Analysis/DE/GermanAnalyzer.cs \
 374++Analysis/DE/GermanStemFilter.cs \
 375++Analysis/DE/GermanStemmer.cs \
 376++Analysis/DE/WordlistLoader.cs \
 377++Analysis/LetterTokenizer.cs \
 378++Analysis/LowerCaseFilter.cs \
 379++Analysis/LowerCaseTokenizer.cs \
 380++Analysis/PerFieldAnalyzerWrapper.cs \
 381++Analysis/PorterStemFilter.cs \
 382++Analysis/PorterStemmer.cs \
 383++Analysis/RU/RussianAnalyzer.cs \
 384++Analysis/RU/RussianCharsets.cs \
 385++Analysis/RU/RussianLetterTokenizer.cs \
 386++Analysis/RU/RussianLowerCaseFilter.cs \
 387++Analysis/RU/RussianStemFilter.cs \
 388++Analysis/RU/RussianStemmer.cs \
 389++Analysis/SimpleAnalyzer.cs \
 390++Analysis/Standard/CharStream.cs \
 391++Analysis/Standard/FastCharStream.cs \
 392++Analysis/Standard/ParseException.cs \
 393++Analysis/Standard/StandardAnalyzer.cs \
 394++Analysis/Standard/StandardFilter.cs \
 395++Analysis/Standard/StandardTokenizer.cs \
 396++Analysis/Standard/StandardTokenizerConstants.cs \
 397++Analysis/Standard/StandardTokenizerTokenManager.cs \
 398++Analysis/Standard/Token.cs \
 399++Analysis/Standard/TokenMgrError.cs \
 400++Analysis/StopAnalyzer.cs \
 401++Analysis/StopFilter.cs \
 402++Analysis/Token.cs \
 403++Analysis/TokenFilter.cs \
 404++Analysis/Tokenizer.cs \
 405++Analysis/TokenStream.cs \
 406++Analysis/WhitespaceAnalyzer.cs \
 407++Analysis/WhitespaceTokenizer.cs \
 408++AssemblyInfo.cs \
 409++Document/DateField.cs \
 410++Document/Document.cs \
 411++Document/Field.cs \
 412++Index/CompoundFileReader.cs \
 413++Index/CompoundFileWriter.cs \
 414++Index/DocumentWriter.cs \
 415++Index/FieldInfo.cs \
 416++Index/FieldInfos.cs \
 417++Index/FieldsReader.cs \
 418++Index/FieldsWriter.cs \
 419++Index/FilterIndexReader.cs \
 420++Index/IndexReader.cs \
 421++Index/IndexWriter.cs \
 422++Index/MultipleTermPositions.cs \
 423++Index/MultiReader.cs \
 424++Index/SegmentInfo.cs \
 425++Index/SegmentInfos.cs \
 426++Index/SegmentMergeInfo.cs \
 427++Index/SegmentMergeQueue.cs \
 428++Index/SegmentMerger.cs \
 429++Index/SegmentReader.cs \
 430++Index/SegmentTermDocs.cs \
 431++Index/SegmentTermEnum.cs \
 432++Index/SegmentTermPositions.cs \
 433++Index/SegmentTermVector.cs \
 434++Index/Term.cs \
 435++Index/TermBuffer.cs \
 436++Index/TermDocs.cs \
 437++Index/TermEnum.cs \
 438++Index/TermFreqVector.cs \
 439++Index/TermInfo.cs \
 440++Index/TermInfosReader.cs \
 441++Index/TermInfosWriter.cs \
 442++Index/TermPositions.cs \
 443++Index/TermPositionVector.cs \
 444++Index/TermVectorsReader.cs \
 445++Index/TermVectorsWriter.cs \
 446++QueryParser/CharStream.cs \
 447++QueryParser/FastCharStream.cs \
 448++QueryParser/MultiFieldQueryParser.cs \
 449++QueryParser/ParseException.cs \
 450++QueryParser/QueryParser.cs \
 451++QueryParser/QueryParserConstants.cs \
 452++QueryParser/QueryParserTokenManager.cs \
 453++QueryParser/Token.cs \
 454++QueryParser/TokenMgrError.cs \
 455++Search/BooleanClause.cs \
 456++Search/BooleanQuery.cs \
 457++Search/BooleanScorer.cs \
 458++Search/CachingWrapperFilter.cs \
 459++Search/ConjunctionScorer.cs \
 460++Search/DateFilter.cs \
 461++Search/DefaultSimilarity.cs \
 462++Search/ExactPhraseScorer.cs \
 463++Search/Explanation.cs \
 464++Search/FieldCache.cs \
 465++Search/FieldCacheImpl.cs \
 466++Search/FieldDoc.cs \
 467++Search/FieldDocSortedHitQueue.cs \
 468++Search/FieldSortedHitQueue.cs \
 469++Search/Filter.cs \
 470++Search/FilteredQuery.cs \
 471++Search/FilteredTermEnum.cs \
 472++Search/FuzzyQuery.cs \
 473++Search/FuzzyTermEnum.cs \
 474++Search/HitCollector.cs \
 475++Search/HitQueue.cs \
 476++Search/Hits.cs \
 477++Search/IndexSearcher.cs \
 478++Search/MultiSearcher.cs \
 479++Search/MultiTermQuery.cs \
 480++Search/ParallelMultiSearcher.cs \
 481++Search/PhrasePositions.cs \
 482++Search/PhrasePrefixQuery.cs \
 483++Search/PhraseQuery.cs \
 484++Search/PhraseQueue.cs \
 485++Search/PhraseScorer.cs \
 486++Search/PrefixQuery.cs \
 487++Search/Query.cs \
 488++Search/QueryFilter.cs \
 489++Search/QueryTermVector.cs \
 490++Search/RangeQuery.cs \
 491++Search/RemoteSearchable.cs \
 492++Search/ScoreDoc.cs \
 493++Search/ScoreDocComparator.cs \
 494++Search/Scorer.cs \
 495++Search/Searchable.cs \
 496++Search/Searcher.cs \
 497++Search/Similarity.cs \
 498++Search/SloppyPhraseScorer.cs \
 499++Search/Sort.cs \
 500++Search/SortComparator.cs \
 501++Search/SortComparatorSource.cs \
 502++Search/SortField.cs \
 503++Search/Spans/NearSpans.cs \
 504++Search/Spans/SpanFirstQuery.cs \
 505++Search/Spans/SpanNearQuery.cs \
 506++Search/Spans/SpanNotQuery.cs \
 507++Search/Spans/SpanOrQuery.cs \
 508++Search/Spans/SpanQuery.cs \
 509++Search/Spans/Spans.cs \
 510++Search/Spans/SpanScorer.cs \
 511++Search/Spans/SpanTermQuery.cs \
 512++Search/Spans/SpanWeight.cs \
 513++Search/TermQuery.cs \
 514++Search/TermScorer.cs \
 515++Search/TopDocs.cs \
 516++Search/TopFieldDocs.cs \
 517++Search/Weight.cs \
 518++Search/WildcardQuery.cs \
 519++Search/WildcardTermEnum.cs \
 520++Store/Directory.cs \
 521++Store/FSDirectory.cs \
 522++Store/InputStream.cs \
 523++Store/Lock.cs \
 524++Store/OutputStream.cs \
 525++Store/RAMDirectory.cs \
 526++Store/RAMFile.cs \
 527++Store/RAMInputStream.cs \
 528++Store/RAMOutputStream.cs \
 529++SupportClass.cs \
 530++Util/BitVector.cs \
 531++Util/Constants.cs \
 532++Util/PriorityQueue.cs \
 533++Util/StringHelper.cs
 534++
 535++all : Lucene.Net.dll
 536++
 537++clean :
 538++ rm -f Lucene.Net.dll
 539++
 540++Lucene.Net.dll : $(LUCENE_SOURCES)
 541++ $(MCS) -target:library -out:Lucene.Net.dll -r:System.Runtime.Remoting $(LUCENE_SOURCES)
 542+
 543+diff -urwN Lucene.Net-1.4.3.final-002/Lucene.Net/QueryParser/QueryParser.cs Lucene.Net-work/Lucene.Net/QueryParser/QueryParser.cs
 544+--- Lucene.Net-1.4.3.final-002/Lucene.Net/QueryParser/QueryParser.cs 2004-12-12 19:15:52.000000000 -0800
 545+@@ -1272,8 +1272,8 @@
 546+
 547+ private Token Jj_consume_token(int kind)
 548+ {
 549+- Token oldToken;
 550+- if ((oldToken = token).next != null)
 551++ Token oldToken = token;
 552++ if (oldToken.next != null)
 553+ token = token.next;
 554+ else
 555+ token = token.next = token_source.GetNextToken();
Property changes on: trunk/mwsearch/Lucene.Net-patches.diff
___________________________________________________________________
Added: svn:eol-style
   + native
Added: svn:keywords
   + Author Date Id Revision
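
The GermanAnalyzer and GermanStemmer hunks above all turn on the same porting pitfall: java.lang.StringBuffer.substring(start, end) and delete(start, end) take an end index, while System.Text.StringBuilder.ToString(startIndex, length) and Remove(startIndex, length) take a length, so the mechanically translated calls passed buffer.Length where a short suffix length belonged and throw ArgumentOutOfRangeException at runtime. (The stopword hunk is related housekeeping: it drops the duplicate entries "mit", "sein" and "wird", presumably because the ported stop-set construction rejects duplicate keys where Java's set silently ignores them.) A minimal standalone sketch of the API difference; the class name and the sample word are invented for illustration and are not part of the patch:

    using System;
    using System.Text;

    // Standalone illustration: Java's StringBuffer.substring(start, end)
    // takes an END index, C#'s StringBuilder.ToString(startIndex, length)
    // takes a LENGTH, so the mechanically ported arguments overrun the buffer.
    class StringBuilderSemanticsSketch
    {
        static void Main()
        {
            StringBuilder buffer = new StringBuilder("laufend");

            try
            {
                // Shape of the unpatched call: asks for buffer.Length
                // characters starting two characters before the end.
                buffer.ToString(buffer.Length - 2, buffer.Length);
            }
            catch (ArgumentOutOfRangeException)
            {
                Console.WriteLine("end index passed where a length belongs");
            }

            // Shape of the patched call: the second argument is the length
            // of the suffix being tested.
            Console.WriteLine(buffer.ToString(buffer.Length - 2, 2)); // "nd"

            // Remove(startIndex, length) follows the same rule, so the fixed
            // stemmer strips a two-character suffix like this:
            buffer.Remove(buffer.Length - 2, 2);
            Console.WriteLine(buffer); // "laufe"
        }
    }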
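
The Jj_consume_token hunks in StandardTokenizer.cs and QueryParser.cs belong to the "work around some (but not all) MCS compiler bugs" item: the embedded assignment (oldToken = token).next is rewritten as an ordinary declaration followed by a member access. The two forms behave identically; the rewrite just sidesteps a construct the 2005-era Mono compiler apparently mishandled. A reduced sketch of both shapes, using a placeholder Node type rather than Lucene.Net's Token:

    // Reduced sketch (placeholder Node type, not Lucene.Net's Token class)
    // showing that the rewrite is purely mechanical: both forms read the
    // current node, optionally advance, and return the old one.
    class Node
    {
        public Node next;
    }

    class ConsumeSketch
    {
        Node current = new Node();

        // Shape used by the generated Java-to-C# port: an assignment used as
        // an expression inside the condition, then dereferenced.
        Node ConsumeOriginal()
        {
            Node old;
            if ((old = current).next != null)
                current = current.next;
            return old;
        }

        // Shape after the patch: the assignment is hoisted into a normal
        // declaration; the observable behavior is unchanged.
        Node ConsumePatched()
        {
            Node old = current;
            if (old.next != null)
                current = current.next;
            return old;
        }
    }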
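
The SegmentTermEnum, TermBuffer and TermInfosReader hunks are the backport named in the patch header (Lucene r150581): rather than building a fresh Term, and often a fresh char[], for every entry it scans past, the enumerator now refills reusable TermBuffer objects and only materializes a Term when Term() or Prev() is called. A toy sketch of that reuse pattern, with invented RecordBuffer/ScanSketch names standing in for the real classes:

    using System;

    // Toy illustration of the buffer-reuse pattern the backport introduces;
    // the real implementation is the TermBuffer class added in the diff.
    sealed class RecordBuffer
    {
        private char[] text = new char[0];
        private int length;
        private string cached;              // built lazily, like ToTerm()

        public void Read(string source)     // refill in place
        {
            cached = null;                  // invalidate the cache
            if (text.Length < source.Length)
                text = new char[source.Length];   // grow only when needed
            source.CopyTo(0, text, 0, source.Length);
            length = source.Length;
        }

        public int CompareTo(RecordBuffer other)
        {
            // Compare the raw char buffers; no temporary strings are created.
            int end = Math.Min(length, other.length);
            for (int i = 0; i < end; i++)
                if (text[i] != other.text[i])
                    return text[i] - other.text[i];
            return length - other.length;
        }

        public string Materialize()
        {
            // A string is built only on demand and then cached.
            if (cached == null)
                cached = new string(text, 0, length);
            return cached;
        }
    }

    class ScanSketch
    {
        static void Main()
        {
            string[] stored = { "apfel", "birne", "kirsche", "zwetschge" };
            RecordBuffer current = new RecordBuffer();
            RecordBuffer target = new RecordBuffer();
            target.Read("kirsche");

            // Mirrors SegmentTermEnum.ScanTo(): keep advancing while the
            // target still sorts after the current entry; no string is
            // materialized per step, and the char buffer only grows when an
            // entry is longer than anything seen so far.
            int i = 0;
            current.Read(stored[i]);
            while (target.CompareTo(current) > 0 && ++i < stored.Length)
                current.Read(stored[i]);

            Console.WriteLine(current.Materialize()); // prints "kirsche"
        }
    }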

Status & tagging log