/*
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Generated By:JavaCC: Do not edit this line. PrecedenceQueryParser.java */
using System;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Term = Lucene.Net.Index.Term;
using Lucene.Net.Search;
using Searchable = Lucene.Net.Search.Searchable;
using Parameter = Lucene.Net.Util.Parameter;
namespace Lucene.Net.QueryParsers.Precedence
{
/// <summary>
/// Experimental query parser variant designed to handle operator precedence
/// in a more sensible fashion than QueryParser. There are still some
/// open issues with this parser.
///
/// This class is generated by JavaCC. The only method that clients should need
/// to call is <c>parse(String)</c>.
///
/// The syntax for query strings is as follows:
/// A Query is a series of clauses.
/// A clause may be prefixed by:
/// <list type="bullet">
/// <item>a plus (+) or a minus (-) sign, indicating
/// that the clause is required or prohibited respectively; or</item>
/// <item>a term followed by a colon, indicating the field to be searched.
/// This enables one to construct queries which search multiple fields.</item>
/// </list>
///
/// A clause may be either:
/// <list type="bullet">
/// <item>a term, indicating all the documents that contain this term; or</item>
/// <item>a nested query, enclosed in parentheses. Note that this may be used
/// with a +/- prefix to require any of a set of
/// terms.</item>
/// </list>
/// </summary>
/// <remarks>
/// Authors of the original Java version: Brian Goetz, Peter Halacsy,
/// Tatu Saloranta.
/// </remarks>
public class PrecedenceQueryParser : PrecedenceQueryParserConstants
{
// Initializes generated-parser state shared by all constructors; JavaCC
// emits this as a Java instance-initializer block, which the C# port
// calls explicitly from each constructor.
private void InitBlock()
{
fuzzyMinSim = FuzzyQuery.defaultMinSimilarity;
fuzzyPrefixLength = FuzzyQuery.defaultPrefixLength;
jj_2_rtns = new JJCalls[1];
jj_ls = new LookaheadSuccess();
}
// Conjunction codes returned by Conjunction().
private const int CONJ_NONE = 0;
private const int CONJ_AND = 1;
private const int CONJ_OR = 2;
// Modifier codes returned by Modifier().
private const int MOD_NONE = 0;
private const int MOD_NOT = 10;
private const int MOD_REQ = 11;
// make it possible to call setDefaultOperator() without accessing
// the nested class:
public static readonly Operator AND_OPERATOR = Operator.AND;
public static readonly Operator OR_OPERATOR = Operator.OR;
/// The actual operator that parser uses to combine query terms
private Operator operator_Renamed = OR_OPERATOR;
// Whether wildcard/prefix/fuzzy/range terms are lower-cased before use.
internal bool lowercaseExpandedTerms = true;
// Analyzer used to derive terms from the query text.
internal Analyzer analyzer;
// Default field searched when a clause carries no explicit "field:" prefix.
internal System.String field;
// Default slop for phrase queries (0 = exact phrase match required).
internal int phraseSlop = 0;
// Fuzzy-query defaults; populated in InitBlock() from FuzzyQuery's statics.
internal float fuzzyMinSim;
internal int fuzzyPrefixLength;
// Culture used when parsing date range endpoints; defaults to the current thread's culture.
internal System.Globalization.CultureInfo locale = System.Threading.Thread.CurrentThread.CurrentCulture;
/// <summary>
/// Type-safe enumeration of the boolean operators the parser can use to
/// combine query terms: <c>AND</c> and <c>OR</c>.
/// </summary>
[Serializable]
sealed public class Operator:Parameter
{
internal Operator(System.String name):base(name)
{
}
internal static readonly Operator OR = new Operator("OR");
internal static readonly Operator AND = new Operator("AND");
}
/// <summary>Constructs a query parser.</summary>
/// <param name="f">the default field for query terms.</param>
/// <param name="a">used to find terms in the query text.</param>
public PrecedenceQueryParser(System.String f, Analyzer a):this(new FastCharStream(new System.IO.StringReader("")))
{
analyzer = a;
field = f;
}
/// <summary>Parses a query string, returning a <c>Lucene.Net.Search.Query</c>.</summary>
/// <param name="expression">the query string to be parsed.</param>
/// <returns>the parsed query, or an empty <c>BooleanQuery</c> for null/blank input
/// or when every token was filtered away by the analyzer.</returns>
/// <exception cref="ParseException">if the parsing fails</exception>
public virtual Query parse(System.String expression)
{
    // optimize empty query to be empty BooleanQuery
    if (expression == null || expression.Trim().Length == 0)
    {
        return new BooleanQuery();
    }
    ReInit(new FastCharStream(new System.IO.StringReader(expression)));
    try
    {
        Query query = Query(field);
        return (query != null) ? query : new BooleanQuery();
    }
    catch (TokenMgrError tme)
    {
        // Surface lexer errors as ParseException so callers deal with one type.
        throw new ParseException(tme.Message);
    }
    catch (BooleanQuery.TooManyClauses)
    {
        // The exception instance carries no extra detail; the previously
        // declared-but-unused variable only produced a CS0168 warning.
        throw new ParseException("Too many boolean clauses");
    }
}
/// <summary>Returns the analyzer used by this parser.</summary>
public virtual Analyzer GetAnalyzer()
{
    return this.analyzer;
}
/// <summary>Returns the default field searched by this parser.</summary>
public virtual System.String GetField()
{
    return this.field;
}
/// <summary>Gets the minimal similarity used for fuzzy queries.</summary>
public virtual float GetFuzzyMinSim()
{
    return this.fuzzyMinSim;
}
/// <summary>Sets the minimum similarity for fuzzy queries. Default is 0.5f.</summary>
/// <param name="fuzzyMinSim">new minimum similarity, expected in [0.0, 1.0].</param>
public virtual void SetFuzzyMinSim(float fuzzyMinSim)
{
    this.fuzzyMinSim = fuzzyMinSim;
}
/// <summary>Gets the prefix length used for fuzzy queries.</summary>
/// <returns>the current fuzzy prefix length.</returns>
public virtual int GetFuzzyPrefixLength()
{
    return this.fuzzyPrefixLength;
}
/// <summary>Sets the prefix length for fuzzy queries. Default is 0.</summary>
/// <param name="fuzzyPrefixLength">the new prefix length.</param>
public virtual void SetFuzzyPrefixLength(int fuzzyPrefixLength)
{
    this.fuzzyPrefixLength = fuzzyPrefixLength;
}
/// <summary>
/// Sets the default slop for phrases. If zero, exact phrase matches are
/// required. Default value is zero.
/// </summary>
public virtual void SetPhraseSlop(int phraseSlop)
{
    this.phraseSlop = phraseSlop;
}
/// <summary>Gets the default slop for phrases.</summary>
public virtual int GetPhraseSlop()
{
    return this.phraseSlop;
}
/// <summary>
/// Sets the boolean operator of the QueryParser.
/// In default mode (<c>OR_OPERATOR</c>) terms without any modifiers are
/// considered optional: for example <c>capital of Hungary</c> is equal to
/// <c>capital OR of OR Hungary</c>.
/// In <c>AND_OPERATOR</c> mode terms are considered to be in conjunction:
/// the above mentioned query is parsed as <c>capital AND of AND Hungary</c>.
/// </summary>
public virtual void SetDefaultOperator(Operator op)
{
    this.operator_Renamed = op;
}
/// <summary>
/// Gets the implicit operator setting, which is either
/// <c>AND_OPERATOR</c> or <c>OR_OPERATOR</c>.
/// </summary>
public virtual Operator GetDefaultOperator()
{
    return this.operator_Renamed;
}
/// <summary>
/// Sets whether terms of wildcard, prefix, fuzzy and range queries are
/// automatically lower-cased. Default is true.
/// </summary>
public virtual void SetLowercaseExpandedTerms(bool lowercaseExpandedTerms)
{
    this.lowercaseExpandedTerms = lowercaseExpandedTerms;
}
/// <summary>
/// Returns whether expanded terms (wildcard, prefix, fuzzy, range) are
/// automatically lower-cased.
/// </summary>
public virtual bool GetLowercaseExpandedTerms()
{
    return this.lowercaseExpandedTerms;
}
/// <summary>Sets the locale (culture) used by date range parsing.</summary>
public virtual void SetLocale(System.Globalization.CultureInfo locale)
{
    this.locale = locale;
}
/// <summary>Returns the current locale, allowing access by subclasses.</summary>
public virtual System.Globalization.CultureInfo GetLocale()
{
    return this.locale;
}
/// <summary>
/// Appends <paramref name="q"/> to <paramref name="clauses"/> as a
/// BooleanClause, choosing MUST / SHOULD / MUST_NOT from the conjunction
/// (CONJ_*) and modifier (MOD_*) codes and the configured default operator.
/// May also retroactively change the occur flag of the previous clause:
/// AND makes it required; OR (in AND-default mode) makes it optional.
/// A null <paramref name="q"/> (term removed by the analyzer) is ignored,
/// but only after the previous clause has been adjusted.
/// </summary>
protected internal virtual void AddClause(System.Collections.ArrayList clauses, int conj, int modifier, Query q)
{
bool required, prohibited;
// If this term is introduced by AND, make the preceding term required,
// unless it's already prohibited
if (clauses.Count > 0 && conj == CONJ_AND)
{
BooleanClause c = (BooleanClause) clauses[clauses.Count - 1];
if (!c.IsProhibited())
c.SetOccur(BooleanClause.Occur.MUST);
}
if (clauses.Count > 0 && operator_Renamed == AND_OPERATOR && conj == CONJ_OR)
{
// If this term is introduced by OR, make the preceding term optional,
// unless it's prohibited (that means we leave -a OR b but +a OR b-->a OR b)
// notice if the input is a OR b, first term is parsed as required; without
// this modification a OR b would parsed as +a OR b
BooleanClause c = (BooleanClause) clauses[clauses.Count - 1];
if (!c.IsProhibited())
c.SetOccur(BooleanClause.Occur.SHOULD);
}
// We might have been passed a null query; the term might have been
// filtered away by the analyzer.
if (q == null)
return ;
if (operator_Renamed == OR_OPERATOR)
{
// We set REQUIRED if we're introduced by AND or +; PROHIBITED if
// introduced by NOT or -; make sure not to set both.
prohibited = (modifier == MOD_NOT);
required = (modifier == MOD_REQ);
if (conj == CONJ_AND && !prohibited)
{
required = true;
}
}
else
{
// We set PROHIBITED if we're introduced by NOT or -; We set REQUIRED
// if not PROHIBITED and not introduced by OR
prohibited = (modifier == MOD_NOT);
required = (!prohibited && conj != CONJ_OR);
}
if (required && !prohibited)
clauses.Add(new BooleanClause(q, BooleanClause.Occur.MUST));
else if (!required && !prohibited)
clauses.Add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
else if (!required && prohibited)
clauses.Add(new BooleanClause(q, BooleanClause.Occur.MUST_NOT));
else
throw new System.SystemException("Clause cannot be both required and prohibited");
}
/// <summary>
/// Analyzes <paramref name="queryText"/> for <paramref name="field"/> and
/// builds: null (all tokens filtered out), a TermQuery (one token), a
/// BooleanQuery of SHOULD terms (several tokens all at one position, e.g.
/// synonyms), a MultiPhraseQuery (phrases with stacked tokens), or a plain
/// PhraseQuery using the configured phraseSlop.
/// </summary>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetFieldQuery(System.String field, System.String queryText)
{
// Use the analyzer to get all the tokens, and then build a TermQuery,
// PhraseQuery, or nothing based on the term count
TokenStream source = analyzer.TokenStream(field, new System.IO.StringReader(queryText));
System.Collections.ArrayList v = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
Lucene.Net.Analysis.Token t;
int positionCount = 0;
bool severalTokensAtSamePosition = false;
// Drain the token stream; a position increment other than 1 means a token
// stacked on top of the previous one (same position).
while (true)
{
try
{
t = source.Next();
}
catch (System.IO.IOException e)
{
// treat a stream error as end-of-tokens
t = null;
}
if (t == null)
break;
v.Add(t);
if (t.GetPositionIncrement() == 1)
positionCount++;
else
severalTokensAtSamePosition = true;
}
try
{
source.Close();
}
catch (System.IO.IOException e)
{
// ignore
}
if (v.Count == 0)
return null;
else if (v.Count == 1)
{
t = (Lucene.Net.Analysis.Token) v[0];
return new TermQuery(new Term(field, t.TermText()));
}
else
{
if (severalTokensAtSamePosition)
{
if (positionCount == 1)
{
// no phrase query:
BooleanQuery q = new BooleanQuery();
for (int i = 0; i < v.Count; i++)
{
t = (Lucene.Net.Analysis.Token) v[i];
TermQuery currentQuery = new TermQuery(new Term(field, t.TermText()));
q.Add(currentQuery, BooleanClause.Occur.SHOULD);
}
return q;
}
else
{
// phrase query:
// group tokens sharing a position into one multi-term slot
MultiPhraseQuery mpq = new MultiPhraseQuery();
System.Collections.ArrayList multiTerms = new System.Collections.ArrayList();
for (int i = 0; i < v.Count; i++)
{
t = (Lucene.Net.Analysis.Token) v[i];
if (t.GetPositionIncrement() == 1 && multiTerms.Count > 0)
{
mpq.Add((Term[]) multiTerms.ToArray(typeof(Term)));
multiTerms.Clear();
}
multiTerms.Add(new Term(field, t.TermText()));
}
mpq.Add((Term[]) multiTerms.ToArray(typeof(Term)));
return mpq;
}
}
else
{
PhraseQuery q = new PhraseQuery();
q.SetSlop(phraseSlop);
for (int i = 0; i < v.Count; i++)
{
q.Add(new Term(field, ((Lucene.Net.Analysis.Token) v[i]).TermText()));
}
return q;
}
}
}
/// <summary>
/// Base implementation delegates to <c>GetFieldQuery(String,String)</c> and
/// then applies the given slop when the result is a phrase-type query.
/// This method may be overridden, for example, to return a SpanNearQuery
/// instead of a PhraseQuery.
/// </summary>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetFieldQuery(System.String field, System.String queryText, int slop)
{
    Query query = GetFieldQuery(field, queryText);
    // A query instance is at most one of these concrete types, so the
    // original pair of independent ifs collapses to an if/else chain.
    if (query is PhraseQuery)
    {
        ((PhraseQuery) query).SetSlop(slop);
    }
    else if (query is MultiPhraseQuery)
    {
        ((MultiPhraseQuery) query).SetSlop(slop);
    }
    return query;
}
/// <summary>
/// Builds a RangeQuery over [part1 .. part2]. Endpoints are lower-cased
/// when lowercaseExpandedTerms is set; when both endpoints parse as dates
/// in the configured locale they are normalized via DateTools at DAY
/// resolution, otherwise they are used verbatim.
/// </summary>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetRangeQuery(System.String field, System.String part1, System.String part2, bool inclusive)
{
    if (lowercaseExpandedTerms)
    {
        part1 = part1.ToLower();
        part2 = part2.ToLower();
    }
    try
    {
        System.DateTime d1 = System.DateTime.Parse(part1, locale);
        System.DateTime d2 = System.DateTime.Parse(part2, locale);
        part1 = DateTools.DateToString(d1, DateTools.Resolution.DAY);
        part2 = DateTools.DateToString(d2, DateTools.Resolution.DAY);
    }
    catch (System.FormatException)
    {
        // Not a date range: keep the endpoints as plain terms. Catching only
        // FormatException (what DateTime.Parse throws for unparsable input)
        // instead of System.Exception avoids swallowing unrelated failures.
    }
    return new RangeQuery(new Term(field, part1), new Term(field, part2), inclusive);
}
/// <summary>
/// Factory method for generating a query, given a set of clauses.
/// By default creates a boolean query composed of the clauses passed in,
/// with coord scoring left enabled.
/// Can be overridden by extending classes to modify the query being returned.
/// </summary>
/// <param name="clauses">list that contains <c>BooleanClause</c> instances to join.</param>
/// <returns>the resulting <c>Query</c> object.</returns>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetBooleanQuery(System.Collections.ArrayList clauses)
{
    // Delegate to the two-argument overload with coord scoring enabled.
    return GetBooleanQuery(clauses, false);
}
/// <summary>
/// Factory method for generating a query, given a set of clauses.
/// By default creates a boolean query composed of the clauses passed in.
/// Can be overridden by extending classes to modify the query being returned.
/// </summary>
/// <param name="clauses">list that contains <c>BooleanClause</c> instances to join.</param>
/// <param name="disableCoord">true if coord scoring should be disabled.</param>
/// <returns>the resulting <c>Query</c> object, or null for an empty clause list.</returns>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetBooleanQuery(System.Collections.ArrayList clauses, bool disableCoord)
{
    // Nothing to combine: signal "no query" to the caller.
    if (clauses == null || clauses.Count == 0)
        return null;
    BooleanQuery query = new BooleanQuery(disableCoord);
    foreach (BooleanClause clause in clauses)
    {
        query.Add(clause);
    }
    return query;
}
/// <summary>
/// Factory method for generating a query. Called when the parser parses an
/// input term token that contains one or more wildcard characters (? and *),
/// but is not a prefix term token (one that has just a single * character
/// at the end).
///
/// Depending on settings, the prefix term may be lower-cased automatically.
/// It will not go through the default Analyzer, however, since normal
/// Analyzers are unlikely to work properly with wildcard templates.
///
/// Can be overridden by extending classes, to provide custom handling for
/// wildcard queries, which may be necessary due to missing analyzer calls.
/// </summary>
/// <param name="field">name of the field the query will use.</param>
/// <param name="termStr">term token that contains one or more wild card
/// characters (? or *), but is not a simple prefix term.</param>
/// <returns>the resulting <c>Query</c> built for the term.</returns>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetWildcardQuery(System.String field, System.String termStr)
{
    System.String text = lowercaseExpandedTerms ? termStr.ToLower() : termStr;
    return new WildcardQuery(new Term(field, text));
}
/// <summary>
/// Factory method for generating a query (similar to GetWildcardQuery).
/// Called when the parser parses an input term token that uses prefix
/// notation; that is, contains a single '*' wildcard character as its last
/// character. Since this is a special case of a generic wildcard term, and
/// such a query can be optimized easily, this usually results in a
/// different query object.
///
/// Depending on settings, a prefix term may be lower-cased automatically.
/// It will not go through the default Analyzer, however, since normal
/// Analyzers are unlikely to work properly with wildcard templates.
///
/// Can be overridden by extending classes, to provide custom handling for
/// wild card queries, which may be necessary due to missing analyzer calls.
/// </summary>
/// <param name="field">name of the field the query will use.</param>
/// <param name="termStr">term token to use for building the term for the
/// query (without the trailing '*' character!).</param>
/// <returns>the resulting <c>Query</c> built for the term.</returns>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetPrefixQuery(System.String field, System.String termStr)
{
    System.String text = lowercaseExpandedTerms ? termStr.ToLower() : termStr;
    return new PrefixQuery(new Term(field, text));
}
/// <summary>
/// Factory method for generating a query (similar to GetWildcardQuery).
/// Called when the parser parses an input term token that has the fuzzy
/// suffix (~) appended.
/// </summary>
/// <param name="field">name of the field the query will use.</param>
/// <param name="termStr">term token to use for building the term for the query.</param>
/// <param name="minSimilarity">minimum similarity threshold for the fuzzy match.</param>
/// <returns>the resulting <c>Query</c> built for the term.</returns>
/// <exception cref="ParseException">throw in overridden method to disallow</exception>
protected internal virtual Query GetFuzzyQuery(System.String field, System.String termStr, float minSimilarity)
{
    System.String text = lowercaseExpandedTerms ? termStr.ToLower() : termStr;
    return new FuzzyQuery(new Term(field, text), minSimilarity, fuzzyPrefixLength);
}
/// <summary>
/// Returns a String where the escape char has been removed: each backslash
/// escapes the character that follows it, so a doubled backslash yields a
/// single literal backslash.
/// </summary>
private System.String DiscardEscapeChar(System.String input)
{
    char[] caSource = input.ToCharArray();
    char[] caDest = new char[caSource.Length];
    int j = 0;
    // Track escape state explicitly.  The original look-behind test
    // (caSource[i-1] == '\\') mis-handled runs of three or more
    // backslashes: in "\\\a" the third backslash followed an (already
    // consumed) escape pair and was kept literally, producing "\\a"
    // instead of the correct "\a".
    bool escaped = false;
    for (int i = 0; i < caSource.Length; i++)
    {
        if (caSource[i] == '\\' && !escaped)
        {
            // Drop the escape char; the next character is taken literally.
            escaped = true;
        }
        else
        {
            caDest[j++] = caSource[i];
            escaped = false;
        }
    }
    return new System.String(caDest, 0, j);
}
/// <summary>
/// Returns a String where those characters that QueryParser expects to be
/// escaped are escaped by a preceding <c>\</c>.
/// </summary>
public static System.String escape(System.String s)
{
    // NOTE: keep this character set in sync with _ESCAPED_CHAR in the grammar!
    System.String specialChars = "\\+-!():^[]\"{}~*?";
    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    foreach (char c in s)
    {
        if (specialChars.IndexOf(c) >= 0)
        {
            sb.Append('\\');
        }
        sb.Append(c);
    }
    return sb.ToString();
}
/// <summary>
/// Command line tool to test the parser, using
/// <c>Lucene.Net.Analysis.SimpleAnalyzer</c>.
/// Usage: <c>PrecedenceQueryParser &lt;input&gt;</c>
/// </summary>
[STAThread]
public static void Main(System.String[] args)
{
    if (args.Length == 0)
    {
        // Restore the <input> placeholder that was lost in conversion
        // (the method's own doc comment shows the intended usage text).
        System.Console.Out.WriteLine("Usage: java Lucene.Net.queryParser.QueryParser <input>");
        System.Environment.Exit(0);
    }
    PrecedenceQueryParser qp = new PrecedenceQueryParser("field", new Lucene.Net.Analysis.SimpleAnalyzer());
    Query q = qp.parse(args[0]);
    System.Console.Out.WriteLine(q.ToString("field"));
}
// * Query ::= ( Clause )*
// * Clause ::= ["+", "-"] [ ":"] ( | "(" Query ")" )
// Parses an optional AND/OR conjunction token; returns CONJ_AND, CONJ_OR,
// or CONJ_NONE when the next token is neither. (Generated by JavaCC.)
public int Conjunction()
{
int ret = CONJ_NONE;
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.AND:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.OR:
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.AND:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.AND);
ret = CONJ_AND;
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.OR:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.OR);
ret = CONJ_OR;
break;
default:
jj_la1[0] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
break;
default:
jj_la1[1] = jj_gen;
;
break;
}
{
if (true)
return ret;
}
throw new System.ApplicationException("Missing return statement in function");
}
// Parses an optional +/-/NOT modifier token; returns MOD_REQ for "+",
// MOD_NOT for "-" or NOT, and MOD_NONE when no modifier is present.
// (Generated by JavaCC.)
public int Modifier()
{
int ret = MOD_NONE;
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NOT:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PLUS:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.MINUS:
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PLUS:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PLUS);
ret = MOD_REQ;
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.MINUS:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.MINUS);
ret = MOD_NOT;
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NOT:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NOT);
ret = MOD_NOT;
break;
default:
jj_la1[2] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
break;
default:
jj_la1[3] = jj_gen;
;
break;
}
{
if (true)
return ret;
}
throw new System.ApplicationException("Missing return statement in function");
}
// Top level production: a Query is a series of AndExpressions optionally
// separated by OR, the first optionally prefixed by a modifier. A single
// unmodified clause is returned directly instead of being wrapped in a
// BooleanQuery. (Generated by JavaCC.)
public Query Query(System.String field)
{
System.Collections.ArrayList clauses = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
Query q, firstQuery = null;
bool orPresent = false;
int modifier;
modifier = Modifier();
q = AndExpression(field);
AddClause(clauses, CONJ_NONE, modifier, q);
if (modifier == MOD_NONE)
firstQuery = q;
// Consume further OR-separated AndExpressions until no clause-starting
// token remains.
while (true)
{
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.OR:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NOT:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PLUS:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.MINUS:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.LPAREN:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.QUOTED:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.TERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PREFIXTERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.WILDTERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_START:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_START:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER:
;
break;
default:
jj_la1[4] = jj_gen;
goto label_1_brk;
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.OR:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.OR);
orPresent = true;
break;
default:
jj_la1[5] = jj_gen;
;
break;
}
modifier = Modifier();
q = AndExpression(field);
AddClause(clauses, orPresent?CONJ_OR:CONJ_NONE, modifier, q);
}
label_1_brk: ;
if (clauses.Count == 1 && firstQuery != null)
{
if (true)
return firstQuery;
}
else
{
{
if (true)
return GetBooleanQuery(clauses);
}
}
throw new System.ApplicationException("Missing return statement in function");
}
// AndExpression ::= Clause ( AND Modifier Clause )* — binds AND tighter
// than OR, which is the precedence behavior this parser exists to provide.
// A single clause is returned directly. (Generated by JavaCC.)
public Query AndExpression(System.String field)
{
System.Collections.ArrayList clauses = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
Query q, firstQuery = null;
int modifier;
q = Clause(field);
AddClause(clauses, CONJ_NONE, MOD_NONE, q);
firstQuery = q;
while (true)
{
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.AND:
;
break;
default:
jj_la1[6] = jj_gen;
goto label_2_brk;
}
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.AND);
modifier = Modifier();
q = Clause(field);
AddClause(clauses, CONJ_AND, modifier, q);
}
label_2_brk: ;
if (clauses.Count == 1 && firstQuery != null)
{
if (true)
return firstQuery;
}
else
{
{
if (true)
return GetBooleanQuery(clauses);
}
}
throw new System.ApplicationException("Missing return statement in function");
}
// Clause ::= [ <TERM> ":" ] ( Term | "(" Query ")" [ "^" <NUMBER> ] ) —
// an optional field override (detected via two-token lookahead Jj_2_1)
// followed by either a bare term or a boosted parenthesized sub-query.
// (Generated by JavaCC.)
public Query Clause(System.String field)
{
Query q;
Token fieldToken = null, boost = null;
if (Jj_2_1(2))
{
// Field-prefixed clause: rebind "field" for the rest of this clause.
fieldToken = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.TERM);
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.COLON);
field = DiscardEscapeChar(fieldToken.image);
}
else
{
;
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.QUOTED:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.TERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PREFIXTERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.WILDTERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_START:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_START:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER:
q = Term(field);
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.LPAREN:
// Parenthesized sub-query with optional ^boost suffix.
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.LPAREN);
q = Query(field);
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RPAREN);
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT);
boost = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER);
break;
default:
jj_la1[7] = jj_gen;
;
break;
}
break;
default:
jj_la1[8] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
if (boost != null)
{
float f = (float) 1.0;
try
{
f = (float) System.Single.Parse(boost.image);
q.SetBoost(f);
}
catch (System.Exception ignored)
{
// invalid boost number: leave the query unboosted
}
}
{
if (true)
return q;
}
throw new System.ApplicationException("Missing return statement in function");
}
// Parses one term-level construct: a simple / prefix / wildcard / fuzzy
// term, an inclusive [a TO b] or exclusive {a TO b} range, or a quoted
// phrase. Each form may carry an optional ~fuzzy-slop and/or ^boost
// suffix. (Generated by JavaCC.)
public Query Term(System.String field)
{
Token term, boost = null, fuzzySlop = null, goop1, goop2;
bool prefix = false;
bool wildcard = false;
bool fuzzy = false;
Query q;
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
// Simple term variants, with optional ~slop and ^boost suffixes.
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.TERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PREFIXTERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.WILDTERM:
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER:
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.TERM:
term = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.TERM);
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PREFIXTERM:
term = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.PREFIXTERM);
prefix = true;
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.WILDTERM:
term = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.WILDTERM);
wildcard = true;
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER:
term = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER);
break;
default:
jj_la1[9] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.FUZZY_SLOP:
fuzzySlop = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.FUZZY_SLOP);
fuzzy = true;
break;
default:
jj_la1[10] = jj_gen;
;
break;
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT);
boost = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER);
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.FUZZY_SLOP:
fuzzySlop = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.FUZZY_SLOP);
fuzzy = true;
break;
default:
jj_la1[11] = jj_gen;
;
break;
}
break;
default:
jj_la1[12] = jj_gen;
;
break;
}
System.String termImage = DiscardEscapeChar(term.image);
if (wildcard)
{
q = GetWildcardQuery(field, termImage);
}
else if (prefix)
{
// strip the trailing '*' before building the prefix query
q = GetPrefixQuery(field, DiscardEscapeChar(term.image.Substring(0, (term.image.Length - 1) - (0))));
}
else if (fuzzy)
{
float fms = fuzzyMinSim;
try
{
// fuzzySlop.image is "~" optionally followed by the similarity
fms = (float) System.Single.Parse(fuzzySlop.image.Substring(1));
}
catch (System.Exception ignored)
{
// bare "~": keep the configured default similarity
}
if (fms < 0.0f || fms > 1.0f)
{
{
if (true)
throw new ParseException("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
}
}
q = GetFuzzyQuery(field, termImage, fms);
}
else
{
q = GetFieldQuery(field, termImage);
}
break;
// Inclusive range: [ goop1 TO goop2 ] with optional ^boost.
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_START:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_START);
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_GOOP:
goop1 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_GOOP);
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_QUOTED:
goop1 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_QUOTED);
break;
default:
jj_la1[13] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_TO:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_TO);
break;
default:
jj_la1[14] = jj_gen;
;
break;
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_GOOP:
goop2 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_GOOP);
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_QUOTED:
goop2 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_QUOTED);
break;
default:
jj_la1[15] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_END);
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT);
boost = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER);
break;
default:
jj_la1[16] = jj_gen;
;
break;
}
// Strip surrounding quotes from quoted endpoints, unescape plain ones.
if (goop1.kind == Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_QUOTED)
{
goop1.image = goop1.image.Substring(1, (goop1.image.Length - 1) - (1));
}
else
{
goop1.image = DiscardEscapeChar(goop1.image);
}
if (goop2.kind == Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEIN_QUOTED)
{
goop2.image = goop2.image.Substring(1, (goop2.image.Length - 1) - (1));
}
else
{
goop2.image = DiscardEscapeChar(goop2.image);
}
q = GetRangeQuery(field, goop1.image, goop2.image, true);
break;
// Exclusive range: { goop1 TO goop2 } with optional ^boost.
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_START:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_START);
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_GOOP:
goop1 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_GOOP);
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_QUOTED:
goop1 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_QUOTED);
break;
default:
jj_la1[17] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_TO:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_TO);
break;
default:
jj_la1[18] = jj_gen;
;
break;
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_GOOP:
goop2 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_GOOP);
break;
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_QUOTED:
goop2 = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_QUOTED);
break;
default:
jj_la1[19] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_END);
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT);
boost = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER);
break;
default:
jj_la1[20] = jj_gen;
;
break;
}
if (goop1.kind == Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_QUOTED)
{
goop1.image = goop1.image.Substring(1, (goop1.image.Length - 1) - (1));
}
else
{
goop1.image = DiscardEscapeChar(goop1.image);
}
if (goop2.kind == Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.RANGEEX_QUOTED)
{
goop2.image = goop2.image.Substring(1, (goop2.image.Length - 1) - (1));
}
else
{
goop2.image = DiscardEscapeChar(goop2.image);
}
q = GetRangeQuery(field, goop1.image, goop2.image, false);
break;
// Quoted phrase, with optional ~slop and ^boost suffixes.
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.QUOTED:
term = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.QUOTED);
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.FUZZY_SLOP:
fuzzySlop = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.FUZZY_SLOP);
break;
default:
jj_la1[21] = jj_gen;
;
break;
}
switch ((jj_ntk == - 1)?Jj_ntk():jj_ntk)
{
case Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT:
Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.CARAT);
boost = Jj_consume_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.NUMBER);
break;
default:
jj_la1[22] = jj_gen;
;
break;
}
int s = phraseSlop;
if (fuzzySlop != null)
{
try
{
// "~n" on a phrase is a slop, not a fuzzy similarity
s = (int) System.Single.Parse(fuzzySlop.image.Substring(1));
}
catch (System.Exception ignored)
{
// bare "~": keep the default phrase slop
}
}
q = GetFieldQuery(field, term.image.Substring(1, (term.image.Length - 1) - (1)), s);
break;
default:
jj_la1[23] = jj_gen;
Jj_consume_token(- 1);
throw new ParseException();
}
if (boost != null)
{
float f = (float) 1.0;
try
{
f = (float) System.Single.Parse(boost.image);
}
catch (System.Exception ignored)
{
/* Should this be handled somehow? (defaults to "no boost", if
* boost number is invalid)
*/
}
// avoid boosting null queries, such as those caused by stop words
if (q != null)
{
q.SetBoost(f);
}
}
{
if (true)
return q;
}
throw new System.ApplicationException("Missing return statement in function");
}
/// <summary>
/// Lookahead check 1 (the TERM COLON production) with a budget of
/// <paramref name="xla"/> tokens. The outcome is cached via Jj_save so a
/// repeated check at the same position is cheap.
/// </summary>
/// <returns>true when the lookahead matches.</returns>
private bool Jj_2_1(int xla)
{
    jj_la = xla;
    jj_lastpos = jj_scanpos = token;
    bool matched;
    try
    {
        // Jj_3_1 returns true on scan FAILURE (JavaCC convention), so negate.
        matched = !Jj_3_1();
    }
    catch (LookaheadSuccess)
    {
        // Thrown by Jj_scan_token when the budget runs out with everything
        // matched so far: treat as success.
        matched = true;
    }
    finally
    {
        Jj_save(0, xla);
    }
    return matched;
}
/// <summary>Lookahead production 1: matches TERM followed by COLON.</summary>
/// <returns>true when the scan FAILED (JavaCC convention), false on success.</returns>
private bool Jj_3_1()
{
    // Short-circuit || reproduces the original early-return chain: a failed
    // scan of TERM aborts before COLON is ever attempted.
    return Jj_scan_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.TERM)
        || Jj_scan_token(Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.COLON);
}
/// <summary>Generated Token Manager; source of the token stream.</summary>
public PrecedenceQueryParserTokenManager token_source;
/// <summary>Current token, and a scratch slot used by Jj_ntk.</summary>
public Token token, jj_nt;
// Kind of the next token, or -1 when it has not been peeked at yet.
private int jj_ntk;
// Cursor and frontier of the region scanned by syntactic lookahead.
private Token jj_scanpos, jj_lastpos;
// Remaining lookahead budget for the current Jj_2_x call.
private int jj_la;
/// <summary>True while the parser is performing syntactic lookahead.</summary>
public bool lookingAhead = false;
// NOTE(review): not referenced anywhere in the visible code; presumably a
// JavaCC template leftover (semantic-lookahead flag) — verify before removing.
private bool jj_semLA;
// Generation counter, bumped on every consumed token; stamps in jj_la1 and
// JJCalls.gen are compared against it to detect stale state.
private int jj_gen;
// One generation stamp per grammar choice point (24 of them).
private int[] jj_la1 = new int[24];
// Per-choice-point bit masks of legal token kinds; filled in by jj_la1_0().
private static ulong[] jj_la1_0_Renamed_Field;
/// <summary>
/// Populates the choice-point mask table consulted by
/// GenerateParseException: entry i has a bit set for every token kind that
/// was legal at choice point i. The explicit (ulong) casts guard the two
/// literals with bit 31 set (0xc0000000), which would otherwise be typed as
/// a signed value and sign-extend.
/// </summary>
private static void jj_la1_0()
{
jj_la1_0_Renamed_Field = new ulong[]{0x180, 0x180, 0xe00, 0xe00, 0xfb1f00, 0x100, 0x80, 0x8000, 0xfb1000, 0x9a0000, 0x40000, 0x40000, 0x8000, 0xc000000, 0x1000000, 0xc000000, 0x8000, (ulong) 0xc0000000, 0x10000000, (ulong) 0xc0000000, 0x8000, 0x40000, 0x8000, 0xfb0000};
}
// Cached lookahead results, one chain per Jj_2_x routine.
// NOTE(review): allocated in InitBlock(), which is outside this chunk.
private JJCalls[] jj_2_rtns;
// True while GenerateParseException re-runs lookahead to collect the
// expected-token sequences (see Jj_rescan_token / Jj_scan_token).
private bool jj_rescan = false;
// Tokens consumed since the last purge of stale lookahead caches.
private int jj_gc = 0;
/// <summary>
/// Constructor with a user-supplied CharStream; builds a fresh token
/// manager and resets all generated bookkeeping state.
/// </summary>
public PrecedenceQueryParser(CharStream stream)
{
    InitBlock();
    token_source = new PrecedenceQueryParserTokenManager(stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // -1 marks every choice point as "not yet visited this generation".
    for (int idx = 0; idx < jj_la1.Length; idx++)
    {
        jj_la1[idx] = -1;
    }
    // Fresh (empty) lookahead caches; jj_2_rtns itself comes from InitBlock().
    for (int idx = 0; idx < jj_2_rtns.Length; idx++)
    {
        jj_2_rtns[idx] = new JJCalls();
    }
}
/// <summary>
/// Reinitialize the parser on a new CharStream, reusing the existing token
/// manager instance and clearing all generated bookkeeping state.
/// </summary>
public virtual void ReInit(CharStream stream)
{
    token_source.ReInit(stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // -1 marks every choice point as "not yet visited this generation".
    for (int idx = 0; idx < jj_la1.Length; idx++)
    {
        jj_la1[idx] = -1;
    }
    // Fresh (empty) lookahead caches.
    for (int idx = 0; idx < jj_2_rtns.Length; idx++)
    {
        jj_2_rtns[idx] = new JJCalls();
    }
}
/// <summary>
/// Constructor with a generated Token Manager; resets all generated
/// bookkeeping state.
/// </summary>
public PrecedenceQueryParser(PrecedenceQueryParserTokenManager tm)
{
    InitBlock();
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // -1 marks every choice point as "not yet visited this generation".
    for (int idx = 0; idx < jj_la1.Length; idx++)
    {
        jj_la1[idx] = -1;
    }
    // Fresh (empty) lookahead caches; jj_2_rtns itself comes from InitBlock().
    for (int idx = 0; idx < jj_2_rtns.Length; idx++)
    {
        jj_2_rtns[idx] = new JJCalls();
    }
}
/// <summary>
/// Reinitialize the parser with a different Token Manager, clearing all
/// generated bookkeeping state.
/// </summary>
public virtual void ReInit(PrecedenceQueryParserTokenManager tm)
{
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // -1 marks every choice point as "not yet visited this generation".
    for (int idx = 0; idx < jj_la1.Length; idx++)
    {
        jj_la1[idx] = -1;
    }
    // Fresh (empty) lookahead caches.
    for (int idx = 0; idx < jj_2_rtns.Length; idx++)
    {
        jj_2_rtns[idx] = new JJCalls();
    }
}
/// <summary>
/// Consumes the next token and verifies it has the expected kind. On a
/// mismatch the previous token is restored and a ParseException (built by
/// GenerateParseException) is thrown.
/// </summary>
/// <param name="kind">expected token kind</param>
/// <returns>the consumed token</returns>
private Token Jj_consume_token(int kind)
{
Token oldToken = null;
// Step along the token chain; pull from the token manager only when
// lookahead has not already fetched the next token.
if ((oldToken = token).next != null)
token = token.next;
else
token = token.next = token_source.GetNextToken();
// Invalidate the peeked-token-kind cache.
jj_ntk = - 1;
if (token.kind == kind)
{
jj_gen++;
// Every 100 consumed tokens, clear cached lookahead entries that
// predate the current generation so the token chain they pin can be
// collected.
if (++jj_gc > 100)
{
jj_gc = 0;
for (int i = 0; i < jj_2_rtns.Length; i++)
{
JJCalls c = jj_2_rtns[i];
while (c != null)
{
if (c.gen < jj_gen)
c.first = null;
c = c.next;
}
}
}
return token;
}
// Mismatch: roll back and record the kind we expected for error reporting.
token = oldToken;
jj_kind = kind;
throw GenerateParseException();
}
/// <summary>
/// Control-flow exception thrown by Jj_scan_token to abort a syntactic
/// lookahead as soon as success is certain; caught in Jj_2_1.
/// </summary>
[Serializable]
private sealed class LookaheadSuccess:System.ApplicationException
{
}
// Shared LookaheadSuccess instance thrown by Jj_scan_token.
// NOTE(review): no initialization is visible in this chunk; presumably it is
// assigned in InitBlock() — verify, otherwise the "throw jj_ls" in
// Jj_scan_token would raise a NullReferenceException instead.
private LookaheadSuccess jj_ls;
/// <summary>
/// Scans one token during syntactic lookahead. Returns true when the token
/// does NOT match <paramref name="kind"/> (lookahead failure, JavaCC
/// convention) and throws jj_ls (LookaheadSuccess) when the lookahead
/// budget is exhausted with every token matched so far.
/// </summary>
private bool Jj_scan_token(int kind)
{
// At the frontier of the scanned region: advance it by one token
// (fetching from the token manager if necessary) and spend one unit of
// the lookahead budget.
if (jj_scanpos == jj_lastpos)
{
jj_la--;
if (jj_scanpos.next == null)
{
jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.GetNextToken();
}
else
{
jj_lastpos = jj_scanpos = jj_scanpos.next;
}
}
else
{
// Re-walking an already-scanned stretch: no budget is consumed.
jj_scanpos = jj_scanpos.next;
}
// During the error-reporting rescan, record this attempted token kind at
// its offset from the current token (see Jj_add_error_token).
if (jj_rescan)
{
int i = 0; Token tok = token;
while (tok != null && tok != jj_scanpos)
{
i++; tok = tok.next;
}
if (tok != null)
Jj_add_error_token(kind, i);
}
if (jj_scanpos.kind != kind)
return true;
// Budget used up at the frontier with a full match: abort the lookahead
// successfully via the control-flow exception.
if (jj_la == 0 && jj_scanpos == jj_lastpos)
throw jj_ls;
return false;
}
/// <summary>
/// Advances to and returns the next token, fetching it from the token
/// manager only when the token chain has not already been extended by
/// lookahead. Bumps the generation counter and resets the peeked-kind
/// cache.
/// </summary>
public Token GetNextToken()
{
    Token next = token.next;
    if (next == null)
    {
        next = token_source.GetNextToken();
        token.next = next;
    }
    token = next;
    jj_ntk = -1;
    jj_gen++;
    return token;
}
/// <summary>
/// Returns the token <paramref name="index"/> positions ahead of the
/// current one (ahead of the scan cursor while looking ahead), extending
/// the token chain from the token manager as needed. index 0 returns the
/// current token.
/// </summary>
public Token GetToken(int index)
{
    Token t = lookingAhead ? jj_scanpos : token;
    while (index-- > 0)
    {
        if (t.next == null)
        {
            t.next = token_source.GetNextToken();
        }
        t = t.next;
    }
    return t;
}
/// <summary>
/// Peeks at the next token's kind without consuming it, caching the result
/// in jj_ntk (and the peeked token in jj_nt). Fetches from the token
/// manager only when the chain has no next token yet.
/// </summary>
private int Jj_ntk()
{
    jj_nt = token.next;
    if (jj_nt == null)
    {
        token.next = token_source.GetNextToken();
        return (jj_ntk = token.next.kind);
    }
    return (jj_ntk = jj_nt.kind);
}
// Accumulated expected-token sequences for error reporting; a synchronized
// ArrayList standing in for the Java original's Vector.
private System.Collections.ArrayList jj_expentries = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));
// Scratch buffer holding the entry currently being assembled.
private int[] jj_expentry;
// Kind of the token that failed to match, or -1 when none is pending.
private int jj_kind = - 1;
// Most recent token kinds recorded during the error-reporting rescan
// (capacity 100 — Jj_add_error_token ignores positions beyond that).
private int[] jj_lasttokens = new int[100];
// Length of the valid prefix of jj_lasttokens.
private int jj_endpos;
/// <summary>
/// Called from Jj_scan_token during the error-reporting rescan. Builds up
/// sequences of attempted token kinds in jj_lasttokens; whenever the
/// position resets (a new lookahead path), the completed sequence is
/// copied into jj_expentries unless an identical one is already there.
/// </summary>
/// <param name="kind">token kind that was attempted</param>
/// <param name="pos">offset of the attempt from the current token</param>
private void Jj_add_error_token(int kind, int pos)
{
// Buffer capacity is fixed at 100; ignore anything beyond it.
if (pos >= 100)
return ;
if (pos == jj_endpos + 1)
{
// Continuation of the current sequence: append.
jj_lasttokens[jj_endpos++] = kind;
}
else if (jj_endpos != 0)
{
// Position reset: the previous sequence is complete — snapshot it.
jj_expentry = new int[jj_endpos];
for (int i = 0; i < jj_endpos; i++)
{
jj_expentry[i] = jj_lasttokens[i];
}
// Deduplicate against sequences recorded earlier.
bool exists = false;
for (System.Collections.IEnumerator e = jj_expentries.GetEnumerator(); e.MoveNext(); )
{
int[] oldentry = (int[]) (e.Current);
if (oldentry.Length == jj_expentry.Length)
{
exists = true;
for (int i = 0; i < jj_expentry.Length; i++)
{
if (oldentry[i] != jj_expentry[i])
{
exists = false;
break;
}
}
if (exists)
break;
}
}
if (!exists)
jj_expentries.Add(jj_expentry);
// Start the next sequence with this attempt (unless pos == 0, which is
// the final flush issued by GenerateParseException).
if (pos != 0)
jj_lasttokens[(jj_endpos = pos) - 1] = kind;
}
}
/// <summary>
/// Builds a ParseException listing the token kinds that would have been
/// legal at the point of failure: the single kind recorded by
/// Jj_consume_token (if any), every kind allowed by a choice point whose
/// generation stamp matches the current generation, and the multi-token
/// sequences gathered by re-running the lookahead routines.
/// </summary>
public virtual ParseException GenerateParseException()
{
    jj_expentries.Clear();
    // One flag per token kind; C# zero-initializes the array, so the
    // generated explicit false-fill loop was redundant and is omitted.
    bool[] la1tokens = new bool[32];
    if (jj_kind >= 0)
    {
        la1tokens[jj_kind] = true;
        jj_kind = -1;
    }
    // Choice points stamped with the current generation were active when
    // parsing failed; mark every token kind their masks allow.
    for (int i = 0; i < 24; i++)
    {
        if (jj_la1[i] == jj_gen)
        {
            for (int j = 0; j < 32; j++)
            {
                // Shift in ulong from the start: the original
                // "(ulong)((1 << j))" performed a signed int shift, so for
                // j == 31 the cast sign-extended to 0xFFFFFFFF80000000 and
                // only produced correct results because the masks never
                // exceed 32 bits. 1UL << j tests exactly bit j.
                if ((jj_la1_0_Renamed_Field[i] & (1UL << j)) != 0)
                {
                    la1tokens[j] = true;
                }
            }
        }
    }
    // Each flagged kind becomes a one-element expected sequence.
    for (int i = 0; i < 32; i++)
    {
        if (la1tokens[i])
        {
            jj_expentry = new int[1];
            jj_expentry[0] = i;
            jj_expentries.Add(jj_expentry);
        }
    }
    // Re-run the lookahead routines to collect multi-token sequences; the
    // final Jj_add_error_token(0, 0) flushes the last pending sequence.
    jj_endpos = 0;
    Jj_rescan_token();
    Jj_add_error_token(0, 0);
    int[][] exptokseq = new int[jj_expentries.Count][];
    for (int i = 0; i < jj_expentries.Count; i++)
    {
        exptokseq[i] = (int[]) jj_expentries[i];
    }
    return new ParseException(token, exptokseq, Lucene.Net.QueryParsers.Precedence.PrecedenceQueryParserConstants.tokenImage);
}
/// <summary>Enable tracing. No-op in this generated parser (empty body).</summary>
public void Enable_tracing()
{
}
/// <summary>Disable tracing. No-op in this generated parser (empty body).</summary>
public void Disable_tracing()
{
}
/// <summary>
/// Error-reporting pass: with jj_rescan set, re-runs every cached
/// lookahead attempt that is still current (gen > jj_gen) so that
/// Jj_scan_token records the attempted token kinds via Jj_add_error_token.
/// </summary>
private void Jj_rescan_token()
{
jj_rescan = true;
// Loop bound 1 = number of generated Jj_2_x lookahead routines.
for (int i = 0; i < 1; i++)
{
JJCalls p = jj_2_rtns[i];
do
{
if (p.gen > jj_gen)
{
// Restore the saved lookahead state and replay the scan.
jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
switch (i)
{
case 0: Jj_3_1(); break;
}
}
p = p.next;
}
while (p != null);
}
jj_rescan = false;
}
/// <summary>
/// Records the state of lookahead routine <paramref name="index"/> so that
/// Jj_rescan_token can replay it for error reporting (and stale entries can
/// be purged by Jj_consume_token).
/// </summary>
/// <param name="index">which Jj_2_x routine (0-based)</param>
/// <param name="xla">the lookahead limit that was used</param>
private void Jj_save(int index, int xla)
{
JJCalls p = jj_2_rtns[index];
// Walk the chain to the first stale (reusable) slot, appending a new node
// if every existing entry is still current.
while (p.gen > jj_gen)
{
if (p.next == null)
{
p = p.next = new JJCalls(); break;
}
p = p.next;
}
// gen encodes the generation up to which this entry stays valid; first is
// the token where the lookahead started.
p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
}
/// <summary>
/// One cached lookahead attempt; entries for the same routine form a
/// singly-linked chain rooted in jj_2_rtns.
/// </summary>
internal sealed class JJCalls
{
internal int gen; // generation up to which this entry is valid
internal Token first; // token at which the lookahead started
internal int arg; // the xla (lookahead limit) argument that was used
internal JJCalls next; // next cached attempt for the same routine
}
/// <summary>Type initializer: populates the jj_la1_0 mask table once.</summary>
static PrecedenceQueryParser()
{
    jj_la1_0();
}
}
}