Unnamed: 0
int64 0
6.45k
| func
stringlengths 37
143k
| target
class label 2
classes | project
stringlengths 33
157
|
---|---|---|---|
/**
 * Extensible enumeration of pricing models for a product bundle: either the
 * bundle price is the sum of its items (ITEM_SUM) or the bundle carries its
 * own price (BUNDLE).
 */
740 | public class ProductBundlePricingModelType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
// Registry of every instance ever constructed, keyed by type string; insertion
// order is preserved so iteration matches declaration order.
// NOTE(review): plain LinkedHashMap mutated from the constructor — assumes all
// instances are registered during single-threaded class initialization; confirm.
private static final Map<String, ProductBundlePricingModelType> TYPES = new LinkedHashMap<String, ProductBundlePricingModelType>();
public static final ProductBundlePricingModelType ITEM_SUM = new ProductBundlePricingModelType("ITEM_SUM","Item Sum");
public static final ProductBundlePricingModelType BUNDLE = new ProductBundlePricingModelType("BUNDLE","Bundle");
// Looks up a previously registered instance; returns null for unknown types.
public static ProductBundlePricingModelType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
// No-arg constructor required by serialization frameworks; leaves both fields null.
public ProductBundlePricingModelType() {
//do nothing
}
public ProductBundlePricingModelType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
// Records the type string and self-registers in TYPES (first registration wins).
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
// equals/hashCode are based solely on the type string.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ProductBundlePricingModelType other = (ProductBundlePricingModelType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
} | 1no label
| core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_service_type_ProductBundlePricingModelType.java |
/**
 * Acknowledged-style response for a put-index-template request; the only
 * payload is the inherited "acknowledged" flag.
 */
657 | public class PutIndexTemplateResponse extends AcknowledgedResponse {
// Package-private no-arg constructor used when deserializing via readFrom.
PutIndexTemplateResponse() {
}
PutIndexTemplateResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
// Read order must mirror writeTo below.
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_template_put_PutIndexTemplateResponse.java |
/**
 * Base class for client requests whose work is expressed as a {@link Callable}:
 * process() invokes call() and sends either the result or the raised exception
 * back to the originating client endpoint.
 */
74 | public abstract class CallableClientRequest extends ClientRequest implements Callable {
@Override
final void process() throws Exception {
ClientEndpoint endpoint = getEndpoint();
try {
Object result = call();
endpoint.sendResponse(result, getCallId());
} catch (Exception e) {
// Log locally, then forward the exception itself so the client observes the failure.
clientEngine.getLogger(getClass()).warning(e);
endpoint.sendResponse(e, getCallId());
}
}
} | 0true
| hazelcast_src_main_java_com_hazelcast_client_CallableClientRequest.java |
// Walks the AST looking for the invocation whose argument list starts exactly
// where the completion node starts; when found, records a ParameterInfo
// proposal built from that invocation's declaration and target.
29 | new Visitor() {
@Override
public void visit(Tree.InvocationExpression that) {
// Prefer the positional argument list, falling back to named arguments.
Tree.ArgumentList al = that.getPositionalArgumentList();
if (al==null) {
al = that.getNamedArgumentList();
}
if (al!=null) {
Integer startIndex = al.getStartIndex();
Integer startIndex2 = node.getStartIndex();
if (startIndex!=null && startIndex2!=null &&
startIndex.intValue()==startIndex2.intValue()) {
Tree.Primary primary = that.getPrimary();
if (primary instanceof Tree.MemberOrTypeExpression) {
Tree.MemberOrTypeExpression mte =
(Tree.MemberOrTypeExpression) primary;
if (mte.getDeclaration()!=null && mte.getTarget()!=null) {
result.add(new ParameterInfo(al.getStartIndex(),
mte.getDeclaration(), mte.getTarget(),
node.getScope(), cpc,
al instanceof Tree.NamedArgumentList));
}
}
}
}
// Keep descending: nested invocations may also match.
super.visit(that);
}
}.visit(cpc.getRootNode()); | 0true
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_InvocationCompletionProposal.java |
/**
 * SQL function "last": returns the last element of a multi-value (collection,
 * map, array), or the value itself when it is not a multi-value. In aggregate
 * mode the most recently seen value wins.
 */
1,102 | public class OSQLFunctionLast extends OSQLFunctionConfigurableAbstract {
public static final String NAME = "last";
// Most recent value produced by execute(); reported by getResult() in aggregate mode.
private Object last;
public OSQLFunctionLast() {
// Exactly one parameter: the field/expression to take the last element of.
super(NAME, 1, 1);
}
public Object execute(final OIdentifiable iCurrentRecord, Object iCurrentResult, final Object[] iParameters,
final OCommandContext iContext) {
Object value = iParameters[0];
// Resolve filter items against the current record before inspecting the value.
if (value instanceof OSQLFilterItem)
value = ((OSQLFilterItem) value).getValue(iCurrentRecord, iContext);
if (OMultiValue.isMultiValue(value))
value = OMultiValue.getLastValue(value);
last = value;
return value;
}
public boolean aggregateResults() {
return configuredParameters.length == 1;
}
@Override
public Object getResult() {
return last;
}
@Override
public boolean filterResult() {
return true;
}
public String getSyntax() {
return "Syntax error: last(<field>)";
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_functions_coll_OSQLFunctionLast.java |
/**
 * SQL method "asfloat": coerces the pipeline value to a {@link Float}.
 * Numbers are narrowed via {@link Number#floatValue()}; any other non-null
 * value is parsed from its trimmed string form (this may throw
 * NumberFormatException for non-numeric text); null stays null.
 */
1,136 | public class OSQLMethodAsFloat extends OAbstractSQLMethod {
public static final String NAME = "asfloat";
public OSQLMethodAsFloat() {
super(NAME);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
if (ioResult instanceof Number) {
// Already numeric: narrow directly without a String round-trip.
ioResult = ((Number) ioResult).floatValue();
} else {
// Float.valueOf replaces the deprecated boxing constructor new Float(String).
ioResult = ioResult != null ? Float.valueOf(ioResult.toString().trim()) : null;
}
return ioResult;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsFloat.java |
/**
 * TestNG tests for ODecimalSerializer: size reporting plus round-tripping a
 * BigDecimal through the plain, native, and direct-memory code paths.
 */
141 | @Test
public class DecimalSerializerTest {
// NOTE(review): presumably 4-byte scale + 4-byte unscaled length + 1 data byte
// for unscaled value "20" — confirm against ODecimalSerializer's layout.
private final static int FIELD_SIZE = 9;
private static final BigDecimal OBJECT = new BigDecimal(new BigInteger("20"), 2);
private ODecimalSerializer decimalSerializer;
// Shared scratch buffer; each test overwrites it from offset 0.
private static final byte[] stream = new byte[FIELD_SIZE];
@BeforeClass
public void beforeClass() {
decimalSerializer = new ODecimalSerializer();
}
public void testFieldSize() {
Assert.assertEquals(decimalSerializer.getObjectSize(OBJECT), FIELD_SIZE);
}
public void testSerialize() {
decimalSerializer.serialize(OBJECT, stream, 0);
Assert.assertEquals(decimalSerializer.deserialize(stream, 0), OBJECT);
}
public void testSerializeNative() {
decimalSerializer.serializeNative(OBJECT, stream, 0);
Assert.assertEquals(decimalSerializer.deserializeNative(stream, 0), OBJECT);
}
public void testNativeDirectMemoryCompatibility() {
decimalSerializer.serializeNative(OBJECT, stream, 0);
ODirectMemoryPointer pointer = new ODirectMemoryPointer(stream);
try {
Assert.assertEquals(decimalSerializer.deserializeFromDirectMemory(pointer, 0), OBJECT);
} finally {
// Direct memory is manually managed; always release it.
pointer.free();
}
}
} | 0true
| commons_src_test_java_com_orientechnologies_common_serialization_types_DecimalSerializerTest.java |
3,984 | public static abstract class AbstractDistanceScoreFunction extends ScoreFunction {
// Scale after being processed by the decay function (see processScale below).
private final double scale;
protected final double offset;
private final DecayFunction func;
/**
 * @param userSuppiedScale distance at which the score has decayed to {@code decay}; must be > 0
 * @param decay target score at distance {@code scale}; must lie strictly between 0 and 1
 * @param offset distances up to this value incur no decay; must be >= 0
 * @param func decay curve used to evaluate and explain the score
 */
public AbstractDistanceScoreFunction(double userSuppiedScale, double decay, double offset, DecayFunction func) {
super(CombineFunction.MULT);
if (userSuppiedScale <= 0.0) {
throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : scale must be > 0.0.");
}
if (decay <= 0.0 || decay >= 1.0) {
// Bounds are exclusive (0 and 1 themselves are rejected), so the message says (0..1).
throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME
+ " : decay must be in the range (0..1).");
}
this.scale = func.processScale(userSuppiedScale, decay);
this.func = func;
if (offset < 0.0d) {
// Zero is a valid offset; only negative values are rejected, hence ">= 0.0".
throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : offset must be >= 0.0");
}
this.offset = offset;
}
@Override
public double score(int docId, float subQueryScore) {
double value = distance(docId);
return func.evaluate(value, scale);
}
/**
 * This function computes the distance from a defined origin. Since
 * the value of the document is read from the index, it cannot be
 * guaranteed that the value actually exists. If it does not, we assume
 * the user handles this case in the query and return 0.
 * */
protected abstract double distance(int docId);
// Human-readable form of the distance, used when building explanations.
protected abstract String getDistanceString(int docId);
protected abstract String getFieldName();
@Override
public Explanation explainScore(int docId, Explanation subQueryExpl) {
ComplexExplanation ce = new ComplexExplanation();
ce.setValue(CombineFunction.toFloat(score(docId, subQueryExpl.getValue())));
ce.setMatch(true);
ce.setDescription("Function for field " + getFieldName() + ":");
ce.addDetail(func.explainFunction(getDistanceString(docId), distance(docId), scale));
return ce;
}
} | 1no label
| src_main_java_org_elasticsearch_index_query_functionscore_DecayFunctionParser.java |
224 | @LuceneTestCase.SuppressCodecs({"MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene3x"})
public class XPostingsHighlighterTests extends ElasticsearchLuceneTestCase {
/*
Tests changes needed to make possible to perform discrete highlighting.
We want to highlight every field value separately in case of multiple values, at least when needing to return the whole field content
This is needed to be able to get back a single snippet per value when number_of_fragments=0
*/
// Verifies discrete per-value highlighting: overriding loadFieldValues and
// getOffsetForCurrentValue lets successive highlight() calls return one
// snippet per field value instead of one concatenated snippet.
@Test
public void testDiscreteHighlightingPerValue() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
final String firstValue = "This is a test. Just a test highlighting from postings highlighter.";
Document doc = new Document();
doc.add(body);
body.setStringValue(firstValue);
final String secondValue = "This is the second value to perform highlighting on.";
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue(secondValue);
final String thirdValue = "This is the third value to test highlighting with postings.";
Field body3 = new Field("body", "", offsetsType);
doc.add(body3);
body3.setStringValue(thirdValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
IndexSearcher searcher = newSearcher(ir);
XPostingsHighlighter highlighter = new XPostingsHighlighter() {
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
@Override
protected char getMultiValuedSeparator(String field) {
//U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting
return 8233;
}
};
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
String firstHlValue = "This is a test. Just a test <b>highlighting</b> from postings highlighter.";
String secondHlValue = "This is the second value to perform <b>highlighting</b> on.";
String thirdHlValue = "This is the third value to test <b>highlighting</b> with postings.";
//default behaviour: using the WholeBreakIterator, despite the multi valued paragraph separator we get back a single snippet for multiple values
assertThat(snippets[0], equalTo(firstHlValue + (char)8233 + secondHlValue + (char)8233 + thirdHlValue));
// Stateful iterators: each highlight() call consumes the next value/offset,
// so the three calls below each see exactly one field value.
highlighter = new XPostingsHighlighter() {
Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue, thirdValue).iterator();
Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1, firstValue.length() + secondValue.length() + 2).iterator();
@Override
protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
return new String[][]{new String[]{valuesIterator.next()}};
}
@Override
protected int getOffsetForCurrentValue(String field, int docId) {
return offsetsIterator.next();
}
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
};
//first call using the WholeBreakIterator, we get now only the first value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(firstHlValue));
//second call using the WholeBreakIterator, we get now only the second value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(secondHlValue));
//third call using the WholeBreakIterator, we get now only the third value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(thirdHlValue));
ir.close();
dir.close();
}
@Test
public void testDiscreteHighlightingPerValue_secondValueWithoutMatches() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
final String firstValue = "This is a test. Just a test highlighting from postings highlighter.";
Document doc = new Document();
doc.add(body);
body.setStringValue(firstValue);
final String secondValue = "This is the second value without matches.";
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue(secondValue);
final String thirdValue = "This is the third value to test highlighting with postings.";
Field body3 = new Field("body", "", offsetsType);
doc.add(body3);
body3.setStringValue(thirdValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
IndexSearcher searcher = newSearcher(ir);
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
XPostingsHighlighter highlighter = new XPostingsHighlighter() {
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
@Override
protected char getMultiValuedSeparator(String field) {
//U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting
return 8233;
}
@Override
protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
return new Passage[0];
}
};
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
String firstHlValue = "This is a test. Just a test <b>highlighting</b> from postings highlighter.";
String thirdHlValue = "This is the third value to test <b>highlighting</b> with postings.";
//default behaviour: using the WholeBreakIterator, despite the multi valued paragraph separator we get back a single snippet for multiple values
//but only the first and the third value are returned since there are no matches in the second one.
assertThat(snippets[0], equalTo(firstHlValue + (char)8233 + secondValue + (char)8233 + thirdHlValue));
highlighter = new XPostingsHighlighter() {
Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue, thirdValue).iterator();
Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1, firstValue.length() + secondValue.length() + 2).iterator();
@Override
protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
return new String[][]{new String[]{valuesIterator.next()}};
}
@Override
protected int getOffsetForCurrentValue(String field, int docId) {
return offsetsIterator.next();
}
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
@Override
protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
return new Passage[0];
}
};
//first call using the WholeBreakIterator, we get now only the first value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(firstHlValue));
//second call using the WholeBreakIterator, we get now nothing back because there's nothing to highlight in the second value
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], nullValue());
//third call using the WholeBreakIterator, we get now only the third value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(thirdHlValue));
ir.close();
dir.close();
}
// Discrete per-value highlighting with two matches inside each value: both
// occurrences of the query term must be marked in each returned snippet.
@Test
public void testDiscreteHighlightingPerValue_MultipleMatches() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
final String firstValue = "This is a highlighting test. Just a test highlighting from postings highlighter.";
Document doc = new Document();
doc.add(body);
body.setStringValue(firstValue);
final String secondValue = "This is the second highlighting value to test highlighting with postings.";
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue(secondValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
IndexSearcher searcher = newSearcher(ir);
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
String firstHlValue = "This is a <b>highlighting</b> test. Just a test <b>highlighting</b> from postings highlighter.";
String secondHlValue = "This is the second <b>highlighting</b> value to test <b>highlighting</b> with postings.";
XPostingsHighlighter highlighter = new XPostingsHighlighter() {
Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue).iterator();
Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1).iterator();
@Override
protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
return new String[][]{new String[]{valuesIterator.next()}};
}
@Override
protected int getOffsetForCurrentValue(String field, int docId) {
return offsetsIterator.next();
}
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
@Override
protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
return new Passage[0];
}
};
//first call using the WholeBreakIterator, we get now only the first value properly highlighted as we wish
String[] snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(firstHlValue));
//second call using the WholeBreakIterator, we get now only the second value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(secondHlValue));
ir.close();
dir.close();
}
// Discrete per-value highlighting with a BooleanQuery of several SHOULD terms:
// values without any matching term yield null, the others get their own snippet.
@Test
public void testDiscreteHighlightingPerValue_MultipleQueryTerms() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
final String firstValue = "This is the first sentence. This is the second sentence.";
Document doc = new Document();
doc.add(body);
body.setStringValue(firstValue);
final String secondValue = "This is the third sentence. This is the fourth sentence.";
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue(secondValue);
final String thirdValue = "This is the fifth sentence";
Field body3 = new Field("body", "", offsetsType);
doc.add(body3);
body3.setStringValue(thirdValue);
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
IndexSearcher searcher = newSearcher(ir);
BooleanQuery query = new BooleanQuery();
query.add(new BooleanClause(new TermQuery(new Term("body", "third")), BooleanClause.Occur.SHOULD));
query.add(new BooleanClause(new TermQuery(new Term("body", "seventh")), BooleanClause.Occur.SHOULD));
query.add(new BooleanClause(new TermQuery(new Term("body", "fifth")), BooleanClause.Occur.SHOULD));
query.setMinimumNumberShouldMatch(1);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
String secondHlValue = "This is the <b>third</b> sentence. This is the fourth sentence.";
String thirdHlValue = "This is the <b>fifth</b> sentence";
XPostingsHighlighter highlighter = new XPostingsHighlighter() {
Iterator<String> valuesIterator = Arrays.asList(firstValue, secondValue, thirdValue).iterator();
// NOTE(review): the third offset is secondValue.length() + 1, whereas the
// sibling tests use firstValue.length() + secondValue.length() + 2 — looks
// inconsistent; confirm whether the offset affects these assertions.
Iterator<Integer> offsetsIterator = Arrays.asList(0, firstValue.length() + 1, secondValue.length() + 1).iterator();
@Override
protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
return new String[][]{new String[]{valuesIterator.next()}};
}
@Override
protected int getOffsetForCurrentValue(String field, int docId) {
return offsetsIterator.next();
}
@Override
protected BreakIterator getBreakIterator(String field) {
return new WholeBreakIterator();
}
@Override
protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
return new Passage[0];
}
};
//first call using the WholeBreakIterator, we get now null as the first value doesn't hold any match
String[] snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], nullValue());
//second call using the WholeBreakIterator, we get now only the second value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(secondHlValue));
//third call using the WholeBreakIterator, we get now only the third value properly highlighted as we wish
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0], equalTo(thirdHlValue));
ir.close();
dir.close();
}
/*
The following are tests that we added to make sure that certain behaviours are possible using the postings highlighter
They don't require our forked version, but only custom versions of methods that can be overridden and are already exposed to subclasses
*/
/*
Tests that it's possible to obtain different fragments per document instead of a big string of concatenated fragments.
We use our own PassageFormatter for that and override the getFormatter method.
*/
// Shows that a custom PassageFormatter (returned from getFormatter) can expose
// discrete Snippet objects per fragment instead of one concatenated string.
@Test
public void testCustomPassageFormatterMultipleFragments() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
Document doc = new Document();
doc.add(body);
body.setStringValue("This test is another test. Not a good sentence. Test test test test.");
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
XPostingsHighlighter highlighter = new XPostingsHighlighter();
IndexSearcher searcher = newSearcher(ir);
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 5);
assertThat(snippets.length, equalTo(1));
//default behaviour that we want to change
assertThat(snippets[0], equalTo("This <b>test</b> is another test. ... <b>Test</b> <b>test</b> <b>test</b> test."));
final CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());
highlighter = new XPostingsHighlighter() {
@Override
protected PassageFormatter getFormatter(String field) {
return passageFormatter;
}
};
final ScoreDoc scoreDocs[] = topDocs.scoreDocs;
int docids[] = new int[scoreDocs.length];
int maxPassages[] = new int[scoreDocs.length];
for (int i = 0; i < docids.length; i++) {
docids[i] = scoreDocs[i].doc;
maxPassages[i] = 5;
}
// highlightFieldsAsObjects preserves the formatter's return type (Snippet[]).
Map<String, Object[]> highlights = highlighter.highlightFieldsAsObjects(new String[]{"body"}, query, searcher, docids, maxPassages);
assertThat(highlights, notNullValue());
assertThat(highlights.size(), equalTo(1));
Object[] objectSnippets = highlights.get("body");
assertThat(objectSnippets, notNullValue());
assertThat(objectSnippets.length, equalTo(1));
assertThat(objectSnippets[0], instanceOf(Snippet[].class));
Snippet[] snippetsSnippet = (Snippet[]) objectSnippets[0];
assertThat(snippetsSnippet.length, equalTo(2));
//multiple fragments as we wish
assertThat(snippetsSnippet[0].getText(), equalTo("This <b>test</b> is another test."));
assertThat(snippetsSnippet[1].getText(), equalTo("<b>Test</b> <b>test</b> <b>test</b> test."));
ir.close();
dir.close();
}
/*
Tests that it's possible to return no fragments when there's nothing to highlight
We do that by overriding the getEmptyHighlight method
*/
// Shows how to suppress default "empty highlight" passages: overriding
// getEmptyHighlight to return no passages yields null snippets for docs
// that match on a different field ("none") but not on the one highlighted.
@Test
public void testHighlightWithNoMatches() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
Field none = new Field("none", "", offsetsType);
Document doc = new Document();
doc.add(body);
doc.add(none);
body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
none.setStringValue(body.stringValue());
iw.addDocument(doc);
body.setStringValue("Highlighting the first term. Hope it works.");
none.setStringValue(body.stringValue());
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
IndexSearcher searcher = newSearcher(ir);
XPostingsHighlighter highlighter = new XPostingsHighlighter();
// The query matches on "none", so the highlighted field "body" has no hits.
Query query = new TermQuery(new Term("none", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(2));
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 1);
//both matching docs produce an entry even though "body" itself has no match
assertThat(snippets.length, equalTo(2));
//default behaviour: returns the first sentence with num passages = 1
assertThat(snippets[0], equalTo("This is a test. "));
assertThat(snippets[1], equalTo("Highlighting the first term. "));
highlighter = new XPostingsHighlighter() {
@Override
protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
return new Passage[0];
}
};
snippets = highlighter.highlight("body", query, searcher, topDocs);
//Two null snippets if there are no matches, as we wish
assertThat(snippets.length, equalTo(2));
assertThat(snippets[0], nullValue());
assertThat(snippets[1], nullValue());
ir.close();
dir.close();
}
/*
Tests that it's possible to avoid having fragments that span across different values
We do that by overriding the getMultiValuedSeparator and using a proper separator between values
*/
// Shows that overriding getMultiValuedSeparator with U+2029 keeps fragments
// from spanning across different values of a multi-valued field.
@Test
public void testCustomMultiValuedSeparator() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
Document doc = new Document();
doc.add(body);
body.setStringValue("This is a test. Just a test highlighting from postings");
Field body2 = new Field("body", "", offsetsType);
doc.add(body2);
body2.setStringValue("highlighter.");
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
IndexSearcher searcher = newSearcher(ir);
XPostingsHighlighter highlighter = new XPostingsHighlighter();
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
//default behaviour: getting a fragment that spans across different values
assertThat(snippets[0], equalTo("Just a test <b>highlighting</b> from postings highlighter."));
highlighter = new XPostingsHighlighter() {
@Override
protected char getMultiValuedSeparator(String field) {
//U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting
return 8233;
}
};
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertThat(snippets.length, equalTo(1));
//getting a fragment that doesn't span across different values since we used the paragraph separator between the different values
assertThat(snippets[0], equalTo("Just a test <b>highlighting</b> from postings" + (char)8233));
ir.close();
dir.close();
}
/*
The following are all the existing postings highlighter tests, to make sure we don't have regression in our own fork
*/
// Baseline regression test carried over from Lucene's PostingsHighlighter:
// one snippet per matching document with default settings.
@Test
public void testBasics() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
Field body = new Field("body", "", offsetsType);
Document doc = new Document();
doc.add(body);
body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
iw.addDocument(doc);
body.setStringValue("Highlighting the first term. Hope it works.");
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.close();
IndexSearcher searcher = newSearcher(ir);
XPostingsHighlighter highlighter = new XPostingsHighlighter();
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
assertEquals("Just a test <b>highlighting</b> from postings. ", snippets[0]);
assertEquals("<b>Highlighting</b> the first term. ", snippets[1]);
ir.close();
dir.close();
}
/**
 * A second match lying past maxLength is dropped; the snippet keeps the first
 * highlighted term and is truncated mid-word at the length limit.
 */
public void testFormatWithMatchExceedingContentLength2() throws Exception {
    final String content = "123 TEST 01234 TEST";
    final String[] fragments = formatWithMatchExceedingContentLength(content);
    assertEquals(1, fragments.length);
    assertEquals("123 <b>TEST</b> 01234 TE", fragments[0]);
}
/**
 * All matches lie past maxLength; the truncated content comes back with no
 * highlight tags at all.
 */
public void testFormatWithMatchExceedingContentLength3() throws Exception {
    final String content = "123 5678 01234 TEST TEST";
    final String[] fragments = formatWithMatchExceedingContentLength(content);
    assertEquals(1, fragments.length);
    assertEquals("123 5678 01234 TE", fragments[0]);
}
/**
 * The only matching term starts beyond maxLength (17), so no passage can be
 * highlighted; the raw content truncated at maxLength is returned instead.
 */
public void testFormatWithMatchExceedingContentLength() throws Exception {
    String bodyText = "123 5678 01234 TEST";
    String[] snippets = formatWithMatchExceedingContentLength(bodyText);
    assertEquals(1, snippets.length);
    // LUCENE-5166: no highlighted passage — the snippet is the truncated content with no <b> tags
    assertEquals("123 5678 01234 TE", snippets[0]);
}
/**
 * Indexes {@code bodyText} as a single stored "body" field with offsets,
 * searches for the term "test" and highlights it with a highlighter whose
 * maxLength (17) is shorter than the content.
 *
 * @param bodyText the document content to index and highlight
 * @return the snippets produced for the single matching document
 * @throws IOException on index I/O failure
 */
private String[] formatWithMatchExceedingContentLength(String bodyText) throws IOException {
    int maxLength = 17;
    final Analyzer analyzer = new MockAnalyzer(random());
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    final FieldType fieldType = new FieldType(TextField.TYPE_STORED);
    fieldType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    final Field body = new Field("body", bodyText, fieldType);
    Document doc = new Document();
    doc.add(body);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    // maxLength deliberately shorter than the content so snippets get truncated
    XPostingsHighlighter highlighter = new XPostingsHighlighter(maxLength);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
    ir.close();
    dir.close();
    return snippets;
}
// simple test highlighting the last word of the content (no trailing sentence boundary).
public void testHighlightLastWord() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
    assertEquals(1, snippets.length);
    assertEquals("This is a <b>test</b>", snippets[0]);
    ir.close();
    dir.close();
}
// simple test with one-sentence documents: the whole sentence becomes the snippet.
@Test
public void testOneSentence() throws Exception {
    Directory dir = newDirectory();
    // use simpleanalyzer for more natural tokenization (else "test." is a token)
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test.");
    iw.addDocument(doc);
    body.setStringValue("Test a one sentence document.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(2, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
    assertEquals(2, snippets.length);
    assertEquals("This is a <b>test</b>.", snippets[0]);
    assertEquals("<b>Test</b> a one sentence document.", snippets[1]);
    ir.close();
    dir.close();
}
// simple test with multiple values that make a result longer than maxLength:
// the snippet must still respect the 40-char limit (excluding the added tags).
@Test
public void testMaxLengthWithMultivalue() throws Exception {
    Directory dir = newDirectory();
    // use simpleanalyzer for more natural tokenization (else "test." is a token)
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Document doc = new Document();
    // three identical values of the same field in one document
    for(int i = 0; i < 3 ; i++) {
      Field body = new Field("body", "", offsetsType);
      body.setStringValue("This is a multivalued field");
      doc.add(body);
    }
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter(40);
    Query query = new TermQuery(new Term("body", "field"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
    assertEquals(1, snippets.length);
    assertTrue("Snippet should have maximum 40 characters plus the pre and post tags",
        snippets[0].length() == (40 + "<b></b>".length()));
    ir.close();
    dir.close();
}
/** Highlights two different fields of the same documents in one call via highlightFields(). */
@Test
public void testMultipleFields() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Field title = new Field("title", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    doc.add(title);
    body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    title.setStringValue("I am hoping for the best.");
    iw.addDocument(doc);
    body.setStringValue("Highlighting the first term. Hope it works.");
    title.setStringValue("But best may not be good enough.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    // disjunction over both fields so each document matches on each field
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD);
    query.add(new TermQuery(new Term("title", "best")), BooleanClause.Occur.SHOULD);
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(2, topDocs.totalHits);
    Map<String,String[]> snippets = highlighter.highlightFields(new String [] { "body", "title" }, query, searcher, topDocs);
    assertEquals(2, snippets.size());
    assertEquals("Just a test <b>highlighting</b> from postings. ", snippets.get("body")[0]);
    assertEquals("<b>Highlighting</b> the first term. ", snippets.get("body")[1]);
    assertEquals("I am hoping for the <b>best</b>.", snippets.get("title")[0]);
    assertEquals("But <b>best</b> may not be good enough.", snippets.get("title")[1]);
    ir.close();
    dir.close();
}
/** A boolean query with several SHOULD terms: all matching terms are highlighted in the snippet. */
@Test
public void testMultipleTerms() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    iw.addDocument(doc);
    body.setStringValue("Highlighting the first term. Hope it works.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD);
    query.add(new TermQuery(new Term("body", "just")), BooleanClause.Occur.SHOULD);
    query.add(new TermQuery(new Term("body", "first")), BooleanClause.Occur.SHOULD);
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(2, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
    assertEquals(2, snippets.length);
    assertEquals("<b>Just</b> a test <b>highlighting</b> from postings. ", snippets[0]);
    assertEquals("<b>Highlighting</b> the <b>first</b> term. ", snippets[1]);
    ir.close();
    dir.close();
}
/** Requests up to 2 passages per doc; non-adjacent passages are joined with "... ". */
@Test
public void testMultiplePassages() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    iw.addDocument(doc);
    body.setStringValue("This test is another test. Not a good sentence. Test test test test.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(2, topDocs.totalHits);
    // maxPassages = 2
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(2, snippets.length);
    assertEquals("This is a <b>test</b>. Just a <b>test</b> highlighting from postings. ", snippets[0]);
    assertEquals("This <b>test</b> is another <b>test</b>. ... <b>Test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[1]);
    ir.close();
    dir.close();
}
/**
 * Highlighting a field indexed without offsets (positions only, or a StringField)
 * must fail with IllegalArgumentException rather than produce wrong results.
 */
@Test
public void testUserFailedToIndexOffsets() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    // positions but deliberately NO offsets
    FieldType positionsType = new FieldType(TextField.TYPE_STORED);
    positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    Field body = new Field("body", "", positionsType);
    Field title = new StringField("title", "", Field.Store.YES);
    Document doc = new Document();
    doc.add(body);
    doc.add(title);
    body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    title.setStringValue("test");
    iw.addDocument(doc);
    body.setStringValue("This test is another test. Not a good sentence. Test test test test.");
    title.setStringValue("test");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(2, topDocs.totalHits);
    try {
      highlighter.highlight("body", query, searcher, topDocs, 2);
      fail("did not hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected: "body" lacks offsets
    }
    try {
      highlighter.highlight("title", new TermQuery(new Term("title", "test")), searcher, topDocs, 2);
      fail("did not hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected: StringField "title" lacks offsets
    }
    ir.close();
    dir.close();
}
/** Phrase query over a long real-world paragraph: both phrase terms must be highlighted. */
@Test
public void testBuddhism() throws Exception {
    String text = "This eight-volume set brings together seminal papers in Buddhist studies from a vast " +
        "range of academic disciplines published over the last forty years. With a new introduction " +
        "by the editor, this collection is a unique and unrivalled research resource for both " +
        "student and scholar. Coverage includes: - Buddhist origins; early history of Buddhism in " +
        "South and Southeast Asia - early Buddhist Schools and Doctrinal History; Theravada Doctrine " +
        "- the Origins and nature of Mahayana Buddhism; some Mahayana religious topics - Abhidharma " +
        "and Madhyamaka - Yogacara, the Epistemological tradition, and Tathagatagarbha - Tantric " +
        "Buddhism (Including China and Japan); Buddhism in Nepal and Tibet - Buddhism in South and " +
        "Southeast Asia, and - Buddhism in China, East Asia, and Japan.";
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);
    FieldType positionsType = new FieldType(TextField.TYPE_STORED);
    positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", text, positionsType);
    Document document = new Document();
    document.add(body);
    iw.addDocument(document);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    PhraseQuery query = new PhraseQuery();
    query.add(new Term("body", "buddhist"));
    query.add(new Term("body", "origins"));
    TopDocs topDocs = searcher.search(query, 10);
    assertEquals(1, topDocs.totalHits);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(1, snippets.length);
    assertTrue(snippets[0].contains("<b>Buddhist</b> <b>origins</b>"));
    ir.close();
    dir.close();
}
/** Regression check: highlighting a phrase must not duplicate text ("<b>Curious</b>Curious"). */
@Test
public void testCuriousGeorge() throws Exception {
    String text = "It’s the formula for success for preschoolers—Curious George and fire trucks! " +
        "Curious George and the Firefighters is a story based on H. A. and Margret Rey’s " +
        "popular primate and painted in the original watercolor and charcoal style. " +
        "Firefighters are a famously brave lot, but can they withstand a visit from one curious monkey?";
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);
    FieldType positionsType = new FieldType(TextField.TYPE_STORED);
    positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", text, positionsType);
    Document document = new Document();
    document.add(body);
    iw.addDocument(document);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    PhraseQuery query = new PhraseQuery();
    query.add(new Term("body", "curious"));
    query.add(new Term("body", "george"));
    TopDocs topDocs = searcher.search(query, 10);
    assertEquals(1, topDocs.totalHits);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(1, snippets.length);
    // the bug being guarded against produced doubled text around the highlight
    assertFalse(snippets[0].contains("<b>Curious</b>Curious"));
    ir.close();
    dir.close();
}
/**
 * Highlights a large single-line document loaded from the CambridgeMA.utf8
 * test resource, with an effectively unbounded maxLength, and checks that the
 * expected terms are highlighted.
 *
 * Fix: the resource reader is now closed via try-with-resources, so it no
 * longer leaks if readLine() throws.
 */
@Test
public void testCambridgeMA() throws Exception {
    String text;
    // try-with-resources: the original leaked the reader when readLine() threw
    try (BufferedReader r = new BufferedReader(new InputStreamReader(
        this.getClass().getResourceAsStream("CambridgeMA.utf8"), "UTF-8"))) {
      text = r.readLine();
    }
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);
    FieldType positionsType = new FieldType(TextField.TYPE_STORED);
    positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", text, positionsType);
    Document document = new Document();
    document.add(body);
    iw.addDocument(document);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("body", "porter")), BooleanClause.Occur.SHOULD);
    query.add(new TermQuery(new Term("body", "square")), BooleanClause.Occur.SHOULD);
    query.add(new TermQuery(new Term("body", "massachusetts")), BooleanClause.Occur.SHOULD);
    TopDocs topDocs = searcher.search(query, 10);
    assertEquals(1, topDocs.totalHits);
    // Integer.MAX_VALUE-1 keeps the whole document within the length limit
    XPostingsHighlighter highlighter = new XPostingsHighlighter(Integer.MAX_VALUE-1);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(1, snippets.length);
    assertTrue(snippets[0].contains("<b>Square</b>"));
    assertTrue(snippets[0].contains("<b>Porter</b>"));
    ir.close();
    dir.close();
}
/** Passage ranking: with maxPassages=2, the two highest-scoring passages are chosen and joined with "... ". */
@Test
public void testPassageRanking() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test.  Just highlighting from postings. This is also a much sillier test.  Feel free to test test test test test test test.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(1, snippets.length);
    assertEquals("This is a <b>test</b>.  ... Feel free to <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[0]);
    ir.close();
    dir.close();
}
/** Terms inside a MUST_NOT clause must never be highlighted. */
@Test
public void testBooleanMustNot() throws Exception {
    Directory dir = newDirectory();
    Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, analyzer);
    FieldType positionsType = new FieldType(TextField.TYPE_STORED);
    positionsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "This sentence has both terms.  This sentence has only terms.", positionsType);
    Document document = new Document();
    document.add(body);
    iw.addDocument(document);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("body", "terms")), BooleanClause.Occur.SHOULD);
    // nested query holding the negated term "both"
    BooleanQuery query2 = new BooleanQuery();
    query.add(query2, BooleanClause.Occur.SHOULD);
    query2.add(new TermQuery(new Term("body", "both")), BooleanClause.Occur.MUST_NOT);
    TopDocs topDocs = searcher.search(query, 10);
    assertEquals(1, topDocs.totalHits);
    XPostingsHighlighter highlighter = new XPostingsHighlighter(Integer.MAX_VALUE-1);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(1, snippets.length);
    assertFalse(snippets[0].contains("<b>both</b>"));
    ir.close();
    dir.close();
}
/** A WholeBreakIterator makes the entire field one passage, so every match in the text is highlighted. */
@Test
public void testHighlightAllText() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test.  Just highlighting from postings. This is also a much sillier test.  Feel free to test test test test test test test.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
        @Override
        protected BreakIterator getBreakIterator(String field) {
          // treat the whole field value as a single passage
          return new WholeBreakIterator();
        }
      };
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(1, snippets.length);
    assertEquals("This is a <b>test</b>.  Just highlighting from postings. This is also a much sillier <b>test</b>.  Feel free to <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[0]);
    ir.close();
    dir.close();
}
/** Uses the docID-array overload of highlightFields() instead of passing TopDocs. */
@Test
public void testSpecificDocIDs() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    iw.addDocument(doc);
    body.setStringValue("Highlighting the first term. Hope it works.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    Query query = new TermQuery(new Term("body", "highlighting"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(2, topDocs.totalHits);
    ScoreDoc[] hits = topDocs.scoreDocs;
    int[] docIDs = new int[2];
    docIDs[0] = hits[0].doc;
    docIDs[1] = hits[1].doc;
    // one passage per document requested explicitly
    String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 1 }).get("body");
    assertEquals(2, snippets.length);
    assertEquals("Just a test <b>highlighting</b> from postings. ", snippets[0]);
    assertEquals("<b>Highlighting</b> the first term. ", snippets[1]);
    ir.close();
    dir.close();
}
/**
 * The field is NOT stored; the text is supplied by overriding loadFieldValues(),
 * proving the highlighter uses the custom value source rather than stored fields.
 */
@Test
public void testCustomFieldValueSource() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    Document doc = new Document();
    // TYPE_NOT_STORED: the highlighter cannot fall back to stored field values
    FieldType offsetsType = new FieldType(TextField.TYPE_NOT_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    final String text = "This is a test.  Just highlighting from postings. This is also a much sillier test.  Feel free to test test test test test test test.";
    Field body = new Field("body", text, offsetsType);
    doc.add(body);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
        @Override
        protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException {
          assertThat(fields.length, equalTo(1));
          assertThat(docids.length, equalTo(1));
          String[][] contents = new String[1][1];
          contents[0][0] = text;
          return contents;
        }

        @Override
        protected BreakIterator getBreakIterator(String field) {
          return new WholeBreakIterator();
        }
      };
    Query query = new TermQuery(new Term("body", "test"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
    assertEquals(1, snippets.length);
    assertEquals("This is a <b>test</b>.  Just highlighting from postings. This is also a much sillier <b>test</b>.  Feel free to <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b> <b>test</b>.", snippets[0]);
    ir.close();
    dir.close();
}
/** Make sure highlighter returns the first N sentences if
 *  there were no hits. */
@Test
public void testEmptyHighlights() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Document doc = new Document();

    Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
    doc.add(body);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    // query term does not occur in the document: no hits to highlight
    Query query = new TermQuery(new Term("body", "highlighting"));
    int[] docIDs = new int[] {0};
    String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
    assertEquals(1, snippets.length);
    // falls back to the first 2 sentences of the content
    assertEquals("test this is.  another sentence this test has.  ", snippets[0]);
    ir.close();
    dir.close();
}
/** Make sure we can customize how an empty
 *  highlight is returned (here: no fallback passages at all). */
@Test
public void testCustomEmptyHighlights() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
;
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Document doc = new Document();

    Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
    doc.add(body);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter() {
        @Override
        public Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) {
          // return no passages instead of the default first-N-sentences fallback
          return new Passage[0];
        }
      };
    Query query = new TermQuery(new Term("body", "highlighting"));
    int[] docIDs = new int[] {0};
    String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
    assertEquals(1, snippets.length);
    assertNull(snippets[0]);
    ir.close();
    dir.close();
}
/** Make sure highlighter returns the whole text when there
 *  are no hits and a WholeBreakIterator is used. */
@Test
public void testEmptyHighlightsWhole() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Document doc = new Document();

    Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
    doc.add(body);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter(10000) {
        @Override
        protected BreakIterator getBreakIterator(String field) {
          return new WholeBreakIterator();
        }
      };
    Query query = new TermQuery(new Term("body", "highlighting"));
    int[] docIDs = new int[] {0};
    String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
    assertEquals(1, snippets.length);
    // the "passage" is the entire field value since the break iterator never splits
    assertEquals("test this is.  another sentence this test has.  far away is that planet.", snippets[0]);
    ir.close();
    dir.close();
}
/** Make sure highlighter is OK with an entirely missing
 *  field: it returns null rather than throwing. */
@Test
public void testFieldIsMissing() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Document doc = new Document();

    Field body = new Field("body", "test this is.  another sentence this test has.  far away is that planet.", offsetsType);
    doc.add(body);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    // "bogus" was never indexed
    Query query = new TermQuery(new Term("bogus", "highlighting"));
    int[] docIDs = new int[] {0};
    String snippets[] = highlighter.highlightFields(new String[] {"bogus"}, query, searcher, docIDs, new int[] { 2 }).get("bogus");
    assertEquals(1, snippets.length);
    assertNull(snippets[0]);
    ir.close();
    dir.close();
}
/** A field whose stored value is only whitespace: the whitespace itself is returned as the snippet. */
@Test
public void testFieldIsJustSpace() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

    Document doc = new Document();
    doc.add(new Field("body", "   ", offsetsType));
    doc.add(new Field("id", "id", offsetsType));
    iw.addDocument(doc);

    doc = new Document();
    doc.add(new Field("body", "something", offsetsType));
    iw.addDocument(doc);

    IndexReader ir = iw.getReader();
    iw.close();

    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    // locate the whitespace-only document via its id field
    int docID = searcher.search(new TermQuery(new Term("id", "id")), 1).scoreDocs[0].doc;

    Query query = new TermQuery(new Term("body", "highlighting"));
    int[] docIDs = new int[1];
    docIDs[0] = docID;
    String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
    assertEquals(1, snippets.length);
    assertEquals("   ", snippets[0]);

    ir.close();
    dir.close();
}
/** A field whose stored value is the empty string yields a null snippet. */
@Test
public void testFieldIsEmptyString() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);

    Document doc = new Document();
    doc.add(new Field("body", "", offsetsType));
    doc.add(new Field("id", "id", offsetsType));
    iw.addDocument(doc);

    doc = new Document();
    doc.add(new Field("body", "something", offsetsType));
    iw.addDocument(doc);

    IndexReader ir = iw.getReader();
    iw.close();

    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    // locate the empty-body document via its id field
    int docID = searcher.search(new TermQuery(new Term("id", "id")), 1).scoreDocs[0].doc;

    Query query = new TermQuery(new Term("body", "highlighting"));
    int[] docIDs = new int[1];
    docIDs[0] = docID;
    String snippets[] = highlighter.highlightFields(new String[] {"body"}, query, searcher, docIDs, new int[] { 2 }).get("body");
    assertEquals(1, snippets.length);
    assertNull(snippets[0]);

    ir.close();
    dir.close();
}
@Test
public void testMultipleDocs() throws Exception {
    // Verifies that highlight(field, query, searcher, hits) returns one snippet
    // per hit, in hit order, when many documents match — with random commits so
    // hits are spread over multiple segments.
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    // Postings highlighter requires offsets to be indexed on the highlighted field.
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    int numDocs = atLeast(100);
    for(int i=0;i<numDocs;i++) {
        Document doc = new Document();
        String content = "the answer is " + i;
        // Even-numbered docs get extra content so expected snippets differ by doc.
        if ((i & 1) == 0) {
            content += " some more terms";
        }
        doc.add(new Field("body", content, offsetsType));
        doc.add(newStringField("id", ""+i, Field.Store.YES));
        iw.addDocument(doc);
        // Random commits force a multi-segment index.
        if (random().nextInt(10) == 2) {
            iw.commit();
        }
    }
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    Query query = new TermQuery(new Term("body", "answer"));
    TopDocs hits = searcher.search(query, numDocs);
    assertEquals(numDocs, hits.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, hits);
    assertEquals(numDocs, snippets.length);
    // Snippets must line up positionally with hits.scoreDocs; re-derive the
    // expected content from the stored "id" of each hit.
    for(int hit=0;hit<numDocs;hit++) {
        Document doc = searcher.doc(hits.scoreDocs[hit].doc);
        int id = Integer.parseInt(doc.get("id"));
        String expected = "the <b>answer</b> is " + id;
        if ((id & 1) == 0) {
            expected += " some more terms";
        }
        assertEquals(expected, snippets[hit]);
    }
    ir.close();
    dir.close();
}
@Test
public void testMultipleSnippetSizes() throws Exception {
    // Verifies that highlightFields accepts a per-field maxPassages array:
    // "title" is limited to 1 passage while "body" gets 2 from the same document.
    Directory dir = newDirectory();
    // SIMPLE tokenizer gives natural tokenization (otherwise "test." is one token).
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Field title = new Field("title", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    doc.add(title);
    // Identical content in both fields, so only maxPassages explains the
    // difference between the two returned highlights.
    body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    title.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter();
    BooleanQuery query = new BooleanQuery();
    query.add(new TermQuery(new Term("body", "test")), BooleanClause.Occur.SHOULD);
    query.add(new TermQuery(new Term("title", "test")), BooleanClause.Occur.SHOULD);
    // maxPassages is parallel to the fields array: title -> 1, body -> 2.
    Map<String,String[]> snippets = highlighter.highlightFields(new String[] { "title", "body" }, query, searcher, new int[] { 0 }, new int[] { 1, 2 });
    String titleHighlight = snippets.get("title")[0];
    String bodyHighlight = snippets.get("body")[0];
    assertEquals("This is a <b>test</b>. ", titleHighlight);
    assertEquals("This is a <b>test</b>. Just a <b>test</b> highlighting from postings. ", bodyHighlight);
    ir.close();
    dir.close();
}
public void testEncode() throws Exception {
    // Exercises a custom DefaultPassageFormatter supplied via getFormatter().
    // NOTE(review): the 4th DefaultPassageFormatter ctor arg is true here and the
    // expected snippet still contains raw "<i>postings</i>" — confirm the intended
    // semantics of that flag against the formatter's API docs.
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    // Source text deliberately contains markup to exercise the formatter.
    body.setStringValue("This is a test. Just a test highlighting from <i>postings</i>. Feel free to ignore.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    // Uses the stock PostingsHighlighter (not the X variant) with an overridden formatter.
    PostingsHighlighter highlighter = new PostingsHighlighter() {
        @Override
        protected PassageFormatter getFormatter(String field) {
            return new DefaultPassageFormatter("<b>", "</b>", "... ", true);
        }
    };
    Query query = new TermQuery(new Term("body", "highlighting"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
    assertEquals(1, snippets.length);
    assertEquals("Just a test <b>highlighting</b> from <i>postings</i>. ", snippets[0]);
    ir.close();
    dir.close();
}
/** customizing the gap separator to force a sentence break */
public void testGapSeparator() throws Exception {
    // A multivalued field's values are concatenated for highlighting; the
    // separator char returned by getMultiValuedSeparator() joins them. Using
    // U+2029 (paragraph separator) forces a sentence break between values, so
    // the snippet stops at the end of the first value.
    Directory dir = newDirectory();
    // use simpleanalyzer for more natural tokenization (else "test." is a token)
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Document doc = new Document();
    // Two instances of the same field name -> a multivalued field.
    Field body1 = new Field("body", "", offsetsType);
    body1.setStringValue("This is a multivalued field");
    doc.add(body1);
    Field body2 = new Field("body", "", offsetsType);
    body2.setStringValue("This is something different");
    doc.add(body2);
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    PostingsHighlighter highlighter = new PostingsHighlighter() {
        @Override
        protected char getMultiValuedSeparator(String field) {
            assert field.equals("body");
            return '\u2029';
        }
    };
    Query query = new TermQuery(new Term("body", "field"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
    assertEquals(1, snippets.length);
    // Snippet ends with the custom separator, proving it delimited the first value.
    assertEquals("This is a multivalued <b>field</b>\u2029", snippets[0]);
    ir.close();
    dir.close();
}
// LUCENE-4906
public void testObjectFormatter() throws Exception {
    // A PassageFormatter may return an arbitrary Object (here a String[2]) per
    // field; highlightFieldsAsObjects must pass that object through untouched.
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    iwc.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
    FieldType offsetsType = new FieldType(TextField.TYPE_STORED);
    offsetsType.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
    Field body = new Field("body", "", offsetsType);
    Document doc = new Document();
    doc.add(body);
    body.setStringValue("This is a test. Just a test highlighting from postings. Feel free to ignore.");
    iw.addDocument(doc);
    IndexReader ir = iw.getReader();
    iw.close();
    IndexSearcher searcher = newSearcher(ir);
    XPostingsHighlighter highlighter = new XPostingsHighlighter() {
        @Override
        protected PassageFormatter getFormatter(String field) {
            return new PassageFormatter() {
                // Delegate the actual snippet text to the default formatter.
                PassageFormatter defaultFormatter = new DefaultPassageFormatter();
                @Override
                public String[] format(Passage passages[], String content) {
                    // Just turns the String snippet into a length 2
                    // array of String
                    return new String[] {"blah blah", defaultFormatter.format(passages, content).toString()};
                }
            };
        }
    };
    Query query = new TermQuery(new Term("body", "highlighting"));
    TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
    assertEquals(1, topDocs.totalHits);
    int[] docIDs = new int[1];
    docIDs[0] = topDocs.scoreDocs[0].doc;
    Map<String,Object[]> snippets = highlighter.highlightFieldsAsObjects(new String[]{"body"}, query, searcher, docIDs, new int[] {1});
    Object[] bodySnippets = snippets.get("body");
    assertEquals(1, bodySnippets.length);
    // The custom formatter's String[2] must come back verbatim as the snippet object.
    assertTrue(Arrays.equals(new String[] {"blah blah", "Just a test <b>highlighting</b> from postings. "}, (String[]) bodySnippets[0]));
    ir.close();
    dir.close();
}
} | 0true
| src_test_java_org_apache_lucene_search_postingshighlight_XPostingsHighlighterTests.java |
4,183 | public class IndexShardSnapshotAndRestoreService extends AbstractIndexShardComponent {
private final InternalIndexShard indexShard;
private final RepositoriesService repositoriesService;
private final RestoreService restoreService;
/**
 * Wires the shard-level snapshot/restore service to its collaborators.
 * The injected {@link IndexShard} is down-cast to {@code InternalIndexShard}
 * because this service needs its snapshotIndex() capability — the injector is
 * assumed to always supply that implementation.
 */
@Inject
public IndexShardSnapshotAndRestoreService(ShardId shardId, @IndexSettings Settings indexSettings, IndexShard indexShard, RepositoriesService repositoriesService, RestoreService restoreService) {
    super(shardId, indexSettings);
    this.indexShard = (InternalIndexShard) indexShard;
    this.repositoriesService = repositoriesService;
    this.restoreService = restoreService;
}
/**
 * Creates a snapshot of this shard in the repository named by the snapshot id.
 * Preconditions (each violation throws {@link IndexShardSnapshotFailedException}):
 * the shard must be a primary, must not be relocating, and must be fully recovered.
 *
 * @param snapshotId snapshot id (also identifies the target repository)
 * @param snapshotStatus snapshot status, updated by the repository as the snapshot progresses
 */
public void snapshot(final SnapshotId snapshotId, final IndexShardSnapshotStatus snapshotStatus) {
    IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(snapshotId.getRepository());
    if (!indexShard.routingEntry().primary()) {
        throw new IndexShardSnapshotFailedException(shardId, "snapshot should be performed only on primary");
    }
    if (indexShard.routingEntry().relocating()) {
        // do not snapshot when in the process of relocation of primaries so we won't get conflicts
        throw new IndexShardSnapshotFailedException(shardId, "cannot snapshot while relocating");
    }
    if (indexShard.state() == IndexShardState.CREATED || indexShard.state() == IndexShardState.RECOVERING) {
        // shard has just been created, or still recovering
        throw new IndexShardSnapshotFailedException(shardId, "shard didn't fully recover yet");
    }
    try {
        // Pin a consistent commit point for the duration of the snapshot.
        SnapshotIndexCommit snapshotIndexCommit = indexShard.snapshotIndex();
        try {
            indexShardRepository.snapshot(snapshotId, shardId, snapshotIndexCommit, snapshotStatus);
            if (logger.isDebugEnabled()) {
                StringBuilder sb = new StringBuilder();
                sb.append("snapshot (").append(snapshotId.getSnapshot()).append(") completed to ").append(indexShardRepository).append(", took [").append(TimeValue.timeValueMillis(snapshotStatus.time())).append("]\n");
                sb.append("    index    : version [").append(snapshotStatus.indexVersion()).append("], number_of_files [").append(snapshotStatus.numberOfFiles()).append("] with total_size [").append(new ByteSizeValue(snapshotStatus.totalSize())).append("]\n");
                logger.debug(sb.toString());
            }
        } finally {
            // Always release the commit point, even if the repository write failed.
            snapshotIndexCommit.release();
        }
    } catch (SnapshotFailedEngineException e) {
        // Already the right exception type — propagate unchanged.
        throw e;
    } catch (IndexShardSnapshotFailedException e) {
        throw e;
    } catch (Throwable e) {
        // Wrap anything else so callers see a shard-scoped snapshot failure.
        throw new IndexShardSnapshotFailedException(shardId, "Failed to snapshot", e);
    }
}
/**
 * Restores this shard from the {@link RestoreSource} attached to its routing
 * entry, then notifies the restore service of completion. If the shard is being
 * restored under a different index name, the snapshot-side shard id is rebuilt
 * from the source index name.
 *
 * @param recoveryStatus recovery status, updated by the repository during restore
 * @throws IndexShardRestoreFailedException if no restore source is present or the restore fails
 */
public void restore(final RecoveryStatus recoveryStatus) {
    RestoreSource restoreSource = indexShard.routingEntry().restoreSource();
    if (restoreSource == null) {
        throw new IndexShardRestoreFailedException(shardId, "empty restore source");
    }
    if (logger.isTraceEnabled()) {
        logger.trace("[{}] restoring shard  [{}]", restoreSource.snapshotId(), shardId);
    }
    try {
        IndexShardRepository indexShardRepository = repositoriesService.indexShardRepository(restoreSource.snapshotId().getRepository());
        // The snapshot may have been taken under a different index name; map the
        // local shard id back to the shard id recorded in the snapshot.
        ShardId snapshotShardId = shardId;
        if (!shardId.getIndex().equals(restoreSource.index())) {
            snapshotShardId = new ShardId(restoreSource.index(), shardId.id());
        }
        indexShardRepository.restore(restoreSource.snapshotId(), shardId, snapshotShardId, recoveryStatus);
        restoreService.indexShardRestoreCompleted(restoreSource.snapshotId(), shardId);
    } catch (Throwable t) {
        throw new IndexShardRestoreFailedException(shardId, "restore failed", t);
    }
}
} | 1no label
| src_main_java_org_elasticsearch_index_snapshots_IndexShardSnapshotAndRestoreService.java |
781 | METRIC_TYPE.COUNTER, new OProfilerHookValue() {
public Object getValue() {
return alertTimes;
}
}); | 1no label
| core_src_main_java_com_orientechnologies_orient_core_memory_OMemoryWatchDog.java |
1,988 | @Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_CUSTOMER_PHONE", uniqueConstraints = @UniqueConstraint(name="CSTMR_PHONE_UNIQUE_CNSTRNT", columnNames = { "CUSTOMER_ID", "PHONE_NAME" }))
@AdminPresentationMergeOverrides(
{
@AdminPresentationMergeOverride(name = "phone.phoneNumber", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.PROMINENT, booleanOverrideValue = true))
}
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE)
public class CustomerPhoneImpl implements CustomerPhone{
private static final long serialVersionUID = 1L;
// Primary key, generated via Broadleaf's table-based id generator so ids can be
// overridden/segmented per entity type.
@Id
@GeneratedValue(generator = "CustomerPhoneId")
@GenericGenerator(
    name="CustomerPhoneId",
    strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
    parameters = {
        @Parameter(name="segment_value", value="CustomerPhoneImpl"),
        @Parameter(name="entity_name", value="org.broadleafcommerce.profile.core.domain.CustomerPhoneImpl")
    }
)
@Column(name = "CUSTOMER_PHONE_ID")
protected Long id;

// User-facing label for this phone (e.g. "Home"); unique per customer via the
// table-level (CUSTOMER_ID, PHONE_NAME) constraint.
@Column(name = "PHONE_NAME")
@AdminPresentation(friendlyName = "CustomerPhoneImpl_Phone_Name", order=1, group = "CustomerPhoneImpl_Identification",
        groupOrder = 1, prominent = true, gridOrder = 1)
protected String phoneName;

// Owning customer; required (optional=false) and hidden from admin screens.
// PERSIST/MERGE only — deleting a phone must not cascade to the customer.
@ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE}, targetEntity = CustomerImpl.class, optional=false)
@JoinColumn(name = "CUSTOMER_ID")
@AdminPresentation(excluded = true, visibility = VisibilityEnum.HIDDEN_ALL)
protected Customer customer;

// The phone record itself; full cascade, so the Phone row follows this xref's lifecycle.
@ManyToOne(cascade = CascadeType.ALL, targetEntity = PhoneImpl.class, optional=false)
@JoinColumn(name = "PHONE_ID")
@Index(name="CUSTPHONE_PHONE_INDEX", columnNames={"PHONE_ID"})
protected Phone phone;
// Simple accessors implementing the CustomerPhone contract; no logic beyond
// field get/set.
@Override
public Long getId() {
    return id;
}
@Override
public void setId(Long id) {
    this.id = id;
}
@Override
public String getPhoneName() {
    return phoneName;
}
@Override
public void setPhoneName(String phoneName) {
    this.phoneName = phoneName;
}
@Override
public Customer getCustomer() {
    return customer;
}
@Override
public void setCustomer(Customer customer) {
    this.customer = customer;
}
@Override
public Phone getPhone() {
    return phone;
}
@Override
public void setPhone(Phone phone) {
    this.phone = phone;
}
// Hash is computed from the business fields (customer, phone, phoneName) and
// deliberately excludes the generated id.
// NOTE(review): equals() below short-circuits on equal non-null ids, so two
// instances equal-by-id but differing in fields would have different hash codes,
// breaking the equals/hashCode contract in hashed collections. This is a common
// JPA-entity pattern here — confirm it matches the project's convention before
// changing.
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((customer == null) ? 0 : customer.hashCode());
    result = prime * result + ((phone == null) ? 0 : phone.hashCode());
    result = prime * result + ((phoneName == null) ? 0 : phoneName.hashCode());
    return result;
}
// Equality: same persisted id when both sides have one; otherwise falls back to
// field-by-field comparison of customer, phone and phoneName.
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null)
        return false;
    if (getClass() != obj.getClass())
        return false;
    CustomerPhoneImpl other = (CustomerPhoneImpl) obj;
    // Persisted entities compare by primary key only.
    if (id != null && other.id != null) {
        return id.equals(other.id);
    }
    if (customer == null) {
        if (other.customer != null)
            return false;
    } else if (!customer.equals(other.customer))
        return false;
    if (phone == null) {
        if (other.phone != null)
            return false;
    } else if (!phone.equals(other.phone))
        return false;
    if (phoneName == null) {
        if (other.phoneName != null)
            return false;
    } else if (!phoneName.equals(other.phoneName))
        return false;
    return true;
}
} | 1no label
| core_broadleaf-profile_src_main_java_org_broadleafcommerce_profile_core_domain_CustomerPhoneImpl.java |
656 | public class PutIndexTemplateRequestBuilder extends MasterNodeOperationRequestBuilder<PutIndexTemplateRequest, PutIndexTemplateResponse, PutIndexTemplateRequestBuilder> {
/**
 * Creates a builder backed by an empty {@link PutIndexTemplateRequest};
 * the template name must be supplied later via the request.
 */
public PutIndexTemplateRequestBuilder(IndicesAdminClient indicesClient) {
    super((InternalIndicesAdminClient) indicesClient, new PutIndexTemplateRequest());
}
/**
 * Creates a builder for a template with the given name.
 */
public PutIndexTemplateRequestBuilder(IndicesAdminClient indicesClient, String name) {
    super((InternalIndicesAdminClient) indicesClient, new PutIndexTemplateRequest(name));
}
/**
 * Sets the template match expression that will be used to match on indices created.
 */
public PutIndexTemplateRequestBuilder setTemplate(String template) {
    request.template(template);
    return this;
}
/**
 * Sets the order of this template if more than one template matches.
 */
public PutIndexTemplateRequestBuilder setOrder(int order) {
    request.order(order);
    return this;
}
/**
 * Set to <tt>true</tt> to force only creation, not an update of an index template. If it already
 * exists, it will fail with an {@link org.elasticsearch.indices.IndexTemplateAlreadyExistsException}.
 */
public PutIndexTemplateRequestBuilder setCreate(boolean create) {
    request.create(create);
    return this;
}
/**
 * The settings to create the index template with.
 */
public PutIndexTemplateRequestBuilder setSettings(Settings settings) {
    request.settings(settings);
    return this;
}
/**
 * The settings to create the index template with.
 */
public PutIndexTemplateRequestBuilder setSettings(Settings.Builder settings) {
    request.settings(settings);
    return this;
}
/**
 * The settings to create the index template with (either json/yaml/properties format).
 */
public PutIndexTemplateRequestBuilder setSettings(String source) {
    request.settings(source);
    return this;
}
/**
 * The settings to create the index template with (either json/yaml/properties format).
 */
public PutIndexTemplateRequestBuilder setSettings(Map<String, Object> source) {
    request.settings(source);
    return this;
}
/**
 * Adds mapping that will be added when the index template gets created.
 *
 * @param type   The mapping type
 * @param source The mapping source (JSON string)
 */
public PutIndexTemplateRequestBuilder addMapping(String type, String source) {
    request.mapping(type, source);
    return this;
}
/**
 * The cause for this index template creation.
 */
public PutIndexTemplateRequestBuilder cause(String cause) {
    request.cause(cause);
    return this;
}
/**
 * Adds mapping that will be added when the index template gets created.
 *
 * @param type   The mapping type
 * @param source The mapping source (content builder)
 */
public PutIndexTemplateRequestBuilder addMapping(String type, XContentBuilder source) {
    request.mapping(type, source);
    return this;
}
/**
 * Adds mapping that will be added when the index template gets created.
 *
 * @param type   The mapping type
 * @param source The mapping source (structured map)
 */
public PutIndexTemplateRequestBuilder addMapping(String type, Map<String, Object> source) {
    request.mapping(type, source);
    return this;
}
/**
 * The template source definition.
 */
public PutIndexTemplateRequestBuilder setSource(XContentBuilder templateBuilder) {
    request.source(templateBuilder);
    return this;
}
/**
 * The template source definition.
 * NOTE(review): raw {@code Map} — callers should pass a {@code Map<String, Object>};
 * tightening the signature would be source-incompatible for existing raw callers.
 */
public PutIndexTemplateRequestBuilder setSource(Map templateSource) {
    request.source(templateSource);
    return this;
}
/**
 * The template source definition.
 */
public PutIndexTemplateRequestBuilder setSource(String templateSource) {
    request.source(templateSource);
    return this;
}
/**
 * The template source definition.
 */
public PutIndexTemplateRequestBuilder setSource(BytesReference templateSource) {
    request.source(templateSource);
    return this;
}
/**
 * The template source definition.
 */
public PutIndexTemplateRequestBuilder setSource(byte[] templateSource) {
    request.source(templateSource);
    return this;
}
/**
 * The template source definition, read from a slice of the given array.
 */
public PutIndexTemplateRequestBuilder setSource(byte[] templateSource, int offset, int length) {
    request.source(templateSource, offset, length);
    return this;
}
// Dispatches the assembled request through the indices admin client.
@Override
protected void doExecute(ActionListener<PutIndexTemplateResponse> listener) {
    ((IndicesAdminClient) client).putTemplate(request, listener);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_template_put_PutIndexTemplateRequestBuilder.java |
111 | public class TestDeadlockDetection
{
@Test
public void testDeadlockDetection() throws Exception
{
    // Scripted lock choreography over four workers (T1..T4) and four resources
    // (R1..R4). Each getXxxLock(resource, expectDeadlock) call's boolean says
    // whether that acquisition is expected to trigger deadlock detection; the
    // inline comments record the wait-for edges (e.g. "t3-r1-t1" = T3 waits on
    // R1 held by T1). The exact call order IS the test — do not reorder.
    ResourceObject r1 = newResourceObject( "R1" );
    ResourceObject r2 = newResourceObject( "R2" );
    ResourceObject r3 = newResourceObject( "R3" );
    ResourceObject r4 = newResourceObject( "R4" );
    PlaceboTm tm = new PlaceboTm( null, null );
    LockManager lm = new LockManagerImpl( new RagManager() );
    tm.setLockManager( lm );
    LockWorker t1 = new LockWorker( "T1", lm );
    LockWorker t2 = new LockWorker( "T2", lm );
    LockWorker t3 = new LockWorker( "T3", lm );
    LockWorker t4 = new LockWorker( "T4", lm );
    try
    {
        t1.getReadLock( r1, true );
        t1.getReadLock( r4, true );
        t2.getReadLock( r2, true );
        t2.getReadLock( r3, true );
        t3.getReadLock( r3, true );
        t3.getWriteLock( r1, false ); // t3-r1-t1 // T3
        t2.getWriteLock( r4, false ); // t2-r4-t1
        t1.getWriteLock( r2, true );
        assertTrue( t1.isLastGetLockDeadLock() ); // t1-r2-t2-r4-t1
        // resolve and try one more time
        t1.releaseReadLock( r4 ); // will give r4 to t2
        t1.getWriteLock( r2, false );
        // t1-r2-t2
        t2.releaseReadLock( r2 ); // will give r2 to t1
        t1.getWriteLock( r4, false ); // t1-r4-t2 // T1
        // dead lock
        t2.getWriteLock( r2, true ); // T2
        assertTrue( t2.isLastGetLockDeadLock() );
        // t2-r2-t3-r1-t1-r4-t2 or t2-r2-t1-r4-t2
        t2.releaseWriteLock( r4 ); // give r4 to t1
        t1.releaseWriteLock( r4 );
        t2.getReadLock( r4, true );
        t1.releaseWriteLock( r2 );
        t1.getReadLock( r2, true );
        t1.releaseReadLock( r1 ); // give r1 to t3
        t3.getReadLock( r2, true );
        t3.releaseWriteLock( r1 );
        t1.getReadLock( r1, true ); // give r1->t1
        t1.getWriteLock( r4, false );
        t3.getWriteLock( r1, false );
        t4.getReadLock( r2, true );
        // deadlock
        t2.getWriteLock( r2, true );
        assertTrue( t2.isLastGetLockDeadLock() );
        // t2-r2-t3-r1-t1-r4-t2
        // resolve
        t2.releaseReadLock( r4 );
        t1.releaseWriteLock( r4 );
        t1.releaseReadLock( r1 );
        t2.getReadLock( r4, true ); // give r1 to t3
        t3.releaseWriteLock( r1 );
        t1.getReadLock( r1, true ); // give r1 to t1
        t1.getWriteLock( r4, false );
        t3.releaseReadLock( r2 );
        t3.getWriteLock( r1, false );
        // cleanup
        t2.releaseReadLock( r4 ); // give r4 to t1
        t1.releaseWriteLock( r4 );
        t1.releaseReadLock( r1 ); // give r1 to t3
        t3.releaseWriteLock( r1 );
        t1.releaseReadLock( r2 );
        t4.releaseReadLock( r2 );
        t2.releaseReadLock( r3 );
        t3.releaseReadLock( r3 );
        // -- special case: read-lock upgrade deadlock (both holders try to upgrade)
        t1.getReadLock( r1, true );
        t2.getReadLock( r1, true );
        t1.getWriteLock( r1, false ); // t1->r1-t1&t2
        t2.getWriteLock( r1, true );
        assertTrue( t2.isLastGetLockDeadLock() );
        // t2->r1->t1->r1->t2
        t2.releaseReadLock( r1 );
        t1.releaseReadLock( r1 );
        t1.releaseWriteLock( r1 );
    }
    catch ( Exception e )
    {
        // Dump full lock-manager + worker state to a file for post-mortem analysis.
        File file = new LockWorkFailureDump( getClass() ).dumpState( lm, new LockWorker[] { t1, t2, t3, t4 } );
        throw new RuntimeException( "Failed, forensics information dumped to " + file.getAbsolutePath(), e );
    }
}
/**
 * Worker thread for the lock-manager stress test. Repeatedly acquires a random
 * mix of read/write locks on shared resources to a configurable nesting depth,
 * then releases them all. A DeadlockDetectedException mid-acquisition is a
 * legitimate outcome; any other exception is recorded in {@link #error}.
 */
public static class StressThread extends Thread
{
    // Sentinels pushed on the lock stack to remember which release method to call.
    private static final Object READ = new Object();
    private static final Object WRITE = new Object();
    // Shared lock targets; re-initialized by testStressMultipleThreads before each run.
    private static ResourceObject resources[] = new ResourceObject[10];
    private final Random rand = new Random( currentTimeMillis() );
    static
    {
        for ( int i = 0; i < resources.length; i++ )
            resources[i] = new ResourceObject( "RX" + i );
    }
    private final CountDownLatch startSignal;
    private final String name;
    private final int numberOfIterations;
    private final int depthCount;
    private final float readWriteRatio;
    private final LockManager lm;
    // Set by run() on unexpected failure; read by the test's monitoring loop.
    private volatile Exception error;
    private final Transaction tx = mock( Transaction.class );
    // Timestamp of the currently blocking lock call, or null when not waiting;
    // polled by anyAliveAndAllWell() to detect threads stuck too long.
    public volatile Long startedWaiting = null;
    StressThread( String name, int numberOfIterations, int depthCount,
            float readWriteRatio, LockManager lm, CountDownLatch startSignal )
    {
        super();
        this.name = name;
        this.numberOfIterations = numberOfIterations;
        this.depthCount = depthCount;
        this.readWriteRatio = readWriteRatio;
        this.lm = lm;
        this.startSignal = startSignal;
    }
    @Override
    public void run()
    {
        try
        {
            // All workers start simultaneously to maximize contention.
            startSignal.await();
            java.util.Stack<Object> lockStack = new java.util.Stack<Object>();
            java.util.Stack<ResourceObject> resourceStack = new java.util.Stack<ResourceObject>();
            for ( int i = 0; i < numberOfIterations; i++ )
            {
                try
                {
                    int depth = depthCount;
                    do
                    {
                        float f = rand.nextFloat();
                        int n = rand.nextInt( resources.length );
                        if ( f < readWriteRatio )
                        {
                            startedWaiting = currentTimeMillis();
                            lm.getReadLock( resources[n], tx );
                            startedWaiting = null;
                            lockStack.push( READ );
                        }
                        else
                        {
                            startedWaiting = currentTimeMillis();
                            lm.getWriteLock( resources[n], tx );
                            startedWaiting = null;
                            lockStack.push( WRITE );
                        }
                        resourceStack.push( resources[n] );
                    }
                    while ( --depth > 0 );
                }
                catch ( DeadlockDetectedException e )
                {
                    // This is good — deadlock detection aborting an acquisition
                    // is an expected, successful outcome of the stress test.
                }
                finally
                {
                    // Release everything acquired so far, deadlock or not.
                    releaseAllLocks( lockStack, resourceStack );
                }
            }
        }
        catch ( Exception e )
        {
            error = e;
        }
    }
    // Pops both stacks in lockstep, releasing each lock with the matching
    // read/write release call.
    private void releaseAllLocks( Stack<Object> lockStack, Stack<ResourceObject> resourceStack )
    {
        while ( !lockStack.isEmpty() )
        {
            if ( lockStack.pop() == READ )
            {
                lm.releaseReadLock( resourceStack.pop(), tx );
            }
            else
            {
                lm.releaseWriteLock( resourceStack.pop(), tx );
            }
        }
    }
    @Override
    public String toString()
    {
        return this.name;
    }
}
@Test
public void testStressMultipleThreads() throws Exception
{
    /*
     This test starts a bunch of threads, and randomly takes read or write locks on random resources.
     No thread should wait more than five seconds for a lock - if it does, we consider it a failure.
     Successful outcomes are when threads either finish with all their lock taking and releasing, or
     are terminated with a DeadlockDetectedException.
     */
    // Fresh resources so state from other tests (or previous runs) cannot leak in.
    for ( int i = 0; i < StressThread.resources.length; i++ )
    {
        StressThread.resources[i] = new ResourceObject( "RX" + i );
    }
    StressThread stressThreads[] = new StressThread[50];
    PlaceboTm tm = new PlaceboTm( null, null );
    LockManager lm = new LockManagerImpl( new RagManager() );
    tm.setLockManager( lm );
    // Latch released once all workers are constructed, so they contend from the start.
    CountDownLatch startSignal = new CountDownLatch( 1 );
    for ( int i = 0; i < stressThreads.length; i++ )
    {
        int numberOfIterations = 100;
        int depthCount = 10;
        float readWriteRatio = 0.80f;
        stressThreads[i] = new StressThread( "T" + i, numberOfIterations, depthCount, readWriteRatio, lm,
                startSignal );
    }
    for ( Thread thread : stressThreads )
    {
        thread.start();
    }
    startSignal.countDown();
    // Monitor loop: poll ~1/s until all workers finish, surfacing worker errors
    // and failing fast (inside anyAliveAndAllWell) if any worker waits > 5s.
    while ( anyAliveAndAllWell( stressThreads ) )
    {
        throwErrorsIfAny( stressThreads );
        sleepALittle();
    }
}
/**
 * Builds a human-readable failure report: for every still-alive worker, its
 * stack trace, with the culprit (the thread that waited too long) annotated
 * with how long it waited. Dead threads contribute only a blank line.
 */
private String diagnostics( StressThread culprit, StressThread[] stressThreads, long waited )
{
    StringBuilder report = new StringBuilder();
    for ( StressThread worker : stressThreads )
    {
        if ( worker.isAlive() )
        {
            if ( worker == culprit )
            {
                report.append( "This is the thread that waited too long. It waited: " )
                      .append( waited )
                      .append( " milliseconds" );
            }
            for ( StackTraceElement frame : worker.getStackTrace() )
            {
                report.append( frame.toString() ).append( "\n" );
            }
        }
        report.append( "\n" );
    }
    return report.toString();
}
/**
 * Propagates the first recorded worker failure, if any. Reads each worker's
 * volatile {@code error} field once into a local so the null-check and the
 * throw observe the same value.
 */
private void throwErrorsIfAny( StressThread[] stressThreads ) throws Exception
{
    for ( StressThread worker : stressThreads )
    {
        Exception failure = worker.error;
        if ( failure != null )
        {
            throw failure;
        }
    }
}
/**
 * Pauses the monitoring loop for roughly one second between liveness checks.
 * If the sleep is interrupted, the thread's interrupt status is restored so
 * callers further up the stack can still observe the interruption.
 */
private void sleepALittle()
{
    try
    {
        Thread.sleep( 1000 );
    }
    catch ( InterruptedException e )
    {
        // Re-assert the interrupt flag. The previous code called
        // Thread.interrupted(), which CLEARS the flag and silently swallows
        // the interruption request.
        Thread.currentThread().interrupt();
    }
}
/**
 * Returns true while at least one worker is still running. As a side effect,
 * fails the test immediately (with a full diagnostics dump) if any alive worker
 * has been blocked on a single lock acquisition for more than five seconds —
 * which would indicate an undetected deadlock.
 */
private boolean anyAliveAndAllWell( StressThread[] stressThreads )
{
    for ( StressThread stressThread : stressThreads )
    {
        if ( stressThread.isAlive() )
        {
            // Snapshot the volatile field once; the worker may clear it concurrently.
            Long startedWaiting = stressThread.startedWaiting;
            if ( startedWaiting != null )
            {
                long waitingTime = currentTimeMillis() - startedWaiting;
                if ( waitingTime > 5000 )
                {
                    fail( "One of the threads waited far too long. Diagnostics: \n" +
                            diagnostics( stressThread, stressThreads, waitingTime) );
                }
            }
            return true;
        }
    }
    return false;
}
} | 0true
| community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestDeadlockDetection.java |
8 | @Component("blProductCustomPersistenceHandler")
public class ProductCustomPersistenceHandler extends CustomPersistenceHandlerAdapter {
@Resource(name = "blCatalogService")
protected CatalogService catalogService;
private static final Log LOG = LogFactory.getLog(ProductCustomPersistenceHandler.class);
/**
 * This handler only takes over Product adds that were submitted through the
 * admin's direct-edit flow: the first custom criterion must be
 * "productDirectEdit" and the ceiling entity must be Product.
 */
@Override
public Boolean canHandleAdd(PersistencePackage persistencePackage) {
    String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
    String[] customCriteria = persistencePackage.getCustomCriteria();
    return !ArrayUtils.isEmpty(customCriteria) && "productDirectEdit".equals(customCriteria[0]) && Product.class.getName().equals(ceilingEntityFullyQualifiedClassname);
}
// Updates are gated by exactly the same criteria as adds.
@Override
public Boolean canHandleUpdate(PersistencePackage persistencePackage) {
    return canHandleAdd(persistencePackage);
}
/**
 * Persists a new Product from the admin submission. Beyond the generic
 * populate-and-merge flow this also: relaxes bundle field requirements for
 * ITEM_SUM bundles, links the product to its default category via a
 * CategoryProductXref, and guarantees a default Sku exists (creating one if
 * the admin user left all Sku fields blank) with its defaultProduct back-reference set.
 *
 * @throws ServiceException wrapping any failure, tagged with the entity type
 */
@Override
public Entity add(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
    Entity entity  = persistencePackage.getEntity();
    try {
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        // Instantiate the concrete Product subclass named by the submission.
        Product adminInstance = (Product) Class.forName(entity.getType()[0]).newInstance();
        Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Product.class.getName(), persistencePerspective);
        if (adminInstance instanceof ProductBundle) {
            removeBundleFieldRestrictions((ProductBundle)adminInstance, adminProperties, entity);
        }
        adminInstance = (Product) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false);
        adminInstance = (Product) dynamicEntityDao.merge(adminInstance);
        // Link the product to its default category unless that xref already exists.
        CategoryProductXref categoryXref = new CategoryProductXrefImpl();
        categoryXref.setCategory(adminInstance.getDefaultCategory());
        categoryXref.setProduct(adminInstance);
        if (adminInstance.getDefaultCategory() != null && !adminInstance.getAllParentCategoryXrefs().contains(categoryXref)) {
            categoryXref = (CategoryProductXref) dynamicEntityDao.merge(categoryXref);
            adminInstance.getAllParentCategoryXrefs().add(categoryXref);
        }
        //Since none of the Sku fields are required, it's possible that the user did not fill out
        //any Sku fields, and thus a Sku would not be created. Product still needs a default Sku so instantiate one
        if (adminInstance.getDefaultSku() == null) {
            Sku newSku = catalogService.createSku();
            adminInstance.setDefaultSku(newSku);
            adminInstance = (Product) dynamicEntityDao.merge(adminInstance);
        }
        //also set the default product for the Sku
        adminInstance.getDefaultSku().setDefaultProduct(adminInstance);
        dynamicEntityDao.merge(adminInstance.getDefaultSku());
        return helper.getRecord(adminProperties, adminInstance, null, null);
    } catch (Exception e) {
        throw new ServiceException("Unable to add entity for " + entity.getType()[0], e);
    }
}
/**
 * Updates an existing Product: retrieves it by primary key, relaxes bundle
 * field requirements when applicable, repopulates it from the submission and
 * merges, then ensures a default-category xref is present.
 *
 * NOTE(review): unlike add(), the new CategoryProductXref here is added to the
 * collection WITHOUT being merged first — confirm whether cascading from the
 * product persists it, or whether this is a latent inconsistency with add().
 *
 * @throws ServiceException wrapping any failure, tagged with the entity type
 */
@Override
public Entity update(PersistencePackage persistencePackage, DynamicEntityDao dynamicEntityDao, RecordHelper helper) throws ServiceException {
    Entity entity = persistencePackage.getEntity();
    try {
        PersistencePerspective persistencePerspective = persistencePackage.getPersistencePerspective();
        Map<String, FieldMetadata> adminProperties = helper.getSimpleMergedProperties(Product.class.getName(), persistencePerspective);
        Object primaryKey = helper.getPrimaryKey(entity, adminProperties);
        Product adminInstance = (Product) dynamicEntityDao.retrieve(Class.forName(entity.getType()[0]), primaryKey);
        if (adminInstance instanceof ProductBundle) {
            removeBundleFieldRestrictions((ProductBundle)adminInstance, adminProperties, entity);
        }
        adminInstance = (Product) helper.createPopulatedInstance(adminInstance, entity, adminProperties, false);
        adminInstance = (Product) dynamicEntityDao.merge(adminInstance);
        // Link the product to its default category unless that xref already exists.
        CategoryProductXref categoryXref = new CategoryProductXrefImpl();
        categoryXref.setCategory(adminInstance.getDefaultCategory());
        categoryXref.setProduct(adminInstance);
        if (adminInstance.getDefaultCategory() != null && !adminInstance.getAllParentCategoryXrefs().contains(categoryXref)) {
            adminInstance.getAllParentCategoryXrefs().add(categoryXref);
        }
        return helper.getRecord(adminProperties, adminInstance, null, null);
    } catch (Exception e) {
        throw new ServiceException("Unable to update entity for " + entity.getType()[0], e);
    }
}
/**
 * Relaxes admin validation for product bundles: when the submitted pricing
 * model is ITEM_SUM (the bundle price is derived from the sum of its items),
 * the defaultSku.retailPrice field is no longer required.
 *
 * @param adminInstance   the bundle being added/updated (not read here; kept for overriding subclasses)
 * @param adminProperties merged admin field metadata, keyed by property name
 * @param entity          the submitted entity whose property map is inspected
 */
protected void removeBundleFieldRestrictions(ProductBundle adminInstance, Map<String, FieldMetadata> adminProperties, Entity entity) {
    //no required validation for product bundles
    if (entity.getPMap().get("pricingModel") != null
            && ProductBundlePricingModelType.ITEM_SUM.getType().equals(entity.getPMap().get("pricingModel").getValue())) {
        FieldMetadata retailPriceMetadata = adminProperties.get("defaultSku.retailPrice");
        // Guard the cast: a missing or non-basic metadata entry previously caused an
        // unchecked NPE/ClassCastException here.
        if (retailPriceMetadata instanceof BasicFieldMetadata) {
            ((BasicFieldMetadata) retailPriceMetadata).setRequiredOverride(false);
        }
    }
}
} | 1no label
| admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_handler_ProductCustomPersistenceHandler.java |
343 | @RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
// Stress test: several threads repeatedly tryLock two map keys and move a random
// amount between them; the combined total must remain zero if locking is correct.
public class ClientMapTryLockConcurrentTests {
static HazelcastInstance client;
static HazelcastInstance server;
@BeforeClass
public static void init() {
// One embedded member plus one client shared by all tests in the class.
server = Hazelcast.newHazelcastInstance();
client = HazelcastClient.newHazelcastClient();
}
@AfterClass
public static void destroy() {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void concurrent_MapTryLockTest() throws InterruptedException {
concurrent_MapTryLock(false);
}
@Test
public void concurrent_MapTryLockTimeOutTest() throws InterruptedException {
concurrent_MapTryLock(true);
}
// Runs maxThreads workers against the same two keys; withTimeOut selects the
// tryLock(timeout) variant instead of the immediate tryLock.
private void concurrent_MapTryLock(boolean withTimeOut) throws InterruptedException {
final int maxThreads = 8;
final IMap<String, Integer> map = client.getMap(randomString());
final String upKey = "upKey";
final String downKey = "downKey";
map.put(upKey, 0);
map.put(downKey, 0);
Thread threads[] = new Thread[maxThreads];
for ( int i=0; i< threads.length; i++ ) {
Thread t;
if(withTimeOut){
t = new MapTryLockTimeOutThread(map, upKey, downKey);
}else{
t = new MapTryLockThread(map, upKey, downKey);
}
t.start();
threads[i] = t;
}
assertJoinable(threads);
// Every worker adds dif to one key and subtracts it from the other under both
// locks, so the invariant upTotal + downTotal == 0 must hold at the end.
int upTotal = map.get(upKey);
int downTotal = map.get(downKey);
assertTrue("concurrent access to locked code caused wrong total", upTotal + downTotal == 0);
}
// Worker using the non-blocking tryLock; if either lock is unavailable the
// iteration is simply skipped (work() is only called when both are held).
static class MapTryLockThread extends TestHelper {
public MapTryLockThread(IMap map, String upKey, String downKey){
super(map, upKey, downKey);
}
public void doRun() throws Exception{
if(map.tryLock(upKey)){
try{
if(map.tryLock(downKey)){
try {
work();
}finally {
map.unlock(downKey);
}
}
}finally {
map.unlock(upKey);
}
}
}
}
// Worker using tryLock with a 1 ms timeout for both keys.
static class MapTryLockTimeOutThread extends TestHelper {
public MapTryLockTimeOutThread(IMap map, String upKey, String downKey){
super(map, upKey, downKey);
}
public void doRun() throws Exception{
if(map.tryLock(upKey, 1, TimeUnit.MILLISECONDS)){
try{
if(map.tryLock(downKey, 1, TimeUnit.MILLISECONDS )){
try {
work();
}finally {
map.unlock(downKey);
}
}
}finally {
map.unlock(upKey);
}
}
}
}
// Base worker: loops ITERATIONS times calling the subclass locking strategy;
// work() performs the balanced transfer that the assertion above verifies.
static abstract class TestHelper extends Thread {
protected static final int ITERATIONS = 1000*10;
protected final Random random = new Random();
protected final IMap<String, Integer> map;
protected final String upKey;
protected final String downKey;
public TestHelper(IMap map, String upKey, String downKey){
this.map = map;
this.upKey = upKey;
this.downKey = downKey;
}
public void run() {
try{
for ( int i=0; i < ITERATIONS; i++ ) {
doRun();
}
}catch(Exception e){
// Surface worker failures to assertJoinable via an uncaught runtime exception.
throw new RuntimeException("Test Thread crashed with ", e);
}
}
abstract void doRun()throws Exception;
// Must only be called while holding both key locks (see subclasses).
public void work(){
int upTotal = map.get(upKey);
int downTotal = map.get(downKey);
int dif = random.nextInt(1000);
upTotal += dif;
downTotal -= dif;
map.put(upKey, upTotal);
map.put(downKey, downTotal);
}
}
} | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTryLockConcurrentTests.java |
500 | indexStateService.closeIndex(updateRequest, new ClusterStateUpdateListener() {
@Override
public void onResponse(ClusterStateUpdateResponse response) {
// Translate the cluster-state update ack into the transport-level response.
listener.onResponse(new CloseIndexResponse(response.isAcknowledged()));
}
@Override
public void onFailure(Throwable t) {
// Log at debug (caller decides user-facing handling) and propagate the failure.
logger.debug("failed to close indices [{}]", t, request.indices());
listener.onFailure(t);
}
}); | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_close_TransportCloseIndexAction.java |
475 | public class GetAliasesAction extends IndicesAction<GetAliasesRequest, GetAliasesResponse, GetAliasesRequestBuilder> {
// Singleton action descriptor for the "get aliases" indices API.
public static final GetAliasesAction INSTANCE = new GetAliasesAction();
// Transport action name used for registration/routing.
public static final String NAME = "indices/get/aliases";
// Private: only the INSTANCE singleton should exist.
private GetAliasesAction() {
super(NAME);
}
@Override
public GetAliasesRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new GetAliasesRequestBuilder(client);
}
@Override
public GetAliasesResponse newResponse() {
return new GetAliasesResponse();
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_indices_alias_get_GetAliasesAction.java |
2,137 | public class Lucene {
// Central holder for Lucene version constants, default analyzers, and
// stream (de)serialization helpers for TopDocs/Explanation.
public static final Version VERSION = Version.LUCENE_46;
public static final Version ANALYZER_VERSION = VERSION;
public static final Version QUERYPARSER_VERSION = VERSION;
public static final NamedAnalyzer STANDARD_ANALYZER = new NamedAnalyzer("_standard", AnalyzerScope.GLOBAL, new StandardAnalyzer(ANALYZER_VERSION));
public static final NamedAnalyzer KEYWORD_ANALYZER = new NamedAnalyzer("_keyword", AnalyzerScope.GLOBAL, new KeywordAnalyzer());
public static final int NO_DOC = -1;
// NOTE(review): public mutable static field (not final); callers are trusted
// not to reassign or mutate this shared empty array.
public static ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0];
/**
 * Parses a version string such as "4.6" or "3.0" into a Lucene {@link Version}.
 * Returns {@code defaultVersion} when the input is null, or (with a warning)
 * when the string is unrecognized.
 */
@SuppressWarnings("deprecation")
public static Version parseVersion(@Nullable String version, Version defaultVersion, ESLogger logger) {
if (version == null) {
return defaultVersion;
}
// NOTE(review): the first branches access the static constants through the
// VERSION instance (VERSION.LUCENE_46) while later ones use Version.LUCENE_43;
// behavior is identical, the style is just inconsistent.
if ("4.6".equals(version)) {
return VERSION.LUCENE_46;
}
if ("4.5".equals(version)) {
return VERSION.LUCENE_45;
}
if ("4.4".equals(version)) {
return VERSION.LUCENE_44;
}
if ("4.3".equals(version)) {
return Version.LUCENE_43;
}
if ("4.2".equals(version)) {
return Version.LUCENE_42;
}
if ("4.1".equals(version)) {
return Version.LUCENE_41;
}
if ("4.0".equals(version)) {
return Version.LUCENE_40;
}
if ("3.6".equals(version)) {
return Version.LUCENE_36;
}
if ("3.5".equals(version)) {
return Version.LUCENE_35;
}
if ("3.4".equals(version)) {
return Version.LUCENE_34;
}
if ("3.3".equals(version)) {
return Version.LUCENE_33;
}
if ("3.2".equals(version)) {
return Version.LUCENE_32;
}
if ("3.1".equals(version)) {
return Version.LUCENE_31;
}
if ("3.0".equals(version)) {
return Version.LUCENE_30;
}
logger.warn("no version match {}, default to {}", version, defaultVersion);
return defaultVersion;
}
/**
 * Reads the segments infos, failing if it fails to load
 */
public static SegmentInfos readSegmentInfos(Directory directory) throws IOException {
final SegmentInfos sis = new SegmentInfos();
sis.read(directory);
return sis;
}
// Counts matching documents for a query without collecting hits.
public static long count(IndexSearcher searcher, Query query) throws IOException {
TotalHitCountCollector countCollector = new TotalHitCountCollector();
// we don't need scores, so wrap it in a constant score query
if (!(query instanceof ConstantScoreQuery)) {
query = new ConstantScoreQuery(query);
}
searcher.search(query, countCollector);
return countCollector.getTotalHits();
}
/**
 * Closes the index writer, returning <tt>false</tt> if it failed to close.
 */
public static boolean safeClose(IndexWriter writer) {
if (writer == null) {
return true;
}
try {
writer.close();
return true;
} catch (Throwable e) {
return false;
}
}
// Deserializes a TopDocs previously written by writeTopDocs. The first boolean
// flags presence; the second distinguishes TopFieldDocs (sorted) from plain TopDocs.
public static TopDocs readTopDocs(StreamInput in) throws IOException {
if (!in.readBoolean()) {
// no docs
return null;
}
if (in.readBoolean()) {
// Sorted results: read sort fields, then per-hit comparator values tagged by type byte.
int totalHits = in.readVInt();
float maxScore = in.readFloat();
SortField[] fields = new SortField[in.readVInt()];
for (int i = 0; i < fields.length; i++) {
String field = null;
if (in.readBoolean()) {
field = in.readString();
}
fields[i] = new SortField(field, readSortType(in), in.readBoolean());
}
FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()];
for (int i = 0; i < fieldDocs.length; i++) {
Comparable[] cFields = new Comparable[in.readVInt()];
for (int j = 0; j < cFields.length; j++) {
// Type tags must mirror the write side in writeTopDocs exactly.
byte type = in.readByte();
if (type == 0) {
cFields[j] = null;
} else if (type == 1) {
cFields[j] = in.readString();
} else if (type == 2) {
cFields[j] = in.readInt();
} else if (type == 3) {
cFields[j] = in.readLong();
} else if (type == 4) {
cFields[j] = in.readFloat();
} else if (type == 5) {
cFields[j] = in.readDouble();
} else if (type == 6) {
cFields[j] = in.readByte();
} else if (type == 7) {
cFields[j] = in.readShort();
} else if (type == 8) {
cFields[j] = in.readBoolean();
} else if (type == 9) {
cFields[j] = in.readBytesRef();
} else {
throw new IOException("Can't match type [" + type + "]");
}
}
fieldDocs[i] = new FieldDoc(in.readVInt(), in.readFloat(), cFields);
}
return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore);
} else {
// Unsorted results: just (doc, score) pairs.
int totalHits = in.readVInt();
float maxScore = in.readFloat();
ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()];
for (int i = 0; i < scoreDocs.length; i++) {
scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat());
}
return new TopDocs(totalHits, scoreDocs, maxScore);
}
}
// Serializes topDocs, skipping the first `from` hits (pagination offset).
// Wire format must stay in sync with readTopDocs above.
public static void writeTopDocs(StreamOutput out, TopDocs topDocs, int from) throws IOException {
if (topDocs.scoreDocs.length - from < 0) {
out.writeBoolean(false);
return;
}
out.writeBoolean(true);
if (topDocs instanceof TopFieldDocs) {
out.writeBoolean(true);
TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs;
out.writeVInt(topDocs.totalHits);
out.writeFloat(topDocs.getMaxScore());
out.writeVInt(topFieldDocs.fields.length);
for (SortField sortField : topFieldDocs.fields) {
if (sortField.getField() == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeString(sortField.getField());
}
if (sortField.getComparatorSource() != null) {
writeSortType(out, ((IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource()).reducedType());
} else {
writeSortType(out, sortField.getType());
}
out.writeBoolean(sortField.getReverse());
}
out.writeVInt(topDocs.scoreDocs.length - from);
int index = 0;
for (ScoreDoc doc : topFieldDocs.scoreDocs) {
if (index++ < from) {
continue;
}
FieldDoc fieldDoc = (FieldDoc) doc;
out.writeVInt(fieldDoc.fields.length);
for (Object field : fieldDoc.fields) {
if (field == null) {
out.writeByte((byte) 0);
} else {
// Tag each comparator value with a type byte understood by readTopDocs.
Class type = field.getClass();
if (type == String.class) {
out.writeByte((byte) 1);
out.writeString((String) field);
} else if (type == Integer.class) {
out.writeByte((byte) 2);
out.writeInt((Integer) field);
} else if (type == Long.class) {
out.writeByte((byte) 3);
out.writeLong((Long) field);
} else if (type == Float.class) {
out.writeByte((byte) 4);
out.writeFloat((Float) field);
} else if (type == Double.class) {
out.writeByte((byte) 5);
out.writeDouble((Double) field);
} else if (type == Byte.class) {
out.writeByte((byte) 6);
out.writeByte((Byte) field);
} else if (type == Short.class) {
out.writeByte((byte) 7);
out.writeShort((Short) field);
} else if (type == Boolean.class) {
out.writeByte((byte) 8);
out.writeBoolean((Boolean) field);
} else if (type == BytesRef.class) {
out.writeByte((byte) 9);
out.writeBytesRef((BytesRef) field);
} else {
throw new IOException("Can't handle sort field value of type [" + type + "]");
}
}
}
out.writeVInt(doc.doc);
out.writeFloat(doc.score);
}
} else {
out.writeBoolean(false);
out.writeVInt(topDocs.totalHits);
out.writeFloat(topDocs.getMaxScore());
out.writeVInt(topDocs.scoreDocs.length - from);
int index = 0;
for (ScoreDoc doc : topDocs.scoreDocs) {
if (index++ < from) {
continue;
}
out.writeVInt(doc.doc);
out.writeFloat(doc.score);
}
}
}
// LUCENE 4 UPGRADE: We might want to maintain our own ordinal, instead of Lucene's ordinal
public static SortField.Type readSortType(StreamInput in) throws IOException {
return SortField.Type.values()[in.readVInt()];
}
public static void writeSortType(StreamOutput out, SortField.Type sortType) throws IOException {
out.writeVInt(sortType.ordinal());
}
// Recursively deserializes an Explanation tree written by writeExplanation.
public static Explanation readExplanation(StreamInput in) throws IOException {
float value = in.readFloat();
String description = in.readString();
Explanation explanation = new Explanation(value, description);
if (in.readBoolean()) {
int size = in.readVInt();
for (int i = 0; i < size; i++) {
explanation.addDetail(readExplanation(in));
}
}
return explanation;
}
// Recursively serializes an Explanation tree (value, description, sub-details).
public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException {
out.writeFloat(explanation.getValue());
out.writeString(explanation.getDescription());
Explanation[] subExplanations = explanation.getDetails();
if (subExplanations == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeVInt(subExplanations.length);
for (Explanation subExp : subExplanations) {
writeExplanation(out, subExp);
}
}
}
// Collector that only records whether at least one document matched.
// Reset happens in setScorer (i.e., per segment search start); call reset()
// explicitly when reusing across searches.
public static class ExistsCollector extends Collector {
private boolean exists;
public void reset() {
exists = false;
}
public boolean exists() {
return exists;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.exists = false;
}
@Override
public void collect(int doc) throws IOException {
exists = true;
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
}
@Override
public boolean acceptsDocsOutOfOrder() {
return true;
}
}
// Utility class: no instances.
private Lucene() {
}
// True if the directory contains a Lucene index.
public static final boolean indexExists(final Directory directory) throws IOException {
return DirectoryReader.indexExists(directory);
}
} | 1no label
| src_main_java_org_elasticsearch_common_lucene_Lucene.java |
248 | @Test
// TestNG suite for ODefaultCache covering lifecycle (startup/shutdown),
// enable/disable toggling, no-op behavior while disabled, identity-keyed
// storage, and the size limit. Each test narrates given/when/then inline.
public class ODefaultCacheTest {
public void enabledAfterStartup() {
// Given cache created
// And not started
// And not enabled
OCache sut = newCache();
// When started
sut.startup();
// Then it should be enabled
assertTrue(sut.isEnabled());
}
public void disabledAfterShutdown() {
// Given running cache
OCache sut = runningCache();
// When started
sut.shutdown();
// Then it should be disabled
assertFalse(sut.isEnabled());
}
public void disablesOnlyIfWasEnabled() {
// Given enabled cache
OCache sut = enabledCache();
// When disabled more than once
boolean disableConfirmed = sut.disable();
boolean disableNotConfirmed = sut.disable();
// Then should return confirmation of switching from enabled to disabled state for first time
// And no confirmation on subsequent disables
assertTrue(disableConfirmed);
assertFalse(disableNotConfirmed);
}
public void enablesOnlyIfWasDisabled() {
// Given disabled cache
OCache sut = newCache();
// When enabled more than once
boolean enableConfirmed = sut.enable();
boolean enableNotConfirmed = sut.enable();
// Then should return confirmation of switching from disabled to enabled state for first time
// And no confirmation on subsequent enables
assertTrue(enableConfirmed);
assertFalse(enableNotConfirmed);
}
public void doesNothingWhileDisabled() {
// Given cache created
// And not started
// And not enabled
OCache sut = new ODefaultCache(null, 1);
// When any operation called on it
ODocument record = new ODocument();
ORID recordId = record.getIdentity();
sut.put(record);
ORecordInternal<?> recordGot = sut.get(recordId);
int cacheSizeAfterPut = sut.size();
ORecordInternal<?> recordRemoved = sut.remove(recordId);
int cacheSizeAfterRemove = sut.size();
// Then it has no effect on cache's state
assertEquals(sut.isEnabled(), false, "Cache should be disabled at creation");
assertEquals(recordGot, null, "Cache should return empty records while disabled");
assertEquals(recordRemoved, null, "Cache should return empty records while disabled");
assertEquals(cacheSizeAfterPut, 0, "Cache should ignore insert while disabled");
assertEquals(cacheSizeAfterRemove, cacheSizeAfterPut, "Cache should ignore remove while disabled");
}
public void hasZeroSizeAfterClear() {
// Given enabled non-empty cache
OCache sut = enabledNonEmptyCache();
// When cleared
sut.clear();
// Then size of cache should be zero
assertEquals(sut.size(), 0, "Cache was not cleaned up");
}
public void providesAccessToAllKeysInCache() {
// Given enabled non-empty cache
OCache sut = enabledNonEmptyCache();
// When asked for keys
Collection<ORID> keys = sut.keys();
// Then keys count should be same as size of cache
// And records available for keys
assertEquals(keys.size(), sut.size(), "Cache provided not all keys?");
for (ORID key : keys) {
assertNotNull(sut.get(key));
}
}
public void storesRecordsUsingTheirIdentity() {
// Given an enabled cache
OCache sut = enabledCache();
// When new record put into
ORecordId id = new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1));
ODocument record = new ODocument(id);
sut.put(record);
// Then it can be retrieved later by it's id
assertEquals(sut.get(id), record);
}
public void storesRecordsOnlyOnceForEveryIdentity() {
// Given an enabled cache
OCache sut = enabledCache();
final int initialSize = sut.size();
// When some records with same identity put in several times
ODocument first = new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1)));
ODocument last = new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1)));
sut.put(first);
sut.put(last);
// Then cache ends up storing only one item
assertEquals(sut.size(), initialSize + 1);
}
public void removesOnlyOnce() {
// Given an enabled cache with records in it
OCache sut = enabledCache();
ORecordId id = new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1));
ODocument record = new ODocument(id);
sut.put(record);
sut.remove(id);
// When removing already removed record
ORecordInternal<?> removedSecond = sut.remove(id);
// Then empty result returned
assertNull(removedSecond);
}
public void storesNoMoreElementsThanSpecifiedLimit() {
// Given an enabled cache
OCache sut = enabledCache();
// When stored more distinct elements than cache limit allows
for (int i = sut.limit() + 2; i > 0; i--)
sut.put(new ODocument(new ORecordId(i, OClusterPositionFactory.INSTANCE.valueOf(i))));
// Then size of cache should be exactly as it's limit
assertEquals(sut.size(), sut.limit(), "Cache doesn't meet limit requirements");
}
// --- Fixture factories ---
// Fresh cache with capacity 5, neither started nor enabled.
private ODefaultCache newCache() {
return new ODefaultCache(null, 5);
}
// Cache explicitly enabled (but not started).
private OCache enabledCache() {
ODefaultCache cache = newCache();
cache.enable();
return cache;
}
// Enabled cache pre-populated with two records.
private OCache enabledNonEmptyCache() {
OCache cache = enabledCache();
cache.put(new ODocument(new ORecordId(1, OClusterPositionFactory.INSTANCE.valueOf(1))));
cache.put(new ODocument(new ORecordId(2, OClusterPositionFactory.INSTANCE.valueOf(2))));
return cache;
}
// Cache taken through startup() (enabled via lifecycle rather than enable()).
private OCache runningCache() {
ODefaultCache cache = newCache();
cache.startup();
return cache;
}
} | 0true
| core_src_test_java_com_orientechnologies_orient_core_cache_ODefaultCacheTest.java |
1,322 | new SingleSourceUnitPackage(pkg, sourceUnitFullPath), moduleManager, CeylonBuilder.getProjectTypeChecker(project), tokens, originalProject) {
@Override
protected boolean reuseExistingDescriptorModels() {
return true;
}
}; | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_JDTModule.java |
253 | public class OUnboundedWeakCache extends OAbstractMapCache<WeakHashMap<ORID, ORecordInternal<?>>> implements OCache {
// Record cache with no size limit: entries are held via a WeakHashMap, so the
// garbage collector evicts records when their ORID keys are no longer referenced.
// NOTE(review): WeakHashMap itself is not synchronized; thread-safety, if any,
// must be provided by OAbstractMapCache — TODO confirm.
public OUnboundedWeakCache() {
super(new WeakHashMap<ORID, ORecordInternal<?>>());
}
@Override
public int limit() {
// Integer.MAX_VALUE signals "effectively unbounded" to callers.
return Integer.MAX_VALUE;
}
} | 1no label
| core_src_main_java_com_orientechnologies_orient_core_cache_OUnboundedWeakCache.java |
2,368 | public class ByteSizeValue implements Serializable, Streamable {
// Immutable-by-convention (size, unit) pair representing a byte count, with
// conversion accessors, human-readable formatting, and string parsing.
// NOTE(review): equals/hashCode compare (size, unit) — 1024 BYTES and 1 KB
// are NOT equal even though bytes() agrees; callers relying on equality must
// normalize first.
private long size;
private ByteSizeUnit sizeUnit;
// No-arg constructor reserved for Streamable deserialization (readFrom).
private ByteSizeValue() {
}
public ByteSizeValue(long bytes) {
this(bytes, ByteSizeUnit.BYTES);
}
public ByteSizeValue(long size, ByteSizeUnit sizeUnit) {
this.size = size;
this.sizeUnit = sizeUnit;
}
// Byte count as int; rejects values above Integer.MAX_VALUE.
public int bytesAsInt() throws ElasticsearchIllegalArgumentException {
long bytes = bytes();
if (bytes > Integer.MAX_VALUE) {
throw new ElasticsearchIllegalArgumentException("size [" + toString() + "] is bigger than max int");
}
return (int) bytes;
}
// Whole-unit (truncating) conversions delegate to the stored unit.
public long bytes() {
return sizeUnit.toBytes(size);
}
public long getBytes() {
return bytes();
}
public long kb() {
return sizeUnit.toKB(size);
}
public long getKb() {
return kb();
}
public long mb() {
return sizeUnit.toMB(size);
}
public long getMb() {
return mb();
}
public long gb() {
return sizeUnit.toGB(size);
}
public long getGb() {
return gb();
}
public long tb() {
return sizeUnit.toTB(size);
}
public long getTb() {
return tb();
}
public long pb() {
return sizeUnit.toPB(size);
}
public long getPb() {
return pb();
}
// Fractional conversions divide the raw byte count by the unit constants
// C1..C5 (KB..PB scale factors defined on ByteSizeUnit).
public double kbFrac() {
return ((double) bytes()) / ByteSizeUnit.C1;
}
public double getKbFrac() {
return kbFrac();
}
public double mbFrac() {
return ((double) bytes()) / ByteSizeUnit.C2;
}
public double getMbFrac() {
return mbFrac();
}
public double gbFrac() {
return ((double) bytes()) / ByteSizeUnit.C3;
}
public double getGbFrac() {
return gbFrac();
}
public double tbFrac() {
return ((double) bytes()) / ByteSizeUnit.C4;
}
public double getTbFrac() {
return tbFrac();
}
public double pbFrac() {
return ((double) bytes()) / ByteSizeUnit.C5;
}
public double getPbFrac() {
return pbFrac();
}
@Override
// Renders with the largest unit that fits, one decimal place (e.g. "1.5gb").
public String toString() {
long bytes = bytes();
double value = bytes;
String suffix = "b";
if (bytes >= ByteSizeUnit.C5) {
value = pbFrac();
suffix = "pb";
} else if (bytes >= ByteSizeUnit.C4) {
value = tbFrac();
suffix = "tb";
} else if (bytes >= ByteSizeUnit.C3) {
value = gbFrac();
suffix = "gb";
} else if (bytes >= ByteSizeUnit.C2) {
value = mbFrac();
suffix = "mb";
} else if (bytes >= ByteSizeUnit.C1) {
value = kbFrac();
suffix = "kb";
}
return Strings.format1Decimals(value, suffix);
}
public static ByteSizeValue parseBytesSizeValue(String sValue) throws ElasticsearchParseException {
return parseBytesSizeValue(sValue, null);
}
// Parses strings like "5kb", "1.5g", "1024" (case-insensitive suffixes:
// k/kb, m/mb, g/gb, t/tb, p/pb, b, or no suffix = bytes). Returns
// defaultValue when sValue is null.
public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue) throws ElasticsearchParseException {
if (sValue == null) {
return defaultValue;
}
long bytes;
try {
// Only the last two characters decide the unit suffix.
String lastTwoChars = sValue.substring(sValue.length() - Math.min(2, sValue.length())).toLowerCase(Locale.ROOT);
if (lastTwoChars.endsWith("k")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C1);
} else if (lastTwoChars.endsWith("kb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C1);
} else if (lastTwoChars.endsWith("m")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C2);
} else if (lastTwoChars.endsWith("mb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C2);
} else if (lastTwoChars.endsWith("g")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C3);
} else if (lastTwoChars.endsWith("gb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C3);
} else if (lastTwoChars.endsWith("t")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C4);
} else if (lastTwoChars.endsWith("tb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C4);
} else if (lastTwoChars.endsWith("p")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C5);
} else if (lastTwoChars.endsWith("pb")) {
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C5);
} else if (lastTwoChars.endsWith("b")) {
bytes = Long.parseLong(sValue.substring(0, sValue.length() - 1));
} else {
bytes = Long.parseLong(sValue);
}
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("Failed to parse [" + sValue + "]", e);
}
return new ByteSizeValue(bytes, ByteSizeUnit.BYTES);
}
public static ByteSizeValue readBytesSizeValue(StreamInput in) throws IOException {
ByteSizeValue sizeValue = new ByteSizeValue();
sizeValue.readFrom(in);
return sizeValue;
}
@Override
// Wire format carries only the raw byte count (see writeTo), so the original
// unit is lost on round trip and deserialized values always use BYTES.
public void readFrom(StreamInput in) throws IOException {
size = in.readVLong();
sizeUnit = ByteSizeUnit.BYTES;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(bytes());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ByteSizeValue sizeValue = (ByteSizeValue) o;
if (size != sizeValue.size) return false;
if (sizeUnit != sizeValue.sizeUnit) return false;
return true;
}
@Override
public int hashCode() {
int result = (int) (size ^ (size >>> 32));
result = 31 * result + (sizeUnit != null ? sizeUnit.hashCode() : 0);
return result;
}
} | 1no label
| src_main_java_org_elasticsearch_common_unit_ByteSizeValue.java |
317 | public class NodesHotThreadsAction extends ClusterAction<NodesHotThreadsRequest, NodesHotThreadsResponse, NodesHotThreadsRequestBuilder> {
// Singleton action descriptor for the per-node "hot threads" diagnostics API.
public static final NodesHotThreadsAction INSTANCE = new NodesHotThreadsAction();
// Transport action name used for registration/routing.
public static final String NAME = "cluster/nodes/hot_threads";
// Private: only the INSTANCE singleton should exist.
private NodesHotThreadsAction() {
super(NAME);
}
@Override
public NodesHotThreadsResponse newResponse() {
return new NodesHotThreadsResponse();
}
@Override
public NodesHotThreadsRequestBuilder newRequestBuilder(ClusterAdminClient client) {
return new NodesHotThreadsRequestBuilder(client);
}
} | 0true
| src_main_java_org_elasticsearch_action_admin_cluster_node_hotthreads_NodesHotThreadsAction.java |
245 | assertTrueEventually(new AssertTask() {
// Poll until the member's UUID shows up as a key in the map (the assertion is
// retried by assertTrueEventually until it passes or the harness times out).
public void run() throws Exception {
assertTrue(map.containsKey(member.getUuid()));
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceSubmitTest.java |
30 | public class EmbeddedBlueprintsTest extends AbstractCassandraBlueprintsTest {
// Blueprints compliance run backed by embedded Cassandra; supplies the graph
// configuration and clears embedded storage between runs.
@Override
protected WriteConfiguration getGraphConfig() {
// Keyspace/config is namespaced by the test class's simple name.
return CassandraStorageSetup.getEmbeddedGraphConfiguration(getClass().getSimpleName());
}
@Override
public void extraCleanUp(String uid) throws BackendException {
// Open a throwaway embedded store manager just to wipe storage, then close it.
ModifiableConfiguration mc =
new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS, getGraphConfig(), Restriction.NONE);
StoreManager m = new CassandraEmbeddedStoreManager(mc);
m.clearStorage();
m.close();
}
} | 0true
| titan-cassandra_src_test_java_com_thinkaurelius_titan_blueprints_EmbeddedBlueprintsTest.java |
284 | @SuppressWarnings("unchecked")
public class OScriptDocumentDatabaseWrapper {
protected ODatabaseDocumentTx database;
public OScriptDocumentDatabaseWrapper(final ODatabaseDocumentTx database) {
this.database = database;
}
public OScriptDocumentDatabaseWrapper(final ODatabaseRecordTx database) {
this.database = new ODatabaseDocumentTx(database);
}
public OScriptDocumentDatabaseWrapper(final String iURL) {
this.database = new ODatabaseDocumentTx(iURL);
}
public void switchUser(final String iUserName, final String iUserPassword) {
if (!database.isClosed())
database.close();
database.open(iUserName, iUserPassword);
}
public OIdentifiable[] query(final String iText) {
return query(iText, (Object[]) null);
}
public OIdentifiable[] query(final String iText, final Object... iParameters) {
final List<OIdentifiable> res = database.query(new OSQLSynchQuery<Object>(iText), convertParameters(iParameters));
if (res == null)
return new OIdentifiable[] {};
return res.toArray(new OIdentifiable[res.size()]);
}
/**
* To maintain the compatibility with JS API.
*/
public Object executeCommand(final String iText) {
return command(iText, (Object[]) null);
}
/**
* To maintain the compatibility with JS API.
*/
public Object executeCommand(final String iText, final Object... iParameters) {
return command(iText, iParameters);
}
public Object command(final String iText) {
return command(iText, (Object[]) null);
}
public Object command(final String iText, final Object... iParameters) {
Object res = database.command(new OCommandSQL(iText)).execute(convertParameters(iParameters));
if (res instanceof List) {
final List<OIdentifiable> list = (List<OIdentifiable>) res;
return list.toArray(new OIdentifiable[list.size()]);
}
return res;
}
public Object process(final String iType, final String iName, final Object... iParameters) {
final OComposableProcessor process = (OComposableProcessor) OProcessorManager.getInstance().get(iType);
if (process == null)
throw new OProcessException("Process type '" + iType + "' is undefined");
final OBasicCommandContext context = new OBasicCommandContext();
if (iParameters != null) {
int argIdx = 0;
for (Object p : iParameters)
context.setVariable("arg" + (argIdx++), p);
}
Object res;
try {
res = process.processFromFile(iName, context, false);
} catch (Exception e) {
throw new OProcessException("Error on processing '" + iName + "' field of '" + getName() + "' block", e);
}
return res;
}
public OIndex<?> getIndex(final String iName) {
return database.getMetadata().getIndexManager().getIndex(iName);
}
public boolean exists() {
return database.exists();
}
public ODocument newInstance() {
return database.newInstance();
}
public void reload() {
database.reload();
}
public ODocument newInstance(String iClassName) {
return database.newInstance(iClassName);
}
public ORecordIteratorClass<ODocument> browseClass(String iClassName) {
return database.browseClass(iClassName);
}
public STATUS getStatus() {
return database.getStatus();
}
public ORecordIteratorClass<ODocument> browseClass(String iClassName, boolean iPolymorphic) {
return database.browseClass(iClassName, iPolymorphic);
}
public <THISDB extends ODatabase> THISDB setStatus(STATUS iStatus) {
return (THISDB) database.setStatus(iStatus);
}
public void drop() {
database.drop();
}
public String getName() {
return database.getName();
}
public int addCluster(String iType, String iClusterName, String iLocation, String iDataSegmentName, Object... iParameters) {
return database.addCluster(iType, iClusterName, iLocation, iDataSegmentName, iParameters);
}
public String getURL() {
return database.getURL();
}
public ORecordIteratorCluster<ODocument> browseCluster(String iClusterName) {
return database.browseCluster(iClusterName);
}
public boolean isClosed() {
return database.isClosed();
}
public <THISDB extends ODatabase> THISDB open(String iUserName, String iUserPassword) {
return (THISDB) database.open(iUserName, iUserPassword);
}
public ODocument save(final Map<String, Object> iObject) {
return database.save(new ODocument().fields(iObject));
}
public ODocument save(final String iString) {
// return database.save((ORecordInternal<?>) new ODocument().fromJSON(iString));
return database.save((ORecordInternal<?>) new ODocument().fromJSON(iString, true));
}
public ODocument save(ORecordInternal<?> iRecord) {
return database.save(iRecord);
}
public boolean dropCluster(String iClusterName, final boolean iTruncate) {
return database.dropCluster(iClusterName, iTruncate);
}
public <THISDB extends ODatabase> THISDB create() {
return (THISDB) database.create();
}
public boolean dropCluster(int iClusterId, final boolean iTruncate) {
return database.dropCluster(iClusterId, true);
}
public void close() {
database.close();
}
public int getClusters() {
return database.getClusters();
}
public Collection<String> getClusterNames() {
return database.getClusterNames();
}
public int addDataSegment(String iName, String iLocation) {
return database.addDataSegment(iName, iLocation);
}
public String getClusterType(String iClusterName) {
return database.getClusterType(iClusterName);
}
public OTransaction getTransaction() {
return database.getTransaction();
}
public int getDataSegmentIdByName(String iDataSegmentName) {
return database.getDataSegmentIdByName(iDataSegmentName);
}
public ODatabaseComplex<ORecordInternal<?>> begin() {
return database.begin();
}
public String getDataSegmentNameById(int iDataSegmentId) {
return database.getDataSegmentNameById(iDataSegmentId);
}
public int getClusterIdByName(String iClusterName) {
return database.getClusterIdByName(iClusterName);
}
public boolean isMVCC() {
return database.isMVCC();
}
public String getClusterNameById(int iClusterId) {
return database.getClusterNameById(iClusterId);
}
public <RET extends ODatabaseComplex<?>> RET setMVCC(boolean iValue) {
return (RET) database.setMVCC(iValue);
}
public long getClusterRecordSizeById(int iClusterId) {
return database.getClusterRecordSizeById(iClusterId);
}
public boolean isValidationEnabled() {
return database.isValidationEnabled();
}
public long getClusterRecordSizeByName(String iClusterName) {
return database.getClusterRecordSizeByName(iClusterName);
}
public <RET extends ODatabaseRecord> RET setValidationEnabled(boolean iValue) {
return (RET) database.setValidationEnabled(iValue);
}
public OUser getUser() {
return database.getUser();
}
public void setUser(OUser user) {
database.setUser(user);
}
public ODocument save(ORecordInternal<?> iRecord, OPERATION_MODE iMode, boolean iForceCreate,
final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
return database.save(iRecord, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
}
public OMetadata getMetadata() {
return database.getMetadata();
}
public ODictionary<ORecordInternal<?>> getDictionary() {
return database.getDictionary();
}
public byte getRecordType() {
return database.getRecordType();
}
public ODatabaseComplex<ORecordInternal<?>> delete(ORID iRid) {
return database.delete(iRid);
}
public boolean dropDataSegment(String name) {
return database.dropDataSegment(name);
}
public <RET extends ORecordInternal<?>> RET load(ORID iRecordId) {
return (RET) database.load(iRecordId);
}
public <RET extends ORecordInternal<?>> RET load(ORID iRecordId, String iFetchPlan) {
return (RET) database.load(iRecordId, iFetchPlan);
}
public <RET extends ORecordInternal<?>> RET load(ORID iRecordId, String iFetchPlan, boolean iIgnoreCache) {
return (RET) database.load(iRecordId, iFetchPlan, iIgnoreCache);
}
public <RET extends ORecordInternal<?>> RET getRecord(OIdentifiable iIdentifiable) {
return (RET) database.getRecord(iIdentifiable);
}
public int getDefaultClusterId() {
return database.getDefaultClusterId();
}
public <RET extends ORecordInternal<?>> RET load(ORecordInternal<?> iRecord) {
return (RET) database.load(iRecord);
}
public boolean declareIntent(OIntent iIntent) {
return database.declareIntent(iIntent);
}
public <RET extends ORecordInternal<?>> RET load(ORecordInternal<?> iRecord, String iFetchPlan) {
return (RET) database.load(iRecord, iFetchPlan);
}
public <RET extends ORecordInternal<?>> RET load(ORecordInternal<?> iRecord, String iFetchPlan, boolean iIgnoreCache) {
return (RET) database.load(iRecord, iFetchPlan, iIgnoreCache);
}
public ODatabaseComplex<?> setDatabaseOwner(ODatabaseComplex<?> iOwner) {
return database.setDatabaseOwner(iOwner);
}
public void reload(ORecordInternal<?> iRecord) {
database.reload(iRecord);
}
public void reload(ORecordInternal<?> iRecord, String iFetchPlan, boolean iIgnoreCache) {
database.reload(iRecord, iFetchPlan, iIgnoreCache);
}
public Object setProperty(String iName, Object iValue) {
return database.setProperty(iName, iValue);
}
public ODocument save(ORecordInternal<?> iRecord, String iClusterName) {
return database.save(iRecord, iClusterName);
}
public Object getProperty(String iName) {
return database.getProperty(iName);
}
public Iterator<Entry<String, Object>> getProperties() {
return database.getProperties();
}
public Object get(ATTRIBUTES iAttribute) {
return database.get(iAttribute);
}
public <THISDB extends ODatabase> THISDB set(ATTRIBUTES attribute, Object iValue) {
return (THISDB) database.set(attribute, iValue);
}
public void setInternal(ATTRIBUTES attribute, Object iValue) {
database.setInternal(attribute, iValue);
}
public boolean isRetainRecords() {
return database.isRetainRecords();
}
public ODatabaseRecord setRetainRecords(boolean iValue) {
return database.setRetainRecords(iValue);
}
public long getSize() {
return database.getSize();
}
public ORecordInternal<?> getRecordByUserObject(Object iUserObject, boolean iCreateIfNotAvailable) {
return database.getRecordByUserObject(iUserObject, iCreateIfNotAvailable);
}
public ODocument save(ORecordInternal<?> iRecord, String iClusterName, OPERATION_MODE iMode, boolean iForceCreate,
final ORecordCallback<? extends Number> iRecordCreatedCallback, ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
return database.save(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
}
public ODataSegmentStrategy getDataSegmentStrategy() {
return database.getDataSegmentStrategy();
}
public void setDataSegmentStrategy(ODataSegmentStrategy dataSegmentStrategy) {
database.setDataSegmentStrategy(dataSegmentStrategy);
}
public ODatabaseDocumentTx delete(ODocument iRecord) {
return database.delete(iRecord);
}
public long countClass(String iClassName) {
return database.countClass(iClassName);
}
public ODatabaseComplex<ORecordInternal<?>> commit() {
return database.commit();
}
public ODatabaseComplex<ORecordInternal<?>> rollback() {
return database.rollback();
}
public String getType() {
return database.getType();
}
protected Object[] convertParameters(final Object[] iParameters) {
if (iParameters != null)
for (int i = 0; i < iParameters.length; ++i) {
final Object p = iParameters[i];
if (p != null) {
// if (p instanceof sun.org.mozilla.javascript.internal.IdScriptableObject) {
// iParameters[i] = ((sun.org.mozilla.javascript.internal.NativeDate) p).to;
// }
}
}
return iParameters;
}
} | 0true
| core_src_main_java_com_orientechnologies_orient_core_command_script_OScriptDocumentDatabaseWrapper.java |
2,571 | clusterService.submitStateUpdateTask("zen-disco-join (elected_as_master)", Priority.URGENT, new ProcessedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
.localNodeId(localNode.id())
.masterNodeId(localNode.id())
// put our local node
.put(localNode);
// update the fact that we are the master...
latestDiscoNodes = builder.build();
ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeGlobalBlock(NO_MASTER_BLOCK).build();
return ClusterState.builder(currentState).nodes(latestDiscoNodes).blocks(clusterBlocks).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
sendInitialStateEventIfNeeded();
}
}); | 1no label
| src_main_java_org_elasticsearch_discovery_zen_ZenDiscovery.java |
275 | public class JMSEmailServiceProducerImpl implements JMSEmailServiceProducer {
private JmsTemplate emailServiceTemplate;
private Destination emailServiceDestination;
public void send(@SuppressWarnings("rawtypes") final HashMap props) {
emailServiceTemplate.send(emailServiceDestination, new MessageCreator() {
public Message createMessage(Session session) throws JMSException {
ObjectMessage message = session.createObjectMessage(props);
EmailInfo info = (EmailInfo) props.get(EmailPropertyType.INFO.getType());
message.setJMSPriority(Integer.parseInt(info.getSendAsyncPriority()));
return message;
}
});
}
/**
* @return the emailServiceTemplate
*/
public JmsTemplate getEmailServiceTemplate() {
return emailServiceTemplate;
}
/**
* @param emailServiceTemplate the emailServiceTemplate to set
*/
public void setEmailServiceTemplate(JmsTemplate emailServiceTemplate) {
this.emailServiceTemplate = emailServiceTemplate;
}
/**
* @return the emailServiceDestination
*/
public Destination getEmailServiceDestination() {
return emailServiceDestination;
}
/**
* @param emailServiceDestination the emailServiceDestination to set
*/
public void setEmailServiceDestination(Destination emailServiceDestination) {
this.emailServiceDestination = emailServiceDestination;
}
} | 1no label
| common_src_main_java_org_broadleafcommerce_common_email_service_jms_JMSEmailServiceProducerImpl.java |
1,654 | md.accept(new MetadataVisitor() {
@Override
public void visit(BasicFieldMetadata fmd) {
request.setType(Type.STANDARD);
request.setCeilingEntityClassname(fmd.getForeignKeyClass());
}
@Override
public void visit(BasicCollectionMetadata fmd) {
ForeignKey foreignKey = (ForeignKey) fmd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
request.setType(Type.STANDARD);
request.setCeilingEntityClassname(fmd.getCollectionCeilingEntity());
request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes());
request.setForeignKey(foreignKey);
}
@Override
public void visit(AdornedTargetCollectionMetadata fmd) {
AdornedTargetList adornedList = (AdornedTargetList) fmd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
request.setType(Type.ADORNED);
request.setCeilingEntityClassname(fmd.getCollectionCeilingEntity());
request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes());
request.setAdornedList(adornedList);
}
@Override
public void visit(MapMetadata fmd) {
MapStructure mapStructure = (MapStructure) fmd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE);
ForeignKey foreignKey = (ForeignKey) fmd.getPersistencePerspective().
getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
request.setType(Type.MAP);
request.setCeilingEntityClassname(foreignKey.getForeignKeyClass());
request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes());
request.setMapStructure(mapStructure);
request.setForeignKey(foreignKey);
}
}); | 1no label
| admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_domain_PersistencePackageRequest.java |
559 | private static final ToXContent.Params includeDefaultsParams = new ToXContent.Params() {
final static String INCLUDE_DEFAULTS = "include_defaults";
@Override
public String param(String key) {
if (INCLUDE_DEFAULTS.equals(key)) {
return "true";
}
return null;
}
@Override
public String param(String key, String defaultValue) {
if (INCLUDE_DEFAULTS.equals(key)) {
return "true";
}
return defaultValue;
}
@Override
public boolean paramAsBoolean(String key, boolean defaultValue) {
if (INCLUDE_DEFAULTS.equals(key)) {
return true;
}
return defaultValue;
}
public Boolean paramAsBoolean(String key, Boolean defaultValue) {
if (INCLUDE_DEFAULTS.equals(key)) {
return true;
}
return defaultValue;
}
@Override @Deprecated
public Boolean paramAsBooleanOptional(String key, Boolean defaultValue) {
return paramAsBoolean(key, defaultValue);
}
}; | 1no label
| src_main_java_org_elasticsearch_action_admin_indices_mapping_get_TransportGetFieldMappingsAction.java |
46 | public class OCaseInsentiveComparator implements Comparator<String> {
public int compare(final String stringOne, final String stringTwo) {
return stringOne.compareToIgnoreCase(stringTwo);
}
} | 0true
| commons_src_main_java_com_orientechnologies_common_comparator_OCaseInsentiveComparator.java |
1,353 | tokenStream.getTokens()) {
@Override
protected boolean reuseExistingDescriptorModels() {
return true;
}
}; | 1no label
| plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_ProjectSourceFile.java |
262 | @Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = SYSPROP_INTEGRATION)
public @interface IntegrationTests {
} | 0true
| src_test_java_org_apache_lucene_util_AbstractRandomizedTest.java |
86 | public class GloballySharedInputStream extends InputStream {
private InputStream parentInputStream;
public GloballySharedInputStream(InputStream parentInputStream) {
this.parentInputStream = parentInputStream;
}
public int available() throws IOException {
return parentInputStream.available();
}
public void close() throws IOException {
parentInputStream.close();
}
public void mark(int arg0) {
parentInputStream.mark(arg0);
}
public boolean markSupported() {
return parentInputStream.markSupported();
}
public int read() throws IOException {
return parentInputStream.read();
}
public int read(byte[] arg0, int arg1, int arg2) throws IOException {
return parentInputStream.read(arg0, arg1, arg2);
}
public int read(byte[] arg0) throws IOException {
return parentInputStream.read(arg0);
}
public void reset() throws IOException {
parentInputStream.reset();
}
public long skip(long arg0) throws IOException {
return parentInputStream.skip(arg0);
}
} | 0true
| admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_service_GloballySharedInputStream.java |
371 | future.andThen(new ExecutionCallback<Integer>() {
@Override
public void onResponse(Integer response) {
try {
result[0] = response.intValue();
} finally {
semaphore.release();
}
}
@Override
public void onFailure(Throwable t) {
semaphore.release();
}
}); | 0true
| hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_DistributedMapperClientMapReduceTest.java |
End of preview. Expand
in Dataset Viewer.
- Downloads last month
- 53