Dataset schema (two string columns):

  buggy_function: string, lengths 1 to 391k
  fixed_function: string, lengths 0 to 392k

Each pair of rows below is a buggy Java function followed by its fixed version; long cells are truncated with an ellipsis, so some pairs differ only past the visible prefix.
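There is no surrounding documentation here, but a short, hedged sketch of consuming such pairs may help. The code below assumes a JSON Lines export named pairs.jsonl with the two columns above as string fields; the file name, the JSONL format, and the Jackson dependency are all assumptions, not part of the dataset. It reports the cell lengths and the first point of divergence between each buggy/fixed pair.

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.stream.Stream;

    public class BugFixPairReader {
        public static void main(String[] args) throws IOException {
            ObjectMapper mapper = new ObjectMapper();
            // "pairs.jsonl" is a hypothetical export: one JSON object per line,
            // with "buggy_function" and "fixed_function" string fields.
            try (Stream<String> lines = Files.lines(Path.of("pairs.jsonl"))) {
                lines.forEach(line -> {
                    try {
                        JsonNode row = mapper.readTree(line);
                        String buggy = row.get("buggy_function").asText();
                        String fixed = row.get("fixed_function").asText();
                        // Find the first character position where the two versions
                        // diverge: a cheap proxy for locating the edit.
                        int i = 0;
                        int max = Math.min(buggy.length(), fixed.length());
                        while (i < max && buggy.charAt(i) == fixed.charAt(i)) {
                            i++;
                        }
                        System.out.printf("buggy=%d chars, fixed=%d chars, first divergence at %d%n",
                                buggy.length(), fixed.length(), i);
                    } catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                });
            }
        }
    }

A first-divergence index only approximates the fix location; a proper line diff would be more faithful, but it is enough to surface the small one-token repairs visible in many of the rows below.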
public static synchronized void printException(String where, Exception e) { if (e instanceof SQLException) { SQLException se = (SQLException) e; if (se.getSQLState() != null) { // SQLSTATE is NULL for a if (se.getSQLState().equals("40001")) System.out.println("deadlocked detected"); if (se.getSQLS...
public static synchronized void printException(String where, Exception e) { if (e instanceof SQLException) { SQLException se = (SQLException) e; if (se.getSQLState() != null) { // SQLSTATE is NULL for a if (se.getSQLState().equals("40001")) System.out.println("deadlocked detected"); if (se.getSQLS...
public MonotonicAppendingLongBuffer(int initialPageCount, int pageSize) { super(initialPageCount, pageSize); averages = new float[pageSize]; }
public MonotonicAppendingLongBuffer(int initialPageCount, int pageSize) { super(initialPageCount, pageSize); averages = new float[initialPageCount]; }
public void testBuild() throws IOException { final String LF = System.getProperty("line.separator"); String input = "oneword" + LF + "twoword" + LF + "threeword"; PlainTextDictionary ptd = new PlainTextDictionary(new StringReader(input)); Directory ramDir = newDirectory(); SpellChecker spellChecke...
public void testBuild() throws IOException { final String LF = System.getProperty("line.separator"); String input = "oneword" + LF + "twoword" + LF + "threeword"; PlainTextDictionary ptd = new PlainTextDictionary(new StringReader(input)); Directory ramDir = newDirectory(); SpellChecker spellChecke...
public void testExtendedResultsCount() throws Exception { assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellCheckComponent.SPELLCHECK_BUILD, "true", "q","bluo", SpellCheckComponent.SPELLCHECK_COUNT,"5", SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS,"false") ,"/spellcheck/suggestions/...
public void testExtendedResultsCount() throws Exception { assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellCheckComponent.SPELLCHECK_BUILD, "true", "q","bluo", SpellCheckComponent.SPELLCHECK_COUNT,"5", SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS,"false") ,"/spellcheck/suggestions/...
public String[] listAll() { ensureOpen(); String[] res; if (writer != null) { res = writer.listAll(); } else { res = entries.keySet().toArray(new String[entries.size()]); // Add the segment name String seg = fileName.substring(0, fileName.indexOf('.')); for (int i = 0; i...
public String[] listAll() { ensureOpen(); String[] res; if (writer != null) { res = writer.listAll(); } else { res = entries.keySet().toArray(new String[entries.size()]); // Add the segment name String seg = IndexFileNames.parseSegmentName(fileName); for (int i = 0; i < ...
public static void prepareClass() throws Exception { LOCAL = FBUtilities.getLocalAddress(); tablename = "Keyspace4"; StorageService.instance.initServer(); // generate a fake endpoint for which we can spoof receiving/sending trees REMOTE = InetAddress.getByName("127.0.0.2"...
public static void prepareClass() throws Exception { LOCAL = FBUtilities.getLocalAddress(); tablename = "Keyspace5"; StorageService.instance.initServer(); // generate a fake endpoint for which we can spoof receiving/sending trees REMOTE = InetAddress.getByName("127.0.0.2"...
public TestOrdValues(String name) { super(name); }
public TestOrdValues(String name) { super(name, false); }
public StorageService() { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { mbs.registerMBean(this, new ObjectName("org.apache.cassandra.service:type=StorageService")); } catch (Exception e) { throw new RuntimeException(e)...
public StorageService() { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); try { mbs.registerMBean(this, new ObjectName("org.apache.cassandra.service:type=StorageService")); } catch (Exception e) { throw new RuntimeException(e)...
public void testSearch() throws Exception { Query query = QueryParser.parse("test", "contents", analyzer); Hits hits = searcher.search(query); assertEquals("Find document(s)", 2, hits.length()); }
public void testSearch() throws Exception { Query query = new QueryParser("contents",analyzer).parse("test"); Hits hits = searcher.search(query); assertEquals("Find document(s)", 2, hits.length()); }
public void setScorer(Scorer scorer) { super.setScorer(scorer); // TODO: might be cleaner to lazy-init 'source' and set scorer after? assert readerContext != null; try { Map<String,Object> context = new HashMap<String,Object>(); assert scorer != null; context.put("scorer", new ScoreF...
public void setScorer(Scorer scorer) { super.setScorer(scorer); // TODO: might be cleaner to lazy-init 'source' and set scorer after? assert readerContext != null; try { Map<String,Object> context = new HashMap<String,Object>(); assert scorer != null; context.put("scorer", scorer); ...
public ConcurrentUpdateSolrServer(String solrServerUrl, HttpClient client, int queueSize, int threadCount) { this(solrServerUrl, null, queueSize, threadCount, Executors.newCachedThreadPool( new SolrjNamedThreadFactory("concurrentUpdateScheduler"))); shutdownExecutor = true; }
public ConcurrentUpdateSolrServer(String solrServerUrl, HttpClient client, int queueSize, int threadCount) { this(solrServerUrl, client, queueSize, threadCount, Executors.newCachedThreadPool( new SolrjNamedThreadFactory("concurrentUpdateScheduler"))); shutdownExecutor = true; }
public Token getBootstrapToken() { Range range = getLocalPrimaryRange(); List<DecoratedKey> keys = new ArrayList<DecoratedKey>(); for (ColumnFamilyStore cfs : ColumnFamilyStore.all()) { for (IndexSummary.KeyPosition info: cfs.allIndexPositions()) { ...
public Token getBootstrapToken() { Range range = getLocalPrimaryRange(); List<DecoratedKey> keys = new ArrayList<DecoratedKey>(); for (ColumnFamilyStore cfs : ColumnFamilyStore.all()) { for (IndexSummary.KeyPosition info: cfs.allIndexPositions()) { ...
public void setText(CharacterIterator newText) { start = newText.getBeginIndex(); end = newText.getEndIndex(); text = newText; current = newText.getIndex(); }
public void setText(CharacterIterator newText) { start = newText.getBeginIndex(); end = newText.getEndIndex(); text = newText; current = start; }
public void testUpdateDelteSlices() { DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue(); final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER; Integer[] ids = new Integer[size]; for (int i = 0; i < ids.length; i++) { ids[i] = random().nextInt(); } DeleteSlic...
public void testUpdateDelteSlices() { DocumentsWriterDeleteQueue queue = new DocumentsWriterDeleteQueue(); final int size = 200 + random().nextInt(500) * RANDOM_MULTIPLIER; Integer[] ids = new Integer[size]; for (int i = 0; i < ids.length; i++) { ids[i] = random().nextInt(); } DeleteSlic...
public String toString() { return getFilename() + "/" + StringUtils.join(sections, ",") + "\n\t progress=" + progress + "/" + size + " - " + progress*100/size + "%"; }
public String toString() { return getFilename() + " sections=" + sections.size() + " progress=" + progress + "/" + size + " - " + progress*100/size + "%"; }
public int run(String[] args) throws Exception { /** Option seqOpt = obuilder.withLongName("seqFile").withRequired(false).withArgument( abuilder.withName("seqFile").withMinimum(1).withMaximum(1).create()).withDescription( "The Sequence File containing the Vectors").withShortName("s").create(); ...
public int run(String[] args) throws Exception { /** Option seqOpt = obuilder.withLongName("seqFile").withRequired(false).withArgument( abuilder.withName("seqFile").withMinimum(1).withMaximum(1).create()).withDescription( "The Sequence File containing the Vectors").withShortName("s").create(); ...
private void initParents(IndexReader reader, int first) throws IOException { if (reader.maxDoc() == first) { return; } // it's ok to use MultiFields because we only iterate on one posting list. // breaking it to loop over the leaves() only complicates code for no // apparent gain. D...
private void initParents(IndexReader reader, int first) throws IOException { if (reader.maxDoc() == first) { return; } // it's ok to use MultiFields because we only iterate on one posting list. // breaking it to loop over the leaves() only complicates code for no // apparent gain. D...
private int getConnFromDatabaseName() throws DRDAProtocolException { Properties p = new Properties(); databaseAccessException = null; //if we haven't got the correlation token yet, use session number for drdaID if (session.drdaID == null) session.drdaID = leftBrace + session.connNum + rightBrace; p.put(A...
private int getConnFromDatabaseName() throws DRDAProtocolException { Properties p = new Properties(); databaseAccessException = null; //if we haven't got the correlation token yet, use session number for drdaID if (session.drdaID == null) session.drdaID = leftBrace + session.connNum + rightBrace; p.put(A...
private void showFromZooKeeper(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer coreContainer) throws KeeperException, InterruptedException, UnsupportedEncodingException { SolrZkClient zkClient = coreContainer.getZkController().getZkClient(); String adminFile = getAdminFileFromZooKeepe...
private void showFromZooKeeper(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer coreContainer) throws KeeperException, InterruptedException, UnsupportedEncodingException { SolrZkClient zkClient = coreContainer.getZkController().getZkClient(); String adminFile = getAdminFileFromZooKeepe...
private void parseSQLDTA(DRDAStatement stmt) throws DRDAProtocolException,SQLException { try { parseSQLDTA_work(stmt); } catch (SQLException se) { skipRemainder(false); throw se; } }
private void parseSQLDTA(DRDAStatement stmt) throws DRDAProtocolException,SQLException { try { parseSQLDTA_work(stmt); } catch (SQLException se) { skipRemainder(true); throw se; } }
public int compare(ColumnFamilyStore o1, ColumnFamilyStore o2) { long size1 = o1.getTotalMemtableLiveSize(); long size2 = o2.getTotalMemtableLiveSize(); if (size1 < size2) return -1; i...
public int compare(ColumnFamilyStore o1, ColumnFamilyStore o2) { long size1 = o1.getTotalMemtableLiveSize(); long size2 = o2.getTotalMemtableLiveSize(); if (size1 < size2) return -1; i...
public void addSSTable(SSTableReader sstable) { ssTables_.add(sstable); CompactionManager.instance.submitMinor(this); }
public void addSSTable(SSTableReader sstable) { ssTables_.add(sstable); CompactionManager.instance.submitMinorIfNeeded(this); }
public void testCompactions() throws IOException, ExecutionException, InterruptedException { CompactionManager.instance.disableAutoCompaction(); // this test does enough rows to force multiple block indexes to be used Table table = Table.open(TABLE1); ColumnFamilyStore store = t...
public void testCompactions() throws IOException, ExecutionException, InterruptedException { CompactionManager.instance.disableAutoCompaction(); // this test does enough rows to force multiple block indexes to be used Table table = Table.open(TABLE1); ColumnFamilyStore store = t...
private final SimpleDocValuesFormat defaultDVFormat = SimpleDocValuesFormat.forName("Memory"); // nocommit need simpleNormsFormat }
private final SimpleDocValuesFormat defaultDVFormat = SimpleDocValuesFormat.forName("Lucene41"); // nocommit need simpleNormsFormat }
public List<String> getIncomingFiles(String host) throws IOException { List<String> files = new ArrayList<String>(); for (PendingFile pf : StreamInManager.getIncomingFiles(InetAddress.getByName(host))) { files.add(String.format("%s: %s", pf.getDescriptor().ksname, pf.toString...
public List<String> getIncomingFiles(String host) throws IOException { List<String> files = new ArrayList<String>(); for (PendingFile pf : StreamInManager.getIncomingFiles(InetAddress.getByName(host))) { files.add(String.format("%s: %s", pf.desc.ksname, pf.toString())); ...
public LinkedHashMap<PendingFile, PendingFile> getContextMapping(PendingFile[] remoteFiles) throws IOException { /* Create a local sstable for each remote sstable */ LinkedHashMap<PendingFile, PendingFile> mapping = new LinkedHashMap<PendingFile, PendingFile>(); for (PendingFile remote :...
public LinkedHashMap<PendingFile, PendingFile> getContextMapping(PendingFile[] remoteFiles) throws IOException { /* Create a local sstable for each remote sstable */ LinkedHashMap<PendingFile, PendingFile> mapping = new LinkedHashMap<PendingFile, PendingFile>(); for (PendingFile remote :...
public void geohashRecursiveRandom() throws IOException { init(12); //1. Iterate test with the cluster at some worldly point of interest Point[] clusterCenters = new Point[]{ctx.makePoint(-180,0), ctx.makePoint(0,90), ctx.makePoint(0,-90)}; for (Point clusterCenter : clusterCenters) { //2. Iter...
public void geohashRecursiveRandom() throws IOException { init(12); //1. Iterate test with the cluster at some worldly point of interest Point[] clusterCenters = new Point[]{ctx.makePoint(-180,0), ctx.makePoint(0,90), ctx.makePoint(0,-90)}; for (Point clusterCenter : clusterCenters) { //2. Iter...
public CoreContainer initialize() throws IOException, ParserConfigurationException, SAXException { CoreContainer cores = null; String instanceDir = SolrResourceLoader.locateInstanceDir(); File fconf = new File(instanceDir, solrConfigFilename == null? "solr.xml": solrConfigFilename); log.info...
public CoreContainer initialize() throws IOException, ParserConfigurationException, SAXException { CoreContainer cores = null; String instanceDir = SolrResourceLoader.locateInstanceDir(); File fconf = new File(instanceDir, solrConfigFilename == null? "solr.xml": solrConfigFilename); log.info...
public static HashFunction[] createHashFunctions(HashType type, int numFunctions) { HashFunction[] hashFunction = new HashFunction[numFunctions]; Random seed = new Random(11); switch (type) { case LINEAR: for (int i = 0; i < numFunctions; i++) { hashFunction[i] = new LinearHash(see...
public static HashFunction[] createHashFunctions(HashType type, int numFunctions) { HashFunction[] hashFunction = new HashFunction[numFunctions]; Random seed = RandomUtils.getRandom(11); switch (type) { case LINEAR: for (int i = 0; i < numFunctions; i++) { hashFunction[i] = new Lin...
public List<TokenRange> describe_ring(String keyspace)throws InvalidRequestException { if (!DatabaseDescriptor.getNonSystemTables().contains(keyspace)) throw new InvalidRequestException("There is no ring for the keyspace: " + keyspace); List<TokenRange> ranges = new ArrayList<TokenRa...
public List<TokenRange> describe_ring(String keyspace)throws InvalidRequestException { if (keyspace == null || !DatabaseDescriptor.getNonSystemTables().contains(keyspace)) throw new InvalidRequestException("There is no ring for the keyspace: " + keyspace); List<TokenRange> ranges = n...
public void testSortedBytes() throws IOException { DocValuesType type = DocValuesType.SORTED; final Directory d = newDirectory(); IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); IndexWriter w = new IndexWriter(d, cfg); int numDocs...
public void testSortedBytes() throws IOException { DocValuesType type = DocValuesType.SORTED; final Directory d = newDirectory(); IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); IndexWriter w = new IndexWriter(d, cfg); int numDocs...
public void testAddDocument() throws Exception { Document testDoc = new Document(); DocHelper.setupDoc(testDoc); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); writer.addDocument(testDoc); writer.commit(); SegmentInfoPerCommit...
public void testAddDocument() throws Exception { Document testDoc = new Document(); DocHelper.setupDoc(testDoc); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); writer.addDocument(testDoc); writer.commit(); SegmentInfoPerCommit...
public void testFloatNorms() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); Similarity provider = new MySimProvider(); config.setSimilarity(provider); RandomIndexWriter writer = new Ra...
public void testFloatNorms() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); Similarity provider = new MySimProvider(); config.setSimilarity(provider); RandomIndexWriter writer = new Ra...
public void test() throws Exception { NumericDocValues fooNorms = MultiSimpleDocValues.simpleNormValues(reader, "foo"); assertNotNull(fooNorms); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).longValue(), fooNorms.get(i)); } }
public void test() throws Exception { NumericDocValues fooNorms = MultiDocValues.getNormValues(reader, "foo"); assertNotNull(fooNorms); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).longValue(), fooNorms.get(i)); } }
public void test() throws Exception { NumericDocValues fooNorms = MultiSimpleDocValues.simpleNormValues(reader, "foo"); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).intValue(), fooNorms.get(i) & 0xff); } }
public void test() throws Exception { NumericDocValues fooNorms = MultiDocValues.getNormValues(reader, "foo"); for (int i = 0; i < reader.maxDoc(); i++) { assertEquals(expected.get(i).intValue(), fooNorms.get(i) & 0xff); } }
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); SimpleFragListBuilder sflb = new SimpleFragListBu...
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); SimpleFragListBuilder sflb = new SimpleFragListBu...
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); assertEquals( 1, stack.termList.size() ); assertEquals( "d(6,7,3)", stack.pop().toString() ); }
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); assertEquals( 1, stack.termList.size() ); assertEquals( "d(9,10,3)", stack.pop().toString() ); }
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); assertEquals( 1, fpl.phraseList.size() ); ass...
public void test1PhraseShortMV() throws Exception { makeIndexShortMV(); FieldQuery fq = new FieldQuery( tq( "d" ), true, true ); FieldTermStack stack = new FieldTermStack( reader, 0, F, fq ); FieldPhraseList fpl = new FieldPhraseList( stack, fq ); assertEquals( 1, fpl.phraseList.size() ); ass...
private void unCache(String fileName) throws IOException { // Only let one thread uncache at a time; this only // happens during commit() or close(): synchronized(uncacheLock) { if (VERBOSE) { System.out.println("nrtdir.unCache name=" + fileName); } if (!cache.fileExists(fileName...
private void unCache(String fileName) throws IOException { // Only let one thread uncache at a time; this only // happens during commit() or close(): synchronized(uncacheLock) { if (VERBOSE) { System.out.println("nrtdir.unCache name=" + fileName); } if (!cache.fileExists(fileName...
public void testDerby3000() throws SQLException, IOException { ResultSet rs; // Derby-3000 make sure we process only valid TableType values and // process them correctly. DatabaseMetaData dmd = getConnection().getMetaData(); Statement s = createStatement(); s.executeUpdate("CREATE TABLE APP.TAB (i int)")...
public void testDerby3000() throws SQLException, IOException { ResultSet rs; // Derby-3000 make sure we process only valid TableType values and // process them correctly. DatabaseMetaData dmd = getConnection().getMetaData(); Statement s = createStatement(); s.executeUpdate("CREATE TABLE APP.TAB (i int)")...
public void testClobCreateLocatorSP() throws SQLException { //initialize the locator to a default value. int locator = -1; //call the stored procedure to return the created locator. CallableStatement cs = prepareCall ("? = CALL SYSIBM.CLOBCREATELOCATOR()"); cs.re...
public void testClobCreateLocatorSP() throws SQLException { //initialize the locator to a default value. int locator = -1; //call the stored procedure to return the created locator. CallableStatement cs = prepareCall ("? = CALL SYSIBM.CLOBCREATELOCATOR()"); cs.re...
public void testFragmentCreation() throws Exception { Bundle exportBundle = makeBundleWithExports("export.bundle", "1.2.3", "export.package;version=\"1.0.0\";singleton:=true"); Dictionary fragmentHeaders = makeFragmentFromExportBundle(exportBundle) .getHeaders(); ...
public void testFragmentCreation() throws Exception { Bundle exportBundle = makeBundleWithExports("export.bundle", "1.2.3", "export.package;version=\"1.0.0\";uses:=\"foo.jar,bar.jar\";singleton:=true"); Dictionary fragmentHeaders = makeFragmentFromExportBundle(exportBundle) ...
public static String docValuesId(String segmentsName, int fieldId) { return segmentsName + "-" + fieldId; }
public static String docValuesId(String segmentsName, int fieldId) { return segmentsName + "_" + fieldId; }
private boolean[] expandBooleanArray(boolean[] array, int newLength) { if (array == null) { boolean[] newArray = new boolean[newLength]; return newArray; } if (array.length < newLength) { boolean[] newArray = new boolean[newLength]; System.arra...
private boolean[] expandBooleanArray(boolean[] array, int newLength) { if (array == null) { boolean[] newArray = new boolean[newLength]; return newArray; } if (array.length < newLength) { boolean[] newArray = new boolean[newLength]; System.arra...
public void distribCommit(CommitUpdateCommand cmd, List<Node> nodes, ModifiableSolrParams params) throws IOException { // we need to do any retries before commit... servers.blockUntilFinished(); doRetriesIfNeeded(); UpdateRequest uReq = new UpdateRequest(); uReq.setParams(params); ...
public void distribCommit(CommitUpdateCommand cmd, List<Node> nodes, ModifiableSolrParams params) throws IOException { // we need to do any retries before commit... servers.blockUntilFinished(); doRetriesIfNeeded(); UpdateRequest uReq = new UpdateRequest(); uReq.setParams(params); ...
public static void validateKeyspaceNotYetExisting(String newKsName) throws InvalidRequestException { // keyspace names must be unique case-insensitively because the keyspace name becomes the directory // where we store CF sstables. Names that differ only in case would thus cause problems on ...
public static void validateKeyspaceNotYetExisting(String newKsName) throws InvalidRequestException { // keyspace names must be unique case-insensitively because the keyspace name becomes the directory // where we store CF sstables. Names that differ only in case would thus cause problems on ...
public void testTriggersWithClobColumn() throws Exception { insertDefaultData(); Statement stmt = createStatement(); stmt.executeUpdate( "CREATE TABLE testClobTriggerA (a CLOB(400k), b int)"); stmt.executeUpdate( "CREATE TABLE testClobTriggerB (a CLOB...
public void testTriggersWithClobColumn() throws Exception { insertDefaultData(); Statement stmt = createStatement(); stmt.executeUpdate( "CREATE TABLE testClobTriggerA (a CLOB(400k), b int)"); stmt.executeUpdate( "CREATE TABLE testClobTriggerB (a CLOB...
public static Test suite() { String testName = "InterruptResilienceTest"; if (! isSunJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. ...
public static Test suite() { String testName = "InterruptResilienceTest"; if (isIBMJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. i...
public static Test suite() { if (! isSunJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. if (getSystemProperty("java.version").startsWith...
public static Test suite() { if (isIBMJVM()) { // DERBY-4463 test fails on IBM VM 1.5. // It's fixed in IBM VM 1.6 SR9 and above. // Remove this condition when that issue is solved in IBM VM 1.5 SR13. if (getSystemProperty("java.version").startsWith("...
public void map(LongWritable userID, VectorWritable vectorWritable, OutputCollector<LongWritable, RecommendedItemsWritable> output, Reporter reporter) throws IOException { if (usersToRecommendFor != null && !usersToRecommendFor.contains(userID.get())) { ...
public void map(LongWritable userID, VectorWritable vectorWritable, OutputCollector<LongWritable, RecommendedItemsWritable> output, Reporter reporter) throws IOException { if (usersToRecommendFor != null && !usersToRecommendFor.contains(userID.get())) { ...
public int docID() { return docIt >= upto ? NO_MORE_DOCS : docs[docIt]; }
public int docID() { return docIt < 0 ? -1 : docIt >= upto ? NO_MORE_DOCS : docs[docIt]; }
private boolean mergeClosestClusters(int numUsers, List<FastIDSet> clusters, boolean done) throws TasteException { // We find a certain number of closest clusters... List<ClusterClusterPair> queue = findClosestClusters(numUsers, clusters); // The first one is definitely the closest pair in existence ...
private boolean mergeClosestClusters(int numUsers, List<FastIDSet> clusters, boolean done) throws TasteException { // We find a certain number of closest clusters... List<ClusterClusterPair> queue = findClosestClusters(numUsers, clusters); // The first one is definitely the closest pair in existence ...
public static long getTotalBytes(Iterable<SSTableReader> sstables) { long sum = 0; for (SSTableReader sstable : sstables) { sum += sstable.length(); } return sum; }
public static long getTotalBytes(Iterable<SSTableReader> sstables) { long sum = 0; for (SSTableReader sstable : sstables) { sum += sstable.onDiskLength(); } return sum; }
public CompressedSegmentedFile(String path, CompressionMetadata metadata) { super(path, metadata.dataLength); this.metadata = metadata; }
public CompressedSegmentedFile(String path, CompressionMetadata metadata) { super(path, metadata.dataLength, metadata.compressedFileLength); this.metadata = metadata; }
private static List<Pair<SSTableReader, Long>> createSSTableAndLengthPairs(Collection<SSTableReader> collection) { List<Pair<SSTableReader, Long>> tableLengthPairs = new ArrayList<Pair<SSTableReader, Long>>(); for(SSTableReader table: collection) tableLengthPairs.add(new Pair<SSTable...
private static List<Pair<SSTableReader, Long>> createSSTableAndLengthPairs(Collection<SSTableReader> collection) { List<Pair<SSTableReader, Long>> tableLengthPairs = new ArrayList<Pair<SSTableReader, Long>>(); for(SSTableReader table: collection) tableLengthPairs.add(new Pair<SSTable...
public final void maybeRefreshBlocking() throws IOException, InterruptedException { ensureOpen(); // Ensure only 1 thread does reopen at once refreshLock.lock(); try { doMaybeRefresh(); } finally { refreshLock.lock(); } }
public final void maybeRefreshBlocking() throws IOException, InterruptedException { ensureOpen(); // Ensure only 1 thread does reopen at once refreshLock.lock(); try { doMaybeRefresh(); } finally { refreshLock.unlock(); } }
public Sorter newSorter(Entry[] arr) { return new ArrayTimSorter<Entry>(arr, ArrayUtil.<Entry>naturalComparator(), random().nextInt(arr.length)); }
public Sorter newSorter(Entry[] arr) { return new ArrayTimSorter<Entry>(arr, ArrayUtil.<Entry>naturalComparator(), _TestUtil.nextInt(random(), 0, arr.length)); }
protected synchronized int addCategoryDocument(CategoryPath categoryPath, int length, int parent) throws CorruptIndexException, IOException { // Before Lucene 2.9, position increments >=0 were supported, so we // added 1 to parent to allow the parent -1 ...
protected synchronized int addCategoryDocument(CategoryPath categoryPath, int length, int parent) throws CorruptIndexException, IOException { // Before Lucene 2.9, position increments >=0 were supported, so we // added 1 to parent to allow the parent -1 ...
public void testPerFieldCodec() throws Exception { final int NUM_DOCS = atLeast(173); if (VERBOSE) { System.out.println("TEST: NUM_DOCS=" + NUM_DOCS); } MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider IndexWriter w = new...
public void testPerFieldCodec() throws Exception { final int NUM_DOCS = atLeast(173); if (VERBOSE) { System.out.println("TEST: NUM_DOCS=" + NUM_DOCS); } MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider IndexWriter w = new...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 1000))); Document doc = ...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void assertFromTestData(int codePointTable[]) throws Exception { if (VERBOSE) { System.out.println("TEST: codePointTable=" + codePointTable); } InputStream stream = getClass().getResourceAsStream("fuzzyTestData.txt"); BufferedReader reader = new BufferedReader(new InputStreamReader(stream...
public void assertFromTestData(int codePointTable[]) throws Exception { if (VERBOSE) { System.out.println("TEST: codePointTable=" + codePointTable); } InputStream stream = getClass().getResourceAsStream("fuzzyTestData.txt"); BufferedReader reader = new BufferedReader(new InputStreamReader(stream...
public void testRandomPhrases() throws Exception { Directory dir = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy())); List<List<String>> docs...
public void testRandomPhrases() throws Exception { Directory dir = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy())); List<List<String>> docs...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); fieldName = random.nextBoolean() ? "field" : ""; // sometimes use an empty string as field name RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); fieldName = random.nextBoolean() ? "field" : ""; // sometimes use an empty string as field name RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer...
public void setUp() throws Exception { super.setUp(); // we generate aweful regexps: good for testing. // but for preflex codec, the test can be very slow, so use less iterations. numIterations = Codec.getDefault().getName().equals("Lucene3x") ? 10 * RANDOM_MULTIPLIER : atLeast(50); dir = newDirec...
public void setUp() throws Exception { super.setUp(); // we generate aweful regexps: good for testing. // but for preflex codec, the test can be very slow, so use less iterations. numIterations = Codec.getDefault().getName().equals("Lucene3x") ? 10 * RANDOM_MULTIPLIER : atLeast(50); dir = newDirec...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(_TestUtil.nextInt(random, 50, 10...
public void testCustomEncoder() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); config.setSimilarity(new CustomNormEncodingSimilarity()); RandomIndexWriter writer = new RandomIndexWriter(random, dir, co...
public void testCustomEncoder() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); config.setSimilarity(new CustomNormEncodingSimilarity()); RandomIndexWriter writer = new RandomIndexWriter(random, dir, co...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy()); config.setSi...
public void setUp() throws Exception { super.setUp(); dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy()); config.setSi...
public void testRollingUpdates() throws Exception { final MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider final LineFileDocs docs = new LineFileDocs(random); //provider.register(new MemoryCodec()); if ( (!"Lucene3x".equals(Codec.getDef...
public void testRollingUpdates() throws Exception { final MockDirectoryWrapper dir = newDirectory(); dir.setCheckIndexOnClose(false); // we use a custom codec provider final LineFileDocs docs = new LineFileDocs(random); //provider.register(new MemoryCodec()); if ( (!"Lucene3x".equals(Codec.getDef...
public void testRandomStoredFields() throws IOException { Directory dir = newDirectory(); Random rand = random; RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20))); //w.w.setUseC...
public void testRandomStoredFields() throws IOException { Directory dir = newDirectory(); Random rand = random; RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20))); //w.w.setUseC...
public void run() { try { final Document doc = new Document(); DirectoryReader r = IndexReader.open(dir); Field f = newField("f", "", StringField.TYPE_UNSTORED); doc.add(f); int count = 0; do { if (...
public void run() { try { final Document doc = new Document(); DirectoryReader r = IndexReader.open(dir); Field f = newField("f", "", StringField.TYPE_UNSTORED); doc.add(f); int count = 0; do { if (...
public void run() { final Document doc = new Document(); doc.add(newField(r, "content1", "aaa bbb ccc ddd", TextField.TYPE_STORED)); doc.add(newField(r, "content6", "aaa bbb ccc ddd", DocCopyIterator.custom1)); doc.add(newField(r, "content2", "aaa bbb ccc ddd", DocCopyIterator.custom2)); ...
public void run() { final Document doc = new Document(); doc.add(newField(r, "content1", "aaa bbb ccc ddd", TextField.TYPE_STORED)); doc.add(newField(r, "content6", "aaa bbb ccc ddd", DocCopyIterator.custom1)); doc.add(newField(r, "content2", "aaa bbb ccc ddd", DocCopyIterator.custom2)); ...
private void updateStatistics() throws StandardException { ConglomerateDescriptor[] cds; td = dd.getTableDescriptor(tableId); if (updateStatisticsAll) { cds = td.getConglomerateDescriptors(); } else { cds = new ConglomerateDescriptor[1]; ...
private void updateStatistics() throws StandardException { ConglomerateDescriptor[] cds; td = dd.getTableDescriptor(tableId); if (updateStatisticsAll) { cds = null; } else { cds = new ConglomerateDescriptor[1]; cds[0] = dd.getConglomer...
public void testThreadInterruptDeadlock() throws Exception { IndexerThreadInterrupt t = new IndexerThreadInterrupt(); t.setDaemon(true); t.start(); // Force class loader to load ThreadInterruptedException // up front... else we can see a false failure if 2nd // interrupt arrives while class l...
public void testThreadInterruptDeadlock() throws Exception { IndexerThreadInterrupt t = new IndexerThreadInterrupt(); t.setDaemon(true); t.start(); // Force class loader to load ThreadInterruptedException // up front... else we can see a false failure if 2nd // interrupt arrives while class l...
public static void main(String[] args) throws IOException, ClassNotFoundException, IllegalAccessException, InstantiationException, OptionException { final DefaultOptionBuilder obuilder = new DefaultOptionBuilder(); final ArgumentBuilder abuilder = new ArgumentBuilder(); final GroupBuilder gbuild...
public static void main(String[] args) throws IOException, ClassNotFoundException, IllegalAccessException, InstantiationException, OptionException { final DefaultOptionBuilder obuilder = new DefaultOptionBuilder(); final ArgumentBuilder abuilder = new ArgumentBuilder(); final GroupBuilder gbuild...
public void test() { BayesClassifier classifier = new BayesClassifier(); ClassifierResult result; String[] document = new String[]{"aa", "ff"}; result = classifier.classify(model, document, "unknown"); assertTrue("category is null and it shouldn't be", result != null); assertTrue(result + " is...
public void test() { BayesClassifier classifier = new BayesClassifier(); ClassifierResult result; String[] document = new String[]{"aa", "ff"}; result = classifier.classify(model, document, "unknown"); assertTrue("category is null and it shouldn't be", result != null); assertTrue(result + " is...
public void test() { BayesClassifier classifier = new BayesClassifier(); ClassifierResult result; String[] document = new String[]{"aa", "ff"}; result = classifier.classify(model, document, "unknown"); assertTrue("category is null and it shouldn't be", result != null); assertTrue(result + " is...
public void test() { BayesClassifier classifier = new BayesClassifier(); ClassifierResult result; String[] document = new String[]{"aa", "ff"}; result = classifier.classify(model, document, "unknown"); assertTrue("category is null and it shouldn't be", result != null); assertTrue(result + " is...
public double documentProbability(Model model, String label, String[] document); }
public double documentWeight(Model model, String label, String[] document); }
private void verify(AtomicReader r, int[][] idToOrds, BytesRef[] termsArray, BytesRef prefixRef) throws Exception { final DocTermOrds dto = new DocTermOrds(r, "field", prefixRef, In...
private void verify(AtomicReader r, int[][] idToOrds, BytesRef[] termsArray, BytesRef prefixRef) throws Exception { final DocTermOrds dto = new DocTermOrds(r, "field", prefixRef, In...
public String getNormFileName(int number) { if (hasSeparateNorms(number)) { return IndexFileNames.fileNameFromGeneration(name, "s" + number, normGen.get(number)); } else { // single file for all norms return IndexFileNames.fileNameFromGeneration(name, IndexFileNames.NORMS_EXTENSION, WITHOUT_...
public String getNormFileName(int number) { if (hasSeparateNorms(number)) { return IndexFileNames.fileNameFromGeneration(name, IndexFileNames.SEPARATE_NORMS_EXTENSION + number, normGen.get(number)); } else { // single file for all norms return IndexFileNames.fileNameFromGeneration(name, Inde...
public Session(String[] arguments) throws IllegalArgumentException { float STDev = 0.1f; CommandLineParser parser = new PosixParser(); try { CommandLine cmd = parser.parse(availableOptions, arguments); if (cmd.getArgs().length > 0) { ...
public Session(String[] arguments) throws IllegalArgumentException { float STDev = 0.1f; CommandLineParser parser = new PosixParser(); try { CommandLine cmd = parser.parse(availableOptions, arguments); if (cmd.getArgs().length > 0) { ...
public static String updateJ(String json, SolrParams args) throws Exception { SolrCore core = h.getCore(); DirectSolrConnection connection = new DirectSolrConnection(core); SolrRequestHandler handler = core.getRequestHandler("/udate/json"); if (handler == null) { handler = new JsonUpdateRequestH...
public static String updateJ(String json, SolrParams args) throws Exception { SolrCore core = h.getCore(); DirectSolrConnection connection = new DirectSolrConnection(core); SolrRequestHandler handler = core.getRequestHandler("/update/json"); if (handler == null) { handler = new JsonUpdateRequest...
public void testIndexCreate() throws IOException, ConfigurationException, InterruptedException, ExecutionException { Table table = Table.open("Keyspace1"); // create a row and update the birthdate value, test that the index query fetches the new version RowMutation rm; rm = new ...
public void testIndexCreate() throws IOException, ConfigurationException, InterruptedException, ExecutionException { Table table = Table.open("Keyspace1"); // create a row and update the birthdate value, test that the index query fetches the new version RowMutation rm; rm = new ...
public void reload() { // metadata object has been mutated directly. make all the members jibe with new settings. // only update these runtime-modifiable settings if they have not been modified. if (!minCompactionThreshold.isModified()) for (ColumnFamilyStore cfs : c...
public void reload() { // metadata object has been mutated directly. make all the members jibe with new settings. // only update these runtime-modifiable settings if they have not been modified. if (!minCompactionThreshold.isModified()) for (ColumnFamilyStore cfs : c...
public AllSimilarItemsCandidateItemsStrategy(ItemSimilarity similarity) { Preconditions.checkArgument(similarity != null, "similarity is null"); this.similarity = similarity; } @Override FastIDSet doGetCandidateItems(long[] preferredItemIDs, DataModel dataModel) throws TasteException { FastIDSet ca...
protected FastIDSet doGetCandidateItems(long[] preferredItemIDs, DataModel dataModel) throws TasteException { FastIDSet candidateItemIDs = new FastIDSet(); for (long itemID : preferredItemIDs) { candidateItemIDs.addAll(similarity.allSimilarItemIDs(itemID)); } candidateItemIDs.removeAll(preferred...
public void testMissingField() throws Exception { String fieldName = "field1"; Directory rd1 = newDirectory(); RandomIndexWriter w1 = new RandomIndexWriter(random(), rd1); Document doc = new Document(); doc.add(newStringField(fieldName, "content1", Field.Store.YES)); w1.addDocument(doc); I...
public void testMissingField() throws Exception { String fieldName = "field1"; Directory rd1 = newDirectory(); RandomIndexWriter w1 = new RandomIndexWriter(random(), rd1); Document doc = new Document(); doc.add(newStringField(fieldName, "content1", Field.Store.YES)); w1.addDocument(doc); I...
public final void indexDictionary(Dictionary dict, IndexWriterConfig config, boolean fullMerge) throws IOException { synchronized (modifyCurrentIndexLock) { ensureOpen(); final Directory dir = this.spellIndex; final IndexWriter writer = new IndexWriter(dir, config); IndexSearcher indexSear...
public final void indexDictionary(Dictionary dict, IndexWriterConfig config, boolean fullMerge) throws IOException { synchronized (modifyCurrentIndexLock) { ensureOpen(); final Directory dir = this.spellIndex; final IndexWriter writer = new IndexWriter(dir, config); IndexSearcher indexSear...
public void testTermUTF16SortOrder() throws Throwable { Random rnd = random(); Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(rnd, dir); Document d = new Document(); // Single segment Field f = newStringField("f", "", Field.Store.NO); d.add(f); char[] ...
public void testTermUTF16SortOrder() throws Throwable { Random rnd = random(); Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(rnd, dir); Document d = new Document(); // Single segment Field f = newStringField("f", "", Field.Store.NO); d.add(f); char[] ...
public void testTermVectorExceptions() throws IOException { FailOnTermVectors[] failures = new FailOnTermVectors[] { new FailOnTermVectors(FailOnTermVectors.AFTER_INIT_STAGE), new FailOnTermVectors(FailOnTermVectors.INIT_STAGE), }; int num = atLeast(1); for (int j = 0; j < num; j++) { ...
public void testTermVectorExceptions() throws IOException { FailOnTermVectors[] failures = new FailOnTermVectors[] { new FailOnTermVectors(FailOnTermVectors.AFTER_INIT_STAGE), new FailOnTermVectors(FailOnTermVectors.INIT_STAGE), }; int num = atLeast(1); for (int j = 0; j < num; j++) { ...
public synchronized boolean tryDeleteDocument(IndexReader readerIn, int docID) throws IOException { final AtomicReader reader; if (readerIn instanceof AtomicReader) { // Reader is already atomic: use the incoming docID: reader = (AtomicReader) readerIn; } else { // Composite reader: loo...
public synchronized boolean tryDeleteDocument(IndexReader readerIn, int docID) throws IOException { final AtomicReader reader; if (readerIn instanceof AtomicReader) { // Reader is already atomic: use the incoming docID: reader = (AtomicReader) readerIn; } else { // Composite reader: loo...
private final FieldInfos.Builder fieldInfosBuilder; // note, just like in codec apis Directory 'dir' is NOT the same as segmentInfo.dir!! SegmentMerger(SegmentInfo segmentInfo, InfoStream infoStream, Directory dir, int termIndexInterval, MergeState.CheckAbort checkAbort, PayloadProcessorProvider ...
private final FieldInfos.Builder fieldInfosBuilder; // note, just like in codec apis Directory 'dir' is NOT the same as segmentInfo.dir!! SegmentMerger(SegmentInfo segmentInfo, InfoStream infoStream, Directory dir, int termIndexInterval, MergeState.CheckAbort checkAbort, PayloadProcessorProvider ...
private static DocValues getDocValues(IndexReader reader, final String field, final DocValuesPuller puller) throws IOException { if (reader instanceof AtomicReader) { // already an atomic reader return puller.pull((AtomicReader) reader, field); } assert reader instanceof CompositeReader; f...
private static DocValues getDocValues(IndexReader reader, final String field, final DocValuesPuller puller) throws IOException { if (reader instanceof AtomicReader) { // already an atomic reader return puller.pull((AtomicReader) reader, field); } assert reader instanceof CompositeReader; f...
private void createIndex(IndexWriterConfig config, Directory target, IndexReader reader, Filter preserveFilter, boolean negateFilter) throws IOException { boolean success = false; final IndexWriter w = new IndexWriter(target, config); try { final List<AtomicReaderContext> leaves = reader.getTopReade...
private void createIndex(IndexWriterConfig config, Directory target, IndexReader reader, Filter preserveFilter, boolean negateFilter) throws IOException { boolean success = false; final IndexWriter w = new IndexWriter(target, config); try { final List<AtomicReaderContext> leaves = reader.leaves(); ...
public static long getTotalTermFreq(IndexReader reader, final String field, final BytesRef termText) throws Exception { long totalTF = 0L; for (final AtomicReaderContext ctx : reader.getTopReaderContext().leaves()) { AtomicReader r = ctx.reader(); Bits liveDocs = r.getLiveDocs(); if (live...
public static long getTotalTermFreq(IndexReader reader, final String field, final BytesRef termText) throws Exception { long totalTF = 0L; for (final AtomicReaderContext ctx : reader.leaves()) { AtomicReader r = ctx.reader(); Bits liveDocs = r.getLiveDocs(); if (liveDocs == null) { ...
public void setContext( TransformContext context ) { try { IndexReader reader = qparser.getReq().getSearcher().getIndexReader(); readerContexts = reader.getTopReaderContext().leaves(); docValuesArr = new FunctionValues[readerContexts.size()]; searcher = qparser.getReq().getSearcher(); ...
public void setContext( TransformContext context ) { try { IndexReader reader = qparser.getReq().getSearcher().getIndexReader(); readerContexts = reader.leaves(); docValuesArr = new FunctionValues[readerContexts.size()]; searcher = qparser.getReq().getSearcher(); fcontext = ValueSou...
public static SimpleOrderedMap<Object> getIndexInfo(DirectoryReader reader) throws IOException { Directory dir = reader.directory(); SimpleOrderedMap<Object> indexInfo = new SimpleOrderedMap<Object>(); indexInfo.add("numDocs", reader.numDocs()); indexInfo.add("maxDoc", reader.maxDoc()); indexInf...
public static SimpleOrderedMap<Object> getIndexInfo(DirectoryReader reader) throws IOException { Directory dir = reader.directory(); SimpleOrderedMap<Object> indexInfo = new SimpleOrderedMap<Object>(); indexInfo.add("numDocs", reader.numDocs()); indexInfo.add("maxDoc", reader.maxDoc()); indexInf...
public static void enumeratekeys(String ssTableFile, PrintStream outs) throws IOException { IPartitioner partitioner = StorageService.getPartitioner(); BufferedRandomAccessFile input = new BufferedRandomAccessFile(SSTable.indexFilename(ssTableFile), "r"); while (!input.isEOF()) ...
public static void enumeratekeys(String ssTableFile, PrintStream outs) throws IOException { IPartitioner partitioner = StorageService.getPartitioner(); BufferedRandomAccessFile input = new BufferedRandomAccessFile(SSTable.indexFilename(ssTableFile), "r"); while (!input.isEOF()) ...
private static Path prepareInput(FileSystem fs, List<?> population) throws IOException { Path inpath = new Path(fs.getWorkingDirectory(), "input"); // Delete the input if it already exists if (fs.exists(inpath)) { FileUtil.fullyDelete(fs, inpath); } fs.mkdirs(inpath); storePopul...
private static Path prepareInput(FileSystem fs, List<?> population) throws IOException { Path inpath = new Path(fs.getWorkingDirectory(), "input"); // Delete the input if it already exists if (fs.exists(inpath)) { fs.delete(inpath, true); } fs.mkdirs(inpath); storePopulation(fs,...
public static Path prepareOutput(FileSystem fs) throws IOException { Path outpath = new Path(fs.getWorkingDirectory(), "output"); if (fs.exists(outpath)) { FileUtil.fullyDelete(fs, outpath); } return outpath; }
public static Path prepareOutput(FileSystem fs) throws IOException { Path outpath = new Path(fs.getWorkingDirectory(), "output"); if (fs.exists(outpath)) { fs.delete(outpath, true); } return outpath; }