List of usage examples for com.mongodb.DBCollection.getIndexInfo()
public List<DBObject> getIndexInfo()
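getIndexInfo() returns one DBObject per index defined on the collection, each describing the index through fields such as "name" and "key" (older servers also report "ns"). Before the project examples below, here is a minimal, self-contained sketch of the call using the legacy MongoClient/DB API; the database and collection names ("mydb", "testCollection") are placeholders chosen purely for illustration.

import java.net.UnknownHostException;
import java.util.List;

import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.MongoClient;

public class GetIndexInfoExample {
    public static void main(String[] args) throws UnknownHostException {
        // connect to a local mongod on the default port
        MongoClient mongoClient = new MongoClient();
        try {
            DB db = mongoClient.getDB("mydb");                      // placeholder database name
            DBCollection coll = db.getCollection("testCollection"); // placeholder collection name

            // each returned DBObject describes one index on the collection
            List<DBObject> indexes = coll.getIndexInfo();
            for (DBObject index : indexes) {
                System.out.println(index.get("name") + " -> " + index.get("key"));
            }
        } finally {
            mongoClient.close();
        }
    }
}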
From source file:com.redhat.lightblue.metadata.mongo.MongoMetadata.java
License:Open Source License
private void createUpdateEntityInfoIndexes(EntityInfo ei) {
    LOGGER.debug("createUpdateEntityInfoIndexes: begin");
    Indexes indexes = ei.getIndexes();
    MongoDataStore ds = (MongoDataStore) ei.getDataStore();
    DB entityDB = dbResolver.get(ds);
    DBCollection entityCollection = entityDB.getCollection(ds.getCollectionName());
    Error.push("createUpdateIndex");
    try {
        List<DBObject> existingIndexes = entityCollection.getIndexInfo();
        LOGGER.debug("Existing indexes: {}", existingIndexes);
        for (Index index : indexes.getIndexes()) {
            boolean createIx = true;
            LOGGER.debug("Processing index {}", index);
            for (DBObject existingIndex : existingIndexes) {
                if (indexFieldsMatch(index, existingIndex) && indexOptionsMatch(index, existingIndex)) {
                    LOGGER.debug("Same index exists, not creating");
                    createIx = false;
                    break;
                }
            }
            if (createIx) {
                for (DBObject existingIndex : existingIndexes) {
                    if (indexFieldsMatch(index, existingIndex) && !indexOptionsMatch(index, existingIndex)) {
                        LOGGER.debug("Same index exists with different options, dropping index:{}", existingIndex);
                        // Changing index options, drop the index using its name, recreate with new options
                        entityCollection.dropIndex(existingIndex.get(LITERAL_NAME).toString());
                    }
                }
            }
            if (createIx) {
                DBObject newIndex = new BasicDBObject();
                for (SortKey p : index.getFields()) {
                    newIndex.put(p.getField().toString(), p.isDesc() ? -1 : 1);
                }
                BasicDBObject options = new BasicDBObject("unique", index.isUnique());
                if (index.getName() != null && index.getName().trim().length() > 0) {
                    options.append(LITERAL_NAME, index.getName().trim());
                }
                LOGGER.debug("Creating index {} with options {}", newIndex, options);
                entityCollection.createIndex(newIndex, options);
            }
        }
    } catch (MongoException me) {
        LOGGER.error("createUpdateEntityInfoIndexes: {}", ei);
        throw Error.get(MongoMetadataConstants.ERR_ENTITY_INDEX_NOT_CREATED, me.getMessage());
    } finally {
        Error.pop();
    }
    LOGGER.debug("createUpdateEntityInfoIndexes: end");
}
From source file:com.redhat.lightblue.mongo.crud.MongoCRUDController.java
License:Open Source License
private void createUpdateEntityInfoIndexes(EntityInfo ei, Metadata md) {
    LOGGER.debug("createUpdateEntityInfoIndexes: begin");
    Indexes indexes = ei.getIndexes();
    MongoDataStore ds = (MongoDataStore) ei.getDataStore();
    DB entityDB = dbResolver.get(ds);
    DBCollection entityCollection = entityDB.getCollection(ds.getCollectionName());
    Error.push("createUpdateIndex");
    try {
        List<DBObject> existingIndexes = entityCollection.getIndexInfo();
        LOGGER.debug("Existing indexes: {}", existingIndexes);

        // This is how index creation/modification works:
        // - The _id index will remain untouched.
        // - If there is an index with name X in metadata, find the same named index, and compare
        //   its fields/flags. If different, drop and recreate. Drop all indexes with the same field signature.
        //
        // - If there is an index with null name in metadata, see if there is an index with same
        //   fields and flags. If so, no change. Otherwise, create index. Drop all indexes with the same field signature.
        List<Index> createIndexes = new ArrayList<>();
        List<DBObject> dropIndexes = new ArrayList<>();
        List<DBObject> foundIndexes = new ArrayList<>();
        for (Index index : indexes.getIndexes()) {
            if (!isIdIndex(index)) {
                if (index.getName() != null && index.getName().trim().length() > 0) {
                    LOGGER.debug("Processing index {}", index.getName());
                    DBObject found = null;
                    for (DBObject existingIndex : existingIndexes) {
                        if (index.getName().equals(existingIndex.get("name"))) {
                            found = existingIndex;
                            break;
                        }
                    }
                    if (found != null) {
                        foundIndexes.add(found);
                        // indexFieldsMatch will handle checking for hidden versions of the index
                        if (indexFieldsMatch(index, found) && indexOptionsMatch(index, found)) {
                            LOGGER.debug("{} already exists", index.getName());
                        } else {
                            LOGGER.debug("{} modified, dropping and recreating index", index.getName());
                            existingIndexes.remove(found);
                            dropIndexes.add(found);
                            createIndexes.add(index);
                        }
                    } else {
                        LOGGER.debug("{} not found, checking if there is an index with same field signature",
                                index.getName());
                        found = findIndexWithSignature(existingIndexes, index);
                        if (found == null) {
                            LOGGER.debug("{} not found, creating", index.getName());
                            createIndexes.add(index);
                        } else {
                            LOGGER.debug("There is an index with same field signature as {}, drop and recreate",
                                    index.getName());
                            foundIndexes.add(found);
                            dropIndexes.add(found);
                            createIndexes.add(index);
                        }
                    }
                } else {
                    LOGGER.debug("Processing index with fields {}", index.getFields());
                    DBObject found = findIndexWithSignature(existingIndexes, index);
                    if (found != null) {
                        foundIndexes.add(found);
                        LOGGER.debug("An index with same keys found: {}", found);
                        if (indexOptionsMatch(index, found)) {
                            LOGGER.debug("Same options as well, not changing");
                        } else {
                            LOGGER.debug("Index with different options, drop/recreate");
                            dropIndexes.add(found);
                            createIndexes.add(index);
                        }
                    } else {
                        LOGGER.debug("Creating index with fields {}", index.getFields());
                        createIndexes.add(index);
                    }
                }
            }
        }
        // Any index in existingIndexes but not in foundIndexes should be deleted as well
        for (DBObject index : existingIndexes) {
            boolean found = false;
            for (DBObject x : foundIndexes) {
                if (x == index) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                for (DBObject x : dropIndexes) {
                    if (x == index) {
                        found = true;
                        break;
                    }
                }
                if (!found && !isIdIndex(index)) {
                    LOGGER.warn("Dropping index {}", index.get("name"));
                    entityCollection.dropIndex(index.get("name").toString());
                }
            }
        }
        for (DBObject index : dropIndexes) {
            LOGGER.warn("Dropping index {}", index.get("name"));
            entityCollection.dropIndex(index.get("name").toString());
        }
        // we want to run in the background if we're only creating indexes (no field generation)
        boolean hidden = false;
        // fieldMap is <canonicalPath, hiddenPath>
        List<Path> fields = new ArrayList<>();
        for (Index index : createIndexes) {
            DBObject newIndex = new BasicDBObject();
            for (IndexSortKey p : index.getFields()) {
                Path field = p.getField();
                if (p.isCaseInsensitive()) {
                    fields.add(p.getField());
                    field = DocTranslator.getHiddenForField(field);
                    // if we have a case insensitive index, we want the index creation operation to be blocking
                    hidden = true;
                }
                newIndex.put(ExpressionTranslator.translatePath(field), p.isDesc() ? -1 : 1);
            }
            BasicDBObject options = new BasicDBObject("unique", index.isUnique());
            // if index is unique and non-partial, also make it a sparse index, so we can have non-required unique fields
            options.append("sparse", index.isUnique()
                    && !index.getProperties().containsKey(PARTIAL_FILTER_EXPRESSION_OPTION_NAME));
            if (index.getName() != null && index.getName().trim().length() > 0) {
                options.append("name", index.getName().trim());
            }
            options.append("background", true);
            // partial index
            if (index.getProperties().containsKey(PARTIAL_FILTER_EXPRESSION_OPTION_NAME)) {
                try {
                    @SuppressWarnings("unchecked")
                    DBObject filter = new BasicDBObject((Map<String, Object>) index.getProperties()
                            .get(PARTIAL_FILTER_EXPRESSION_OPTION_NAME));
                    options.append(PARTIAL_FILTER_EXPRESSION_OPTION_NAME, filter);
                } catch (ClassCastException e) {
                    throw new RuntimeException("Index property " + PARTIAL_FILTER_EXPRESSION_OPTION_NAME
                            + " needs to be a mongo query in json format", e);
                }
            }
            LOGGER.debug("Creating index {} with options {}", newIndex, options);
            LOGGER.warn("Creating index {} with fields={}, options={}", index.getName(), index.getFields(), options);
            entityCollection.createIndex(newIndex, options);
        }
        if (hidden) {
            LOGGER.info("Executing post-index creation updates...");
            // case insensitive indexes have been updated or created. recalculate all hidden fields
            Thread pop = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        populateHiddenFields(ei, md, fields);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                }
            });
            pop.start();
            // TODO: remove hidden fields on index deletions? Worth it?
        }
    } catch (MongoException me) {
        throw Error.get(MongoCrudConstants.ERR_ENTITY_INDEX_NOT_CREATED, me.getMessage());
    } catch (Exception e) {
        throw analyzeException(e, MetadataConstants.ERR_ILL_FORMED_METADATA);
    } finally {
        Error.pop();
    }
    LOGGER.debug("createUpdateEntityInfoIndexes: end");
}
From source file:com.stratio.connector.mongodb.core.engine.metadata.DiscoverMetadataUtils.java
License:Apache License
/**
 * Discover the existing indexes stored in the collection.
 *
 * @param collection the collection
 * @return the list of indexMetadata.
 */
public static List<IndexMetadata> discoverIndexes(DBCollection collection) {
    // TODO add TextIndex, Geospatial, etc.
    // TODO supported only simple, compound and hashed index
    // TODO remove _id?
    // TODO return options?? e.g. sparse, unique??
    // TODO custom (asc and desc)
    List<DBObject> indexInfo = collection.getIndexInfo();
    String db = collection.getDB().getName();
    String collName = collection.getName();
    List<IndexMetadata> indexMetadataList = new ArrayList<>(indexInfo.size());
    for (DBObject dbObject : indexInfo) {
        BasicDBObject key = (BasicDBObject) dbObject.get("key");
        IndexMetadataBuilder indexMetadataBuilder = new IndexMetadataBuilder(db, collName,
                (String) dbObject.get("name"), getIndexType(key));
        for (String field : key.keySet()) {
            indexMetadataBuilder.addColumn(field, null);
        }
        indexMetadataList.add(indexMetadataBuilder.build());
    }
    return indexMetadataList;
}
From source file:edu.sjsu.carbonated.client.MongoDBDOA.java
License:Apache License
public static void main(String[] args) throws Exception {
    // connect to the local database server
    Mongo m = new Mongo();

    // get handle to "mydb"
    DB db = m.getDB("mydb");

    // Authenticate - optional
    // boolean auth = db.authenticate("foo", "bar");

    // get a list of the collections in this database and print them out
    Set<String> colls = db.getCollectionNames();
    for (String s : colls) {
        System.out.println(s);
    }

    // get a collection object to work with
    DBCollection coll = db.getCollection("testCollection");

    // drop all the data in it
    coll.drop();

    // make a document and insert it
    BasicDBObject doc = new BasicDBObject();
    doc.put("test", new AlbumResource("name", "desc", "user", "asdf"));
    doc.put("name", "MongoDB");
    doc.put("type", "database");
    doc.put("count", 1);
    BasicDBObject info = new BasicDBObject();
    info.put("x", 203);
    info.put("y", 102);
    doc.put("info", info);
    coll.insert(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    DBObject myDoc = coll.findOne();
    System.out.println(myDoc);

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    for (int i = 0; i < 100; i++) {
        coll.insert(new BasicDBObject().append("i", i));
    }
    System.out.println("total # of documents after inserting 100 small ones (should be 101) " + coll.getCount());

    // lets get all the documents in the collection and print them out
    DBCursor cur = coll.find();
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // now use a query to get 1 document out
    BasicDBObject query = new BasicDBObject();
    query.put("i", 71);
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // now use a range query to get a larger subset
    query = new BasicDBObject();
    query.put("i", new BasicDBObject("$gt", 50)); // i.e. find all where i > 50
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // range query with multiple constraints
    query = new BasicDBObject();
    query.put("i", new BasicDBObject("$gt", 20).append("$lte", 30)); // i.e. 20 < i <= 30
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // create an index on the "i" field
    coll.createIndex(new BasicDBObject("i", 1)); // create index on "i", ascending

    // list the indexes on the collection
    List<DBObject> list = coll.getIndexInfo();
    for (DBObject o : list) {
        System.out.println(o);
    }

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    // see if any previous operation had an error
    System.out.println("Previous error : " + db.getPreviousError());

    // force an error
    db.forceError();

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    db.resetError();
}
From source file:es.devcircus.mongodb_examples.hello_world.Main.java
License:Open Source License
/**
 * Retrieve the list of indexes of a specific collection.
 */
public static void gettingAListOfIndexesOnACollection() {
    System.out.println();
    System.out.println("---------------------------------------------------------------");
    System.out.println("          Getting a List of Indexes on a Collection            ");
    System.out.println("---------------------------------------------------------------");
    System.out.println();

    /* Getting a List of Indexes on a Collection
       You can get a list of the indexes on a collection: */
    DBCollection coll = db.getCollection(TEST_COLLECTION);
    List<DBObject> list = coll.getIndexInfo();
    for (DBObject o : list) {
        System.out.println(" - " + o);
    }
    /* and you should see something like
       { "name" : "i_1" , "ns" : "mydb.testCollection" , "key" : { "i" : 1} } */
}
From source file:eu.eubrazilcc.lvl.storage.mongodb.MongoDBConnector.java
License:EUPL
/**
 * Writes statistics about a collection to the specified output stream.
 * @param os - the output stream to write the statistics to
 * @param collection - collection from which the statistics are collected
 * @throws IOException - If an I/O error occurred
 */
public void stats(final OutputStream os, final String collection) throws IOException {
    checkArgument(os != null, "Uninitialized output stream");
    checkArgument(isNotBlank(collection), "Uninitialized or invalid collection");
    final DB db = client().getDB(CONFIG_MANAGER.getDbName());
    final DBCollection dbcol = db.getCollection(collection);
    try {
        os.write((" >> Collection: " + collection + "\n").getBytes());
        final List<DBObject> indexes = dbcol.getIndexInfo();
        os.write(" >> Indexes\n".getBytes());
        for (final DBObject idx : indexes) {
            os.write((" " + idx.toString() + "\n").getBytes());
        }
        os.write((" >> Items count: " + dbcol.getCount() + "\n").getBytes());
    } finally {
        os.flush();
    }
}
From source file:example.QuickTour.java
License:Apache License
/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes no args
 * @throws UnknownHostException if it cannot connect to a MongoDB instance at localhost:27017
 */
public static void main(final String[] args) throws UnknownHostException {
    // connect to the local database server
    MongoClient mongoClient = new MongoClient();

    // get handle to "mydb"
    DB db = mongoClient.getDB("mydb");

    // Authenticate - optional
    // boolean auth = db.authenticate("foo", "bar");

    // get a list of the collections in this database and print them out
    Set<String> collectionNames = db.getCollectionNames();
    for (final String s : collectionNames) {
        System.out.println(s);
    }

    // get a collection object to work with
    DBCollection testCollection = db.getCollection("testCollection");

    // drop all the data in it
    testCollection.drop();

    // make a document and insert it
    BasicDBObject doc = new BasicDBObject("name", "MongoDB").append("type", "database").append("count", 1)
            .append("info", new BasicDBObject("x", 203).append("y", 102));
    testCollection.insert(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    DBObject myDoc = testCollection.findOne();
    System.out.println(myDoc);

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    for (int i = 0; i < 100; i++) {
        testCollection.insert(new BasicDBObject().append("i", i));
    }
    System.out.println(
            "total # of documents after inserting 100 small ones (should be 101) " + testCollection.getCount());

    // lets get all the documents in the collection and print them out
    DBCursor cursor = testCollection.find();
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // now use a query to get 1 document out
    BasicDBObject query = new BasicDBObject("i", 71);
    cursor = testCollection.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // now use a range query to get a larger subset
    query = new BasicDBObject("i", new BasicDBObject("$gt", 50)); // i.e. find all where i > 50
    cursor = testCollection.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // range query with multiple constraints
    query = new BasicDBObject("i", new BasicDBObject("$gt", 20).append("$lte", 30)); // i.e. 20 < i <= 30
    cursor = testCollection.find(query);
    try {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    } finally {
        cursor.close();
    }

    // create an index on the "i" field
    testCollection.createIndex(new BasicDBObject("i", 1)); // create index on "i", ascending

    // list the indexes on the collection
    List<DBObject> list = testCollection.getIndexInfo();
    for (final DBObject o : list) {
        System.out.println(o);
    }

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    // see if any previous operation had an error
    System.out.println("Previous error : " + db.getPreviousError());

    // force an error
    db.forceError();

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    db.resetError();

    // release resources
    mongoClient.close();
}
From source file:examples.QuickTour.java
License:Apache License
public static void main(String[] args) throws Exception {
    // connect to the local database server
    Mongo m = new Mongo();

    // get handle to "mydb"
    DB db = m.getDB("mydb");

    // Authenticate - optional
    boolean auth = db.authenticate("foo", new char[] { 'b', 'a', 'r' });

    // get a list of the collections in this database and print them out
    Set<String> colls = db.getCollectionNames();
    for (String s : colls) {
        System.out.println(s);
    }

    // get a collection object to work with
    DBCollection coll = db.getCollection("testCollection");

    // drop all the data in it
    coll.drop();

    // make a document and insert it
    BasicDBObject doc = new BasicDBObject();
    doc.put("name", "MongoDB");
    doc.put("type", "database");
    doc.put("count", 1);
    BasicDBObject info = new BasicDBObject();
    info.put("x", 203);
    info.put("y", 102);
    doc.put("info", info);
    coll.insert(doc);

    // get it (since it's the only one in there since we dropped the rest earlier on)
    DBObject myDoc = coll.findOne();
    System.out.println(myDoc);

    // now, lets add lots of little documents to the collection so we can explore queries and cursors
    for (int i = 0; i < 100; i++) {
        coll.insert(new BasicDBObject().append("i", i));
    }
    System.out.println("total # of documents after inserting 100 small ones (should be 101) " + coll.getCount());

    // lets get all the documents in the collection and print them out
    DBCursor cur = coll.find();
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // now use a query to get 1 document out
    BasicDBObject query = new BasicDBObject();
    query.put("i", 71);
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // now use a range query to get a larger subset
    query = new BasicDBObject();
    query.put("i", new BasicDBObject("$gt", 50)); // i.e. find all where i > 50
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // range query with multiple constraints
    query = new BasicDBObject();
    query.put("i", new BasicDBObject("$gt", 20).append("$lte", 30)); // i.e. 20 < i <= 30
    cur = coll.find(query);
    while (cur.hasNext()) {
        System.out.println(cur.next());
    }

    // create an index on the "i" field
    coll.createIndex(new BasicDBObject("i", 1)); // create index on "i", ascending

    // list the indexes on the collection
    List<DBObject> list = coll.getIndexInfo();
    for (DBObject o : list) {
        System.out.println(o);
    }

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    // see if any previous operation had an error
    System.out.println("Previous error : " + db.getPreviousError());

    // force an error
    db.forceError();

    // See if the last operation had an error
    System.out.println("Last error : " + db.getLastError());

    db.resetError();
}
From source file:fr.cirad.web.controller.gigwa.base.AbstractVariantController.java
License:Open Source License
/**
 * List variants.
 *
 * @param request the request
 * @param sModule the module
 * @param projId the proj id
 * @param selectedVariantTypes the selected variant types
 * @param selectedSequences the selected sequences
 * @param selectedIndividuals the selected individuals
 * @param gtPattern the gt code
 * @param genotypeQualityThreshold the genotype quality threshold
 * @param readDepthThreshold the read depth threshold
 * @param missingData the missing data
 * @param minmaf the minmaf
 * @param maxmaf the maxmaf
 * @param minposition the minposition
 * @param maxposition the maxposition
 * @param alleleCount the allele count
 * @param geneName the gene name
 * @param variantEffects the variant effects
 * @param wantedFields the wanted fields
 * @param page the page
 * @param size the size
 * @param sortBy the sort by
 * @param sortDir the sort dir
 * @param processID the process id
 * @return the array list
 * @throws Exception the exception
 */
@RequestMapping(variantListURL)
/**
 * This method returns a list of variants from the current selection
 */
protected @ResponseBody ArrayList<Comparable[]> listVariants(HttpServletRequest request,
        @RequestParam("module") String sModule, @RequestParam("project") int projId,
        @RequestParam("variantTypes") String selectedVariantTypes,
        @RequestParam("sequences") String selectedSequences,
        @RequestParam("individuals") String selectedIndividuals, @RequestParam("gtPattern") String gtPattern,
        @RequestParam("genotypeQualityThreshold") int genotypeQualityThreshold,
        @RequestParam("readDepthThreshold") int readDepthThreshold,
        @RequestParam("missingData") double missingData, @RequestParam("minmaf") Float minmaf,
        @RequestParam("maxmaf") Float maxmaf, @RequestParam("minposition") Long minposition,
        @RequestParam("maxposition") Long maxposition, @RequestParam("alleleCount") String alleleCount,
        @RequestParam("geneName") String geneName, @RequestParam("variantEffects") String variantEffects,
        @RequestParam("wantedFields") String wantedFields, @RequestParam("page") int page,
        @RequestParam("size") int size, @RequestParam("sortBy") String sortBy,
        @RequestParam("sortDir") String sortDir, @RequestParam("processID") String processID) throws Exception {
    String[] usedFields = wantedFields.split(";");
    String token = processID.substring(1 + processID.indexOf('|'));

    String queryKey = getQueryKey(request, sModule, projId, selectedVariantTypes, selectedSequences,
            selectedIndividuals, gtPattern, genotypeQualityThreshold, readDepthThreshold, missingData, minmaf,
            maxmaf, minposition, maxposition, alleleCount, geneName, variantEffects);

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    DBCollection cachedCountcollection = mongoTemplate.getCollection(MgdbDao.COLLECTION_NAME_CACHED_COUNTS);
    // cachedCountcollection.drop();
    DBCursor countCursor = cachedCountcollection.find(new BasicDBObject("_id", queryKey));
    Object[] partialCountArray = !countCursor.hasNext() ? null
            : ((BasicDBList) countCursor.next().get(MgdbDao.FIELD_NAME_CACHED_COUNT_VALUE)).toArray();

    HashMap<Integer, String> variantFieldMap = new HashMap<Integer, String>(),
            runDataFieldMap = new HashMap<Integer, String>();
    for (int i = 0; i < usedFields.length; i++)
        if (usedFields[i].startsWith("#"))
            variantFieldMap.put(i, usedFields[i].substring(1));
        else
            runDataFieldMap.put(i, usedFields[i]);

    long expectedCount = 0;
    for (Object aPartialCount : partialCountArray)
        expectedCount += (Long) aPartialCount;

    DBCollection tmpVarCollOrView = getTemporaryVariantCollection(sModule, token, false);
    boolean fGotTempData = tmpVarCollOrView.findOne() != null;

    ArrayList<Comparable[]> result = new ArrayList<Comparable[]>();
    DBCollection variantColl = mongoTemplate.getCollection(mongoTemplate.getCollectionName(VariantData.class));
    if (fGotTempData || expectedCount == variantColl.count()) // otherwise we return an empty list because there seems to be a problem (missing temp records)
    {
        boolean fProjectHasAnnotations = getProjectEffectAnnotations(sModule, projId).size() > 0;

        DBCollection varCollForBuildingRows = fGotTempData ? tmpVarCollOrView : variantColl;
        DBCursor variantsInFilterCursor = varCollForBuildingRows.find();
        ArrayList<Object[]> variantRows = buildVariantRows(mongoTemplate, variantsInFilterCursor, sortBy, sortDir,
                page, size, variantFieldMap, runDataFieldMap);

        for (Object[] aRow : variantRows) {
            List<Comparable> anOutputRow = new ArrayList<Comparable>();
            for (int i = 0; i < aRow.length; i++) {
                String val = null;
                if (!usedFields[i].startsWith(VariantRunData.SECTION_ADDITIONAL_INFO + "."))
                    val = aRow[i] == null ? "" : aRow[i].toString();
                else if (aRow[i] != null && fProjectHasAnnotations)
                    val = aRow[i].toString().replaceAll("[\\[\\]\"]", ""); // it's an annotation field: make its content look cleaner
                if (val != null)
                    anOutputRow.add(val);
            }
            anOutputRow.add(anOutputRow.get(0)); // for details link
            result.add(anOutputRow.toArray(new Comparable[anOutputRow.size()]));
        }
    }

    if (fGotTempData && page == 0 && tmpVarCollOrView.getIndexInfo().size() <= 1)
        new Thread() { // temp data needs to be indexed for faster browsing
            public void run() {
                long b4 = System.currentTimeMillis();
                tmpVarCollOrView.createIndex(VariantData.FIELDNAME_VERSION);
                tmpVarCollOrView.createIndex(
                        VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_SEQUENCE);
                tmpVarCollOrView.createIndex(
                        VariantData.FIELDNAME_REFERENCE_POSITION + "." + ReferencePosition.FIELDNAME_START_SITE);
                tmpVarCollOrView.createIndex(VariantData.FIELDNAME_TYPE);
                LOG.debug("Indexing " + tmpVarCollOrView.count() + " temp variants took "
                        + (System.currentTimeMillis() - b4) / 1000f + "s");
            }
        }.start();

    return result;
}
From source file:net.autosauler.ballance.server.model.AbstractStructuredData.java
License:Apache License
/**
 * Inits the db struct.
 */
private void initDBStruct() {
    DB db = Database.get(getDomain());
    if (db != null) {
        Database.retain();
        DBCollection coll = db.getCollection(getTableName());
        List<DBObject> indexes = coll.getIndexInfo();
        if (indexes.size() < 1) {
            BasicDBObject i = new BasicDBObject();
            i.put(fieldname_number, 1);
            coll.createIndex(i);
            i.put(fieldname_domain, 1);
            coll.createIndex(i);
            i.put(fieldname_trash, 1);
            coll.createIndex(i);
            onInitDbStruct(i, coll);
        }
        Database.release();
    }
}