List of usage examples for com.mongodb DBCursor size
public int size()
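In the legacy MongoDB Java driver, DBCursor.size() sends a count command to the server and returns the number of documents the cursor's query matches with any limit() and skip() modifiers applied; DBCursor.count() returns the raw match count, ignoring those modifiers. Neither method iterates or exhausts the cursor. A minimal sketch (the "test" database and "inventory" collection are hypothetical):

    import com.mongodb.BasicDBObject;
    import com.mongodb.DB;
    import com.mongodb.DBCollection;
    import com.mongodb.DBCursor;
    import com.mongodb.MongoClient;

    public class DBCursorSizeDemo {
        public static void main(String[] args) {
            MongoClient mongo = new MongoClient("localhost", 27017);
            DB db = mongo.getDB("test");                      // hypothetical database
            DBCollection col = db.getCollection("inventory"); // hypothetical collection

            DBCursor cursor = col.find(new BasicDBObject("status", "active")).limit(10);
            int pageSize = cursor.size();  // matches with limit/skip applied (at most 10 here)
            int total = cursor.count();    // matches ignoring limit/skip
            System.out.println(pageSize + " of " + total + " active documents");

            cursor.close();
            mongo.close();
        }
    }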
From source file:br.edu.unifesspa.lcc.indexer.GerarIndice.java
@Override
public void run() {
    if (!list.isEmpty()) {
        // A problem was identified when there are few scheduled jobs: the same
        // Processamento_indiceId can end up being processed more than once at the same
        // time, so the processamento_indice entry must be removed from the database
        // when processing starts.

        // Download the demand data ----
        try {
            DownloadInfo();
        } catch (InterruptedException ex) {
            // Logger.getLogger(GerarIndice.class.getName()).log(Level.SEVERE, null, ex);
            ex.printStackTrace();
        }
        // End of demand download ----

        // Record in a database which subject this demand is using
        BasicDBObject search = new BasicDBObject();
        DB aux = mongo.getDB("demandas");
        DBCollection colDemandaAtiva = aux.getCollection("demandas_ativas");
        search.append("demanda", assunto.toString());
        search.append("Processamento_indiceId", procId);
        System.out.println("Inseriu na demanda_ativa");
        colDemandaAtiva.insert(search);
        // End of recording the active demand

        // Iterate over the list of indicators that must be processed
        System.out.println("Est para entrar no forit e a lista : " + list);
        for (Integer integer : list) {
            try {
                System.out.println("Processando indicador: " + integer);
                est.getIndicador(integer, dbAssunto); // generates the indicator
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        // After processing all indices, remove the processed subject from demandas_ativas
        colDemandaAtiva.remove(search);
        System.out.println("REMOVEU");
        search.clear();

        // If no other demand is still using the subject, drop the demand's database
        search.append("demanda", assunto.toString());
        DBCursor cursor = colDemandaAtiva.find(search);
        if (cursor.size() == 0) {
            //dbAssunto.dropDatabase();
            System.out.println("DROPOU");
        }
    }
}
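Note on the emptiness test above: cursor.size() == 0 issues a full server-side count just to learn whether anything matched. A lighter-weight alternative, sketched here against the same colDemandaAtiva collection and search query, is findOne() (or cursor.hasNext()):

    // Sketch: avoids the count command that size() issues
    if (colDemandaAtiva.findOne(search) == null) {
        // no other demand is using this subject
        // dbAssunto.dropDatabase();
        System.out.println("DROPOU");
    }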
From source file:br.edu.unifesspa.lcc.indexer.GerarIndice.java
private void DownloadInfo() throws InterruptedException {
    // Check whether a demand database already exists in Mongo
    List<String> dbs = mongo.getDatabaseNames();
    if (!dbs.contains(assunto.toString())) { // the database does not exist, so download it
        RestTemplate rt = new RestTemplate();
        DB territorios = mongo.getDB("territorios");
        DBCollection colTerritorios = territorios.getCollection("territorio");
        HttpEntity<String> entity = new HttpEntity<>("parameters", Processar.headers);
        BasicDBObject insert = new BasicDBObject();
        BasicDBObject insertDemanda = new BasicDBObject();
        DBCollection colDaDemanda = dbAssunto.getCollection(assunto.toString());
        System.out.println("db =" + dbAssunto);
        DBCollection k_classe = dbAssunto.getCollection("k");

        // K_CLASSES ------------------------------------------------------------------
        try {
            ResponseEntity<domain.K_classes[]> kClasse = rt.exchange(
                    "http://xingu.lcc.unifesspa.edu.br:8080/api/k_classess?idassunto=" + assunto,
                    HttpMethod.GET, entity, domain.K_classes[].class);
            System.out.println("K-classe tamanho: " + kClasse.getBody().length);
            //System.out.println("Tamanho body:" + kClasse.getBody().length);
            for (domain.K_classes k : kClasse.getBody()) {
                insert.append("k_ordem", k.getk_ordem());
                insert.append("k_classes_id", k.getId());
                insert.append("ck", Integer.parseInt(k.getk_codigo()));
                insert.append("dk", k.getk_descricao());
                k_classe.insert(insert);
                insert.clear();
            }
            System.out.println("DEVERIA TER INSERIDO O K");
        } catch (Exception e) {
            e.printStackTrace();
            insert.clear();
        }

        // END K-CLASSES, BEGIN INPUT_PRESENTES -----------------------------------------
        BasicDBObject insertCI = new BasicDBObject();
        try {
            System.out.println("Comeando a inserir os Inputs do assunto: " + assunto);
            ResponseEntity<domain.Input_presenteDTO[]> input = rt.exchange(
                    "http://xingu.lcc.unifesspa.edu.br:8080/api/input_presentes/getInputPresenteByAssantoId/" + assunto,
                    HttpMethod.GET, entity, Input_presenteDTO[].class);
            System.out.println("Fez o download do assunto: " + assunto);
            System.out.println("Tamano input: " + input.getBody().length + " Assunto: " + assunto);
            DBCollection colInput_presente = dbAssunto.getCollection("i");

            // Insert the subject into the database
            BasicDBObject search2 = new BasicDBObject();
            // int cont = 0;
            for (Input_presenteDTO Input2 : input.getBody()) {
                // For the "i" collection
                search2.append("ci", Integer.parseInt(Input2.getCodigo()));
                DBObject aux = colInput_presente.findOne(search2);
                if (aux != null) {
                    search2.clear();
                    // System.out.println("ja tem esse territorio inserido");
                } else {
                    DBCursor cursor = colTerritorios.find(search2);
                    while (cursor.hasNext()) {
                        aux = cursor.next();
                        // System.out.println("AUX: " + aux.toString());
                        if (Integer.parseInt(aux.get("ci").toString()) == Integer.parseInt(Input2.getCodigo())
                                && Integer.parseInt(aux.get("ano").toString()) == Integer.parseInt(Input2.getAno_ref())) {
                            // insertCI.append("uf_sigla", aux.get("uf_sigla"));
                            insertCI.append("longitude", Double.parseDouble(aux.get("longitude").toString()));
                            insertCI.append("latitude", Double.parseDouble(aux.get("latitude").toString()));
                            // System.out.println("Achou Territorio");
                        }
                    }
                    insertCI.append("ci", Integer.parseInt(Input2.getI_codigo_amc()));
                    insertCI.append("di", Input2.getTerritorio_nome());
                    // (assumes a matching territory was found above; aux is null otherwise)
                    insertCI.append("ano_ref", Integer.parseInt(aux.get("ano").toString()));
                    search2.clear();
                    colInput_presente.insert(insertCI);
                    insertCI.clear();
                }

                // For the collection with the same name as the database
                insertDemanda.append("ci", Integer.parseInt(Input2.getI_codigo_amc()));
                insertDemanda.append("di", Input2.getTerritorio_nome());
                insertDemanda.append("ck", Integer.parseInt(Input2.getK_classesK_codigo()));
                insertDemanda.append("quant_valor", Double.parseDouble(Input2.getX_valor()));
                insertDemanda.append("ano", Integer.parseInt(Input2.getAno_ref()));

                // Find the description of the K-class involved, via "k_classesK_codigo"
                search2.append("ck", Integer.parseInt(Input2.getK_classesK_codigo()));
                aux = k_classe.findOne(search2);
                insertDemanda.append("dk", (String) aux.get("dk"));
                search2.clear();

                colDaDemanda.insert(insertDemanda);
                insertDemanda.clear();
                // cont++;
                // System.out.println(cont);
            }
            System.out.println("Inseriu input_presente do assunto: " + assunto);

            // Prepare the database ------------------
            System.out.println("Preparando o assunto: " + assunto);
            datadao.prepareDatabase(false, dbAssunto);
            System.out.println("Fim preparar banco do assunto: " + assunto);
            // End of database preparation ----------
        } catch (Exception e) {
            e.printStackTrace();
            insert.clear();
            insertCI.clear();
        }
    } else {
        BasicDBObject search = new BasicDBObject();
        DB aux = mongo.getDB("demandas");
        DBCollection colDemandaAtiva = aux.getCollection("demandas_ativas");
        search.append("demanda", assunto.toString());
        boolean situacao = false;
        while (!situacao) {
            DBCursor demanAtiva = colDemandaAtiva.find(search);
            if (demanAtiva.size() >= 1) {
                situacao = true;
            }
            Thread.sleep(5000); // (sleep() is static; the original called it via currentThread())
        }
    }
}
From source file:br.ufabc.impress.mongo.manager.DBHelper.java
@Override
public String getAll(String tableName) {
    if (tableName == null || tableName.equals("")) {
        return "501";
    }
    String row = "";
    DBCollection table = db.getCollection(tableName);
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("status", "active");
    DBCursor cursor = table.find(searchQuery);
    if (cursor.size() > 0) {
        JSON json = new JSON();
        row = json.serialize(cursor);
        cursor.close();
        return row;
    } else {
        cursor.close();
        return null;
    }
}
From source file:br.ufabc.impress.mongo.manager.DBHelper.java
@Override
public String getByKey(String tableName, String _id) {
    if (tableName == null || tableName.equals("") || _id == null || _id.equals("")) {
        return "501";
    }
    String row = null;
    DBCursor cursor = null;
    DBCollection table = db.getCollection(tableName);
    BasicDBObject searchQuery = new BasicDBObject();
    searchQuery.put("_id", new ObjectId(_id));
    cursor = table.find(searchQuery);
    if (cursor.size() > 0) {
        JSON json = new JSON();
        row = json.serialize(cursor);
        cursor.close();
        return row;
    } else {
        cursor.close();
        return null;
    }
}
From source file:br.ufabc.impress.mongo.manager.DBHelper.java
@Override
public String getByCondition(String tableName, Map condition) {
    if (tableName == null || tableName.equals("") || condition == null || condition.isEmpty()) {
        return "501";
    }
    String row = "";
    DBCollection table = db.getCollection(tableName);
    BasicDBObject searchQuery = new BasicDBObject(condition);
    // (The original also built an unused "$and" wrapper here; it was dead code, and
    // malformed: $and takes a list of clauses, while a single BasicDBObject of
    // conditions is already an implicit AND.)
    DBCursor cursor = table.find(searchQuery);
    if (cursor.size() > 0) {
        JSON json = new JSON();
        row = json.serialize(cursor);
        cursor.close();
        return row;
    } else {
        cursor.close();
        return null;
    }
}
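For reference, if an explicit $and is ever wanted, the legacy driver expects the operator's value to be a list of clause objects rather than a single document. A sketch against the same table collection, with hypothetical field names:

    // Sketch: correct $and construction (java.util.List/ArrayList assumed imported)
    List<BasicDBObject> clauses = new ArrayList<BasicDBObject>();
    clauses.add(new BasicDBObject("status", "active"));                  // hypothetical field
    clauses.add(new BasicDBObject("age", new BasicDBObject("$gt", 18))); // hypothetical field
    DBCursor cursor = table.find(new BasicDBObject("$and", clauses));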
From source file:com.aw.app.action.TroopsAction.java
/**
 * @param uid
 * @param buildingId
 * @return
 */
public boolean checkToopsInBarrack(Long uid, long buildingId) {
    boolean state = false;
    Long time = new Date().getTime();
    DBCollection table = MongoDbUtil.getCollection(MongoDbUtil.defaultDBName, "aw_barrack_troops");
    BasicDBObject query = new BasicDBObject("uid", uid);
    query.put("building_id", buildingId);
    DBCursor barrackTroops = table.find(query);
    long nextTroopsTrainingTime = 0;
    if (barrackTroops != null && barrackTroops.size() > 0) {
        while (barrackTroops.hasNext()) {
            DBObject barrackTroop = barrackTroops.next();
            if (nextTroopsTrainingTime > 0 && state) {
                BasicDBObject updateQuery = new BasicDBObject();
                updateQuery.put("bc_id", barrackTroop.get("bc_id"));
                BasicDBObject updateBt = new BasicDBObject();
                Map data = new HashMap();
                data.put("is_first_in_queue", 1);
                data.put("training_start_time", nextTroopsTrainingTime);
                BasicDBObject object = new BasicDBObject(data);
                updateBt.put("$set", object);
                MongoDbUtil.updateCollection("aw_barrack_troops", updateQuery, updateBt);
            }
            Map result = processTroopTraining(barrackTroop, time);
            boolean breakState = (boolean) result.get("breakState");
            state = (boolean) result.get("status");
            long nextTrainingTime = (long) result.get("nextTrainingTime");
            if (breakState) {
                // Error condition
            } else if (state && result.containsKey("nextTrainingTime") && nextTrainingTime > 0) {
                nextTroopsTrainingTime = nextTrainingTime;
            } else {
                nextTroopsTrainingTime = 0;
            }
        }
    }
    return state;
}
From source file:com.bigdid.model.Customer_Return_Percentage.java
public ArrayList<String> getFeedback() throws UnknownHostException {
    ArrayList<String> feedback = new ArrayList<String>();
    DBCursor obj = coll.find();
    // BasicDBObject whereQuery = new BasicDBObject();
    // whereQuery.put("Percentage", -1);
    // obj.sort(whereQuery);
    int size = obj.size();
    int i;
    for (i = 0; i < size; i++) {
        feedback.add((String) obj.next().get("Percentage"));
        System.out.println("Valuesss" + feedback.get(i));
    }
    return feedback;
}
From source file:com.bigdid.model.Revenue_Item.java
public ArrayList<Integer> getRevenue() throws UnknownHostException {
    ArrayList<Integer> revenue = new ArrayList<Integer>();
    int i = 0;
    DBCursor obj = coll.find();
    int size = obj.size();
    for (i = 0; i < size; i++) {
        revenue.add((Integer) obj.next().get("Order_Value"));
    }
    return revenue;
}
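In both of the examples above, size() is used only to bound the loop, which costs an extra count round-trip and assumes the collection does not change between the count and the reads. Iterating with hasNext() avoids both issues; a sketch of the same getRevenue() loop:

    // Sketch: iterate without the extra count round-trip
    DBCursor obj = coll.find();
    try {
        while (obj.hasNext()) {
            revenue.add((Integer) obj.next().get("Order_Value"));
        }
    } finally {
        obj.close(); // release server-side cursor resources
    }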
From source file:com.ikanow.infinit.e.api.knowledge.processing.ScoringUtils.java
License:Open Source License
@SuppressWarnings("unchecked") public List<BasicDBObject> calcTFIDFAndFilter(DBCollection docsDb, DBCursor docs, AdvancedQueryPojo.QueryScorePojo scoreParams, AdvancedQueryPojo.QueryOutputPojo outParams, StatisticsPojo scores, boolean bLowAccuracyDecay, long nStart, long nToClientLimit, String[] communityIds, String[] entityTypeFilterStrings, String[] assocVerbFilterStrings, LinkedList<BasicDBObject> standaloneEventsReturn, LinkedList<BasicDBObject> lowAccuracyAggregatedEnts, AggregationUtils.GeoContainer lowAccuracyAggregatedGeo, AggregationUtils.GeoContainer extraAliasAggregatedGeo, LinkedList<BasicDBObject> lowAccuracyAggregatedEvents, LinkedList<BasicDBObject> lowAccuracyAggregatedFacts) { _s0_multiCommunityHandler = new ScoringUtils_MultiCommunity(communityIds); _s0_avgLuceneScore = scores.avgScore; _s0_avgLuceneScoreInv = 1.0 / (scores.avgScore + 0.01); // (+0.01 for safety in case avgScore is small) _s0_maxLuceneScoreInv = 1.0 / (scores.maxScore + 0.01); // Utility classes // Quick check - do I need to be here at all? LinkedList<BasicDBObject> returnList = new LinkedList<BasicDBObject>(); _s0_bNeedToCalcSig = (null != lowAccuracyAggregatedEnts) || (null != lowAccuracyAggregatedEvents) || (null != lowAccuracyAggregatedFacts) || (null != lowAccuracyAggregatedGeo) || ((nToClientLimit > 0) && outParams.docs.enable); if (!_s0_bNeedToCalcSig && (null == standaloneEventsReturn)) { return returnList; } //TESTED/* www .j a va 2 s . c o m*/ else if (!_s0_bNeedToCalcSig) { // (ie and want standaloneEventsReturn) if (scoreParams.sigWeight > 0.0) { // (reverse the call, we want sig for the standalone events) _s0_bNeedToCalcSig = true; nToClientLimit = 0; // (ensure no docs get accidentally output) } } //TESTED // Various configuration and state variables // Entity aggregation code: _s0_nNumEntsReturn = 0; if (null != lowAccuracyAggregatedEnts) { _s0_nNumEntsReturn = outParams.aggregation.entsNumReturn; } _s1_entitiesInDataset = new HashMap<String, EntSigHolder>(); _s1_noEntityBuckets = new ArrayList<TempDocBucket>(); // (User output options) _s0_bNonGeoEnts = true; _s0_bGeoEnts = true; _s0_bEvents = true; _s0_bFacts = true; _s0_bSummaries = true; _s0_bMetadata = true; if (null != outParams.docs) { if ((null != outParams.docs.metadata) && !outParams.docs.metadata) { _s0_bMetadata = false; } if ((null != outParams.docs.ents) && !outParams.docs.ents) { _s0_bNonGeoEnts = false; _s0_bGeoEnts = false; // (but can be overridden below) } if ((null != outParams.docs.geo) && !outParams.docs.geo) { _s0_bGeoEnts = false; } else if ((null != outParams.docs.geo) && outParams.docs.geo) { _s0_bGeoEnts = true; } if ((null != outParams.docs.events) && !outParams.docs.events) { _s0_bEvents = false; } if ((null != outParams.docs.facts) && !outParams.docs.facts) { _s0_bFacts = false; } if ((null != outParams.docs.summaries) && !outParams.docs.summaries) { _s0_bSummaries = false; } } //TESTED if (null != entityTypeFilterStrings) { if ('-' == entityTypeFilterStrings[0].charAt(0)) { _s0_bEntityTypeFilterPositive = false; } //TESTED (in both entities and associations) _s0_entityTypeFilter = new HashSet<String>(); for (String entityType : entityTypeFilterStrings) { if (!_s0_bEntityTypeFilterPositive && ('-' == entityType.charAt(0))) { entityType = entityType.substring(1); } _s0_entityTypeFilter.add(entityType.toLowerCase()); } } if (_s0_bEvents || _s0_bFacts || _s0_bSummaries || (null != standaloneEventsReturn)) { // (ie most of the time!) 
if (null != assocVerbFilterStrings) { if ('-' == assocVerbFilterStrings[0].charAt(0)) { _s0_bAssocVerbFilterPositive = false; } //TESTED _s0_assocVerbFilter = new HashSet<String>(); for (String assocVerb : assocVerbFilterStrings) { if (!_s0_bAssocVerbFilterPositive && ('-' == assocVerb.charAt(0))) { assocVerb = assocVerb.substring(1); } _s0_assocVerbFilter.add(assocVerb); } } } //TESTED if ((scoreParams.relWeight == 0.0) && (scoreParams.sigWeight == 0.0)) { _s0_sortingByDate = true; } // First loop: just count and store if ((null != standaloneEventsReturn) && (null != outParams.docs) && (null != outParams.docs.numEventsTimelineReturn) && (outParams.docs.numEventsTimelineReturn > 0)) { _s0_standaloneEventAggregator = new StandaloneEventHashAggregator(standaloneEventsReturn, false, _s1_aliasLookup); } if ((null != lowAccuracyAggregatedEvents) && (null != outParams.aggregation) && (null != outParams.aggregation.eventsNumReturn) && (outParams.aggregation.eventsNumReturn > 0)) { _s0_lowAccuracyAssociationAggregator_events = new StandaloneEventHashAggregator( lowAccuracyAggregatedEvents, true, _s1_aliasLookup); } if ((null != lowAccuracyAggregatedFacts) && (null != outParams.aggregation) && (null != outParams.aggregation.factsNumReturn) && (outParams.aggregation.factsNumReturn > 0)) { _s0_lowAccuracyAssociationAggregator_facts = new StandaloneEventHashAggregator( lowAccuracyAggregatedFacts, true, _s1_aliasLookup); } if ((null != lowAccuracyAggregatedGeo) && (null != outParams.aggregation) && (null != outParams.aggregation.geoNumReturn) && (outParams.aggregation.geoNumReturn > 0)) { // Initialize the buckets _s3_geoBuckets = (LinkedList<EntSigHolder>[]) new LinkedList[_s3_nGEO_BUCKETS]; _s3_bLowAccuracyGeo = true; } if ((null != extraAliasAggregatedGeo) && (null != outParams.aggregation) && (null != outParams.aggregation.geoNumReturn) && (outParams.aggregation.geoNumReturn > 0)) { _s3_bExtraAliasGeo = true; // (don't initialize _s3_geoBuckets until we have to) } if (bLowAccuracyDecay) { _s1_dManualGeoDecay_latLonInvdecay = QueryHandler.parseGeoDecay(scoreParams); } //TESTED _s0_nQuerySubsetDocCount = docs.size(); // eg (1000 docus, user limit) _s0_nQuerySetDocCount = scores.found; // however many were actually found //lookup the totaldoc count _s0_globalDocCount = 0; long nGlobalDocCount = 0; try { nGlobalDocCount = getDocCount(_s0_multiCommunityHandler.getCommunityIds()); } catch (Exception e) { // If an exception occurs log the error logger.error("Exception Message: " + e.getMessage(), e); } // (End doccount) if (_s0_nQuerySetDocCount > nGlobalDocCount) { nGlobalDocCount = _s0_nQuerySetDocCount; // (This can happen if the source doc counts get out of sync... // ... 
conversely if the index/db get out of sync, the other way round can be correct, but this way is safer) } _s0_globalDocCount = (double) nGlobalDocCount; stage1_initialCountingLoop(docs, scoreParams, (int) nToClientLimit, scores, standaloneEventsReturn, communityIds.length); //Exit if not generating documents or entity aggregations: if (!_s0_bNeedToCalcSig) { return returnList; } //TESTED // Histogram time: this.stage2_generateFreqHistogramCalcIDFs(); // Next stop: loop over the entities and calculate the IDF terms this.stage3_calculateTFTerms(scoreParams, scores, nStart + nToClientLimit); // (get extra docs to handle deduplication) // Finally, write all the information to the surviving 100 (or whatever) documents // Handle skipping past the end: if ((nStart + nToClientLimit) > _s3_pqDocs.size()) { nToClientLimit = _s3_pqDocs.size() - nStart; if (nToClientLimit < 0) { nToClientLimit = 0; } } this.stage4_prepareDocsForOutput(scoreParams, scores, nToClientLimit, returnList); // And then same for entities this.stage4_prepareEntsForOutput(lowAccuracyAggregatedEnts); //Association is mostly done on the fly, but a final tidy up: if (null != standaloneEventsReturn) { ScoringUtils_Associations.finalizeStandaloneEvents(standaloneEventsReturn, _s0_standaloneEventAggregator, outParams.docs.numEventsTimelineReturn); } if (null != _s0_lowAccuracyAssociationAggregator_events) { ScoringUtils_Associations.finalizeStandaloneEvents(lowAccuracyAggregatedEvents, _s0_lowAccuracyAssociationAggregator_events, outParams.aggregation.eventsNumReturn); } if (null != _s0_lowAccuracyAssociationAggregator_facts) { ScoringUtils_Associations.finalizeStandaloneEvents(lowAccuracyAggregatedFacts, _s0_lowAccuracyAssociationAggregator_facts, outParams.aggregation.factsNumReturn); } // Geo is mostly done on the fly, but a final tidy up: if (null != lowAccuracyAggregatedGeo) { finalizeLowAccuracyGeoAggregation(lowAccuracyAggregatedGeo, outParams.aggregation.geoNumReturn); // (outParams.aggregation.geoNumReturn must exist if (null != lowAccuracyAggregatedGeo)) } else if ((null != extraAliasAggregatedGeo) && (null != _s3_geoBuckets)) { finalizeLowAccuracyGeoAggregation(extraAliasAggregatedGeo, Long.MAX_VALUE); //(at most 1 per alias so size shouldn't be an issue) } return returnList; }
From source file:com.ikanow.infinit.e.core.utils.SourceUtils.java
License:Open Source License
private static int pruneSource(SourcePojo source, int nToPrune, int ttl_days) {
    int nTotalDocsDeleted = 0;
    int nDocsDeleted = 0;
    // (code taken mostly from SourceHandler.deleteSource)
    if (null != source.getKey()) { // or may delete everything!
        BasicDBObject docQuery = new BasicDBObject(DocumentPojo.sourceKey_, source.getDistributedKeyQueryTerm());
        if (ttl_days > 0) {
            Date ageOut = new Date(new Date().getTime() - ttl_days * 24L * 3600L * 1000L);
            ObjectId oldestAllowedId = new ObjectId(ageOut);
            docQuery.put(DocumentPojo._id_, new BasicDBObject(DbManager.lt_, oldestAllowedId));
        } //TODO: TOTEST
        docQuery.put(DocumentPojo.index_, new BasicDBObject(DbManager.ne_, "?DEL?")); // (robustness)
        BasicDBObject sortField = new BasicDBObject(DocumentPojo._id_, 1);
        BasicDBObject docFields = new BasicDBObject();
        docFields.append(DocumentPojo.url_, 1);
        docFields.append(DocumentPojo.sourceUrl_, 1);
        docFields.append(DocumentPojo.index_, 1);
        docFields.append(DocumentPojo.sourceKey_, 1);
        StoreAndIndexManager dataStore = new StoreAndIndexManager();
        ObjectId nextId = null;
        while (nToPrune > 0) {
            int nToDelete = nToPrune;
            if (nToDelete > 10000) {
                nToDelete = 10000;
            }
            if (null != nextId) {
                docQuery.put(DocumentPojo._id_, new BasicDBObject(DbManager.gt_, nextId));
            } //TESTED (by hand)
            DBCursor dbc = DbManager.getDocument().getMetadata().find(docQuery, docFields).sort(sortField)
                    .limit(nToDelete); // (ie batches of 10K, ascending ordered by _id)
            nToPrune -= nToDelete;
            if (0 == nDocsDeleted) {
                nDocsDeleted = dbc.count();
                nTotalDocsDeleted += nDocsDeleted;
            }
            if (0 == dbc.size()) {
                break;
            }
            List<DocumentPojo> docs = DocumentPojo.listFromDb(dbc, DocumentPojo.listType());
            nextId = dataStore.removeFromDatastore_byURL(docs, source);
        }
    }
    // No need to do anything related to soft deletion, this is all handled when the harvest ends
    return nTotalDocsDeleted;
}
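This example leans on the difference between the two cursor counting methods: dbc.count() ignores the .limit(nToDelete) modifier and so captures the full number of matching documents for the deletion tally, while dbc.size() honors the limit and skip modifiers, so 0 == dbc.size() correctly detects when a batch comes back empty.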