List of usage examples for com.mongodb BasicDBObject getBoolean
public boolean getBoolean(final String key, final boolean def)
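Before the project examples below, a minimal standalone sketch of this two-argument overload: it returns the stored value when the key is present and falls back to the supplied default when the key is absent. The class and field names here ("active", "hidden") are illustrative only and are not taken from the listed projects.

import com.mongodb.BasicDBObject;

public class GetBooleanExample {
    public static void main(String[] args) {
        BasicDBObject doc = new BasicDBObject("active", true);

        // Key present: the stored boolean is returned.
        boolean active = doc.getBoolean("active", false);  // true

        // Key absent: the supplied default is returned instead.
        boolean hidden = doc.getBoolean("hidden", true);    // true (the default)

        System.out.println("active=" + active + ", hidden=" + hidden);
    }
}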
From source file:com.edgytech.umongo.CollectionPanel.java
License:Apache License
public void settings(ButtonBase button) {
    FormDialog dialog = (FormDialog) ((MenuItem) getBoundUnit(Item.settings)).getDialog();
    BasicDBObject stats = (BasicDBObject) getStats();
    boolean pwr2 = stats.getBoolean("userFlags", false);
    setBooleanFieldValue(Item.usePowerOf2Sizes, pwr2);
    if (!dialog.show()) {
        return;
    }
    boolean newPwr2 = getBooleanFieldValue(Item.usePowerOf2Sizes);
    if (newPwr2 != pwr2) {
        BasicDBObject cmd = new BasicDBObject("collMod", getCollectionNode().getCollection().getName());
        cmd.put("usePowerOf2Sizes", newPwr2);
        new DbJobCmd(getCollectionNode().getCollection().getDB(), cmd).addJob();
    }
}
From source file:com.edgytech.umongo.ReplicaDialog.java
License:Apache License
public void updateFromReplicaConfig(BasicDBObject member) {
    // reset
    xmlLoadCheckpoint();
    ((TextField) getBoundUnit(Item.host)).editable = false;
    setStringFieldValue(Item.host, member.getString("host"));
    setBooleanFieldValue(Item.arbiterOnly, member.getBoolean("arbiterOnly", false));
    setBooleanFieldValue(Item.hidden, member.getBoolean("hidden", false));
    setBooleanFieldValue(Item.ignoreIndexes, !member.getBoolean("buildIndexes", true));
    if (member.containsField("priority")) {
        setDoubleFieldValue(Item.priority, member.getDouble("priority"));
    }
    if (member.containsField("slaveDelay")) {
        setIntFieldValue(Item.slaveDelay, member.getInt("slaveDelay"));
    }
    if (member.containsField("votes")) {
        setIntFieldValue(Item.votes, member.getInt("votes"));
    }
    if (member.containsField("tags")) {
        ((DocBuilderField) getBoundUnit(Item.tags)).setDBObject((DBObject) member.get("tags"));
    }
}
From source file:com.epam.dlab.auth.dao.UserInfoDAOMongoImpl.java
License:Apache License
@Override
public UserInfo getUserInfoByAccessToken(String accessToken) {
    BasicDBObject uiSearchDoc = new BasicDBObject();
    uiSearchDoc.put("_id", accessToken);
    MongoCollection<BasicDBObject> mc = ms.getCollection("security", BasicDBObject.class);
    FindIterable<BasicDBObject> res = mc.find(uiSearchDoc);
    BasicDBObject uiDoc = res.first();
    if (uiDoc == null) {
        log.warn("UI not found {}", accessToken);
        return null;
    }
    Date lastAccess = uiDoc.getDate("expireAt");
    if (inactiveUserTimeoutMsec < Math.abs(new Date().getTime() - lastAccess.getTime())) {
        log.warn("UI for {} expired but were not evicted from DB. Contact MongoDB admin to create expireable "
                + "index on 'expireAt' key.", accessToken);
        this.deleteUserInfo(accessToken);
        return null;
    }
    String name = uiDoc.get("name").toString();
    String firstName = uiDoc.getString("firstName", "");
    String lastName = uiDoc.getString("lastName", "");
    String remoteIp = uiDoc.getString("remoteIp", "");
    BasicDBList roles = (BasicDBList) uiDoc.get("roles");
    Boolean awsUser = uiDoc.getBoolean("awsUser", false);
    UserInfo ui = new UserInfo(name, accessToken);
    ui.setFirstName(firstName);
    ui.setLastName(lastName);
    ui.setRemoteIp(remoteIp);
    ui.setAwsUser(awsUser);
    Object awsKeys = uiDoc.get("awsKeys");
    if (awsKeys != null) {
        ((BasicDBObject) awsKeys).forEach((key, val) -> ui.addKey(key, val.toString()));
    }
    roles.forEach(o -> ui.addRole("" + o));
    log.debug("Found persistent {}", ui);
    return ui;
}
From source file:com.ikanow.infinit.e.core.utils.SourceUtils.java
License:Open Source License
private static boolean updateHarvestDistributionState_tokenComplete(SourcePojo source, HarvestEnum harvestStatus,
        BasicDBObject incClause, BasicDBObject setClause) {

    // Update tokens complete, and retrieve modified version
    int nTokensToBeCleared = source.getDistributionTokens().size();
    BasicDBObject query = new BasicDBObject(SourcePojo._id_, source.getId());
    BasicDBObject modify = new BasicDBObject(MongoDbManager.inc_,
            new BasicDBObject(SourceHarvestStatusPojo.sourceQuery_distributionTokensComplete_, nTokensToBeCleared));
    BasicDBObject fields = new BasicDBObject(SourceHarvestStatusPojo.sourceQuery_distributionTokensComplete_, 1);
    fields.put(SourceHarvestStatusPojo.sourceQuery_harvest_status_, 1);
    fields.put(SourceHarvestStatusPojo.sourceQuery_distributionReachedLimit_, 1);
    BasicDBObject partial = (BasicDBObject) MongoDbManager.getIngest().getSource().findAndModify(query, fields,
            null, false, modify, true, false);
    // (return new version - ensures previous increments have been taken into account)

    // Two cases: source complete (all tokens obtained), source incomplete:
    if (null != partial) { // (else yikes!)
        BasicDBObject partialStatus = (BasicDBObject) partial.get(SourcePojo.harvest_);
        if (null != partialStatus) { // (else yikes!)
            int nTokensComplete = partialStatus.getInt(SourceHarvestStatusPojo.distributionTokensComplete_, 0);
            // (note after increment)

            // COMPLETE: reset parameters, status -> error (if anything has errored), success (all done),
            // success_iteration (more to do)
            if (nTokensComplete == source.getDistributionFactor()) {
                if (!source.reachedMaxDocs()) {
                    // (Can only do this if we've finished the source...
                    // ...else the different threads can be at different points, so the most recent doc for one
                    // thread might be before the most recent doc of another)
                    setClause.put(SourceHarvestStatusPojo.sourceQuery_distributedLastCompletedCycle_, new Date());
                }
                setClause.put(SourceHarvestStatusPojo.sourceQuery_distributionTokensComplete_, 0);
                setClause.put(SourceHarvestStatusPojo.sourceQuery_distributionTokensFree_,
                        source.getDistributionFactor());
                setClause.put(SourceHarvestStatusPojo.sourceQuery_distributionReachedLimit_, false); // (resetting this)

                // This source is now complete
                String status = partialStatus.getString(SourceHarvestStatusPojo.harvest_status_, null);
                Boolean reachedLimit = partialStatus.getBoolean(SourceHarvestStatusPojo.distributionReachedLimit_,
                        false) || source.reachedMaxDocs();

                if ((null != status) && ((status.equalsIgnoreCase(HarvestEnum.error.toString())
                        || (HarvestEnum.error == harvestStatus)))) {
                    setClause.put(SourceHarvestStatusPojo.sourceQuery_harvest_status_, HarvestEnum.error.toString());
                } //TESTED (current and previous state == error)
                else if (reachedLimit || (HarvestEnum.success_iteration == harvestStatus)) {
                    setClause.put(SourceHarvestStatusPojo.sourceQuery_harvest_status_,
                            HarvestEnum.success_iteration.toString());
                } //TESTED (from previous or current state)
                // (else leave with default of success)

                //DEBUG
                //System.out.println(Thread.currentThread().getName() + " COMPLETE_SRC COMPLETE_TOKEN=" + source.getKey() + " / " + setClause.toString() + " / " + incClause.toString() + " / " + nTokensComplete);

                return true;
            } //TESTED
            else { // Not complete

                // If we're here then we're only allowed to update the status to error
                if (HarvestEnum.error != harvestStatus) {
                    setClause.remove(SourceHarvestStatusPojo.sourceQuery_harvest_status_);
                } //TESTED

                if (source.reachedMaxDocs()) {
                    setClause.put(SourceHarvestStatusPojo.sourceQuery_distributionReachedLimit_, true);
                } //TESTED (reached max limit)

                //DEBUG
                //System.out.println(Thread.currentThread().getName() + " COMPLETE_TOKEN=" + source.getKey() + " / " + setClause.toString() + " / " + incClause.toString() + " / " + nTokensComplete);

                return false;
            } //(end is complete or not)
        } //(end found partial source status, else catastrophic failure)
    } //(end found partial source, else catastrophic failure)

    return false;
}
From source file:com.ikanow.infinit.e.processing.generic.GenericProcessingController.java
License:Open Source License
public void InitializeIndex(boolean bDeleteDocs, boolean bDeleteEntityFeature, boolean bDeleteEventFeature,
        boolean bRebuildDocsIndex) {
    try {
        // create elasticsearch indexes
        if (!ElasticSearchManager.pingIndex(null, null)) {
            throw new RuntimeException("Index is red, disable indexing operations");
        } //TESTED

        PropertiesManager pm = new PropertiesManager();
        if (!pm.getAggregationDisabled()) {
            boolean languageNormalization = pm.getNormalizeEncoding();

            Builder localSettingsEvent = ImmutableSettings.settingsBuilder();
            localSettingsEvent.put("number_of_shards", 10).put("number_of_replicas", 2);
            localSettingsEvent.put("index.analysis.analyzer.suggestAnalyzer.tokenizer", "standard");
            if (languageNormalization) {
                localSettingsEvent.putArray("index.analysis.analyzer.suggestAnalyzer.filter", "icu_normalizer",
                        "icu_folding", "standard", "lowercase");
            } else {
                localSettingsEvent.putArray("index.analysis.analyzer.suggestAnalyzer.filter", "standard",
                        "lowercase");
            }

            Builder localSettingsGaz = ImmutableSettings.settingsBuilder();
            localSettingsGaz.put("number_of_shards", 10).put("number_of_replicas", 2);
            localSettingsGaz.put("index.analysis.analyzer.suggestAnalyzer.tokenizer", "standard");
            if (languageNormalization) {
                localSettingsGaz.putArray("index.analysis.analyzer.suggestAnalyzer.filter", "icu_normalizer",
                        "icu_folding", "standard", "lowercase");
            } else {
                localSettingsGaz.putArray("index.analysis.analyzer.suggestAnalyzer.filter", "standard",
                        "lowercase");
            }

            // event feature
            String eventGazMapping = new Gson().toJson(new AssociationFeaturePojoIndexMap.Mapping(),
                    AssociationFeaturePojoIndexMap.Mapping.class);
            ElasticSearchManager eventIndex = IndexManager.createIndex(AssociationFeaturePojoIndexMap.indexName_,
                    null, false, null, eventGazMapping, localSettingsEvent);
            if (null == eventIndex) { // (if has been previously referenced in this process space)
                eventIndex = IndexManager.getIndex(AssociationFeaturePojoIndexMap.indexName_);
            }
            eventIndex.createAlias(AssociationFeaturePojoIndexMap.indexCollectionName_);
            if (bDeleteEventFeature) {
                eventIndex.deleteMe();
                eventIndex = IndexManager.createIndex(AssociationFeaturePojoIndexMap.indexName_, null, false, null,
                        eventGazMapping, localSettingsEvent);
            }

            // entity feature
            String gazMapping = new Gson().toJson(new EntityFeaturePojoIndexMap.Mapping(),
                    EntityFeaturePojoIndexMap.Mapping.class);
            ElasticSearchManager entityIndex = IndexManager.createIndex(EntityFeaturePojoIndexMap.indexName_, null,
                    false, null, gazMapping, localSettingsGaz);
            if (null == entityIndex) { // (if has been previously referenced in this process space)
                entityIndex = IndexManager.getIndex(EntityFeaturePojoIndexMap.indexName_);
            }
            entityIndex.createAlias(EntityFeaturePojoIndexMap.indexCollectionName_);
            if (bDeleteEntityFeature) {
                entityIndex.deleteMe();
                entityIndex = IndexManager.createIndex(EntityFeaturePojoIndexMap.indexName_, null, false, null,
                        gazMapping, localSettingsGaz);
            }
        }

        // DOCS - much more complicated than anything else
        boolean bPingMainIndexFailed = !ElasticSearchManager.pingIndex(DocumentPojoIndexMap.globalDocumentIndex_);
        // (ie if main doc index doesn't exist then always rebuild all indexes)
        if (bPingMainIndexFailed) {
            // extra level of robustness... sleep for a minute then double check the index is really missing...
            try {
                Thread.sleep(60000);
            } catch (Exception e) {
            }
            bPingMainIndexFailed = !ElasticSearchManager.pingIndex(DocumentPojoIndexMap.globalDocumentIndex_);
        }
        bRebuildDocsIndex |= bPingMainIndexFailed;

        // check the main index has the "collection" alias - if not then rebuild everything
        if (!bPingMainIndexFailed && (null == _aliasInfo)) {
            ElasticSearchManager docIndex = ElasticSearchManager.getIndex(DocumentPojoIndexMap.globalDocumentIndex_);
            ClusterStateResponse clusterState = docIndex.getRawClient().admin().cluster()
                    .state(new ClusterStateRequest()).actionGet();
            _aliasInfo = CrossVersionImmutableMapOfImmutableMaps.getAliases(clusterState.getState().getMetaData());
            if (!_aliasInfo.containsKey(DocumentPojoIndexMap.globalDocumentIndexCollection_)) {
                bRebuildDocsIndex = true;
            }
        } //TESTED

        createCommunityDocIndex(DocumentPojoIndexMap.globalDocumentIndex_, null, false, true, bDeleteDocs);
        createCommunityDocIndex(DocumentPojoIndexMap.manyGeoDocumentIndex_, null, false, false, bDeleteDocs);
        // Some hardwired dummy communities
        createCommunityDocIndex("4e3706c48d26852237078005", null, true, false, bDeleteDocs); // (admin)
        createCommunityDocIndex("4e3706c48d26852237079004", null, true, false, bDeleteDocs); // (test user)
        // (create dummy index used to keep personal group aliases)

        if (bRebuildDocsIndex || bDeleteDocs) {
            // OK, going to have different shards for different communities:
            // Get a list of all the communities:
            BasicDBObject query = new BasicDBObject();
            BasicDBObject fieldsToDrop = new BasicDBObject("members", 0);
            fieldsToDrop.put("communityAttributes", 0);
            fieldsToDrop.put("userAttributes", 0);
            DBCursor dbc = DbManager.getSocial().getCommunity().find(query, fieldsToDrop);
            List<DBObject> tmparray = dbc.toArray(); // (brings the entire thing into memory so don't get cursor timeouts)
            int i = 0;
            System.out.println("Initializing " + dbc.size() + " indexes:");
            for (int j = 0; j < 2; ++j) {
                for (DBObject dbotmp : tmparray) {
                    if ((++i % 100) == 0) {
                        System.out.println("Initialized " + i + " indexes.");
                    }
                    BasicDBObject dbo = (BasicDBObject) dbotmp;

                    // OK, going to see if there are any sources with this group id, create a new index if so:
                    // (Don't use CommunityPojo data model here for performance reasons....
                    //  (Also, haven't gotten round to porting CommunityPojo field access to using static fields))
                    ObjectId communityId = (ObjectId) dbo.get("_id");
                    boolean bPersonalGroup = dbo.getBoolean("isPersonalCommunity", false);
                    boolean bSystemGroup = dbo.getBoolean("isSystemCommunity", false);
                    ObjectId parentCommunityId = (ObjectId) dbo.get("parentId");

                    createCommunityDocIndex(communityId.toString(), parentCommunityId, bPersonalGroup, bSystemGroup,
                            bDeleteDocs, j == 0);
                } //end loop over communities
            } // end loop over communities - first time parents only
        } // (end if need to do big loop over all sources)
    } catch (Exception e) {
        //DEBUG
        //e.printStackTrace();
        throw new RuntimeException(e.getMessage());
    }
}
From source file:org.canedata.provider.mongodb.entity.MongoEntity.java
License:Apache License
/**
 * Finds the first document in the query and updates it.
 *
 * @see com.mongodb.DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)
 * @param expr
 * @return
 */
public Fields findAndUpdate(Expression expr) {
    if (logger.isDebug())
        logger.debug("Finding and updating entity, Database is {0}, Collection is {1} ...", getSchema(), getName());

    BasicDBObject options = new BasicDBObject();
    try {
        validateState();

        final BasicDBObject fields = new BasicDBObject();
        MongoExpressionFactory expFactory = new MongoExpressionFactory.Impl();
        BasicDBObject projection = new BasicDBObject();
        Limiter limiter = new Limiter.Default();
        BasicDBObject sorter = new BasicDBObject();

        IntentParser.parse(getIntent(), expFactory, fields, projection, limiter, sorter, options);

        BasicDBObject query = expFactory.parse((MongoExpression) expr);

        if (logger.isDebug())
            logger.debug("Finding and updating entity, Database is {0}, Collection is {1}, expression is {2}, "
                    + "Projections is {3}, Update is {4}, Sorter is {5}, Options is {6} ...", getSchema(),
                    getName(), query.toString(), projection.toString(), fields.toString(), sorter.toString(),
                    JSON.serialize(options));

        DBObject rlt = getCollection().findAndModify(query, projection, sorter,
                options.getBoolean(Options.FIND_AND_REMOVE, false), fields,
                options.getBoolean(Options.RETURN_NEW, false), options.getBoolean(Options.UPSERT, false));

        if (rlt == null || rlt.keySet().isEmpty())
            return null;

        // alive cache
        if (null != getCache()) {
            invalidateCache(query);
        }

        return new MongoFields(this, getIntent(), (BasicDBObject) rlt).project(projection.keySet());
    } catch (AnalyzeBehaviourException abe) {
        if (logger.isDebug())
            logger.debug(abe, "Analyzing behaviour failure, cause by: {0}.", abe.getMessage());
        throw new RuntimeException(abe);
    } finally {
        if (!options.getBoolean(Options.RETAIN, false))
            getIntent().reset();
    }
}
From source file:org.canedata.provider.mongodb.entity.MongoEntity.java
License:Apache License
public int update(Serializable... keys) {
    if (logger.isDebug())
        logger.debug("Updating entitiy, Database is {0}, Collection is {1}, keys is {2}.", getSchema(), getName(),
                Arrays.toString(keys));

    try {
        if (keys == null || keys.length == 0)
            return 0;

        validateState();

        final BasicDBObject fields = new BasicDBObject();
        final BasicDBObject othersFields = new BasicDBObject();
        final BasicDBObject options = new BasicDBObject();

        getIntent().playback(new Tracer() {
            public Tracer trace(Step step) throws AnalyzeBehaviourException {
                switch (step.step()) {
                case MongoStep.PUT:
                    if (logger.isDebug())
                        logger.debug("Analyzing behivor PUT, step is {0}, purpose is {1}, scalar is {2}.",
                                step.step(), step.getPurpose(), Arrays.toString(step.getScalar()));

                    if (StringUtils.isBlank(step.getPurpose()))
                        break;

                    Object val = (step.getScalar() == null || step.getScalar().length == 0) ? null
                            : step.getScalar()[0];

                    if (step.getPurpose().matches(internalCmds))
                        othersFields.append(step.getPurpose(), val);
                    else
                        fields.append(step.getPurpose(), val);

                    break;
                case MongoStep.OPTION:
                    options.append(step.getPurpose(), step.getScalar()[0]);
                    break;
                default:
                    logger.warn("Step {0} does not apply to activities create, this step will be ignored.",
                            step.step());
                }

                return this;
            }
        });

        BasicDBObject fs = new BasicDBObject();
        if (!fields.isEmpty())
            fs.put("$set", fields);
        if (!othersFields.isEmpty())
            fs.putAll(othersFields.toMap());

        if (fs.isEmpty())
            return 0;

        WriteResult wr = getCollection().update(new BasicDBObject().append("_id", keys[0]), fs,
                options.getBoolean(Options.UPSERT, false), false, getCollection().getWriteConcern());

        if (!StringUtils.isBlank(wr.getError()))
            throw new DataAccessException(wr.getError());

        // invalidate cache
        if (null != getCache()) {
            String cacheKey = getKey().concat("#").concat(keys[0].toString());
            getCache().remove(cacheKey);

            if (logger.isDebug())
                logger.debug("Invalidated cache key is {0}.", keys[0].toString());
        }

        return wr.getN();
    } catch (AnalyzeBehaviourException abe) {
        if (logger.isDebug())
            logger.debug(abe, "Analyzing behaviour failure, cause by: {0}.", abe.getMessage());
        throw new RuntimeException(abe);
    } finally {
        getIntent().reset();
    }
}
From source file:org.canedata.provider.mongodb.entity.MongoEntity.java
License:Apache License
public int updateRange(Expression expr) {
    if (logger.isDebug())
        logger.debug("Updating entities, Database is {0}, Collection is {1} ...", getSchema(), getName());

    try {
        validateState();

        final BasicDBObject fields = new BasicDBObject();
        final BasicDBObject othersFields = new BasicDBObject();
        final BasicDBObject options = new BasicDBObject();

        getIntent().playback(new Tracer() {
            public Tracer trace(Step step) throws AnalyzeBehaviourException {
                switch (step.step()) {
                case MongoStep.PUT:
                    if (StringUtils.isBlank(step.getPurpose()))
                        break;

                    Object val = (step.getScalar() == null || step.getScalar().length == 0) ? null
                            : step.getScalar()[0];

                    if (step.getPurpose().matches(internalCmds))
                        othersFields.append(step.getPurpose(), val);
                    else
                        fields.append(step.getPurpose(), val);

                    break;
                case MongoStep.OPTION:
                    options.append(step.getPurpose(), step.getScalar()[0]);
                    break;
                default:
                    logger.warn("Step {0} does not apply to activities create, this step will be ignored.",
                            step.step());
                }

                return this;
            }
        });

        final MongoExpressionFactory expFactory = new MongoExpressionFactory.Impl();
        BasicDBObject query = expFactory.parse((MongoExpression) expr);

        if (logger.isDebug())
            logger.debug("Updating entities, Database is {0}, Collection is {1}, expression is {2} ...",
                    getSchema(), getName(), query.toString());

        BasicDBObject fs = new BasicDBObject();
        if (!fields.isEmpty())
            fs.append("$set", fields);
        if (!othersFields.isEmpty())
            fs.putAll(othersFields.toMap());

        if (fs.isEmpty())
            return 0;

        WriteResult wr = getCollection().update(query, fs, options.getBoolean(Options.UPSERT, false), true,
                getCollection().getWriteConcern());

        if (!StringUtils.isBlank(wr.getError()))
            throw new DataAccessException(wr.getError());

        // invalidate cache
        if (null != getCache()) {
            invalidateCache(query);
        }

        return wr.getN();
    } catch (AnalyzeBehaviourException abe) {
        if (logger.isDebug())
            logger.debug(abe, "Analyzing behaviour failure, cause by: {0}.", abe.getMessage());
        throw new RuntimeException(abe);
    } finally {
        getIntent().reset();
    }
}
From source file:org.keycloak.connections.mongo.updater.impl.updates.Update1_7_0.java
License:Apache License
@Override
public void update(KeycloakSession session) throws ClassNotFoundException {
    DBCollection clients = db.getCollection("clients");
    DBCursor clientsCursor = clients.find();
    try {
        while (clientsCursor.hasNext()) {
            BasicDBObject client = (BasicDBObject) clientsCursor.next();

            boolean directGrantsOnly = client.getBoolean("directGrantsOnly", false);
            client.append("standardFlowEnabled", !directGrantsOnly);
            client.append("implicitFlowEnabled", false);
            client.append("directAccessGrantsEnabled", directGrantsOnly);

            client.removeField("directGrantsOnly");
            clients.save(client);
        }
    } finally {
        clientsCursor.close();
    }
}
From source file:tango.parameter.ConditionalParameter.java
License:Open Source License
@Override
public void dbGet(BasicDBObject dbo) {
    Object o = dbo.get(id);
    if (!(o instanceof BasicDBObject))
        return;
    BasicDBObject subDBO = (BasicDBObject) dbo.get(id);
    if (subDBO != null) {
        actionnableParameter.getParameter().dbGet(subDBO);
        toggleVisibility(!subDBO.getBoolean("isCollapsed", false));
    }
    if (currentParameters != null)
        for (Parameter p : currentParameters)
            p.removeFromContainer(mainBox);
    currentParameters = getCurrentParameters();
    if (currentParameters != null)
        for (Parameter p : currentParameters) {
            if (subDBO != null)
                p.dbGet(subDBO);
            p.addToContainer(mainBox);
        }
}