List of usage examples for com.mongodb BasicDBList get
public Object get(final String key)
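Before the full examples below, a minimal self-contained sketch of the call itself (the class and sample field names are illustrative, not taken from any of the source files): BasicDBList implements java.util.List, and get(String key) interprets the key as a numeric index, so "0" retrieves the first element just like get(0).

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;

public class BasicDBListGetExample {
    public static void main(String[] args) {
        // Build a small list of sub-documents, as the driver would when
        // decoding an array field of a MongoDB document.
        BasicDBList sources = new BasicDBList();
        sources.add(new BasicDBObject("sourceId", "s1").append("ref", "A"));
        sources.add(new BasicDBObject("sourceId", "s2").append("ref", "T"));

        // get(String key) treats the key as a list index, so "0" returns
        // the first element (the same object returned by sources.get(0)).
        BasicDBObject first = (BasicDBObject) sources.get("0");
        System.out.println(first.get("sourceId")); // prints: s1
    }
}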
From source file: org.opencb.opencga.storage.hadoop.variant.VariantHbaseDBAdaptor.java
License: Apache License

public QueryResult getSimpleVariantsByRegion(Region region, String sourceId, QueryOptions options) {
    Long start, end, dbstart, dbend;
    start = System.currentTimeMillis();
    boolean includeStats;
    boolean includeEffects;
    if (!options.containsKey("stats") && !options.containsKey("effects")) {
        includeStats = true;
        includeEffects = true;
    } else {
        includeStats = options.containsKey("stats") && options.getBoolean("stats");
        includeEffects = options.containsKey("effects") && options.getBoolean("effects");
    }

    QueryResult<Variant> queryResult = new QueryResult<>(
            String.format("%s:%d-%d", region.getChromosome(), region.getStart(), region.getEnd()));
    List<Variant> results = new ArrayList<>();
    String startRow = buildRowkey(region.getChromosome(), Long.toString(region.getStart()));
    String stopRow = buildRowkey(region.getChromosome(), Long.toString(region.getEnd()));
    BasicDBObject query = new BasicDBObject("position",
            new BasicDBObject("$gte", startRow).append("$lte", stopRow)).append("sources.sourceId", sourceId);

    DBCollection collection = db.getCollection("variants");
    dbstart = System.currentTimeMillis();
    DBCursor variantInStudies = collection.find(query);
    dbend = System.currentTimeMillis();
    queryResult.setDbTime(dbend - dbstart);

    for (DBObject result : variantInStudies) {
        String[] rowkeyParts = result.get("position").toString().split("_");
        String chromosome = rowkeyParts[0].replaceFirst("^0+(?!$)", "");
        int position = Integer.parseInt(rowkeyParts[1]);
        BasicDBList studies = (BasicDBList) result.get("sources");
        BasicDBObject st = (BasicDBObject) studies.get(0);
        String ref = (String) st.get("ref");
        String alt = StringUtils.join((ArrayList<String>) st.get("alt"), ",");

        // TODO Needs rework
        Variant variant = new Variant(chromosome, position, position, ref, alt);

        // Set stats information
        if (includeStats) {
            VariantStats stats = new VariantStats();
            BasicDBObject mongoStats = (BasicDBObject) st.get("stats");
            stats.setMaf((float) (double) mongoStats.get("maf"));
            stats.setMafAllele((String) mongoStats.get("alleleMaf"));
            stats.setMissingGenotypes((int) mongoStats.get("missing"));
            List<Genotype> genotypeCount = new ArrayList<>();
            for (BasicDBObject s : (List<BasicDBObject>) mongoStats.get("genotypeCount")) {
                for (Map.Entry<String, Object> entry : s.entrySet()) {
                    Genotype genotype = new Genotype(entry.getKey());
                    genotype.setCount((Integer) entry.getValue());
                    genotypeCount.add(genotype);
                }
            }
            stats.setGenotypes(genotypeCount);
            variant.setStats(stats);
        }

        // TODO Set consequence type names
        if (includeEffects) {
            BasicDBList mongoEffects = (BasicDBList) st.get("effects");
            if (mongoEffects != null) {
                for (Object e : mongoEffects) {
                    String effectObo = e.toString();
                    VariantEffect effect = new VariantEffect();
                    effect.setConsequenceTypeObo(effectObo);
                    variant.addEffect(effect);
                }
            }
        }

        results.add(variant);
    }

    queryResult.setResult(results);
    queryResult.setNumResults(results.size());
    end = System.currentTimeMillis();
    queryResult.setTime(end - start);
    return queryResult;
}
From source file: org.ossmeter.platform.visualisation.Chart.java
License: Open Source License

protected ArrayNode createDatatable(JsonNode datatableSpec, DBCollection collection, DBObject query) {
    String rowName = null;
    if (datatableSpec.has("rows")) {
        // TODO: May need more checking here if rows can be more complex
        rowName = datatableSpec.path("rows").textValue();
        rowName = rowName.replace("$", "");
    }
    ArrayNode colNames = (ArrayNode) datatableSpec.path("cols");

    ObjectMapper mapper = new ObjectMapper();
    ArrayNode results = mapper.createArrayNode();

    // Ensure data is sorted correctly
    DBObject orderBy = new BasicDBObject("__datetime", 1);

    if (rowName != null) {
        Iterator<DBObject> it = collection.find(query).sort(orderBy).iterator();
        while (it.hasNext()) {
            DBObject dbobj = it.next();
            BasicDBList rows = (BasicDBList) dbobj.get(rowName);
            Iterator<Object> rowsIt = rows.iterator();
            while (rowsIt.hasNext()) {
                BasicDBObject row = (BasicDBObject) rowsIt.next();
                ObjectNode r = mapper.createObjectNode();
                boolean validRow = true;
                for (int i = 0; i < colNames.size(); i++) {
                    JsonNode col = colNames.get(i);
                    String name = col.get("name").asText();
                    String field = col.get("field").asText();
                    field = field.replace("$", "");

                    Object value = null;
                    if (field.equals("__date")) {
                        value = dbobj.get(field);
                    } else {
                        // U.G.L.Y. FIXME
                        if (field.contains("[")) {
                            String[] _ = field.split("\\[");
                            String[] __ = _[1].split("\\]");
                            int _index = Integer.valueOf(__[0]);
                            String _field = __[1].replace(".", "");

                            BasicDBList _row = (BasicDBList) row.get(_[0]);
                            BasicDBObject _entry = (BasicDBObject) _row.get(_index);
                            value = _entry.get(_field);
                        } else {
                            value = row.get(field);
                        }
                    }

                    // Fix invalid data:
                    // - If value is null, then we ignore row.
                    // - If value is NaN or Infinity, then we set to 0.
                    if (value == null) {
                        validRow = false;
                        break;
                    }
                    if (value.toString().equals("NaN")) {
                        value = 0;
                    } else if (value.toString().equals("Infinity")) {
                        value = -1;
                    }
                    r.put(name, mapper.valueToTree(value));
                }
                if (validRow)
                    results.add(r);
            }
        }
    } else {
        Iterator<DBObject> it = collection.find(query).sort(orderBy).iterator();
        while (it.hasNext()) {
            DBObject dbobj = it.next();
            ObjectNode r = mapper.createObjectNode();
            boolean validRow = true;
            for (int i = 0; i < colNames.size(); i++) {
                JsonNode col = colNames.get(i);
                String name = col.get("name").asText();
                String field = col.get("field").asText();
                field = field.replace("$", "");

                Object value = null;
                if (field.equals("__date")) {
                    value = dbobj.get(field);
                } else {
                    // U.G.L.Y. FIXME
                    if (field.contains("[")) {
                        String[] _ = field.split("\\[");
                        String[] __ = _[1].split("\\]");
                        int _index = Integer.valueOf(__[0]);
                        String _field = __[1].replace(".", "");

                        BasicDBList _row = (BasicDBList) dbobj.get(_[0]);
                        BasicDBObject _entry = (BasicDBObject) _row.get(_index);
                        value = _entry.get(_field);
                    } else {
                        value = dbobj.get(field);
                        // if (value.toString().equals("NaN")) {
                        //     System.out.println(value);
                        // }
                    }
                }

                // Object value = null;
                // value = dbobj.get(field);
                // mapper.valueToTree(value);

                // Fix invalid data:
                // - If value is null, then we ignore row.
                // - If value is NaN, then we set to 0.
                if (value == null) {
                    validRow = false;
                    break;
                }
                if (value.toString().equals("NaN")) {
                    value = 0;
                } else if (value.toString().equals("Infinity")) {
                    value = -1;
                }
                r.put(name, mapper.valueToTree(value));
            }
            if (validRow)
                results.add(r);
        }
    }

    return results;
}
From source file: org.pentaho.di.trans.steps.mongodboutput.MongoDbOutputData.java
License: Open Source License

/**
 * Converts a kettle row to a Mongo Object for inserting/updating
 *
 * @param fieldDefs the document field definitions
 * @param inputMeta the incoming row format
 * @param row the current incoming row
 * @param vars environment variables
 * @param topLevelStructure the top level structure of the Mongo document
 * @param hasTopLevelJSONDocInsert true if the user-specified paths include a single incoming Kettle field value
 *          that contains a JSON document that is to be inserted as is
 * @return a DBObject encapsulating the document to insert/upsert or null if there are no non-null incoming fields
 * @throws KettleException if a problem occurs
 */
protected static DBObject kettleRowToMongo(List<MongoDbOutputMeta.MongoField> fieldDefs,
        RowMetaInterface inputMeta, Object[] row, VariableSpace vars, MongoTopLevel topLevelStructure,
        boolean hasTopLevelJSONDocInsert) throws KettleException {

    // the easy case
    if (hasTopLevelJSONDocInsert) {
        for (MongoDbOutputMeta.MongoField f : fieldDefs) {
            if (f.m_JSON && Const.isEmpty(f.m_mongoDocPath) && !f.m_useIncomingFieldNameAsMongoFieldName) {
                String incomingFieldName = vars.environmentSubstitute(f.m_incomingFieldName);
                int index = inputMeta.indexOfValue(incomingFieldName);
                ValueMetaInterface vm = inputMeta.getValueMeta(index);
                if (!vm.isNull(row[index])) {
                    String jsonDoc = vm.getString(row[index]);
                    DBObject docToInsert = (DBObject) JSON.parse(jsonDoc);
                    return docToInsert;
                } else {
                    return null;
                }
            }
        }
    }

    DBObject root = null;
    if (topLevelStructure == MongoTopLevel.RECORD) {
        root = new BasicDBObject();
    } else if (topLevelStructure == MongoTopLevel.ARRAY) {
        root = new BasicDBList();
    }

    if (vars == null) {
        vars = new Variables();
    }

    boolean haveNonNullFields = false;
    for (MongoDbOutputMeta.MongoField field : fieldDefs) {
        DBObject current = root;

        field.reset();
        List<String> pathParts = field.m_tempPathList;
        String incomingFieldName = vars.environmentSubstitute(field.m_incomingFieldName);
        int index = inputMeta.indexOfValue(incomingFieldName);
        ValueMetaInterface vm = inputMeta.getValueMeta(index);

        Object lookup = getPathElementName(pathParts, current, field.m_useIncomingFieldNameAsMongoFieldName);
        do {
            // array?
            if (lookup != null && lookup instanceof Integer) {
                BasicDBList temp = (BasicDBList) current;
                if (temp.get(lookup.toString()) == null) {
                    if (pathParts.size() == 0 && !field.m_useIncomingFieldNameAsMongoFieldName) {
                        // leaf - primitive element of the array (unless kettle field
                        // value is JSON)
                        boolean res = setMongoValueFromKettleValue(temp, lookup, vm, row[index], field.m_JSON,
                                field.insertNull);
                        haveNonNullFields = (haveNonNullFields || res);
                    } else {
                        // must be a record here (since multi-dimensional array creation
                        // is handled in getPathElementName())

                        // need to create this record/object
                        BasicDBObject newRec = new BasicDBObject();
                        temp.put(lookup.toString(), newRec);
                        current = newRec;

                        // end of the path?
                        if (pathParts.size() == 0) {
                            if (field.m_useIncomingFieldNameAsMongoFieldName) {
                                boolean res = setMongoValueFromKettleValue(current, incomingFieldName, vm,
                                        row[index], field.m_JSON, field.insertNull);
                                haveNonNullFields = (haveNonNullFields || res);
                            } else {
                                throw new KettleException(BaseMessages.getString(PKG,
                                        "MongoDbOutput.Messages.Error.NoFieldNameSpecifiedForPath")); //$NON-NLS-1$
                            }
                        }
                    }
                } else {
                    // existing element of the array
                    current = (DBObject) temp.get(lookup.toString());

                    // no more path parts so we must be setting a field in an array
                    // element that is a record
                    if (pathParts == null || pathParts.size() == 0) {
                        if (current instanceof BasicDBObject) {
                            if (field.m_useIncomingFieldNameAsMongoFieldName) {
                                boolean res = setMongoValueFromKettleValue(current, incomingFieldName, vm,
                                        row[index], field.m_JSON, field.insertNull);
                                haveNonNullFields = (haveNonNullFields || res);
                            } else {
                                throw new KettleException(BaseMessages.getString(PKG,
                                        "MongoDbOutput.Messages.Error.NoFieldNameSpecifiedForPath")); //$NON-NLS-1$
                            }
                        }
                    }
                }
            } else {
                // record/object
                if (lookup == null && pathParts.size() == 0) {
                    if (field.m_useIncomingFieldNameAsMongoFieldName) {
                        boolean res = setMongoValueFromKettleValue(current, incomingFieldName, vm, row[index],
                                field.m_JSON, field.insertNull);
                        haveNonNullFields = (haveNonNullFields || res);
                    } else {
                        throw new KettleException(BaseMessages.getString(PKG,
                                "MongoDbOutput.Messages.Error.NoFieldNameSpecifiedForPath")); //$NON-NLS-1$
                    }
                } else {
                    if (pathParts.size() == 0) {
                        if (!field.m_useIncomingFieldNameAsMongoFieldName) {
                            boolean res = setMongoValueFromKettleValue(current, lookup.toString(), vm,
                                    row[index], field.m_JSON, field.insertNull);
                            haveNonNullFields = (haveNonNullFields || res);
                        } else {
                            current = (DBObject) current.get(lookup.toString());
                            boolean res = setMongoValueFromKettleValue(current, incomingFieldName, vm,
                                    row[index], field.m_JSON, field.insertNull);
                            haveNonNullFields = (haveNonNullFields || res);
                        }
                    } else {
                        current = (DBObject) current.get(lookup.toString());
                    }
                }
            }

            lookup = getPathElementName(pathParts, current, field.m_useIncomingFieldNameAsMongoFieldName);
        } while (lookup != null);
    }

    if (!haveNonNullFields) {
        return null; // nothing has been set!
    }

    return root;
}
From source file: org.pentaho.mongo.MongoUtils.java
License: Open Source License

protected static void setupAllTags(BasicDBList members, List<String> allTags) {
    HashSet<String> tempTags = new HashSet<String>();

    if (members != null && members.size() > 0) {
        for (int i = 0; i < members.size(); i++) {
            Object m = members.get(i);

            if (m != null) {
                DBObject tags = (DBObject) ((DBObject) m).get("tags"); //$NON-NLS-1$
                if (tags == null) {
                    continue;
                }

                for (String tagName : tags.keySet()) {
                    String tagVal = tags.get(tagName).toString();
                    String combined = quote(tagName) + " : " + quote(tagVal); //$NON-NLS-1$
                    tempTags.add(combined);
                }
            }
        }
    }

    for (String s : tempTags) {
        allTags.add(s);
    }
}
From source file: org.pentaho.mongo.MongoUtils.java
License: Open Source License

protected static void checkForReplicaSetMembersThatSatisfyTagSets(List<DBObject> tagSets,
        List<DBObject> satisfy, BasicDBList members) {
    if (members != null && members.size() > 0) {
        for (int i = 0; i < members.size(); i++) {
            Object m = members.get(i);

            if (m != null) {
                DBObject tags = (DBObject) ((DBObject) m).get("tags"); //$NON-NLS-1$
                if (tags == null) {
                    continue;
                }

                for (int j = 0; j < tagSets.size(); j++) {
                    boolean match = true;
                    DBObject toMatch = tagSets.get(j);

                    for (String tagName : toMatch.keySet()) {
                        String tagValue = toMatch.get(tagName).toString();

                        // does replica set member m's tags contain this tag?
                        Object matchVal = tags.get(tagName);

                        if (matchVal == null) {
                            match = false; // doesn't match this particular tag set
                            // no need to check any other keys in toMatch
                            break;
                        }

                        if (!matchVal.toString().equals(tagValue)) {
                            // rep set member m's tags has this tag, but its value does not match
                            match = false;
                            // no need to check any other keys in toMatch
                            break;
                        }
                    }

                    if (match) {
                        // all tag/values present and match - add this member (only if it's
                        // not already there)
                        if (!satisfy.contains(m)) {
                            satisfy.add((DBObject) m);
                        }
                    }
                }
            }
        }
    }
}
From source file: org.pentaho.mongo.MongoUtils.java
License: Open Source License

/**
 * Connect to mongo and retrieve any replica set members defined in the
 * local.system.replset collection. Note that this method is not actually
 * needed for configuring a connection to a replica set or sharded mongo
 * cluster (mongos) as the driver will determine this automatically.
 *
 * @param hostsPorts the host(s) and port(s) to use for initiating the connection
 * @param singlePort default port to use if none specified in the hostsPorts string
 * @param cred the credentials to use for authenticating
 * @param vars environment variables
 * @param log for logging
 * @return a list of replica set ServerAddresses
 * @throws KettleException if a problem occurs
 */
public static List<ServerAddress> getReplicaSetMembers(String hostsPorts, String singlePort,
        MongoCredential cred, VariableSpace vars, LogChannelInterface log) throws KettleException {
    List<ServerAddress> replSetMembers = new ArrayList<ServerAddress>();

    if (log != null) {
        log.logBasic(
                BaseMessages.getString(PKG, "MongoUtils.Message.QueryingForReplicaSetMembers", hostsPorts));
    }

    BasicDBList members = getRepSetMemberRecords(hostsPorts, singlePort, cred, vars, log);

    try {
        if (members != null && members.size() > 0) {
            for (int i = 0; i < members.size(); i++) {
                Object m = members.get(i);

                if (m != null) {
                    String hostPort = ((DBObject) m).get("host").toString(); //$NON-NLS-1$

                    if (!Const.isEmpty(hostPort)) {
                        String[] parts = hostPort.split(":"); //$NON-NLS-1$
                        if (parts.length == 2) {
                            ServerAddress address = new ServerAddress(parts[0].trim(),
                                    Integer.parseInt(parts[1].trim()));
                            replSetMembers.add(address);
                        } else {
                            ServerAddress address = new ServerAddress(parts[0].trim());
                            replSetMembers.add(address);
                        }
                    }
                }
            }
        }
    } catch (Exception ex) {
        throw new KettleException(ex);
    }

    return replSetMembers;
}
From source file: org.pentaho.mongo.wrapper.field.MongoArrayExpansion.java
License: Open Source License

public Object[][] convertToKettleValue(BasicDBList mongoList, VariableSpace space) throws KettleException {
    if (mongoList == null) {
        return nullResult();
    }

    if (m_tempParts.size() == 0) {
        throw new KettleException(BaseMessages.getString(PKG, "MongoDbInput.ErrorMessage.MalformedPathArray")); //$NON-NLS-1$
    }

    String part = m_tempParts.remove(0);

    if (!(part.charAt(0) == '[')) {
        // we're expecting an array at this point - this document does not
        // contain our field
        return nullResult();
    }

    String index = part.substring(1, part.indexOf(']'));

    if (part.indexOf(']') < part.length() - 1) {
        // more dimensions to the array
        part = part.substring(part.indexOf(']') + 1, part.length());
        m_tempParts.add(0, part);
    }

    if (index.equals("*")) { //$NON-NLS-1$
        // start the expansion - we delegate conversion to our subfields
        Object[][] result = new Object[mongoList.size()][m_outputRowMeta.size()
                + RowDataUtil.OVER_ALLOCATE_SIZE];

        for (int i = 0; i < mongoList.size(); i++) {
            Object element = mongoList.get(i);

            for (int j = 0; j < m_subFields.size(); j++) {
                MongoField sf = m_subFields.get(j);
                sf.reset(space);

                // what have we got?
                if (element instanceof BasicDBObject) {
                    result[i][sf.m_outputIndex] = sf.convertToKettleValue((BasicDBObject) element);
                } else if (element instanceof BasicDBList) {
                    result[i][sf.m_outputIndex] = sf.convertToKettleValue((BasicDBList) element);
                } else {
                    // assume a primitive
                    result[i][sf.m_outputIndex] = sf.getKettleValue(element);
                }
            }
        }

        return result;
    } else {
        int arrayI = 0;
        try {
            arrayI = Integer.parseInt(index.trim());
        } catch (NumberFormatException e) {
            throw new KettleException(
                    BaseMessages.getString(PKG, "MongoDbInput.ErrorMessage.UnableToParseArrayIndex", index)); //$NON-NLS-1$
        }

        if (arrayI >= mongoList.size() || arrayI < 0) {
            // index is out of bounds
            return nullResult();
        }

        Object element = mongoList.get(arrayI);

        if (element == null) {
            return nullResult();
        }

        if (element instanceof BasicDBObject) {
            return convertToKettleValue(((BasicDBObject) element), space);
        }

        if (element instanceof BasicDBList) {
            return convertToKettleValue(((BasicDBList) element), space);
        }

        // must mean we have a primitive here, but we're expecting to process
        // more path so this doesn't match us - return null
        return nullResult();
    }
}
From source file: org.pentaho.mongo.wrapper.field.MongodbInputDiscoverFieldsImpl.java
License: Open Source License

private static void processList(BasicDBList list, String path, String name, Map<String, MongoField> lookup) {
    if (list.size() == 0) {
        return; // can't infer anything about an empty list
    }

    String nonPrimitivePath = path + "[-]"; //$NON-NLS-1$
    String primitivePath = path;

    for (int i = 0; i < list.size(); i++) {
        Object element = list.get(i);

        if (element instanceof BasicDBObject) {
            processRecord((BasicDBObject) element, nonPrimitivePath,
                    name + "[" + i + ":" + i + "]", lookup); //$NON-NLS-1$ //$NON-NLS-2$
        } else if (element instanceof BasicDBList) {
            processList((BasicDBList) element, nonPrimitivePath,
                    name + "[" + i + ":" + i + "]", lookup); //$NON-NLS-1$ //$NON-NLS-2$
        } else {
            // some sort of primitive
            String finalPath = primitivePath + "[" + i + "]"; //$NON-NLS-1$ //$NON-NLS-2$
            String finalName = name + "[" + i + "]"; //$NON-NLS-1$ //$NON-NLS-2$

            if (!lookup.containsKey(finalPath)) {
                MongoField newField = new MongoField();
                int kettleType = mongoToKettleType(element);

                // Following suit of mongoToKettleType by interpreting null as String type
                newField.m_mongoType = String.class;
                if (element != null) {
                    newField.m_mongoType = element.getClass();
                }

                newField.m_fieldName = finalPath;
                newField.m_fieldPath = finalName;
                newField.m_kettleType = ValueMeta.getTypeDesc(kettleType);
                newField.m_percentageOfSample = 1;

                lookup.put(finalPath, newField);
            } else {
                // update max indexes in array parts of name
                MongoField m = lookup.get(finalPath);

                Class<?> elementClass = String.class;
                if (element != null) {
                    elementClass = element.getClass();
                }

                if (!m.m_mongoType.isAssignableFrom(elementClass)) {
                    m.m_disparateTypes = true;
                }

                m.m_percentageOfSample++;
                updateMaxArrayIndexes(m, finalName);
            }
        }
    }
}
From source file: org.restheart.handlers.metadata.ResponseTranformerMetadataHandler.java
License: Open Source License

@Override
void enforceDbRepresentationTransformLogic(HttpServerExchange exchange, RequestContext context)
        throws InvalidMetadataException {
    List<RepresentationTransformer> dbRts = RepresentationTransformer.getFromJson(context.getDbProps());

    RequestContext.TYPE requestType = context.getType(); // DB or COLLECTION

    for (RepresentationTransformer rt : dbRts) {
        if (rt.getPhase() == RepresentationTransformer.PHASE.RESPONSE) {
            Transformer t = (Transformer) NamedSingletonsFactory.getInstance().get("transformers",
                    rt.getName());

            if (t == null) {
                throw new IllegalArgumentException(
                        "cannot find singleton " + rt.getName() + " in singleton group transformers");
            }

            if (rt.getScope() == RepresentationTransformer.SCOPE.THIS
                    && requestType == RequestContext.TYPE.DB) {
                t.tranform(exchange, context, context.getResponseContent(), rt.getArgs());
            } else if (rt.getScope() == RepresentationTransformer.SCOPE.CHILDREN
                    && requestType == RequestContext.TYPE.COLLECTION) {
                BasicDBObject _embedded = (BasicDBObject) context.getResponseContent().get("_embedded");

                // evaluate the script on children collection
                BasicDBList colls = (BasicDBList) _embedded.get("rh:coll");

                if (colls != null) {
                    for (String k : colls.keySet()) {
                        DBObject coll = (DBObject) colls.get(k);
                        t.tranform(exchange, context, coll, rt.getArgs());
                    }
                }
            }
        }
    }
}
From source file: org.restheart.handlers.metadata.ResponseTranformerMetadataHandler.java
License: Open Source License

@Override
void enforceCollRepresentationTransformLogic(HttpServerExchange exchange, RequestContext context)
        throws InvalidMetadataException {
    List<RepresentationTransformer> dbRts = RepresentationTransformer
            .getFromJson(context.getCollectionProps());

    RequestContext.TYPE requestType = context.getType(); // DOCUMENT or COLLECTION

    for (RepresentationTransformer rt : dbRts) {
        if (rt.getPhase() == RepresentationTransformer.PHASE.RESPONSE) {
            Transformer t;

            try {
                t = (Transformer) NamedSingletonsFactory.getInstance().get("transformers", rt.getName());
            } catch (IllegalArgumentException ex) {
                context.addWarning("error applying transformer: " + ex.getMessage());
                return;
            }

            if (t == null) {
                throw new IllegalArgumentException(
                        "cannot find singleton " + rt.getName() + " in singleton group transformers");
            }

            if (rt.getScope() == RepresentationTransformer.SCOPE.THIS
                    && requestType == RequestContext.TYPE.COLLECTION) {
                // evaluate the script on collection
                t.tranform(exchange, context, context.getResponseContent(), rt.getArgs());
            } else if (rt.getScope() == RepresentationTransformer.SCOPE.CHILDREN
                    && requestType == RequestContext.TYPE.COLLECTION) {
                BasicDBObject _embedded = (BasicDBObject) context.getResponseContent().get("_embedded");

                // execute the logic on children documents
                BasicDBList docs = (BasicDBList) _embedded.get("rh:doc");

                if (docs != null) {
                    for (String k : docs.keySet()) {
                        DBObject doc = (DBObject) docs.get(k);
                        t.tranform(exchange, context, doc, rt.getArgs());
                    }
                }

                // execute the logic on children files
                BasicDBList files = (BasicDBList) _embedded.get("rh:file");

                if (files != null) {
                    for (String k : files.keySet()) {
                        DBObject file = (DBObject) files.get(k);
                        t.tranform(exchange, context, file, rt.getArgs());
                    }
                }
            } else if (rt.getScope() == RepresentationTransformer.SCOPE.CHILDREN
                    && requestType == RequestContext.TYPE.DOCUMENT) {
                t.tranform(exchange, context, context.getResponseContent(), rt.getArgs());
            }
        }
    }
}