List of usage examples for com.fasterxml.jackson.databind.JsonNode.elements()
public Iterator<JsonNode> elements()
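Before the examples, a minimal, self-contained sketch of the method itself (the class name and JSON literals are illustrative, not taken from the sources below): elements() returns an iterator over a node's value children, i.e. the entries of an array node or the field values of an object node; for a value node the iterator is empty.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Iterator;

public class ElementsSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // On an array node, elements() iterates the entries in order
        JsonNode array = mapper.readTree("[\"a\", \"b\", \"c\"]");
        Iterator<JsonNode> values = array.elements();
        while (values.hasNext()) {
            System.out.println(values.next().asText()); // a, b, c
        }

        // On an object node it iterates the field values only; use
        // fieldNames() or fields() when the keys matter as well
        JsonNode object = mapper.readTree("{\"x\": 1, \"y\": 2}");
        Iterator<JsonNode> fieldValues = object.elements();
        while (fieldValues.hasNext()) {
            System.out.println(fieldValues.next().asInt()); // 1, 2
        }
    }
}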
From source file:com.amazonaws.services.kinesis.aggregators.configuration.ExternalConfigurationModel.java
private static void addTimeHorizons(JsonNode document, ExternalConfigurationModel model) throws Exception {
    JsonNode node = StreamAggregatorUtils.readJsonValue(document, "timeHorizons");
    if (node != null) {
        Iterator<JsonNode> timeHorizonValues = node.elements();
        while (timeHorizonValues.hasNext()) {
            String t = timeHorizonValues.next().asText();
            String timeHorizonName = null;
            int granularity = -1;
            boolean utc = false;

            // process UTC-shifting time horizons
            if (t.contains("-UTC")) {
                t = String.join("", t.split("-UTC"));
                utc = true;
            }

            // process parameterised time horizons
            if (t.contains("MINUTES_GROUPED")) {
                String[] items = t.split("\\(");
                timeHorizonName = items[0];
                granularity = Integer.parseInt(items[1].replaceAll("\\)", ""));
            } else {
                timeHorizonName = t;
            }

            try {
                TimeHorizon th = TimeHorizon.valueOf(timeHorizonName);
                th.setUTC(utc);

                if (th.equals(TimeHorizon.MINUTES_GROUPED) && granularity == -1) {
                    throw new InvalidConfigurationException(
                            "Unable to create Grouped Minutes Time Horizon without configuration of Granularity using notation MINUTES_GROUPED(<granularity in minutes>)");
                } else if (th.equals(TimeHorizon.MINUTES_GROUPED)) {
                    th.setGranularity(granularity);
                }

                model.addTimeHorizon(th);
            } catch (Exception e) {
                throw new Exception(String.format("Unable to configure Time Horizon %s", t), e);
            }
        }
    }
}
From source file:com.ning.metrics.action.hdfs.data.parser.SmileRowSerializer.java
public static void eventToRow(Registrar r, SmileEnvelopeEventDeserializer deserializer, Rows rows) {
    final SmileEnvelopeEvent event;
    try {
        event = deserializer.getNextEvent();
    } catch (IOException e) {
        throw new RowAccessException(e);
    }

    final JsonNode node = (JsonNode) event.getData();
    final Map<Short, GoodwillSchemaField> schema = r.getSchema(event.getName());
    final List<ColumnKey> columnKeyList = new ArrayList<ColumnKey>(node.size());
    final List<JsonNodeComparable> data = new ArrayList<JsonNodeComparable>(node.size());

    // Without Goodwill integration, simply pass the raw json
    if (schema == null) {
        final Iterator<String> nodeFieldNames = node.fieldNames();
        while (nodeFieldNames.hasNext()) {
            columnKeyList.add(new DynamicColumnKey(nodeFieldNames.next()));
        }

        final Iterator<JsonNode> nodeElements = node.elements();
        while (nodeElements.hasNext()) {
            JsonNode next = nodeElements.next();
            if (next == null) {
                next = NullNode.getInstance();
            }
            data.add(new JsonNodeComparable(next));
        }
    } else {
        // With Goodwill, select only the fields present in the Goodwill schema, and preserve ordering
        for (final GoodwillSchemaField schemaField : schema.values()) {
            final String schemaFieldName = schemaField.getName();
            columnKeyList.add(new DynamicColumnKey(schemaFieldName));

            JsonNode delegate = node.get(schemaFieldName);
            if (delegate == null) {
                delegate = NullNode.getInstance();
            }
            data.add(new JsonNodeComparable(delegate));
        }
    }

    rows.add(RowFactory.getRow(new RowSchema(event.getName(), columnKeyList), data));
}
From source file:com.amazonaws.services.kinesis.aggregators.configuration.ExternalConfigurationModel.java
public static List<ExternalConfigurationModel> buildFromConfig(String configFilePath) throws Exception {
    List<ExternalConfigurationModel> response = new ArrayList<>();

    // reference the config file as a full path
    File configFile = new File(configFilePath);
    if (!configFile.exists()) {
        // try to load the file from the classpath
        InputStream classpathConfig = ExternalConfigurationModel.class.getClassLoader()
                .getResourceAsStream(configFilePath);
        if (classpathConfig != null && classpathConfig.available() > 0) {
            configFile = new File(ExternalConfigurationModel.class
                    .getResource((configFilePath.startsWith("/") ? "" : "/") + configFilePath).toURI());

            LOG.info(String.format("Loaded Configuration %s from Classpath", configFilePath));
        } else {
            if (configFilePath.startsWith("s3://")) {
                AmazonS3 s3Client = new AmazonS3Client(new DefaultAWSCredentialsProviderChain());
                TransferManager tm = new TransferManager(s3Client);

                // parse the config path to get the bucket name and prefix
                final String s3ProtoRegex = "s3:\\/\\/";
                String bucket = configFilePath.replaceAll(s3ProtoRegex, "").split("/")[0];
                String prefix = configFilePath.replaceAll(String.format("%s%s\\/", s3ProtoRegex, bucket), "");

                // download the file using TransferManager
                configFile = File.createTempFile(configFilePath, null);
                Download download = tm.download(bucket, prefix, configFile);
                download.waitForCompletion();

                // shut down the transfer manager
                tm.shutdownNow();

                LOG.info(String.format("Loaded Configuration from Amazon S3 %s/%s to %s", bucket, prefix,
                        configFile.getAbsolutePath()));
            } else {
                // load the file from external URL
                try {
                    configFile = File.createTempFile(configFilePath, null);
                    FileUtils.copyURLToFile(new URL(configFilePath), configFile, 1000, 1000);
                    LOG.info(String.format("Loaded Configuration from %s to %s", configFilePath,
                            configFile.getAbsolutePath()));
                } catch (IOException e) {
                    // handle the timeouts and so on with a generalised
                    // config file not found handler later
                }
            }
        }
    } else {
        LOG.info(String.format("Loaded Configuration from Filesystem %s", configFilePath));
    }

    // if we haven't been able to load a config file, then bail
    if (configFile == null || !configFile.exists()) {
        throw new InvalidConfigurationException(
                String.format("Unable to Load Config File from %s", configFilePath));
    }

    JsonNode document = StreamAggregatorUtils.asJsonNode(configFile);

    ExternalConfigurationModel config = null;

    Iterator<JsonNode> i = document.elements();
    while (i.hasNext()) {
        config = new ExternalConfigurationModel();

        JsonNode section = i.next();

        // set generic properties
        config.setNamespace(StreamAggregatorUtils.readValueAsString(section, "namespace"));
        config.setDateFormat(StreamAggregatorUtils.readValueAsString(section, "dateFormat"));
        addTimeHorizons(section, config);
        setAggregatorType(section, config);

        // set the label items
        JsonNode labelItems = StreamAggregatorUtils.readJsonValue(section, "labelItems");
        if (labelItems != null && labelItems.size() > 0) {
            Iterator<JsonNode> iterator = labelItems.elements();
            while (iterator.hasNext()) {
                JsonNode n = iterator.next();
                config.addLabelItems(n.asText());
            }
        }
        config.setLabelAttributeAlias(StreamAggregatorUtils.readValueAsString(section, "labelAttributeAlias"));

        config.setDateItem(StreamAggregatorUtils.readValueAsString(section, "dateItem"));
        config.setDateAttributeAlias(StreamAggregatorUtils.readValueAsString(section, "dateAttributeAlias"));

        JsonNode summaryItems = StreamAggregatorUtils.readJsonValue(section, "summaryItems");
        if (summaryItems != null && summaryItems.size() > 0) {
            Iterator<JsonNode> iterator = summaryItems.elements();
            while (iterator.hasNext()) {
                JsonNode n = iterator.next();
                config.addSummaryItem(n.asText());
            }
        }

        config.setTableName(StreamAggregatorUtils.readValueAsString(section, "tableName"));

        String readIO = StreamAggregatorUtils.readValueAsString(section, "readIOPS");
        if (readIO != null)
            config.setReadIOPs(Long.parseLong(readIO));

        String writeIO = StreamAggregatorUtils.readValueAsString(section, "writeIOPS");
        if (writeIO != null)
            config.setWriteIOPs(Long.parseLong(writeIO));

        // configure tolerance of data extraction problems
        String failOnDataExtraction = StreamAggregatorUtils.readValueAsString(section, "failOnDataExtraction");
        if (failOnDataExtraction != null)
            config.setFailOnDataExtraction(Boolean.parseBoolean(failOnDataExtraction));

        // configure whether metrics should be emitted
        String emitMetrics = StreamAggregatorUtils.readValueAsString(section, "emitMetrics");
        String metricsEmitterClassname = StreamAggregatorUtils.readValueAsString(section, "metricsEmitterClass");
        if (emitMetrics != null || metricsEmitterClassname != null) {
            if (metricsEmitterClassname != null) {
                config.setMetricsEmitter((Class<IMetricsEmitter>) ClassLoader.getSystemClassLoader()
                        .loadClass(metricsEmitterClassname));
            } else {
                config.setEmitMetrics(Boolean.parseBoolean(emitMetrics));
            }
        }

        // configure the data store class
        String dataStoreClass = StreamAggregatorUtils.readValueAsString(section, "IDataStore");
        if (dataStoreClass != null) {
            Class<IDataStore> dataStore = (Class<IDataStore>) ClassLoader.getSystemClassLoader()
                    .loadClass(dataStoreClass);
            config.setDataStore(dataStore);
        }

        // get the data extractor configuration, so we know what other json
        // elements to retrieve from the configuration document
        String useExtractor = null;
        try {
            useExtractor = StreamAggregatorUtils.readValueAsString(section, "dataExtractor");
            config.setDataExtractor(DataExtractor.valueOf(useExtractor));
        } catch (Exception e) {
            throw new Exception(
                    String.format("Unable to configure aggregator with Data Extractor %s", useExtractor));
        }

        switch (config.getDataExtractor()) {
        case CSV:
            configureStringCommon(section, config);
            configureCsv(section, config);
            break;
        case JSON:
            configureStringCommon(section, config);
            break;
        case OBJECT:
            configureObject(section, config);
            break;
        case REGEX:
            configureRegex(section, config);
        }

        response.add(config);
    }
    return response;
}
From source file:dao.SearchDAO.java
public static JsonNode elasticSearchMetricByKeyword(String category, String keywords, int page, int size) {
    ObjectNode queryNode = Json.newObject();
    queryNode.put("from", (page - 1) * size);
    queryNode.put("size", size);

    JsonNode responseNode = null;
    ObjectNode keywordNode = null;

    try {
        keywordNode = utils.Search.generateElasticSearchQueryString(category, null, keywords);
    } catch (Exception e) {
        Logger.error("Elastic search metric input query is not JSON format. Error message :" + e.getMessage());
    }

    if (keywordNode != null) {
        queryNode.set("query", keywordNode);

        Promise<WSResponse> responsePromise = WS
                .url(Play.application().configuration().getString(SearchDAO.ELASTICSEARCH_METRIC_URL_KEY))
                .post(queryNode);
        responseNode = responsePromise.get(1000).asJson();
    }

    ObjectNode resultNode = Json.newObject();
    Long count = 0L;
    List<Metric> pagedMetrics = new ArrayList<>();
    resultNode.put("page", page);
    resultNode.put("category", category);
    resultNode.put("isMetrics", true);
    resultNode.put("itemsPerPage", size);
    resultNode.put("keywords", keywords);

    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits")) {
        JsonNode hitsNode = responseNode.get("hits");
        if (hitsNode != null) {
            if (hitsNode.has("total")) {
                count = hitsNode.get("total").asLong();
            }
            if (hitsNode.has("hits")) {
                JsonNode dataNode = hitsNode.get("hits");
                if (dataNode != null && dataNode.isArray()) {
                    Iterator<JsonNode> arrayIterator = dataNode.elements();
                    if (arrayIterator != null) {
                        while (arrayIterator.hasNext()) {
                            JsonNode node = arrayIterator.next();
                            if (node.isContainerNode() && node.has("_id")) {
                                Metric metric = new Metric();
                                metric.id = node.get("_id").asInt();
                                if (node.has("_source")) {
                                    JsonNode sourceNode = node.get("_source");
                                    if (sourceNode != null) {
                                        if (sourceNode.has("metric_name")) {
                                            metric.name = sourceNode.get("metric_name").asText();
                                        }
                                        if (sourceNode.has("metric_description")) {
                                            metric.description = sourceNode.get("metric_description").asText();
                                        }
                                        if (sourceNode.has("dashboard_name")) {
                                            metric.dashboardName = sourceNode.get("dashboard_name").asText();
                                        }
                                        if (sourceNode.has("metric_group")) {
                                            metric.group = sourceNode.get("metric_group").asText();
                                        }
                                        if (sourceNode.has("metric_category")) {
                                            metric.category = sourceNode.get("metric_category").asText();
                                        }
                                        if (sourceNode.has("urn")) {
                                            metric.urn = sourceNode.get("urn").asText();
                                        }
                                        if (sourceNode.has("metric_source")) {
                                            metric.source = sourceNode.get("metric_source").asText();
                                            if (StringUtils.isBlank(metric.source)) {
                                                metric.source = null;
                                            }
                                        }
                                        metric.schema = sourceNode.toString();
                                    }
                                }
                                pagedMetrics.add(metric);
                            }
                        }
                    }
                }
            }
        }
    }

    resultNode.put("count", count);
    resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
    resultNode.set("data", Json.toJson(pagedMetrics));
    return resultNode;
}
From source file:dao.SearchDAO.java
public static JsonNode elasticSearchFlowByKeyword(String category, String keywords, int page, int size) {
    ObjectNode queryNode = Json.newObject();
    queryNode.put("from", (page - 1) * size);
    queryNode.put("size", size);

    JsonNode searchOpt = null;
    JsonNode responseNode = null;
    ObjectNode keywordNode = null;

    try {
        keywordNode = utils.Search.generateElasticSearchQueryString(category, null, keywords);
    } catch (Exception e) {
        Logger.error("Elastic search flow input query is not JSON format. Error message :" + e.getMessage());
    }

    if (keywordNode != null) {
        queryNode.set("query", keywordNode);

        Promise<WSResponse> responsePromise = WS
                .url(Play.application().configuration().getString(SearchDAO.ELASTICSEARCH_FLOW_URL_KEY))
                .post(queryNode);
        responseNode = responsePromise.get(1000).asJson();
    }

    ObjectNode resultNode = Json.newObject();
    Long count = 0L;
    List<FlowJob> pagedFlowJobs = new ArrayList<>();
    resultNode.put("page", page);
    resultNode.put("category", category);
    resultNode.put("isFlowJob", true);
    resultNode.put("itemsPerPage", size);
    resultNode.put("keywords", keywords);

    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits")) {
        JsonNode hitsNode = responseNode.get("hits");
        if (hitsNode != null) {
            if (hitsNode.has("total")) {
                count = hitsNode.get("total").asLong();
            }
            if (hitsNode.has("hits")) {
                JsonNode dataNode = hitsNode.get("hits");
                if (dataNode != null && dataNode.isArray()) {
                    Iterator<JsonNode> arrayIterator = dataNode.elements();
                    if (arrayIterator != null) {
                        while (arrayIterator.hasNext()) {
                            JsonNode node = arrayIterator.next();
                            if (node.isContainerNode() && node.has("_id")) {
                                FlowJob flowJob = new FlowJob();
                                if (node.has("_source")) {
                                    JsonNode sourceNode = node.get("_source");
                                    if (sourceNode != null) {
                                        if (sourceNode.has("app_code")) {
                                            flowJob.appCode = sourceNode.get("app_code").asText();
                                        }
                                        if (sourceNode.has("app_id")) {
                                            flowJob.appId = sourceNode.get("app_id").asInt();
                                        }
                                        if (sourceNode.has("flow_id")) {
                                            flowJob.flowId = sourceNode.get("flow_id").asLong();
                                        }
                                        if (sourceNode.has("flow_name")) {
                                            flowJob.flowName = sourceNode.get("flow_name").asText();
                                            flowJob.displayName = flowJob.flowName;
                                        }
                                        if (sourceNode.has("flow_path")) {
                                            flowJob.flowPath = sourceNode.get("flow_path").asText();
                                        }
                                        if (sourceNode.has("flow_group")) {
                                            flowJob.flowGroup = sourceNode.get("flow_group").asText();
                                        }
                                        flowJob.link = "#/flows/name/" + flowJob.appCode + "/"
                                                + Long.toString(flowJob.flowId) + "/page/1?urn="
                                                + flowJob.flowGroup;
                                        flowJob.path = flowJob.appCode + "/" + flowJob.flowPath;
                                        flowJob.schema = sourceNode.toString();
                                    }
                                }
                                pagedFlowJobs.add(flowJob);
                            }
                        }
                    }
                }
            }
        }
    }

    resultNode.put("count", count);
    resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
    resultNode.set("data", Json.toJson(pagedFlowJobs));
    return resultNode;
}
From source file:dao.SearchDAO.java
public static JsonNode elasticSearchDatasetByKeyword(String category, String keywords, String source, int page,
        int size) {
    ObjectNode queryNode = Json.newObject();
    queryNode.put("from", (page - 1) * size);
    queryNode.put("size", size);

    JsonNode responseNode = null;
    ObjectNode keywordNode = null;

    try {
        keywordNode = utils.Search.generateElasticSearchQueryString(category, source, keywords);
    } catch (Exception e) {
        Logger.error("Elastic search dataset input query is not JSON format. Error message :" + e.getMessage());
    }

    if (keywordNode != null) {
        ObjectNode funcScoreNodes = Json.newObject();

        ObjectNode fieldValueFactorNode = Json.newObject();
        fieldValueFactorNode.put("field", "static_boosting_score");
        fieldValueFactorNode.put("factor", 1);
        fieldValueFactorNode.put("modifier", "square");
        fieldValueFactorNode.put("missing", 1);

        funcScoreNodes.put("query", keywordNode);
        funcScoreNodes.put("field_value_factor", fieldValueFactorNode);

        ObjectNode funcScoreNodesWrapper = Json.newObject();
        funcScoreNodesWrapper.put("function_score", funcScoreNodes);

        queryNode.put("query", funcScoreNodesWrapper);

        Logger.debug("The query sent to Elastic Search is: " + queryNode.toString());

        Promise<WSResponse> responsePromise = WS
                .url(Play.application().configuration().getString(SearchDAO.ELASTICSEARCH_DATASET_URL_KEY))
                .post(queryNode);
        responseNode = responsePromise.get(1000).asJson();

        Logger.debug("The responseNode from Elastic Search is: " + responseNode.toString());
    }

    ObjectNode resultNode = Json.newObject();
    Long count = 0L;
    List<Dataset> pagedDatasets = new ArrayList<>();
    resultNode.put("page", page);
    resultNode.put("category", category);
    resultNode.put("source", source);
    resultNode.put("itemsPerPage", size);
    resultNode.put("keywords", keywords);

    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits")) {
        JsonNode hitsNode = responseNode.get("hits");
        if (hitsNode != null) {
            if (hitsNode.has("total")) {
                count = hitsNode.get("total").asLong();
            }
            if (hitsNode.has("hits")) {
                JsonNode dataNode = hitsNode.get("hits");
                if (dataNode != null && dataNode.isArray()) {
                    Iterator<JsonNode> arrayIterator = dataNode.elements();
                    if (arrayIterator != null) {
                        while (arrayIterator.hasNext()) {
                            JsonNode node = arrayIterator.next();
                            if (node.isContainerNode() && node.has("_id")) {
                                Dataset dataset = new Dataset();
                                dataset.id = node.get("_id").asLong();
                                if (node.has("_source")) {
                                    JsonNode sourceNode = node.get("_source");
                                    if (sourceNode != null) {
                                        if (sourceNode.has("name")) {
                                            dataset.name = sourceNode.get("name").asText();
                                        }
                                        if (sourceNode.has("source")) {
                                            dataset.source = sourceNode.get("source").asText();
                                        }
                                        if (sourceNode.has("urn")) {
                                            dataset.urn = sourceNode.get("urn").asText();
                                        }
                                        if (sourceNode.has("schema")) {
                                            dataset.schema = sourceNode.get("schema").asText();
                                        }
                                    }
                                }
                                pagedDatasets.add(dataset);
                            }
                        }
                    }
                }
            }
        }
    }

    resultNode.put("count", count);
    resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
    resultNode.set("data", Json.toJson(pagedDatasets));
    return resultNode;
}
From source file:nl.esciencecenter.xnatclient.data.XnatParser.java
public static int parseJsonResult(XnatObjectType type, String jsonStr, List list) throws XnatParseException {
    if (StringUtil.isEmpty(jsonStr))
        return 0;

    try {
        JsonFactory jsonFac = new JsonFactory();
        ObjectMapper mapper = new ObjectMapper();

        // use dom like parsing:
        JsonNode tree = mapper.readTree(jsonStr);
        JsonNode rootNode = null;

        JsonNode resultSet = tree.get("ResultSet");
        if (resultSet == null) {
            logger.warnPrintf("Couldn't find 'ResultSet' in jsonTree\n");
            // bail out here: dereferencing a missing ResultSet below would
            // throw a NullPointerException
            return 0;
        }

        JsonNode result = resultSet.get("Result");
        if (result == null) {
            logger.warnPrintf("Couldn't find 'Result' in jsonTree\n");
            return 0;
        }

        if (!result.isArray()) {
            return 0;
        }

        rootNode = result;

        // parse objects:
        Iterator<JsonNode> els = rootNode.elements();
        while (els.hasNext()) {
            JsonNode el = els.next();
            list.add(parseXnatObject(type, el));
        }
    }
    // wrap exception:
    catch (JsonParseException e) {
        throw new XnatParseException("Couldn't parse result:\n" + jsonStr + "\n---\n" + e.getMessage(), e);
    } catch (IOException e) {
        throw new XnatParseException("IOException:" + e.getMessage(), e);
    }

    return list.size();
}
From source file:tests.SearchTests.java
private static List<String> list(JsonNode jsonObject) {
    List<String> list = new ArrayList<>();
    Iterator<JsonNode> elements = jsonObject.elements();
    while (elements.hasNext()) {
        list.add(elements.next().asText());
    }
    return list;
}
From source file:org.dd4t.databind.builder.json.JsonModelConverter.java
private static void handleEmbeddedContent(final JsonNode currentField, final List<JsonNode> nodeList) {
    final JsonNode embeddedNode = currentField.get(DataBindConstants.EMBEDDED_VALUES_NODE);

    // This is a fix for when we are already in an embedded node. The Json unfortunately
    // keeps sibling nodes in this child, which has FieldType embedded, while we're actually already in
    // that node's Values
    final JsonNode schemaNode = currentField.get(DataBindConstants.EMBEDDED_SCHEMA_FIELD_NAME);

    if (embeddedNode != null) {
        final Iterator<JsonNode> embeddedIterator = embeddedNode.elements();
        while (embeddedIterator.hasNext()) {
            addEmbeddedNodeAndSchemaInfo(nodeList, schemaNode, embeddedIterator);
        }
    } else {
        final Iterator<JsonNode> currentFieldElements = currentField.elements();
        while (currentFieldElements.hasNext()) {
            addEmbeddedNodeAndSchemaInfo(nodeList, schemaNode, currentFieldElements);
        }
    }
}
From source file:com.linecorp.armeria.common.thrift.text.SequenceContext.java
/**
 * Create an iterator over the children. May be constructed with a null
 * JsonNode if we only use it for writing.
 */
protected SequenceContext(JsonNode json) {
    children = null != json ? json.elements() : null;
}