List of usage examples for com.fasterxml.jackson.databind.JsonNode.elements()
public Iterator<JsonNode> elements()
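elements() returns an iterator over the direct children of a container node: the items of an ArrayNode, or the field values (without their names) of an ObjectNode; on any other node type the iterator is empty. The following minimal sketch is a standalone illustration (not taken from the projects below) showing both cases:

import java.util.Iterator;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ElementsDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // On an array node, elements() iterates the array items in order.
        JsonNode array = mapper.readTree("[1, 2, 3]");
        Iterator<JsonNode> items = array.elements();
        while (items.hasNext()) {
            System.out.println("array item: " + items.next().asInt());
        }

        // On an object node, elements() iterates the field values (the names are dropped).
        JsonNode object = mapper.readTree("{\"a\": \"x\", \"b\": \"y\"}");
        Iterator<JsonNode> values = object.elements();
        while (values.hasNext()) {
            System.out.println("field value: " + values.next().asText());
        }
    }
}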
From source file:com.digitalpebble.storm.crawler.filtering.URLFilters.java
@Override
public void configure(Map stormConf, JsonNode jsonNode) {
    // initialises the filters
    List<URLFilter> filterLists = new ArrayList<>();

    // get the filters part
    String name = getClass().getCanonicalName();
    jsonNode = jsonNode.get(name);
    if (jsonNode == null) {
        LOG.info("No field {} in JSON config. Skipping", name);
        filters = new URLFilter[0];
        return;
    }

    // conf node contains a list of objects
    Iterator<JsonNode> filterIter = jsonNode.elements();
    while (filterIter.hasNext()) {
        JsonNode afilterNode = filterIter.next();
        String filterName = "<unnamed>";
        JsonNode nameNode = afilterNode.get("name");
        if (nameNode != null) {
            filterName = nameNode.textValue();
        }
        JsonNode classNode = afilterNode.get("class");
        if (classNode == null) {
            LOG.error("Filter {} doesn't specified a 'class' attribute", filterName);
            continue;
        }
        String className = classNode.textValue().trim();
        filterName += '[' + className + ']';
        // check that it is available and implements the interface URLFilter
        try {
            Class<?> filterClass = Class.forName(className);
            boolean interfaceOK = URLFilter.class.isAssignableFrom(filterClass);
            if (!interfaceOK) {
                LOG.error("Class {} does not implement URLFilter", className);
                continue;
            }
            URLFilter filterInstance = (URLFilter) filterClass.newInstance();
            JsonNode paramNode = afilterNode.get("params");
            if (paramNode != null) {
                filterInstance.configure(stormConf, paramNode);
            } else {
                filterInstance.configure(stormConf, NullNode.getInstance());
            }
            filterLists.add(filterInstance);
            LOG.info("Loaded instance of class {}", className);
        } catch (Exception e) {
            LOG.error("Can't setup {}: {}", filterName, e);
            continue;
        }
    }
    filters = filterLists.toArray(new URLFilter[filterLists.size()]);
}
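The method above looks up an array of filter definitions under the configuring class's canonical name and walks it with elements(); each entry may carry "name", "class" and "params" fields. The following standalone sketch reproduces that traversal over a hypothetical config (only the field layout is taken from the code above; the filter class names and parameter values are invented):

import java.util.Iterator;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class UrlFiltersConfigSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical config: an array of filter definitions keyed by the
        // canonical class name, mirroring the structure configure() expects.
        String json = "{ \"com.digitalpebble.storm.crawler.filtering.URLFilters\": ["
                + " { \"name\": \"basic\", \"class\": \"com.example.BasicURLFilter\","
                + "   \"params\": { \"maxLength\": 1024 } },"
                + " { \"name\": \"host\",  \"class\": \"com.example.HostURLFilter\" }"
                + "] }";

        JsonNode root = new ObjectMapper().readTree(json);
        JsonNode filtersNode = root.get("com.digitalpebble.storm.crawler.filtering.URLFilters");

        // Same traversal as configure(): iterate the filter definitions.
        Iterator<JsonNode> filterIter = filtersNode.elements();
        while (filterIter.hasNext()) {
            JsonNode afilterNode = filterIter.next();
            JsonNode nameNode = afilterNode.get("name");
            JsonNode classNode = afilterNode.get("class");
            JsonNode paramNode = afilterNode.get("params");
            System.out.println("filter " + (nameNode == null ? "<unnamed>" : nameNode.textValue())
                    + " -> class " + (classNode == null ? "<missing>" : classNode.textValue())
                    + (paramNode == null ? " (no params)" : " with params " + paramNode));
        }
    }
}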
From source file:com.digitalpebble.stormcrawler.protocol.selenium.NavigationFilters.java
@SuppressWarnings("rawtypes") @Override//from w ww . j ava 2 s . c o m public void configure(Map stormConf, JsonNode filtersConf) { // initialises the filters List<NavigationFilter> filterLists = new ArrayList<>(); // get the filters part String name = getClass().getCanonicalName(); filtersConf = filtersConf.get(name); if (filtersConf == null) { LOG.info("No field {} in JSON config. Skipping", name); filters = new NavigationFilter[0]; return; } // conf node contains a list of objects Iterator<JsonNode> filterIter = filtersConf.elements(); while (filterIter.hasNext()) { JsonNode afilterConf = filterIter.next(); String filterName = "<unnamed>"; JsonNode nameNode = afilterConf.get("name"); if (nameNode != null) { filterName = nameNode.textValue(); } JsonNode classNode = afilterConf.get("class"); if (classNode == null) { LOG.error("Filter {} doesn't specified a 'class' attribute", filterName); continue; } String className = classNode.textValue().trim(); filterName += '[' + className + ']'; // check that it is available and implements the interface // NavigationFilter try { Class<?> filterClass = Class.forName(className); boolean subClassOK = NavigationFilter.class.isAssignableFrom(filterClass); if (!subClassOK) { LOG.error("Filter {} does not extend NavigationFilter", filterName); continue; } NavigationFilter filterInstance = (NavigationFilter) filterClass.newInstance(); JsonNode paramNode = afilterConf.get("params"); if (paramNode != null) { filterInstance.configure(stormConf, paramNode); } else { // Pass in a nullNode if missing filterInstance.configure(stormConf, NullNode.getInstance()); } filterLists.add(filterInstance); LOG.info("Setup {}", filterName); } catch (Exception e) { LOG.error("Can't setup {}: {}", filterName, e); throw new RuntimeException("Can't setup " + filterName, e); } } filters = filterLists.toArray(new NavigationFilter[filterLists.size()]); }
From source file:com.digitalpebble.storm.crawler.parse.ParseFilters.java
@SuppressWarnings("rawtypes") @Override//from w w w . j a v a 2 s.com public void configure(Map stormConf, JsonNode filtersConf) { // initialises the filters List<ParseFilter> filterLists = new ArrayList<>(); // get the filters part String name = getClass().getCanonicalName(); filtersConf = filtersConf.get(name); if (filtersConf == null) { LOG.info("No field {} in JSON config. Skipping", name); filters = new ParseFilter[0]; return; } // conf node contains a list of objects Iterator<JsonNode> filterIter = filtersConf.elements(); while (filterIter.hasNext()) { JsonNode afilterConf = filterIter.next(); String filterName = "<unnamed>"; JsonNode nameNode = afilterConf.get("name"); if (nameNode != null) { filterName = nameNode.textValue(); } JsonNode classNode = afilterConf.get("class"); if (classNode == null) { LOG.error("Filter {} doesn't specified a 'class' attribute", filterName); continue; } String className = classNode.textValue().trim(); filterName += '[' + className + ']'; // check that it is available and implements the interface // ParseFilter try { Class<?> filterClass = Class.forName(className); boolean subClassOK = ParseFilter.class.isAssignableFrom(filterClass); if (!subClassOK) { LOG.error("Filter {} does not extend ParseFilter", filterName); continue; } ParseFilter filterInstance = (ParseFilter) filterClass.newInstance(); JsonNode paramNode = afilterConf.get("params"); if (paramNode != null) { filterInstance.configure(stormConf, paramNode); } else { // Pass in a nullNode if missing filterInstance.configure(stormConf, NullNode.getInstance()); } filterLists.add(filterInstance); LOG.info("Setup {}", filterName); } catch (Exception e) { LOG.error("Can't setup {}: {}", filterName, e); throw new RuntimeException("Can't setup " + filterName, e); } } filters = filterLists.toArray(new ParseFilter[filterLists.size()]); }
From source file:org.apache.drill.exec.store.http.HttpRecordReader.java
private void parseResult(String content) {
    String key = scanSpec.getResultKey();
    JsonNode root = (key == null || key.length() == 0) ? JsonConverter.parse(content)
            : JsonConverter.parse(content, key);
    if (root != null) {
        logger.debug("response object count {}", root.size());
        jsonIt = root.elements();
    }
}
From source file:org.apache.olingo.commons.core.data.AbstractJsonDeserializer.java
protected void value(final JSONPropertyImpl property, final JsonNode node) {
    final EdmTypeInfo typeInfo = StringUtils.isBlank(property.getType()) ? null
            : new EdmTypeInfo.Builder().setTypeExpression(property.getType()).build();

    final ODataPropertyType propType = typeInfo == null ? guessPropertyType(node)
            : typeInfo.isCollection() ? ODataPropertyType.COLLECTION
                    : typeInfo.isPrimitiveType() ? ODataPropertyType.PRIMITIVE
                            : ODataPropertyType.COMPLEX;

    switch (propType) {
    case COLLECTION:
        property.setValue(fromCollection(node.elements(), typeInfo));
        break;

    case COMPLEX:
        if (node.has(Constants.JSON_TYPE)) {
            property.setType(node.get(Constants.JSON_TYPE).asText());
            ((ObjectNode) node).remove(Constants.JSON_TYPE);
        }
        property.setValue(fromComplex(node));
        break;

    case PRIMITIVE:
        if (property.getType() == null) {
            property.setType(getPrimitiveType(node).getFullQualifiedName().toString());
        }
        property.setValue(fromPrimitive(node, typeInfo));
        break;

    case EMPTY:
    default:
        property.setValue(new PrimitiveValueImpl(StringUtils.EMPTY));
    }
}
From source file:com.alliander.osgp.shared.usermanagement.KeycloakClient.java
private boolean useSessionIdForClients(final JsonNode clientsObject) {
    final Iterator<JsonNode> clientNameNodeIterator = clientsObject.elements();
    while (clientNameNodeIterator.hasNext()) {
        final JsonNode clientName = clientNameNodeIterator.next();
        if (clientName == null || !clientName.isTextual()) {
            LOGGER.warn("value in clients is not a JSON text node with a client name");
            continue;
        }
        if (clientName.textValue().equals(this.loginClient)) {
            return true;
        }
    }
    return false;
}
From source file:com.unboundid.scim2.common.DiffTestCase.java
private void removeNullNodes(JsonNode object) {
    Iterator<JsonNode> i = object.elements();
    while (i.hasNext()) {
        JsonNode field = i.next();
        if (field.isNull() || (field.isArray() && field.size() == 0)) {
            i.remove();
        } else {
            removeNullNodes(field);
        }
    }
}
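This helper relies on two properties of elements(): it can recurse into any container node, and the iterator it returns for ObjectNode and ArrayNode supports remove(), as the test code above relies on, so null values and empty arrays can be pruned in place. Below is a minimal standalone sketch of the same idea; the class name and the sample document are invented for illustration:

import java.util.Iterator;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class PruneNullsSketch {

    // Recursively drop null values and empty arrays, mirroring removeNullNodes() above.
    static void prune(JsonNode node) {
        Iterator<JsonNode> it = node.elements();
        while (it.hasNext()) {
            JsonNode child = it.next();
            if (child.isNull() || (child.isArray() && child.size() == 0)) {
                it.remove(); // removes the child from the backing ObjectNode/ArrayNode
            } else {
                prune(child);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        JsonNode doc = new ObjectMapper()
                .readTree("{\"a\": null, \"b\": [], \"c\": {\"d\": null, \"e\": 1}}");
        prune(doc);
        System.out.println(doc); // {"c":{"e":1}}
    }
}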
From source file:org.opendaylight.alto.core.northbound.route.endpointproperty.impl.AltoNorthboundRouteEndpointproperty.java
@Path("{path}") @POST/*ww w. j a va 2s .co m*/ @Consumes({ ALTO_ENDPOINTPROPERTY_FILTER }) @Produces({ ALTO_ENDPOINTPROPERTY, ALTO_ERROR }) public Response getEndpointProperty(@PathParam("path") String path, String content) { JsonNode filterNode = EndpointpropertyRouteChecker.checkJsonSyntax(content); JsonNode _properties = filterNode.get(FIELD_PROPERTIES); EndpointpropertyRouteChecker.checkMissing(_properties, FIELD_PROPERTIES); EndpointpropertyRouteChecker.checkList(_properties, FIELD_PROPERTIES); JsonNode _endpoints = filterNode.get(FIELD_ENDPOINTS); EndpointpropertyRouteChecker.checkMissing(_endpoints, FIELD_ENDPOINTS); EndpointpropertyRouteChecker.checkList(_endpoints, FIELD_ENDPOINTS); QueryInput input = prepareInput(path, _properties.elements(), _endpoints.elements()); LOG.info(input.toString()); Future<RpcResult<QueryOutput>> outputFuture = mapService.query(input); QueryOutput output = null; try { output = outputFuture.get().getResult(); } catch (Exception e) { LOG.warn("get output failed:", e); } Response response = null; try { response = buildOutput(input, output); } catch (Exception E) { } if (response != null) return response; else return Response.status(404).build(); }
From source file:dao.AdvSearchDAO.java
public static ObjectNode elasticSearch(JsonNode searchOpt, int page, int size) {
    ObjectNode resultNode = Json.newObject();
    Long count = 0L;
    List<Dataset> pagedDatasets = new ArrayList<>();
    ObjectNode queryNode = Json.newObject();
    queryNode.put("from", (page - 1) * size);
    queryNode.put("size", size);

    JsonNode searchNode = utils.Search.generateDatasetAdvSearchQueryString(searchOpt);
    if (searchNode != null && searchNode.isContainerNode()) {
        queryNode.set("query", searchNode);
    }

    Promise<WSResponse> responsePromise = WS
            .url(Play.application().configuration().getString(SearchDAO.ELASTICSEARCH_DATASET_URL_KEY))
            .post(queryNode);
    JsonNode responseNode = responsePromise.get(1000).asJson();

    resultNode.put("page", page);
    resultNode.put("category", "Datasets");
    resultNode.put("itemsPerPage", size);

    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits")) {
        JsonNode hitsNode = responseNode.get("hits");
        if (hitsNode != null) {
            if (hitsNode.has("total")) {
                count = hitsNode.get("total").asLong();
            }
            if (hitsNode.has("hits")) {
                JsonNode dataNode = hitsNode.get("hits");
                if (dataNode != null && dataNode.isArray()) {
                    Iterator<JsonNode> arrayIterator = dataNode.elements();
                    if (arrayIterator != null) {
                        while (arrayIterator.hasNext()) {
                            JsonNode node = arrayIterator.next();
                            if (node.isContainerNode() && node.has("_id")) {
                                Dataset dataset = new Dataset();
                                dataset.id = node.get("_id").asLong();
                                if (node.has("_source")) {
                                    JsonNode sourceNode = node.get("_source");
                                    if (sourceNode != null) {
                                        if (sourceNode.has("name")) {
                                            dataset.name = sourceNode.get("name").asText();
                                        }
                                        if (sourceNode.has("source")) {
                                            dataset.source = sourceNode.get("source").asText();
                                        }
                                        if (sourceNode.has("urn")) {
                                            dataset.urn = sourceNode.get("urn").asText();
                                        }
                                        if (sourceNode.has("schema")) {
                                            dataset.schema = sourceNode.get("schema").asText();
                                        }
                                    }
                                }
                                pagedDatasets.add(dataset);
                            }
                        }
                    }
                }
            }
        }
    }

    resultNode.put("count", count);
    resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
    resultNode.set("data", Json.toJson(pagedDatasets));
    return resultNode;
}
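The block above walks an Elasticsearch-style response: hits.total for the count and hits.hits as an array whose entries carry _id and a _source document, with elements() iterating the hit array. The following standalone sketch performs the same traversal over a hard-coded sample response; the field values are invented, only the layout mirrors what the code above reads:

import java.util.Iterator;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class HitsTraversalSketch {
    public static void main(String[] args) throws Exception {
        // Sample response with the same layout the DAO above reads: hits.total and hits.hits[].
        String json = "{\"hits\": {\"total\": 2, \"hits\": ["
                + "{\"_id\": 1, \"_source\": {\"name\": \"clicks\", \"source\": \"hdfs\", \"urn\": \"hdfs:///data/clicks\"}},"
                + "{\"_id\": 2, \"_source\": {\"name\": \"views\", \"source\": \"hdfs\", \"urn\": \"hdfs:///data/views\"}}"
                + "]}}";

        JsonNode responseNode = new ObjectMapper().readTree(json);
        JsonNode hitsNode = responseNode.get("hits");
        long count = hitsNode.get("total").asLong();

        // Iterate the hit array with elements(), reading _id and fields of _source.
        Iterator<JsonNode> arrayIterator = hitsNode.get("hits").elements();
        while (arrayIterator.hasNext()) {
            JsonNode node = arrayIterator.next();
            JsonNode sourceNode = node.get("_source");
            System.out.println(node.get("_id").asLong() + " -> " + sourceNode.get("name").asText());
        }
        System.out.println("total: " + count);
    }
}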