List of usage examples for java.util Map get, as used when iterating Map.Entry sets
V get(Object key);
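Before the project examples below, a minimal self-contained sketch of the signature in use; the sample data is made up:

import java.util.HashMap;
import java.util.Map;

public class MapGetDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("apples", 3);

        Integer present = counts.get("apples"); // 3
        Integer absent = counts.get("pears");   // null: get returns null for a missing key
        System.out.println(present + ", " + absent);

        // get is also commonly called inside Map.Entry iteration, as the examples below show
        for (Map.Entry<String, Integer> e : counts.entrySet()) {
            System.out.println(e.getKey() + " -> " + counts.get(e.getKey()));
        }
    }
}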
From source file:org.apache.atlas.hive.hook.HiveHook.java
private List<Referenceable> createColumnLineageProcessInstances(Referenceable processRefObj,
        Map<String, List<ColumnLineageUtils.HiveColumnLineageInfo>> lineageInfo,
        Map<String, Referenceable> columnQNameToRef) {
    List<Referenceable> l = new ArrayList<>();
    for (Map.Entry<String, List<ColumnLineageUtils.HiveColumnLineageInfo>> e : lineageInfo.entrySet()) {
        Referenceable destCol = columnQNameToRef.get(e.getKey());
        if (destCol == null) {
            LOG.debug("Couldn't find output Column {}", e.getKey());
            continue;
        }
        List<Referenceable> outRef = new ArrayList<>();
        outRef.add(destCol);
        List<Referenceable> inputRefs = new ArrayList<>();
        for (ColumnLineageUtils.HiveColumnLineageInfo cLI : e.getValue()) {
            Referenceable srcCol = columnQNameToRef.get(cLI.inputColumn);
            if (srcCol == null) {
                LOG.debug("Couldn't find input Column {}", cLI.inputColumn);
                continue;
            }
            inputRefs.add(srcCol);
        }
        if (inputRefs.size() > 0) {
            Referenceable r = new Referenceable(HiveDataTypes.HIVE_COLUMN_LINEAGE.getName());
            r.set("name", processRefObj.get(AtlasClient.NAME) + ":" + outRef.get(0).get(AtlasClient.NAME));
            r.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                    processRefObj.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME) + ":"
                            + outRef.get(0).get(AtlasClient.NAME));
            r.set("inputs", inputRefs);
            r.set("outputs", outRef);
            r.set("query", processRefObj);
            // attribute name preserved as spelled in the original source
            r.set("depenendencyType", e.getValue().get(0).depenendencyType);
            r.set("expression", e.getValue().get(0).expr);
            l.add(r);
        } else {
            LOG.debug("No input references found for lineage of column {}",
                    destCol.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME));
        }
    }
    return l;
}
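The method above resolves each key through Map.get and skips entries whose lookup returns null. A minimal sketch of that lookup-or-skip idiom; the column names are hypothetical:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class LookupOrSkip {
    public static void main(String[] args) {
        Map<String, String> columnRefs = new HashMap<>();
        columnRefs.put("out.colA", "refA"); // hypothetical sample data
        List<String> requested = List.of("out.colA", "out.colB");

        List<String> resolved = new ArrayList<>();
        for (String key : requested) {
            String ref = columnRefs.get(key); // returns null when the key is absent
            if (ref == null) {
                continue; // skip unknown columns instead of failing
            }
            resolved.add(ref);
        }
        System.out.println(resolved); // [refA]
    }
}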
From source file:org.apache.flink.streaming.connectors.kafka.KafkaConsumerTestBase.java
/**
 * Runs a job using the provided environment to read a sequence of records from a single Kafka topic.
 * The expected starting offset and total read value count can be specified individually per partition.
 * The job is considered successful only if all partition read results match the start offset and
 * value count criteria.
 */
protected void readSequence(final StreamExecutionEnvironment env, final StartupMode startupMode,
        final Map<KafkaTopicPartition, Long> specificStartupOffsets, final Properties cc,
        final String topicName,
        final Map<Integer, Tuple2<Integer, Integer>> partitionsToValuesCountAndStartOffset) throws Exception {
    final int sourceParallelism = partitionsToValuesCountAndStartOffset.keySet().size();

    int finalCountTmp = 0;
    for (Map.Entry<Integer, Tuple2<Integer, Integer>> valuesCountAndStartOffset
            : partitionsToValuesCountAndStartOffset.entrySet()) {
        finalCountTmp += valuesCountAndStartOffset.getValue().f0;
    }
    final int finalCount = finalCountTmp;

    final TypeInformation<Tuple2<Integer, Integer>> intIntTupleType =
            TypeInfoParser.parse("Tuple2<Integer, Integer>");
    final TypeInformationSerializationSchema<Tuple2<Integer, Integer>> deser =
            new TypeInformationSerializationSchema<>(intIntTupleType, env.getConfig());

    // create the consumer
    cc.putAll(secureProps);
    FlinkKafkaConsumerBase<Tuple2<Integer, Integer>> consumer = kafkaServer.getConsumer(topicName, deser, cc);
    switch (startupMode) {
    case EARLIEST:
        consumer.setStartFromEarliest();
        break;
    case LATEST:
        consumer.setStartFromLatest();
        break;
    case SPECIFIC_OFFSETS:
        consumer.setStartFromSpecificOffsets(specificStartupOffsets);
        break;
    case GROUP_OFFSETS:
        consumer.setStartFromGroupOffsets();
        break;
    }

    DataStream<Tuple2<Integer, Integer>> source = env.addSource(consumer).setParallelism(sourceParallelism)
            .map(new ThrottledMapper<Tuple2<Integer, Integer>>(20)).setParallelism(sourceParallelism);

    // verify data
    source.flatMap(new RichFlatMapFunction<Tuple2<Integer, Integer>, Integer>() {

        private HashMap<Integer, BitSet> partitionsToValueCheck;
        private int count = 0;

        @Override
        public void open(Configuration parameters) throws Exception {
            partitionsToValueCheck = new HashMap<>();
            for (Integer partition : partitionsToValuesCountAndStartOffset.keySet()) {
                partitionsToValueCheck.put(partition, new BitSet());
            }
        }

        @Override
        public void flatMap(Tuple2<Integer, Integer> value, Collector<Integer> out) throws Exception {
            int partition = value.f0;
            int val = value.f1;
            BitSet bitSet = partitionsToValueCheck.get(partition);
            if (bitSet == null) {
                throw new RuntimeException("Got a record from an unknown partition");
            } else {
                bitSet.set(val - partitionsToValuesCountAndStartOffset.get(partition).f1);
            }
            count++;
            LOG.info("Received message {}, total {} messages", value, count);

            // verify if we've seen everything
            if (count == finalCount) {
                for (Map.Entry<Integer, BitSet> partitionCheck : this.partitionsToValueCheck.entrySet()) {
                    BitSet check = partitionCheck.getValue();
                    int expectedValueCount =
                            partitionsToValuesCountAndStartOffset.get(partitionCheck.getKey()).f0;
                    if (check.cardinality() != expectedValueCount) {
                        throw new RuntimeException("Expected cardinality to be " + expectedValueCount
                                + ", but was " + check.cardinality());
                    } else if (check.nextClearBit(0) != expectedValueCount) {
                        // report nextClearBit(0) here, not cardinality()
                        throw new RuntimeException("Expected next clear bit to be " + expectedValueCount
                                + ", but was " + check.nextClearBit(0));
                    }
                }
                // test has passed
                throw new SuccessException();
            }
        }
    }).setParallelism(1);

    tryExecute(env, "Read data from Kafka");
    LOG.info("Successfully read sequence for verification");
}
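The verification above keeps one BitSet per partition, fetched with Map.get for every record; completeness is then a matter of cardinality plus nextClearBit. A standalone sketch of that bookkeeping, with made-up record data:

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

public class PartitionCheck {
    public static void main(String[] args) {
        Map<Integer, BitSet> seen = new HashMap<>();
        seen.put(0, new BitSet());
        int[][] records = {{0, 0}, {0, 1}, {0, 2}}; // {partition, value} pairs, hypothetical
        for (int[] rec : records) {
            BitSet bits = seen.get(rec[0]); // null would mean an unknown partition
            bits.set(rec[1]);
        }
        BitSet check = seen.get(0);
        // all three values seen, with no gaps in the sequence
        System.out.println(check.cardinality() == 3 && check.nextClearBit(0) == 3); // true
    }
}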
From source file:base.BasePlayer.AminoTable.java
void getAminos(Gene gene) {
    try {
        aminoarray.clear();
        VarNode varnode = null;
        Map.Entry<String, ArrayList<SampleNode>> entry;
        for (int t = 0; t < gene.varnodes.size(); t++) {
            varnode = gene.varnodes.get(t);
            if (gene.intergenic) {
                for (int v = 0; v < varnode.vars.size(); v++) {
                    entry = varnode.vars.get(v);
                    mutcount = 0;
                    for (int m = 0; m < entry.getValue().size(); m++) {
                        if (entry.getValue().get(m).alleles != null) {
                            break;
                        }
                        if (entry.getValue().get(m).getSample().annotation) {
                            entry.getValue().remove(m);
                            m--;
                            continue;
                        }
                        if (!Main.drawCanvas.hideVar(entry.getValue().get(m), entry.getKey().length() > 1)
                                && !entry.getValue().get(m).getSample().annotation) {
                            if (!VariantHandler.none.isSelected()) {
                                if (!entry.getValue().get(m).inheritance) {
                                    if (!entry.getValue().get(m).getSample()
                                            .equals(Main.drawCanvas.selectedSample)) {
                                        entry.getValue().remove(m);
                                        m--;
                                        continue;
                                    }
                                }
                            }
                            if (VariantHandler.onlyselected.isSelected()) {
                                if (!entry.getValue().get(m).getSample()
                                        .equals(Main.drawCanvas.selectedSample)) {
                                    entry.getValue().remove(m);
                                    m--;
                                    continue;
                                }
                            }
                            mutcount++;
                        } else {
                            entry.getValue().remove(m);
                            m--;
                        }
                    }
                    if (mutcount == 0) {
                        continue;
                    }
                    base = entry.getKey();
                    String[] addrow = new String[9];
                    if (varnode.getTranscripts() == null) {
                        addrow[0] = gene.getName();
                    } else if (varnode.getTranscripts().size() == 2) {
                        addrow[0] = gene.getName() + " ... " + varnode.getTranscripts().get(1).getGenename();
                    } else if (varnode.getPosition() < gene.getStart()) {
                        addrow[0] = "... " + varnode.getTranscripts().get(1).getGenename();
                    } else {
                        addrow[0] = gene.getName() + " ...";
                    }
                    addrow[1] = "" + mutcount;
                    addrow[2] = gene.getChrom() + ":" + MethodLibrary.formatNumber((varnode.getPosition() + 1));
                    addrow[3] = "Intergenic";
                    if (varnode.isRscode() != null) {
                        addrow[4] = varnode.rscode;
                    } else {
                        addrow[4] = "N/A";
                    }
                    addrow[5] = base;
                    addrow[6] = "Intergenic";
                    addrow[7] = "Intergenic";
                    addrow[8] = "Intergenic";
                    AminoEntry aminoentry = new AminoEntry(addrow, varnode);
                    aminoarray.add(aminoentry);
                }
                continue;
            }
            if (varnode.getExons() != null) {
                if (!varnode.coding && !VariantHandler.utr.isSelected()) {
                    continue;
                }
                for (int v = 0; v < varnode.vars.size(); v++) {
                    entry = varnode.vars.get(v);
                    mutcount = 0;
                    for (int m = 0; m < entry.getValue().size(); m++) {
                        if (entry.getValue().get(m).alleles != null) {
                            continue;
                        }
                        if (entry.getValue().get(m).getSample().annotation) {
                            continue;
                        }
                        if (!VariantHandler.none.isSelected()) {
                            if (!entry.getValue().get(m).inheritance) {
                                entry.getValue().remove(m);
                                m--;
                                continue;
                            }
                        }
                        if (!Main.drawCanvas.hideVar(entry.getValue().get(m), entry.getKey().length() > 1)) {
                            if (VariantHandler.onlyselected.isSelected()) {
                                if (!entry.getValue().get(m).getSample()
                                        .equals(Main.drawCanvas.selectedSample)) {
                                    entry.getValue().remove(m);
                                    m--;
                                    continue;
                                }
                            }
                            mutcount++;
                        } else {
                            entry.getValue().remove(m);
                            m--;
                        }
                    }
                    if (mutcount == 0) {
                        continue;
                    }
                    base = entry.getKey();
                    String[] addrow = new String[9];
                    StringBuffer aminos = new StringBuffer(""), transcripts = new StringBuffer(""),
                            exons = new StringBuffer(""), biotypes = new StringBuffer("");
                    addrow[0] = gene.getName();
                    addrow[1] = "" + mutcount;
                    addrow[2] = gene.getChrom() + ":" + MethodLibrary.formatNumber((varnode.getPosition() + 1));
                    String aminochange;
                    for (int exon = 0; exon < varnode.getExons().size(); exon++) {
                        if (!varnode.getExons().get(exon).getTranscript().getGene().equals(gene)) {
                            continue;
                        }
                        aminochange = Main.chromDraw.getAminoChange(varnode, base,
                                varnode.getExons().get(exon));
                        if (aminochange.contains("UTR") && !VariantHandler.utr.isSelected()) {
                            continue;
                        }
                        if (VariantHandler.nonsense.isSelected()) {
                            if (!MethodLibrary.aminoEffect(aminochange).contains("nonsense")) {
                                continue;
                            }
                        } else if (VariantHandler.synonymous.isSelected()) {
                            if (MethodLibrary.aminoEffect(aminochange).contains("synonymous")) {
                                continue;
                            }
                        }
                        if (aminos.length() == 0) {
                            aminos.append(aminochange);
                            transcripts.append(varnode.getExons().get(exon).getTranscript().getENST());
                            biotypes.append(varnode.getExons().get(exon).getTranscript().getBiotype());
                            exons.append(varnode.getExons().get(exon).getNro());
                        } else {
                            aminos.append(";" + aminochange);
                            transcripts.append(";" + varnode.getExons().get(exon).getTranscript().getENST());
                            biotypes.append(";" + varnode.getExons().get(exon).getTranscript().getBiotype());
                            exons.append(";" + varnode.getExons().get(exon).getNro());
                        }
                    }
                    addrow[3] = aminos.toString();
                    if (varnode.isRscode() != null) {
                        addrow[4] = varnode.rscode;
                    } else {
                        addrow[4] = "N/A";
                    }
                    addrow[5] = base;
                    addrow[6] = transcripts.toString();
                    addrow[7] = biotypes.toString();
                    addrow[8] = exons.toString();
                    AminoEntry aminoentry = new AminoEntry(addrow, varnode);
                    aminoarray.add(aminoentry);
                }
            }
            if (VariantHandler.intronic.isSelected() && varnode.isInGene() && varnode.getTranscripts() != null
                    && varnode.getExons() == null) {
                for (int v = 0; v < varnode.vars.size(); v++) {
                    entry = varnode.vars.get(v);
                    base = entry.getKey();
                    mutcount = 0;
                    for (int m = 0; m < entry.getValue().size(); m++) {
                        if (entry.getValue().get(m).alleles != null) {
                            break;
                        }
                        if (entry.getValue().get(m).getSample().annotation) {
                            entry.getValue().remove(m);
                            m--;
                            continue;
                        }
                        if (!Main.drawCanvas.hideVar(entry.getValue().get(m), entry.getKey().length() > 1)) {
                            if (VariantHandler.onlyselected.isSelected()) {
                                if (!entry.getValue().get(m).getSample()
                                        .equals(Main.drawCanvas.selectedSample)) {
                                    entry.getValue().remove(m);
                                    m--;
                                    continue;
                                }
                            }
                            mutcount++;
                        } else {
                            entry.getValue().remove(m);
                            m--;
                        }
                    }
                    if (mutcount == 0) {
                        continue;
                    }
                    StringBuffer transcripts = new StringBuffer(""), biotypes = new StringBuffer("");
                    String[] addrow = new String[9];
                    addrow[0] = gene.getName();
                    addrow[1] = "" + mutcount;
                    addrow[2] = gene.getChrom() + ":" + MethodLibrary.formatNumber((varnode.getPosition() + 1));
                    addrow[3] = Main.getBase.get(varnode.getRefBase()) + "->" + base + " (intronic)";
                    if (varnode.isRscode() != null) {
                        addrow[4] = varnode.rscode;
                    } else {
                        addrow[4] = "N/A";
                    }
                    addrow[5] = base;
                    for (int trans = 0; trans < varnode.getTranscripts().size(); trans++) {
                        if (!varnode.getTranscripts().get(trans).getGene().equals(gene)) {
                            continue;
                        }
                        if (transcripts.length() == 0) {
                            transcripts.append(varnode.getTranscripts().get(trans).getENST());
                            biotypes.append(varnode.getTranscripts().get(trans).getBiotype());
                        } else {
                            transcripts.append(";" + varnode.getTranscripts().get(trans).getENST());
                            biotypes.append(";" + varnode.getTranscripts().get(trans).getBiotype());
                        }
                    }
                    addrow[6] = transcripts.toString();
                    addrow[7] = biotypes.toString();
                    addrow[8] = "Intronic";
                    AminoEntry aminoentry = new AminoEntry(addrow, varnode);
                    aminoarray.add(aminoentry);
                }
            }
        }
        varnode = null;
    } catch (Exception e) {
        ErrorLog.addError(e.getStackTrace());
        e.printStackTrace();
    }
}
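The filters above delete elements from entry.getValue() while walking it by index, stepping the index back after each removal. A minimal sketch of that in-place filtering idiom; the predicate here is a stand-in for the real visibility checks:

import java.util.ArrayList;
import java.util.List;

public class RemoveWhileScanning {
    public static void main(String[] args) {
        List<Integer> values = new ArrayList<>(List.of(1, 2, 3, 4, 5));
        for (int m = 0; m < values.size(); m++) {
            if (values.get(m) % 2 == 0) { // stand-in for the real filter condition
                values.remove(m);
                m--; // step back so the element shifted into slot m is not skipped
            }
        }
        System.out.println(values); // [1, 3, 5]
    }
}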
From source file:com.streamreduce.storm.bolts.InventoryItemMetricsBolt.java
/**
 * {@inheritDoc}
 */
@Override
public void handleEvent(String id, Long timestamp, EventId eventId, String accountId, String userId,
        String targetId, Map<String, Object> metadata) {
    switch (eventId) {
    case CREATE:
    case DELETE:
        String connectionId = metadata.get("targetConnectionId").toString();
        String providerType = metadata.get("targetProviderType").toString();
        String targetType = metadata.get("targetType").toString();
        String objectType = getExternalObjectType(metadata);
        Float eventValue = getEventValue(eventId);
        List<Values> metrics = new ArrayList<>();

        if (providerType.equals(ConnectionTypeConstants.CLOUD_TYPE)) {
            String iso3166Code = (String) metadata.get("targetISO3166Code");
            String region = (String) metadata.get("targetRegion");
            String zone = (String) metadata.get("targetZone");
            String osName = (String) metadata.get("targetOS");

            for (String criteriaName : ImmutableSet.of("iso3166Code", "region", "zone", "osName")) {
                MetricCriteria metricCriteria;
                String criteriaValue;
                if (criteriaName.equals("iso3166Code")) {
                    criteriaValue = iso3166Code;
                    metricCriteria = MetricCriteria.ISO_3166_CODE;
                } else if (criteriaName.equals("region")) {
                    criteriaValue = region;
                    metricCriteria = MetricCriteria.REGION;
                } else if (criteriaName.equals("zone")) {
                    criteriaValue = zone;
                    metricCriteria = MetricCriteria.AVAILABILITY_ZONE;
                } else if (criteriaName.equals("osName")) {
                    criteriaValue = osName;
                    metricCriteria = MetricCriteria.OS_NAME;
                } else {
                    LOGGER.error(criteriaName + " is an unsupported CloudInventoryItem metric.");
                    return;
                }

                // If we have an empty value, no need in processing it
                if (!StringUtils.hasText(criteriaValue)) {
                    return;
                }

                // Global (metric name specific)
                metrics.add(createGlobalMetric(MetricName.INVENTORY_ITEM_COUNT,
                        ImmutableMap.of(metricCriteria, criteriaValue), MetricModeType.DELTA, timestamp,
                        eventValue));
                // Account specific (metric name specific)
                metrics.add(createAccountMetric(accountId, MetricName.INVENTORY_ITEM_COUNT,
                        ImmutableMap.of(metricCriteria, criteriaValue), MetricModeType.DELTA, timestamp,
                        eventValue));
                // Connection specific
                metrics.add(createAccountMetric(accountId, MetricName.INVENTORY_ITEM_COUNT,
                        ImmutableMap.of(metricCriteria, criteriaValue, MetricCriteria.CONNECTION_ID,
                                connectionId),
                        MetricModeType.DELTA, timestamp, eventValue));

                if (objectType != null) {
                    // Global (metric name specific)
                    metrics.add(createGlobalMetric(MetricName.INVENTORY_ITEM_COUNT,
                            ImmutableMap.of(metricCriteria, criteriaValue, MetricCriteria.OBJECT_TYPE,
                                    objectType),
                            MetricModeType.DELTA, timestamp, eventValue));
                    // Account specific (metric name specific)
                    metrics.add(createAccountMetric(accountId, MetricName.INVENTORY_ITEM_COUNT,
                            ImmutableMap.of(metricCriteria, criteriaValue, MetricCriteria.OBJECT_TYPE,
                                    objectType),
                            MetricModeType.DELTA, timestamp, eventValue));
                    // Connection specific
                    metrics.add(createAccountMetric(accountId, MetricName.INVENTORY_ITEM_COUNT,
                            ImmutableMap.of(metricCriteria, criteriaValue, MetricCriteria.CONNECTION_ID,
                                    connectionId, MetricCriteria.OBJECT_TYPE, objectType),
                            MetricModeType.DELTA, timestamp, eventValue));
                }
            }
        }
        emitMetricsAndHashagMetrics(metrics, eventId, targetId, targetType, metadata);
        break;
    case ACTIVITY:
        String providerId = metadata.get("targetProviderId").toString();
        if (providerId.equals(ProviderIdConstants.AWS_PROVIDER_ID)) {
            Map<String, Object> metricsPayload = (Map<String, Object>) metadata.get("payload");
            if (metadata.containsKey("isAgentActivity")
                    && Boolean.valueOf(metadata.get("isAgentActivity").toString())) {
                JSONObject json = JSONUtils.flattenJSON(JSONObject.fromObject(metadata.get("payload")), null);
                for (Object rawKey : json.keySet()) {
                    String key = rawKey.toString();
                    Object value = json.get(key);
                    // elapsed_time and generated are not metrics
                    if (key.equals("elapsed_time") || key.equals("generated")) {
                        continue;
                    }
                    if (value != null && !(value instanceof JSONNull)) {
                        emitAccountMetric(accountId, MetricName.INVENTORY_ITEM_RESOURCE_USAGE,
                                ImmutableMap.of(MetricCriteria.OBJECT_ID, targetId,
                                        MetricCriteria.RESOURCE_ID, key, MetricCriteria.METRIC_ID, key),
                                MetricModeType.ABSOLUTE, timestamp, Float.valueOf(json.get(key).toString()));
                    }
                }
            } else {
                Set<String> knownMetricKeys = ImmutableSet.of("maximum", "minimum", "average");
                for (Map.Entry<String, Object> metric : metricsPayload.entrySet()) {
                    String key = metric.getKey();
                    Map<String, Object> metricJson = (Map<String, Object>) metric.getValue();
                    for (String metricKey : knownMetricKeys) {
                        emitAccountMetric(accountId, MetricName.INVENTORY_ITEM_RESOURCE_USAGE,
                                ImmutableMap.of(MetricCriteria.OBJECT_ID, targetId,
                                        MetricCriteria.RESOURCE_ID, key, MetricCriteria.METRIC_ID, metricKey),
                                MetricModeType.ABSOLUTE, timestamp,
                                Float.valueOf(metricJson.get(metricKey).toString()));
                    }
                }
            }
        } else if (providerId.equals(ProviderIdConstants.PINGDOM_PROVIDER_ID)) {
            Map<String, Object> payload = metadata.containsKey("payload")
                    ? (Map<String, Object>) metadata.get("payload")
                    : null;
            if (payload != null) {
                int lastresponsetime = payload.containsKey("lastresponsetime")
                        ? (Integer) payload.get("lastresponsetime")
                        : 0;
                emitAccountMetric(accountId, MetricName.INVENTORY_ITEM_RESOURCE_USAGE,
                        ImmutableMap.of(MetricCriteria.OBJECT_ID, targetId, MetricCriteria.RESOURCE_ID,
                                "ServerResponse", MetricCriteria.METRIC_ID, "time"),
                        MetricModeType.ABSOLUTE, timestamp, (float) lastresponsetime);
            }
        } else if (providerId.equals(ProviderIdConstants.CUSTOM_PROVIDER_ID)) {
            Map<String, Object> payload = metadata.containsKey("payload")
                    ? (Map<String, Object>) metadata.get("payload")
                    : null;
            if (payload != null && payload.containsKey("metrics")) {
                Set<Object> imgMetrics = (Set<Object>) payload.get("metrics");
                for (Object rawMetric : imgMetrics) {
                    Map<String, Object> metric = (Map<String, Object>) rawMetric;
                    emitAccountMetric(accountId, MetricName.INVENTORY_ITEM_RESOURCE_USAGE,
                            ImmutableMap.of(MetricCriteria.OBJECT_ID, targetId, MetricCriteria.RESOURCE_ID,
                                    (String) metric.get("name")),
                            MetricModeType.valueOf((String) metric.get("type")), timestamp,
                            Float.valueOf((metric.get("value")).toString()));
                }
            }
        } else if (providerId.equals(ProviderIdConstants.NAGIOS_PROVIDER_ID)) {
            Map<String, Object> payload = metadata.containsKey("payload")
                    ? (Map<String, Object>) metadata.get("payload")
                    : null;
            if (payload != null && payload.containsKey("metrics")) {
                Set<Object> imgMetrics = (Set<Object>) payload.get("metrics");
                for (Object rawMetric : imgMetrics) {
                    Map<String, Object> metric = (Map<String, Object>) rawMetric;
                    String[] nameTokens = ((String) metric.get("name")).split("_");
                    emitAccountMetric(accountId, MetricName.INVENTORY_ITEM_RESOURCE_USAGE,
                            ImmutableMap.of(MetricCriteria.OBJECT_ID, targetId.toString(),
                                    MetricCriteria.RESOURCE_ID, nameTokens[0], MetricCriteria.METRIC_ID,
                                    nameTokens[1]),
                            MetricModeType.valueOf((String) metric.get("type")), timestamp,
                            Float.valueOf((metric.get("value")).toString()));
                }
            }
        }
        break;
    default:
        // Due to the nature of how built-in metrics work and such, you can end up with events
        // being passed through that are expected but unprocessed. No need to log.
    }
}
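The bolt above repeatedly pairs containsKey with a cast of metadata.get("payload"). A hedged sketch of a single fetch-and-cast helper that could stand in for those pairs, assuming null is never stored as a value (the helper and sample payload are hypothetical):

import java.util.HashMap;
import java.util.Map;

public class PayloadLookup {
    @SuppressWarnings("unchecked")
    static <T> T get(Map<String, Object> map, String key, T fallback) {
        Object value = map.get(key); // one lookup replaces containsKey + get
        return value != null ? (T) value : fallback;
    }

    public static void main(String[] args) {
        Map<String, Object> payload = new HashMap<>();
        payload.put("lastresponsetime", 42); // hypothetical sample payload
        int responseTime = get(payload, "lastresponsetime", 0);
        System.out.println(responseTime); // 42
    }
}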
From source file:org.kuali.kra.coi.disclosure.CoiDisclosureServiceImpl.java
/**
 * Builds a complete map of all project persons, keyed by person id.
 * @param personAndDevelopmentProposals
 * @param personAndInstituteProposals
 * @param personAndAwards
 * @param personAndIrbProtocols
 * @param personAndIacucProtocols
 * @return person id mapped to the person's full name
 */
private HashMap<String, String> getAllProjectPersonsForUndisclosedEvents(
        Map<String, List<ProposalPerson>> personAndDevelopmentProposals,
        Map<String, List<InstitutionalProposalPerson>> personAndInstituteProposals,
        Map<String, List<AwardPerson>> personAndAwards,
        Map<String, List<ProtocolPerson>> personAndIrbProtocols,
        Map<String, List<IacucProtocolPerson>> personAndIacucProtocols) {
    HashMap<String, String> allProjectPersons = new HashMap<String, String>();
    for (Map.Entry<String, List<ProposalPerson>> person : personAndDevelopmentProposals.entrySet()) {
        String personId = person.getKey();
        String personName = person.getValue().get(0).getFullName();
        allProjectPersons.put(personId, personName);
    }
    for (Map.Entry<String, List<InstitutionalProposalPerson>> person : personAndInstituteProposals.entrySet()) {
        String personId = person.getKey();
        String personName = person.getValue().get(0).getFullName();
        allProjectPersons.put(personId, personName);
    }
    for (Map.Entry<String, List<AwardPerson>> person : personAndAwards.entrySet()) {
        String personId = person.getKey();
        String personName = person.getValue().get(0).getFullName();
        allProjectPersons.put(personId, personName);
    }
    for (Map.Entry<String, List<ProtocolPerson>> person : personAndIrbProtocols.entrySet()) {
        String personId = person.getKey();
        String personName = person.getValue().get(0).getFullName();
        allProjectPersons.put(personId, personName);
    }
    for (Map.Entry<String, List<IacucProtocolPerson>> person : personAndIacucProtocols.entrySet()) {
        String personId = person.getKey();
        String personName = person.getValue().get(0).getFullName();
        allProjectPersons.put(personId, personName);
    }
    return allProjectPersons;
}
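The five loops are structurally identical. A hedged sketch of one generic helper that could replace them, assuming each person type can supply its full name through an extractor function (the Person record is hypothetical):

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class ProjectPersons {
    // generic stand-in for the five copy-pasted loops above
    static <P> void collectNames(Map<String, List<P>> source, Function<P, String> fullName,
            Map<String, String> target) {
        for (Map.Entry<String, List<P>> person : source.entrySet()) {
            // the first list element carries the display name, as in the original
            target.put(person.getKey(), fullName.apply(person.getValue().get(0)));
        }
    }

    public static void main(String[] args) {
        record Person(String fullName) {} // hypothetical person type
        Map<String, List<Person>> proposals = Map.of("p1", List.of(new Person("Ada Lovelace")));
        Map<String, String> all = new HashMap<>();
        collectNames(proposals, Person::fullName, all);
        System.out.println(all); // {p1=Ada Lovelace}
    }
}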
From source file:org.wso2.carbon.registry.indexing.solr.SolrClient.java
/**
 * Adds the dynamic fields of the resource to the Solr input document.
 * @param fields dynamic fields that need to be indexed
 * @param solrInputDocument Solr input document
 */
private void addDynamicFields(Map<String, List<String>> fields, SolrInputDocument solrInputDocument) {
    // Add advance search related dynamic fields.
    if (fields != null && fields.size() > 0) {
        String fieldKey;
        for (Map.Entry<String, List<String>> fieldList : fields.entrySet()) {
            // Add multivalued attributes.
            if (fieldList.getKey().equals(IndexingConstants.FIELD_PROPERTY_VALUES)
                    || fieldList.getKey().equals(IndexingConstants.FIELD_ASSOCIATION_DESTINATIONS)
                    || fieldList.getKey().equals(IndexingConstants.FIELD_ASSOCIATION_TYPES)
                    || fieldList.getKey().equals(IndexingConstants.FIELD_COMMENTS)
                    || fieldList.getKey().equals(IndexingConstants.FIELD_TAGS)
                    || FIELD_ALLOWED_ROLES.equals(fieldList.getKey())) {
                if (fieldList.getKey().equals(IndexingConstants.FIELD_PROPERTY_VALUES)) {
                    for (String value : fieldList.getValue()) {
                        String[] propertyValArray = value.split(",");
                        fieldKey = propertyValArray[0];
                        String[] propValues = Arrays.copyOfRange(propertyValArray, 1, propertyValArray.length);
                        if (propValues.length > 0) {
                            addPropertyField(fieldKey, propValues, solrInputDocument);
                        }
                    }
                } else {
                    fieldKey = fieldList.getKey() + SolrConstants.SOLR_MULTIVALUED_STRING_FIELD_KEY_SUFFIX;
                    for (String value : fieldList.getValue()) {
                        solrInputDocument.addField(fieldKey, value);
                    }
                }
            } else if (fieldList.getKey().equals(IndexingConstants.FIELD_CREATED_DATE)
                    || fieldList.getKey().equals(IndexingConstants.FIELD_LAST_UPDATED_DATE)) {
                // Add date fields
                fieldKey = fieldList.getKey() + SolrConstants.SOLR_DATE_FIELD_KEY_SUFFIX;
                String date = toSolrDateFormat(fieldList.getValue().get(0), SolrConstants.REG_LOG_DATE_FORMAT);
                if (date != null) {
                    // Add date attributes
                    solrInputDocument.addField(fieldKey, date);
                }
            } else {
                String fieldKeyValue;
                if (fieldList.getKey().equals(IndexingConstants.FIELD_RESOURCE_NAME)) {
                    fieldKeyValue = IndexingConstants.FIELD_RESOURCE_NAME;
                } else {
                    fieldKeyValue = fieldList.getKey();
                }
                // Add single field String values
                solrInputDocument.addField(fieldKeyValue + SolrConstants.SOLR_STRING_FIELD_KEY_SUFFIX,
                        fieldList.getValue().get(0));
            }
        }
    }
}
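The FIELD_PROPERTY_VALUES branch splits each list value into a property name followed by its values. A minimal sketch of that split, with a made-up encoded property:

import java.util.Arrays;

public class PropertySplit {
    public static void main(String[] args) {
        // hypothetical encoded property: name first, then its values, comma separated
        String encoded = "owner,admin,alice";
        String[] parts = encoded.split(",");
        String fieldKey = parts[0];
        String[] propValues = Arrays.copyOfRange(parts, 1, parts.length);
        System.out.println(fieldKey + " -> " + Arrays.toString(propValues)); // owner -> [admin, alice]
    }
}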
From source file:de.tum.bgu.msm.syntheticPopulationGenerator.capeTown.SyntheticPopCT.java
private int checkHouseholdAndPersonCorrespondence() {
    // Removes households from the map that either:
    // 1) have no persons in the person file, or
    // 2) have a different number of persons in the person file than the household size.
    int persons = 0;
    int ppCount = 0;
    int hhCount = 0;
    Iterator<Map.Entry<Integer, HashMap<String, Integer>>> it = households.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<Integer, HashMap<String, Integer>> pair = it.next();
        int hhId = pair.getKey();
        int hhSize = pair.getValue().get("hhSizeReal");
        if (!personsInHouseholds.containsKey(hhId)) {
            it.remove();
            ppCount = ppCount + hhSize;
            hhCount++;
        } else {
            int members = personsInHouseholds.get(hhId).values().size();
            if (members != hhSize) {
                it.remove();
                ppCount = ppCount + hhSize;
                hhCount++;
            } else {
                persons = persons + hhSize;
            }
        }
    }
    logger.info("        " + ppCount + " persons were removed from the sample at " + hhCount + " households.");
    logger.info("        Microdata contains " + households.size() + " households with " + persons
            + " persons after removing inconsistencies in the micro data.");
    return persons;
}
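Deleting entries while walking the map requires going through the entrySet iterator, as above; calling households.remove(hhId) inside the loop would throw ConcurrentModificationException. A standalone sketch of the prune pattern, with made-up household data:

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class PruneEntries {
    public static void main(String[] args) {
        Map<Integer, Integer> households = new HashMap<>();
        households.put(1, 2); // hypothetical id -> declared household size
        households.put(2, 3);
        Map<Integer, Integer> actualMembers = Map.of(1, 2); // household 2 has no persons

        Iterator<Map.Entry<Integer, Integer>> it = households.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<Integer, Integer> pair = it.next();
            Integer members = actualMembers.get(pair.getKey());
            if (members == null || !members.equals(pair.getValue())) {
                it.remove(); // the only safe way to delete while iterating
            }
        }
        System.out.println(households); // {1=2}
    }
}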
From source file:org.moe.cli.executor.CocoaPodsExecutor.java
@Override
public void execute() throws IOException, OperationsException, InterruptedException,
        InvalidParameterSpecException, CompressorException, ArchiveException, URISyntaxException,
        CheckArchitectureException, UnsupportedTypeException, WrapNatJGenException {
    File podFile = new File(pod);
    if (podFile.exists()) {
        SpecObject spec = getSpecObject(podFile);
        // update self dependency
        if (spec.getSizeSubspecs() > 0) {
            Map<String, List<String>> specDependencies = spec.getDependencies();
            for (Map.Entry<String, List<String>> entry : specDependencies.entrySet()) {
                if (entry.getKey().startsWith(spec.getName() + "/")) {
                    int depIdx = entry.getKey().indexOf("/");
                    spec.addSubspec(entry.getKey().substring(depIdx + 1).trim());
                }
            }
        } else if (spec.getSizeSubspecs() == 0
                && (spec.getDefaultSubspecs() == null || spec.getDefaultSubspecs().size() == 0)) {
            Map<String, List<String>> specDependencies = spec.getDependencies();
            for (Map.Entry<String, List<String>> entry : specDependencies.entrySet()) {
                String nameSubspec = null;
                if (entry.getKey().startsWith(spec.getName() + "/")) {
                    int depIdx = entry.getKey().indexOf("/");
                    nameSubspec = entry.getKey().substring(depIdx + 1).trim();
                    spec.addSubspec(nameSubspec);
                }
            }
        }

        IExecutor executor = null;
        String source = spec.getSource().get("http");
        if (source == null) {
            String git = spec.getSource().get("git");
            String tag = spec.getSource().get("tag");
            // remove the trailing ".git"
            git = git.substring(0, git.length() - 4);
            source = String.format("%s/archive/%s.zip", git, tag);
        }

        // handle jspec repository
        if (javaSource == null && jpodSpecRepo != null) {
            JSpecObject jspec = JSpecObject.getJSpecObject(spec, jpodSpecRepo);
            if (jspec != null) {
                javaSource = jspec.getSource();
            }
        }

        if (source != null
                && (spec.getVendoredFrameworks().size() != 0 || spec.getVendoredLibraries().size() != 0)) {
            PrebuildCocoaPodsManager manager = new PrebuildCocoaPodsManager();
            executor = manager.processCocoapods(source, spec, packageName, javaSource, outputJar);
            Map<String, List<String>> localDependencies = spec.getDependencies();
            for (Map.Entry<String, List<String>> entry : localDependencies.entrySet()) {
                if (!entry.getKey().startsWith(spec.getName() + "/")
                        && !dependencies.contains(entry.getKey())) {
                    // create dependency spec
                    String podName = entry.getKey();
                    String subspec = null;
                    if (podName.contains("/")) {
                        int depIdx = podName.indexOf("/");
                        subspec = podName.substring(depIdx + 1);
                        podName = podName.substring(0, depIdx);
                    }
                    File depPodFile = new File(podFile.getParentFile(), podName + "Pod");
                    PrintWriter writer = new PrintWriter(depPodFile);
                    try {
                        writer.println("pod:" + podName);
                        String version = entry.getValue().get(0);
                        writer.println("version:" + (version == null ? "" : version));
                        writer.println("subspec:" + (subspec == null ? "" : subspec));
                    } finally {
                        writer.close();
                    }
                    File tmpOut = new File(outputJar);
                    File depOutJar = new File(tmpOut.getParent(), podName + ".jar");
                    String depPackageName = packageName + "." + entry.getKey();
                    Set<String> commonDep = new HashSet<String>(dependencies);
                    commonDep.addAll(localDependencies.keySet());
                    CocoaPodsExecutor depExecutor = new CocoaPodsExecutor(depPodFile.getPath(),
                            depOutJar.getPath(), null, jpodSpecRepo, depPackageName.toLowerCase(), commonDep);
                    depExecutor.execute();
                }
            }
        } else {
            SourceCocoaPodsManager manager = new SourceCocoaPodsManager();
            executor = manager.processCocoapods(null, spec, packageName, javaSource, outputJar);
        }

        if (executor != null) {
            executor.execute();
        } else {
            throw new InvalidParameterException("Unsupported pod source");
        }
    } else {
        throw new InvalidParameterException(
                String.format("Specify correct path in %s parameter", OptionsHandler.POD.getLongOpt()));
    }
}
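Dependency keys returned by spec.getDependencies() use the "PodName/Subspec" convention, which the loop above splits on the first slash. A tiny sketch of that split, with a made-up key:

public class PodNameSplit {
    public static void main(String[] args) {
        String entryKey = "AFNetworking/Reachability"; // hypothetical pod dependency key
        String podName = entryKey;
        String subspec = null;
        if (podName.contains("/")) {
            int depIdx = podName.indexOf("/");
            subspec = podName.substring(depIdx + 1);
            podName = podName.substring(0, depIdx);
        }
        System.out.println(podName + " / " + subspec); // AFNetworking / Reachability
    }
}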
From source file:org.biomart.configurator.controller.MartController.java
public Collection<Mart> requestCreateMartsFromTarget(MartRegistry registry, DataLinkInfo dlinkInfo)
        throws MartBuilderException {
    List<Mart> result = new ArrayList<Mart>();
    for (Map.Entry<MartInVirtualSchema, List<DatasetFromUrl>> entry : dlinkInfo.getJdbcLinkObject()
            .getDsInfoMap().entrySet()) {
        String tbName = entry.getValue().get(0).getName();
        int index = tbName.indexOf(Resources.get("tablenameSep"));
        String dsName = tbName.substring(0, index);
        Mart mc = null;
        if (!dlinkInfo.isBCPartitioned())
            mc = this.requestCreateDataSetFromTarget(registry, dsName, dlinkInfo);
        else
            mc = this.requestCreateDataSetFromTargetPartitioned(registry, entry.getKey(), dlinkInfo);
        if (mc != null)
            result.add(mc);
        // only one for now
        break;
    }
    return result;
}
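Because the loop breaks unconditionally after its first iteration, it is effectively a first-entry lookup. A hedged equivalent sketch, with hypothetical dataset names and "__" standing in for the tablenameSep resource:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FirstEntry {
    public static void main(String[] args) {
        Map<String, List<String>> dsInfoMap = new LinkedHashMap<>();
        dsInfoMap.put("schemaA", List.of("orders__main")); // hypothetical dataset info
        dsInfoMap.put("schemaB", List.of("items__main"));

        if (!dsInfoMap.isEmpty()) {
            // equivalent to a for loop over entrySet() that breaks after its first iteration
            Map.Entry<String, List<String>> first = dsInfoMap.entrySet().iterator().next();
            String tbName = first.getValue().get(0);
            String dsName = tbName.substring(0, tbName.indexOf("__"));
            System.out.println(dsName); // orders
        }
    }
}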
From source file:de.tum.bgu.msm.syntheticPopulationGenerator.capeTown.SyntheticPopCT.java
private void updateMicroPersons(Integer hhId) {
    HashMap<Integer, HashMap<String, Integer>> persons = personsInHouseholds.get(hhId);
    frequencyMatrix.setIndexedValueAt(hhId, "population", persons.keySet().size());
    for (Map.Entry<Integer, HashMap<String, Integer>> attributeMap : persons.entrySet()) {
        Integer personId = attributeMap.getKey();
        for (int i = 0; i < codePersonAttributes.length; i++) {
            String labelMicro = microPersonAttributes[i];
            String labelCode = codePersonAttributes[i];
            int value = attributeMap.getValue().get(labelCode);
            microDataPerson.setIndexedValueAt(personId, labelMicro, value);
            if (attributeCodeValues.containsKey(labelCode) && value > -1) {
                String labelFrequency = labelCode + value;
                int previousFrequency = (int) frequencyMatrix.getIndexedValueAt(hhId, labelFrequency);
                frequencyMatrix.setIndexedValueAt(hhId, labelFrequency, previousFrequency + 1);
            }
        }
    }
}
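The frequency update above is a read-then-increment on a keyed store. When the store is a plain Map rather than a frequency matrix, Map.merge expresses the same counting in one call; a minimal sketch with made-up attribute codes:

import java.util.HashMap;
import java.util.Map;

public class FrequencyCount {
    public static void main(String[] args) {
        Map<String, Integer> frequency = new HashMap<>();
        String[] observed = {"age2", "age2", "gender1"}; // hypothetical attribute codes
        for (String label : observed) {
            // read-modify-write in one call, replacing the get + put of the original pattern
            frequency.merge(label, 1, Integer::sum);
        }
        System.out.println(frequency); // {age2=2, gender1=1}
    }
}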