List of usage examples for java.util Set forEach
default void forEach(Consumer<? super T> action)
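Set inherits forEach from java.lang.Iterable: the supplied Consumer is invoked once for every element, and for most Set implementations no particular iteration order is guaranteed. Before the source-file examples below, here is a minimal standalone sketch of typical usage; the class name SetForEachExample and the sample values are illustrative only.

import java.util.LinkedHashSet;
import java.util.Set;

public class SetForEachExample {
    public static void main(String[] args) {
        // LinkedHashSet keeps insertion order, so the output below is deterministic.
        Set<String> names = new LinkedHashSet<>();
        names.add("alice");
        names.add("bob");
        names.add("carol");

        // forEach invokes the Consumer once per element.
        names.forEach(name -> System.out.println("hello, " + name));

        // A method reference works wherever a Consumer is expected.
        names.forEach(System.out::println);
    }
}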
From source file:org.finra.herd.service.FacetFieldValidationService.java
/**
 * Validates a set of facet fields. This method also trims and lower-cases the fields.
 *
 * @param facetFields the facet fields to be validated
 *
 * @return the set of facet fields
 */
default Set<String> validateFacetFields(Set<String> facetFields) {
    // Create a local copy of the fields set so that we can stream it to modify the fields set
    Set<String> localCopy = new HashSet<>(facetFields);

    // Clear the fields set
    facetFields.clear();

    // Add the strings to the fields set trimmed and lower-cased, filtering out blank and null strings
    localCopy.stream().filter(StringUtils::isNotBlank).map(String::trim).map(String::toLowerCase)
            .forEachOrdered(facetFields::add);

    // Validate the field names
    facetFields.forEach(field -> Assert.isTrue(getValidFacetFields().contains(field),
            String.format("Facet field \"%s\" is not supported.", field)));

    return facetFields;
}
From source file:com.epam.reportportal.extension.bugtracking.jira.JiraStrategy.java
@Override
public Ticket submitTicket(final PostTicketRQ ticketRQ, ExternalSystem details) {
    expect(ticketRQ.getFields(), not(isNull())).verify(UNABLE_INTERACT_WITH_EXTRERNAL_SYSTEM,
            "External System fields set is empty!");
    List<PostFormField> fields = ticketRQ.getFields();

    // TODO add validation of any field with allowedValues() array
    // Additional validation required for unsupported ticket type and/or components in JIRA.
    PostFormField issuetype = new PostFormField();
    PostFormField components = new PostFormField();
    for (PostFormField object : fields) {
        if ("issuetype".equalsIgnoreCase(object.getId()))
            issuetype = object;
        if ("components".equalsIgnoreCase(object.getId()))
            components = object;
    }

    expect(issuetype.getValue().size(), equalTo(1)).verify(UNABLE_INTERACT_WITH_EXTRERNAL_SYSTEM,
            formattedSupplier("[IssueType] field has multiple values '{}' but should be only one",
                    issuetype.getValue()));
    final String issueTypeStr = issuetype.getValue().get(0);
    expect(JIRATicketType.findByName(issueTypeStr), not(isNull())).verify(UNABLE_INTERACT_WITH_EXTRERNAL_SYSTEM,
            formattedSupplier("Ticket with [IssueType] '{}' cannot be send to external system", issueTypeStr));

    try (JiraRestClient client = getClient(details.getUrl(), ticketRQ.getUsername(), ticketRQ.getPassword())) {
        Project jiraProject = getProject(client, details);

        if (null != components.getValue()) {
            Set<String> validComponents = StreamSupport.stream(jiraProject.getComponents().spliterator(), false)
                    .map(JiraPredicates.COMPONENT_NAMES).collect(toSet());
            validComponents.forEach(component -> expect(component, in(validComponents)).verify(
                    UNABLE_INTERACT_WITH_EXTRERNAL_SYSTEM,
                    formattedSupplier("Component '{}' not exists in the external system", component)));
        }

        // TODO consider to modify code below - project cached
        Optional<IssueType> issueType = StreamSupport.stream(jiraProject.getIssueTypes().spliterator(), false)
                .filter(input -> issueTypeStr.equalsIgnoreCase(input.getName())).findFirst();
        expect(issueType, Preconditions.IS_PRESENT).verify(UNABLE_INTERACT_WITH_EXTRERNAL_SYSTEM,
                formattedSupplier("Unable post issue with type '{}' for project '{}'.",
                        issuetype.getValue().get(0), details.getProject()));

        IssueInput issueInput = JIRATicketUtils.toIssueInput(client, jiraProject, issueType, ticketRQ,
                ticketRQ.getBackLinks().keySet(), descriptionService);

        Map<String, String> binaryData = findBinaryData(issueInput);

        /*
         * Claim because we wanna be sure everything is OK
         */
        BasicIssue createdIssue = client.getIssueClient().createIssue(issueInput).claim();

        // post binary data
        Issue issue = client.getIssueClient().getIssue(createdIssue.getKey()).claim();

        AttachmentInput[] attachmentInputs = new AttachmentInput[binaryData.size()];
        int counter = 0;
        for (Map.Entry<String, String> binaryDataEntry : binaryData.entrySet()) {
            BinaryData data = dataStorage.fetchData(binaryDataEntry.getKey());
            if (null != data) {
                attachmentInputs[counter] = new AttachmentInput(binaryDataEntry.getValue(), data.getInputStream());
                counter++;
            }
        }
        if (counter != 0)
            client.getIssueClient().addAttachments(issue.getAttachmentsUri(),
                    Arrays.copyOf(attachmentInputs, counter));
        return getTicket(createdIssue.getKey(), details, client).orElse(null);
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        throw new ReportPortalException(e.getMessage(), e);
    }
}
From source file:com.github.mrstampy.gameboot.usersession.processor.UserMessageProcessorTest.java
private void metrics() throws Exception {
    Set<Entry<String, Timer>> timers = helper.getTimers();
    timers.forEach(e -> display(e));

    Set<Entry<String, Counter>> counters = helper.getCounters();
    counters.forEach(e -> display(e));
}
From source file:org.nanoframework.orm.jdbc.record.AbstractJdbcRecord.java
protected SQLScriptBatch createInsertBatchStatement(final List<T> entitys) {
    Assert.notEmpty(entitys);
    final Set<String> attributeNames = fieldColumnMapper.keySet();
    final StringBuilder sqlBuilder = new StringBuilder("insert into ");
    final StringBuilder valueBuilder = new StringBuilder(" values ( ");
    sqlBuilder.append(tableName);
    sqlBuilder.append(" ( ");
    attributeNames.forEach(attribute -> {
        sqlBuilder.append(fieldColumnMapper.get(attribute));
        sqlBuilder.append(", ");
        valueBuilder.append("?, ");
    });

    final List<List<Object>> batchValues = Lists.newArrayList();
    entitys.stream().filter(entity -> entity != null).forEach(entity -> {
        final List<Object> values = Lists.newArrayList();
        attributeNames.forEach(attributeName -> values.add(entity.attributeValue(attributeName)));
        batchValues.add(values);
    });

    return SQLScriptBatch.create(createInsertSQL(sqlBuilder, valueBuilder), batchValues);
}
From source file:HSqlManager.java
public static void commonClusterNewPhages(Connection connection, int bps)
        throws SQLException, IOException, ClassNotFoundException, IllegalAccessException, InstantiationException {
    Connection db = connection;
    String base = new File("").getAbsolutePath();
    db.setAutoCommit(false);
    PreparedStatement st = db.prepareStatement("UPDATE Primerdb.Primers SET CommonP = False,"
            + " UniqueP = False" + " WHERE Cluster = ? and " + "Strain = ? and Sequence = ? and Bp =?");
    Statement stat = db.createStatement();
    if (newPhages != null) {
        List<String[]> phages = newPhages;
        phages.forEach(x -> {
            try {
                CSV.writeDataCSV(x[0], Fasta.process(x[0], bps), bps);
                CSV.writeDataCSV(x[0], Fasta.processPrimers(x[0], bps), bps);
            } catch (IOException e) {
                e.printStackTrace();
            }
            Set<CharSequence> primers = new HashSet<>();
            try {
                ResultSet rs = stat.executeQuery("SELECT * FROM Primerdb.Primers WHERE" + " Sequence = '" + x[1]
                        + "' and Clusters = '" + x[2] + "' and CommonP = True" + "and Bp =" + Integer.valueOf(bps));
                while (rs.next()) {
                    primers.add((CharSequence) rs.getString("Sequence"));
                }
                primers.removeAll(CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + x[0] + ".csv"));
                if (primers.size() != 0) {
                    primers.forEach(y -> {
                        try {
                            // finish update
                            st.setString(1, x[1]);
                            st.setString(2, x[2]);
                            st.setString(3, y.toString());
                            st.setInt(4, bps);
                            st.addBatch();
                        } catch (SQLException e) {
                            e.printStackTrace();
                        }
                    });
                    st.executeBatch();
                    db.commit();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        });
    }
    System.out.println("Common Updated");
    st.close();
}
From source file:com.ethercamp.harmony.service.ContractsService.java
private String getValidatedAbi(String address, String contractName, CompilationResult result) {
    log.debug("getValidatedAbi address:{}, contractName: {}", address, contractName);

    final ContractMetadata metadata = result.getContracts().get(contractName);
    if (metadata == null) {
        throw validationError("Contract with name '%s' not found in uploaded sources.", contractName);
    }

    final String abi = metadata.getAbi();
    final CallTransaction.Contract contract = new CallTransaction.Contract(abi);
    if (ArrayUtils.isEmpty(contract.functions)) {
        throw validationError("Contract with name '%s' not found in uploaded sources.", contractName);
    }

    final List<CallTransaction.FunctionType> funcTypes = asList(CallTransaction.FunctionType.function,
            CallTransaction.FunctionType.constructor);
    final Set<String> funcHashes = stream(contract.functions)
            .filter(function -> funcTypes.contains(function.type)).map(func -> {
                log.debug("Compiled funcHash " + toHexString(func.encodeSignature()) + " " + func.name);
                return toHexString(func.encodeSignature());
            }).collect(toSet());

    final String code = toHexString(ethereum.getRepository().getCode(Hex.decode(address)));
    final String asm = getAsm(code);
    if (isBlank(asm)) {
        throw validationError("Wrong account type: account with address '%s' hasn't any code.", address);
    }

    final Set<String> extractFuncHashes = extractFuncHashes(asm);
    extractFuncHashes.forEach(h -> log.debug("Extracted ASM funcHash " + h));
    extractFuncHashes.forEach(funcHash -> {
        if (!funcHashes.contains(funcHash)) {
            throw validationError("Incorrect code version: function with hash '%s' not found.", funcHash);
        }
    });
    log.debug("Contract is valid " + contractName);
    return abi;
}
From source file:com.eventsourcing.index.NavigableIndexTest.java
@Test
public void minMax() {
    Random random = new Random();
    IndexedCollection<EntityHandle<Car>> collection = new ConcurrentIndexedCollection<>();
    SortedKeyStatisticsIndex<HybridTimestamp, EntityHandle<Car>> MODEL_INDEX = onAttribute(Car.TIMESTAMP);
    WrappedSimpleIndex<Car, HybridTimestamp> i = new WrappedSimpleIndex<>(
            (Function<Car, HybridTimestamp>) StandardEntity::timestamp);
    i.setAttribute(Car.TIMESTAMP);
    if (MODEL_INDEX.supportsQuery(new Min<>(i), noQueryOptions())
            && MODEL_INDEX.supportsQuery(new Max<>(i), noQueryOptions())) {
        MODEL_INDEX.clear(noQueryOptions());
        collection.addIndex(MODEL_INDEX);
        QueryOptions queryOptions = queryOptions();
        queryOptions.put(Iterable.class, collection);
        assertTrue(collection.retrieve(max(i), queryOptions).isEmpty());
        assertTrue(collection.retrieve(min(i), queryOptions).isEmpty());
        Set<EntityHandle<Car>> cars1 = CarFactory.createCollectionOfCars(100_000);
        cars1.forEach(car -> car.get().timestamp(new HybridTimestamp(random.nextInt(), 0)));
        collection.addAll(cars1);
        long t1 = System.nanoTime();
        HybridTimestamp max1 = collection.retrieve(max(i), queryOptions).uniqueResult().get().timestamp();
        HybridTimestamp min1 = collection.retrieve(min(i), queryOptions).uniqueResult().get().timestamp();
        long t2 = System.nanoTime();
        // make sure query scoping is respected
        assertNotEquals(collection.retrieve(scoped(equal(Car.MODEL, "Focus"), max(i)), queryOptions)
                        .uniqueResult().get().uuid(),
                collection.retrieve(max(i), queryOptions).uniqueResult().get().uuid());
        assertNotEquals(collection.retrieve(scoped(equal(Car.MODEL, "Focus"), min(i)), queryOptions)
                        .uniqueResult().get().uuid(),
                collection.retrieve(min(i), queryOptions).uniqueResult().get().uuid());
        assertFalse(cars1.stream().anyMatch(c -> c.get().timestamp().getSerializableComparable()
                .compareTo(max1.getSerializableComparable()) > 0));
        assertFalse(cars1.stream().anyMatch(c -> c.get().timestamp().getSerializableComparable()
                .compareTo(min1.getSerializableComparable()) < 0));
        Set<EntityHandle<Car>> cars2 = CarFactory.createCollectionOfCars(100_000);
        cars2.forEach(car -> car.get().timestamp(new HybridTimestamp(random.nextInt(), random.nextInt(2))));
        collection.addAll(cars2);
        long t1_ = System.nanoTime();
        HybridTimestamp max2 = collection.retrieve(max(i), queryOptions).uniqueResult().get().timestamp();
        HybridTimestamp min2 = collection.retrieve(min(i), queryOptions).uniqueResult().get().timestamp();
        long t2_ = System.nanoTime();
        assertFalse(cars2.stream().anyMatch(c -> c.get().timestamp().getSerializableComparable()
                .compareTo(max2.getSerializableComparable()) > 0));
        assertFalse(cars2.stream().anyMatch(c -> c.get().timestamp().getSerializableComparable()
                .compareTo(min2.getSerializableComparable()) < 0));
        MODEL_INDEX.clear(noQueryOptions());
    }
}
From source file:fr.lepellerin.ecole.service.internal.CantineServiceImpl.java
@Override
@Transactional(readOnly = true)
public List<ComboItemDto> getMoisOuvertCantine() throws TechnicalException {
    final Activite activite = this.getCantineActivite();
    final List<Ouverture> ouvertures = this.ouvertureRepository.findByActivite(activite);
    final Set<YearMonth> moisActs = new HashSet<>();
    moisActs.add(YearMonth.now());
    ouvertures.sort((o1, o2) -> o1.getDate().compareTo(o2.getDate()));
    ouvertures.forEach(o -> {
        moisActs.add(YearMonth.from(((java.sql.Date) o.getDate()).toLocalDate()));
    });
    final List<ComboItemDto> comboMois = new ArrayList<>();
    moisActs.forEach(ma -> {
        final Integer id = Integer
                .valueOf(ma.format(DateTimeFormatter.ofPattern(GeDateUtils.DATE_FORMAT_YYYYMM)));
        final String libelle = ma.format(DateTimeFormatter.ofPattern(GeDateUtils.DATE_FORMAT_ANNEE_MOIS_FULL));
        comboMois.add(new ComboItemDto(id, libelle));
    });
    comboMois.sort((c1, c2) -> c1.getId().compareTo(c2.getId()));
    return comboMois;
}
From source file:com.act.lcms.db.analysis.AnalysisHelper.java
private static Map<Pair<String, Double>, MS1ScanForWellAndMassCharge> getMultipleMS1s(MS1 ms1,
        Set<Pair<String, Double>> metlinMasses, String ms1File)
        throws ParserConfigurationException, IOException, XMLStreamException {

    // In order for this to sit well with the data model we'll need to ensure the keys are all unique.
    Set<String> uniqueKeys = new HashSet<>();
    metlinMasses.stream().map(Pair::getLeft).forEach(x -> {
        if (uniqueKeys.contains(x)) {
            throw new RuntimeException(
                    String.format("Assumption violation: found duplicate metlin mass keys: %s", x));
        }
        uniqueKeys.add(x);
    });

    Iterator<LCMSSpectrum> ms1Iterator = new LCMSNetCDFParser().getIterator(ms1File);

    Map<Double, List<XZ>> scanLists = new HashMap<>(metlinMasses.size());
    // Initialize reading buffers for all of the target masses.
    metlinMasses.forEach(x -> {
        if (!scanLists.containsKey(x.getRight())) {
            scanLists.put(x.getRight(), new ArrayList<>());
        }
    });
    // De-dupe by mass in case we have exact duplicates, sort for well-ordered extractions.
    List<Double> sortedMasses = new ArrayList<>(scanLists.keySet());

    /* Note: this operation is O(n * m) where n is the number of (mass, intensity) readings from the scan
     * and m is the number of mass targets specified.  We might be able to get this down to O(m log n), but
     * we'll save that for once we get this working at all. */
    while (ms1Iterator.hasNext()) {
        LCMSSpectrum timepoint = ms1Iterator.next();

        // get all (mz, intensity) at this timepoint
        List<Pair<Double, Double>> intensities = timepoint.getIntensities();

        // for this timepoint, extract each of the ion masses from the METLIN set
        for (Double ionMz : sortedMasses) {
            // this time point is valid to look at if its max intensity is around
            // the mass we care about. So lets first get the max peak location
            double intensityForMz = ms1.extractMZ(ionMz, intensities);

            // the above is Pair(mz_extracted, intensity), where mz_extracted = mz
            // we now add the timepoint val and the intensity to the output
            XZ intensityAtThisTime = new XZ(timepoint.getTimeVal(), intensityForMz);
            scanLists.get(ionMz).add(intensityAtThisTime);
        }
    }

    Map<Pair<String, Double>, MS1ScanForWellAndMassCharge> finalResults = new HashMap<>(metlinMasses.size());

    /* Note: we might be able to squeeze more performance out of this by computing the
     * stats once per trace and then storing them.  But the time to compute will probably
     * be dwarfed by the time to extract the data (assuming deduplication was done ahead
     * of time), so we'll leave it as is for now. */
    for (Pair<String, Double> pair : metlinMasses) {
        String label = pair.getLeft();
        Double mz = pair.getRight();
        MS1ScanForWellAndMassCharge result = new MS1ScanForWellAndMassCharge();

        result.setMetlinIons(Collections.singletonList(label));
        result.getIonsToSpectra().put(label, scanLists.get(mz));
        ms1.computeAndStorePeakProfile(result, label);

        // DO NOT use isGoodPeak here.  We want positive and negative results alike.

        // There's only one ion in this scan, so just use its max.
        Double maxIntensity = result.getMaxIntensityForIon(label);
        result.setMaxYAxis(maxIntensity);
        // How/why is this not IonsToMax?  Just set it as such for this.
        result.setIndividualMaxIntensities(Collections.singletonMap(label, maxIntensity));

        finalResults.put(pair, result);
    }

    return finalResults;
}
From source file:org.onosproject.icona.domainmgr.impl.DistributedDomainStore.java
private void clear(DomainId domainId) {
    Set<Pair<DomainId, DomainId>> domainPairs = new HashSet<>();
    // find all domains connected with the one to be removed and remove related links
    domainIdLinkSetMap.keySet().forEach(endDomains -> {
        if (endDomains.getLeft().equals(domainId) || endDomains.getRight().equals(domainId)) {
            domainPairs.add(endDomains);
        }
    });
    domainPairs.forEach(pair -> domainIdLinkSetMap.remove(pair));
    domainIdDeviceIdsMap.remove(domainId);
    domainIdHostIdsMap.remove(domainId);
}