List of usage examples for java.util.function.BiConsumer.accept
void accept(T t, U u);
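BiConsumer is the two-argument form of Consumer: accept performs a side effect on both arguments and returns nothing. A minimal self-contained sketch before the real-world examples below; the class name and data are illustrative only:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.BiConsumer;

public class BiConsumerDemo {
    public static void main(String[] args) {
        Map<String, Integer> wordLengths = new LinkedHashMap<>();

        // accept(t, u) consumes both arguments; there is no return value.
        BiConsumer<Map<String, Integer>, String> recordLength =
                (map, word) -> map.put(word, word.length());

        recordLength.accept(wordLengths, "lambda");
        recordLength.accept(wordLengths, "accept");

        System.out.println(wordLengths); // {lambda=6, accept=6}
    }
}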
From source file:fi.hsl.parkandride.back.RequestLogDao.java
private <T> void insertBatch(Collection<T> batch, RelationalPath<?> expression,
        BiConsumer<SQLInsertClause, T> processor) {
    if (batch.isEmpty()) {
        return;
    }
    final SQLInsertClause insert = queryFactory.insert(expression);
    batch.forEach(item -> {
        processor.accept(insert, item);
        insert.addBatch();
    });
    insert.execute();
}
From source file:fi.hsl.parkandride.back.RequestLogDao.java
private <T> void updateBatch(Collection<T> batch, RelationalPath<?> expression,
        BiConsumer<SQLUpdateClause, T> processor) {
    if (batch.isEmpty()) {
        return;
    }
    final SQLUpdateClause update = queryFactory.update(expression);
    batch.forEach(item -> {
        processor.accept(update, item);
        update.addBatch();
    });
    update.execute();
}
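Both helpers share the same shape: the caller's BiConsumer binds one item's values onto the shared clause, and the helper takes care of addBatch() and execute(). A hypothetical call-site sketch, assuming a qRequestLog QueryDSL metamodel and entry accessors that are not part of the original:

// Hypothetical caller: the lambda is invoked as processor.accept(insert, entry).
insertBatch(entries, qRequestLog, (insert, entry) -> insert
        .set(qRequestLog.source, entry.getSource())
        .set(qRequestLog.count, entry.getCount()));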
From source file:me.Wundero.Ray.variables.Variables.java
/**
 * Register a new variable wrapper.
 */
public boolean registerWrapper(String key, BiConsumer<Variable, Text> c, int useless) {
    return registerWrapper(key, (BiFunction<Variable, Text, Text>) (v, t) -> {
        c.accept(v, t);
        return t;
    });
}
From source file:de.qaware.chronix.importer.csv.FileImporter.java
/**
 * Reads the given file / folder and calls the bi-consumers with the extracted points.
 *
 * @param points    holds the time range (first/last instant) per imported time series
 * @param folder    a csv file, or a folder containing csv / gzipped csv files
 * @param databases sinks that receive the points of each imported time series
 * @return a pair of (number of time series, number of points) imported
 */
public Pair<Integer, Integer> importPoints(Map<Attributes, Pair<Instant, Instant>> points, File folder,
        BiConsumer<List<ImportPoint>, Attributes>... databases) {
    final AtomicInteger pointCounter = new AtomicInteger(0);
    final AtomicInteger tsCounter = new AtomicInteger(0);

    final File metricsFile = new File(METRICS_FILE_PATH);
    LOGGER.info("Writing imported metrics to {}", metricsFile);
    LOGGER.info("Import supports csv files as well as gz compressed csv files.");

    try {
        final FileWriter metricsFileWriter = new FileWriter(metricsFile);

        Collection<File> files = new ArrayList<>();
        if (folder.isFile()) {
            files.add(folder);
        } else {
            files.addAll(FileUtils.listFiles(folder, new String[] { "gz", "csv" }, true));
        }

        AtomicInteger counter = new AtomicInteger(0);

        files.parallelStream().forEach(file -> {
            SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
            NumberFormat nf = DecimalFormat.getInstance(numberLocal);

            InputStream inputStream = null;
            BufferedReader reader = null;
            try {
                inputStream = new FileInputStream(file);
                if (file.getName().endsWith("gz")) {
                    inputStream = new GZIPInputStream(inputStream);
                }
                reader = new BufferedReader(new InputStreamReader(inputStream));

                // Read the first line
                String headerLine = reader.readLine();
                if (headerLine == null || headerLine.isEmpty()) {
                    boolean deleted = deleteFile(file, inputStream, reader);
                    LOGGER.debug("File {} is empty. File removed: {}", file.getName(), deleted);
                    return;
                }

                // Extract the attributes from the file name,
                // e.g. first_second_third_attribute.csv
                String[] fileNameMetaData = file.getName().split("_");
                String[] metrics = headerLine.split(csvDelimiter);

                Map<Integer, Attributes> attributesPerTimeSeries = new HashMap<>(metrics.length);

                for (int i = 1; i < metrics.length; i++) {
                    String metric = metrics[i];
                    String metricOnlyAscii = Normalizer.normalize(metric, Normalizer.Form.NFD)
                            .replaceAll("[^\\x00-\\x7F]", "");
                    Attributes attributes = new Attributes(metricOnlyAscii, fileNameMetaData);

                    // Check if the meta data is completely set
                    if (isEmpty(attributes)) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes contains empty values {}. File {} deleted {}", attributes,
                                file.getName(), deleted);
                        continue;
                    }

                    if (attributes.getMetric().equals(".*")) {
                        boolean deleted = deleteFile(file, inputStream, reader);
                        LOGGER.info("Attributes metric {}. File {} deleted {}", attributes.getMetric(),
                                file.getName(), deleted);
                        continue;
                    }

                    attributesPerTimeSeries.put(i, attributes);
                    tsCounter.incrementAndGet();
                }

                Map<Integer, List<ImportPoint>> dataPoints = new HashMap<>();

                String line;
                while ((line = reader.readLine()) != null) {
                    String[] splits = line.split(csvDelimiter);
                    String date = splits[0];

                    Instant dateObject;
                    if (instantDate) {
                        dateObject = Instant.parse(date);
                    } else if (sdfDate) {
                        dateObject = sdf.parse(date).toInstant();
                    } else {
                        dateObject = Instant.ofEpochMilli(Long.valueOf(date));
                    }

                    for (int column = 1; column < splits.length; column++) {
                        String value = splits[column];
                        double numericValue = nf.parse(value).doubleValue();

                        ImportPoint point = new ImportPoint(dateObject, numericValue);
                        if (!dataPoints.containsKey(column)) {
                            dataPoints.put(column, new ArrayList<>());
                        }
                        dataPoints.get(column).add(point);
                        pointCounter.incrementAndGet();
                    }
                }

                dataPoints.values().forEach(Collections::sort);

                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);

                dataPoints.forEach((key, importPoints) -> {
                    for (BiConsumer<List<ImportPoint>, Attributes> database : databases) {
                        database.accept(importPoints, attributesPerTimeSeries.get(key));
                    }
                    points.put(attributesPerTimeSeries.get(key),
                            Pair.of(importPoints.get(0).getDate(),
                                    importPoints.get(importPoints.size() - 1).getDate()));

                    // Write the stats to the file
                    Instant start = importPoints.get(0).getDate();
                    Instant end = importPoints.get(importPoints.size() - 1).getDate();
                    try {
                        writeStatsLine(metricsFileWriter, attributesPerTimeSeries.get(key), start, end);
                    } catch (IOException e) {
                        LOGGER.error("Could not write stats line", e);
                    }
                    LOGGER.info("{} of {} time series imported", counter.incrementAndGet(), tsCounter.get());
                });
            } catch (Exception e) {
                LOGGER.info("Exception while reading points.", e);
            } finally {
                // Close all streams
                IOUtils.closeQuietly(reader);
                IOUtils.closeQuietly(inputStream);
            }
        });
    } catch (Exception e) {
        LOGGER.error("Exception occurred during reading points.", e);
    }
    return Pair.of(tsCounter.get(), pointCounter.get());
}
From source file:at.ac.tuwien.qse.sepm.service.impl.SynchronizationServiceImpl.java
@Override
public void subscribeError(BiConsumer<Operation, ServiceException> callback) {
    repository.addListener(new AsyncPhotoRepository.AsyncListener() {
        @Override
        public void onError(AsyncPhotoRepository repository, Operation operation, DAOException error) {
            ServiceException ex = new ServiceException(error);
            LOGGER.debug("receiving error notification with {}", operation);
            callback.accept(operation, ex);
        }
    });
}
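From the caller's side the subscription is just a two-argument lambda; the adapter above translates each DAOException into a ServiceException before invoking it. A minimal hypothetical subscription (the service variable is an assumption):

// Hypothetical subscriber: invoked as callback.accept(operation, ex) on each error.
service.subscribeError((operation, ex) ->
        LOGGER.warn("operation {} failed", operation, ex));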
From source file:com.heliosdecompiler.helios.controller.transformers.decompilers.DecompilerController.java
public void decompile(OpenedFile file, String path, BiConsumer<Boolean, String> consumer) {
    backgroundTaskHelper
            .submit(new BackgroundTask(Message.TASK_DECOMPILE_FILE.format(path, getDisplayName()), true, () -> {
                try {
                    String pre = preDecompile(file, path);
                    if (pre != null) {
                        consumer.accept(false, pre);
                    } else {
                        byte[] data = file.getContent(path);
                        ClassData cd = ClassData.construct(data);

                        TransformationResult<String> transformationResult = decompiler
                                .decompile(Collections.singleton(cd), createSettings(), getClasspath(file));

                        Map<String, String> results = transformationResult.getTransformationData();

                        System.out.println("Results: " + results.keySet());
                        System.out.println("Looking for: " + StringEscapeUtils.escapeJava(cd.getInternalName()));

                        if (results.containsKey(cd.getInternalName())) {
                            consumer.accept(true, results.get(cd.getInternalName()));
                        } else {
                            StringBuilder output = new StringBuilder();
                            output.append("An error has occurred while decompiling this file.\r\n")
                                    .append("If you have not tried another decompiler, try that. Otherwise, you're out of luck.\r\n\r\n")
                                    .append("stdout:\r\n").append(transformationResult.getStdout())
                                    .append("\r\nstderr:\r\n").append(transformationResult.getStderr());
                            consumer.accept(false, output.toString());
                        }
                    }
                } catch (Throwable e) {
                    StringWriter writer = new StringWriter();
                    e.printStackTrace(new PrintWriter(writer));

                    StringBuilder output = new StringBuilder();
                    output.append("An error has occurred while decompiling this file.\r\n")
                            .append("If you have not tried another decompiler, try that. Otherwise, you're out of luck.\r\n\r\n")
                            .append("Exception:\r\n").append(writer.toString());
                    consumer.accept(false, output.toString());
                }
            }, () -> {
                consumer.accept(false, "Decompilation aborted");
            }));
}
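Here the BiConsumer doubles as a result callback: the boolean flags success, the string carries either decompiled source or an error report. A minimal hypothetical caller; the controller variable and both UI helpers are assumptions:

// Hypothetical caller: the success flag decides between showing source and showing an error.
controller.decompile(openedFile, "com/example/Foo.class", (success, text) -> {
    if (success) {
        showSource(text);  // hypothetical UI helper
    } else {
        showError(text);   // hypothetical UI helper
    }
});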
From source file:reactivity.SseController.java
/**
 * Generates a random artifact.
 *
 * @return the timestamp associated to that artifact
 */
@PostMapping("/sse/artifacts")
long artifact() {
    final Map<String, Object> categories = new HashMap<>();
    final BiConsumer<String, String[]> populator = (key, values) -> {
        final String value = values[ThreadLocalRandom.current().nextInt(0, values.length)];
        if (value != null) {
            categories.put(key, value);
        }
    };

    populator.accept("assignee", new String[] { "ndamie", "gdrouet", "asanchez", "qlevaslot", "cazelart",
            "fclety", "hazarian", null });
    populator.accept("category", new String[] { "bug", "feature", "question", null });
    populator.accept("description",
            new String[] { "foo bar baz", "some description", "reactivity is awesome", null });
    populator.accept("color", new String[] { "blue", "green", "red", "yellow", null });
    populator.accept("priority", new String[] { "high", "low", "medium", null });

    final Group group;
    if (Math.random() < 0.5) {
        group = new Group("personal", "gdrouet");
    } else {
        group = new Group("organization", "org" + (char) ThreadLocalRandom.current().nextInt(65, 70));
    }

    final Artifact a = new Artifact("default", group, categories);
    final Observable<JsonDocument> o = repository.add(a);
    o.subscribe(d -> replayProcessor.onNext(sse(a)));
    o.subscribe(d -> timeseries(a));
    return a.getTimestamp();
}
From source file:org.trustedanalytics.cloud.cc.CcClientTest.java
private void verifyAddingUser(BiConsumer<UUID, UUID> consumer, String apiUrl1, String apiUrl2) {
    UUID guid = UUID.randomUUID();

    consumer.accept(guid, guid);

    verify(template).put(eq(apiUrl1), eq(null), anyMapOf(String.class, Object.class));
    verify(template).put(eq(apiUrl2), eq(null), anyMapOf(String.class, Object.class));
}
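A method reference fits the BiConsumer<UUID, UUID> slot, which keeps the helper reusable across client methods. A hypothetical invocation; the client method name and both URLs are assumptions, not confirmed by the excerpt:

// Hypothetical test: verifyAddingUser invokes the reference as consumer.accept(guid, guid).
verifyAddingUser(client::assignUserToOrganization,
        "http://api.example.com/v2/organizations/{org}/users/{user}",
        "http://api.example.com/v2/users/{user}/organizations/{org}");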
From source file:de.ks.idnadrev.information.chart.ChartDataEditor.java
private EventHandler<KeyEvent> getInputKeyHandler(int rowNum, int column,
        BiFunction<Integer, Integer, TextField> nextTextField, BiConsumer<Integer, Integer> clipBoardHandler) {
    return e -> {
        KeyCode code = e.getCode();
        if (e.isControlDown() && code == KeyCode.V) {
            clipBoardHandler.accept(rowNum, column);
            e.consume();
        }
        boolean selectNext = false;
        if (e.getCode() == KeyCode.ENTER && !e.isControlDown()) {
            selectNext = true;
        }
        if (selectNext) {
            int next = rowNum + 1;
            TextField textField = nextTextField.apply(next, column);
            if (textField != null) {
                textField.requestFocus();
            }
            e.consume();
        }
    };
}
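The BiConsumer carries the (row, column) coordinates of the edited cell to the paste handler. A hedged wiring sketch using the standard JavaFX setOnKeyPressed hook; the cell lookup and both method references are assumptions:

// Hypothetical wiring: Ctrl+V in this cell calls pasteFromClipboard(row, column).
TextField cell = getCellAt(row, column);  // hypothetical lookup
cell.setOnKeyPressed(getInputKeyHandler(row, column, this::getCellAt, this::pasteFromClipboard));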
From source file:com.github.horrorho.inflatabledonkey.cloud.AssetDownloader.java
void get(HttpClient httpClient, Function<ChunkServer.ChunkReference, Optional<byte[]>> getKeyEncryptionKey,
        List<Asset> assetList, Voodoo voodoo, BiConsumer<Asset, List<Chunk>> consumer) {
    if (assetList.isEmpty()) {
        logger.error("-- get() - empty asset list");
        return;
    }
    Asset primaryAsset = assetList.get(0);
    Map<ChunkServer.ChunkReference, Chunk> chunkData
            = fetchChunkData(httpClient, getKeyEncryptionKey, primaryAsset, voodoo);
    assembleAssetChunkList(chunkData, primaryAsset, voodoo)
            .ifPresent(fileChunkList -> assetList.forEach(a -> consumer.accept(a, fileChunkList)));
}
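Note the fan-out: the chunk list is assembled once from the primary asset and then handed to the consumer for every asset in the list. A minimal hypothetical consumer, assuming a plain logging sink:

// Hypothetical consumer: called once per asset with the shared chunk list.
BiConsumer<Asset, List<Chunk>> consumer =
        (asset, chunks) -> logger.info("-- asset {} assembled from {} chunks", asset, chunks.size());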