List of usage examples for java.util.concurrent TimeUnit HOURS
TimeUnit HOURS
To view the source code for java.util.concurrent TimeUnit HOURS, click the source link below.
From source file:org.springframework.session.data.gemfire.AbstractGemFireOperationsSessionRepositoryTest.java
@Test public void isExpiredIsFalseWhenSessionIsActive() { final int expectedMaxInactiveIntervalInSeconds = (int) TimeUnit.HOURS.toSeconds(2); AbstractGemFireOperationsSessionRepository.GemFireSession session = AbstractGemFireOperationsSessionRepository.GemFireSession .create(expectedMaxInactiveIntervalInSeconds); assertThat(session).isNotNull();//from w w w.j ava 2s . c o m assertThat(session.getMaxInactiveIntervalInSeconds()).isEqualTo(expectedMaxInactiveIntervalInSeconds); final long now = System.currentTimeMillis(); session.setLastAccessedTime(now); assertThat(session.getLastAccessedTime()).isEqualTo(now); assertThat(session.isExpired()).isFalse(); }
From source file:org.ow2.proactive.scheduler.common.job.factories.Job2XMLTransformer.java
private static String formatDate(long millis) { long hours = TimeUnit.MILLISECONDS.toHours(millis); millis -= TimeUnit.HOURS.toMillis(hours); long minutes = TimeUnit.MILLISECONDS.toMinutes(millis); millis -= TimeUnit.MINUTES.toMillis(minutes); long seconds = TimeUnit.MILLISECONDS.toSeconds(millis); String formatted = String.format("%02d:%02d:%02d", hours, minutes, seconds); // replace heading 00: as it's not accepted by the schema validation return (formatted.replaceFirst("^(00:)+", "")); }
From source file:org.springframework.session.data.gemfire.AbstractGemFireOperationsSessionRepositoryTest.java
@Test public void isExpiredIsTrueWhenSessionIsInactive() { final int expectedMaxInactiveIntervalInSeconds = 60; AbstractGemFireOperationsSessionRepository.GemFireSession session = AbstractGemFireOperationsSessionRepository.GemFireSession .create(expectedMaxInactiveIntervalInSeconds); assertThat(session).isNotNull();// w w w . jav a2 s .c o m assertThat(session.getMaxInactiveIntervalInSeconds()).isEqualTo(expectedMaxInactiveIntervalInSeconds); final long twoHoursAgo = (System.currentTimeMillis() - TimeUnit.HOURS.toMillis(2)); session.setLastAccessedTime(twoHoursAgo); assertThat(session.getLastAccessedTime()).isEqualTo(twoHoursAgo); assertThat(session.isExpired()).isTrue(); }
From source file:org.dllearner.algorithms.qtl.experiments.QTLEvaluation.java
/**
 * Runs the QTL evaluation benchmark end-to-end.
 *
 * Loads the dataset's SPARQL queries, precomputes positive/negative example candidates per
 * query, drops queries with no results or fewer than 2 positive examples, then for every
 * combination of heuristic x measure x #examples x noise level runs QTL2Disjunctive learning
 * over a fixed thread pool. Per configuration it collects precision/recall/F-measure/predictive
 * accuracy/Matthews-correlation statistics for (a) a baseline solution, (b) the best returned
 * solution, and (c) the best extensionally covering solution, writes per-config .log/.stats
 * files plus an examples-vs-noise TSV, and optionally persists results to a DB and sends an
 * email notification on completion.
 *
 * @param maxNrOfProcessedQueries upper bound on queries to process; -1 means "all loaded queries"
 * @param maxTreeDepth maximum query-tree depth used by the tree factory
 * @param exampleInterval if non-null, replaces the configured #examples intervals
 * @param noiseInterval if non-null, replaces the configured noise intervals
 * @param measures if non-null, replaces the configured heuristic measures
 * @throws Exception on any unrecoverable failure (per-query failures are caught, logged to a
 *         "failed-*" file, and flagged so the config's aggregate stats are skipped)
 *
 * NOTE(review): each worker waits at most 12 hours via tp.awaitTermination — confirm that bound
 * suits the deployment. The final log line appends "ms." to an H:MM:SS-formatted duration
 * (DurationFormatUtils.formatDurationHMS), which is misleading. Text below is preserved
 * byte-identical; the scrape split some comments/strings across physical lines.
 */
public void run(int maxNrOfProcessedQueries, int maxTreeDepth, int[] exampleInterval, double[] noiseInterval, HeuristicType[] measures) throws Exception { this.maxTreeDepth = maxTreeDepth; queryTreeFactory.setMaxDepth(maxTreeDepth); if (exampleInterval != null) { nrOfExamplesIntervals = exampleInterval; }/*from ww w . j a v a 2 s . c om*/ if (noiseInterval != null) { this.noiseIntervals = noiseInterval; } if (measures != null) { this.measures = measures; } logger.info("Started QTL evaluation..."); long t1 = System.currentTimeMillis(); List<String> queries = dataset.getSparqlQueries().values().stream().map(q -> q.toString()) .collect(Collectors.toList()); logger.info("#loaded queries: " + queries.size()); // filter for debugging purposes queries = queries.stream().filter(q -> tokens.stream().noneMatch(t -> !q.contains(t))) .collect(Collectors.toList()); if (maxNrOfProcessedQueries == -1) { maxNrOfProcessedQueries = queries.size(); } // queries = filter(queries, (int) Math.ceil((double) maxNrOfProcessedQueries / maxTreeDepth)); // queries = queries.subList(0, Math.min(queries.size(), maxNrOfProcessedQueries)); logger.info("#queries to process: " + queries.size()); // generate examples for each query logger.info("precomputing pos. and neg. examples..."); final Map<String, ExampleCandidates> query2Examples = new HashMap<>(); for (String query : queries) {//if(!(query.contains("Borough_(New_York_City)")))continue; query2Examples.put(query, generateExamples(query)); } logger.info("precomputing pos. and neg. examples finished."); // check for queries that do not return any result (should not happen, but we never know) Set<String> emptyQueries = query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.isEmpty()).map(e -> e.getKey()) .collect(Collectors.toSet()); logger.info("got {} empty queries.", emptyQueries.size()); queries.removeAll(emptyQueries); // min. 
pos examples Set<String> lowNrOfExamplesQueries = query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.size() < 2).map(e -> e.getKey()) .collect(Collectors.toSet()); logger.info("got {} queries with < 2 pos. examples.", emptyQueries.size()); queries.removeAll(lowNrOfExamplesQueries); final int totalNrOfQTLRuns = heuristics.length * this.measures.length * nrOfExamplesIntervals.length * noiseIntervals.length * queries.size(); logger.info("#QTL runs: " + totalNrOfQTLRuns); final AtomicInteger currentNrOfFinishedRuns = new AtomicInteger(0); // loop over heuristics for (final QueryTreeHeuristic heuristic : heuristics) { final String heuristicName = heuristic.getClass().getAnnotation(ComponentAnn.class).shortName(); // loop over heuristics measures for (HeuristicType measure : this.measures) { final String measureName = measure.toString(); heuristic.setHeuristicType(measure); double[][] data = new double[nrOfExamplesIntervals.length][noiseIntervals.length]; // loop over number of positive examples for (int i = 0; i < nrOfExamplesIntervals.length; i++) { final int nrOfExamples = nrOfExamplesIntervals[i]; // loop over noise value for (int j = 0; j < noiseIntervals.length; j++) { final double noise = noiseIntervals[j]; // check if not already processed File logFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".log"); File statsFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".stats"); if (!override && logFile.exists() && statsFile.exists()) { logger.info( "Eval config already processed. 
For re-running please remove corresponding output files."); continue; } FileAppender appender = null; try { appender = new FileAppender(new SimpleLayout(), logFile.getPath(), false); Logger.getRootLogger().addAppender(appender); } catch (IOException e) { e.printStackTrace(); } logger.info("#examples: " + nrOfExamples + " noise: " + noise); final DescriptiveStatistics nrOfReturnedSolutionsStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselinePrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselinePredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionPrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionPredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionRuntimeStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionPrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionPredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionMathCorrStats = new 
SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionPositionStats = new SynchronizedDescriptiveStatistics(); MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).reset(); MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).reset(); ExecutorService tp = Executors.newFixedThreadPool(nrOfThreads); // indicates if the execution for some of the queries failed final AtomicBoolean failed = new AtomicBoolean(false); // loop over SPARQL queries for (final String sparqlQuery : queries) { tp.submit(() -> { logger.info("##############################################################"); logger.info("Processing query\n" + sparqlQuery); try { ExamplesWrapper examples = query2Examples.get(sparqlQuery).get(nrOfExamples, nrOfExamples, noise); logger.info( "pos. examples:\n" + Joiner.on("\n").join(examples.correctPosExamples)); logger.info( "neg. examples:\n" + Joiner.on("\n").join(examples.correctNegExamples)); // write examples to disk File dir = new File(benchmarkDirectory, "data/" + hash(sparqlQuery)); dir.mkdirs(); Files.write(Joiner.on("\n").join(examples.correctPosExamples), new File(dir, "examples_" + nrOfExamples + "_" + noise + ".tp"), Charsets.UTF_8); Files.write(Joiner.on("\n").join(examples.correctNegExamples), new File(dir, "examples_" + nrOfExamples + "_" + noise + ".tn"), Charsets.UTF_8); Files.write(Joiner.on("\n").join(examples.falsePosExamples), new File(dir, "examples_" + nrOfExamples + "_" + noise + ".fp"), Charsets.UTF_8); // compute baseline logger.info("Computing baseline..."); RDFResourceTree baselineSolution = applyBaseLine(examples, Baseline.MOST_INFORMATIVE_EDGE_IN_EXAMPLES); logger.info("Baseline solution:\n" + owlRenderer .render(QueryTreeUtils.toOWLClassExpression(baselineSolution))); logger.info("Evaluating baseline..."); Score baselineScore = computeScore(sparqlQuery, baselineSolution, noise); logger.info("Baseline score:\n" + baselineScore); String baseLineQuery = 
QueryTreeUtils.toSPARQLQueryString(baselineSolution, dataset.getBaseIRI(), dataset.getPrefixMapping()); baselinePrecisionStats.addValue(baselineScore.precision); baselineRecallStats.addValue(baselineScore.recall); baselineFMeasureStats.addValue(baselineScore.fmeasure); baselinePredAccStats.addValue(baselineScore.predAcc); baselineMathCorrStats.addValue(baselineScore.mathCorr); // run QTL PosNegLPStandard lp = new PosNegLPStandard(); lp.setPositiveExamples(examples.posExamplesMapping.keySet()); lp.setNegativeExamples(examples.negExamplesMapping.keySet()); QTL2Disjunctive la = new QTL2Disjunctive(lp, qef); la.setRenderer(new org.dllearner.utilities.owl.DLSyntaxObjectRenderer()); la.setReasoner(dataset.getReasoner()); la.setEntailment(Entailment.SIMPLE); la.setTreeFactory(queryTreeFactory); la.setPositiveExampleTrees(examples.posExamplesMapping); la.setNegativeExampleTrees(examples.negExamplesMapping); la.setNoise(noise); la.setHeuristic(heuristic); la.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); la.setMaxTreeComputationTimeInSeconds(maxExecutionTimeInSeconds); la.init(); la.start(); List<EvaluatedRDFResourceTree> solutions = new ArrayList<>(la.getSolutions()); // List<EvaluatedRDFResourceTree> solutions = generateSolutions(examples, noise, heuristic); nrOfReturnedSolutionsStats.addValue(solutions.size()); // the best returned solution by QTL EvaluatedRDFResourceTree bestSolution = solutions.get(0); logger.info("Got " + solutions.size() + " query trees."); logger.info("Best computed solution:\n" + render(bestSolution.asEvaluatedDescription())); logger.info("QTL Score:\n" + bestSolution.getTreeScore()); long runtimeBestSolution = la.getTimeBestSolutionFound(); bestReturnedSolutionRuntimeStats.addValue(runtimeBestSolution); // convert to SPARQL query RDFResourceTree tree = bestSolution.getTree(); // filter.filter(tree); String learnedSPARQLQuery = QueryTreeUtils.toSPARQLQueryString(tree, dataset.getBaseIRI(), dataset.getPrefixMapping()); // compute score 
Score score = computeScore(sparqlQuery, tree, noise); bestReturnedSolutionPrecisionStats.addValue(score.precision); bestReturnedSolutionRecallStats.addValue(score.recall); bestReturnedSolutionFMeasureStats.addValue(score.fmeasure); bestReturnedSolutionPredAccStats.addValue(score.predAcc); bestReturnedSolutionMathCorrStats.addValue(score.mathCorr); logger.info(score.toString()); // find the extensionally best matching tree in the list Pair<EvaluatedRDFResourceTree, Score> bestMatchingTreeWithScore = findBestMatchingTreeFast( solutions, sparqlQuery, noise, examples); EvaluatedRDFResourceTree bestMatchingTree = bestMatchingTreeWithScore .getFirst(); Score bestMatchingScore = bestMatchingTreeWithScore.getSecond(); // position of best tree in list of solutions int positionBestScore = solutions.indexOf(bestMatchingTree); bestSolutionPositionStats.addValue(positionBestScore); Score bestScore = score; if (positionBestScore > 0) { logger.info("Position of best covering tree in list: " + positionBestScore); logger.info("Best covering solution:\n" + render(bestMatchingTree.asEvaluatedDescription())); logger.info("Tree score: " + bestMatchingTree.getTreeScore()); bestScore = bestMatchingScore; logger.info(bestMatchingScore.toString()); } else { logger.info("Best returned solution was also the best covering solution."); } bestSolutionRecallStats.addValue(bestScore.recall); bestSolutionPrecisionStats.addValue(bestScore.precision); bestSolutionFMeasureStats.addValue(bestScore.fmeasure); bestSolutionPredAccStats.addValue(bestScore.predAcc); bestSolutionMathCorrStats.addValue(bestScore.mathCorr); for (RDFResourceTree negTree : examples.negExamplesMapping.values()) { if (QueryTreeUtils.isSubsumedBy(negTree, bestMatchingTree.getTree())) { Files.append(sparqlQuery + "\n", new File("/tmp/negCovered.txt"), Charsets.UTF_8); break; } } String bestQuery = QueryFactory.create(QueryTreeUtils.toSPARQLQueryString( filter.apply(bestMatchingTree.getTree()), dataset.getBaseIRI(), 
dataset.getPrefixMapping())).toString(); if (write2DB) { write2DB(sparqlQuery, nrOfExamples, examples, noise, baseLineQuery, baselineScore, heuristicName, measureName, QueryFactory.create(learnedSPARQLQuery).toString(), score, runtimeBestSolution, bestQuery, positionBestScore, bestScore); } } catch (Exception e) { failed.set(true); logger.error("Error occured for query\n" + sparqlQuery, e); try { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); Files.append(sparqlQuery + "\n" + sw.toString(), new File(benchmarkDirectory, "failed-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".txt"), Charsets.UTF_8); } catch (IOException e1) { e1.printStackTrace(); } } finally { int cnt = currentNrOfFinishedRuns.incrementAndGet(); logger.info("***********Evaluation Progress:" + NumberFormat.getPercentInstance() .format((double) cnt / totalNrOfQTLRuns) + "(" + cnt + "/" + totalNrOfQTLRuns + ")" + "***********"); } }); } tp.shutdown(); tp.awaitTermination(12, TimeUnit.HOURS); Logger.getRootLogger().removeAppender(appender); if (!failed.get()) { String result = ""; result += "\nBaseline Precision:\n" + baselinePrecisionStats; result += "\nBaseline Recall:\n" + baselineRecallStats; result += "\nBaseline F-measure:\n" + baselineFMeasureStats; result += "\nBaseline PredAcc:\n" + baselinePredAccStats; result += "\nBaseline MathCorr:\n" + baselineMathCorrStats; result += "#Returned solutions:\n" + nrOfReturnedSolutionsStats; result += "\nOverall Precision:\n" + bestReturnedSolutionPrecisionStats; result += "\nOverall Recall:\n" + bestReturnedSolutionRecallStats; result += "\nOverall F-measure:\n" + bestReturnedSolutionFMeasureStats; result += "\nOverall PredAcc:\n" + bestReturnedSolutionPredAccStats; result += "\nOverall MathCorr:\n" + bestReturnedSolutionMathCorrStats; result += "\nTime until best returned solution found:\n" + bestReturnedSolutionRuntimeStats; result += "\nPositions of best solution:\n" + 
Arrays.toString(bestSolutionPositionStats.getValues()); result += "\nPosition of best solution stats:\n" + bestSolutionPositionStats; result += "\nOverall Precision of best solution:\n" + bestSolutionPrecisionStats; result += "\nOverall Recall of best solution:\n" + bestSolutionRecallStats; result += "\nOverall F-measure of best solution:\n" + bestSolutionFMeasureStats; result += "\nCBD generation time(total):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getTotal() + "\n"; result += "CBD generation time(avg):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getAvg() + "\n"; result += "Tree generation time(total):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getTotal() + "\n"; result += "Tree generation time(avg):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getAvg() + "\n"; result += "Tree size(avg):\t" + treeSizeStats.getMean() + "\n"; logger.info(result); try { Files.write(result, statsFile, Charsets.UTF_8); } catch (IOException e) { e.printStackTrace(); } data[i][j] = bestReturnedSolutionFMeasureStats.getMean(); if (write2DB) { write2DB(heuristicName, measureName, nrOfExamples, noise, bestReturnedSolutionFMeasureStats.getMean(), bestReturnedSolutionPrecisionStats.getMean(), bestReturnedSolutionRecallStats.getMean(), bestReturnedSolutionPredAccStats.getMean(), bestReturnedSolutionMathCorrStats.getMean(), bestSolutionPositionStats.getMean(), bestSolutionFMeasureStats.getMean(), bestSolutionPrecisionStats.getMean(), bestSolutionRecallStats.getMean(), bestSolutionPredAccStats.getMean(), bestSolutionMathCorrStats.getMean(), baselineFMeasureStats.getMean(), baselinePrecisionStats.getMean(), baselineRecallStats.getMean(), baselinePredAccStats.getMean(), baselineMathCorrStats.getMean(), bestReturnedSolutionRuntimeStats.getMean()); } } } } String content = "###"; String separator = "\t"; for (double noiseInterval1 : noiseIntervals) { content += separator + noiseInterval1; 
} content += "\n"; for (int i = 0; i < nrOfExamplesIntervals.length; i++) { content += nrOfExamplesIntervals[i]; for (int j = 0; j < noiseIntervals.length; j++) { content += separator + data[i][j]; } content += "\n"; } File examplesVsNoise = new File(benchmarkDirectory, "examplesVsNoise-" + heuristicName + "-" + measureName + ".tsv"); try { Files.write(content, examplesVsNoise, Charsets.UTF_8); } catch (IOException e) { logger.error("failed to write stats to file", e); } } } if (write2DB) { conn.close(); } if (useEmailNotification) { sendFinishedMail(); } long t2 = System.currentTimeMillis(); long duration = t2 - t1; logger.info("QTL evaluation finished in " + DurationFormatUtils.formatDurationHMS(duration) + "ms."); }
From source file:org.dllearner.algorithms.qtl.experiments.PRConvergenceExperiment.java
/**
 * Runs the precision/recall convergence experiment end-to-end.
 *
 * Loads the dataset's SPARQL queries, filters them by include/omit token lists, precomputes
 * example candidates per query (positive-only when noise is disabled), drops empty queries and
 * those with fewer than 3 positive examples, caps the set at 80 queries, then for each
 * heuristic x measure x #examples x noise configuration runs QTL2DisjunctiveMultiThreaded
 * learning on a thread pool using a CBD structure tree (either the configured one or one
 * derived per query). Aggregates baseline / best-returned / best-covering statistics, writes
 * per-config .log/.stats files and an examples-vs-noise TSV, and optionally persists results
 * to a DB and sends an email notification on completion.
 *
 * @param maxNrOfProcessedQueries upper bound on queries to process; -1 means "all loaded queries"
 * @param maxTreeDepth initial maximum query-tree depth (later overwritten per query from the
 *        CBD structure depth inside the worker — note this mutates shared state from tasks)
 * @param exampleInterval if non-null, replaces the configured #examples intervals
 * @param noiseInterval if non-null, replaces the configured noise intervals; also used to
 *        decide whether noise (and hence negative examples) is enabled
 * @param measures if non-null, replaces the configured heuristic measures
 * @throws Exception on any unrecoverable failure (per-query failures are caught, logged to a
 *         "failed-*" file, and flagged so the config's aggregate stats are skipped)
 *
 * NOTE(review): `noiseEnabled ? false : true` is just `!noiseEnabled`; the final log line
 * appends "ms." to an H:MM:SS-formatted duration, which is misleading. Text below is preserved
 * byte-identical; the scrape split some comments/strings across physical lines.
 */
public void run(int maxNrOfProcessedQueries, int maxTreeDepth, int[] exampleInterval, double[] noiseInterval, HeuristicType[] measures) throws Exception { this.maxTreeDepth = maxTreeDepth; queryTreeFactory.setMaxDepth(maxTreeDepth); if (exampleInterval != null) { nrOfExamplesIntervals = exampleInterval; }/*from ww w . j av a2 s .c o m*/ if (noiseInterval != null) { this.noiseIntervals = noiseInterval; } if (measures != null) { this.measures = measures; } boolean noiseEnabled = noiseIntervals.length > 1 || noiseInterval[0] > 0; boolean posOnly = noiseEnabled ? false : true; logger.info("Started QTL evaluation..."); long t1 = System.currentTimeMillis(); List<String> queries = dataset.getSparqlQueries().values().stream().map(q -> q.toString()) .collect(Collectors.toList()); logger.info("#loaded queries: " + queries.size()); // filter for debugging purposes queries = queries.stream().filter(q -> queriesToProcessTokens.stream().noneMatch(t -> !q.contains(t))) .collect(Collectors.toList()); queries = queries.stream().filter(q -> queriesToOmitTokens.stream().noneMatch(t -> q.contains(t))) .collect(Collectors.toList()); if (maxNrOfProcessedQueries == -1) { maxNrOfProcessedQueries = queries.size(); } // queries = filter(queries, (int) Math.ceil((double) maxNrOfProcessedQueries / maxTreeDepth)); // queries = queries.subList(0, Math.min(queries.size(), maxNrOfProcessedQueries)); logger.info("#queries to process: " + queries.size()); // generate examples for each query logger.info("precomputing pos. and neg. examples..."); for (String query : queries) {//if(!(query.contains("Borough_(New_York_City)")))continue; query2Examples.put(query, generateExamples(query, posOnly, noiseEnabled)); } logger.info("precomputing pos. and neg. 
examples finished."); // check for queries that do not return any result (should not happen, but we never know) Set<String> emptyQueries = query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.isEmpty()).map(e -> e.getKey()) .collect(Collectors.toSet()); logger.info("got {} empty queries.", emptyQueries.size()); queries.removeAll(emptyQueries); // min. pos examples int min = 3; Set<String> lowNrOfExamplesQueries = query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.size() < min).map(e -> e.getKey()) .collect(Collectors.toSet()); logger.info("got {} queries with < {} pos. examples.", emptyQueries.size(), min); queries.removeAll(lowNrOfExamplesQueries); queries = queries.subList(0, Math.min(80, queries.size())); final int totalNrOfQTLRuns = heuristics.length * this.measures.length * nrOfExamplesIntervals.length * noiseIntervals.length * queries.size(); logger.info("#QTL runs: " + totalNrOfQTLRuns); final AtomicInteger currentNrOfFinishedRuns = new AtomicInteger(0); // loop over heuristics for (final QueryTreeHeuristic heuristic : heuristics) { final String heuristicName = heuristic.getClass().getAnnotation(ComponentAnn.class).shortName(); // loop over heuristics measures for (HeuristicType measure : this.measures) { final String measureName = measure.toString(); heuristic.setHeuristicType(measure); double[][] data = new double[nrOfExamplesIntervals.length][noiseIntervals.length]; // loop over number of positive examples for (int i = 0; i < nrOfExamplesIntervals.length; i++) { final int nrOfExamples = nrOfExamplesIntervals[i]; // loop over noise value for (int j = 0; j < noiseIntervals.length; j++) { final double noise = noiseIntervals[j]; // check if not already processed File logFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".log"); File statsFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-" + 
heuristicName + "-" + measureName + ".stats"); if (!override && logFile.exists() && statsFile.exists()) { logger.info( "Eval config already processed. For re-running please remove corresponding output files."); continue; } FileAppender appender = null; try { appender = new FileAppender(new SimpleLayout(), logFile.getPath(), false); Logger.getRootLogger().addAppender(appender); } catch (IOException e) { e.printStackTrace(); } logger.info("#examples: " + nrOfExamples + " noise: " + noise); final DescriptiveStatistics nrOfReturnedSolutionsStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselinePrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselinePredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics baselineMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionPrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionPredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestReturnedSolutionRuntimeStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionPrecisionStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionRecallStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionFMeasureStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics 
bestSolutionPredAccStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionMathCorrStats = new SynchronizedDescriptiveStatistics(); final DescriptiveStatistics bestSolutionPositionStats = new SynchronizedDescriptiveStatistics(); MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).reset(); MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).reset(); ExecutorService tp = Executors.newFixedThreadPool(nrOfThreads); // indicates if the execution for some of the queries failed final AtomicBoolean failed = new AtomicBoolean(false); Set<String> queriesToProcess = new TreeSet<>(queries); queriesToProcess.retainAll(query2Examples.entrySet().stream() .filter(e -> e.getValue().correctPosExampleCandidates.size() >= nrOfExamples) .map(e -> e.getKey()).collect(Collectors.toSet())); // loop over SPARQL queries for (final String sparqlQuery : queriesToProcess) { CBDStructureTree cbdStructure = cbdStructureTree != null ? cbdStructureTree : QueryUtils.getOptimalCBDStructure(QueryFactory.create(sparqlQuery)); tp.submit(() -> { logger.info("CBD tree:" + cbdStructure.toStringVerbose()); // update max tree depth this.maxTreeDepth = QueryTreeUtils.getDepth(cbdStructure); logger.info("##############################################################"); logger.info("Processing query\n" + sparqlQuery); // we repeat it n times with different permutations of examples int nrOfPermutations = 1; if (nrOfExamples >= query2Examples.get(sparqlQuery).correctPosExampleCandidates .size()) { nrOfPermutations = 1; } for (int perm = 1; perm <= nrOfPermutations; perm++) { logger.info("Run {}/{}", perm, nrOfPermutations); try { ExamplesWrapper examples = getExamples(sparqlQuery, nrOfExamples, nrOfExamples, noise, cbdStructure); logger.info("pos. examples:\n" + Joiner.on("\n").join(examples.correctPosExamples)); logger.info("neg. 
examples:\n" + Joiner.on("\n").join(examples.correctNegExamples)); // write examples to disk File dir = new File(benchmarkDirectory, "data/" + hash(sparqlQuery)); dir.mkdirs(); Files.write(Joiner.on("\n").join(examples.correctPosExamples), new File(dir, "examples" + perm + "_" + nrOfExamples + "_" + noise + ".tp"), Charsets.UTF_8); Files.write(Joiner.on("\n").join(examples.correctNegExamples), new File(dir, "examples" + perm + "_" + nrOfExamples + "_" + noise + ".tn"), Charsets.UTF_8); Files.write(Joiner.on("\n").join(examples.falsePosExamples), new File(dir, "examples" + perm + "_" + nrOfExamples + "_" + noise + ".fp"), Charsets.UTF_8); // compute baseline RDFResourceTree baselineSolution = applyBaseLine(examples, Baseline.MOST_INFORMATIVE_EDGE_IN_EXAMPLES); logger.info("Evaluating baseline..."); Score baselineScore = computeScore(sparqlQuery, baselineSolution, noise); logger.info("Baseline score:\n" + baselineScore); String baseLineQuery = QueryTreeUtils.toSPARQLQueryString(baselineSolution, dataset.getBaseIRI(), dataset.getPrefixMapping()); baselinePrecisionStats.addValue(baselineScore.precision); baselineRecallStats.addValue(baselineScore.recall); baselineFMeasureStats.addValue(baselineScore.fmeasure); baselinePredAccStats.addValue(baselineScore.predAcc); baselineMathCorrStats.addValue(baselineScore.mathCorr); // run QTL PosNegLPStandard lp = new PosNegLPStandard(); lp.setPositiveExamples(examples.posExamplesMapping.keySet()); lp.setNegativeExamples(examples.negExamplesMapping.keySet()); // QTL2Disjunctive la = new QTL2Disjunctive(lp, qef); QTL2DisjunctiveMultiThreaded la = new QTL2DisjunctiveMultiThreaded(lp, qef); la.setRenderer(new org.dllearner.utilities.owl.DLSyntaxObjectRenderer()); la.setReasoner(dataset.getReasoner()); la.setEntailment(Entailment.SIMPLE); la.setTreeFactory(queryTreeFactory); la.setPositiveExampleTrees(examples.posExamplesMapping); la.setNegativeExampleTrees(examples.negExamplesMapping); la.setNoise(noise); la.setHeuristic(heuristic); 
la.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds); la.setMaxTreeComputationTimeInSeconds(maxExecutionTimeInSeconds); la.init(); la.start(); List<EvaluatedRDFResourceTree> solutions = new ArrayList<>( la.getSolutions()); // List<EvaluatedRDFResourceTree> solutions = generateSolutions(examples, noise, heuristic); nrOfReturnedSolutionsStats.addValue(solutions.size()); // the best returned solution by QTL EvaluatedRDFResourceTree bestSolution = solutions.get(0); logger.info("Got " + solutions.size() + " query trees."); // logger.info("Best computed solution:\n" + render(bestSolution.asEvaluatedDescription())); logger.info("QTL Score:\n" + bestSolution.getTreeScore()); long runtimeBestSolution = la.getTimeBestSolutionFound(); bestReturnedSolutionRuntimeStats.addValue(runtimeBestSolution); // convert to SPARQL query RDFResourceTree tree = bestSolution.getTree(); tree = filter.apply(tree); String learnedSPARQLQuery = QueryTreeUtils.toSPARQLQueryString(tree, dataset.getBaseIRI(), dataset.getPrefixMapping()); // compute score Score score = computeScore(sparqlQuery, tree, noise); bestReturnedSolutionPrecisionStats.addValue(score.precision); bestReturnedSolutionRecallStats.addValue(score.recall); bestReturnedSolutionFMeasureStats.addValue(score.fmeasure); bestReturnedSolutionPredAccStats.addValue(score.predAcc); bestReturnedSolutionMathCorrStats.addValue(score.mathCorr); logger.info(score.toString()); // find the extensionally best matching tree in the list Pair<EvaluatedRDFResourceTree, Score> bestMatchingTreeWithScore = findBestMatchingTreeFast( solutions, sparqlQuery, noise, examples); EvaluatedRDFResourceTree bestMatchingTree = bestMatchingTreeWithScore .getFirst(); Score bestMatchingScore = bestMatchingTreeWithScore.getSecond(); // position of best tree in list of solutions int positionBestScore = solutions.indexOf(bestMatchingTree); bestSolutionPositionStats.addValue(positionBestScore); Score bestScore = score; if (positionBestScore > 0) { logger.info( 
"Position of best covering tree in list: " + positionBestScore); logger.info("Best covering solution:\n" + render(bestMatchingTree.asEvaluatedDescription())); logger.info("Tree score: " + bestMatchingTree.getTreeScore()); bestScore = bestMatchingScore; logger.info(bestMatchingScore.toString()); } else { logger.info( "Best returned solution was also the best covering solution."); } bestSolutionRecallStats.addValue(bestScore.recall); bestSolutionPrecisionStats.addValue(bestScore.precision); bestSolutionFMeasureStats.addValue(bestScore.fmeasure); bestSolutionPredAccStats.addValue(bestScore.predAcc); bestSolutionMathCorrStats.addValue(bestScore.mathCorr); for (RDFResourceTree negTree : examples.negExamplesMapping.values()) { if (QueryTreeUtils.isSubsumedBy(negTree, bestMatchingTree.getTree())) { Files.append(sparqlQuery + "\n", new File("/tmp/negCovered.txt"), Charsets.UTF_8); break; } } String bestQuery = QueryFactory .create(QueryTreeUtils.toSPARQLQueryString( filter.apply(bestMatchingTree.getTree()), dataset.getBaseIRI(), dataset.getPrefixMapping())) .toString(); if (write2DB) { write2DB(sparqlQuery, nrOfExamples, examples, noise, baseLineQuery, baselineScore, heuristicName, measureName, QueryFactory.create(learnedSPARQLQuery).toString(), score, runtimeBestSolution, bestQuery, positionBestScore, bestScore); } } catch (Exception e) { failed.set(true); logger.error("Error occured for query\n" + sparqlQuery, e); try { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); Files.append(sparqlQuery + "\n" + sw.toString(), new File(benchmarkDirectory, "failed-" + nrOfExamples + "-" + noise + "-" + heuristicName + "-" + measureName + ".txt"), Charsets.UTF_8); } catch (IOException e1) { e1.printStackTrace(); } } finally { int cnt = currentNrOfFinishedRuns.incrementAndGet(); logger.info("***********Evaluation Progress:" + NumberFormat.getPercentInstance() .format((double) cnt / totalNrOfQTLRuns) + "(" + cnt + "/" + totalNrOfQTLRuns 
+ ")" + "***********"); } } }); } tp.shutdown(); tp.awaitTermination(12, TimeUnit.HOURS); Logger.getRootLogger().removeAppender(appender); if (!failed.get()) { String result = ""; result += "\nBaseline Precision:\n" + baselinePrecisionStats; result += "\nBaseline Recall:\n" + baselineRecallStats; result += "\nBaseline F-measure:\n" + baselineFMeasureStats; result += "\nBaseline PredAcc:\n" + baselinePredAccStats; result += "\nBaseline MathCorr:\n" + baselineMathCorrStats; result += "#Returned solutions:\n" + nrOfReturnedSolutionsStats; result += "\nOverall Precision:\n" + bestReturnedSolutionPrecisionStats; result += "\nOverall Recall:\n" + bestReturnedSolutionRecallStats; result += "\nOverall F-measure:\n" + bestReturnedSolutionFMeasureStats; result += "\nOverall PredAcc:\n" + bestReturnedSolutionPredAccStats; result += "\nOverall MathCorr:\n" + bestReturnedSolutionMathCorrStats; result += "\nTime until best returned solution found:\n" + bestReturnedSolutionRuntimeStats; result += "\nPositions of best solution:\n" + Arrays.toString(bestSolutionPositionStats.getValues()); result += "\nPosition of best solution stats:\n" + bestSolutionPositionStats; result += "\nOverall Precision of best solution:\n" + bestSolutionPrecisionStats; result += "\nOverall Recall of best solution:\n" + bestSolutionRecallStats; result += "\nOverall F-measure of best solution:\n" + bestSolutionFMeasureStats; result += "\nCBD generation time(total):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getTotal() + "\n"; result += "CBD generation time(avg):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getAvg() + "\n"; result += "Tree generation time(total):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getTotal() + "\n"; result += "Tree generation time(avg):\t" + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getAvg() + "\n"; result += "Tree size(avg):\t" + treeSizeStats.getMean() + "\n"; 
logger.info(result); try { Files.write(result, statsFile, Charsets.UTF_8); } catch (IOException e) { e.printStackTrace(); } data[i][j] = bestReturnedSolutionFMeasureStats.getMean(); if (write2DB) { write2DB(heuristicName, measureName, nrOfExamples, noise, bestReturnedSolutionFMeasureStats.getMean(), bestReturnedSolutionPrecisionStats.getMean(), bestReturnedSolutionRecallStats.getMean(), bestReturnedSolutionPredAccStats.getMean(), bestReturnedSolutionMathCorrStats.getMean(), bestSolutionPositionStats.getMean(), bestSolutionFMeasureStats.getMean(), bestSolutionPrecisionStats.getMean(), bestSolutionRecallStats.getMean(), bestSolutionPredAccStats.getMean(), bestSolutionMathCorrStats.getMean(), baselineFMeasureStats.getMean(), baselinePrecisionStats.getMean(), baselineRecallStats.getMean(), baselinePredAccStats.getMean(), baselineMathCorrStats.getMean(), bestReturnedSolutionRuntimeStats.getMean()); } } } } String content = "###"; String separator = "\t"; for (double noiseInterval1 : noiseIntervals) { content += separator + noiseInterval1; } content += "\n"; for (int i = 0; i < nrOfExamplesIntervals.length; i++) { content += nrOfExamplesIntervals[i]; for (int j = 0; j < noiseIntervals.length; j++) { content += separator + data[i][j]; } content += "\n"; } File examplesVsNoise = new File(benchmarkDirectory, "examplesVsNoise-" + heuristicName + "-" + measureName + ".tsv"); try { Files.write(content, examplesVsNoise, Charsets.UTF_8); } catch (IOException e) { logger.error("failed to write stats to file", e); } } } if (write2DB) { conn.close(); } if (useEmailNotification) { sendFinishedMail(); } long t2 = System.currentTimeMillis(); long duration = t2 - t1; logger.info("QTL evaluation finished in " + DurationFormatUtils.formatDurationHMS(duration) + "ms."); }
From source file:org.apache.pulsar.compaction.CompactionTest.java
@Test public void testCompactEncryptedBatching() throws Exception { String topic = "persistent://my-property/use/my-ns/my-topic1"; // subscribe before sending anything, so that we get all messages pulsarClient.newConsumer().topic(topic).subscriptionName("sub1").readCompacted(true).subscribe().close(); try (Producer<byte[]> producer = pulsarClient.newProducer().topic(topic) .addEncryptionKey("client-ecdsa.pem").cryptoKeyReader(new EncKeyReader()).maxPendingMessages(3) .enableBatching(true).batchingMaxMessages(3).batchingMaxPublishDelay(1, TimeUnit.HOURS).create()) { producer.newMessage().key("key1").value("my-message-1".getBytes()).sendAsync(); producer.newMessage().key("key2").value("my-message-2".getBytes()).sendAsync(); producer.newMessage().key("key2").value("my-message-3".getBytes()).send(); }//from ww w.jav a2 s.c o m // compact the topic Compactor compactor = new TwoPhaseCompactor(conf, pulsarClient, bk, compactionScheduler); compactor.compact(topic).get(); // with encryption, all messages are passed through compaction as it doesn't // have the keys to decrypt the batch payload try (Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topic).subscriptionName("sub1") .cryptoKeyReader(new EncKeyReader()).readCompacted(true).subscribe()) { Message<byte[]> message1 = consumer.receive(); Assert.assertEquals(message1.getKey(), "key1"); Assert.assertEquals(new String(message1.getData()), "my-message-1"); Message<byte[]> message2 = consumer.receive(); Assert.assertEquals(message2.getKey(), "key2"); Assert.assertEquals(new String(message2.getData()), "my-message-2"); Message<byte[]> message3 = consumer.receive(); Assert.assertEquals(message3.getKey(), "key2"); Assert.assertEquals(new String(message3.getData()), "my-message-3"); } }
From source file:org.springframework.session.data.gemfire.AbstractGemFireOperationsSessionRepositoryTest.java
@Test public void sessionFromData() throws Exception { final long expectedCreationTime = 1L; final long expectedLastAccessedTime = 2L; final int expectedMaxInactiveIntervalInSeconds = (int) TimeUnit.HOURS.toSeconds(6); final String expectedPrincipalName = "jblum"; DataInput mockDataInput = mock(DataInput.class); given(mockDataInput.readUTF()).willReturn("2").willReturn(expectedPrincipalName); given(mockDataInput.readLong()).willReturn(expectedCreationTime).willReturn(expectedLastAccessedTime); given(mockDataInput.readInt()).willReturn(expectedMaxInactiveIntervalInSeconds); @SuppressWarnings("serial") AbstractGemFireOperationsSessionRepository.GemFireSession session = new AbstractGemFireOperationsSessionRepository.GemFireSession( "1") { @Override// w w w. ja v a 2 s . co m @SuppressWarnings("unchecked") <T> T readObject(DataInput in) throws ClassNotFoundException, IOException { assertThat(in).isNotNull(); AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes sessionAttributes = new AbstractGemFireOperationsSessionRepository.GemFireSessionAttributes(); sessionAttributes.setAttribute("attrOne", "testOne"); sessionAttributes.setAttribute("attrTwo", "testTwo"); return (T) sessionAttributes; } }; session.fromData(mockDataInput); Set<String> expectedAttributeNames = asSet("attrOne", "attrTwo", FindByIndexNameSessionRepository.PRINCIPAL_NAME_INDEX_NAME); assertThat(session.getId()).isEqualTo("2"); assertThat(session.getCreationTime()).isEqualTo(expectedCreationTime); assertThat(session.getLastAccessedTime()).isEqualTo(expectedLastAccessedTime); assertThat(session.getMaxInactiveIntervalInSeconds()).isEqualTo(expectedMaxInactiveIntervalInSeconds); assertThat(session.getPrincipalName()).isEqualTo(expectedPrincipalName); assertThat(session.getAttributeNames().size()).isEqualTo(3); assertThat(session.getAttributeNames().containsAll(expectedAttributeNames)).isTrue(); assertThat(String.valueOf(session.getAttribute("attrOne"))).isEqualTo("testOne"); 
assertThat(String.valueOf(session.getAttribute("attrTwo"))).isEqualTo("testTwo"); assertThat(String.valueOf(session.getAttribute(FindByIndexNameSessionRepository.PRINCIPAL_NAME_INDEX_NAME))) .isEqualTo(expectedPrincipalName); verify(mockDataInput, times(2)).readUTF(); verify(mockDataInput, times(2)).readLong(); verify(mockDataInput, times(2)).readInt(); }
From source file:la2launcher.MainFrame.java
// Validates the local game installation against a checksum list fetched from
// the update server:
//  1. Downloads "updater.lst.la2" over HTTP into a temp file, repairs it
//     (fixBzip2File) and unpacks it with BZip2CompressorInputStream into a
//     second temp file, logging progress into jTextArea2.
//  2. Clears jTable2, counts entries (scanSumFilesCount) to size
//     jProgressBar1, then scanSumFile() dispatches one task per entry onto a
//     5-thread ThreadPoolExecutor; each task digests the local file and, on a
//     checksum mismatch, appends a {filename, false} row to jTable2.
//     A ReentrantLock serializes the digest/UI-update section of the tasks.
//  3. Disables jButton5..8/10 during the run; a watcher thread polls every
//     300 ms, updating jLabel5 with transfer rate (bytesRecieved) and total
//     MB, and when the pool drains it shuts the pool down, re-enables the
//     buttons and prints the elapsed time.
// param full — true for a full scan, false for the "system"-only subset
//     (forwarded to scanSumFilesCount/scanSumFile).
// NOTE(review): several UI strings show as "?" runs in this copy — presumably
// non-ASCII (Cyrillic) text lost in transcoding; confirm against the original
// source before touching any of these literals. Some literals also span the
// extraction line breaks below, so the line layout is left byte-identical.
// NOTE(review): IOExceptions are caught per stage but streams are not closed
// in finally blocks — a download failure can leak fos/bz/pout; flagged only,
// not changed here.
private void processValidation(boolean full) { final long initTime = new Date().getTime(); final String patcherUrl = "http://" + updateHost + "/hf/updater.lst.la2";//new ArrayBlockingQueue<Runnable>(10000) final ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 1, TimeUnit.HOURS, new ArrayBlockingQueue<Runnable>(10000)); tpe.execute(new Runnable() { @Override/*from ww w . j av a 2 s .co m*/ public void run() { jTextArea2.setText(""); try { if (full) { jTextArea2.setText(jTextArea2.getText() + "\r\n? "); } else { jTextArea2.setText(jTextArea2.getText() + "\r\n? system"); } File patcher = File.createTempFile("la2", "la2"); patcher.deleteOnExit(); File patcherExt = File.createTempFile("la2", "la2"); patcherExt.deleteOnExit(); FileOutputStream fos = new FileOutputStream(patcher); CloseableHttpClient httpclient = HttpClients.createDefault(); HttpGet httpGet = new HttpGet(patcherUrl); CloseableHttpResponse response1 = httpclient.execute(httpGet); HttpEntity entity1 = response1.getEntity(); copyStream(entity1.getContent(), fos, null); response1.close(); fos.close(); jTextArea2.setText(jTextArea2.getText() + "\r\n?? ? ?: " + patcherUrl); fixBzip2File(patcher); jTextArea2.setText(jTextArea2.getText() + "\r\n ?"); BZip2CompressorInputStream bz = new BZip2CompressorInputStream(new FileInputStream(patcher)); OutputStream pout = new FileOutputStream(patcherExt); copyStream(bz, pout, new CopyListener() { @Override public void transfered(int n) { bytesRecieved += n; bytesRecievedTotal += n; } }); pout.close(); bz.close(); jTextArea2.setText(jTextArea2.getText() + "\r\n? 
?"); if (full) { jTextArea2.setText(jTextArea2.getText() + "\r\n "); } else { jTextArea2.setText(jTextArea2.getText() + "\r\n system"); } DefaultTableModel model = (DefaultTableModel) jTable2.getModel(); model.setRowCount(0); int filesCount = scanSumFilesCount(patcherExt, full); jProgressBar1.setMinimum(0); jProgressBar1.setMaximum(filesCount); jProgressBar1.setValue(0); jLabel4.setText("0/" + filesCount); scanSumFile(patcherExt, new SumHandler() { private ReentrantLock lock = new ReentrantLock(); @Override public void handle(MDNamePair pair) { try { jProgressBar1.setIndeterminate(false); //lock.unlock(); tpe.execute(new Runnable() { @Override public void run() { try { lock.lock(); //printMsg(pair.filename); String crc = digest(new File(gamePath + pair.filename)); //printMsg(" : " + pair.crc); //printMsg(" ? ? : " + crc); if (!pair.crc.equals(crc)) { DefaultTableModel dtm = (DefaultTableModel) jTable2.getModel(); dtm.addRow(new Object[] { pair.filename, false }); } jProgressBar1.setValue(jProgressBar1.getValue() + 1); jLabel4.setText(jProgressBar1.getValue() + "/" + filesCount); lock.unlock(); } catch (NoSuchAlgorithmException ex) { Logger.getLogger(MainFrame.class.getName()).log(Level.SEVERE, null, ex); } catch (IOException ex) { Logger.getLogger(MainFrame.class.getName()).log(Level.SEVERE, null, ex); } finally { //if (lock.isLocked()) lock.unlock(); } } }); } finally { //if (lock.isLocked()) lock.unlock(); } } }, full); } catch (IOException ex) { Logger.getLogger(MainFrame.class.getName()).log(Level.SEVERE, null, ex); } } }); jButton5.setEnabled(false); jButton6.setEnabled(false); jButton7.setEnabled(false); jButton8.setEnabled(false); jButton10.setEnabled(false); jProgressBar1.setIndeterminate(true); new Thread() { @Override public void run() { do { long millis = new Date().getTime(); try { sleep(300); } catch (InterruptedException ex) { Logger.getLogger(MainFrame.class.getName()).log(Level.SEVERE, null, ex); } millis = new Date().getTime() - millis; BigDecimal 
totBig = new BigDecimal(bytesRecievedTotal / (1024 * 1024.0)); totBig = totBig.setScale(2, BigDecimal.ROUND_CEILING); jLabel5.setText("?: " + (bytesRecieved / millis) + "KB/s. : " + totBig + " MB"); bytesRecieved = 0; } while (tpe.getActiveCount() > 0); tpe.shutdown(); jButton5.setEnabled(true); jButton6.setEnabled(true); jButton7.setEnabled(true); jButton8.setEnabled(true); jButton10.setEnabled(true); jProgressBar1.setIndeterminate(false); printMsg(" " + (new Date().getTime() - initTime) + " ?."); } }.start(); }
From source file:org.apache.pulsar.compaction.CompactionTest.java
@Test public void testCompactEncryptedAndCompressedBatching() throws Exception { String topic = "persistent://my-property/use/my-ns/my-topic1"; // subscribe before sending anything, so that we get all messages pulsarClient.newConsumer().topic(topic).subscriptionName("sub1").readCompacted(true).subscribe().close(); try (Producer<byte[]> producer = pulsarClient.newProducer().topic(topic) .addEncryptionKey("client-ecdsa.pem").cryptoKeyReader(new EncKeyReader()) .compressionType(CompressionType.LZ4).maxPendingMessages(3).enableBatching(true) .batchingMaxMessages(3).batchingMaxPublishDelay(1, TimeUnit.HOURS).create()) { producer.newMessage().key("key1").value("my-message-1".getBytes()).sendAsync(); producer.newMessage().key("key2").value("my-message-2".getBytes()).sendAsync(); producer.newMessage().key("key2").value("my-message-3".getBytes()).send(); }/*from w w w .j a v a2s. c o m*/ // compact the topic Compactor compactor = new TwoPhaseCompactor(conf, pulsarClient, bk, compactionScheduler); compactor.compact(topic).get(); // with encryption, all messages are passed through compaction as it doesn't // have the keys to decrypt the batch payload try (Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topic).subscriptionName("sub1") .cryptoKeyReader(new EncKeyReader()).readCompacted(true).subscribe()) { Message<byte[]> message1 = consumer.receive(); Assert.assertEquals(message1.getKey(), "key1"); Assert.assertEquals(new String(message1.getData()), "my-message-1"); Message<byte[]> message2 = consumer.receive(); Assert.assertEquals(message2.getKey(), "key2"); Assert.assertEquals(new String(message2.getData()), "my-message-2"); Message<byte[]> message3 = consumer.receive(); Assert.assertEquals(message3.getKey(), "key2"); Assert.assertEquals(new String(message3.getData()), "my-message-3"); } }
From source file:org.apache.pulsar.compaction.CompactionTest.java
@Test public void testEmptyPayloadDeletesWhenEncrypted() throws Exception { String topic = "persistent://my-property/use/my-ns/my-topic1"; // subscribe before sending anything, so that we get all messages pulsarClient.newConsumer().topic(topic).subscriptionName("sub1").readCompacted(true).subscribe().close(); try (Producer<byte[]> producerNormal = pulsarClient.newProducer().topic(topic).enableBatching(false) .addEncryptionKey("client-ecdsa.pem").cryptoKeyReader(new EncKeyReader()).create(); Producer<byte[]> producerBatch = pulsarClient.newProducer().topic(topic).maxPendingMessages(3) .enableBatching(true).addEncryptionKey("client-ecdsa.pem") .cryptoKeyReader(new EncKeyReader()).batchingMaxMessages(3) .batchingMaxPublishDelay(1, TimeUnit.HOURS).create()) { // key0 persists through it all producerNormal.newMessage().key("key0").value("my-message-0".getBytes()).send(); // key1 is added but then deleted producerNormal.newMessage().key("key1").value("my-message-1".getBytes()).send(); producerNormal.newMessage().key("key1").send(); // key2 is added but deleted in same batch producerBatch.newMessage().key("key2").value("my-message-2".getBytes()).sendAsync(); producerBatch.newMessage().key("key3").value("my-message-3".getBytes()).sendAsync(); producerBatch.newMessage().key("key2").send(); // key4 is added, deleted, then resurrected producerNormal.newMessage().key("key4").value("my-message-4".getBytes()).send(); }/* www .j a v a2 s . 
com*/ // compact the topic Compactor compactor = new TwoPhaseCompactor(conf, pulsarClient, bk, compactionScheduler); compactor.compact(topic).get(); try (Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topic).cryptoKeyReader(new EncKeyReader()) .subscriptionName("sub1").readCompacted(true).subscribe()) { Message<byte[]> message1 = consumer.receive(); Assert.assertEquals(message1.getKey(), "key0"); Assert.assertEquals(new String(message1.getData()), "my-message-0"); // see all messages from batch Message<byte[]> message2 = consumer.receive(); Assert.assertEquals(message2.getKey(), "key2"); Assert.assertEquals(new String(message2.getData()), "my-message-2"); Message<byte[]> message3 = consumer.receive(); Assert.assertEquals(message3.getKey(), "key3"); Assert.assertEquals(new String(message3.getData()), "my-message-3"); Message<byte[]> message4 = consumer.receive(); Assert.assertEquals(message4.getKey(), "key2"); Assert.assertEquals(new String(message4.getData()), ""); Message<byte[]> message5 = consumer.receive(); Assert.assertEquals(message5.getKey(), "key4"); Assert.assertEquals(new String(message5.getData()), "my-message-4"); } }