List of usage examples for javafx.util.Pair#getValue()
public V getValue()
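Before the project-specific examples below, here is a minimal, self-contained sketch of getValue() on a javafx.util.Pair; the key and value used here are made up purely for illustration:

import javafx.util.Pair;

public class PairGetValueDemo {
    public static void main(String[] args) {
        // Pair is immutable: key and value are fixed at construction time.
        Pair<String, Integer> entry = new Pair<>("answer", 42);

        // getKey() returns the first component, getValue() the second.
        String key = entry.getKey();      // "answer"
        Integer value = entry.getValue(); // 42

        System.out.println(key + " = " + value);
    }
}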
From source file:it.matjaz.jnumerus.RomanCharMapFactory.java
/**
 * Constructs a HashMap with 13 entries <integer value, equivalent roman
 * character>.
 * <p>
 * The Map's values are roman characters stored as Strings and they are
 * referenced by their integer values. Those entries are used for
 * conversions of whole Strings of roman numerals. The Map is
 * {@link Collections#unmodifiableMap(java.util.Map) unmodifiable}.
 * <p>
 * The map returned by this method is the exact opposite of the one returned
 * by {@link #generateCharMap() } (keys and values are switched). The Map is
 * filled by inserting values from the {@link #generateCharPairsArray() }.
 * <p>
 * The following table offers a representation of the content of the
 * generated structure:
 *
 * <pre>
 * key  val
 * int  rom
 * --------
 * 1000 M
 *  900 CM
 *  500 D
 *  400 CD
 *  100 C
 *   90 XC
 *   50 L
 *   40 XL
 *   10 X
 *    9 IX
 *    5 V
 *    4 IV
 *    1 I
 * </pre>
 *
 * @return HashMap<Integer, String> containing roman characters as
 *         values and the respective integers as keys.
 */
public static Map<Integer, String> generateInverseCharMap() {
    Map<Integer, String> inverseCharMap = new HashMap();
    for (Pair charAndValue : generateCharPairsArray()) {
        inverseCharMap.put((Integer) charAndValue.getValue(), (String) charAndValue.getKey());
    }
    return Collections.unmodifiableMap(inverseCharMap);
}
From source file:it.matjaz.jnumerus.RomanCharMapFactory.java
/**
 * Constructs an unmodifiable HashMap with 13 entries <roman character,
 * its integer value>.
 * <p>
 * The Map's keys are roman characters stored as Strings and they point to
 * their integer values. Those entries are used for conversions of whole
 * Strings of roman numerals. The Map is
 * {@link Collections#unmodifiableMap(java.util.Map) unmodifiable}.
 * <p>
 * The map returned by this method is the exact opposite of the one returned
 * by {@link #generateInverseCharMap() } (keys and values are switched). The
 * Map is filled by inserting values from the
 * {@link #generateCharPairsArray() }.
 * <p>
 * The following table offers a representation of the content of the
 * generated structure:
 *
 * <pre>
 * key  val
 * rom  int
 * --------
 * M   1000
 * CM   900
 * D    500
 * CD   400
 * C    100
 * XC    90
 * L     50
 * XL    40
 * X     10
 * IX     9
 * V      5
 * IV     4
 * I      1
 * </pre>
 *
 * @return HashMap<String, Integer> containing roman characters as
 *         keys and the respective integer values.
 */
public static Map<String, Integer> generateCharMap() {
    Map<String, Integer> charMap = new HashMap();
    for (Pair charAndValue : generateCharPairsArray()) {
        charMap.put((String) charAndValue.getKey(), (Integer) charAndValue.getValue());
    }
    return Collections.unmodifiableMap(charMap);
}
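Both RomanCharMapFactory examples above iterate over generateCharPairsArray(), which is not reproduced on this page. A hypothetical reconstruction of that helper, inferred only from the tables in the Javadoc above (the actual method in the original source may differ), could look like this:

// Hypothetical sketch of generateCharPairsArray(), inferred from the documented
// tables; assumes javafx.util.Pair is imported in the enclosing class.
public static Pair[] generateCharPairsArray() {
    return new Pair[] {
        new Pair<>("M", 1000), new Pair<>("CM", 900), new Pair<>("D", 500),
        new Pair<>("CD", 400), new Pair<>("C", 100), new Pair<>("XC", 90),
        new Pair<>("L", 50), new Pair<>("XL", 40), new Pair<>("X", 10),
        new Pair<>("IX", 9), new Pair<>("V", 5), new Pair<>("IV", 4),
        new Pair<>("I", 1)
    };
}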
From source file:org.mskcc.shenkers.control.alignment.ChainParserNGTest.java
public static LocalAlignment trim(LocalAlignment blocks, GenomeSpan query_i, GenomeSpan target_i) {
    List<Pair<Integer, Integer>> fromBlocks = new ArrayList<>();
    List<Pair<Integer, Integer>> toBlocks = new ArrayList<>();
    for (int i = 0; i < blocks.fromBlocks.size(); i++) {
        Pair<Integer, Integer> fromBlock = blocks.fromBlocks.get(i);
        Pair<Integer, Integer> toBlock = blocks.toBlocks.get(i);
        assert IntervalTools.isContained(fromBlock.getKey(), fromBlock.getValue(), query_i.getStart(),
                query_i.getEnd()) : "it is assumed that all blocks in query will be contained in the query interval";
        // if this block overlaps it is either OK as is, or needs to be trimmed
        if (IntervalTools.overlaps(toBlock.getKey(), toBlock.getValue(), target_i.getStart(), target_i.getEnd())) {
            if (IntervalTools.isContained(toBlock.getKey(), toBlock.getValue(), target_i.getStart(),
                    target_i.getEnd())) {
                fromBlocks.add(fromBlock);
                toBlocks.add(toBlock);
            } else {
                // how far the block start precedes the target start
                int offsetTargetStart = toBlock.getKey() < target_i.getStart()
                        ? target_i.getStart() - toBlock.getKey() : 0;
                // how far the block end extends past the target end
                int offsetTargetEnd = toBlock.getValue() > target_i.getEnd()
                        ? toBlock.getValue() - target_i.getEnd() : 0;
                Pair<Integer, Integer> offsetToBlock = new Pair<>(toBlock.getKey() + offsetTargetStart,
                        toBlock.getValue() - offsetTargetEnd);
                Pair<Integer, Integer> offsetFromBlock = blocks.toNegativeStrand
                        ? new Pair<>(fromBlock.getKey() + offsetTargetEnd, fromBlock.getValue() - offsetTargetStart)
                        : new Pair<>(fromBlock.getKey() + offsetTargetStart, fromBlock.getValue() - offsetTargetEnd);
                fromBlocks.add(offsetFromBlock);
                toBlocks.add(offsetToBlock);
            }
        }
    }
    return new LocalAlignment(blocks.fromSequenceName, blocks.toSequenceName, blocks.toNegativeStrand,
            fromBlocks, toBlocks);
}
From source file:controllers.IndexServlet.java
private static void Convert(HttpServletRequest request, HttpServletResponse response, PrintWriter writer) {
    response.setContentType("text/plain");
    try {
        String fileName = request.getParameter("filename");
        String fileUri = DocumentManager.GetFileUri(fileName);
        String fileExt = FileUtility.GetFileExtension(fileName);
        FileType fileType = FileUtility.GetFileType(fileName);
        String internalFileExt = DocumentManager.GetInternalExtension(fileType);

        if (DocumentManager.GetConvertExts().contains(fileExt)) {
            String key = ServiceConverter.GenerateRevisionId(fileUri);
            Pair<Integer, String> res = ServiceConverter.GetConvertedUri(fileUri, fileExt, internalFileExt, key, true);
            int result = res.getKey();
            String newFileUri = res.getValue();

            if (result != 100) {
                writer.write("{ \"step\" : \"" + result + "\", \"filename\" : \"" + fileName + "\"}");
                return;
            }

            String correctName = DocumentManager
                    .GetCorrectName(FileUtility.GetFileNameWithoutExtension(fileName) + internalFileExt);

            URL url = new URL(newFileUri);
            java.net.HttpURLConnection connection = (java.net.HttpURLConnection) url.openConnection();
            InputStream stream = connection.getInputStream();

            if (stream == null) {
                throw new Exception("Stream is null");
            }

            File convertedFile = new File(DocumentManager.StoragePath(correctName, null));
            try (FileOutputStream out = new FileOutputStream(convertedFile)) {
                int read;
                final byte[] bytes = new byte[1024];
                while ((read = stream.read(bytes)) != -1) {
                    out.write(bytes, 0, read);
                }
                out.flush();
            }

            connection.disconnect();

            //remove source file ?
            //File sourceFile = new File(DocumentManager.StoragePath(fileName, null));
            //sourceFile.delete();

            fileName = correctName;
        }

        writer.write("{ \"filename\" : \"" + fileName + "\"}");
    } catch (Exception ex) {
        writer.write("{ \"error\": \"" + ex.getMessage() + "\"}");
    }
}
From source file:org.mskcc.shenkers.control.alignment.AlignmentWeaver.java
public static void weave(Map<Pair<Genome, Genome>, LocalAlignment> alignments) {
    Set<Genome> incorporated = new HashSet<>();
    Map<Genome, Integer> rowOrder = new HashMap<>();
    Set<Pair<Genome, Genome>> remainder = new HashSet<>(alignments.keySet());
    while (!remainder.isEmpty()) {
        Iterator<Pair<Genome, Genome>> it = remainder.iterator();
        while (it.hasNext()) {
            Pair<Genome, Genome> p = it.next();
            if (incorporated.size() == 0 || incorporated.contains(p.getKey())
                    || incorporated.contains(p.getValue())) {
                it.remove();
                incorporated.add(p.getKey());
                incorporated.add(p.getValue());
                logger.info("incorporating pair {} {}", p.getKey().getId(), p.getValue().getId());
            }
        }
    }
}
From source file:DatasetCreation.DatasetCSVBuilder.java
/**
 * Builds a CSV string containing the list of selected features
 *
 * @param selectedFeatures ArrayList of selected features
 * @param featuresDocumentFrequencies all features document frequencies
 *        (Benign, Malicious)
 * @return StringBuilder
 */
public static StringBuilder GetSelectedFeaturesCSV(ArrayList<Pair<String, Integer>> selectedFeatures,
        Map<String, int[]> featuresDocumentFrequencies) {
    StringBuilder sb = new StringBuilder();
    sb.append("#,Feature,InBenignFiles,InMaliciousFiles,Total\n");
    Pair pair;
    String feature;
    int[] benignMaliciousFrequencies;
    for (int i = 0; i < selectedFeatures.size(); i++) {
        pair = selectedFeatures.get(i);
        feature = pair.getKey().toString();
        benignMaliciousFrequencies = featuresDocumentFrequencies.get(feature);
        sb.append(String.format("f%s,%s,%s,%s,%s", i + 1, feature, benignMaliciousFrequencies[0],
                benignMaliciousFrequencies[1], pair.getValue())).append("\n");
    }
    return sb;
}
From source file:com.flipkart.flux.impl.task.TaskExecutor.java
/**
 * The HystrixCommand run method. Executes the Task and returns the result or throws an exception
 * in case of a {@link FluxError}
 * @see com.netflix.hystrix.HystrixCommand#run()
 */
protected Event run() throws Exception {
    Pair<Object, FluxError> result = this.task.execute(events);
    if (result.getValue() != null) {
        throw result.getValue();
    }
    final Object returnObject = result.getKey();
    if (returnObject != null) {
        return new Event(outputeEventName, returnObject.getClass().getCanonicalName(),
                Event.EventStatus.triggered, stateMachineId, objectMapper.writeValueAsString(returnObject),
                MANAGED_RUNTIME);
    }
    return null;
}
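This Flux example relies on a result-or-error convention: the Pair's key carries the successful result and its value carries the failure, so a non-null getValue() signals an error. A small self-contained sketch of the same convention, with FluxError and the other Flux classes replaced by standard types since they are not shown here, might look like:

import javafx.util.Pair;

public class ResultOrError {

    // Key carries the successful result, value carries the failure; only one is non-null.
    static Pair<Integer, Exception> parseQuantity(String raw) {
        try {
            return new Pair<>(Integer.parseInt(raw), null);
        } catch (NumberFormatException e) {
            return new Pair<>(null, e);
        }
    }

    public static void main(String[] args) {
        Pair<Integer, Exception> result = parseQuantity("42x");
        if (result.getValue() != null) {
            // Same check as in TaskExecutor.run(): a non-null value means the call failed.
            System.out.println("failed: " + result.getValue().getMessage());
        } else {
            System.out.println("parsed: " + result.getKey());
        }
    }
}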
From source file:br.edu.unifei.rmss.communities.agglomerative.AgglomerativeCommunityDetector.java
@Override
public void compute() {
    // Initialization: start with each vertex in its own partition
    int part = 1;
    for (Vertex v : network.getAllVertex()) {
        v.setPartition(part);
        network.updateVertex(v);
        part++;
    }

    // Iterate
    while (true) {
        // If the number of partitions is 2, stop
        if (network.getNumberOfPartitions() == 2) {
            break;
        }

        // Find partitions to merge
        Pair<Integer, Integer> mPart = findPartitionToMerge();
        int part1 = mPart.getKey();
        int part2 = mPart.getValue();

        // Merge partitions part1 and part2
        if ((part1 != -1) && (part2 != -1)) {
            for (Vertex v : network.getPartitionVertex(part2)) {
                v.setPartition(part1);
                network.updateVertex(v);
            }
        }
    }
}
From source file:org.mskcc.shenkers.view.StackedIntervalView.java
public void setData(List<Pair<Integer, Integer>> intervals) {
    Collections.sort(intervals, new Comparator<Pair<Integer, Integer>>() {
        @Override
        public int compare(Pair<Integer, Integer> o1, Pair<Integer, Integer> o2) {
            return o1.getKey() - o2.getKey();
        }
    });

    // Pair<Integer, Integer> r = intervals.get(0);
    for (Pair<Integer, Integer> interval : intervals) {
        Range<Integer> range = Range.closed(interval.getKey(), interval.getValue());
        int i = 0;
        added: {
            // add the interval to the first row that it doesn't intersect with
            while (i < rows.size()) {
                TreeRangeSet<Integer> set = rows.get(i);
                RangeSet<Integer> intersection = set.subRangeSet(
                        Range.closed(interval.getKey() - minSpace, interval.getValue() + minSpace));
                if (intersection.isEmpty()) {
                    set.add(range);
                    break added;
                }
                i++;
            }
            TreeRangeSet<Integer> row = TreeRangeSet.create();
            row.add(range);
            rows.add(row);
        }
    }

    List<Node> content = new ArrayList<>();
    for (RangeSet<Integer> row : rows) {
        RangeSetIntervalView rowView = new RangeSetIntervalView(min, max);
        rowView.setData(row);
        content.add(rowView);
    }
    getChildren().setAll(content);
}
From source file:com.walmart.gatling.commons.ReportExecutor.java
private void runJob(Master.GenerateReport job) {
    TaskEvent taskEvent = job.reportJob.taskEvent;

    CommandLine cmdLine = new CommandLine(agentConfig.getJob().getCommand());
    Map<String, Object> map = new HashMap<>();
    map.put("path", new File(agentConfig.getJob().getJobArtifact(taskEvent.getJobName())));
    cmdLine.addArgument("${path}");

    // parameters come from the task event
    for (Pair<String, String> pair : taskEvent.getParameters()) {
        cmdLine.addArgument(pair.getValue());
    }

    String dir = agentConfig.getJob().getLogDirectory() + "reports/" + job.reportJob.trackingId + "/";
    cmdLine.addArgument(dir);
    cmdLine.setSubstitutionMap(map);

    DefaultExecutor executor = new DefaultExecutor();
    executor.setExitValues(agentConfig.getJob().getExitValues());
    ExecuteWatchdog watchdog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT);
    executor.setWatchdog(watchdog);
    executor.setWorkingDirectory(new File(agentConfig.getJob().getPath()));

    FileOutputStream outFile = null;
    FileOutputStream errorFile = null;
    try {
        List<String> resultFiles = new ArrayList<>(job.results.size());
        // download all result files
        /*int i=0;
        for (Worker.Result result : job.results) {
            String destFile = dir + i++ + ".log";
            resultFiles.add(destFile);
            DownloadFile.downloadFile(result.metrics, destFile);
        }*/
        AtomicInteger index = new AtomicInteger();
        job.results.parallelStream().forEach(result -> {
            String destFile = dir + index.incrementAndGet() + ".log";
            resultFiles.add(destFile);
            DownloadFile.downloadFile(result.metrics, destFile);
        });

        String outPath = agentConfig.getJob().getOutPath(taskEvent.getJobName(), job.reportJob.trackingId);
        String errPath = agentConfig.getJob().getErrorPath(taskEvent.getJobName(), job.reportJob.trackingId);
        // create the std and err files
        outFile = FileUtils.openOutputStream(new File(outPath));
        errorFile = FileUtils.openOutputStream(new File(errPath));

        PumpStreamHandler psh = new PumpStreamHandler(new ExecLogHandler(outFile), new ExecLogHandler(errorFile));
        executor.setStreamHandler(psh);
        System.out.println(cmdLine);
        int exitResult = executor.execute(cmdLine);

        ReportResult result;
        if (executor.isFailure(exitResult)) {
            result = new ReportResult(dir, job.reportJob, false);
            log.info("Report Executor Failed, result: " + job.toString());
        } else {
            result = new ReportResult(job.reportJob.getHtml(), job.reportJob, true);
            log.info("Report Executor Completed, result: " + result.toString());
        }

        for (String resultFile : resultFiles) {
            FileUtils.deleteQuietly(new File(resultFile));
        }
        getSender().tell(result, getSelf());
    } catch (IOException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(outFile);
        IOUtils.closeQuietly(errorFile);
    }
}