List of usage examples for java.util.ArrayList.stream()
default Stream<E> stream()
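Before the collected examples, a minimal self-contained sketch of the method itself: stream(), which ArrayList inherits as a default method from Collection, returns a sequential Stream over the list's elements in encounter order. The class name and data below are invented for illustration.

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

public class StreamDemo {
    public static void main(String[] args) {
        ArrayList<String> words = new ArrayList<>(List.of("alpha", "beta", "gamma"));
        // stream() yields the elements in list order; this pipeline keeps
        // the words longer than four characters and upper-cases them
        List<String> result = words.stream()
                .filter(w -> w.length() > 4)
                .map(String::toUpperCase)
                .collect(Collectors.toList());
        System.out.println(result); // prints [ALPHA, GAMMA]
    }
}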
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void trainAndTest(String trainDir, String testDir)
        throws IOException, FileNotFoundException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    SBURoleTrain trainer = new SBURoleTrain(trainDir.concat("/train.ser"), isMultiClass);
    ArrayList<Sentence> trainData = (ArrayList<Sentence>) FileUtil
            .deserializeFromFile(trainDir.concat("/train.ser"));
    if (isMultiClass) {
        trainer.trainMultiClassClassifier(trainDir);
    } else {
        trainer.trainBinaryClassifier(trainDir);
    }
    FileUtil.serializeToFile(trainData, trainDir.concat("/train.ser"));

    SBURolePredict predict = new SBURolePredict(trainDir, testDir.concat("/test.arggold.ser"), isMultiClass);
    predict.performPrediction(testDir.concat("/test.arggold.ser"));
    ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
            .deserializeFromFile(testDir.concat("/test.argpredict.ser"));
    Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
            .collect(Collectors.groupingBy(Sentence::getProcessName));
    ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
    SentenceUtil.flushDataToJSON(jsonData, testDir.concat("/test.srlout.json"), false);
    SentenceUtil.flushDataToJSON(jsonData, testDir.concat("/test.srlpredict.json"), true);
}
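The stream() call above exists to feed Collectors.groupingBy, bucketing the predicted sentences by process name into a Map. The same pattern in isolation, with a hypothetical record standing in for the Sentence class:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupingDemo {
    // hypothetical stand-in for the Sentence class used in the example above
    record Sentence(String processName, String text) {
        String getProcessName() { return processName; }
    }

    public static void main(String[] args) {
        ArrayList<Sentence> sentences = new ArrayList<>(List.of(
                new Sentence("photosynthesis", "Light is absorbed."),
                new Sentence("photosynthesis", "Water is split."),
                new Sentence("erosion", "Rock is worn away.")));
        // group the list elements by a key extractor, just as the example does
        Map<String, List<Sentence>> byProcess = sentences.stream()
                .collect(Collectors.groupingBy(Sentence::getProcessName));
        System.out.println(byProcess.get("photosynthesis").size()); // prints 2
    }
}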
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void knowledgeExtractor()
        throws IOException, FileNotFoundException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    boolean dirCreated = FileUtil.mkDir(outputDir);
    dirCreated = FileUtil.mkDir(outputDir.concat("/train"));
    dirCreated = FileUtil.mkDir(outputDir.concat("/test"));
    if (dirCreated) { // this is not a good check, leave it for now
        // TRAINING
        sentences = (ArrayList<Sentence>) sentences.stream().filter(data -> data.isAnnotated())
                .collect(Collectors.toList());
        FileUtil.serializeToFile(sentences, outputDir.concat("/train/train.ser"));
        SBURoleTrain trainer = new SBURoleTrain(outputDir.concat("/train/train.ser"), isMultiClass);
        trainer.train(outputDir.concat("/train"));
        FileUtil.serializeToFile(sentences, outputDir.concat("/train/train.ser"));

        // Read the knowledge sentences using the SPOCK data reader
        SpockDataReader reader = new SpockDataReader(testingFileName, configFileName, true); // process, config, is testing
        reader.readData();
        ArrayList<Sentence> testSentences = reader.getSentences();
        FileUtil.serializeToFile(testSentences, outputDir.concat("/test/test.ser"));

        SBURolePredict predict = new SBURolePredict(outputDir.concat("/train"),
                outputDir.concat("/test/test.ser"), isMultiClass);
        predict.knownAnnotation = false;
        predict.performPrediction(outputDir.concat("/test/test.ser"));
        ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
                .deserializeFromFile(outputDir.concat("/test/predict.ser"));
        Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
                .collect(Collectors.groupingBy(Sentence::getProcessName));
        ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
        SentenceUtil.flushDataToJSON(jsonData, outputDir.concat("/test/srlpredict.json"), true);
    }
}
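One caveat about the TRAINING step above: Collectors.toList() does not guarantee any particular List implementation, so the cast to ArrayList relies on an implementation detail of the JDK. If an ArrayList is genuinely required, Collectors.toCollection(ArrayList::new) avoids the cast entirely; a small sketch with invented data:

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

public class FilterDemo {
    public static void main(String[] args) {
        ArrayList<Integer> scores = new ArrayList<>(List.of(40, 75, 90, 20));
        // toCollection(ArrayList::new) pins the result type, so no unchecked cast is needed
        ArrayList<Integer> passing = scores.stream()
                .filter(s -> s >= 50)
                .collect(Collectors.toCollection(ArrayList::new));
        System.out.println(passing); // prints [75, 90]
    }
}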
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void doTrainClassify(double trainPctg)
        throws IOException, FileNotFoundException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    setupCrossValidationEnvironment(outputDir, 1);
    Collections.shuffle(sentences, new Random(System.nanoTime()));
    int startIdx = 0;
    int nbTrain = (int) (trainPctg * sentences.size());
    ArrayList<Sentence> trainingData = new ArrayList<>();
    ArrayList<Sentence> testingData = new ArrayList<>();
    trainingData.addAll(sentences.subList(0, nbTrain));
    testingData.addAll(sentences.subList(nbTrain, sentences.size()));
    FileUtil.serializeToFile(trainingData, outputDir.concat("/fold-1").concat("/train/train.ser"));
    FileUtil.serializeToFile(testingData, outputDir.concat("/fold-1").concat("/test/test.arggold.ser"));

    File trainFoldDir = new File(outputDir.concat("/fold-1").concat("/train"));
    File testFoldDir = new File(outputDir.concat("/fold-1").concat("/test"));
    SBURoleTrain trainer = new SBURoleTrain(trainFoldDir.getAbsolutePath().concat("/train.ser"), isMultiClass);
    if (isMultiClass) {
        trainer.trainMultiClassClassifier(trainFoldDir.getAbsolutePath());
    } else {
        trainer.trainBinaryClassifier(trainFoldDir.getAbsolutePath());
    }
    FileUtil.serializeToFile(trainingData, outputDir.concat("/fold-1").concat("/train/train.ser"));

    SBURolePredict predict = new SBURolePredict(trainFoldDir.getAbsolutePath(),
            testFoldDir.getAbsolutePath().concat("/test.arggold.ser"), isMultiClass);
    predict.performPrediction(testFoldDir.getAbsolutePath().concat("/test.arggold.ser"));
    ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
            .deserializeFromFile(testFoldDir.getAbsolutePath().concat("/test.argpredict.ser"));
    Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
            .collect(Collectors.groupingBy(Sentence::getProcessName));
    ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
    SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlout.json"), false);
    SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlpredict.json"), true);
}
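The train/test split above is done with subList; the same split can also be expressed with the stream() operations limit and skip. A sketch with invented data (subList is the cheaper choice here, since it avoids pushing every element through a pipeline):

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

public class SplitDemo {
    public static void main(String[] args) {
        ArrayList<Integer> data = new ArrayList<>(List.of(10, 20, 30, 40, 50));
        int nbTrain = (int) (0.6 * data.size()); // 3
        // limit(n) keeps the first n elements; skip(n) drops them
        List<Integer> train = data.stream().limit(nbTrain).collect(Collectors.toList());
        List<Integer> test = data.stream().skip(nbTrain).collect(Collectors.toList());
        System.out.println(train + " / " + test); // prints [10, 20, 30] / [40, 50]
    }
}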
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void performCrossValidation(String outputDir, int crossValidation)
        throws IOException, FileNotFoundException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    for (int i = 1; i <= crossValidation; i++) {
        File trainFoldDir = new File(outputDir.concat("/fold-").concat("" + i).concat("/train"));
        File testFoldDir = new File(outputDir.concat("/fold-").concat("" + i).concat("/test"));
        SBURoleTrain trainer = new SBURoleTrain(trainFoldDir.getAbsolutePath().concat("/train.ser"), isMultiClass);
        trainer.train(trainFoldDir.getAbsolutePath());
        SBURolePredict predict = new SBURolePredict(trainFoldDir.getAbsolutePath(),
                testFoldDir.getAbsolutePath().concat("/test.arggold.ser"), isMultiClass);
        predict.performPrediction(testFoldDir.getAbsolutePath().concat("/test.arggold.ser"));
        ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
                .deserializeFromFile(testFoldDir.getAbsolutePath().concat("/test.argpredict.ser"));
        Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
                .collect(Collectors.groupingBy(Sentence::getProcessName));
        ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
        SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlout.json"), false);
        SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlpredict.json"), true);
        SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.ilppredict.json"), true);
        SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.semaforpredict.json"), true);
        SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.easysrlpredict.json"), true);
        /*predict.performPredictionEasySRL(testFoldDir.getAbsolutePath().concat("/test.arggold.ser"),
                outputDir.concat("/fold-" + i).concat("/test/cv." + i + ".test.sentence.sbu"),
                outputDir.concat("/fold-" + i).concat("/test/cv." + i + ".raw.predict.easysrl"),
                "./data/modelCCG", outputDir.concat("/fold-" + i));
        predictedSentences = (ArrayList<Sentence>) FileUtil.deserializeFromFile(testFoldDir.getAbsolutePath().concat("/test.argeasysrlpredict.ser"));
        groupByProcess = predictedSentences.stream().collect(Collectors.groupingBy(Sentence::getProcessName));
        jsonData = SentenceUtil.generateJSONData(groupByProcess);*/
        SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.easysrlpredict.json"), true);
    }
}
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void performAblation(String outputDir, int crossValidation)
        throws IOException, FileNotFoundException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException, InterruptedException {
    ArrayList<String> triedFeatures = new ArrayList<String>(
            Arrays.asList(FileUtil.readLinesFromFile("./configSBUProcRel/features.ori")));
    List<String> ablationFeatures = getAblationFeatures("./configSBUProcRel/features.ablation");
    for (int idxAblation = 0; idxAblation < ablationFeatures.size(); idxAblation++) {
        System.out.println("Removing features : " + ablationFeatures.get(idxAblation));
        Thread.sleep(3000);
        List<String> removedFeatures = Arrays.asList(ablationFeatures.get(idxAblation).split(","));
        triedFeatures.removeAll(removedFeatures);
        FileUtil.dumpToFile(triedFeatures, "./configSBUProcRel/features");
        for (int idxFold = 1; idxFold <= crossValidation; idxFold++) {
            File trainFoldDir = new File(outputDir.concat("/fold-").concat("" + idxFold).concat("/train"));
            File testFoldDir = new File(outputDir.concat("/fold-").concat("" + idxFold).concat("/test"));
            SBURoleTrain trainer = new SBURoleTrain(trainFoldDir.getAbsolutePath().concat("/train.ser"), isMultiClass);
            trainer.train(trainFoldDir.getAbsolutePath());
            SBURolePredict predict = new SBURolePredict(trainFoldDir.getAbsolutePath(),
                    testFoldDir.getAbsolutePath().concat("/test.arggold.ser"), isMultiClass);
            predict.performPrediction(testFoldDir.getAbsolutePath().concat("/test.arggold.ser"));
            ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
                    .deserializeFromFile(testFoldDir.getAbsolutePath().concat("/test.argpredict.ser"));
            Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
                    .collect(Collectors.groupingBy(Sentence::getProcessName));
            ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
            SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlout.json"), false);
            SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlpredict.json"), true);
            SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.ilppredict.json"), true); // dummy
            SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.semaforpredict.json"), true); // dummy
            SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.easysrlpredict.json"), true); // dummy
        }
        // copy all data to ILP's data folder
        // cp -r outputDir /home/slouvan/NetBeansProjects/ILP/data/
        try {
            ProcessBuilder pb = new ProcessBuilder(
                    "/home/slouvan/NetBeansProjects/SRL-Integrated/script/cpDir.sh", outputDir,
                    "/home/slouvan/NetBeansProjects/ILP/data/");
            //pb.environment().put("param1", )
            Process p = pb.start(); // Start the process.
            p.waitFor(); // Wait for the process to finish.
            StdUtil.printOutput(p);

            pb = new ProcessBuilder("/usr/bin/python", "/home/slouvan/NetBeansProjects/ILP/evaluate.py");
            p = pb.start(); // Start the process.
            p.waitFor(); // Wait for the process to finish.
            StdUtil.printOutput(p);
            System.out.println("Script executed successfully");
        } catch (Exception e) {
            e.printStackTrace();
        }
        String[] lines = FileUtil.readLinesFromFile("/home/slouvan/NetBeansProjects/ILP/stats.txt");
        PrintWriter out = new PrintWriter(
                new BufferedWriter(new FileWriter(GlobalV.PROJECT_DIR + "/ablationNew.txt", true)));
        out.println((new Date()).toString() + " Removed features " + removedFeatures);
        out.println("Eval : " + Arrays.toString(lines));
        out.close();
        triedFeatures.addAll(removedFeatures);
    }
}
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void performedFeatureAddition(String outputDir, int crossValidation)
        throws FileNotFoundException, IOException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException, InterruptedException {
    List<String> ablationFeatures = getAblationFeatures("./configSBUProcRel/features.ablation");
    ArrayList<String> stepwiseFeatures = new ArrayList<String>();
    for (int idxAblation = 0; idxAblation < ablationFeatures.size(); idxAblation++) {
        double maxF1 = Double.MIN_VALUE;
        ArrayList<String> currentBestFeat = new ArrayList<String>();
        String[] metricsBest = null;
        for (int idxFeat = 0; idxFeat < ablationFeatures.size(); idxFeat++) {
            Thread.sleep(3000);
            ArrayList<String> addedFeatures = new ArrayList<String>();
            addedFeatures.addAll(Arrays.asList(ablationFeatures.get(idxFeat).split(",")));
            //(ArrayList<String>) Arrays.asList(ablationFeatures.get(idxAblation).split(","));
            boolean triedFeatures = false;
            for (int i = 0; i < addedFeatures.size(); i++) {
                if (stepwiseFeatures.contains(addedFeatures.get(i))) {
                    triedFeatures = true;
                }
            }
            if (triedFeatures) {
                continue;
            }
            System.out.println("Adding features : " + ablationFeatures.get(idxFeat));
            stepwiseFeatures.addAll(addedFeatures);
            FileUtil.dumpToFile(stepwiseFeatures, "./configSBUProcRel/features");
            for (int idxFold = 1; idxFold <= crossValidation; idxFold++) {
                File trainFoldDir = new File(outputDir.concat("/fold-").concat("" + idxFold).concat("/train"));
                File testFoldDir = new File(outputDir.concat("/fold-").concat("" + idxFold).concat("/test"));
                SBURoleTrain trainer = new SBURoleTrain(trainFoldDir.getAbsolutePath().concat("/train.ser"), isMultiClass);
                trainer.train(trainFoldDir.getAbsolutePath());
                SBURolePredict predict = new SBURolePredict(trainFoldDir.getAbsolutePath(),
                        testFoldDir.getAbsolutePath().concat("/test.arggold.ser"), isMultiClass);
                predict.performPrediction(testFoldDir.getAbsolutePath().concat("/test.arggold.ser"));
                ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
                        .deserializeFromFile(testFoldDir.getAbsolutePath().concat("/test.argpredict.ser"));
                Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
                        .collect(Collectors.groupingBy(Sentence::getProcessName));
                ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlout.json"), false);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlpredict.json"), true);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.ilppredict.json"), true); // dummy
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.semaforpredict.json"), true); // dummy
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.easysrlpredict.json"), true); // dummy
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.fgpredict.json"), true); // dummy
            }
            // copy all data to ILP's data folder
            // cp -r outputDir /home/slouvan/NetBeansProjects/ILP/data/
            copyAndEval(outputDir);
            String[] lines = FileUtil.readLinesFromFile("/home/slouvan/NetBeansProjects/ILP/stats.txt");
            double currentF1 = Double.parseDouble(lines[0].split("\t")[2]);
            if (currentF1 > maxF1) {
                maxF1 = currentF1;
                currentBestFeat = addedFeatures;
                metricsBest = lines;
            }
            stepwiseFeatures.removeAll(addedFeatures);
        }
        PrintWriter out = new PrintWriter(
                new BufferedWriter(new FileWriter(GlobalV.PROJECT_DIR + "/additionNew.txt", true)));
        out.println((new Date()).toString() + " Best features at this stage is " + currentBestFeat);
        out.println("Eval : " + Arrays.toString(metricsBest));
        stepwiseFeatures.addAll(currentBestFeat);
        out.println("All current features :" + stepwiseFeatures);
        out.close();
    }
}
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void performGreedySearch(String outputDir, int crossValidation)
        throws FileNotFoundException, IOException, ClassNotFoundException, NoSuchMethodException,
        IllegalAccessException, IllegalArgumentException, InvocationTargetException, InterruptedException {
    // availFeatures = get all available features
    List<String> availableFeatures = new ArrayList<String>(
            Arrays.asList(FileUtil.readLinesFromFile("./configSBUProcRel/features")));
    int nbFeat = availableFeatures.size();
    ArrayList<String> triedFeatures = Lists.newArrayList();
    while (triedFeatures.size() < nbFeat) {
        double maxF1 = 0.0;
        String bestFeat = "";
        for (int i = 0; i < availableFeatures.size(); i++) {
            String nextFeat = availableFeatures.get(i);
            System.out.println("Trying with " + nextFeat);
            Thread.sleep(5000);
            triedFeatures.add(nextFeat);
            FileUtil.dumpToFile(triedFeatures, "./configSBUProcRel/features");
            for (int j = 1; j <= 1; j++) {
                File trainFoldDir = new File(outputDir.concat("/fold-").concat("" + j).concat("/train"));
                File testFoldDir = new File(outputDir.concat("/fold-").concat("" + j).concat("/test"));
                SBURoleTrain trainer = new SBURoleTrain(trainFoldDir.getAbsolutePath().concat("/train.ser"), isMultiClass);
                trainer.train(trainFoldDir.getAbsolutePath());
                SBURolePredict predict = new SBURolePredict(trainFoldDir.getAbsolutePath(),
                        testFoldDir.getAbsolutePath().concat("/test.arggold.ser"), isMultiClass);
                predict.performPrediction(testFoldDir.getAbsolutePath().concat("/test.arggold.ser"));
                ArrayList<Sentence> predictedSentences = (ArrayList<Sentence>) FileUtil
                        .deserializeFromFile(testFoldDir.getAbsolutePath().concat("/test.argpredict.ser"));
                Map<String, List<Sentence>> groupByProcess = predictedSentences.stream()
                        .collect(Collectors.groupingBy(Sentence::getProcessName));
                ArrayList<JSONData> jsonData = SentenceUtil.generateJSONData(groupByProcess);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlout.json"), false);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.srlpredict.json"), true);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.ilppredict.json"), true);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.semaforpredict.json"), true);
                SentenceUtil.flushDataToJSON(jsonData, testFoldDir.getAbsolutePath().concat("/test.easysrlpredict.json"), true);
            }
            // copy all data to ILP's data folder
            // cp -r outputDir /home/slouvan/NetBeansProjects/ILP/data/
            try {
                ProcessBuilder pb = new ProcessBuilder(
                        "/home/slouvan/NetBeansProjects/SRL-Integrated/script/cpDir.sh", outputDir,
                        "/home/slouvan/NetBeansProjects/ILP/data/");
                //pb.environment().put("param1", )
                Process p = pb.start(); // Start the process.
                p.waitFor(); // Wait for the process to finish.
                StdUtil.printOutput(p);

                pb = new ProcessBuilder("/usr/bin/python", "/home/slouvan/NetBeansProjects/ILP/evaluate.py");
                p = pb.start(); // Start the process.
                p.waitFor(); // Wait for the process to finish.
                StdUtil.printOutput(p);
                System.out.println("Script executed successfully");
            } catch (Exception e) {
                e.printStackTrace();
            }
            String[] lines = FileUtil.readLinesFromFile("/home/slouvan/NetBeansProjects/ILP/f1.txt");
            double currentF1 = Double.parseDouble(lines[0]);
            if (currentF1 > maxF1) {
                maxF1 = currentF1;
                bestFeat = nextFeat;
            }
            triedFeatures.remove(nextFeat);
        }
        triedFeatures.add(bestFeat);
        System.out.println("Features used : " + triedFeatures);
        System.out.println("Best feature at length " + triedFeatures.size() + " is " + bestFeat
                + " currentF1 : " + maxF1);
        availableFeatures.remove(bestFeat);
        PrintWriter out = new PrintWriter(
                new BufferedWriter(new FileWriter(GlobalV.PROJECT_DIR + "/ablation.txt", true)));
        out.println("Features used : " + triedFeatures);
        out.println((new Date()).toString() + " Best feature at length " + triedFeatures.size() + " is "
                + bestFeat + " currentF1 : " + maxF1);
        System.out.println("Tried features length : " + triedFeatures.size() + " NbFeat :" + nbFeat);
        out.close();
    }
    // for each feat from availFeat
    //   add nextFeat to triedFeat
    //   set the feature config file
    //   doCrossVal, output dummy semafor etc
    //   measure F1 {python here}, output to a file, read that file
    //   update max
    //   remove nextFeat
    // print best F1 here
    // add bestFeat to triedFeat
}
From source file:org.apache.sysml.hops.codegen.opt.ReachabilityGraph.java
private ArrayList<Pair<CutSet, Double>> evaluateCutSets(ArrayList<ArrayList<NodeLink>> candCS,
        ArrayList<ArrayList<NodeLink>> remain) {
    ArrayList<Pair<CutSet, Double>> cutSets = new ArrayList<>();
    for (ArrayList<NodeLink> cand : candCS) {
        HashSet<NodeLink> probe = new HashSet<>(cand);

        //determine subproblems for cutset candidates
        HashSet<NodeLink> part1 = new HashSet<>();
        rCollectInputs(_root, probe, part1);
        HashSet<NodeLink> part2 = new HashSet<>();
        for (NodeLink rNode : cand)
            rCollectInputs(rNode, probe, part2);

        //select, score and create cutsets
        if (!CollectionUtils.containsAny(part1, part2) && !part1.isEmpty() && !part2.isEmpty()) {
            //score cutsets (smaller is better)
            double base = UtilFunctions.pow(2, _matPoints.size());
            double numComb = UtilFunctions.pow(2, cand.size());
            double score = (numComb - 1) / numComb * base
                    + 1 / numComb * UtilFunctions.pow(2, part1.size())
                    + 1 / numComb * UtilFunctions.pow(2, part2.size());

            //construct cutset
            cutSets.add(Pair.of(new CutSet(
                    cand.stream().map(p -> p._p).toArray(InterestingPoint[]::new),
                    part1.stream().map(p -> p._p).toArray(InterestingPoint[]::new),
                    part2.stream().map(p -> p._p).toArray(InterestingPoint[]::new)), score));
        } else {
            remain.add(cand);
        }
    }
    return cutSets;
}
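The cutset construction above shows another common endpoint for stream(): toArray with an array-constructor reference produces a typed array rather than a collection. The same idiom reduced to invented types:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ToArrayDemo {
    public static void main(String[] args) {
        ArrayList<String> labels = new ArrayList<>(List.of("a", "bb", "ccc"));
        // map each element, then materialize a typed array via Integer[]::new
        Integer[] lengths = labels.stream()
                .map(String::length)
                .toArray(Integer[]::new);
        System.out.println(Arrays.toString(lengths)); // prints [1, 2, 3]
    }
}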
From source file:com.joyent.manta.client.multipart.EncryptedServerSideMultipartManagerIT.java
public final void canRetryUploadPart() throws IOException {
    final String name = UUID.randomUUID().toString();
    final String path = testPathPrefix + name;
    int part3Size = RandomUtils.nextInt(500, 1500);
    final byte[] content = RandomUtils.nextBytes((2 * FIVE_MB) + part3Size);
    final byte[] content1 = Arrays.copyOfRange(content, 0, FIVE_MB + 1);
    final byte[] content2 = Arrays.copyOfRange(content, FIVE_MB + 1, (2 * FIVE_MB) + 1);
    final byte[] content3 = Arrays.copyOfRange(content, (2 * FIVE_MB) + 1, (2 * FIVE_MB) + part3Size);

    EncryptedMultipartUpload<ServerSideMultipartUpload> upload = multipart.initiateUpload(path);
    ArrayList<MantaMultipartUploadTuple> parts = new ArrayList<>(3);

    Assert.assertThrows(IOException.class, () -> {
        // partial read of content1
        InputStream content1BadInputStream = new FailingInputStream(new ByteArrayInputStream(content1), 1024);
        multipart.uploadPart(upload, 1, content1BadInputStream);
    });
    parts.add(multipart.uploadPart(upload, 1, content1));
    parts.add(multipart.uploadPart(upload, 2, content2));
    Assert.assertThrows(IOException.class, () -> {
        // smaller partial read of content3
        InputStream content3BadInputStream = new FailingInputStream(new ByteArrayInputStream(content3), 512);
        multipart.uploadPart(upload, 3, content3BadInputStream);
    });
    parts.add(multipart.uploadPart(upload, 3, content3));
    multipart.complete(upload, parts.stream());

    // auto-close of MantaEncryptedObjectInputStream validates authentication
    try (final MantaObjectInputStream in = mantaClient.getAsInputStream(path);
            final ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        Assert.assertTrue(in instanceof MantaEncryptedObjectInputStream);
        IOUtils.copy(in, out);
        AssertJUnit.assertArrayEquals("Uploaded multipart data doesn't equal actual object data",
                content, out.toByteArray());
    }
}
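Note how the test passes parts.stream() straight into complete(...): this multipart API consumes a Stream argument rather than a collection. A sketch of that calling convention with hypothetical names; the one thing to remember is that a stream is single-use, so each consumer needs a fresh stream() call:

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

public class StreamArgumentDemo {
    // hypothetical consumer that, like complete(...) above, accepts a Stream parameter
    static long countParts(Stream<String> parts) {
        return parts.count(); // terminal operation; the stream is consumed here
    }

    public static void main(String[] args) {
        ArrayList<String> parts = new ArrayList<>(List.of("part-1", "part-2", "part-3"));
        System.out.println(countParts(parts.stream())); // prints 3
        System.out.println(countParts(parts.stream())); // a second consumer needs a new stream
    }
}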
From source file:com.wso2.code.quality.matrices.ChangesFinder.java
/**
 * Reads the blame received for the currently selected file and records the parent commits of the changed lines,
 * the relevant authors, and the relevant commit hashes used to look up the reviewers of those line ranges.
 *
 * @param rootJsonObject JSONObject containing blame information for the currently selected file
 * @param arrayListOfRelevantChangedLinesOfSelectedFile arraylist containing the changed line ranges of the currently selected file
 * @param gettingPr should be true when running this method to find the authors of the buggy lines being fixed by the patch
 * @param oldRange the line range in the old file to match against (null when finding parent commits)
 */
public void readBlameReceivedForAFile(JSONObject rootJsonObject,
        ArrayList<String> arrayListOfRelevantChangedLinesOfSelectedFile, boolean gettingPr, String oldRange) {
    //running an iterator over the fileName arraylist to get the location of the above saved file
    JSONObject dataJSONObject = (JSONObject) rootJsonObject.get(GITHUB_GRAPHQL_API_DATA_KEY_STRING);
    JSONObject repositoryJSONObect = (JSONObject) dataJSONObject.get(GITHUB_GRAPHQL_API_REPOSITORY_KEY_STRING);
    JSONObject objectJSONObject = (JSONObject) repositoryJSONObect.get(GITHUB_GRAPHQL_API_OBJECT_KEY_STRING);
    JSONObject blameJSONObject = (JSONObject) objectJSONObject.get(GITHUB_GRAPHQL_API_BLAME_KEY_STRING);
    JSONArray rangeJSONArray = (JSONArray) blameJSONObject.get(GITHUB_GRAPHQL_API_RANGES_KEY_STRING);

    //getting the starting line no of the range of lines that are modified by the patch
    // parallel streams are not used here as the order of the arraylist is important in the process
    arrayListOfRelevantChangedLinesOfSelectedFile.stream().forEach(lineRanges -> {
        int startingLineNo = 0;
        int endLineNo = 0;
        String oldFileRange = StringUtils.substringBefore(lineRanges, "/");
        String newFileRange = StringUtils.substringAfter(lineRanges, "/");
        // need to skip newly created files when taking the blame as they contain no previous commits
        if (!oldFileRange.equals("0,0")) {
            if (gettingPr && oldRange.equals(oldFileRange)) {
                // need to consider the line range in the old file for finding authors and reviewers
                startingLineNo = Integer.parseInt(StringUtils.substringBefore(oldFileRange, ","));
                endLineNo = Integer.parseInt(StringUtils.substringAfter(oldFileRange, ","));
            } else if (!gettingPr && oldRange == null) {
                // need to consider the line range in the new file resulting from applying the commit, for finding parent commits
                startingLineNo = Integer.parseInt(StringUtils.substringBefore(newFileRange, ","));
                endLineNo = Integer.parseInt(StringUtils.substringAfter(newFileRange, ","));
            } else {
                return; // skip to the next iteration if oldRange != oldFileRange when finding author names and commits for obtaining PRs
            }

            // a fresh mapForStoringAgeAndIndex map should be available for each line range to find the most recent change
            Map<Integer, ArrayList<Integer>> mapForStoringAgeAndIndex = new HashMap<Integer, ArrayList<Integer>>();
            //checking line by line by iterating the startingLineNo
            while (endLineNo >= startingLineNo) {
                // since the index value is required for later processing, a plain "for loop" is used for iteration instead of Java 8 features
                for (int i = 0; i < rangeJSONArray.length(); i++) {
                    JSONObject rangeJSONObject = (JSONObject) rangeJSONArray.get(i);
                    int tempStartingLineNo = (int) rangeJSONObject.get(GITHUB_GRAPHQL_API_STARTING_LINE_KEY_STRING);
                    int tempEndingLineNo = (int) rangeJSONObject.get(GITHUB_GRAPHQL_API_ENDING_LINE_KEY_STRING);
                    //checking whether the line belongs to that line range group
                    if ((tempStartingLineNo <= startingLineNo) && (tempEndingLineNo >= startingLineNo)) {
                        // the relevant startingLineNo belongs to this line range, in other words to this JSONObject
                        if (!gettingPr) {
                            int age = (int) rangeJSONObject.get(GITHUB_GRAPHQL_API_AGE_KEY_STRING);
                            // storing the age field with the relevant index of the JSONObject
                            mapForStoringAgeAndIndex.putIfAbsent(age, new ArrayList<Integer>());
                            if (!mapForStoringAgeAndIndex.get(age).contains(i)) {
                                // adding the index if it is not present in the array list for the relevant age
                                mapForStoringAgeAndIndex.get(age).add(i);
                            }
                        } else {
                            //for saving the author names of committers
                            JSONObject commitJSONObject = (JSONObject) rangeJSONObject.get(GITHUB_GRAPHQL_API_COMMIT_KEY_STRING);
                            JSONObject authorJSONObject = (JSONObject) commitJSONObject.get(GITHUB_GRAPHQL_API_AUTHOR_KEY_STRING);
                            String nameOfTheAuthor = (String) authorJSONObject.get(GITHUB_GRAPHQL_API_NAME_KEY_STRING);
                            authorNames.add(nameOfTheAuthor); // authors are added to the Set
                            String urlOfCommit = (String) commitJSONObject.get(GITHUB_GRAPHQL_API_URL_KEY_STRING);
                            String commitHashForPRReview = StringUtils.substringAfter(urlOfCommit, "commit/");
                            commitHashObtainedForPRReview.add(commitHashForPRReview);
                        }
                        break;
                    } else {
                        continue; // skip to the next JSONObject in the rangeJSONArray
                    }
                }
                startingLineNo++; // to check the other line numbers
            }

            //for the above line range, getting the latest commit which modified the lines
            if (!gettingPr) {
                //converting the map into a TreeMap to get it ordered
                TreeMap<Integer, ArrayList<Integer>> treeMap = new TreeMap<>(mapForStoringAgeAndIndex);
                int minimumKeyOfMapForStoringAgeAndIndex = treeMap.firstKey(); // getting the minimum key
                //getting the indexes of the JSONObjects which contain the most recent change within the relevant line range
                ArrayList<Integer> indexesOfJsonObjectForRecentCommit = mapForStoringAgeAndIndex
                        .get(minimumKeyOfMapForStoringAgeAndIndex);
                // the order of indexesOfJsonObjectForRecentCommit is not important as only the parent commit hashes are needed
                indexesOfJsonObjectForRecentCommit.parallelStream().forEach(index -> {
                    JSONObject rangeJSONObject = (JSONObject) rangeJSONArray.get(index);
                    JSONObject commitJSONObject = (JSONObject) rangeJSONObject.get(GITHUB_GRAPHQL_API_COMMIT_KEY_STRING);
                    JSONObject historyJSONObject = (JSONObject) commitJSONObject.get(GITHUB_GRAPHQL_API_HISTORY_KEY_STRING);
                    JSONArray edgesJSONArray = (JSONArray) historyJSONObject.get(GITHUB_GRAPHQL_API_EDGE_KEY_STRING);
                    //getting the second JSONObject from the array as it contains the commit of the parent which modified the above line range
                    JSONObject edgeJSONObject = (JSONObject) edgesJSONArray.get(1);
                    JSONObject nodeJSONObject = (JSONObject) edgeJSONObject.get(GITHUB_GRAPHQL_API_NODE_KEY_STRING);
                    String urlOfTheParentCommit = (String) nodeJSONObject.get(GITHUB_GRAPHQL_API_URL_KEY_STRING); // this contains the URL of the parent commit
                    String commitHash = StringUtils.substringAfter(urlOfTheParentCommit, "commit/");
                    // commitHashesOfTheParent.add(commitHash); // commit hashes of the parent for the selected file
                    commitHashesMapOfTheParent.putIfAbsent(oldFileRange, new HashSet<String>());
                    if (!commitHashesMapOfTheParent.get(oldFileRange).contains(commitHash)) {
                        commitHashesMapOfTheParent.get(oldFileRange).add(commitHash);
                    }
                });
            }
        }
    });
}
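The comments in this last example draw a distinction worth keeping: the outer traversal uses stream().forEach(...) because the line ranges must be visited in list order, while the inner hash collection uses parallelStream() where ordering is irrelevant. A small illustration of the difference, with made-up data; forEachOrdered restores encounter order even on a parallel stream:

import java.util.ArrayList;
import java.util.List;

public class OrderingDemo {
    public static void main(String[] args) {
        ArrayList<Integer> numbers = new ArrayList<>(List.of(1, 2, 3, 4, 5));
        // a sequential stream visits elements in encounter order
        numbers.stream().forEach(n -> System.out.print(n + " ")); // 1 2 3 4 5
        System.out.println();
        // a parallel stream may interleave with forEach; forEachOrdered preserves order
        numbers.parallelStream().forEachOrdered(n -> System.out.print(n + " ")); // 1 2 3 4 5
        System.out.println();
    }
}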