List of usage examples for java.util.ArrayList stream()
default Stream<E> stream()
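ArrayList inherits stream() from java.util.Collection; it returns a sequential Stream over the list's elements. As a quick orientation before the project examples below, here is a minimal, self-contained sketch (written for this page, not taken from any of the listed projects) of the stream() idioms those examples rely on: filter/collect, reduce, allMatch, and mapToInt.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class ArrayListStreamDemo {
        public static void main(String[] args) {
            ArrayList<String> keys = new ArrayList<>(Arrays.asList("hasColor", "hasSize", "id"));

            // filter + collect: keep only the keys starting with "has".
            List<String> hasKeys = keys.stream()
                    .filter(k -> k.startsWith("has"))
                    .collect(Collectors.toList());

            // reduce: concatenate the remaining keys into one String.
            String joined = hasKeys.stream().reduce("", String::concat);
            System.out.println(joined); // hasColorhasSize

            // allMatch: true only if every element passes the predicate.
            System.out.println(hasKeys.stream().allMatch(k -> k.startsWith("has"))); // true

            // mapToInt: convert to an IntStream, e.g. to sum element lengths.
            int totalLength = keys.stream().mapToInt(String::length).sum();
            System.out.println(totalLength); // 17
        }
    }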
From source file:com.ntua.cosmos.hackathonplanneraas.Planner.java
@Deprecated
private String getPOSTBody(JSONObject obj) {
    String body = "{}";
    StoredPaths pan = new StoredPaths();
    JSONObject returned = new JSONObject();
    ArrayList<String> names = new ArrayList<>();
    ArrayList<String> values = new ArrayList<>();
    String originalString = "";
    double similarity = 0.0, current = 0.0;
    if (!obj.isEmpty()) {
        Set keys = obj.keySet();
        Iterator iter = keys.iterator();
        while (iter.hasNext()) {
            String temporary = String.valueOf(iter.next());
            if (temporary.startsWith("has"))
                names.add(temporary);
        }
        names.stream().forEach((name) -> {
            values.add(String.valueOf(obj.get(name)));
        });
    }
    originalString = values.stream().map((value) -> value).reduce(originalString, String::concat);
    // Begin the initialisation process.
    OntModelSpec s = new OntModelSpec(PelletReasonerFactory.THE_SPEC);
    //s.setReasoner(PelletReasonerFactory.theInstance().create());
    OntDocumentManager dm = OntDocumentManager.getInstance();
    dm.setFileManager(FileManager.get());
    s.setDocumentManager(dm);
    OntModel m = ModelFactory.createOntologyModel(s, null);
    //OntModel m = ModelFactory.createOntologyModel( PelletReasonerFactory.THE_SPEC );
    InputStream in = FileManager.get().open(StoredPaths.casebasepath);
    if (in == null) {
        throw new IllegalArgumentException("File: " + StoredPaths.casebasepath + " not found");
    }
    // Read the file.
    m.read(in, null);
    // Begin building the query string.
    String queryString = pan.prefixrdf + pan.prefixowl + pan.prefixxsd + pan.prefixrdfs + pan.prefixCasePath;
    queryString += "\nSELECT DISTINCT ";
    for (int i = 0; i < names.size(); i++) {
        queryString += "?param" + i + " ";
    }
    queryString += "?message ?handle ?URI ?body WHERE {";
    for (int i = 0; i < names.size(); i++) {
        queryString += "?event base:" + names.get(i) + " ?param" + i + " . ";
    }
    queryString += "?event base:isSolvedBy ?solution . ?solution base:exposesMessage ?message . "
            + "?solution base:eventHandledBy ?handle . ?solution base:URI ?URI . ?solution base:hasPOSTBody ?body}";
    try {
        String testString = "";
        Query query = QueryFactory.create(queryString);
        //System.out.println(String.valueOf(query));
        QueryExecution qe = QueryExecutionFactory.create(query, m);
        ResultSet results = qe.execSelect();
        while (results.hasNext()) {
            QuerySolution soln = results.nextSolution();
            // Access variables: soln.get("x");
            Literal lit;
            for (int i = 0; i < names.size(); i++) {
                lit = soln.getLiteral("param" + i); // Get a result variable by name.
                String temporary = String.valueOf(lit).substring(0, String.valueOf(lit).indexOf("^^"));
                testString += temporary;
            }
            String longer = testString, shorter = originalString;
            if (testString.length() < originalString.length()) {
                // longer should always have the greater length
                longer = originalString;
                shorter = testString;
            }
            int longerLength = longer.length();
            current = (longerLength - StringUtils.getLevenshteinDistance(longer, shorter))
                    / (double) longerLength;
            if (similarity < current) {
                similarity = current;
                returned.clear();
                returned.put("message", soln.getLiteral("message").getString());
                returned.put("URI", soln.getLiteral("URI").getString());
                returned.put("handle", soln.getLiteral("handle").getString());
                body = soln.getLiteral("body").getString();
            }
        }
    } catch (Exception e) {
        System.out.println("Search is interrupted by an error.");
    }
    m.close();
    return body;
}
From source file:com.ntua.cosmos.hackathonplanneraas.Planner.java
@Deprecated
public JSONObject searchEventSolution(JSONObject obj) {
    StoredPaths pan = new StoredPaths();
    JSONObject returned = new JSONObject();
    long since = 0;
    long ts = 0;
    ArrayList<String> names = new ArrayList<>();
    ArrayList<String> values = new ArrayList<>();
    String originalString = "";
    double similarity = 0.0, current = 0.0;
    if (!obj.isEmpty()) {
        Set keys = obj.keySet();
        Iterator iter = keys.iterator();
        while (iter.hasNext()) {
            String temporary = String.valueOf(iter.next());
            if (temporary.startsWith("has"))
                names.add(temporary);
        }
        names.stream().forEach((name) -> {
            values.add(String.valueOf(obj.get(name)));
        });
    }
    originalString = values.stream().map((value) -> value).reduce(originalString, String::concat);
    // Begin the initialisation process.
    OntModelSpec s = new OntModelSpec(PelletReasonerFactory.THE_SPEC);
    OntDocumentManager dm = OntDocumentManager.getInstance();
    dm.setFileManager(FileManager.get());
    s.setDocumentManager(dm);
    OntModel m = ModelFactory.createOntologyModel(s, null);
    InputStream in = FileManager.get().open(StoredPaths.casebasepath);
    if (in == null) {
        throw new IllegalArgumentException("File: " + StoredPaths.casebasepath + " not found");
    }
    // Read the file.
    m.read(in, null);
    // Begin building the query string.
    String queryString = pan.prefixrdf + pan.prefixowl + pan.prefixxsd + pan.prefixrdfs + pan.prefixCasePath;
    queryString += "\nSELECT DISTINCT ";
    for (int i = 0; i < names.size(); i++) {
        queryString += "?param" + i + " ";
    }
    queryString += "?message ?handle ?URI WHERE {";
    for (int i = 0; i < names.size(); i++) {
        queryString += "?event base:" + names.get(i) + " ?param" + i + " . ";
    }
    queryString += "?event base:isSolvedBy ?solution . ?solution base:exposesMessage ?message . "
            + "?solution base:eventHandledBy ?handle . ?solution base:URI ?URI}";
    try {
        String testString = "";
        Query query = QueryFactory.create(queryString);
        QueryExecution qe = QueryExecutionFactory.create(query, m);
        ResultSet results = qe.execSelect();
        while (results.hasNext()) {
            QuerySolution soln = results.nextSolution();
            // Access variables: soln.get("x");
            Literal lit;
            for (int i = 0; i < names.size(); i++) {
                lit = soln.getLiteral("param" + i); // Get a result variable by name.
                String temporary = String.valueOf(lit).substring(0, String.valueOf(lit).indexOf("^^"));
                testString += temporary;
            }
            String longer = testString, shorter = originalString;
            if (testString.length() < originalString.length()) {
                // longer should always have the greater length
                longer = originalString;
                shorter = testString;
            }
            int longerLength = longer.length();
            System.out.println("Similarity between:" + originalString + " and " + testString + " is:");
            current = (longerLength - StringUtils.getLevenshteinDistance(longer, shorter))
                    / (double) longerLength;
            System.out.println(current + " out of 1.0.");
            if (similarity < current) {
                similarity = current;
                returned.clear();
                returned.put("message", soln.getLiteral("message").getString());
                returned.put("URI", soln.getLiteral("URI").getString());
                returned.put("handle", soln.getLiteral("handle").getString());
            }
        }
    } catch (Exception e) {
        System.out.println("Search is interrupted by an error.");
    }
    m.close();
    return returned;
}
From source file:sbu.srl.rolextract.ArgumentClassifier.java
public void distributeTrainTest()
        throws FileNotFoundException, IOException, InterruptedException, ClassNotFoundException {
    //sentences = (ArrayList<Sentence>) FileUtil.deserializeFromFile("./data/training_4_roles.ser");
    Map<String, List<Sentence>> processSentPair = sentences.stream()
            .collect(Collectors.groupingBy(s -> s.getProcessName()));
    int blockSize = 0;
    int currentFoldCnt = 1;
    Thread.sleep(10000);
    System.out.println("Total sentences : " + sentences.size());
    ArrayList<Sentence> trainingData = new ArrayList<Sentence>();
    ArrayList<Sentence> testingData = new ArrayList<Sentence>();
    HashMap<String, String> testProcessName = new HashMap<String, String>();
    HashMap<String, String> trainingProcessName = new HashMap<String, String>();
    for (String trainingProcess : processSentPair.keySet()) {
        if (testProcessName.get(trainingProcess) == null) {
            trainingData.addAll(processSentPair.get(trainingProcess));
            trainingProcessName.put(trainingProcess, trainingProcess);
        }
    }
    // Serialize training & testing processes.
    String trainingProcessesStr = Joiner.on("\t").join(trainingProcessName.keySet().iterator());
    FileUtil.dumpToFile(trainingProcessesStr,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train_process_name"));
    System.out.println("Nb Sentence in train" + trainingData.size());
    FileUtil.serializeToFile(trainingData,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train.ser"));
    // ===== SEMAFOR =====
    // Dump required data for SEMAFOR.
    SpockDataReader.generateSEMAFORFrameAnnotation(trainingData,
            outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/train/cv." + currentFoldCnt + ".train.sentences.frame.elements.sbu"),
            outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
            semOffset);
    // ===== TESTING =====
    SpockDataReader testDataReader = new SpockDataReader(testingFileName, configFileName, true);
    testDataReader.readData();
    ArrayList<Sentence> testingSentences = testDataReader.getSentences();
    //= (ArrayList<Sentence>) FileUtil.deserializeFromFile("/home/slouvan/NetBeansProjects/SRL-Integrated/thousand_sentences.ser");
    FileUtil.serializeToFile(testingSentences,
            "/home/slouvan/NetBeansProjects/SRL-Integrated/thousand_sentences.ser");
    Map<String, List<Sentence>> testProcessSentPair = testingSentences.stream()
            .collect(Collectors.groupingBy(s -> s.getProcessName()));
    for (String testingProcess : testProcessSentPair.keySet()) {
        testProcessName.put(testingProcess, testingProcess);
        testingData.addAll(testProcessSentPair.get(testingProcess));
    }
    String testingProcessessStr = Joiner.on("\t").join(testProcessName.keySet().iterator());
    System.out.println("Nb Sentence in test" + testingData.size());
    FileUtil.dumpToFile(testingProcessessStr,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test_process_name"));
    SpockDataReader.dumpRawSentences(testingData, outputDir.concat("/fold-" + currentFoldCnt)
            .concat("/test/cv." + currentFoldCnt + ".test.sentence.sbu"));
    SpockDataReader.dumpSentenceLexTargetIdxs(testingData, outputDir.concat("/fold-" + currentFoldCnt)
            .concat("/test/cv." + currentFoldCnt + ".test.process.target"));
    // Execute ./runMalt.sh here.
    try {
        ProcessBuilder pb = new ProcessBuilder(MALT_PARSER_PATH,
                outputDir.concat("/fold-" + currentFoldCnt)
                        .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train"));
        //pb.environment().put("param1", )
        Process p = pb.start(); // Start the process.
        p.waitFor(); // Wait for the process to finish.
        StdUtil.printOutput(p);
        System.out.println("Script executed successfully");
        AllAnnotationsMergingWithoutNE.mergeAllAnnotations(
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tokenized"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/conll"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tmp"),
                outputDir.concat("/fold-" + currentFoldCnt)
                        .concat("/train/cv." + currentFoldCnt + ".train.sentences.all.lemma.tags.sbu"));
    } catch (Exception e) {
        e.printStackTrace();
    }
    // ===== END OF SEMAFOR =====
    FileUtil.serializeToFile(testingData,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test.arggold.ser"));
}
From source file:com.dtolabs.rundeck.core.authorization.RuleEvaluator.java
/**
 * Return true if all predicate tests on a certain resource entry evaluate to true.
 *
 * @param resource        the resource
 * @param stringPredicate a Function&lt;String, Predicate&lt;String&gt;&gt; to convert a String test value into a Predicate
 * @param key             the resource attribute key to check
 * @param test            the test to apply; can be a String, or a List of Strings if listpred is non-null
 * @param listpred        a Function&lt;List, Predicate&lt;String&gt;&gt; to convert a List of test values into a Predicate
 * @param sourceIdentity  identifies the rule source, used in error log messages
 */
boolean applyTest(final Map<String, String> resource,
        final Function<String, Predicate<String>> stringPredicate, final String key, final Object test,
        final Function<List, Predicate<String>> listpred, final String sourceIdentity) {
    final ArrayList<Predicate<String>> tests = new ArrayList<>();
    if (listpred != null && test instanceof List) {
        // must match all values
        tests.add(listpred.apply((List) test));
    } else if (test instanceof String) {
        // match single test
        tests.add(stringPredicate.apply((String) test));
    } else {
        // unexpected format, do not match
        if (test != null) {
            logger.error(sourceIdentity + ": cannot evaluate unexpected type: " + test.getClass().getName());
        } else {
            logger.error(sourceIdentity + ": cannot evaluate: null value for key `" + key + "`");
        }
        return false;
    }
    String value = resource.get(key);
    return tests.stream().allMatch(new Predicate<Predicate<String>>() {
        @Override
        public boolean test(final Predicate<String> pred) {
            return pred.test(value);
        }
    });
}
From source file:com.joyent.manta.client.multipart.EncryptedServerSideMultipartManagerIT.java
public final void canRetryCompleteInCaseOfErrorDuringFinalPartUpload() throws IOException {
    final String path = testPathPrefix + UUID.randomUUID().toString();
    final EncryptedMultipartUpload<ServerSideMultipartUpload> upload = multipart.initiateUpload(path);
    final ArrayList<MantaMultipartUploadTuple> parts = new ArrayList<>(1);
    final byte[] content = RandomUtils.nextBytes(FIVE_MB + RandomUtils.nextInt(1, 1500));

    // A single part which is larger than the minimum size is the simplest way
    // to trigger complete's finalization.
    parts.add(multipart.uploadPart(upload, 1, content));
    Assert.assertFalse(upload.getEncryptionState().isLastPartAuthWritten());

    // So this seems really silly, but it's the only way I can see of triggering an exception
    // within complete's attempt to write the final encryption bytes. It may seem stupid but
    // actually uncovered an error caused by refactoring.
    final EncryptedMultipartUpload<ServerSideMultipartUpload> uploadSpy = Mockito.spy(upload);
    Mockito.when(uploadSpy.getWrapped()).thenThrow(new RuntimeException("wat")).thenCallRealMethod();

    Assert.assertThrows(RuntimeException.class, () -> multipart.complete(uploadSpy, parts.stream()));
    multipart.complete(uploadSpy, parts.stream());

    // Auto-close of MantaEncryptedObjectInputStream validates authentication.
    try (final MantaObjectInputStream in = mantaClient.getAsInputStream(path);
            final ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        Assert.assertTrue(in instanceof MantaEncryptedObjectInputStream);
        IOUtils.copy(in, out);
        AssertJUnit.assertArrayEquals("Uploaded multipart data doesn't equal actual object data",
                content, out.toByteArray());
    }
}
From source file:Test.ScenarioBail.java
public ScenarioBail(density_tasks DensTask, spatial_concentration SpatConc, degree_of_modularity Mod,
        score_dispersion DispScore, sensors_dispersion DispSens, boolean random, int scenarioWidth) {
    // assignment
    this.DensTask = DensTask;
    this.DispScore = DispScore;
    this.DispSens = DispSens;
    this.Mod = Mod;
    this.SpatConc = SpatConc;
    switch (DensTask) {
    case high:
        this.number_of_bails = 30;
        break;
    case low:
        this.number_of_bails = 6;
        break;
    }
    if (SpatConc == spatial_concentration.high) {
        if (random == false) {
            if (number_of_bails == 30) {
                Collections.addAll(this.positions, this.position_30bails_50width1rnd);
            } else {
                Collections.addAll(this.positions, this.position_6bails_50width1rnd);
            }
        } else {
            int v1 = (int) (Math.random() * (scenarioWidth - 16));
            int v2 = (int) (Math.random() * (scenarioWidth - 16));
            v2 = v2 * scenarioWidth;
            ArrayList<Integer> concentrPoints = new ArrayList();
            for (int i = 0; i < 15; i++) {
                int k = v2 + scenarioWidth * i;
                for (int j = k + v1; j < k + v1 + 15; j++) {
                    concentrPoints.add(j);
                }
            }
            ArrayList<Integer> allPoints = new ArrayList();
            ArrayList<Integer> diffPoints = new ArrayList();
            for (int i = 0; i < scenarioWidth * scenarioWidth; i++) {
                allPoints.add(i);
            }
            diffPoints.addAll(allPoints);
            diffPoints.removeAll(concentrPoints);
            EnumeratedIntegerDistribution randomizer;
            Double probDiff = (double) 0 / diffPoints.size();
            Double probConc = (double) 1 / concentrPoints.size();
            double[] probVector = new double[allPoints.size()];
            for (int i = 0; i < allPoints.size(); i++) {
                if (diffPoints.contains(i)) {
                    probVector[i] = probDiff;
                } else {
                    probVector[i] = probConc;
                }
            }
            int[] allPointsarray = allPoints.stream().mapToInt(i -> i).toArray();
            EnumeratedIntegerDistribution randomizerPosition;
            randomizerPosition = new EnumeratedIntegerDistribution(allPointsarray, probVector);
            for (int u = 0; u < this.number_of_bails; u++) {
                this.positions.add(randomizerPosition.sample());
            }
        }
    } else {
        if (random == false) {
            if (number_of_bails == 30) {
                Collections.addAll(this.positions, this.position_30bails_50width1rnd);
            } else {
                Collections.addAll(this.positions, this.position_6bails_50width1rnd);
            }
        } else {
            for (int u = 0; u < this.number_of_bails; u++) {
                int v1 = (int) (Math.random() * (scenarioWidth * scenarioWidth - 1));
                this.positions.add(v1);
            }
        }
    }
    Double high;
    Double low;
    high = 1200.0;
    low = 300.0;
    /*
    if (Mod == degree_of_modularity.two || Mod == degree_of_modularity.oneComptwo) {
        high = 800.0;
        low = 200.0;
    } else {
        high = 1200.0;
        low = 300.0;
    }
    */
    if (DispScore == score_dispersion.high) {
        if (random == false) {
            if (this.number_of_bails == 30) {
                for (int u = 0; u < 6; u++) {
                    this.scores_30bails_50width1rnd[u] = this.scores_30bails_50width1rnd[u] + high;
                }
                Collections.addAll(this.scores, scores_30bails_50width1rnd);
            } else {
                Collections.addAll(this.scores, this.scores_6bails_50widthHDev);
            }
        } else {
            int numHigh = (int) ((int) this.number_of_bails * 0.2);
            int v = (int) (Math.random() * (this.number_of_bails - numHigh));
            ArrayList<Integer> bailsHigh = new ArrayList<>();
            Double sum = 0.0;
            for (int y = 0; y < this.number_of_bails; y++) {
                this.scores.add(Math.random() * low);
            }
            for (int i = v; i < v + numHigh; i++) {
                this.scores.add(i, Math.random() * low + high);
            }
        }
        adjustMean(this.scores, 30000.0);
    } else {
        if (random == false) {
            if (this.number_of_bails == 30) {
                Collections.addAll(this.scores, scores_30bails_50width1rnd);
            } else {
                Collections.addAll(this.scores, this.scores_6bails_50width);
            }
        } else {
            for (int j = 0; j < this.number_of_bails; j++) {
                this.scores.add(Math.random() * (high - low) + high / 2);
            }
        }
        adjustMean(this.scores, 30000.0);
    }
    ArrayList<String> sensors = new ArrayList();
    sensors.add("IR");
    switch (Mod) {
    case three:
        sensors.add("MW");
        sensors.add("AB");
        break;
    case two:
        sensors.add("MW");
        break;
    default:
        break;
    }
    int count = 0;
    if (DispSens == sensors_dispersion.high) {
        for (int j = 1; j <= sensors.size(); j++) {
            List<Set<String>> subset = getSubsets(sensors, j);
            EnumeratedIntegerDistribution randR;
            double[] probVec = new double[subset.size()];
            int[] mapSubset = new int[subset.size()];
            for (int t = 0; t < subset.size(); t++) {
                probVec[t] = (double) 1 / subset.size();
                mapSubset[t] = t;
            }
            randR = new EnumeratedIntegerDistribution(mapSubset, probVec);
            for (int y = 0; y < (int) this.number_of_bails / sensors.size(); y++) {
                Set<String> thisset = subset.get(randR.sample());
                ArrayList<String> thisarray = new ArrayList();
                thisarray.addAll(thisset);
                this.bail_sensor.put(count, thisarray);
                count++;
            }
        }
    } else {
        for (int j = 0; j < this.number_of_bails; j++) {
            this.bail_sensor.put(count, sensors);
            count++;
        }
    }
}
From source file:org.optaplanner.core.impl.domain.solution.descriptor.SolutionDescriptor.java
public void processAnnotations(DescriptorPolicy descriptorPolicy, ScoreDefinition deprecatedScoreDefinition,
        List<Class<?>> entityClassList) {
    processSolutionAnnotations(descriptorPolicy);
    ArrayList<Method> potentiallyOverwritingMethodList = new ArrayList<>();
    // Iterate inherited members too (unlike for EntityDescriptor where each one is declared)
    // to make sure each one is registered.
    for (Class<?> lineageClass : ConfigUtils.getAllAnnotatedLineageClasses(solutionClass,
            PlanningSolution.class)) {
        List<Member> memberList = ConfigUtils.getDeclaredMembers(lineageClass);
        for (Member member : memberList) {
            if (member instanceof Method && potentiallyOverwritingMethodList.stream()
                    .anyMatch(m -> member.getName().equals(m.getName()) // Short cut to discard negatives faster
                            && ReflectionHelper.isMethodOverwritten((Method) member, m.getDeclaringClass()))) {
                // Ignore member because it is an overwritten method.
                continue;
            }
            processValueRangeProviderAnnotation(descriptorPolicy, member);
            processFactEntityOrScoreAnnotation(descriptorPolicy, member, deprecatedScoreDefinition,
                    entityClassList);
        }
        potentiallyOverwritingMethodList
                .ensureCapacity(potentiallyOverwritingMethodList.size() + memberList.size());
        memberList.stream().filter(member -> member instanceof Method)
                .forEach(member -> potentiallyOverwritingMethodList.add((Method) member));
    }
    if (entityCollectionMemberAccessorMap.isEmpty() && entityMemberAccessorMap.isEmpty()) {
        throw new IllegalStateException(
                "The solutionClass (" + solutionClass + ") must have at least 1 member with a "
                        + PlanningEntityCollectionProperty.class.getSimpleName() + " annotation or a "
                        + PlanningEntityProperty.class.getSimpleName() + " annotation.");
    }
    if (Solution.class.isAssignableFrom(solutionClass)) {
        processLegacySolution(descriptorPolicy, deprecatedScoreDefinition);
        return;
    }
    // Do not check if problemFactCollectionMemberAccessorMap and problemFactMemberAccessorMap are empty
    // because they are only required for Drools score calculation.
    if (scoreMemberAccessor == null) {
        throw new IllegalStateException("The solutionClass (" + solutionClass + ") must have 1 member with a "
                + PlanningScore.class.getSimpleName() + " annotation.\n"
                + "Maybe add a getScore() method with a " + PlanningScore.class.getSimpleName()
                + " annotation.");
    }
}
From source file:org.apache.sysml.hops.codegen.opt.ReachabilityGraph.java
public ReachabilityGraph(PlanPartition part, CPlanMemoTable memo) {
    // Create repository of materialization points.
    _matPoints = new HashMap<>();
    for (InterestingPoint p : part.getMatPointsExt())
        _matPoints.put(Pair.of(p._fromHopID, p._toHopID), new NodeLink(p));

    // Create reachability graph.
    _root = new NodeLink(null);
    HashSet<VisitMarkCost> visited = new HashSet<>();
    for (Long hopID : part.getRoots()) {
        Hop rootHop = memo.getHopRefs().get(hopID);
        addInputNodeLinks(rootHop, _root, part, memo, visited);
    }

    // Create candidate cutsets.
    List<NodeLink> tmpCS = _matPoints.values().stream()
            .filter(p -> p._inputs.size() > 0 && p._p != null)
            .sorted().collect(Collectors.toList());

    // Short-cut for partitions without cutsets.
    if (tmpCS.isEmpty()) {
        _cutSets = new CutSet[0];
        // Sort materialization points in decreasing order of their sizes,
        // which can improve the pruning efficiency by skipping larger sub-spaces.
        _searchSpace = sortBySize(part.getMatPointsExt(), memo, false);
        return;
    }

    // Create composite cutsets.
    ArrayList<ArrayList<NodeLink>> candCS = new ArrayList<>();
    ArrayList<NodeLink> current = new ArrayList<>();
    for (NodeLink node : tmpCS) {
        if (current.isEmpty())
            current.add(node);
        else if (current.get(0).equals(node))
            current.add(node);
        else {
            candCS.add(current);
            current = new ArrayList<>();
            current.add(node);
        }
    }
    if (!current.isEmpty())
        candCS.add(current);

    // Evaluate cutsets (single, and duplicate pairs).
    ArrayList<ArrayList<NodeLink>> remain = new ArrayList<>();
    ArrayList<Pair<CutSet, Double>> cutSets = evaluateCutSets(candCS, remain);
    if (!remain.isEmpty() && remain.size() < 5) {
        // Second chance: pairs over the remaining candidates.
        ArrayList<ArrayList<NodeLink>> candCS2 = new ArrayList<>();
        for (int i = 0; i < remain.size() - 1; i++)
            for (int j = i + 1; j < remain.size(); j++) {
                ArrayList<NodeLink> tmp = new ArrayList<>();
                tmp.addAll(remain.get(i));
                tmp.addAll(remain.get(j));
                candCS2.add(tmp);
            }
        ArrayList<Pair<CutSet, Double>> cutSets2 = evaluateCutSets(candCS2, remain);
        // Ensure constructed cutsets are disjoint.
        HashSet<InterestingPoint> testDisjoint = new HashSet<>();
        for (Pair<CutSet, Double> cs : cutSets2) {
            if (!CollectionUtils.containsAny(testDisjoint, Arrays.asList(cs.getLeft().cut))) {
                cutSets.add(cs);
                CollectionUtils.addAll(testDisjoint, cs.getLeft().cut);
            }
        }
    }

    // Sort and linearize search space according to scores.
    _cutSets = cutSets.stream().sorted(Comparator.comparing(p -> p.getRight()))
            .map(p -> p.getLeft()).toArray(CutSet[]::new);

    // Create sorted order of materialization points
    // (cut sets in predetermined order, other points sorted by size).
    HashMap<InterestingPoint, Integer> probe = new HashMap<>();
    ArrayList<InterestingPoint> lsearchSpace = new ArrayList<>();
    for (CutSet cs : _cutSets) {
        CollectionUtils.addAll(lsearchSpace, cs.cut);
        for (InterestingPoint p : cs.cut)
            probe.put(p, probe.size());
    }
    // Sort materialization points in decreasing order of their sizes,
    // which can improve the pruning efficiency by skipping larger sub-spaces.
    for (InterestingPoint p : sortBySize(part.getMatPointsExt(), memo, false))
        if (!probe.containsKey(p)) {
            lsearchSpace.add(p);
            probe.put(p, probe.size());
        }
    _searchSpace = lsearchSpace.toArray(new InterestingPoint[0]);

    // Finalize cut sets (update positions wrt search space).
    for (CutSet cs : _cutSets)
        cs.updatePositions(probe);

    // Final sanity check of interesting points.
    if (_searchSpace.length != part.getMatPointsExt().length)
        throw new RuntimeException("Corrupt linearized search space: " + _searchSpace.length + " vs "
                + part.getMatPointsExt().length);
}
From source file:org.apache.hadoop.hive.ql.parse.UpdateDeleteSemanticAnalyzer.java
/**
 * Generates the Insert leg of the multi-insert SQL to represent the
 * WHEN NOT MATCHED THEN INSERT clause.
 *
 * @param targetTableNameInSourceQuery - simple name/alias
 * @throws SemanticException
 */
private void handleInsert(ASTNode whenNotMatchedClause, StringBuilder rewrittenQueryStr, ASTNode target,
        ASTNode onClause, Table targetTable, String targetTableNameInSourceQuery, String onClauseAsString,
        String hintStr) throws SemanticException {
    ASTNode whenClauseOperation = getWhenClauseOperation(whenNotMatchedClause);
    assert whenNotMatchedClause.getType() == HiveParser.TOK_NOT_MATCHED;
    assert whenClauseOperation.getType() == HiveParser.TOK_INSERT;

    // Identify the node that contains the values to insert and the optional column list node.
    ArrayList<Node> children = whenClauseOperation.getChildren();
    ASTNode valuesNode = (ASTNode) children.stream()
            .filter(n -> ((ASTNode) n).getType() == HiveParser.TOK_FUNCTION).findFirst().get();
    ASTNode columnListNode = (ASTNode) children.stream()
            .filter(n -> ((ASTNode) n).getType() == HiveParser.TOK_TABCOLNAME).findFirst().orElse(null);

    // If a column list is specified, it has to have the same number of elements as the values.
    // valuesNode has a child for the struct; the rest are the columns.
    if (columnListNode != null && columnListNode.getChildCount() != (valuesNode.getChildCount() - 1)) {
        throw new SemanticException(
                String.format("Column schema must have the same length as values (%d vs %d)",
                        columnListNode.getChildCount(), valuesNode.getChildCount() - 1));
    }
    rewrittenQueryStr.append("INSERT INTO ").append(getFullTableNameForSQL(target));
    if (columnListNode != null) {
        rewrittenQueryStr.append(' ').append(getMatchedText(columnListNode));
    }
    addPartitionColsToInsert(targetTable.getPartCols(), rewrittenQueryStr);
    rewrittenQueryStr.append(" -- insert clause\n SELECT ");
    if (hintStr != null) {
        rewrittenQueryStr.append(hintStr);
    }
    OnClauseAnalyzer oca = new OnClauseAnalyzer(onClause, targetTable, targetTableNameInSourceQuery, conf,
            onClauseAsString);
    oca.analyze();
    String valuesClause = getMatchedText(valuesNode);
    valuesClause = valuesClause.substring(1, valuesClause.length() - 1); // strip '(' and ')'
    valuesClause = replaceDefaultKeywordForMerge(valuesClause, targetTable, columnListNode);
    rewrittenQueryStr.append(valuesClause).append("\n WHERE ").append(oca.getPredicate());
    String extraPredicate = getWhenClausePredicate(whenNotMatchedClause);
    if (extraPredicate != null) {
        // We have WHEN NOT MATCHED AND <boolean expr> THEN INSERT.
        rewrittenQueryStr.append(" AND ")
                .append(getMatchedText(((ASTNode) whenNotMatchedClause.getChild(1)))).append('\n');
    }
}
From source file:UI.MainStageController.java
/**
 * Prompts an alert that the selected file is already part of the current project.
 */
private void showFileAlreadyLoadedAlert(ArrayList<String> fileNames) {
    if (fileNames.size() > 1) {
        fileNames = fileNames.stream().map(string -> "'" + string + "'")
                .collect(Collectors.toCollection(ArrayList::new));
    }
    String name = String.join(",\n", fileNames);
    String oneFileAlreadyLoaded = "The file \n'" + name + "'\nis already loaded in your project.";
    String multipleFilesAlreadyLoaded = "The files\n" + name + "\n are already loaded in your project.";
    fileAlreadyLoadedAlert = new Alert(Alert.AlertType.ERROR);
    fileAlreadyLoadedAlert.setTitle("File not loaded.");
    fileAlreadyLoadedAlert
            .setContentText(fileNames.size() == 1 ? oneFileAlreadyLoaded : multipleFilesAlreadyLoaded);
    fileAlreadyLoadedAlert.show();
}