List of usage examples for java.util.Set.removeAll
boolean removeAll(Collection<?> c);
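Before the real-world examples below, a minimal self-contained sketch of the method's contract (not taken from any of the source files that follow): removeAll deletes every element that is also contained in the argument collection, i.e. an in-place set difference, and returns true only if the set was modified.

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class RemoveAllDemo {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(List.of("red", "green", "blue"));

        // removeAll computes an in-place set difference and reports whether
        // the receiving set changed. The argument can be any Collection.
        boolean changed = colors.removeAll(List.of("green", "purple"));

        System.out.println(changed); // true ("green" was removed; "purple" was never present)
        System.out.println(colors);  // [red, blue] (HashSet iteration order is unspecified)
    }
}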
From source file:kr.co.bitnine.octopus.meta.jdo.JDOMetaContext.java
private void updateTablesOfSchema(Schema rawSchema, MSchema mSchema, final String tableRegex)
        throws MetaException {
    Map<String, MTable> oldTables = new HashMap<>();
    for (MetaTable table : mSchema.getTables())
        oldTables.put(table.getName(), (MTable) table);

    Set<String> oldTableNames = new TreeSet<>(oldTables.keySet());
    Set<String> newTableNames = new TreeSet<>();

    for (Table rawTable : rawSchema.getTables()) {
        String tableName = rawTable.getName();
        if (tableRegex != null && !tableName.matches(tableRegex))
            continue;

        LOG.debug("update table. tableName=" + tableName);
        newTableNames.add(tableName);

        if (oldTables.containsKey(tableName)) { // update table
            MTable mTable = oldTables.get(tableName);
            updateColumnsOfTable(rawTable, mTable);
        } else { // add new table
            MTable mTable = new MTable(tableName, "TABLE", mSchema);
            pm.makePersistent(mTable);
            updateLogger.create(null, tableName);
            addColumnsOfTable(rawTable, mTable);
        }
    }

    // remove old tables
    if (tableRegex == null) {
        oldTableNames.removeAll(newTableNames);
        for (String name : oldTableNames) {
            LOG.debug("delete table. tableName=" + mSchema.getName() + '.' + name);
            pm.deletePersistent(oldTables.get(name));
            updateLogger.delete(null, name);
        }
    }
}
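The example above uses removeAll for a common synchronization pattern: collect the names that existed before, collect the names seen in the fresh snapshot, and subtract to find the stale entries to delete. A reduced sketch of the same idea, with hypothetical table names standing in for the metastore objects:

import java.util.Set;
import java.util.TreeSet;

public class StaleEntryDemo {
    public static void main(String[] args) {
        // Names known from the previous sync (hypothetical data).
        Set<String> oldTableNames = new TreeSet<>(Set.of("customers", "orders", "legacy_log"));
        // Names present in the fresh snapshot.
        Set<String> newTableNames = new TreeSet<>(Set.of("customers", "orders", "invoices"));

        // Subtract the still-present names; whatever remains is stale.
        oldTableNames.removeAll(newTableNames);
        for (String name : oldTableNames) {
            System.out.println("would delete stale table: " + name); // legacy_log
        }
    }
}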
From source file:org.apache.maven.plugin.eclipse.EclipsePlugin.java
public final EclipseSourceDir[] buildDirectoryList(MavenProject project, File basedir, File buildOutputDirectory)
        throws MojoExecutionException {
    File projectBaseDir = project.getFile().getParentFile();

    String mainOutput = IdeUtils.toRelativeAndFixSeparator(projectBaseDir, buildOutputDirectory, false);

    // If using the standard output location, don't mix the test output into it.
    String testOutput = null;
    boolean useStandardOutputDir = buildOutputDirectory
            .equals(new File(project.getBuild().getOutputDirectory()));
    if (useStandardOutputDir) {
        getLog().debug("testOutput toRelativeAndFixSeparator " + projectBaseDir + " , "
                + project.getBuild().getTestOutputDirectory());
        testOutput = IdeUtils.toRelativeAndFixSeparator(projectBaseDir,
                new File(project.getBuild().getTestOutputDirectory()), false);
        getLog().debug("testOutput after toRelative : " + testOutput);
    }

    Set mainDirectories = new LinkedHashSet();
    extractSourceDirs(mainDirectories, project.getCompileSourceRoots(), basedir, projectBaseDir, false, null);
    extractResourceDirs(mainDirectories, project.getBuild().getResources(), basedir, projectBaseDir, false,
            mainOutput);

    Set testDirectories = new LinkedHashSet();
    extractSourceDirs(testDirectories, project.getTestCompileSourceRoots(), basedir, projectBaseDir, true,
            testOutput);
    extractResourceDirs(testDirectories, project.getBuild().getTestResources(), basedir, projectBaseDir, true,
            testOutput);

    // avoid duplicated entries
    Set directories = new LinkedHashSet();

    // NOTE: Since MNG-3118, test classes come before main classes
    boolean testBeforeMain = isMavenVersion("[2.0.8,)");
    if (testBeforeMain) {
        directories.addAll(testDirectories);
        directories.removeAll(mainDirectories);
        directories.addAll(mainDirectories);
    } else {
        directories.addAll(mainDirectories);
        directories.addAll(testDirectories);
    }
    if (ajdt)
        extractAspectDirs(directories, project, basedir, projectBaseDir, testOutput);

    return (EclipseSourceDir[]) directories.toArray(new EclipseSourceDir[directories.size()]);
}
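The Maven plugin above leans on a subtle property of the addAll/removeAll/addAll sequence over a LinkedHashSet: add the test directories first, remove any that also appear in the main set, then add the main set. The result is deduplicated, and any shared entry takes its position within the main block. A small sketch with hypothetical String paths standing in for EclipseSourceDir:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class OrderedMergeDemo {
    public static void main(String[] args) {
        List<String> test = List.of("src/test/java", "src/shared");
        List<String> main = List.of("src/main/java", "src/shared");

        Set<String> merged = new LinkedHashSet<>(test);
        merged.removeAll(main); // drop duplicates so the main copy's position wins
        merged.addAll(main);    // shared entries now sit in the main block

        System.out.println(merged); // [src/test/java, src/main/java, src/shared]
    }
}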
From source file:com.virtusa.akura.student.controller.StudentAcademicLifeController.java
/**
 * Calculate and set average and total term marks.
 *
 * @param modelMap - ModelMap attribute.
 * @param studentTermMarkObjList - List.
 * @throws AkuraAppException throws detailed exception when fails.
 */
private void setTotalAndAverageMarks(ModelMap modelMap,
        List<StudentSubjectAverageViewDTO> studentTermMarkObjList) throws AkuraAppException {

    List<Map<String, Object>> totalMarksList = new ArrayList<Map<String, Object>>();
    List<Map<String, Object>> averageMarksList = new ArrayList<Map<String, Object>>();

    Set<String> allTerms = new TreeSet<String>();
    Set<String> markedTerms = new TreeSet<String>();
    Set<String> noMarksTerms = new TreeSet<String>();

    for (Term term : commonService.getTermList()) {
        allTerms.add(term.getDescription());
        noMarksTerms.add(term.getDescription());
    }

    for (StudentSubjectAverageViewDTO studentAvgMarks : studentTermMarkObjList) {
        markedTerms.add(studentAvgMarks.getTerm());
    }

    // Remove terms with marks from all terms list to get the terms with no marks.
    noMarksTerms.removeAll(markedTerms);

    Iterator<String> termObj = allTerms.iterator();
    while (termObj.hasNext()) {
        int totalSubjectOfTerm = 0;
        double totalMarksOfTerm = 0.0;
        double avarageMarksOfTerm = 0.0;
        String term = termObj.next();

        for (StudentSubjectAverageViewDTO dto : studentTermMarkObjList) {
            if (term.equals(dto.getTerm())) {
                totalMarksOfTerm = totalMarksOfTerm + dto.getMarks();
                totalSubjectOfTerm++;
            }
        }

        if (totalMarksOfTerm != 0.0) {
            avarageMarksOfTerm = totalMarksOfTerm / totalSubjectOfTerm;
        }

        Map<String, Object> totalMarkMap = new TreeMap<String, Object>();
        Map<String, Object> averageMarkMap = new TreeMap<String, Object>();

        // Set total marks and average marks as minus one, if this student doesn't have
        // marks for this term.
        if (totalMarksOfTerm == 0.0 && noMarksTerms.contains(term)) {
            totalMarkMap.put(term, AkuraConstant.MINUS_ONE);
            averageMarkMap.put(term, AkuraConstant.MINUS_ONE);
        } else {
            totalMarkMap.put(term, totalMarksOfTerm);
            averageMarkMap.put(term, avarageMarksOfTerm);
        }

        totalMarksList.add(totalMarkMap);
        averageMarksList.add(averageMarkMap);
    }

    modelMap.addAttribute(TOTAL_TERM_MARK_LIST, totalMarksList);
    modelMap.addAttribute(AVERAGE_TERM_MARK_LIST, averageMarksList);
}
From source file:com.streamsets.datacollector.validation.PipelineFragmentConfigurationValidator.java
@VisibleForTesting
boolean validatePipelineLanes() {
    boolean preview = true;
    List<StageConfiguration> stagesConf = pipelineFragmentConfiguration.getStages();
    if (CollectionUtils.isNotEmpty(this.pipelineFragmentConfiguration.getFragments())) {
        stagesConf = pipelineFragmentConfiguration.getOriginalStages();
    }
    for (int i = 0; i < stagesConf.size(); i++) {
        StageConfiguration stageConf = stagesConf.get(i);
        Set<String> openOutputs = new LinkedHashSet<>(stageConf.getOutputLanes());
        Set<String> openEvents = new LinkedHashSet<>(stageConf.getEventLanes());

        for (int j = i + 1; j < stagesConf.size(); j++) {
            StageConfiguration downStreamStageConf = stagesConf.get(j);
            Set<String> duplicateOutputs = Sets.intersection(new HashSet<>(stageConf.getOutputLanes()),
                    new HashSet<>(downStreamStageConf.getOutputLanes()));
            Set<String> duplicateEvents = Sets.intersection(new HashSet<>(stageConf.getEventLanes()),
                    new HashSet<>(downStreamStageConf.getEventLanes()));
            if (!duplicateOutputs.isEmpty()) {
                // there is more than one stage defining the same output lane
                issues.add(IssueCreator.getPipeline().create(downStreamStageConf.getInstanceName(),
                        ValidationError.VALIDATION_0010, duplicateOutputs, stageConf.getInstanceName()));
                preview = false;
            }
            if (!duplicateEvents.isEmpty()) {
                // there is more than one stage defining the same event lane
                issues.add(IssueCreator.getPipeline().create(downStreamStageConf.getInstanceName(),
                        ValidationError.VALIDATION_0010, duplicateEvents, stageConf.getInstanceName()));
                preview = false;
            }

            openOutputs.removeAll(downStreamStageConf.getInputLanes());
            openEvents.removeAll(downStreamStageConf.getInputLanes());
        }

        if (!openOutputs.isEmpty() && !isPipelineFragment) {
            openLanes.addAll(openOutputs);
            // the stage has open output lanes
            Issue issue = IssueCreator.getStage(stageConf.getInstanceName())
                    .create(ValidationError.VALIDATION_0011);
            issue.setAdditionalInfo("openStreams", openOutputs);
            issues.add(issue);
        }
        if (!openEvents.isEmpty() && !isPipelineFragment) {
            openLanes.addAll(openEvents);
            // the stage has open event lanes
            Issue issue = IssueCreator.getStage(stageConf.getInstanceName())
                    .create(ValidationError.VALIDATION_0104);
            issue.setAdditionalInfo("openStreams", openEvents);
            issues.add(issue);
        }
    }
    return preview;
}
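The validator above uses removeAll iteratively: it starts with the full set of a stage's output lanes and subtracts each downstream stage's input lanes; anything left at the end is an unconnected (open) lane. A reduced sketch of that consumption pattern, assuming a hypothetical minimal Stage record:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class OpenLanesDemo {
    // Hypothetical minimal stage: the lanes it produces and the lanes it consumes.
    record Stage(String name, Set<String> outputs, Set<String> inputs) {}

    public static void main(String[] args) {
        List<Stage> stages = List.of(
                new Stage("origin", Set.of("lane1", "lane2"), Set.of()),
                new Stage("processor", Set.of("lane3"), Set.of("lane1")),
                new Stage("target", Set.of(), Set.of("lane3")));

        for (int i = 0; i < stages.size(); i++) {
            Set<String> open = new LinkedHashSet<>(stages.get(i).outputs());
            for (int j = i + 1; j < stages.size(); j++) {
                open.removeAll(stages.get(j).inputs()); // consumed lanes are no longer open
            }
            if (!open.isEmpty()) {
                System.out.println(stages.get(i).name() + " has open lanes: " + open);
            }
        }
        // prints: origin has open lanes: [lane2]
    }
}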
From source file:com.github.gekoh.yagen.ddl.CreateDDL.java
private String getHsqlDBHistTriggerSql(Dialect dialect, String tableName, String histTableName,
        String histColName, Set<String> columns, List<String> pkColumns, List<String> histRelevantCols) {
    VelocityContext context = new VelocityContext();

    Set<String> nonPkColumns = new HashSet<String>(columns);
    nonPkColumns.removeAll(pkColumns);

    context.put("VERSION_COLUMN_NAME", VERSION_COLUMN_NAME);
    context.put("MODIFIER_COLUMN_NAME", AuditInfo.LAST_MODIFIED_BY);
    context.put("liveTableName", tableName);
    context.put("hstTableName", histTableName);
    context.put("columns", columns);
    context.put("histColName", histColName);
    context.put("pkColumns", pkColumns);
    context.put("nonPkColumns", nonPkColumns);
    context.put("histRelevantCols", histRelevantCols);

    StringWriter wr = new StringWriter();
    try {
        wr.append(STATEMENT_SEPARATOR);
        writeTriggerSingleOperation(dialect, wr, "HstTriggerSingleOperation.vm.pl.sql", context, tableName,
                "_ht", "I");
        wr.write("\n/\n");
        wr.append(STATEMENT_SEPARATOR);
        writeTriggerSingleOperation(dialect, wr, "HstTriggerSingleOperation.vm.pl.sql", context, tableName,
                "_ht", "U");
        wr.write("\n/\n");
        wr.append(STATEMENT_SEPARATOR);
        writeTriggerSingleOperation(dialect, wr, "HstTriggerSingleOperation.vm.pl.sql", context, tableName,
                "_ht", "D");
        wr.write("\n/\n");
    } catch (IOException e) {
        throw new IllegalStateException("cannot read history trigger template");
    }
    return wr.toString();
}
From source file:com.github.gekoh.yagen.ddl.CreateDDL.java
private String getHistTriggerSource(Dialect dialect, String objectName, String tableName, String histTableName,
        String histColName, Set<String> columns, List<String> pkColumns, List<String> histRelevantCols) {
    checkObjectName(dialect, objectName);

    VelocityContext context = new VelocityContext();

    Set<String> hstNoNullColumns = new HashSet<String>();
    TableConfig tableConfig = tblNameToConfig.get(tableName);
    IntervalPartitioning partitioning = tableConfig != null
            ? tableConfig.getTableAnnotationOfType(IntervalPartitioning.class)
            : null;
    if (partitioning != null) {
        hstNoNullColumns.add(partitioning.columnName().toLowerCase());
    }

    Set<String> nonPkColumns = new HashSet<String>(columns);
    nonPkColumns.removeAll(pkColumns);

    context.put("VERSION_COLUMN_NAME", VERSION_COLUMN_NAME);
    context.put("MODIFIER_COLUMN_NAME", AuditInfo.LAST_MODIFIED_BY);
    context.put("dialect", dialect);
    context.put("objectName", objectName);
    context.put("liveTableName", tableName);
    context.put("hstTableName", histTableName);
    context.put("columns", columns);
    context.put("histColName", histColName);
    context.put("pkColumns", pkColumns);
    context.put("nonPkColumns", nonPkColumns);
    context.put("noNullColumns", hstNoNullColumns);
    context.put("histRelevantCols", histRelevantCols);
    context.put("varcharType", dialect.getTypeName(Types.VARCHAR, 64, 0, 0));

    setNewOldVar(dialect, context);

    StringWriter wr = new StringWriter();
    mergeTemplateFromResource("HstTrigger.vm.pl.sql", wr, context);

    return wr.toString();
}
From source file:kr.co.bitnine.octopus.meta.jdo.JDOMetaContext.java
private void updateJdbcDataSourceInternal(DataContext dc, MDataSource mDataSource, final String schemaRegex,
        final String tableRegex) throws MetaException {
    Map<String, MSchema> oldSchemas = new HashMap<>();
    for (MetaSchema schema : mDataSource.getSchemas())
        oldSchemas.put(schema.getName(), (MSchema) schema);

    Set<String> oldSchemaNames = new TreeSet<>(oldSchemas.keySet());
    Set<String> newSchemaNames = new TreeSet<>();

    for (Schema rawSchema : dc.getSchemas()) {
        String schemaName = rawSchema.getName();
        if (schemaName == null)
            schemaName = "__DEFAULT";
        if (schemaRegex != null && !schemaName.matches(schemaRegex))
            continue;

        LOG.debug("update schema. schemaName=" + schemaName);
        newSchemaNames.add(schemaName);
        updateLogger.setDefaultSchema(schemaName);

        if (oldSchemas.containsKey(schemaName)) { // update schema
            MSchema mSchema = oldSchemas.get(schemaName);
            updateTablesOfSchema(rawSchema, mSchema, tableRegex);
        } else { // add new schema
            MSchema mSchema = new MSchema(schemaName, mDataSource);
            pm.makePersistent(mSchema);
            updateLogger.create(schemaName);
            addTablesOfSchema(rawSchema, mSchema, updateLogger);
        }
    }

    // remove old schemas
    if (schemaRegex == null) {
        oldSchemaNames.removeAll(newSchemaNames);
        for (String name : oldSchemaNames) {
            LOG.debug("delete schema. schemaName=" + name);
            MSchema mSchema = oldSchemas.get(name);
            for (MetaTable table : mSchema.getTables())
                updateLogger.delete(name, table.getName());
            pm.deletePersistent(oldSchemas.get(name));
            updateLogger.delete(name);
        }
    }
}
From source file:ca.weblite.xmlvm.XMLVM.java
/**
 * Try to just generate c source files on the changes. If the destination directory
 * doesn't exist, then it will still perform a clean build.
 * @throws IOException
 */
public void doRegularBuild(XmlvmCallback callback) throws IOException {
    try {
        //if (getJavac() == null) {
        //    setJavac((Javac) getProject().createTask("javac"));
        //}
        Javac javac = (Javac) getProject().createTask("javac");
        File xmlvmDir = this.getXmlvmCacheDir("xmlvm");

        // Create a temporary directory as a destination for javac
        File tmpBuild = File.createTempFile("build", "build");
        tmpBuild.delete();
        tmpBuild.mkdir();

        // Create a temporary directory to contain only the .java files that
        // have changed.
        File changedSrcDir = setupChangedSourcesDir();
        System.out.println("Found " + changedSrcDir.list().length + " changed files");

        javac.setDestdir(tmpBuild);
        javac.setSrcdir(new Path(getProject(), changedSrcDir.getAbsolutePath()));
        try {
            System.out.println("Classpath currently " + javac.getClasspath());
            System.out.println("Adding to classpath: " + getClassPath());
            if (!getClassPath().equals(javac.getClasspath())) {
                javac.setClasspath(getClassPath());
            }
        } catch (Exception ex) {
            System.out.println(getClassPath());
            throw ex;
        }
        //javac.getClasspath().add(new Path(getProject(), getJavaBuildDir().getAbsolutePath()));
        System.out.println("Java build dir is " + getJavaBuildDir());
        System.out.println("Running javac on changed sources");
        System.out.println("Src dir is " + javac.getSrcdir());
        //javac.setFork(true);
        javac.setVerbose(true);
        javac.setFailonerror(true);
        javac.execute();

        // Update the dependency graph for the changed classes.
        System.out.println("Updating dependency graph...");
        this.updateDependencyGraph(tmpBuild, xmlvmDir);

        // Copy the compiled files to the intermediate build dir
        System.out.println("Copying compiled sources to " + getJavaBuildDir());
        Copy copy = (Copy) getProject().createTask("copy");
        copy.setTodir(getJavaBuildDir());
        FileSet fs = new FileSet();
        fs.setDir(tmpBuild);
        fs.setIncludes("**");
        copy.addFileset(fs);
        copy.setOverwrite(true);
        copy.execute();

        // Now let's find out which classes may need to be updated due to our changes
        Set<String> changedClasses = getChangedClassNames();
        System.out.println("Found " + changedClasses.size() + " changed classes. Looking for dirty classes...");
        Set<String> dirtyClasses = collectDirtyClasses(changedClasses);
        System.out.println("Found " + dirtyClasses.size() + " dirty classes.");

        // Now try to find the dirty classes
        Set<String> found = new HashSet<String>();
        System.out.println("Locating dirty classes and copying to " + tmpBuild);
        Path searchPath = new Path(getProject(), tmpBuild.getAbsolutePath());
        searchPath.add(javac.getClasspath());
        this.findClassesInPath(searchPath, dirtyClasses, found, tmpBuild);
        if (found.size() != dirtyClasses.size()) {
            Set<String> missing = new HashSet<String>();
            missing.addAll(dirtyClasses);
            missing.removeAll(found);
            System.out.println("Missing classes : " + missing);
            System.out.println("Classpath is " + javac.getClasspath());
            System.out.println("Missing include " + missing);
            System.out.println("Failed to find all dirty classes that need to be compiled : "
                    + found.size() + " vs " + dirtyClasses.size());
        }

        // Now we should have all we need to generate our C source files.
        // Delete the changed source dir, since we don't need it anymore.
        System.out.println("Deleting " + changedSrcDir + ". We don't need it anymore.");
        Delete del = (Delete) getProject().createTask("delete");
        del.setDir(changedSrcDir);
        del.execute();

        File intermediateOut = this.getXmlvmCacheDir("c");
        System.out.print("Deleting intermediate build directory...");
        FileUtils.deleteDirectory(intermediateOut);
        System.out.println("Finished");
        intermediateOut.mkdirs();

        //System.out.println("Intermediates: ");
        //for ( File f : tmpOutput.listFiles()){
        //    System.out.println(f);
        //}
        if (tmpBuild.list().length < 5) {
            System.out.println(Arrays.asList(tmpBuild.list()));
        }
        System.out.println("Converting " + tmpBuild.list().length + " xmlvm files to c source files....");
        System.out.println("Input Path is " + tmpBuild.getAbsolutePath());
        System.out.println("Output Path is " + intermediateOut.getAbsolutePath());
        System.out.println("Libraries is " + javac.getClasspath().toString().replaceAll(File.pathSeparator, ","));

        // XMLVM seems to copy files from 'libraries' into the 'in' directory
        // which means we always get the old version.
        // We need to copy to the ios/build/classes directory.
        Copy copy2 = (Copy) getProject().createTask("copy");

        callback.beforeXmlvm(tmpBuild, intermediateOut);
        this.runXmlvm(new String[] { "--in=" + tmpBuild.getAbsolutePath(),
                "--out=" + intermediateOut.getAbsolutePath(), "--target=c",
                "--libraries=" + javac.getClasspath().toString().replaceAll(File.pathSeparator, ","),
                "--c-source-extension=m",
                //"--debug=all",
                "--disable-vtable-optimizations", });
        callback.afterXmlvm(tmpBuild, intermediateOut);
        //if ( true ) throw new IOException("test point");

        System.out.println(Arrays.asList(intermediateOut.list()));
        del = (Delete) getProject().createTask("delete");
        del.setDir(tmpBuild);
        del.execute();

        System.out.print("Removing constant pool dependencies...");
        ConstantPoolHelper.removeConstantPoolDependencies(intermediateOut);
        System.out.println("Finished.");

        System.out.print("Fixing vtable references...");
        VtableHelper.fixVtableReferences(getProject(), intermediateOut, getDest());
        System.out.println("Finished.");

        System.out.print("Copying " + dirtyClasses.size() + " changed classes to " + getDest() + "...");
        copyClasses(dirtyClasses, intermediateOut, getDest());
        System.out.println("Finished.");
        //copyChangedSource(intermediateOut, getDest());
    } catch (Exception ex) {
        Logger.getLogger(XMLVM.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:org.forester.archaeopteryx.ControlPanel.java
private void search(final MainPanel atv_panel, final Phylogeny tree, final String query_str) {
    getSearchFoundCountsLabel().setVisible(true);
    getSearchResetButton().setEnabled(true);
    getSearchResetButton().setVisible(true);
    String[] queries = null;
    Set<PhylogenyNode> nodes = null;
    if (query_str.indexOf(',') >= 0) {
        queries = query_str.split(",+");
    } else {
        queries = new String[1];
        queries[0] = query_str.trim();
    }
    if ((queries != null) && (queries.length > 0)) {
        nodes = new HashSet<PhylogenyNode>();
        for (String query : queries) {
            if (ForesterUtil.isEmpty(query)) {
                continue;
            }
            query = query.trim();
            if (query.indexOf('+') >= 0) {
                nodes.addAll(PhylogenyMethods.searchDataLogicalAnd(query.split("\\++"), tree,
                        getOptions().isSearchCaseSensitive(), !getOptions().isMatchWholeTermsOnly()));
            } else {
                nodes.addAll(PhylogenyMethods.searchData(query, tree, getOptions().isSearchCaseSensitive(),
                        !getOptions().isMatchWholeTermsOnly()));
            }
        }
        if (getOptions().isInverseSearchResult()) {
            final Set<PhylogenyNode> all = PhylogenyMethods.obtainAllNodesAsSet(tree);
            all.removeAll(nodes);
            nodes = all;
        }
    }
    if ((nodes != null) && (nodes.size() > 0)) {
        atv_panel.getCurrentTreePanel().setFoundNodes(nodes);
        setSearchFoundCountsOnLabel(nodes.size());
    } else {
        setSearchFoundCountsOnLabel(0);
        searchReset();
    }
}
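Inverting a search result, as the Archaeopteryx code above does, is simply taking a complement: start from the universe of all nodes and removeAll the matches. Sketched with plain strings standing in for PhylogenyNode:

import java.util.HashSet;
import java.util.Set;

public class InverseSelectionDemo {
    public static void main(String[] args) {
        Set<String> all = new HashSet<>(Set.of("a", "b", "c", "d"));
        Set<String> matches = Set.of("b", "d");

        // Complement: everything in the universe that did NOT match.
        all.removeAll(matches);
        System.out.println(all); // [a, c] in some order
    }
}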
From source file:jenkins.branch.MultiBranchProject.java
/**
 * Offers direct access to set the configurable list of branch sources <strong>while</strong> preserving
 * branch source id associations for sources that are otherwise unmodified.
 *
 * @param sources the new sources.
 * @throws IOException if the sources could not be persisted to disk.
 */
public void setSourcesList(List<BranchSource> sources) throws IOException {
    if (this.sources.isEmpty() || sources.isEmpty()) {
        // easy
        this.sources.replaceBy(sources);
        return;
    }
    Set<String> oldIds = sourceIds(this.sources);
    Set<String> newIds = sourceIds(sources);
    if (oldIds.containsAll(newIds) || newIds.containsAll(oldIds)) {
        // either adding, removing, or updating without an id change
        this.sources.replaceBy(sources);
        return;
    }
    // Now we need to check if any of the new entries are effectively the same as an old entry
    // that is being removed. We will store the ID changes in a map and process all the affected
    // branches to update their sourceIds.
    Map<String, String> changedIds = new HashMap<>();
    Set<String> additions = new HashSet<>(newIds);
    additions.removeAll(oldIds);
    Set<String> removals = new HashSet<>(oldIds);
    removals.removeAll(newIds);

    for (BranchSource addition : sources) {
        String additionId = addition.getSource().getId();
        if (!additions.contains(additionId)) {
            continue;
        }
        for (BranchSource removal : this.sources) {
            String removalId = removal.getSource().getId();
            if (!removals.contains(removalId)) {
                continue;
            }
            if (!equalButForId(removal.getSource(), addition.getSource())) {
                continue;
            }
            changedIds.put(removalId, additionId);
            // now take these two out of consideration
            removals.remove(removalId);
            additions.remove(additionId);
            break;
        }
    }
    this.sources.replaceBy(sources);
    BranchProjectFactory<P, R> factory = getProjectFactory();
    for (P item : getItems()) {
        if (!factory.isProject(item)) {
            continue;
        }
        Branch oldBranch = factory.getBranch(item);
        if (changedIds.containsKey(oldBranch.getSourceId())) {
            Branch newBranch = new Branch(changedIds.get(oldBranch.getSourceId()), oldBranch.getHead(),
                    oldBranch.getScm(), oldBranch.getProperties());
            newBranch.setActions(oldBranch.getActions());
            factory.setBranch(item, newBranch);
        }
    }
}
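The Jenkins example computes a symmetric pair of differences: the additions are the new ids minus the old, and the removals are the old ids minus the new. This two-way removeAll diff is a common idiom when reconciling two versions of a collection; a compact sketch with hypothetical source ids:

import java.util.HashSet;
import java.util.Set;

public class TwoWayDiffDemo {
    public static void main(String[] args) {
        Set<String> oldIds = Set.of("s1", "s2", "s3");
        Set<String> newIds = Set.of("s2", "s3", "s4");

        Set<String> additions = new HashSet<>(newIds);
        additions.removeAll(oldIds); // present only in the new list

        Set<String> removals = new HashSet<>(oldIds);
        removals.removeAll(newIds);  // present only in the old list

        System.out.println(additions); // [s4]
        System.out.println(removals);  // [s1]
    }
}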