Example usage for java.util List removeAll

Introduction

On this page you can find example usage for java.util List removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this list all of its elements that are contained in the specified collection (optional operation).
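
Element matching uses equals, and removeAll is an optional operation: fixed-size and read-only lists reject it. A minimal sketch of both behaviors (assuming the usual java.util imports):

List<String> letters = new ArrayList<String>(Arrays.asList("a", "b", "c", "b"));
letters.removeAll(Arrays.asList("b", "x")); // removes every "b"; the unmatched "x" is ignored
System.out.println(letters); // prints [a, c]

List<String> fixed = Arrays.asList("a", "b");
fixed.removeAll(Collections.singleton("a")); // throws UnsupportedOperationException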

Usage

From source file:edu.ucuenca.authorsdisambiguation.Distance.java

public String[] clean2(final String[] v) {
    List<String> list = new ArrayList<String>(java.util.Arrays.asList(v));
    list.removeAll(Collections.singleton(null));
    return list.toArray(new String[list.size()]);
}
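
Note the copy into a new ArrayList: Arrays.asList returns a fixed-size view of the array, so calling removeAll on it directly would throw UnsupportedOperationException. Collections.singleton(null) supplies a one-element collection, which makes removeAll strip every null from the list.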

From source file:cn.guoyukun.spring.jpa.plugin.web.controller.BaseTreeableController.java

@RequestMapping(value = "ajax/load")
@PageableDefaults(sort = { "parentIds=asc", "weight=asc" })
@ResponseBody
public Object load(HttpServletRequest request,
        @RequestParam(value = "async", defaultValue = "true") boolean async,
        @RequestParam(value = "asyncLoadAll", defaultValue = "false") boolean asyncLoadAll,
        @RequestParam(value = "searchName", required = false) String searchName,
        @RequestParam(value = "id", required = false) ID parentId,
        @RequestParam(value = "excludeId", required = false) ID excludeId,
        @RequestParam(value = "onlyCheckLeaf", required = false, defaultValue = "false") boolean onlyCheckLeaf,
        Searchable searchable) {

    M excludeM = baseService.findOne(excludeId);

    List<M> models = null;

    if (!StringUtils.isEmpty(searchName)) { // search by name
        searchable.addSearchParam("name_like", searchName);
        models = baseService.findAllByName(searchable, excludeM);
        if (!async || asyncLoadAll) { // sync mode or load-all: also fetch the children of the matched nodes
            searchable.removeSearchFilter("name_like");
            List<M> children = baseService.findChildren(models, searchable);
            models.removeAll(children);
            models.addAll(children);
        } else { // async lazy mode: return only the matched nodes

        }
    } else { // no search name: query by parentId

        if (parentId != null) { // restrict to children of the given parent
            searchable.addSearchFilter("parentId", SearchOperator.eq, parentId);
        }

        if (async && !asyncLoadAll) { // async lazy loading
            // exclude the excludeId node and its descendants from the results
            baseService.addExcludeSearchFilter(searchable, excludeM);

        }

        if (parentId == null && !asyncLoadAll) {
            models = baseService.findRootAndChild(searchable);
        } else {
            models = baseService.findAllWithSort(searchable);
        }
    }

    return convertToZtreeList(request.getContextPath(), models, async && !asyncLoadAll && parentId != null,
            onlyCheckLeaf);
}
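
The removeAll/addAll pair deduplicates: any children already present in models are removed before being re-added, so each child appears exactly once in the final list. As with every removeAll call, this relies on the model class implementing equals consistently.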

From source file:de.clusteval.tools.ClustQualityEval.java

public ClustQualityEval(final String absRepoPath, final String dataConfigName, final String... qualityMeasures)
        throws RepositoryAlreadyExistsException, InvalidRepositoryException, RepositoryConfigNotFoundException,
        RepositoryConfigurationException, UnknownClusteringQualityMeasureException, InterruptedException,
        UnknownDataSetFormatException, UnknownGoldStandardFormatException, GoldStandardNotFoundException,
        GoldStandardConfigurationException, DataSetConfigurationException, DataSetNotFoundException,
        DataSetConfigNotFoundException, GoldStandardConfigNotFoundException, NoDataSetException,
        DataConfigurationException, DataConfigNotFoundException, NumberFormatException, RunResultParseException,
        ConfigurationException, RegisterException, UnknownContextException, UnknownParameterType, IOException,
        UnknownRunResultFormatException, InvalidRunModeException, UnknownParameterOptimizationMethodException,
        NoOptimizableProgramParameterException, UnknownProgramParameterException,
        InvalidConfigurationFileException, NoRepositoryFoundException, InvalidOptimizationParameterException,
        RunException, UnknownDataStatisticException, UnknownProgramTypeException, UnknownRProgramException,
        IncompatibleParameterOptimizationMethodException, UnknownDistanceMeasureException,
        UnknownRunStatisticException, UnknownDataSetTypeException, UnknownRunDataStatisticException,
        UnknownDataPreprocessorException, IncompatibleDataSetConfigPreprocessorException,
        IncompatibleContextException, InvalidDataSetFormatVersionException, RNotAvailableException,
        FormatConversionException {
    super();
    ClustevalBackendServer.logLevel(Level.INFO);
    ClustevalBackendServer.getBackendServerConfiguration().setNoDatabase(true);
    ClustevalBackendServer.getBackendServerConfiguration().setCheckForRunResults(false);
    this.log = LoggerFactory.getLogger(this.getClass());
    final Repository parent = new Repository(
            new File(absRepoPath).getParentFile().getParentFile().getAbsolutePath(), null);
    parent.initialize();
    this.repo = new RunResultRepository(absRepoPath, parent);
    this.repo.initialize();

    List<ParameterOptimizationResult> result = new ArrayList<ParameterOptimizationResult>();
    final ParameterOptimizationRun run = (ParameterOptimizationRun) ParameterOptimizationResult
            .parseFromRunResultFolder(parent, new File(absRepoPath), result, false, false, false);

    this.dataConfig = this.repo.getStaticObjectWithName(DataConfig.class, dataConfigName);

    final List<ClusteringQualityMeasure> measures = new ArrayList<ClusteringQualityMeasure>();

    if (qualityMeasures.length == 0) {
        log.error("Please add at least one quality measure to the command line arguments.");
        this.repo.terminateSupervisorThread();
        return;
    }
    for (String measureSimpleName : qualityMeasures) {
        measures.add(ClusteringQualityMeasure.parseFromString(this.repo, measureSimpleName));
    }

    Set<Thread> threads = new HashSet<Thread>();
    System.out.println("Program configurations:");
    System.out.println(run.getProgramConfigs());
    for (final ProgramConfig pc : run.getProgramConfigs()) {
        // get the dataset for this program config
        DataSet dsIn = Parser.parseFromFile(DataSet.class,
                new File(FileUtils.buildPath(absRepoPath, "inputs", pc.toString() + "_" + dataConfig.toString(),
                        dataConfig.getDatasetConfig().getDataSet().getMajorName(),
                        dataConfig.getDatasetConfig().getDataSet().getMinorName())));
        // get dataset in standard format
        final DataSet ds = dsIn.preprocessAndConvertTo(run.getContext(),
                run.getContext().getStandardInputFormat(),
                dataConfig.getDatasetConfig().getConversionInputToStandardConfiguration(),
                dataConfig.getDatasetConfig().getConversionStandardToInputConfiguration());

        ds.loadIntoMemory();

        Thread t = new Thread() {

            public void run() {
                try {
                    DataConfig dc = dataConfig.clone();
                    dc.getDatasetConfig().setDataSet(ds);

                    File f = new File(FileUtils.buildPath(repo.getBasePath(), "clusters"));
                    File[] childs = f.listFiles(new FilenameFilter() {

                        /*
                         * (non-Javadoc)
                         * 
                         * @see java.io.FilenameFilter#accept(java.io.File,
                         * java.lang.String)
                         */
                        @Override
                        public boolean accept(File dir, String name) {
                            return name.startsWith(pc.getName() + "_" + dataConfig.getName())
                                    && name.endsWith(".results.conv");
                        }
                    });
                    // printer = new MyProgressPrinter(childs.length, true);
                    ((ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME))
                            .info("Assessing qualities of clusterings ...");

                    final Map<Long, ClusteringQualitySet> qualsMap = new HashMap<Long, ClusteringQualitySet>();

                    for (File clusteringFile : childs) {
                        try {
                            Clustering cl = Clustering
                                    .parseFromFile(repo, clusteringFile.getAbsoluteFile(), true).getSecond();

                            // only recalculate the measures that haven't
                            // been evaluated yet
                            List<ClusteringQualityMeasure> toEvaluate = new ArrayList<ClusteringQualityMeasure>(
                                    measures);
                            try {
                                if (cl.getQualities() != null)
                                    toEvaluate.removeAll(cl.getQualities().keySet());
                            } catch (NullPointerException e) {
                                System.out.println(clusteringFile);
                                throw e;
                            }
                            ClusteringQualitySet quals = new ClusteringQualitySet();
                            // evaluate the new quality measures
                            if (!toEvaluate.isEmpty()) {
                                quals.putAll(cl.assessQuality(dc, toEvaluate));
                                System.out.println(quals);

                                // write the new qualities into the results.qual file
                                for (ClusteringQualityMeasure m : quals.keySet())
                                    FileUtils.appendStringToFile(
                                            clusteringFile.getAbsolutePath().replaceFirst(".results.conv",
                                                    ".results.qual"),
                                            String.format("%s\t%s", m.toString(), quals.get(m).getValue())
                                                    + "\n");
                            }

                            long iterationNumber = Long.parseLong(clusteringFile.getName()
                                    .replaceFirst(String.format("%s_%s.", pc.toString(), dc.toString()), "")
                                    .replaceFirst(".results.conv", ""));

                            // store all qualities of the clustering in one set
                            ClusteringQualitySet allQuals = new ClusteringQualitySet();
                            if (cl.getQualities() != null)
                                allQuals.putAll(cl.getQualities());
                            allQuals.putAll(quals);
                            qualsMap.put(iterationNumber, allQuals);

                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (UnknownGoldStandardFormatException e) {
                            e.printStackTrace();
                        } catch (UnknownDataSetFormatException e) {
                            e.printStackTrace();
                        } catch (InvalidDataSetFormatVersionException e) {
                            e.printStackTrace();
                        }
                    }

                    // Update the complete quality file. We want to keep the same
                    // lines, preserving the NT and skipped-iteration infos (missing
                    // lines), so we parse the old file first, iterate over all
                    // lines, and write the same lines back while adding the
                    // additional infos (if there are any).
                    TextFileParser parser = new TextFileParser(
                            FileUtils.buildPath(repo.getBasePath(), "clusters",
                                    String.format("%s_%s.results.qual.complete", pc.toString(), dc.toString())),
                            new int[0], new int[0],
                            FileUtils.buildPath(repo.getBasePath(), "clusters", String
                                    .format("%s_%s.results.qual.complete.new", pc.toString(), dc.toString())),
                            OUTPUT_MODE.STREAM) {

                        protected List<ClusteringQualityMeasure> measures;

                        /*
                         * (non-Javadoc)
                         * 
                         * @see
                         * utils.parse.TextFileParser#processLine(java.lang.
                         * String[], java.lang.String[])
                         */
                        @Override
                        protected void processLine(String[] key, String[] value) {
                        }

                        /*
                         * (non-Javadoc)
                         * 
                         * @see
                         * utils.parse.TextFileParser#getLineOutput(java
                         * .lang .String[], java.lang.String[])
                         */
                        @Override
                        protected String getLineOutput(String[] key, String[] value) {
                            StringBuffer sb = new StringBuffer();
                            // sb.append(combineColumns(value));
                            sb.append(combineColumns(Arrays.copyOf(value, 2)));

                            if (currentLine == 0) {
                                sb.append(outSplit);
                                sb.append(combineColumns(Arrays.copyOfRange(value, 2, value.length)));
                                measures = new ArrayList<ClusteringQualityMeasure>();
                                for (int i = 2; i < value.length; i++)
                                    try {
                                        measures.add(
                                                ClusteringQualityMeasure.parseFromString(parent, value[i]));
                                    } catch (UnknownClusteringQualityMeasureException e) {
                                        e.printStackTrace();
                                        this.terminate();
                                    }

                                // get measures which are not in the complete file header
                                if (qualsMap.keySet().iterator().hasNext()) {
                                    Set<ClusteringQualityMeasure> requiredMeasures = qualsMap
                                            .get(qualsMap.keySet().iterator().next()).keySet();
                                    requiredMeasures.removeAll(measures);

                                    for (ClusteringQualityMeasure m : requiredMeasures) {
                                        sb.append(outSplit);
                                        sb.append(m.toString());
                                    }

                                    measures.addAll(requiredMeasures);
                                }
                            } else if (value[0].contains("*")) {
                                // do nothing
                            } else {
                                long iterationNumber = Long.parseLong(value[0]);
                                ClusteringQualitySet quals = qualsMap.get(iterationNumber);

                                boolean notTerminated = value[3].equals("NT");

                                // for (int i = value.length - 2; i <
                                // measures
                                // .size(); i++) {
                                // sb.append(outSplit);
                                // if (notTerminated)
                                // sb.append("NT");
                                // else
                                // sb.append(quals.get(measures.get(i)));
                                // }
                                for (int i = 0; i < measures.size(); i++) {
                                    sb.append(outSplit);
                                    if (notTerminated)
                                        sb.append("NT");
                                    else if (quals.containsKey(measures.get(i)))
                                        sb.append(quals.get(measures.get(i)));
                                    else
                                        sb.append(value[i + 2]);
                                }
                            }

                            sb.append(System.getProperty("line.separator"));
                            return sb.toString();
                        }
                    };
                    try {
                        parser.process();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }

                    ds.unloadFromMemory();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        threads.add(t);
        t.start();
    }
    // add the new clustering quality measures into the run config file
    TextFileParser p = new TextFileParser(run.getAbsolutePath(), null, null, false, "",
            run.getAbsolutePath() + ".new", OUTPUT_MODE.STREAM) {

        /*
         * (non-Javadoc)
         * 
         * @see utils.parse.TextFileParser#processLine(java.lang.String[],
         * java.lang.String[])
         */
        @Override
        protected void processLine(String[] key, String[] value) {
        }

        /*
         * (non-Javadoc)
         * 
         * @see utils.parse.TextFileParser#getLineOutput(java.lang.String[],
         * java.lang.String[])
         */
        @Override
        protected String getLineOutput(String[] key, String[] value) {
            StringBuilder sb = new StringBuilder();
            sb.append(value[0]);
            if (value[0].contains("qualityMeasures = "))
                for (ClusteringQualityMeasure m : measures)
                    if (!value[0].contains(m.toString())) {
                        sb.append(",");
                        sb.append(m.toString());
                    }

            sb.append(System.getProperty("line.separator"));
            return sb.toString();
        }
    }.process();
    for (Thread t : threads)
        t.join();
    System.exit(0);
}

From source file:grails.plugin.cache.web.filter.PageFragmentCachingFilter.java

protected PageInfo buildPage(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
        throws IOException, ServletException {
    // Invoke the next entity in the chain
    SerializableByteArrayOutputStream out = new SerializableByteArrayOutputStream();
    GenericResponseWrapper wrapper = new GenericResponseWrapper(response, out);
    Map<String, Serializable> cacheableRequestAttributes = new HashMap<String, Serializable>();

    // TODO: split the special include handling out into a separate method
    HttpServletResponse originalResponse = null;
    boolean isInclude = WebUtils.isIncludeRequest(request);
    if (isInclude) {
        originalResponse = WrappedResponseHolder.getWrappedResponse();
        WrappedResponseHolder.setWrappedResponse(wrapper);
    }
    try {
        List<String> attributesBefore = toList(request.getAttributeNames());
        chain.doFilter(request, wrapper);
        List<String> attributesAfter = toList(request.getAttributeNames());
        attributesAfter.removeAll(attributesBefore);
        for (String attrName : attributesAfter) {
            Object value = request.getAttribute(attrName);
            if (value instanceof Serializable) {
                cacheableRequestAttributes.put(attrName, (Serializable) value);
            }
        }
    } finally {
        if (isInclude) {
            WrappedResponseHolder.setWrappedResponse(originalResponse);
        }
    }
    wrapper.flush();

    long timeToLiveSeconds = Integer.MAX_VALUE; // TODO cacheManager.getEhcache(context.cacheName).cacheConfiguration.timeToLiveSeconds;

    String contentType = wrapper.getContentType();
    if (!StringUtils.hasLength(contentType)) {
        contentType = response.getContentType();
    }

    return new PageInfo(wrapper.getStatus(), contentType, out.toByteArray(), false, timeToLiveSeconds,
            wrapper.getAllHeaders(), wrapper.getCookies(), cacheableRequestAttributes);
}
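
Here removeAll computes a difference of two snapshots of the request attribute names: after the call, attributesAfter holds only the attributes that were added while the filter chain executed, and those are then cached if their values are Serializable.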

From source file:com.htmlhifive.tools.jslint.library.LibraryManager.java

/**
 * Returns the include path entries of the given JavaScript project.
 * 
 * @param project the JavaScript project.
 * @param raw if true, the raw include path is returned; otherwise the resolved
 *            include path is returned with the entries marked for removal filtered out.
 * @return the include path entries, or null if the include path could not be read.
 */
private IncludePathEntryWrapper[] getIncludePathEntries(IJavaScriptProject project, boolean raw) {

    try {

        List<IncludePathEntryWrapper> entryList;
        if (raw) {
            entryList = new ArrayList<IncludePathEntryWrapper>(Arrays
                    .asList(IncludePathEntryWrapperFactory.getEntryWrappers(project.getRawIncludepath())));
        } else {
            Set<IncludePathEntryWrapper> removeIndex = getRemoveList();
            entryList = new ArrayList<IncludePathEntryWrapper>(Arrays.asList(
                    IncludePathEntryWrapperFactory.getEntryWrappers(project.getResolvedIncludepath(true))));
            entryList.removeAll(removeIndex);
        }

        return (IncludePathEntryWrapper[]) entryList.toArray(new IncludePathEntryWrapper[entryList.size()]);
    } catch (JavaScriptModelException e) {
        logger.put(Messages.EM0100, e);
        return null;
    }
}

From source file:cw.kop.autobackground.files.DownloadThread.java

private void removeExtras(String dir, String title, int targetNum, Set<File> downloadedFiles) {

    File mainDir = new File(dir + "/" + title + " " + AppSettings.getImagePrefix());
    FilenameFilter filenameFilter = FileHandler.getImageFileNameFilter();

    List<File> files = new ArrayList<>(Arrays.asList(mainDir.listFiles(filenameFilter)));
    files.removeAll(downloadedFiles);

    if (!AppSettings.keepImages()) {
        int extra = mainDir.list(filenameFilter).length - targetNum;
        while (extra > 0 && files.size() > 0) {
            File file = files.get(0);
            AppSettings.clearUrl(file.getName());
            file.delete();
            files.remove(file);
            extra--;
        }
    }
}
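
files starts as every image on disk, and removeAll(downloadedFiles) leaves only the files that were not part of the current download; those are then deleted one at a time until the count is back down to targetNum.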

From source file:io.kamax.mxisd.lookup.strategy.RecursivePriorityLookupStrategy.java

@Override
public List<ThreePidMapping> find(BulkLookupRequest request) {
    List<ThreePidMapping> mapToDo = new ArrayList<>(request.getMappings());
    List<ThreePidMapping> mapFoundAll = new ArrayList<>();

    for (IThreePidProvider provider : listUsableProviders(request)) {
        if (mapToDo.isEmpty()) {
            log.info("No more mappings to lookup");
            break;
        } else {
            log.info("{} mappings remaining overall", mapToDo.size());
        }

        log.info("Using provider {} for remaining mappings", provider.getClass().getSimpleName());
        List<ThreePidMapping> mapFound = provider.populate(mapToDo);
        log.info("Provider {} returned {} mappings", provider.getClass().getSimpleName(), mapFound.size());
        mapFoundAll.addAll(mapFound);
        mapToDo.removeAll(mapFound);
    }

    return mapFoundAll;
}
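
Each provider only sees what its predecessors could not resolve: mapToDo.removeAll(mapFound) drops the mappings a provider returned before the next iteration. As usual with removeAll, this requires ThreePidMapping to implement equals so the returned mappings match the originals.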

From source file:cs.ox.ac.uk.gsors.GroupPreferencesTest1.java

public void testFORewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases1.txt");

    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));

    final String creationDate = dateFormat.format(new Date());

    // Parse the program
    final Parser parser = new Parser();
    parser.parse(getStringFile(_DEFAULT_INPUT_PATH + "prefDB-ontology.dtg"));

    // Get the rules
    final List<IRule> rules = parser.getRules();

    // Get the queries
    final List<IQuery> queryHeads = parser.getQueries();
    final Map<IPredicate, IRelation> conf = parser.getDirectives();
    if (!conf.isEmpty()) {
        StorageManager.getInstance();
        StorageManager.configure(conf);
    }

    // Get the TGDs from the set of rules
    final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

    final List<IRule> mSBox = RewritingUtils.getSBoxRules(rules, queryHeads);
    final IRuleSafetyProcessor ruleProc = new StandardRuleSafetyProcessor();
    ruleProc.process(mSBox);
    final IQueryRewriter ndmRewriter = new NDMRewriter(mSBox);
    final IRelationFactory rf = new RelationFactory();

    // Convert the query bodies in rules
    final List<IRule> bodies = new LinkedList<IRule>(rules);
    bodies.removeAll(tgds);

    final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

    // get the constraints from the set of rules
    final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

    final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
    LOGGER.info("Expressivity: " + exprs.toString());

    if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
        extracted();
    }

    // compute the dependency graph
    LOGGER.debug("Computing position dependencies.");
    // long depGraphMem = MonitoringUtils.getHeapUsage();
    long posDepTime = System.currentTimeMillis();
    Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils.computePositionDependencyGraph(tgds);
    posDepTime = System.currentTimeMillis() - posDepTime;
    // depGraphMem = depGraphMem - MonitoringUtils.getHeapUsage();
    // Setup caching
    CacheManager.setupCaching();

    // if linear TGDs, compute the atom coverage graph.
    LOGGER.debug("Computing atom coverage graph.");
    long atomCoverGraphTime = System.currentTimeMillis();
    if (exprs.contains(Expressivity.LINEAR)) {
        deps = DepGraphUtils.computeAtomCoverageGraph(deps);
    }
    atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
    // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

    // rewriting constraints
    // long ncRewMem = MonitoringUtils.getHeapUsage();
    final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
            RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
    long ncRewTime = System.currentTimeMillis();
    final Set<IRule> rewrittenConstraints = Sets.newHashSet();
    if (!ncCheckStrategy.equals(NCCheck.NONE)) {
        for (final IRule c : constraints) {
            rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
        }
    }
    ncRewTime = System.currentTimeMillis() - ncRewTime;
    // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
    LOGGER.debug("Finished rewriting constraints.");

    // Compute the Rewriting
    final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
            ncCheckStrategy);

    Map<String, Integer> cities = new HashMap<String, Integer>();
    //      cities.put("Peoria", 109);
    //       cities.put("Gilbert", 163);
    //       cities.put("Glendale", 242);
    //       cities.put("Chandler", 349);
    cities.put("Tempe", 465);
    //cities.put("Scottsdale", 780);
    // cities.put("Phoenix", 1683);
    List<Integer> ks = new ArrayList<Integer>();
    ks.add(1);
    ks.add(2);
    ks.add(3);

    List<AggregateStrategy> str = new ArrayList<AggregateStrategy>();
    str.add(AggregateStrategy.CSU);
    str.add(AggregateStrategy.Plurality);
    str.add(AggregateStrategy.PluralityMisery);

    for (AggregateStrategy strategyQA : str) {

        final String summaryPrefix = StringUtils.join(creationDate, "-", strategyQA.toString());

        final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
        final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

        final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/" + strategyQA.toString()),
                FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
                StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
        final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

        // final File cacheSummaryFile = FileUtils.getFile(
        // _WORKING_DIR,
        // FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/"
        // + strategyQA.toString()),
        // FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
        // StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
        // final CSVWriter cacheSummaryWriter = new CSVWriter(new
        // FileWriter(
        // cacheSummaryFile), ',');
        //
        // final File memorySummaryFile = FileUtils.getFile(
        // _WORKING_DIR,
        // FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH + "/"
        // + strategyQA.toString()),
        // FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
        // StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
        // final CSVWriter memorySummaryWriter = new CSVWriter(new
        // FileWriter(
        // memorySummaryFile), ',');

        sizeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingSizeReportHeader());
        timeSummaryWriter.writeNext(GReportingUtils.getSummaryRewritingTimeReportHeader());
        // cacheSummaryWriter.writeNext(GReportingUtils
        // .getSummaryCachingReportHeader());
        // memorySummaryWriter.writeNext(GReportingUtils
        // .getSummaryMemoryReportHeader());
        for (Integer k : ks) {
            for (String city : cities.keySet()) {
                for (int con = 0; con < 1; con++) {
                    LOGGER.info("con-city-k: " + con + "-" + city + "-" + k + "-" + strategyQA.toString());
                    // top k for each preferences
                    for (final String testName : tests) {
                        // Create a buffer for the output
                        final IRelation result = rf.createRelation();
                        GPrefParameters parameters = new GPrefParameters(testName, k, city, cities.get(city));
                        // Create the directory where to store the test results
                        // final File outTestDir = FileUtils
                        // .getFile(
                        // _WORKING_DIR,
                        // FilenameUtils
                        // .separatorsToSystem(_DEFAULT_OUTPUT_PATH
                        // + "/"
                        // + strategyQA
                        // .toString()
                        // + k + city),
                        // testName);
                        // if (!outTestDir.exists()) {
                        // if (outTestDir.mkdirs()) {
                        // LOGGER.info("Created output directory: "
                        // + testName);
                        // } else {
                        // LOGGER.fatal("Error creating output directory");
                        // }
                        // }

                        LOGGER.info("Processing file: " + testName);
                        // dump the rewritten constraints:
                        IRule q = null;
                        if (parameters.getScenario() == Scenario.BREAKFAST_FOOD
                                || parameters.getScenario() == Scenario.LUNCH_FOOD
                                || parameters.getScenario() == Scenario.DINNER_FOOD) {
                            q = queries.get(0);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_CUSINE
                                || parameters.getScenario() == Scenario.LUNCH_CUSINE
                                || parameters.getScenario() == Scenario.DINNER_CUSINE) {
                            q = queries.get(1);
                        }
                        if (parameters.getScenario() == Scenario.BREAKFAST_PLACE
                                || parameters.getScenario() == Scenario.LUNCH_PLACE
                                || parameters.getScenario() == Scenario.DINNER_PLACE) {
                            q = queries.get(2);
                        }

                        CacheManager.setupCaching();

                        final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                                .getPredicateSymbol();

                        // Setup reporting
                        final ToitReporter rep = ToitReporter.getInstance(true);
                        ToitReporter.setupReporting();
                        ToitReporter.setQuery(queryPredicate);
                        ToitReporter.setTest(testName);
                        ToitReporter.setK(parameters.getK());
                        //      GroupReporter.setStrategy(parameters.getStrategy());
                        ToitReporter.setCity(parameters.getCity());
                        ToitReporter.setGroupID(parameters.getGroupId());
                        ToitReporter.setNbUsers(parameters.getMaxNbUsers());
                        ToitReporter.setNbBuss(parameters.getBs());
                        ToitReporter.setScenario(parameters.getScenario());

                        rep.setValue(GRewMetric.DEPGRAPH_TIME, posDepTime);
                        // rep.setValue(GRewMetric.DEPGRAPH_MEM,
                        // depGraphMem);
                        LOGGER.info("Processing query: ".concat(q.toString()));
                        // final long rewMem =
                        // MonitoringUtils.getHeapUsage();
                        final long overallTime = System.currentTimeMillis();
                        final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps,
                                exprs);
                        rep.setValue(GRewMetric.REW_TIME, System.currentTimeMillis() - overallTime);
                        // rep.setValue(GRewMetric.REW_MEM,
                        // MonitoringUtils.getHeapUsage() - rewMem);

                        rep.setValue(GRewMetric.REW_SIZE, (long) rewriting.size());

                        rep.setValue(GRewMetric.REW_CNS_TIME, ncRewTime);
                        // rep.setValue(GRewMetric.REW_CNS_MEM, ncRewMem);

                        // Other metrics

                        // Caching size metrics

                        // Create a file to store the rewriting results.

                        // File outFile = FileUtils.getFile(outTestDir,
                        // queryPredicate.concat("_rew.dtg"));
                        // final FileWriter rewFW = new FileWriter(outFile);
                        //
                        // rewFW.write("/// Query: " + q + "///\n");
                        // rewFW.write("/// Ontology: " + testName + "///");
                        // rewFW.write("/// Created on: " + creationDate
                        // + " ///\n");
                        // rewFW.write("/// Rules in the program: "
                        // + rules.size() + " ///\n");
                        // rewFW.write("/// TGDs in the program: "
                        // + tgds.size() + " ///\n");

                        // LOGGER.info("Writing the output at: "
                        // + outFile.getAbsolutePath());

                        // dump metrics for individual queries.
                        // rewFW.write(rep.getReport());
                        //
                        // rewFW.write(IOUtils.LINE_SEPARATOR);
                        // rewFW.write(IOUtils.LINE_SEPARATOR);
                        //
                        // rewFW.write("/// Rewritten Program ///\n");
                        final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                        Map<IPredicate, IRelation> results = new HashMap<IPredicate, IRelation>();
                        for (final IRule qr : rewriting) {
                            newHeads.add(qr.getHead().iterator().next());
                            // rewFW.write(qr + "\n");

                            final Set<IRule> sboxRewriting = new LinkedHashSet<IRule>();

                            Set<IRule> rrules = ndmRewriter.getRewriting(qr);
                            sboxRewriting.addAll(rrules);

                            // Produce the SQL rewriting for each query in the program
                            final SQLRewriter sqlRewriter = new SQLRewriter(sboxRewriting);

                            // rewFW.write("Computing SQL Rewriting");
                            try {
                                // Get the SQL rewriting as Union of Conjunctive Queries
                                long duration = -System.nanoTime();
                                final List<String> ucqSQLRewriting = sqlRewriter.getSQLRewritings(
                                        parameters.getConstraintsSqlQuery(), parameters.getNbNodes(),
                                        parameters.getStartFromRes());

                                duration = ((duration + System.nanoTime()) / 1000000);
                                IRelation resultAux = rf.createRelation();
                                for (final String qu : ucqSQLRewriting) {
                                    IRelation r = StorageManager.executeQuery(qu);

                                    // LOGGER.info("-Query: " +
                                    // qu+" "+r.size()+" "+c);
                                    resultAux.addAll(r);
                                }
                                for (IPredicate predicate : qr.getBodyPredicates()) {
                                    results.put(predicate, resultAux);
                                }
                                result.addAll(resultAux);
                                // LOGGER.info("-R: " +result.size());
                            } catch (final SQLException e) {
                                e.printStackTrace();
                            }
                        }
                        // write the result in the output
                        // rewFW.write(result.toString());

                        // construct the graph
                        Map<User, List<user.models.Pair<IPredicate, IPredicate>>> prefs = JsonHelper
                                .getGPreferences(parameters.getPrefs(), tgds);
                        final cs.ox.ac.uk.gsors2.GPreferencesGraph prefGraph = Factory.GPGRAPH
                                .createPreferencesGraph();
                        long constPrefGraphTime = System.currentTimeMillis();
                        //                     final long constPrefGraphMem = MonitoringUtils
                        //                           .getHeapUsage();

                        for (User user : prefs.keySet()) {
                            for (user.models.Pair<IPredicate, IPredicate> pairPreference : prefs.get(user)) {
                                IRelation morePrefs = results.get(pairPreference.getElement0());
                                IRelation lessPrefs = results.get(pairPreference.getElement1());
                                for (int j = 0; j < morePrefs.size(); j++) {
                                    ITuple el1 = morePrefs.get(j);
                                    if (!lessPrefs.contains(el1)) {
                                        for (int i = 0; i < lessPrefs.size(); i++) {
                                            ITuple el2 = lessPrefs.get(i);
                                            GPreferenceEdge edge = new GPreferenceEdge(el1, el2, user);
                                            prefGraph.addPreference(edge);
                                        }
                                    }
                                }
                            }
                        }
                        for (int i = 0; i < result.size(); i++) {
                            ITuple v = result.get(i);
                            prefGraph.addVertex(v);

                        }
                        // LOGGER.info("-----Size--Graph--: " +
                        // result.size()+"--"+prefGraph.getVertexesSize() );
                        constPrefGraphTime = System.currentTimeMillis() - constPrefGraphTime;
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_TIME, constPrefGraphTime);
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_CONST_SIZE_E, (long) prefGraph.getEdgesSize());

                        // rep.setValue(GRewMetric.PREFGRAPH_CONST_MEM,
                        // MonitoringUtils.getHeapUsage()
                        // - constPrefGraphMem);

                        long mergeOperatorTime = System.currentTimeMillis();
                        // final long mergeProbModel = MonitoringUtils
                        // .getHeapUsage();
                        //                     prefGraph
                        //                           .mergeProbabilisticModel(_DEFAULT_INPUT_PATH+"reviews.txt");
                        mergeOperatorTime = System.currentTimeMillis() - mergeOperatorTime;
                        // rep.setValue(GRewMetric.PREFGRAPH_MERGE_MEM,
                        // MonitoringUtils.getHeapUsage()
                        // - mergeProbModel);
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_MERGE_TIME, (long) mergeOperatorTime);
                        // long topKMem = MonitoringUtils
                        // .getHeapUsage();
                        long topKTime = System.currentTimeMillis();
                        IRelation r = GTopKAlgorithms.getTopK(prefGraph, parameters.getK(), strategyQA);
                        topKTime = System.currentTimeMillis() - topKTime;
                        // rep.setValue(GRewMetric.PREFGRAPH_TOPK_MEM,
                        // topKMem-MonitoringUtils
                        // .getHeapUsage());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_TIME, topKTime);
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_V, (long) prefGraph.getVertexesSize());
                        rep.setValue(GRewMetric.PREFGRAPH_TOPK_SIZE_E, (long) prefGraph.getEdgesSize());
                        rep.setValue(GRewMetric.ANSWER_SIZE, (long) r.size());
                        // rewFW.write("\n");
                        // for (final ILiteral h : newHeads) {
                        // rewFW.write("?- " + h + ".\n");
                        // }
                        // rewFW.write("\n");
                        // rewFW.flush();
                        // rewFW.close();

                        // dump summary metrics.
                        sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
                        timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
                        // cacheSummaryWriter.writeNext(rep
                        // .getSummaryCacheMetrics());
                        // memorySummaryWriter.writeNext(rep
                        // .getSummaryMemoryMetrics());
                        sizeSummaryWriter.flush();
                        timeSummaryWriter.flush();
                        // cacheSummaryWriter.flush();
                        // memorySummaryWriter.flush();

                    }

                }

            }
        }
        sizeSummaryWriter.close();
        timeSummaryWriter.close();
        // cacheSummaryWriter.close();
        // memorySummaryWriter.close();

    }
}

From source file:edu.unc.lib.dl.services.DigitalObjectManagerImpl.java

/**
 * This method destroys an object and its descendants in Fedora, leaving no preservation data. It will update
 * any ancillary services and log delete events.
 * 
 * @param pid
 *           the PID of the object to purge, along with all of its children
 * @param user
 *           the user performing the purge
 * @param message
 *           the reason for the purge
 * @return a list of PIDs that were purged
 * @see edu.unc.lib.dl.services.DigitalObjectManager.purge()
 */
public List<PID> delete(PID pid, String user, String message) throws IngestException, NotFoundException {
    availableCheck();

    // Prevent deletion of the repository object and the collections object
    if (pid.equals(ContentModelHelper.Administrative_PID.REPOSITORY.getPID()) || pid.equals(collectionsPid))
        throw new IllegalRepositoryStateException("Cannot delete administrative object: " + pid);

    List<PID> deleted = new ArrayList<PID>();

    // FIXME disallow delete of "/admin" folder
    // TODO add protected delete method for force initializing

    // Get all children and store for deletion
    List<PID> toDelete = this.getTripleStoreQueryService().fetchAllContents(pid);
    toDelete.add(pid);

    // gathering delete set, checking for object relationships
    // Find all relationships which refer to the pid being deleted
    List<PID> refs = this.getReferencesToContents(pid);
    refs.removeAll(toDelete);
    if (refs.size() > 0) {
        StringBuffer s = new StringBuffer();
        s.append("Cannot delete ").append(pid)
                .append(" because it will break object references from these PIDs: ");
        for (PID b : refs) {
            s.append("\t").append(b);
        }
        throw new IngestException(s.toString());
    }
    PID container = this.getTripleStoreQueryService().fetchContainer(pid);
    if (container == null) {
        throw new IllegalRepositoryStateException("Cannot find a container for the specified object: " + pid);
    }

    // begin transaction, must delete all content and modify parent or dump
    // rollback info
    PremisEventLogger logger = new PremisEventLogger(user);

    DateTime transactionStart = new DateTime();
    Throwable thrown = null;
    List<PID> removed = new ArrayList<PID>();
    removed.add(pid);
    try {
        // update container
        this.removeFromContainer(pid);
        Element event = logger.logEvent(PremisEventLogger.Type.DELETION,
                "Deleted " + deleted.size() + " contained object(s).", container);
        PremisEventLogger.addDetailedOutcome(event, "success", "Message: " + message, null);
        this.managementClient.writePremisEventsToFedoraObject(logger, container);

        // delete object and all of its children
        for (PID obj : toDelete) {
            try {
                this.getManagementClient().purgeObject(obj, message, false);
                deleted.add(obj);
            } catch (NotFoundException e) {
                log.error("Delete set referenced an object that didn't exist: " + pid.getPid(), e);
            }
        }
        // Send message to message queue informing it of the deletion(s)
        if (this.getOperationsMessageSender() != null) {
            this.getOperationsMessageSender().sendRemoveOperation(user, container, removed, null);
        }
    } catch (FedoraException fault) {
        log.error("Fedora threw an unexpected fault while deleting " + pid.getPid(), fault);
        thrown = fault;
    } catch (RuntimeException e) {
        this.setAvailable(false);
        log.error("Fedora threw an unexpected runtime exception while deleting " + pid.getPid(), e);
        thrown = e;
    } finally {
        if (thrown != null && toDelete.size() > deleted.size()) {
            // some objects not deleted
            List<PID> missed = new ArrayList<PID>();
            missed.addAll(toDelete);
            missed.removeAll(deleted);
            this.dumpRollbackInfo(transactionStart, missed, "Could not complete delete of " + pid.getPid()
                    + ", please purge objects and check container " + container.getPid() + ".");
        }
    }
    if (thrown != null) {
        throw new IngestException("There was a problem completing the delete operation", thrown);
    }
    return deleted;
}
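
refs.removeAll(toDelete) filters out references that originate inside the delete set itself, so only references from surviving objects block the delete. The same idiom appears in the rollback path, where missed.removeAll(deleted) yields the objects that were scheduled for deletion but never purged.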

From source file:edu.ku.kuali.kra.timeandmoney.service.impl.TimeAndMoneyHistoryServiceImpl.java

@Override
protected void removeCanceledDocs(List<TimeAndMoneyDocument> docs) {
    List<TimeAndMoneyDocument> tempCanceledDocs = new ArrayList<TimeAndMoneyDocument>();
    for (TimeAndMoneyDocument doc : docs) {
        if (doc.getDocumentHeader().hasWorkflowDocument()) {
            if (doc.getDocumentHeader().getWorkflowDocument().isCanceled()) {
                tempCanceledDocs.add(doc);
            }
        }
    }
    docs.removeAll(tempCanceledDocs);
}
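
Collecting the canceled documents in a temporary list and calling removeAll afterwards avoids the ConcurrentModificationException that removing from docs inside the for-each loop would risk; Iterator.remove or, on Java 8+, removeIf are the other standard options.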