Example usage for java.util Set toString

Introduction

On this page you can find example usages of java.util.Set toString, collected from open source projects.

Prototype

public String toString() 

Document

Returns a string representation of the object.
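Since HashSet, LinkedHashSet, and TreeSet all inherit AbstractCollection#toString, the returned string lists the elements in square brackets, separated by ", ". A minimal, self-contained sketch (not taken from the projects below) of what that looks like:

import java.util.LinkedHashSet;
import java.util.Set;

public class SetToStringDemo {
    public static void main(String[] args) {
        // LinkedHashSet keeps insertion order, so the printed form is predictable;
        // a HashSet would print the same elements in an unspecified order.
        Set<String> colors = new LinkedHashSet<String>();
        colors.add("red");
        colors.add("green");
        colors.add("blue");

        System.out.println(colors.toString()); // prints: [red, green, blue]

        // String concatenation calls toString() implicitly, which is how most of
        // the examples below embed a Set in a log or error message.
        System.out.println("Missing colors: " + colors);
    }
}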

Usage

From source file:gov.nih.nci.caarray.plugins.illumina.IlluminaCsvDesignHandler.java
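Validates the header row of an Illumina CSV design file: the required column names are copied into a working set, every header that matches is removed, and whatever remains is reported through Set#toString in the validation error message.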

private void validateHeader(List<String> headers, FileValidationResult result) throws IOException {
    final Set<? extends Enum> requiredHeaders = this.helper.getRequiredColumns();
    final Set<Enum> tmp = new HashSet<Enum>(requiredHeaders);
    for (final String v : headers) {
        for (final Enum h : requiredHeaders) {
            if (h.name().equalsIgnoreCase(v)) {
                tmp.remove(h);
            }
        }
    }
    if (!tmp.isEmpty()) {
        result.addMessage(ValidationMessage.Type.ERROR,
                "Illumina CSV file didn't contain the expected columns " + tmp.toString());
    }
}

From source file:org.openqa.selenium.TakesScreenshotTest.java
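A screenshot-test helper that diffs two sets of pixel colors; the TreeSet of expected colors left unmatched, and the set of unexpected actual colors, are both stringified directly into the failure messages.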

private void compareColors(Set<String> expectedColors, Set<String> actualColors) {
    TreeSet<String> c = new TreeSet<String>(expectedColors);
    c.removeAll(actualColors);
    if (!c.isEmpty()) {
        fail("Unknown expected color is generated: " + c.toString() + ", \n" + " actual colors are: "
                + actualColors.toString());
    }

    // If some expected color is missing, report the leftover actual colors as unknown.
    if (!actualColors.containsAll(expectedColors)) {
        actualColors.removeAll(expectedColors);
        fail("Unknown colors are presented at screenshot: " + actualColors.toString() + " \n"
                + " expected colors are excluded: " + expectedColors.toString());
    }
}

From source file:org.eurekastreams.server.service.opensocial.spi.ActivityServiceImpl.java
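A Shindig ActivityService implementation that logs the incoming set of activity IDs via toString before translating them into a request against the Eureka Streams action framework.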

/**
 * Shindig implementation for retrieving activities from a single user.
 *
 * @param userId
 *            - id of the user to retrieve the activities for.
 * @param groupId
 *            - id of the group that the user is a member of.
 * @param appId
 *            - id of the application requesting the activities.
 * @param fields
 *            - set of fields to retrieve for the activity.
 * @param options
 *            - collection of options for retrieving the activities.
 * @param activityIds
 *            - set of ids of the activities to retrieve.
 * @param token
 *            - the security token for the request.
 *
 * @return collection of activities.
 */
@SuppressWarnings("unchecked")
public Future<RestfulCollection<Activity>> getActivities(final UserId userId, final GroupId groupId,
        final String appId, final Set<String> fields, final CollectionOptions options,
        final Set<String> activityIds, final SecurityToken token) {
    log.trace("Entering getActivities");
    List<Activity> osActivities = new ArrayList<Activity>();
    try {
        log.debug("Sending getActivities activityIdList to action: " + activityIds.toString());

        LinkedList<Long> activityIdsForRequest = new LinkedList<Long>();
        for (String currentActivityId : activityIds) {
            activityIdsForRequest.add(new Long(currentActivityId));
        }

        Set<String> openSocialIdsForRequest = new HashSet<String>();
        openSocialIdsForRequest.add(userId.getUserId(token));

        GetUserActivitiesRequest currentRequest = new GetUserActivitiesRequest(activityIdsForRequest,
                openSocialIdsForRequest);
        ServiceActionContext currentContext = new ServiceActionContext(currentRequest,
                openSocialPrincipalPopulator.getPrincipal(userId.getUserId(token)));

        LinkedList<ActivityDTO> activities = (LinkedList<ActivityDTO>) serviceActionController
                .execute(currentContext, getUserActivitiesAction);

        log.debug("Retrieved " + activities.size() + " activities from action");

        for (ActivityDTO currentActivity : activities) {
            osActivities.add(convertActivityFromEurekaActivityDTOToOS(currentActivity));
        }
    } catch (Exception ex) {
        log.error("Error occurred retrieving activities ", ex);
        throw new ProtocolException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.getMessage());
    }

    return ImmediateFuture.newInstance(new RestfulCollection<Activity>(osActivities));
}

From source file:org.wso2.carbon.mediator.kerberos.KerberosMediator.java
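A mediator that obtains Kerberos client credentials; when debug logging is enabled, the set of principals on the JAAS login subject is stringified to show which principal the GSS credentials are created for.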

/**
 * Create GSSCredential for the user.
 *
 * @param callbackHandler callback handler.
 * @param mechanismOId    Oid for the mechanism.
 * @return GSSCredential.
 * @throws LoginException
 * @throws PrivilegedActionException
 * @throws GSSException
 */
private GSSCredential createClientCredentials(CallbackHandler callbackHandler, final Oid mechanismOId)
        throws LoginException, PrivilegedActionException, GSSException {

    LoginContext loginContext;
    String loginName;
    if (StringUtils.isNotEmpty(getLoginContextName())) {
        loginName = getLoginContextName();
    } else {
        loginName = "com.sun.security.auth.module.Krb5LoginModule";
    }
    if (callbackHandler != null) {
        loginContext = new LoginContext(loginName, callbackHandler);
    } else {
        loginContext = new LoginContext(loginName);
    }
    loginContext.login();
    if (log.isDebugEnabled()) {
        log.debug("Pre-authentication successful for with Kerberos Server.");
    }

    // Create client credentials from pre authentication with the AD
    final GSSName clientName = gssManager.createName(clientPrincipalValue, GSSName.NT_USER_NAME);
    final PrivilegedExceptionAction<GSSCredential> action = new PrivilegedExceptionAction<GSSCredential>() {
        public GSSCredential run() throws GSSException {

            return gssManager.createCredential(clientName.canonicalize(mechanismOId),
                    GSSCredential.DEFAULT_LIFETIME, mechanismOId, GSSCredential.INITIATE_ONLY);
        }
    };

    if (log.isDebugEnabled()) {
        Set<Principal> principals = loginContext.getSubject().getPrincipals();
        String principalName = null;
        if (principals != null) {
            principalName = principals.toString();
        }
        log.debug("Creating gss credentials as principal : " + principalName);
    }
    return Subject.doAs(loginContext.getSubject(), action);
}

From source file:cc.kune.wave.server.kspecific.KuneWaveServiceDefault.java
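Removes participants from a wave: the full set of participants to delete is logged with toString before the individual remove operations are queued.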

@Override
public boolean delParticipants(final WaveRef waveName, final String whoDel, final Set<String> participants) {
    boolean removed = false;
    final Wavelet wavelet = fetchWave(waveName, whoDel);
    final Participants currentParticipants = wavelet.getParticipants();
    LOG.debug("Removing participants: " + participants.toString());
    final OperationQueue opQueue = new OperationQueue();
    for (final String participant : participants) {
        // FIXME Seems like only one participant per opQueue is added (try to fix
        // this in WAVE)
        final String partWithDomain = participantUtils.of(participant).toString();
        if (currentParticipants.contains(partWithDomain)) {
            LOG.debug("Removing as participant: " + partWithDomain);
            removed = true;
            opQueue.removeParticipantFromWavelet(wavelet, partWithDomain);
        }
    }
    doOperations(whoDel, opQueue, "del participant");
    return removed;
}

From source file:de.phoenix.submission.SubmissionCompilerAndTest.java
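Tracks the classes a student submission must still provide in a set; if the set is non-empty after matching the submitted texts, its toString output names the missing classes in the result message.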

@Override
public PhoenixSubmissionResult controlSubmission(TaskSubmission submission) {

    SubmissionTask task = new SubmissionTask();

    File dir = PhoenixApplication.submissionPipelineDir;
    List<String> commands = getCommands();

    // Check, if all necessary classes are submitted
    Set<String> classes = new HashSet<String>();
    for (Text text : submission.getTask().getTexts()) {
        classes.add(text.getTitle());
    }

    for (Text clazz : submission.getTexts()) {
        task.addClass(clazz.convert());
        classes.remove(clazz.getTitle());
    }

    // Some to implement classes are missing -> error
    if (!classes.isEmpty()) {
        return new PhoenixSubmissionResult(SubmissionStatus.MISSING_FILES,
                "Missing classes to implement/submit. Maybe you wrote the name of the class wrong? Missing Classes:\r\n"
                        + classes.toString());
    }

    if (submission.getTask().isAutomaticTest()) {
        for (TaskTest test : submission.getTask().getTaskTests()) {
            addTest(task, test);
        }
    }

    // TODO: Add libraries
    ProcessBuilder builder = new ProcessBuilder(commands);
    builder.directory(dir);

    File errorLog = new File(dir, "error.log");
    errorLog.delete();
    builder.redirectError(errorLog);

    try {
        Process process = builder.start();
        JSON_MAPPER.writeValue(process.getOutputStream(), task);
        process.getOutputStream().close();

        PhoenixSubmissionResult result = JSON_MAPPER.readValue(process.getInputStream(),
                PhoenixSubmissionResult.class);

        return result;
    } catch (Exception e) {
        DebugLog.log(e);
    }

    return new PhoenixSubmissionResult(SubmissionStatus.OK, "Fine");
}

From source file:org.apache.hive.ptest.conf.TestParser.java
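Parses qfile test configuration into excluded, isolated, and included test sets; the included and excluded sets are stringified into the exception message when both are specified at once.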

private List<QFileTestBatch> parseQFileTests() {
    Splitter splitter = Splitter.on(" ").trimResults().omitEmptyStrings();
    List<QFileTestBatch> result = Lists.newArrayList();
    for (String alias : context.getString("qFileTests", "").split(" ")) {
        Context testContext = new Context(
                context.getSubProperties(Joiner.on(".").join("qFileTest", alias, "")));
        String driver = checkNotNull(testContext.getString("driver"), "driver").trim();
        // execute the driver locally?
        boolean isParallel = !testContext.getBoolean("isolateDriver", false);
        File directory = new File(sourceDirectory,
                checkNotNull(testContext.getString("directory"), "directory").trim());
        Set<String> excludedTests = Sets.newHashSet();
        for (String excludedTestGroup : splitter.split(testContext.getString("exclude", ""))) {
            excludedTests.addAll(Arrays.asList(testContext
                    .getString(Joiner.on(".").join("groups", excludedTestGroup), "").trim().split(" ")));
        }
        Set<String> isolatedTests = Sets.newHashSet();
        for (String isolatedTestGroup : splitter.split(testContext.getString("isolate", ""))) {
            isolatedTests.addAll(Arrays.asList(testContext
                    .getString(Joiner.on(".").join("groups", isolatedTestGroup), "").trim().split(" ")));
        }

        Set<String> includedTests = Sets.newHashSet();
        for (String includedTestGroup : splitter.split(testContext.getString("include", ""))) {
            includedTests.addAll(Arrays.asList(testContext
                    .getString(Joiner.on(".").join("groups", includedTestGroup), "").trim().split(" ")));
        }
        if (!includedTests.isEmpty() && !excludedTests.isEmpty()) {
            throw new IllegalArgumentException(
                    String.format("Included and excluded mutally exclusive." + " Included = %s, excluded = %s",
                            includedTests.toString(), excludedTests.toString()));
        }
        result.addAll(createQFileTestBatches(driver,
                checkNotNull(testContext.getString("queryFilesProperty"), "queryFilesProperty").trim(),
                directory, testContext.getInteger("batchSize", 30), isParallel, excludedTests, includedTests,
                isolatedTests));
    }
    return result;
}

From source file:org.lightjason.agentspeak.agent.TestCAgent.java
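Collects per-script parse failures into a set and passes its toString output as the assertion message, so a failing run prints every broken ASL file at once.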

/**
 * test for default generators and configuration
 */
@Test
public final void testASLDefault() {
    final Set<String> l_result = ASL.entrySet().stream().map(i -> {
        try (final InputStream l_stream = new FileInputStream(i.getKey());) {
            new CAgentGenerator(l_stream, ACTIONS.keySet(), IAggregation.EMPTY, Collections.emptySet(),
                    new CVariableBuilder()).generatesingle().call();
            return null;
        } catch (final Exception l_exception) {
            return MessageFormat.format("{0}: {1}", i.getValue(), l_exception);
        }
    }).filter(Objects::nonNull).collect(Collectors.toSet());

    assertTrue(l_result.toString(), l_result.isEmpty());
}

From source file:org.deri.iris.queryrewriting.SQLRewritingTest.java
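A long benchmark driver for query rewriting; among other things it logs the set of detected theory expressivities with toString and writes the same string into the header of each rewriting output file.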

public void testSQLRewriting() throws Exception {

    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy " + ncCheckStrategy.name());

    // Read the test-cases file

    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");

    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));

    final String creationDate = dateFormat.format(new Date());

    // Summary reporting
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-", rewLang.name(),
            "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {

        // Read the next test case on the list
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the Directory where to store the test results
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdir()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }

        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program
        final FileReader fr = new FileReader(testFile);
        final StringBuilder sb = new StringBuilder();
        int ch = -1;
        while ((ch = fr.read()) >= 0) {
            sb.append((char) ch);
        }
        final String program = sb.toString();
        fr.close();

        // Parse the program
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules
        final List<IRule> rules = parser.getRules();

        // Get the queries
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies in rules
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);

        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // Get the configuration
        final Map<IPredicate, IRelation> conf = parser.getDirectives();
        if (conf.containsKey(BasicFactory.getInstance().createPredicate("DBConnection", 8))) {
            StorageManager.getInstance();
            StorageManager.configure(conf);
        } else {
            LOGGER.error("Missing DB connection parameters.");
            throw new ConfigurationException("Missing DB connection parameters.");

        }

        // Get the SBox rules from the set of rules
        final List<IRule> sbox = RewritingUtils.getSBoxRules(rules, queryHeads);

        // get the constraints from the set of rules
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());

        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY))
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");

        // compute the dependency graph

        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps = DepGraphUtils
                .computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Setup caching
        CacheManager.setupCaching();

        // if linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // rewriting constraints
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints
                        .addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // dump the rewritten constraints:
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the Rewriting
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {

            // Setup caching
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Setup reporting
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);

            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE,
                    MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.

            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + "///\n");
            rewFW.write("/// Ontology: " + testName + "///");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);

            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // dump metrics for individual queries.
            rewFW.write(rep.getReport());

            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);

            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();

            if (sbox.size() > 0) {

                // Produce the rewriting according to the Storage Box
                final IQueryRewriter ndmRewriter = new NDMRewriter(sbox);
                // final Set<ILiteral> newHeads = new HashSet<ILiteral>();
                final Set<IRule> sboxRew = new LinkedHashSet<IRule>();
                for (final IRule r : rewriting) {
                    // Create a file to store the rewriting results as Datalog Rules
                    LOGGER.debug("-- Processing rewriting: " + r);
                    sboxRew.addAll(ndmRewriter.getRewriting(r));
                }

                // dump the rewritten sbox rewriting:
                final File sboxFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_sbox_rew.dtg"));
                final FileWriter sboxFW = new FileWriter(sboxFile);
                IOUtils.writeLines(sboxRew, IOUtils.LINE_SEPARATOR, sboxFW);
                sboxFW.close();

                // Produce a SQL rewriting
                final SQLRewriter sqlRewriter = new SQLRewriter(sboxRew);
                final String sqlRew = sqlRewriter.getUCQSQLRewriting("", 1000, 0);
                final File sqlFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.sql"));
                final FileWriter sqlFW = new FileWriter(sqlFile);
                IOUtils.write(sqlRew, sqlFW);
                sqlFW.close();

                // Execute the SQL rewriting
                LOGGER.info("Executing SQL Rewriting");

                long duration = System.nanoTime();
                final IRelation result = StorageManager.executeQuery(sqlRew);
                duration = (System.nanoTime() - duration) / 1000000;
                LOGGER.info(result.size() + " tuples in " + duration + " [ms]\n");
            }
        }
    }
    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();

}

From source file:keywhiz.service.resources.admin.SecretsResource.java
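Deletes a secret by ID; the names of the groups the secret was attached to are collected into a set and recorded via toString in the audit log's extra info, so the associations can be restored manually if needed.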

/**
 * Delete Secret by ID
 *
 * @excludeParams user
 * @param secretId the ID of the Secret to be deleted
 *
 * @description Deletes a single Secret if found.
 * Used by Keywhiz CLI and the web ui.
 * @responseMessage 200 Found and deleted Secret with given ID
 * @responseMessage 404 Secret with given ID not Found
 */
@Path("{secretId}")
@Timed
@ExceptionMetered
@DELETE
public Response deleteSecret(@Auth User user, @PathParam("secretId") LongParam secretId) {
    Optional<Secret> secret = secretController.getSecretById(secretId.get());
    if (!secret.isPresent()) {
        logger.info("User '{}' tried deleting a secret which was not found (id={})", user, secretId.get());
        throw new NotFoundException("Secret not found.");
    }

    logger.info("User '{}' deleting secret id={}, name='{}'", user, secretId, secret.get().getName());

    // Get the groups for this secret, so they can be restored manually if necessary
    Set<String> groups = aclDAOReadOnly.getGroupsFor(secret.get()).stream().map(Group::getName)
            .collect(toSet());

    secretDAOReadWrite.deleteSecretsByName(secret.get().getName());

    // Record the deletion
    Map<String, String> extraInfo = new HashMap<>();
    extraInfo.put("groups", groups.toString());
    extraInfo.put("current version", secret.get().getVersion().toString());
    auditLog.recordEvent(new Event(Instant.now(), EventTag.SECRET_DELETE, user.getName(),
            secret.get().getName(), extraInfo));
    return Response.noContent().build();
}