Example usage for java.util.concurrent ConcurrentMap keySet

Introduction

This page shows example usages of java.util.concurrent.ConcurrentMap#keySet.

Prototype

Set<K> keySet();

Document

Returns a Set view of the keys contained in this map.
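
The returned set is a live view backed by the map, and its iterator is weakly consistent. Before the real-world usages below, here is a minimal, self-contained sketch of those semantics (the class name KeySetViewDemo is illustrative, not from any source file below):

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class KeySetViewDemo {
    public static void main(String[] args) {
        final ConcurrentMap<String, Integer> map = new ConcurrentHashMap<>();
        map.put("a", 1);
        map.put("b", 2);

        final Set<String> keys = map.keySet();

        keys.remove("a");                         // removing from the view removes the mapping
        System.out.println(map.containsKey("a")); // false

        map.put("c", 3);                          // additions to the map show up in the view
        System.out.println(keys.contains("c"));   // true

        // The iterator is weakly consistent: it tolerates concurrent
        // modification instead of throwing ConcurrentModificationException.
        for (final String key : keys) {
            System.out.println(key + " -> " + map.get(key));
        }
    }
}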

Usage

From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java

@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes,
                options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter,
                        logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory,
                    true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(
                        failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                                + "\tPer entity exception logs and a full report can be found in "
                                + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                    "The following PortalDataKeys are not listed in the dataTypeImportOrder List: "
                            + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see {}", importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}
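
Note the two ConcurrentMap idioms in this method: the loop drains dataToImport with remove(portalDataKey) in a fixed order, and the final keySet() call reports any keys the ordering missed. A minimal standalone sketch of that drain-and-report pattern (DrainAndReport and the type names are illustrative, not from the uPortal sources):

import java.util.Arrays;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;

public class DrainAndReport {
    public static void main(String[] args) {
        final ConcurrentMap<String, Queue<String>> work = new ConcurrentHashMap<>();
        work.put("users", new ConcurrentLinkedQueue<>(Arrays.asList("u1.xml", "u2.xml")));
        work.put("groups", new ConcurrentLinkedQueue<>(Arrays.asList("g1.xml")));
        work.put("unknown", new ConcurrentLinkedQueue<>(Arrays.asList("x.xml")));

        // Drain the map in a fixed processing order, like dataKeyImportOrder above.
        for (final String type : Arrays.asList("users", "groups")) {
            final Queue<String> files = work.remove(type);
            if (files == null) {
                continue;
            }
            for (String file; (file = files.poll()) != null;) {
                System.out.println("importing " + type + ": " + file);
            }
        }

        // keySet() reports whatever the ordering did not cover.
        if (!work.isEmpty()) {
            throw new IllegalStateException("Types missing from the import order: " + work.keySet());
        }
    }
}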

From source file:org.tomitribe.tribestream.registryng.resources.ClientResource.java

@GET
@Path("invoke/stream")
@Produces("text/event-stream") // will be part of JAX-RS 2.1; for now this makes it work
public void invokeScenario(@Suspended final AsyncResponse asyncResponse, @Context final Providers providers,
        @Context final HttpServletRequest httpServletRequest,
        // base64-encoded JSON with the request and identity, since EventSource doesn't handle it very well
        // TODO: use ciphering with a POST endpoint to avoid having it readable (or similar)
        @QueryParam("request") final String requestBytes) {
    final SseRequest in = loadPayload(SseRequest.class, providers, requestBytes);

    final String auth = in.getIdentity();
    security.check(auth, httpServletRequest, () -> {
    }, () -> {
        throw new WebApplicationException(Response.Status.FORBIDDEN);
    });

    final GenericClientService.Request req = toRequest(in.getHttp());
    final Scenario scenario = in.getHttp().getScenario();

    final MultivaluedHashMap<String, Object> fakeHttpHeaders = new MultivaluedHashMap<>();
    final ConcurrentMap<Future<?>, Boolean> computations = new ConcurrentHashMap<>();
    final MessageBodyWriter<LightHttpResponse> writerResponse = providers.getMessageBodyWriter(
            LightHttpResponse.class, LightHttpResponse.class, annotations, APPLICATION_JSON_TYPE);
    final MessageBodyWriter<ScenarioEnd> writerEnd = providers.getMessageBodyWriter(ScenarioEnd.class,
            ScenarioEnd.class, annotations, APPLICATION_JSON_TYPE);

    // not the JAX-RS response, because CXF wraps that one and prevents flush() from working
    final HttpServletResponse httpServletResponse = HttpServletResponse.class
            .cast(httpServletRequest.getAttribute("tribe.registry.response"));
    httpServletResponse.setHeader("Content-Type", "text/event-stream");
    try {
        httpServletResponse.flushBuffer();
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }

    final ServletOutputStream out;
    try {
        out = httpServletResponse.getOutputStream();
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }

    mes.submit(() -> {
        final AtomicReference<Invoker.Handle> handleRef = new AtomicReference<>();

        try {
            // we compute some easy stats asynchronously
            final Map<Integer, AtomicInteger> sumPerResponse = new HashMap<>();
            final AtomicInteger total = new AtomicInteger();
            final AtomicLong min = new AtomicLong(Long.MAX_VALUE); // start high so the first sample wins
            final AtomicLong max = new AtomicLong(Long.MIN_VALUE); // start low so the first sample wins
            final AtomicLong sum = new AtomicLong();

            final AtomicInteger writeErrors = new AtomicInteger(0);

            final long start = System.currentTimeMillis();
            handleRef.set(invoker.invoke(scenario.getThreads(), scenario.getInvocations(),
                    scenario.getDuration(), timeout, () -> {
                        if (handleRef.get().isCancelled()) {
                            return;
                        }

                        LightHttpResponse resp;
                        try {
                            final GenericClientService.Response invoke = service.invoke(req);
                            resp = new LightHttpResponse(invoke.getStatus(), null,
                                    invoke.getClientExecutionDurationMs());
                        } catch (final RuntimeException e) {
                            resp = new LightHttpResponse(-1, e.getMessage(), -1);
                        }

                        // process it on the executor, where synchronizing on the shared output stream is safe
                        final LightHttpResponse respRef = resp;
                        computations.put(mes.submit(() -> {
                            synchronized (out) {
                                try {
                                    out.write(dataStart);
                                    writerResponse.writeTo(respRef, LightHttpResponse.class,
                                            LightHttpResponse.class, annotations, APPLICATION_JSON_TYPE,
                                            fakeHttpHeaders, out);
                                    out.write(dataEnd);
                                    out.flush();
                                } catch (final IOException e) {
                                    if (writeErrors.incrementAndGet() > toleratedWriteErrors) {
                                        handleRef.get().cancel();
                                    }
                                    throw new IllegalStateException(e);
                                }
                            }

                            if (handleRef.get().isCancelled()) {
                                return;
                            }

                            final long clientExecutionDurationMs = respRef.getClientExecutionDurationMs();

                            total.incrementAndGet();
                            sumPerResponse.computeIfAbsent(respRef.getStatus(), k -> new AtomicInteger())
                                    .incrementAndGet();
                            sum.addAndGet(clientExecutionDurationMs);
                            {
                                // keep the smallest observed duration (CAS loop)
                                long m;
                                do {
                                    m = min.get();
                                    if (clientExecutionDurationMs >= m) {
                                        break; // current min is already smaller
                                    }
                                } while (!min.compareAndSet(m, clientExecutionDurationMs));
                            }

                            {
                                // keep the largest observed duration (CAS loop)
                                long m;
                                do {
                                    m = max.get();
                                    if (clientExecutionDurationMs <= m) {
                                        break; // current max is already larger
                                    }
                                } while (!max.compareAndSet(m, clientExecutionDurationMs));
                            }
                        }), true);
                    }));

            handleRef.get().await();

            final long end = System.currentTimeMillis();

            do { // wait until all stats-computation tasks have finished
                final Iterator<Future<?>> iterator = computations.keySet().iterator();
                while (iterator.hasNext()) {
                    try {
                        iterator.next().get(timeout, TimeUnit.MILLISECONDS);
                    } catch (final InterruptedException e) {
                        Thread.interrupted(); // swallow the interruption and keep draining the remaining futures
                    } catch (final ExecutionException | TimeoutException e) {
                        throw new IllegalStateException(e.getCause());
                    } finally {
                        iterator.remove();
                    }
                }
            } while (!computations.isEmpty());

            if (handleRef.get().isCancelled()) {
                return;
            }

            try {
                out.write(dataStart);
                writerEnd.writeTo(
                        new ScenarioEnd(
                                sumPerResponse.entrySet().stream()
                                        .collect(toMap(Map.Entry::getKey, t -> t.getValue().get())),
                                end - start, total.get(), min.get(), max.get(), sum.get() * 1. / total.get()),
                        ScenarioEnd.class, ScenarioEnd.class, annotations, APPLICATION_JSON_TYPE,
                        new MultivaluedHashMap<>(), out);
                out.write(dataEnd);
                out.flush();
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        } finally {
            try {
                // CXF will skip it since we already wrote the response ourselves
                asyncResponse.resume("");
            } catch (final RuntimeException re) {
                // no-op: not that important
            }
        }
    });
}
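
The stats-draining do/while loop above relies on keySet() being a live view: calling iterator.remove() on computations.keySet().iterator() deletes the entry from the underlying map, so the ConcurrentMap doubles as a concurrent set of in-flight futures. A reduced, single-pass sketch of that idiom (DrainFutures is illustrative; the original loops until the map is empty because tasks keep being registered concurrently):

import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class DrainFutures {
    public static void main(String[] args) throws Exception {
        final ExecutorService pool = Executors.newFixedThreadPool(4);
        final ConcurrentMap<Future<?>, Boolean> pending = new ConcurrentHashMap<>();

        // Submit some work and track each future in the map.
        for (int i = 0; i < 10; i++) {
            final int id = i;
            pending.put(pool.submit(() -> System.out.println("task " + id)), Boolean.TRUE);
        }

        // Drain the map through its key-set view; iterator.remove()
        // deletes the entry from the underlying map.
        final Iterator<Future<?>> it = pending.keySet().iterator();
        while (it.hasNext()) {
            it.next().get(10, TimeUnit.SECONDS); // wait for completion
            it.remove();
        }

        System.out.println("remaining: " + pending.size()); // 0
        pool.shutdown();
    }
}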