Example usage for java.util Queue add

List of usage examples for java.util Queue add

Introduction

In this page you can find the example usage for java.util Queue add.

Prototype

boolean add(E e);

Source Link

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.

Usage

From source file:de.innovationgate.wgpublisher.lucene.LuceneManager.java

/**
 * Registers a request to remove content from the Lucene index for the
 * request's database. Per-database queues are created lazily on first use.
 *
 * @param request the deletion request to enqueue; its dbkey selects the target queue
 */
private void addDeletionRequest(IndexingRequest request) {
    synchronized (_indexingRequestLock) {
        // Fetch the per-database queue, creating it lazily on first use,
        // then enqueue the request. All writers hold _indexingRequestLock,
        // so the check-then-put pair is safe.
        Queue<IndexingRequest> pending = _deletionRequestsMap.get(request.getDbkey());
        if (pending == null) {
            pending = new ConcurrentLinkedQueue<IndexingRequest>();
            _deletionRequestsMap.put(request.getDbkey(), pending);
        }
        pending.add(request);
    }
}

From source file:de.innovationgate.wgpublisher.lucene.LuceneManager.java

/**
 * Registers a request to add content to the Lucene index for the
 * request's database. Per-database queues are created lazily on first use.
 *
 * @param request the addition request to enqueue; its dbkey selects the target queue
 */
private void addAdditionRequest(IndexingRequest request) {
    synchronized (_indexingRequestLock) {
        // Fetch the per-database queue, creating it lazily on first use.
        Queue<IndexingRequest> requests = _additionRequestsMap.get(request.getDbkey());
        if (requests == null) {
            // Use the simple type name, consistent with addDeletionRequest.
            requests = new ConcurrentLinkedQueue<IndexingRequest>();
            _additionRequestsMap.put(request.getDbkey(), requests);
        }
        requests.add(request);
    }
}

From source file:io.openvidu.test.e2e.OpenViduTestAppE2eTest.java

/**
 * Verifies that a publisher can be swapped at runtime: first CAMERA video with
 * audio, then SCREEN video without audio, then CAMERA with audio again. After
 * each publication the test asserts the stream type and audio flag reported by
 * "streamPlaying" events, and the media tracks of the rendered video elements.
 */
@Test
@DisplayName("Change publisher dynamically")
void changePublisherTest() throws Exception {

    // Filled from the event-handler thread; drained and asserted on the main thread.
    Queue<Boolean> threadAssertions = new ConcurrentLinkedQueue<Boolean>();

    setupBrowser("chrome");

    log.info("Change publisher dynamically");

    WebElement oneToManyInput = user.getDriver().findElement(By.id("one2many-input"));
    oneToManyInput.clear();
    oneToManyInput.sendKeys("1");

    user.getDriver().findElement(By.id("auto-join-checkbox")).click();

    final CountDownLatch latch1 = new CountDownLatch(2);

    // First publication (audio + video [CAMERA])
    user.getEventManager().on("streamPlaying", (event) -> {
        JsonObject stream = event.get("target").getAsJsonObject().get("stream").getAsJsonObject();
        threadAssertions.add("CAMERA".equals(stream.get("typeOfVideo").getAsString()));
        threadAssertions.add(stream.get("hasAudio").getAsBoolean());

        latch1.countDown();
    });
    user.getDriver().findElement(By.id("one2many-btn")).click();

    user.getEventManager().waitUntilEventReaches("connectionCreated", 4);
    user.getEventManager().waitUntilEventReaches("accessAllowed", 1);
    user.getEventManager().waitUntilEventReaches("streamCreated", 2);
    user.getEventManager().waitUntilEventReaches("streamPlaying", 2);

    if (!latch1.await(5000, TimeUnit.MILLISECONDS)) {
        gracefullyLeaveParticipants(2);
        fail("Waiting for 2 streamPlaying events to happen in total");
        return;
    }

    user.getEventManager().off("streamPlaying");
    log.info("Thread assertions: {}", threadAssertions.toString());
    for (Iterator<Boolean> iter = threadAssertions.iterator(); iter.hasNext();) {
        Assert.assertTrue("Some Event property was wrong", iter.next());
        iter.remove();
    }

    int numberOfVideos = user.getDriver().findElements(By.tagName("video")).size();
    Assert.assertEquals("Expected 2 videos but found " + numberOfVideos, 2, numberOfVideos);
    Assert.assertTrue("Videos were expected to have audio and video tracks", user.getEventManager()
            .assertMediaTracks(user.getDriver().findElements(By.tagName("video")), true, true));

    final CountDownLatch latch2 = new CountDownLatch(2);

    // Second publication (only video (SCREEN))
    user.getEventManager().on("streamPlaying", (event) -> {
        JsonObject stream = event.get("target").getAsJsonObject().get("stream").getAsJsonObject();
        threadAssertions.add("SCREEN".equals(stream.get("typeOfVideo").getAsString()));
        threadAssertions.add(!stream.get("hasAudio").getAsBoolean());
        latch2.countDown();
    });
    user.getDriver().findElement(By.cssSelector("#openvidu-instance-0 .change-publisher-btn")).click();

    user.getEventManager().waitUntilEventReaches("streamDestroyed", 2);
    user.getEventManager().waitUntilEventReaches("accessAllowed", 2);
    user.getEventManager().waitUntilEventReaches("streamCreated", 4);
    user.getEventManager().waitUntilEventReaches("streamPlaying", 4);

    if (!latch2.await(5000, TimeUnit.MILLISECONDS)) {
        gracefullyLeaveParticipants(2);
        fail("Waiting for 4 streamPlaying events to happen in total");
        return;
    }

    user.getEventManager().off("streamPlaying");
    log.info("Thread assertions: {}", threadAssertions.toString());
    for (Iterator<Boolean> iter = threadAssertions.iterator(); iter.hasNext();) {
        Assert.assertTrue("Some Event property was wrong", iter.next());
        iter.remove();
    }

    numberOfVideos = user.getDriver().findElements(By.tagName("video")).size();
    Assert.assertEquals("Expected 2 videos but found " + numberOfVideos, 2, numberOfVideos);
    // BUGFIX: the SCREEN publication has video but no audio (assertMediaTracks is
    // called with hasAudio=false, hasVideo=true); the old message claimed the
    // opposite ("only have audio tracks"), which was misleading on failure.
    Assert.assertTrue("Videos were expected to only have video tracks", user.getEventManager()
            .assertMediaTracks(user.getDriver().findElements(By.tagName("video")), false, true));

    final CountDownLatch latch3 = new CountDownLatch(2);

    // Third publication (audio + video [CAMERA])
    user.getEventManager().on("streamPlaying", (event) -> {
        JsonObject stream = event.get("target").getAsJsonObject().get("stream").getAsJsonObject();
        threadAssertions.add("CAMERA".equals(stream.get("typeOfVideo").getAsString()));
        threadAssertions.add(stream.get("hasAudio").getAsBoolean());
        latch3.countDown();
    });
    user.getDriver().findElement(By.cssSelector("#openvidu-instance-0 .change-publisher-btn")).click();
    user.getEventManager().waitUntilEventReaches("streamDestroyed", 4);
    user.getEventManager().waitUntilEventReaches("accessAllowed", 3);
    user.getEventManager().waitUntilEventReaches("streamCreated", 6);
    user.getEventManager().waitUntilEventReaches("streamPlaying", 6);

    if (!latch3.await(8000, TimeUnit.MILLISECONDS)) {
        gracefullyLeaveParticipants(2);
        fail("Waiting for 6 streamPlaying events to happen in total");
        return;
    }

    user.getEventManager().off("streamPlaying");
    log.info("Thread assertions: {}", threadAssertions.toString());
    for (Iterator<Boolean> iter = threadAssertions.iterator(); iter.hasNext();) {
        Assert.assertTrue("Some Event property was wrong", iter.next());
        iter.remove();
    }

    numberOfVideos = user.getDriver().findElements(By.tagName("video")).size();
    Assert.assertEquals("Expected 2 videos but found " + numberOfVideos, 2, numberOfVideos);
    Assert.assertTrue("Videos were expected to have audio and video tracks", user.getEventManager()
            .assertMediaTracks(user.getDriver().findElements(By.tagName("video")), true, true));

    gracefullyLeaveParticipants(2);
}

From source file:com.searchcode.app.jobs.repository.IndexBaseRepoJob.java

/**
 * Indexes all the documents in the repository changed file effectively performing a delta update
 * Should only be called when there is a genuine update IE something was indexed previously and
 * has has a new commit./*from   w w  w  . j a  v  a  2 s .  c  o m*/
 */
public void indexDocsByDelta(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        RepositoryChanged repositoryChanged) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);

    // Used to hold the reports of what was indexed
    List<String[]> reportList = new ArrayList<>();

    for (String changedFile : repositoryChanged.getChangedFiles()) {
        if (this.shouldJobPauseOrTerminate()) {
            return;
        }

        if (Singleton.getDataService().getPersistentDelete().contains(repoName)) {
            return;
        }

        String[] split = changedFile.split("/");
        String fileName = split[split.length - 1];
        changedFile = fileRepoLocations + "/" + repoName + "/" + changedFile;
        changedFile = changedFile.replace("//", "/");

        CodeLinesReturn codeLinesReturn = this.getCodeLines(changedFile, reportList);
        if (codeLinesReturn.isError()) {
            break;
        }

        IsMinifiedReturn isMinified = this.getIsMinified(codeLinesReturn.getCodeLines(), fileName, reportList);
        if (isMinified.isMinified()) {
            break;
        }

        if (this.checkIfEmpty(codeLinesReturn.getCodeLines(), changedFile, reportList)) {
            break;
        }

        if (this.determineBinary(changedFile, fileName, codeLinesReturn.getCodeLines(), reportList)) {
            break;
        }

        String md5Hash = this.getFileMd5(changedFile);
        String languageName = Singleton.getFileClassifier().languageGuesser(changedFile,
                codeLinesReturn.getCodeLines());
        String fileLocation = this.getRelativeToProjectPath(path.toString(), changedFile);
        String fileLocationFilename = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING);
        String repoLocationRepoNameLocationFilename = changedFile;
        String newString = this.getBlameFilePath(fileLocationFilename);
        String codeOwner = this.getCodeOwner(codeLinesReturn.getCodeLines(), newString, repoName,
                fileRepoLocations, scl);

        if (this.LOWMEMORY) {
            try {
                Singleton.getCodeIndexer().indexDocument(new CodeIndexDocument(
                        repoLocationRepoNameLocationFilename, repoName, fileName, fileLocation,
                        fileLocationFilename, md5Hash, languageName, codeLinesReturn.getCodeLines().size(),
                        StringUtils.join(codeLinesReturn.getCodeLines(), " "), repoRemoteLocation, codeOwner));
            } catch (IOException ex) {
                Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                        + "\n with message: " + ex.getMessage());
            }
        } else {
            this.sharedService.incrementCodeIndexLinesCount(codeLinesReturn.getCodeLines().size());
            codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename, repoName,
                    fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                    codeLinesReturn.getCodeLines().size(),
                    StringUtils.join(codeLinesReturn.getCodeLines(), " "), repoRemoteLocation, codeOwner));
        }

        if (this.LOGINDEXED) {
            reportList.add(new String[] { changedFile, "included", "" });
        }
    }

    if (this.LOGINDEXED && reportList.isEmpty() == false) {
        this.logIndexed(repoName + "_delta", reportList);
    }

    for (String deletedFile : repositoryChanged.getDeletedFiles()) {
        deletedFile = fileRepoLocations + "/" + repoName + "/" + deletedFile;
        deletedFile = deletedFile.replace("//", "/");
        Singleton.getLogger().info("Missing from disk, removing from index " + deletedFile);
        try {
            Singleton.getCodeIndexer().deleteByCodeId(DigestUtils.sha1Hex(deletedFile));
        } catch (IOException ex) {
            Singleton.getLogger()
                    .warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + " indexDocsByDelta deleteByFileLocationFilename for " + repoName + " "
                            + deletedFile + "\n with message: " + ex.getMessage());
        }
    }
}

From source file:it.geosolutions.geobatch.geotiff.overview.GeotiffOverviewsEmbedder.java

/**
 * Embeds overviews (reduced-resolution internal pages) into the GeoTIFF files referenced
 * by the incoming events. A directory event is expanded to every contained .tif/.tiff
 * file via a regex collector. Successfully processed events are returned; when nothing
 * was processed the untouched input queue is returned instead.
 *
 * @param events the incoming file-system events; must not be empty
 * @return the queue of processed events, or the input queue if nothing was processed
 * @throws ActionException if the configuration or an input file is unusable, or processing fails
 */
public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {

    try {
        // looking for file
        if (events.size() == 0)
            throw new IllegalArgumentException(
                    "GeotiffOverviewsEmbedder::execute(): Wrong number of elements for this action: "
                            + events.size());

        listenerForwarder.setTask("config");
        listenerForwarder.started();

        // //
        //
        // data flow configuration and dataStore name must not be null.
        //
        // //
        if (configuration == null) {
            final String message = "GeotiffOverviewsEmbedder::execute(): DataFlowConfig is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new IllegalStateException(message);
        }

        // //
        //
        // check the configuration and prepare the overviews embedder
        //
        // //
        final int downsampleStep = configuration.getDownsampleStep();
        if (downsampleStep <= 0)
            throw new IllegalArgumentException(
                    "GeotiffOverviewsEmbedder::execute(): Illegal downsampleStep: " + downsampleStep);

        int numberOfSteps = configuration.getNumSteps();
        if (numberOfSteps <= 0)
            throw new IllegalArgumentException("Illegal numberOfSteps: " + numberOfSteps);

        final OverviewsEmbedder oe = new OverviewsEmbedder();
        oe.setDownsampleStep(downsampleStep);
        oe.setNumSteps(configuration.getNumSteps());
        // SG: this way we are sure we use the standard tile cache
        oe.setTileCache(JAI.getDefaultInstance().getTileCache());

        String scaleAlgorithm = configuration.getScaleAlgorithm();
        if (scaleAlgorithm == null) {
            LOGGER.warn("No scaleAlgorithm defined. Using " + SubsampleAlgorithm.Nearest + " as default");
            scaleAlgorithm = SubsampleAlgorithm.Nearest.name();
        } else {
            // BUGFIX: Enum.valueOf never returns null -- it throws IllegalArgumentException
            // for unknown names -- so the previous "algorithm == null" check was dead code.
            // Translate the exception into the originally intended IllegalStateException.
            try {
                SubsampleAlgorithm.valueOf(scaleAlgorithm);
            } catch (IllegalArgumentException e) {
                throw new IllegalStateException("Bad scaleAlgorithm defined [" + scaleAlgorithm + "]", e);
            }
        }
        oe.setScaleAlgorithm(scaleAlgorithm);
        oe.setTileHeight(configuration.getTileH());
        oe.setTileWidth(configuration.getTileW());

        /*
         * TODO check this is formally wrong! this should be done into the
         * configuration.
         */
        // add logger/listener
        if (configuration.isLogNotification())
            oe.addProcessingEventListener(new ProcessingEventListener() {

                public void exceptionOccurred(ExceptionEvent event) {
                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("GeotiffOverviewsEmbedder::execute(): " + event.getMessage(),
                                event.getException());
                }

                public void getNotification(ProcessingEvent event) {
                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("GeotiffOverviewsEmbedder::execute(): " + event.getMessage());
                    listenerForwarder.progressing((float) event.getPercentage(), event.getMessage());
                }
            });

        // The return
        Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();

        while (events.size() > 0) {

            // run
            listenerForwarder.progressing(0, "Embedding overviews");

            final FileSystemEvent event = events.remove();

            final File eventFile = event.getSource();
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Processing file " + eventFile);

            if (eventFile.exists() && eventFile.canRead() && eventFile.canWrite()) {
                /*
                 * If here: we can start retiler actions on the incoming
                 * file event
                 */

                if (eventFile.isDirectory()) {

                    // Expand the directory to all .tif/.tiff files it contains.
                    final FileFilter filter = new RegexFileFilter(".+\\.[tT][iI][fF]([fF]?)");
                    final Collector collector = new Collector(filter);
                    final List<File> fileList = collector.collect(eventFile);
                    int size = fileList.size();
                    for (int progress = 0; progress < size; progress++) {

                        final File inFile = fileList.get(progress);

                        try {
                            oe.setSourcePath(inFile.getAbsolutePath());
                            oe.run();
                        } catch (UnsupportedOperationException uoe) {
                            listenerForwarder.failed(uoe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + uoe.getLocalizedMessage(),
                                        uoe);
                        } catch (IllegalArgumentException iae) {
                            listenerForwarder.failed(iae);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + iae.getLocalizedMessage(),
                                        iae);
                        } finally {
                            listenerForwarder.setProgress((progress * 100) / ((size != 0) ? size : 1));
                            listenerForwarder.progressing();
                        }
                    }
                } else {
                    // file is not a directory
                    try {
                        oe.setSourcePath(eventFile.getAbsolutePath());
                        oe.run();
                    } catch (UnsupportedOperationException uoe) {
                        listenerForwarder.failed(uoe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + uoe.getLocalizedMessage(),
                                    uoe);
                    } catch (IllegalArgumentException iae) {
                        listenerForwarder.failed(iae);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + iae.getLocalizedMessage(),
                                    iae);
                    } finally {
                        listenerForwarder.setProgress(100 / ((events.size() != 0) ? events.size() : 1));
                    }
                }

                // add the directory to the return
                ret.add(event);
            } else {
                final String message = "GeotiffOverviewsEmbedder::execute(): The passed file event refers to a not existent "
                        + "or not readable/writeable file! File: " + eventFile.getAbsolutePath();
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(message);

                throw new ActionException(this, message);

            }
        } // endwile
        listenerForwarder.completed();

        // return
        if (ret.size() > 0) {
            events.clear();
            return ret;
        } else {
            /*
             * If here: we got an error no file are set to be returned the
             * input queue is returned
             */
            return events;
        }
    } catch (Exception t) {
        final String message = "GeotiffOverviewsEmbedder::execute(): " + t.getLocalizedMessage();
        if (LOGGER.isErrorEnabled())
            LOGGER.error(message, t);
        final ActionException exc = new ActionException(this, message, t);
        listenerForwarder.failed(exc);
        throw exc;
    }
}

From source file:it.geosolutions.geobatch.geotiff.overview.GeotiffOverviewsEmbedderAction.java

/**
 * Embeds overviews (reduced-resolution internal pages) into the GeoTIFF files referenced
 * by the incoming events. A directory event is expanded to every contained .tif/.tiff
 * file via a regex collector. Successfully processed events are returned; when nothing
 * was processed the untouched input queue is returned instead.
 *
 * @param events the incoming file-system events; must not be empty
 * @return the queue of processed events, or the input queue if nothing was processed
 * @throws ActionException if the configuration or an input file is unusable, or processing fails
 */
@Override
public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {

    try {
        // looking for file
        if (events.size() == 0)
            throw new IllegalArgumentException(
                    "GeotiffOverviewsEmbedder::execute(): Wrong number of elements for this action: "
                            + events.size());

        listenerForwarder.setTask("config");
        listenerForwarder.started();

        // //
        //
        // data flow configuration and dataStore name must not be null.
        //
        // //
        if (configuration == null) {
            final String message = "GeotiffOverviewsEmbedder::execute(): DataFlowConfig is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new IllegalStateException(message);
        }

        // //
        //
        // check the configuration and prepare the overviews embedder
        //
        // //
        final int downsampleStep = configuration.getDownsampleStep();
        if (downsampleStep <= 0)
            throw new IllegalArgumentException(
                    "GeotiffOverviewsEmbedder::execute(): Illegal downsampleStep: " + downsampleStep);

        int numberOfSteps = configuration.getNumSteps();
        if (numberOfSteps <= 0)
            throw new IllegalArgumentException("Illegal numberOfSteps: " + numberOfSteps);

        final OverviewsEmbedder oe = new OverviewsEmbedder();
        oe.setDownsampleStep(downsampleStep);
        oe.setNumSteps(configuration.getNumSteps());
        // SG: this way we are sure we use the standard tile cache
        oe.setTileCache(JAI.getDefaultInstance().getTileCache());

        String scaleAlgorithm = configuration.getScaleAlgorithm();
        if (scaleAlgorithm == null) {
            LOGGER.warn("No scaleAlgorithm defined. Using " + SubsampleAlgorithm.Nearest + " as default");
            scaleAlgorithm = SubsampleAlgorithm.Nearest.name();
        } else {
            // NOTE(review): if SubsampleAlgorithm is an enum, valueOf never returns
            // null -- it throws IllegalArgumentException for unknown names -- which
            // would make the null check below dead code; confirm and consider
            // catching IllegalArgumentException instead.
            final SubsampleAlgorithm algorithm = SubsampleAlgorithm.valueOf(scaleAlgorithm);
            if (algorithm == null) {
                throw new IllegalStateException("Bad scaleAlgorithm defined [" + scaleAlgorithm + "]");
            }
        }
        oe.setScaleAlgorithm(scaleAlgorithm);
        oe.setTileHeight(configuration.getTileH());
        oe.setTileWidth(configuration.getTileW());

        /*
         * TODO check this is formally wrong! this should be done into the
         * configuration.
         */
        // add logger/listener
        if (configuration.isLogNotification())
            oe.addProcessingEventListener(new ProcessingEventListener() {

                public void exceptionOccurred(ExceptionEvent event) {
                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("GeotiffOverviewsEmbedder::execute(): " + event.getMessage(),
                                event.getException());
                }

                public void getNotification(ProcessingEvent event) {
                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("GeotiffOverviewsEmbedder::execute(): " + event.getMessage());
                    listenerForwarder.progressing((float) event.getPercentage(), event.getMessage());
                }
            });

        // The return
        Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();

        while (events.size() > 0) {

            // run
            listenerForwarder.progressing(0, "Embedding overviews");

            final FileSystemEvent event = events.remove();

            final File eventFile = event.getSource();
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Processing file " + eventFile);

            if (eventFile.exists() && eventFile.canRead() && eventFile.canWrite()) {
                /*
                 * If here: we can start retiler actions on the incoming
                 * file event
                 */

                if (eventFile.isDirectory()) {

                    // Expand the directory to all .tif/.tiff files it contains.
                    final FileFilter filter = new RegexFileFilter(".+\\.[tT][iI][fF]([fF]?)");
                    final Collector collector = new Collector(filter);
                    final List<File> fileList = collector.collect(eventFile);
                    int size = fileList.size();
                    for (int progress = 0; progress < size; progress++) {

                        final File inFile = fileList.get(progress);

                        try {
                            oe.setSourcePath(inFile.getAbsolutePath());
                            oe.run();
                        } catch (UnsupportedOperationException uoe) {
                            listenerForwarder.failed(uoe);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + uoe.getLocalizedMessage(),
                                        uoe);
                        } catch (IllegalArgumentException iae) {
                            listenerForwarder.failed(iae);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + iae.getLocalizedMessage(),
                                        iae);
                        } finally {
                            listenerForwarder.setProgress((progress * 100) / ((size != 0) ? size : 1));
                            listenerForwarder.progressing();
                        }
                    }
                } else {
                    // file is not a directory
                    try {
                        oe.setSourcePath(eventFile.getAbsolutePath());
                        oe.run();
                    } catch (UnsupportedOperationException uoe) {
                        listenerForwarder.failed(uoe);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + uoe.getLocalizedMessage(),
                                    uoe);
                    } catch (IllegalArgumentException iae) {
                        listenerForwarder.failed(iae);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn("GeotiffOverviewsEmbedder::execute(): " + iae.getLocalizedMessage(),
                                    iae);
                    } finally {
                        listenerForwarder.setProgress(100 / ((events.size() != 0) ? events.size() : 1));
                    }
                }

                // add the directory to the return
                ret.add(event);
            } else {
                final String message = "GeotiffOverviewsEmbedder::execute(): The passed file event refers to a not existent "
                        + "or not readable/writeable file! File: " + eventFile.getAbsolutePath();
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(message);

                throw new ActionException(this, message);

            }
        } // endwile
        listenerForwarder.completed();

        // return
        if (ret.size() > 0) {
            events.clear();
            return ret;
        } else {
            /*
             * If here: we got an error no file are set to be returned the
             * input queue is returned
             */
            return events;
        }
    } catch (Exception t) {
        final String message = "GeotiffOverviewsEmbedder::execute(): " + t.getLocalizedMessage();
        if (LOGGER.isErrorEnabled())
            LOGGER.error(message, t);
        final ActionException exc = new ActionException(this, message, t);
        listenerForwarder.failed(exc);
        throw exc;
    }
}

From source file:ch.entwine.weblounge.maven.S3DeployMojo.java

/**
 * Deploys the configured resources to the S3 bucket, optionally gzip-compressing
 * .js/.css files, and waits for every scheduled upload to finish.
 *
 * {@inheritDoc}
 *
 * @see org.apache.maven.plugin.Mojo#execute()
 */
public void execute() throws MojoExecutionException, MojoFailureException {

    // Setup AWS S3 client
    AWSCredentials credentials = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
    AmazonS3Client uploadClient = new AmazonS3Client(credentials);
    TransferManager transfers = new TransferManager(credentials);

    // Make sure key prefix does not start with a slash but has one at the
    // end
    if (keyPrefix.startsWith("/"))
        keyPrefix = keyPrefix.substring(1);
    if (!keyPrefix.endsWith("/"))
        keyPrefix = keyPrefix + "/";

    // Keep track of how much data has been transferred
    long totalBytesTransferred = 0L;
    int items = 0;
    Queue<Upload> uploads = new LinkedBlockingQueue<Upload>();

    try {
        // Check if S3 bucket exists
        getLog().debug("Checking whether bucket " + bucket + " exists");
        if (!uploadClient.doesBucketExist(bucket)) {
            getLog().error("Desired bucket '" + bucket + "' does not exist!");
            return;
        }

        getLog().debug("Collecting files to transfer from " + resources.getDirectory());
        List<File> res = getResources();
        for (File file : res) {
            // Make path of resource relative to resources directory
            String filename = file.getName();
            String extension = FilenameUtils.getExtension(filename);
            String path = file.getPath().substring(resources.getDirectory().length());
            String key = concat("/", keyPrefix, path).substring(1);

            // Delete old file version in bucket
            getLog().debug("Removing existing object at " + key);
            uploadClient.deleteObject(bucket, key);

            // Setup meta data
            ObjectMetadata meta = new ObjectMetadata();
            meta.setCacheControl("public, max-age=" + String.valueOf(valid * 3600));

            FileInputStream fis = null;
            GZIPOutputStream gzipos = null;
            final File fileToUpload;

            if (gzip && ("js".equals(extension) || "css".equals(extension))) {
                try {
                    fis = new FileInputStream(file);
                    File gzFile = File.createTempFile(file.getName(), null);
                    // BUGFIX: previously the temporary gzip file was never cleaned
                    // up; mark it for deletion when the JVM exits.
                    gzFile.deleteOnExit();
                    gzipos = new GZIPOutputStream(new FileOutputStream(gzFile));
                    IOUtils.copy(fis, gzipos);
                    fileToUpload = gzFile;
                    meta.setContentEncoding("gzip");
                    if ("js".equals(extension))
                        meta.setContentType("text/javascript");
                    if ("css".equals(extension))
                        meta.setContentType("text/css");
                } catch (FileNotFoundException e) {
                    getLog().error(e);
                    continue;
                } catch (IOException e) {
                    getLog().error(e);
                    continue;
                } finally {
                    IOUtils.closeQuietly(fis);
                    IOUtils.closeQuietly(gzipos);
                }
            } else {
                fileToUpload = file;
            }

            // Do a random check for existing errors before starting the next upload
            if (erroneousUpload != null)
                break;

            // Create put object request
            long bytesToTransfer = fileToUpload.length();
            totalBytesTransferred += bytesToTransfer;
            PutObjectRequest request = new PutObjectRequest(bucket, key, fileToUpload);
            request.setProgressListener(new UploadListener(credentials, bucket, key, bytesToTransfer));
            request.setMetadata(meta);

            // Schedule put object request
            getLog().info(
                    "Uploading " + key + " (" + FileUtils.byteCountToDisplaySize((int) bytesToTransfer) + ")");
            Upload upload = transfers.upload(request);
            uploads.add(upload);
            items++;
        }
    } catch (AmazonServiceException e) {
        getLog().error("Uploading resources failed: " + e.getMessage(), e);
    } catch (AmazonClientException e) {
        getLog().error("Uploading resources failed: " + e.getMessage(), e);
    }

    // Wait for uploads to be finished
    String currentUpload = null;
    try {
        Thread.sleep(1000);
        getLog().info("Waiting for " + uploads.size() + " uploads to finish...");
        while (!uploads.isEmpty()) {
            Upload upload = uploads.poll();
            currentUpload = upload.getDescription().substring("Uploading to ".length());
            if (TransferState.InProgress.equals(upload.getState()))
                getLog().debug("Waiting for upload " + currentUpload + " to finish");
            upload.waitForUploadResult();
        }
    } catch (AmazonServiceException e) {
        // BUGFIX: keep the original exception as the cause instead of discarding it.
        throw new MojoExecutionException("Error while uploading " + currentUpload, e);
    } catch (AmazonClientException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload, e);
    } catch (InterruptedException e) {
        // BUGFIX: restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
        getLog().debug("Interrupted while waiting for upload to finish");
    }

    // Check for errors that happened outside of the actual uploading
    if (erroneousUpload != null) {
        throw new MojoExecutionException("Error while uploading " + erroneousUpload);
    }

    getLog().info("Deployed " + items + " files ("
            + FileUtils.byteCountToDisplaySize((int) totalBytesTransferred) + ") to s3://" + bucket);
}

From source file:com.searchcode.app.jobs.IndexSvnRepoJob.java

/**
 * Indexes all the documents in the repository changed-file list, effectively performing
 * a delta update. Should only be called when there is a genuine update, i.e. something
 * was indexed previously and there is a new commit.
 */
public void indexDocsByDelta(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        RepositoryChanged repositoryChanged) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);

    Singleton.getLogger().info("Repository Changed File List " + repositoryChanged.getChangedFiles());

    for (String changedFile : repositoryChanged.getChangedFiles()) {

        Singleton.getLogger().info("Indexing " + changedFile + " in " + repoName);

        while (CodeIndexer.shouldPauseAdding()) {
            Singleton.getLogger().info("Pausing parser.");
            try {
                Thread.sleep(SLEEPTIME);
            } catch (InterruptedException ex) {
            }
        }

        String[] split = changedFile.split("/");
        String fileName = split[split.length - 1];
        changedFile = fileRepoLocations + repoName + "/" + changedFile;

        String md5Hash = Values.EMPTYSTRING;
        List<String> codeLines = null;

        try {
            codeLines = Helpers.readFileLines(changedFile, MAXFILELINEDEPTH);
        } catch (IOException ex) {
            Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                    + "\n with message: " + ex.getMessage());
            break;
        }

        try {
            FileInputStream fis = new FileInputStream(new File(changedFile));
            md5Hash = org.apache.commons.codec.digest.DigestUtils.md5Hex(fis);
            fis.close();
        } catch (IOException ex) {
            Singleton.getLogger().warning("Unable to generate MD5 for " + changedFile);
        }

        if (scl.isMinified(codeLines)) {
            Singleton.getLogger().info("Appears to be minified will not index  " + changedFile);
            break;
        }

        String languageName = scl.languageGuesser(changedFile, codeLines);
        String fileLocation = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING).replace(fileName,
                Values.EMPTYSTRING);
        String fileLocationFilename = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING);
        String repoLocationRepoNameLocationFilename = changedFile;

        String newString = getBlameFilePath(fileLocationFilename);
        String codeOwner = getInfoExternal(codeLines.size(), repoName, fileRepoLocations, newString).getName();

        if (codeLines != null) {
            if (this.LOWMEMORY) {
                try {
                    CodeIndexer.indexDocument(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                } catch (IOException ex) {
                    Singleton.incrementCodeIndexLinesCount(codeLines.size());
                    Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + "\n with message: " + ex.getMessage());
                }
            } else {
                codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename, repoName,
                        fileName, fileLocation, fileLocationFilename, md5Hash, languageName, codeLines.size(),
                        StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
            }
        }
    }

    for (String deletedFile : repositoryChanged.getDeletedFiles()) {
        Singleton.getLogger().info("Missing from disk, removing from index " + deletedFile);
        try {
            CodeIndexer.deleteByFileLocationFilename(deletedFile);
        } catch (IOException ex) {
            Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                    + "\n with message: " + ex.getMessage());
        }
    }
}

From source file:it.geosolutions.geobatch.geotiff.retile.GeotiffRetilerAction.java

/**
 * Consumes a queue of file-system events and retiles each referenced GeoTIFF. If an event points
 * to a directory, every file inside it is retiled and the directory event itself is returned;
 * for a plain file, a new event for the retiled output file is returned. Per-file failures are
 * logged and skipped; only configuration/queue problems or unexpected errors abort the action.
 *
 * @param events incoming events to process; cleared when at least one result was produced
 * @return the queue of output events, or the (unchanged) input queue if nothing succeeded
 * @throws ActionException if configuration is null, the queue is empty, or an unexpected
 *                         exception escapes the per-event handling
 */
@Override
public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {
    try {
        if (configuration == null) {
            final String message = "GeotiffRetiler::execute(): flow configuration is null.";
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new ActionException(this, message);
        }
        if (events.size() == 0) {
            throw new ActionException(this,
                    "GeotiffRetiler::execute(): Unable to process an empty events queue.");
        }

        if (LOGGER.isInfoEnabled())
            LOGGER.info("GeotiffRetiler::execute(): Starting with processing...");

        listenerForwarder.started();

        // Queue of successfully produced output events.
        final Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();

        while (events.size() > 0) {

            FileSystemEvent event = events.remove();

            File eventFile = event.getSource();
            FileSystemEventType eventType = event.getEventType();

            if (eventFile.exists() && eventFile.canRead() && eventFile.canWrite()) {
                /*
                 * If here: we can start retiler actions on the incoming file event
                 */

                if (eventFile.isDirectory()) {

                    File[] fileList = eventFile.listFiles();
                    int size = fileList.length;
                    for (int progress = 0; progress < size; progress++) {

                        File inFile = fileList[progress];

                        final String absolutePath = inFile.getAbsolutePath();
                        final String inputFileName = FilenameUtils.getName(absolutePath);

                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("is going to retile: " + inputFileName);

                        try {

                            listenerForwarder.setTask("GeotiffRetiler");
                            GeoTiffRetilerUtils.reTile(inFile, configuration, getTempDir());

                            // NOTE: per-file output events were deliberately removed (Feb 2011):
                            // when the input event is a directory we return the directory itself,
                            // regardless of individual file failures. A future improvement could
                            // detect and skip files that are already tiled.

                        } catch (UnsupportedOperationException | IOException | IllegalArgumentException e) {
                            // IMPROVEMENT: multi-catch replaces three byte-identical catch blocks.
                            listenerForwarder.failed(e);
                            if (LOGGER.isWarnEnabled())
                                LOGGER.warn(e.getLocalizedMessage(), e);
                            continue;
                        } finally {
                            // Report progress even when an individual file fails.
                            listenerForwarder.setProgress((progress * 100) / ((size != 0) ? size : 1));
                            listenerForwarder.progressing();
                        }
                    }

                    if (LOGGER.isInfoEnabled())
                        LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());

                    // add the directory to the return
                    ret.add(event);
                } else {
                    // file is not a directory
                    try {
                        listenerForwarder.setTask("GeotiffRetiler");
                        final File outputFile = GeoTiffRetilerUtils.reTile(eventFile, configuration,
                                getTempDir());

                        if (LOGGER.isInfoEnabled())
                            LOGGER.info("SUCCESSFULLY completed work on: " + event.getSource());
                        listenerForwarder.setProgress(100);
                        ret.add(new FileSystemEvent(outputFile, eventType));

                    } catch (UnsupportedOperationException | IOException | IllegalArgumentException e) {
                        // IMPROVEMENT: multi-catch replaces three byte-identical catch blocks.
                        listenerForwarder.failed(e);
                        if (LOGGER.isWarnEnabled())
                            LOGGER.warn(e.getLocalizedMessage(), e);
                        continue;
                    } finally {

                        listenerForwarder.setProgress((100) / ((events.size() != 0) ? events.size() : 1));
                        listenerForwarder.progressing();
                    }
                }
            } else {
                final String message = "The passed file event refers to a not existent "
                        + "or not readable/writeable file! File: " + eventFile.getAbsolutePath();
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn(message);
                final IllegalArgumentException iae = new IllegalArgumentException(message);
                listenerForwarder.failed(iae);
            }
        } // endwhile
        listenerForwarder.completed();

        // return
        if (ret.size() > 0) {
            events.clear();
            return ret;
        } else {
            /*
             * If here: we got an error, no files are set to be returned, the input queue is
             * returned
             */
            return events;
        }
    } catch (Exception t) {
        // Boundary catch: convert anything unexpected into an ActionException for the flow engine.
        if (LOGGER.isErrorEnabled())
            LOGGER.error(t.getLocalizedMessage(), t);
        final ActionException exc = new ActionException(this, t.getLocalizedMessage(), t);
        listenerForwarder.failed(exc);
        throw exc;
    }
}

From source file:com.datatorrent.stram.StreamingContainerManager.java

/**
 * Aggregates the per-window stats reported by physical operator partitions up to the logical
 * operator level, for both the deprecated CountersAggregator mechanism and the current
 * AutoMetric mechanism.
 *
 * @param windowId          the streaming window the stats belong to
 * @param endWindowStatsMap per-physical-operator-id end-of-window stats for this window
 */
private void aggregateMetrics(long windowId, Map<Integer, EndWindowStats> endWindowStatsMap) {
    Collection<OperatorMeta> logicalOperators = getLogicalPlan().getAllOperators();
    //for backward compatibility
    // First pass: legacy (deprecated) CountersAggregator support.
    for (OperatorMeta operatorMeta : logicalOperators) {
        @SuppressWarnings("deprecation")
        Context.CountersAggregator aggregator = operatorMeta.getValue(OperatorContext.COUNTERS_AGGREGATOR);
        if (aggregator == null) {
            continue;
        }
        Collection<PTOperator> physicalOperators = plan.getAllOperators(operatorMeta);
        List<Object> counters = Lists.newArrayList();
        // Collect counters from every physical partition that reported stats this window.
        for (PTOperator operator : physicalOperators) {
            EndWindowStats stats = endWindowStatsMap.get(operator.getId());
            if (stats != null && stats.counters != null) {
                counters.add(stats.counters);
            }
        }
        if (counters.size() > 0) {
            @SuppressWarnings("deprecation")
            Object aggregate = aggregator.aggregate(counters);
            latestLogicalCounters.put(operatorMeta.getName(), aggregate);
        }
    }

    // Second pass: AutoMetric aggregation (the current mechanism).
    for (OperatorMeta operatorMeta : logicalOperators) {
        AutoMetric.Aggregator aggregator = operatorMeta.getMetricAggregatorMeta() != null
                ? operatorMeta.getMetricAggregatorMeta().getAggregator()
                : null;
        if (aggregator == null) {
            continue;
        }
        Collection<PTOperator> physicalOperators = plan.getAllOperators(operatorMeta);
        List<AutoMetric.PhysicalMetricsContext> metricPool = Lists.newArrayList();

        for (PTOperator operator : physicalOperators) {
            EndWindowStats stats = endWindowStatsMap.get(operator.getId());
            if (stats != null && stats.metrics != null) {
                PhysicalMetricsContextImpl physicalMetrics = new PhysicalMetricsContextImpl(operator.getId(),
                        stats.metrics);
                metricPool.add(physicalMetrics);
            }
        }
        if (metricPool.isEmpty()) {
            //nothing to aggregate
            continue;
        }
        Map<String, Object> lm = aggregator.aggregate(windowId, metricPool);

        if (lm != null && lm.size() > 0) {
            Queue<Pair<Long, Map<String, Object>>> windowMetrics = logicalMetrics.get(operatorMeta.getName());
            if (windowMetrics == null) {
                // Bounded history queue (capacity METRIC_QUEUE_SIZE): the overridden add()
                // evicts the oldest entry when only one slot remains, so add() never throws
                // IllegalStateException at capacity.
                windowMetrics = new LinkedBlockingQueue<Pair<Long, Map<String, Object>>>(METRIC_QUEUE_SIZE) {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public boolean add(Pair<Long, Map<String, Object>> longMapPair) {
                        // Drop the head to make room for the incoming entry.
                        if (remainingCapacity() <= 1) {
                            remove();
                        }
                        return super.add(longMapPair);
                    }
                };
                logicalMetrics.put(operatorMeta.getName(), windowMetrics);
            }
            LOG.debug("Adding to logical metrics for {}", operatorMeta.getName());
            windowMetrics.add(new Pair<Long, Map<String, Object>>(windowId, lm));
            Map<String, Object> oldValue = latestLogicalMetrics.put(operatorMeta.getName(), lm);
            if (oldValue == null) {
                // First metric value ever seen for this operator: persist meta info so app-data
                // sources can be discovered. NOTE(review): failure is logged, not propagated -
                // presumably acceptable degradation; confirm against callers.
                try {
                    saveMetaInfo();
                } catch (IOException ex) {
                    LOG.error(
                            "Cannot save application meta info to DFS. App data sources will not be available.",
                            ex);
                }
            }
        }
    }
}