Example usage for java.util Queue size

List of usage examples for java.util Queue size

Introduction

On this page you can find usage examples for java.util Queue.size().

Prototype

int size();

Document

Returns the number of elements in this collection.
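
As a minimal, self-contained sketch (the QueueSizeDemo class below is illustrative and not taken from any of the projects listed under Usage), size() reports how many elements the queue currently holds and shrinks as elements are polled:

import java.util.ArrayDeque;
import java.util.Queue;

public class QueueSizeDemo {
    public static void main(String[] args) {
        Queue<String> tasks = new ArrayDeque<String>();
        tasks.offer("first");
        tasks.offer("second");
        System.out.println(tasks.size()); // 2: two elements are queued

        tasks.poll();                     // remove the head element
        System.out.println(tasks.size()); // 1

        tasks.clear();
        System.out.println(tasks.size()); // 0
    }
}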

Usage

From source file:ml.shifu.shifu.core.dtrain.dt.DTMaster.java

/**
 * Write {@link #trees}, {@link #toDoQueue} and MasterParams to HDFS.
 */
private void writeStatesToHdfs(Path out, DTMasterParams masterParams, List<TreeNode> trees, boolean isLeafWise,
        Queue<TreeNode> toDoQueue, Queue<TreeNode> toSplitQueue) {
    FSDataOutputStream fos = null;
    try {
        fos = FileSystem.get(new Configuration()).create(out);

        // trees
        int treeLength = trees.size();
        fos.writeInt(treeLength);
        for (TreeNode treeNode : trees) {
            treeNode.write(fos);
        }

        // todo queue
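        // the queue size is written first as a length prefix so the reader knows how many TreeNode entries follow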
        fos.writeInt(toDoQueue.size());
        for (TreeNode treeNode : toDoQueue) {
            treeNode.write(fos);
        }

        if (isLeafWise && toSplitQueue != null) {
            fos.writeInt(toSplitQueue.size());
            for (TreeNode treeNode : toSplitQueue) {
                treeNode.write(fos);
            }
        }

        // master result
        masterParams.write(fos);
    } catch (Throwable e) {
        LOG.error("Error in writing output.", e);
    } finally {
        IOUtils.closeStream(fos);
        fos = null;
    }
}

From source file:io.uploader.drive.drive.DriveOperations.java

private static Map<Path, File> createDirectoriesStructure(OperationResult operationResult, Drive client,
        File driveDestDirectory, Path srcDir, final StopRequester stopRequester,
        final HasStatusReporter statusReporter) throws IOException {

    Queue<Path> directoriesQueue = io.uploader.drive.util.FileUtils.getAllFilesPath(srcDir,
            FileFinderOption.DIRECTORY_ONLY);

    if (statusReporter != null) {
        statusReporter.setCurrentProgress(0.0);
        statusReporter.setTotalProgress(0.0);
        statusReporter.setStatus("Checking/creating directories structure...");
    }

    long count = 0;
    Path topParent = srcDir.getParent();
    Map<Path, File> localPathDriveFileMapping = new HashMap<Path, File>();
    localPathDriveFileMapping.put(topParent, driveDestDirectory);
    for (Path path : directoriesQueue) {
        try {
            if (statusReporter != null) {
                statusReporter.setCurrentProgress(0.0);
                statusReporter.setStatus(
                        "Checking/creating directories structure... (" + path.getFileName().toString() + ")");
            }

            if (hasStopBeenRequested(stopRequester)) {
                if (statusReporter != null) {
                    statusReporter.setStatus("Stopped!");
                }
                operationResult.setStatus(OperationCompletionStatus.STOPPED);
                return localPathDriveFileMapping;
            }

            File driveParent = localPathDriveFileMapping.get(path.getParent());
            if (driveParent == null) {
                throw new IllegalStateException(
                        "The path " + path.toString() + " does not have any parent in the drive (parent path "
                                + path.getParent().toString() + ")...");
            }
            // check whether driveParent already exists, otherwise create it
            File driveDirectory = createDirectoryIfNotExist(client, driveParent, path.getFileName().toString());
            localPathDriveFileMapping.put(path, driveDirectory);

            ++count;
            if (statusReporter != null) {
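                // overall progress is reported as the fraction of directories handled so far (count over the queue's total size)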
                double p = ((double) count) / directoriesQueue.size();
                statusReporter.setTotalProgress(p);
                statusReporter.setCurrentProgress(1.0);
            }
        } catch (Throwable e) {
            logger.error("Error occurred while creating the directory " + path.toString(), e);
            operationResult.setStatus(OperationCompletionStatus.ERROR);
            operationResult.addError(path, e);
        }
    }
    return localPathDriveFileMapping;
}

From source file:org.archive.crawler.frontier.WorkQueueFrontier.java

/**
 * @param iqueues the inactive queues, keyed by precedence
 * @return the total number of inactive entries across all queues
 */
private int tallyInactiveTotals(SortedMap<Integer, Queue<String>> iqueues) {
    int inactiveCount = 0;
    for (Queue<String> q : iqueues.values()) {
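        // add up the size of every inactive queue to get the overall total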
        inactiveCount += q.size();
    }
    return inactiveCount;
}

From source file:org.ohmage.query.impl.CampaignQueries.java

public void createCampaign(final Campaign campaign, final Collection<String> classIds,
        final String creatorUsername) throws DataAccessException {

    // Create the transaction.
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("Creating a new campaign.");

    try {
        // Begin the transaction.
        PlatformTransactionManager transactionManager = new DataSourceTransactionManager(getDataSource());
        TransactionStatus status = transactionManager.getTransaction(def);

        String iconUrlString = null;
        URL iconUrl = campaign.getIconUrl();
        if (iconUrl != null) {
            iconUrlString = iconUrl.toString();
        }

        String xml;
        try {
            xml = campaign.getXml();
        } catch (DomainException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("The XML could not be saved.");
        }

        // Create the campaign.
        try {
            getJdbcTemplate().update(SQL_INSERT_CAMPAIGN,
                    new Object[] { campaign.getId(), campaign.getName(), xml, campaign.getDescription(),
                            iconUrlString, campaign.getAuthoredBy(), campaign.getRunningState().toString(),
                            campaign.getPrivacyState().toString() });
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_CAMPAIGN + "' with parameters: "
                    + campaign.getId() + ", " + campaign.getName() + ", " + xml + ", "
                    + campaign.getDescription() + ", " + iconUrlString + ", " + campaign.getAuthoredBy() + ", "
                    + campaign.getRunningState().toString() + ", " + campaign.getPrivacyState().toString(), e);
        }

        // Create the set of survey and prompt IDs for this campaign.
        final Set<String> surveyIds = new HashSet<String>();
        final Set<String> promptIds = new HashSet<String>();

        // Loop through all of the surveys and add the survey and prompt
        // IDs.
        for (Survey survey : campaign.getSurveys().values()) {
            // Get this survey's ID.
            surveyIds.add(survey.getId());

            Queue<SurveyItem> surveyItems = new LinkedList<SurveyItem>();
            surveyItems.addAll(survey.getSurveyItems().values());
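            // breadth-first walk: poll items until the queue drains, pushing the children of repeatable sets back onto it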
            while (surveyItems.size() > 0) {
                SurveyItem surveyItem = surveyItems.poll();

                if (surveyItem instanceof RepeatableSet) {
                    RepeatableSet repeatableSet = (RepeatableSet) surveyItem;

                    for (SurveyItem rsSurveyItem : repeatableSet.getSurveyItems().values()) {
                        surveyItems.add(rsSurveyItem);
                    }
                } else if (surveyItem instanceof Prompt) {
                    promptIds.add(((Prompt) surveyItem).getId());
                }
            }
        }

        // Get the campaign's ID.
        final String campaignId = campaign.getId();

        // Compile the list of parameters for the survey ID lookup table.
        List<Object[]> surveyParameters = new ArrayList<Object[]>(surveyIds.size());
        for (String surveyId : surveyIds) {
            Object[] params = new Object[2];
            params[0] = surveyId;
            params[1] = campaignId;
            surveyParameters.add(params);
        }

        // The SQL to write the data.
        final String surveyIdLookupBatchSql = "INSERT INTO " + "campaign_survey_lookup(survey_id, campaign_id) "
                + "VALUES (?, (SELECT id FROM campaign WHERE urn = ?))";

        // Add the survey IDs to the lookup table.
        try {
            getJdbcTemplate().batchUpdate(surveyIdLookupBatchSql, surveyParameters);
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + surveyIdLookupBatchSql + "'.", e);
        }

        // Compile the list of parameters for the prompt ID lookup table.
        List<Object[]> promptParameters = new ArrayList<Object[]>(promptIds.size());
        for (String promptId : promptIds) {
            Object[] params = new Object[2];
            params[0] = promptId;
            params[1] = campaignId;
            promptParameters.add(params);
        }

        // The SQL to write the data.
        final String promptIdLookupBatchSql = "INSERT INTO " + "campaign_prompt_lookup(prompt_id, campaign_id) "
                + "VALUES (?, (SELECT id FROM campaign WHERE urn = ?))";

        // Add the prompt IDs to the lookup table.
        try {
            getJdbcTemplate().batchUpdate(promptIdLookupBatchSql, promptParameters);
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + promptIdLookupBatchSql + "'.", e);
        }

        // Add each of the classes to the campaign.
        for (String classId : classIds) {
            associateCampaignAndClass(transactionManager, status, campaign.getId(), classId);
        }

        // Add the requesting user as the author. This may have already 
        // happened above.
        try {
            getJdbcTemplate().update(SQL_INSERT_USER_ROLE_CAMPAIGN, creatorUsername, campaign.getId(),
                    Campaign.Role.AUTHOR.toString());
        } catch (org.springframework.dao.DataIntegrityViolationException e) {
            // The user was already an author of this campaign implying 
            // that it's one of the default campaign roles based on a class
            // role that the 'creatorUsername' has.
            e.printStackTrace();
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_USER_ROLE_CAMPAIGN
                    + "' with parameters: " + creatorUsername + ", " + campaign.getId() + ", "
                    + Campaign.Role.AUTHOR.toString(), e);
        }

        // Commit the transaction.
        try {
            transactionManager.commit(status);
        } catch (TransactionException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error while committing the transaction.", e);
        }
    } catch (TransactionException e) {
        throw new DataAccessException("Error while attempting to rollback the transaction.", e);
    }
}

From source file:it.geosolutions.geobatch.postgres.shp2pg.Shp2pgAction.java

/**
 * Extracts the incoming shapefile (zipped archive or collection of files), imports it into PostGIS and publishes the resulting layer to GeoServer.
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {
    listenerForwarder.setTask("config");
    listenerForwarder.started();
    if (configuration == null) {
        throw new IllegalStateException("ActionConfig is null.");
    }
    File workingDir = Path.findLocation(configuration.getWorkingDirectory(),
            ((FileBaseCatalog) CatalogHolder.getCatalog()).getConfigDirectory());
    if (workingDir == null) {
        throw new IllegalStateException("Working directory is null.");
    }
    if (!workingDir.exists() || !workingDir.isDirectory()) {
        throw new IllegalStateException((new StringBuilder()).append("Working directory does not exist (")
                .append(workingDir.getAbsolutePath()).append(").").toString());
    }
    FileSystemEvent event = (FileSystemEvent) events.peek();
    // String shapeName = null;
    File shapefile = null;
    File zippedFile = null;
    File files[];
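    // a single incoming event is treated as a zip archive to extract; three or more events are treated as an already-exploded set of shapefile parts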
    if (events.size() == 1) {
        zippedFile = event.getSource();
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace((new StringBuilder()).append("Testing for compressed file: ")
                    .append(zippedFile.getAbsolutePath()).toString());
        }
        String tmpDirName = null;
        try {
            tmpDirName = Extract.extract(zippedFile.getAbsolutePath());
        } catch (Exception e) {
            final String message = "Shp2pgAction.execute(): Unable to read zip file: "
                    + e.getLocalizedMessage();
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new ActionException(this, message);
        }
        listenerForwarder.progressing(5F, "File extracted");
        File tmpDirFile = new File(tmpDirName);
        if (!tmpDirFile.isDirectory()) {
            throw new IllegalStateException("Not valid input: we need a zip file ");
        }
        Collector c = new Collector(null);
        List fileList = c.collect(tmpDirFile);
        if (fileList != null) {
            files = (File[]) fileList.toArray(new File[1]);
        } else {
            String message = "Input is not a zipped file nor a valid collection of files";
            if (LOGGER.isErrorEnabled()) {
                LOGGER.error(message);
            }
            throw new IllegalStateException(message);
        }
    } else if (events.size() >= 3) {
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Checking input collection...");
        }
        listenerForwarder.progressing(6F, "Checking input collection...");
        files = new File[events.size()];
        int i = 0;
        for (EventObject eventObject : events) {
            FileSystemEvent ev = (FileSystemEvent) eventObject;
            files[i++] = ev.getSource();
        }

    } else {
        throw new IllegalStateException("Input is not a zipped file nor a valid collection of files");
    }
    if ((shapefile = acceptable(files)) == null) {
        throw new IllegalStateException("The file list do not contains mondadory files");
    }

    listenerForwarder.progressing(10F, "In progress");

    // At this moment i have the shape and a file list

    // connect to the shapefile
    final Map<String, Serializable> connect = new HashMap<String, Serializable>();
    connect.put("url", DataUtilities.fileToURL(shapefile));

    DataStore sourceDataStore = null;
    String typeName = null;
    SimpleFeatureType originalSchema = null;
    try {
        sourceDataStore = SHP_FACTORY.createDataStore(connect);
        String[] typeNames = sourceDataStore.getTypeNames();
        typeName = typeNames[0];

        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Reading content " + typeName);
        }

        originalSchema = sourceDataStore.getSchema(typeName);
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("SCHEMA HEADER: " + DataUtilities.spec(originalSchema));
        }
    } catch (IOException e) {
        final String message = "Error to create PostGres datastore" + e.getLocalizedMessage();
        if (LOGGER.isErrorEnabled())
            LOGGER.error(message);
        if (sourceDataStore != null)
            sourceDataStore.dispose();
        throw new ActionException(this, message);
    }
    // prepare to open up a reader for the shapefile
    Query query = new Query();
    query.setTypeName(typeName);
    CoordinateReferenceSystem prj = originalSchema.getCoordinateReferenceSystem();
    query.setCoordinateSystem(prj);

    DataStore destinationDataSource = null;
    try {
        destinationDataSource = createPostgisDataStore(configuration);

        // check if the schema is present in postgis
        boolean schema = false;
        if (destinationDataSource.getTypeNames().length != 0) {
            for (String tableName : destinationDataSource.getTypeNames()) {
                if (tableName.equalsIgnoreCase(typeName)) {
                    schema = true;
                }
            }
        } else {
            schema = false;
        }
        if (!schema)
            destinationDataSource.createSchema(originalSchema);
        LOGGER.info("SCHEMA: " + schema);

    } catch (IOException e) {
        String message = "Error to create postGis datastore";
        if (LOGGER.isErrorEnabled()) {
            LOGGER.error(message);
        }
        if (destinationDataSource != null)
            destinationDataSource.dispose();
        throw new IllegalStateException(message);
    }

    final Transaction transaction = new DefaultTransaction("create");
    FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null;
    FeatureReader<SimpleFeatureType, SimpleFeature> fr = null;
    try {
        SimpleFeatureBuilder builder = new SimpleFeatureBuilder(destinationDataSource.getSchema(typeName));
        fw = destinationDataSource.getFeatureWriter(typeName, transaction);
        fr = sourceDataStore.getFeatureReader(query, transaction);
        SimpleFeatureType sourceSchema = sourceDataStore.getSchema(typeName);
        FeatureStore postgisStore = (FeatureStore) destinationDataSource.getFeatureSource(typeName);
        while (fr.hasNext()) {
            final SimpleFeature oldfeature = fr.next();

            for (AttributeDescriptor ad : sourceSchema.getAttributeDescriptors()) {
                String attribute = ad.getLocalName();
                builder.set(attribute, oldfeature.getAttribute(attribute));
            }
            postgisStore.addFeatures(DataUtilities.collection(builder.buildFeature(null)));

        }

        // close transaction
        transaction.commit();

    } catch (Throwable e) {
        try {
            transaction.rollback();
        } catch (IOException e1) {
            final String message = "Transaction rollback unsuccessful: " + e1.getLocalizedMessage();
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new ActionException(this, message);
        }
    } finally {
        try {
            transaction.close();
        } catch (IOException e) {
            final String message = "Transaction close unsuccessful: " + e.getLocalizedMessage();
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            throw new ActionException(this, message);
        }
        if (fr != null) {
            try {
                fr.close();
            } catch (IOException e1) {
                final String message = "Feature reader IO exception: " + e1.getLocalizedMessage();
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(message);
                throw new ActionException(this, message);
            }
        }
        if (fw != null) {
            try {
                fw.close();
            } catch (IOException e1) {
                final String message = "Feature writer IO exception: " + e1.getLocalizedMessage();
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(message);
                throw new ActionException(this, message);
            }
        }
        if (sourceDataStore != null) {
            try {
                sourceDataStore.dispose();
            } catch (Throwable t) {
            }
        }
        if (destinationDataSource != null) {
            try {
                destinationDataSource.dispose();
            } catch (Throwable t) {
            }
        }
    }

    GeoServerRESTPublisher publisher = new GeoServerRESTPublisher(configuration.getGeoserverURL(),
            configuration.getGeoserverUID(), configuration.getGeoserverPWD());

    publisher.createWorkspace(configuration.getDefaultNamespace());

    GSPostGISDatastoreEncoder datastoreEncoder = new GSPostGISDatastoreEncoder();

    datastoreEncoder.setUser(configuration.getDbUID());
    datastoreEncoder.setDatabase(configuration.getDbName());
    datastoreEncoder.setPassword(configuration.getDbPWD());
    datastoreEncoder.setHost(configuration.getDbServerIp());
    datastoreEncoder.setPort(Integer.valueOf(configuration.getDbPort()));
    datastoreEncoder.setName(configuration.getDbName());

    publisher.createPostGISDatastore(configuration.getDefaultNamespace(), datastoreEncoder);
    String shapeFileName = FilenameUtils.getBaseName(shapefile.getName());

    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Layer postgis publishing xml-> ");
        LOGGER.info("datastoreEncoder xml: " + datastoreEncoder.toString());
    }

    if (publisher.publishDBLayer(configuration.getDefaultNamespace(), configuration.getDbName(), shapeFileName,
            configuration.getCrs(), configuration.getDefaultStyle())) {
        String message = "PostGis layer SUCCESFULLY registered";
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info(message);
        }
        listenerForwarder.progressing(100F, message);
    } else {
        String message = "PostGis layer not registered";
        ActionException ae = new ActionException(this, message);
        if (LOGGER.isErrorEnabled()) {
            LOGGER.error(message, ae);
        }
        listenerForwarder.failed(ae);
    }
    events.clear();

    return events;
}

From source file:io.cloudslang.lang.tools.build.verifier.SlangContentVerifier.java

public PreCompileResult createModelsAndValidate(String directoryPath, boolean shouldValidateDescription,
        boolean shouldValidateCheckstyle) {
    Validate.notEmpty(directoryPath, "You must specify a path");
    Validate.isTrue(new File(directoryPath).isDirectory(),
            "Directory path argument \'" + directoryPath + "\' does not lead to a directory");
    Map<String, Executable> slangModels = new HashMap<>();
    Collection<File> slangFiles = slangCompilationService.listSlangFiles(new File(directoryPath), true);
    loggingService.logEvent(Level.INFO, "Start compiling all slang files under: " + directoryPath);
    loggingService.logEvent(Level.INFO, slangFiles.size() + " .sl files were found");
    loggingService.logEvent(Level.INFO, "");
    Queue<RuntimeException> exceptions = new ArrayDeque<>();
    String errorMessagePrefixMetadata = "";
    for (File slangFile : slangFiles) {
        Executable sourceModel = null;
        try {
            errorMessagePrefixMetadata = "Failed to extract metadata for file: \'" + slangFile.getAbsoluteFile()
                    + "\'.\n";
            String errorMessagePrefixCompilation = "Failed to compile file: \'" + slangFile.getAbsoluteFile()
                    + "\'.\n";

            Validate.isTrue(slangFile.isFile(),
                    "file path \'" + slangFile.getAbsolutePath() + "\' must lead to a file");
            SlangSource slangSource = SlangSource.fromFile(slangFile);

            ExecutableModellingResult preCompileResult = slangCompiler.preCompileSource(slangSource);
            sourceModel = preCompileResult.getExecutable();
            exceptions.addAll(prependPrefix(preCompileResult.getErrors(), errorMessagePrefixCompilation));

            MetadataModellingResult metadataResult = metadataExtractor
                    .extractMetadataModellingResult(slangSource, shouldValidateCheckstyle);
            Metadata sourceMetadata = metadataResult.getMetadata();
            exceptions.addAll(prependPrefix(metadataResult.getErrors(), errorMessagePrefixMetadata));

            if (sourceModel != null) {
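                // remember the exception count before validation; the model is registered only if no new errors were added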
                int size = exceptions.size();
                staticValidator.validateSlangFile(slangFile, sourceModel, sourceMetadata,
                        shouldValidateDescription, exceptions);
                if (size == exceptions.size()) {
                    slangModels.put(getUniqueName(sourceModel), sourceModel);
                }
            }
        } catch (Exception e) {
            String errorMessage = errorMessagePrefixMetadata + e.getMessage();
            loggingService.logEvent(Level.ERROR, errorMessage);
            exceptions.add(new RuntimeException(errorMessage, e));
            if (e instanceof MetadataMissingException && sourceModel != null) {
                slangModels.put(getUniqueName(sourceModel), sourceModel);
            }
        }
    }
    if (slangFiles.size() != slangModels.size()) {
        exceptions.add(new RuntimeException("Some Slang files were not pre-compiled.\nFound: "
                + slangFiles.size() + " executable files in path: \'" + directoryPath
                + "\' But managed to create slang models for only: " + slangModels.size()));
    }
    PreCompileResult preCompileResult = new PreCompileResult();
    preCompileResult.addExceptions(exceptions);
    preCompileResult.addResults(slangModels);
    return preCompileResult;
}

From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java

@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes,
                options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

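            // capture the queue size before polling starts; it feeds the log message and the import report entry below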
            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter,
                        logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory,
                    true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(
                        failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                                + "\tPer entity exception logs and a full report can be found in "
                                + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                    "The following PortalDataKeys are not listed in the dataTypeImportOrder List: "
                            + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}

From source file:org.jasig.portal.io.xml.JaxbPortalDataHandlerService.java

@Override
public void importData(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes,
                options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

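            // capture the queue size before polling starts; it feeds the log message and the import report entry below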
            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter,
                        logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory,
                    true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(
                        failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                                + "\tPer entity exception logs and a full report can be found in "
                                + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                    "The following PortalDataKeys are not listed in the dataTypeImportOrder List: "
                            + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}

From source file:it.geosolutions.geobatch.actions.freemarker.FreeMarkerAction.java

/**
 * Removes TemplateModelEvents from the queue and puts the generated output files into the returned event queue.
 */
public Queue<EventObject> execute(Queue<EventObject> events) throws ActionException {

    listenerForwarder.started();

    listenerForwarder.setTask("initializing the FreeMarker engine");
    if (!initialized) {
        try {
            initialize();
        } catch (IllegalArgumentException e) {
            throw new ActionException(this, e.getLocalizedMessage(), e.getCause());
        } catch (IOException e) {
            throw new ActionException(this, e.getLocalizedMessage(), e.getCause());
        }
    }

    listenerForwarder.setTask("build the output absolute file name");

    // build the output absolute file name
    File outputDir = computeOutputDir(); // may throw ActionEx

    // return
    final Queue<EventObject> ret = new LinkedList<EventObject>();

    listenerForwarder.setTask("Building/getting the root data structure");
    /*
     * Building/getting the root data structure
     */
    final Map<String, Object> root = conf.getRoot() != null ? conf.getRoot() : new HashMap<String, Object>();

    // list of incoming event to inject into the root datamodel
    final List<TemplateModel> list;
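    // in N-to-N mode the list is pre-sized with events.size() and one output file is produced per event; otherwise all events feed a single output file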
    if (conf.isNtoN()) {
        list = new ArrayList<TemplateModel>(events.size());
    } else {
        list = new ArrayList<TemplateModel>(1);
    }
    // append the list of adapted event objects
    root.put(TemplateModelEvent.EVENT_KEY, list);

    while (!events.isEmpty()) {
        // the adapted event
        final TemplateModelEvent ev;
        final TemplateModel dataModel;
        try {
            if ((ev = adapter(events.remove())) != null) {
                listenerForwarder.setTask("Try to get a Template DataModel from the Adapted event");
                // try to get a Template DataModel from the Adapted event
                dataModel = ev.getModel(processor);

            } else {
                final String message = "Unable to append the event: unrecognized format. SKIPPING...";
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error(message);
                }
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    final ActionException e = new ActionException(this, message);
                    listenerForwarder.failed(e);
                    throw e;
                }
            }
        } catch (TemplateModelException tme) {
            final String message = "Unable to wrap the passed object: " + tme.getLocalizedMessage();
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            if (conf.isFailIgnored()) {
                continue;
            } else {
                listenerForwarder.failed(tme);
                throw new ActionException(this, tme.getLocalizedMessage());
            }
        } catch (Exception ioe) {
            final String message = "Unable to produce the output: " + ioe.getLocalizedMessage();
            if (LOGGER.isErrorEnabled())
                LOGGER.error(message);
            if (conf.isFailIgnored()) {
                continue;
            } else {
                listenerForwarder.failed(ioe);
                throw new ActionException(this, ioe.getLocalizedMessage(), ioe);
            }
        }

        listenerForwarder.setTask("Generating the output");
        /*
         * If getNtoN: For each data incoming event (Template DataModel)
         * build a file. Otherwise the entire queue of incoming object will
         * be transformed in a list of datamodel. In this case only one file
         * is generated.
         */
        if (conf.isNtoN()) {

            if (list.size() > 0) {
                list.remove(0);
            }
            list.add(dataModel);

            final File outputFile;
            // append the incoming data structure
            try {
                outputFile = buildOutput(outputDir, root);
            } catch (ActionException e) {
                if (LOGGER.isErrorEnabled())
                    LOGGER.error(e.getLocalizedMessage(), e);
                if (conf.isFailIgnored()) {
                    continue;
                } else {
                    listenerForwarder.failed(e);
                    throw e;
                }
            }
            // add the file to the return
            ret.add(new FileSystemEvent(outputFile.getAbsoluteFile(), FileSystemEventType.FILE_ADDED));
        } else {
            list.add(dataModel);
        }
    }

    if (!conf.isNtoN()) {
        final File outputFile;
        // append the incoming data structure
        try {
            outputFile = buildOutput(outputDir, root);
        } catch (ActionException e) {
            if (LOGGER.isErrorEnabled())
                LOGGER.error(e.getLocalizedMessage(), e);
            listenerForwarder.failed(e);
            throw e;
        }
        // add the file to the return
        ret.add(new FileSystemEvent(outputFile.getAbsoluteFile(), FileSystemEventType.FILE_ADDED));
    }

    listenerForwarder.completed();
    return ret;
}

From source file:org.archive.crawler.frontier.WorkQueueFrontier.java

/**
 * This method compiles a human readable report on the status of the frontier
 * at the time of the call.
 * @param writer Where to write to.
 */
@Override
public synchronized void reportTo(PrintWriter writer) {
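    // the headline counts below are derived from the sizes of the frontier's queue collections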
    int allCount = allQueues.size();
    int inProcessCount = inProcessQueues.size();
    int readyCount = readyClassQueues.size();
    int snoozedCount = getSnoozedCount();
    int activeCount = inProcessCount + readyCount + snoozedCount;
    int inactiveCount = getTotalInactiveQueues();
    int retiredCount = getRetiredQueues().size();
    int exhaustedCount = allCount - activeCount - inactiveCount - retiredCount;

    writer.print("Frontier report - ");
    writer.print(ArchiveUtils.get12DigitDate());
    writer.print("\n");
    writer.print(" Job being crawled: ");
    writer.print(controller.getMetadata().getJobName());
    writer.print("\n");
    writer.print("\n -----===== STATS =====-----\n");
    writer.print(" Discovered:    ");
    writer.print(Long.toString(discoveredUriCount()));
    writer.print("\n");
    writer.print(" Queued:        ");
    writer.print(Long.toString(queuedUriCount()));
    writer.print("\n");
    writer.print(" Finished:      ");
    writer.print(Long.toString(finishedUriCount()));
    writer.print("\n");
    writer.print("  Successfully: ");
    writer.print(Long.toString(succeededFetchCount()));
    writer.print("\n");
    writer.print("  Failed:       ");
    writer.print(Long.toString(failedFetchCount()));
    writer.print("\n");
    writer.print("  Disregarded:  ");
    writer.print(Long.toString(disregardedUriCount()));
    writer.print("\n");
    writer.print("\n -----===== QUEUES =====-----\n");
    writer.print(" Already included size:     ");
    writer.print(Long.toString(uriUniqFilter.count()));
    writer.print("\n");
    writer.print("               pending:     ");
    writer.print(Long.toString(uriUniqFilter.pending()));
    writer.print("\n");
    writer.print("\n All class queues map size: ");
    writer.print(Long.toString(allCount));
    writer.print("\n");
    writer.print("             Active queues: ");
    writer.print(activeCount);
    writer.print("\n");
    writer.print("                    In-process: ");
    writer.print(inProcessCount);
    writer.print("\n");
    writer.print("                         Ready: ");
    writer.print(readyCount);
    writer.print("\n");
    writer.print("                       Snoozed: ");
    writer.print(snoozedCount);
    writer.print("\n");
    writer.print("           Inactive queues: ");
    writer.print(inactiveCount);
    writer.print(" (");
    Map<Integer, Queue<String>> inactives = getInactiveQueuesByPrecedence();
    boolean betwixt = false;
    for (Integer k : inactives.keySet()) {
        if (betwixt) {
            writer.print("; ");
        }
        writer.print("p");
        writer.print(k);
        writer.print(": ");
        writer.print(inactives.get(k).size());
        betwixt = true;
    }
    writer.print(")\n");
    writer.print("            Retired queues: ");
    writer.print(retiredCount);
    writer.print("\n");
    writer.print("          Exhausted queues: ");
    writer.print(exhaustedCount);
    writer.print("\n");

    State last = lastReachedState;
    writer.print("\n             Last state: " + last);

    writer.print("\n -----===== MANAGER THREAD =====-----\n");
    ToeThread.reportThread(managerThread, writer);

    writer.print("\n -----===== " + largestQueues.size() + " LONGEST QUEUES =====-----\n");
    appendQueueReports(writer, "LONGEST", largestQueues.getEntriesDescending().iterator(), largestQueues.size(),
            largestQueues.size());

    writer.print("\n -----===== IN-PROCESS QUEUES =====-----\n");
    Collection<WorkQueue> inProcess = inProcessQueues;
    ArrayList<WorkQueue> copy = extractSome(inProcess, maxQueuesPerReportCategory);
    appendQueueReports(writer, "IN-PROCESS", copy.iterator(), copy.size(), maxQueuesPerReportCategory);

    writer.print("\n -----===== READY QUEUES =====-----\n");
    appendQueueReports(writer, "READY", this.readyClassQueues.iterator(), this.readyClassQueues.size(),
            maxQueuesPerReportCategory);

    writer.print("\n -----===== SNOOZED QUEUES =====-----\n");
    Object[] objs = snoozedClassQueues.toArray();
    DelayedWorkQueue[] qs = Arrays.copyOf(objs, objs.length, DelayedWorkQueue[].class);
    Arrays.sort(qs);
    appendQueueReports(writer, "SNOOZED", new ObjectArrayIterator(qs), getSnoozedCount(),
            maxQueuesPerReportCategory);

    writer.print("\n -----===== INACTIVE QUEUES =====-----\n");
    SortedMap<Integer, Queue<String>> sortedInactives = getInactiveQueuesByPrecedence();
    for (Integer prec : sortedInactives.keySet()) {
        Queue<String> inactiveQueues = sortedInactives.get(prec);
        appendQueueReports(writer, "INACTIVE-p" + prec, inactiveQueues.iterator(), inactiveQueues.size(),
                maxQueuesPerReportCategory);
    }

    writer.print("\n -----===== RETIRED QUEUES =====-----\n");
    appendQueueReports(writer, "RETIRED", getRetiredQueues().iterator(), getRetiredQueues().size(),
            maxQueuesPerReportCategory);

    writer.flush();
}