Example usage for java.util LinkedHashSet add

List of usage examples for java.util LinkedHashSet add

Introduction

On this page you can find example usages of java.util.LinkedHashSet.add.

Prototype

boolean add(E e);

Document

Adds the specified element to this set if it is not already present (optional operation).
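
A minimal, self-contained sketch (class and variable names are illustrative, not taken from the examples below) shows the two properties the usages below rely on: add returns false when the element is already present, and iteration follows insertion order.

import java.util.LinkedHashSet;

public class LinkedHashSetAddDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> colors = new LinkedHashSet<String>();

        System.out.println(colors.add("red"));   // true: "red" was not yet present
        System.out.println(colors.add("green")); // true
        System.out.println(colors.add("red"));   // false: duplicate, set unchanged

        // Iteration order matches insertion order, so this prints [red, green]
        System.out.println(colors);
    }
}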

Usage

From source file:org.apache.lens.driver.jdbc.ColumnarSQLRewriter.java

/**
 * Gets all columns used for dimension tables.
 * @param node the AST node to inspect
 */
public void getAllDimColumns(ASTNode node) {

    if (node == null) {
        log.debug("Input AST is null ");
        return;
    }
    // Assuming column is specified with table.column format
    if (node.getToken().getType() == HiveParser.DOT) {
        String table = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier).toString();
        String column = node.getChild(1).toString();

        Iterator<String> iterator = tableToAliasMap.keySet().iterator();
        while (iterator.hasNext()) {
            String tab = iterator.next();
            String alias = tableToAliasMap.get(tab);

            if ((table.equals(tab) || table.equals(alias)) && column != null) {
                LinkedHashSet<String> cols;
                if (!tableToAccessedColMap.containsKey(tab)) {
                    cols = new LinkedHashSet<String>();
                    cols.add(column);
                    tableToAccessedColMap.put(tab, cols);
                } else {
                    cols = tableToAccessedColMap.get(tab);
                    if (!cols.contains(column)) {
                        cols.add(column);
                    }
                }
            }
        }
    }
    for (int i = 0; i < node.getChildCount(); i++) {
        ASTNode child = (ASTNode) node.getChild(i);
        getAllDimColumns(child);
    }
}

From source file:org.alfresco.repo.workflow.WorkflowReportServiceImpl.java

/**
 *
 * {@inheritDoc}
 */
@Override
public Map<QName, Serializable> prepareTaskProperties(WorkflowTask task,
        Map<QName, Serializable> propertiesCurrent) {
    Map<QName, Serializable> taskProperties = new HashMap<QName, Serializable>();
    Set<Entry<QName, Serializable>> entrySet = propertiesCurrent.entrySet();

    if (DEBUG_ENABLED) {
        LOGGER.debug("Metadata of '" + task.getId() + "' is :" + propertiesCurrent);
    }
    if (propertiesCurrent.get(WorkflowModel.ASSOC_POOLED_ACTORS) instanceof Collection) {
        Collection<?> pooledActors = (Collection<?>) propertiesCurrent
                .remove(WorkflowModel.ASSOC_POOLED_ACTORS);
        // process if actually there is data
        if (pooledActors.size() > 0) {
            LinkedHashSet<String> pooledUsers = new LinkedHashSet<String>();
            LinkedHashSet<String> pooledGroups = new LinkedHashSet<String>();
            for (Object object : pooledActors) {
                String actorId = getActor(object).getSecond();
                AuthorityType authorityType = AuthorityType.getAuthorityType(actorId);
                if (authorityType == AuthorityType.GROUP) {
                    pooledGroups.add(actorId);
                } else if (authorityType == AuthorityType.USER) {
                    pooledUsers.add(actorId);
                }
            }
            taskProperties.put(WorkflowReportConstants.PROP_POOLED_GROUPS, pooledGroups);
            taskProperties.put(WorkflowReportConstants.PROP_POOLED_ACTORS, pooledUsers);
        }
    }
    if (propertiesCurrent.get(WorkflowModel.ASSOC_ASSIGNEES) instanceof Collection) {
        Collection<?> pooledActors = (Collection<?>) propertiesCurrent.get(WorkflowModel.ASSOC_ASSIGNEES);

        taskProperties.put(WorkflowModel.ASSOC_ASSIGNEES, (Serializable) pooledActors);
    }
    if (propertiesCurrent.get(WorkflowModel.ASSOC_GROUP_ASSIGNEE) != null) {
        String pooledGroup = propertiesCurrent.get(WorkflowModel.ASSOC_GROUP_ASSIGNEE).toString();
        String actorId = getActor(pooledGroup).getSecond();
        taskProperties.put(WorkflowModel.ASSOC_GROUP_ASSIGNEE, actorId);
    }
    for (Entry<QName, Serializable> entry : entrySet) {
        QName qName = entry.getKey();
        if (SKIPPED_DATA.contains(qName)) {
            continue;
        }
        if (StringUtils.isEmpty(qName.getNamespaceURI())) {
            continue;
        }
        if (UPDATED_DATA.containsKey(qName)) {
            QName qNameToUpdate = UPDATED_DATA.get(qName);
            if (qNameToUpdate != null) {
                taskProperties.put(qNameToUpdate, entry.getValue());
            } // else skip
        } else {
            taskProperties.put(qName, entry.getValue());
        }
    }
    taskProperties.put(ContentModel.PROP_NAME, task.getId());
    taskProperties.put(ContentModel.PROP_TITLE, task.getName());

    Serializable taskId = taskProperties.remove(WorkflowModel.PROP_TASK_ID);
    if (taskId != null) {
        taskId = taskId.toString().replaceAll("\\D?", "");
    }
    taskProperties.put(WorkflowModel.PROP_TASK_ID, taskId);
    if (!taskProperties.containsKey(WorkflowModel.PROP_OUTCOME)) {
        taskProperties.put(WorkflowModel.PROP_OUTCOME, "Not Yet Started");
    }
    taskProperties.put(CMFModel.PROP_WF_TASK_STATE, task.getState().toString());
    QName packageId = UPDATED_DATA.get(WorkflowModel.TYPE_PACKAGE);
    Object nodeRef = taskProperties.get(packageId);
    if (nodeRef != null && !nodeRef.toString().trim().isEmpty()) {
        NodeRef nodeRefCreated = new NodeRef(nodeRef.toString());
        if (nodeService.exists(nodeRefCreated)) {
            Pair<String, String> workflowItems = getWorkflowItems(nodeRefCreated, CMFModel.PROP_TYPE);
            taskProperties.put(CMFModel.PROP_WF_CONTEXT_TYPE, workflowItems.getFirst());
            taskProperties.put(CMFModel.PROP_WF_CONTEXT_ID, workflowItems.getSecond());
        }
    }

    WorkflowPath workflowPath = task.getPath();
    if (workflowPath == null) {
        // skip wf data
        return taskProperties;
    }

    WorkflowInstance instance = workflowPath.getInstance();
    taskProperties.put(WorkflowModel.PROP_WORKFLOW_DEFINITION_ID,
            workflowPath.getInstance().getDefinition().getId());

    taskProperties.put(WorkflowModel.PROP_WORKFLOW_DEFINITION_NAME, instance.getDefinition().getName());
    taskProperties.put(WorkflowModel.PROP_WORKFLOW_INSTANCE_ID, instance.getId());

    String definitionId = task.getDefinition().getNode().getName();
    if (definitionId != null) {
        taskProperties.put(CMFModel.PROP_TYPE, definitionId);
    }
    // fix priority if not set
    if (!propertiesCurrent.containsKey(WorkflowModel.PROP_PRIORITY)) {
        taskProperties.put(WorkflowModel.PROP_PRIORITY, workflowPath.getInstance().getPriority());
    } else {
        taskProperties.put(WorkflowModel.PROP_PRIORITY, propertiesCurrent.get(WorkflowModel.PROP_PRIORITY));
    }
    return taskProperties;
}

From source file:com.tct.email.mail.store.ImapFolder.java

public void fetchInternal(Message[] messages, FetchProfile fp, MessageRetrievalListener listener)
        throws MessagingException {
    if (messages.length == 0) {
        return;
    }
    //[FEATURE]-Add-BEGIN by TSCD.Chao Zhang,04/14/2014,FR 631895(porting from FR 472914)
    int limitedSize = messages[0].getDownloadOptions();
    //[FEATURE]-Add-END by TSCD.Chao Zhang
    //[FEATURE]-Add-BEGIN by TSCD.chao zhang,04/25/2014,FR 631895(porting from  FR487417)
    if (downloadRemainFlag) {
        limitedSize = Utility.ENTIRE_MAIL;
    }
    //[FEATURE]-Add-END by TSCD.Chao Zhang
    checkOpen();
    HashMap<String, Message> messageMap = new HashMap<String, Message>();
    for (Message m : messages) {
        messageMap.put(m.getUid(), m);
    }

    /*
     * Figure out what command we are going to run:
     * FLAGS     - UID FETCH (FLAGS)
     * ENVELOPE  - UID FETCH (INTERNALDATE UID RFC822.SIZE FLAGS BODY.PEEK[
     *                            HEADER.FIELDS (date subject from content-type to cc)])
     * STRUCTURE - UID FETCH (BODYSTRUCTURE)
     * BODY_SANE - UID FETCH (BODY.PEEK[]<0.N>) where N = max bytes returned
     * BODY      - UID FETCH (BODY.PEEK[])
     * Part      - UID FETCH (BODY.PEEK[ID]) where ID = mime part ID
     */

    final LinkedHashSet<String> fetchFields = new LinkedHashSet<String>();

    fetchFields.add(ImapConstants.UID);
    if (fp.contains(FetchProfile.Item.FLAGS)) {
        fetchFields.add(ImapConstants.FLAGS);
    }
    if (fp.contains(FetchProfile.Item.ENVELOPE)) {
        fetchFields.add(ImapConstants.INTERNALDATE);
        fetchFields.add(ImapConstants.RFC822_SIZE);
        fetchFields.add(ImapConstants.FETCH_FIELD_HEADERS);
    }
    if (fp.contains(FetchProfile.Item.STRUCTURE)) {
        fetchFields.add(ImapConstants.BODYSTRUCTURE);
    }

    if (fp.contains(FetchProfile.Item.BODY_SANE)) {
        fetchFields.add(ImapConstants.FETCH_FIELD_BODY_PEEK_SANE);
    }
    if (fp.contains(FetchProfile.Item.BODY)) {
        fetchFields.add(ImapConstants.FETCH_FIELD_BODY_PEEK);
    }

    // TODO Why are we only fetching the first part given?
    final Part fetchPart = fp.getFirstPart();
    if (fetchPart != null) {
        final String[] partIds = fetchPart.getHeader(MimeHeader.HEADER_ANDROID_ATTACHMENT_STORE_DATA);
        // TODO Why can a single part have more than one Id? And why should we only fetch
        // the first id if there are more than one?
        if (partIds != null) {
            //[FEATURE]-Add-BEGIN by TSCD.Chao Zhang,04/14/2014,FR 631895(porting from FR 472914)
            String fetchFieldCommand = ImapConstants.FETCH_FIELD_BODY_PEEK_BARE + "[" + partIds[0] + "]";
            if (limitedSize != Utility.ENTIRE_MAIL
                    && fetchPart.getMimeType().contains(ImapConstants.TEXT.toLowerCase())) {
                fetchFieldCommand = fetchFieldCommand + "<0." + limitedSize + ">";
            }
            fetchFields.add(fetchFieldCommand);
            //[FEATURE]-Add-END by TSCD.Chao Zhang
        }
    }

    try {
        mConnection.sendCommand(String.format(Locale.US, ImapConstants.UID_FETCH + " %s (%s)",
                ImapStore.joinMessageUids(messages),
                Utility.combine(fetchFields.toArray(new String[fetchFields.size()]), ' ')), false);
        ImapResponse response;
        do {
            response = null;
            try {
                // TS: Gantao 2015-12-07 EMAIL BUGFIX_1020377 MOD_S
                //set ui callback when network downloading, update progress bar when fetching
                // attachment from server.
                //response = mConnection.readResponse();
                response = mConnection.readResponse(listener);
                // TS: Gantao 2015-12-07 EMAIL BUGFIX_1020377 MOD_S

                if (!response.isDataResponse(1, ImapConstants.FETCH)) {
                    continue; // Ignore
                }
                final ImapList fetchList = response.getListOrEmpty(2);
                final String uid = fetchList.getKeyedStringOrEmpty(ImapConstants.UID).getString();
                if (TextUtils.isEmpty(uid))
                    continue;

                ImapMessage message = (ImapMessage) messageMap.get(uid);
                if (message == null)
                    continue;

                if (fp.contains(FetchProfile.Item.FLAGS)) {
                    final ImapList flags = fetchList.getKeyedListOrEmpty(ImapConstants.FLAGS);
                    for (int i = 0, count = flags.size(); i < count; i++) {
                        final ImapString flag = flags.getStringOrEmpty(i);
                        if (flag.is(ImapConstants.FLAG_DELETED)) {
                            message.setFlagInternal(Flag.DELETED, true);
                        } else if (flag.is(ImapConstants.FLAG_ANSWERED)) {
                            message.setFlagInternal(Flag.ANSWERED, true);
                        } else if (flag.is(ImapConstants.FLAG_SEEN)) {
                            message.setFlagInternal(Flag.SEEN, true);
                        } else if (flag.is(ImapConstants.FLAG_FLAGGED)) {
                            message.setFlagInternal(Flag.FLAGGED, true);
                        }
                    }
                }
                if (fp.contains(FetchProfile.Item.ENVELOPE)) {
                    final Date internalDate = fetchList.getKeyedStringOrEmpty(ImapConstants.INTERNALDATE)
                            .getDateOrNull();
                    final int size = fetchList.getKeyedStringOrEmpty(ImapConstants.RFC822_SIZE)
                            .getNumberOrZero();
                    final String header = fetchList
                            .getKeyedStringOrEmpty(ImapConstants.BODY_BRACKET_HEADER, true).getString();

                    message.setInternalDate(internalDate);
                    message.setSize(size);
                    message.parse(Utility.streamFromAsciiString(header));
                }
                if (fp.contains(FetchProfile.Item.STRUCTURE)) {
                    ImapList bs = fetchList.getKeyedListOrEmpty(ImapConstants.BODYSTRUCTURE);
                    if (!bs.isEmpty()) {
                        try {
                            parseBodyStructure(bs, message, ImapConstants.TEXT);
                        } catch (MessagingException e) {
                            if (Logging.LOGD) {
                                LogUtils.v(Logging.LOG_TAG, e, "Error handling message");
                            }
                            message.setBody(null);
                        }
                    }
                }
                if (fp.contains(FetchProfile.Item.BODY) || fp.contains(FetchProfile.Item.BODY_SANE)) {
                    // Body is keyed by "BODY[]...".
                    // Previously used "BODY[..." but this can be confused with "BODY[HEADER..."
                    // TODO Should we accept "RFC822" as well??
                    ImapString body = fetchList.getKeyedStringOrEmpty("BODY[]", true);
                    InputStream bodyStream = body.getAsStream();
                    message.parse(bodyStream);
                }
                if (fetchPart != null) {
                    InputStream bodyStream = fetchList.getKeyedStringOrEmpty("BODY[", true).getAsStream();
                    String[] encodings = fetchPart.getHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING);

                    String contentTransferEncoding = null;
                    if (encodings != null && encodings.length > 0) {
                        contentTransferEncoding = encodings[0];
                    } else {
                        // According to http://tools.ietf.org/html/rfc2045#section-6.1
                        // "7bit" is the default.
                        contentTransferEncoding = "7bit";
                    }

                    try {
                        // TODO Don't create 2 temp files.
                        // decodeBody creates BinaryTempFileBody, but we could avoid this
                        // if we implement ImapStringBody.
                        // (We'll need to share a temp file.  Protect it with a ref-count.)
                        fetchPart.setBody(
                                decodeBody(bodyStream, contentTransferEncoding, fetchPart.getSize(), listener));
                    } catch (Exception e) {
                        // TODO: Figure out what kinds of exceptions might actually be thrown
                        // from here. This blanket catch-all is because we're not sure what to
                        // do if we don't have a contentTransferEncoding, and we don't have
                        // time to figure out what exceptions might be thrown.
                        LogUtils.e(Logging.LOG_TAG, "Error fetching body %s", e);
                    }
                }

                if (listener != null) {
                    listener.messageRetrieved(message);
                }
            } finally {
                destroyResponses();
            }
        } while (!response.isTagged());
    } catch (IOException ioe) {
        throw ioExceptionHandler(mConnection, ioe);
    }
}

From source file:ch.unibas.fittingwizard.presentation.fitting.FittingParameterPage.java

private File getInitalCharges(MoleculeQueryService queryService) {
    LinkedHashSet<ChargeValue> userCharges = new LinkedHashSet<>();
    LinkedHashSet<AtomTypeId> atomTypesRequiringUserInput = new LinkedHashSet<>();

    List<Molecule> moleculesWithMissingUserCharges = queryService.findMoleculesWithMissingUserCharges();
    atomTypesRequiringUserInput.addAll(getAllAtomTypeIds(moleculesWithMissingUserCharges));

    boolean multipleMoleculesDefined = queryService.getNumberOfMolecules() > 1;
    if (multipleMoleculesDefined) {
        List<AtomTypeId> duplicates = queryService.findUnequalAndDuplicateAtomTypes();
        atomTypesRequiringUserInput.addAll(duplicates);
    }

    if (atomTypesRequiringUserInput.size() > 0) {
        LinkedHashSet<ChargeValue> editedValues = editAtomTypeChargesDialog
                .editAtomTypes(atomTypesRequiringUserInput);
        if (editedValues == null) {
            // TODO ... no nested return
            return null;
        }
        userCharges.addAll(editedValues);
    }

    // Fill up with all remaining values in order to generate a complete charges file.
    // Because this is a set, the already edited values will not be replaced.
    LinkedHashSet<ChargeValue> allCharges = queryService.getUserChargesFromMoleculesWithCharges();
    for (ChargeValue charge : allCharges) {
        if (!userCharges.contains(charge)) {
            userCharges.add(charge);
        }
    }

    File initalChargesFile = generateInitialChargesFileFromUserCharges(userCharges);
    return initalChargesFile;
}

From source file:org.codehaus.mojo.jsimport.AbstractImportMojo.java

/**
 * Build up the dependency graph and global symbol table by parsing the project's dependencies.
 *
 * @param scope compile or test.
 * @param fileDependencyGraphModificationTime the time that the dependency graph was updated; used for file time
 *            comparisons to check the age of the files.
 * @param processedFiles an insert-ordered set of files that have been processed.
 * @param targetFolder Where the target files live.
 * @param workFolder Where we can create some long lived information that may be useful to subsequent builds.
 * @param compileWorkFolder Ditto but in the case of testing it points to where the compile working folder is.
 * @return true if the dependency graph has been updated.
 * @throws MojoExecutionException if something bad happens.
 */
private boolean buildDependencyGraphForDependencies(Scope scope, long fileDependencyGraphModificationTime,
        LinkedHashSet<File> processedFiles, File targetFolder, File workFolder, File compileWorkFolder)
        throws MojoExecutionException {
    File targetJsFolder = new File(targetFolder, "js");

    boolean fileDependencyGraphUpdated = false;

    // Determine how we need to filter things both for direct filtering and transitive filtering.

    String scopeStr = (scope == Scope.COMPILE ? Artifact.SCOPE_COMPILE : Artifact.SCOPE_TEST);

    AndArtifactFilter jsArtifactFilter = new AndArtifactFilter();
    jsArtifactFilter.add(new ScopeArtifactFilter(scopeStr));
    jsArtifactFilter.add(new TypeArtifactFilter("js"));

    AndArtifactFilter wwwZipArtifactFilter = new AndArtifactFilter();
    wwwZipArtifactFilter.add(new ScopeArtifactFilter(scopeStr));
    wwwZipArtifactFilter.add(new TypeArtifactFilter("zip"));
    wwwZipArtifactFilter.add(new ArtifactFilter() {
        public boolean include(Artifact artifact) {
            return artifact.hasClassifier() && artifact.getClassifier().equals("www");
        }
    });

    // Determine the artifacts to resolve and associate their transitive dependencies.

    Map<Artifact, LinkedHashSet<Artifact>> directArtifactWithTransitives = new HashMap<Artifact, LinkedHashSet<Artifact>>(
            dependencies.size());

    Set<Artifact> directArtifacts = new HashSet<Artifact>(dependencies.size());
    LinkedHashSet<Artifact> transitiveArtifacts = new LinkedHashSet<Artifact>();

    for (Dependency dependency : dependencies) {
        // Process imports and symbols of this dependencies' transitives
        // first.
        Artifact directArtifact = artifactFactory.createDependencyArtifact(dependency.getGroupId(),
                dependency.getArtifactId(), VersionRange.createFromVersion(dependency.getVersion()),
                dependency.getType(), dependency.getClassifier(), dependency.getScope());

        if (!jsArtifactFilter.include(directArtifact) && !wwwZipArtifactFilter.include(directArtifact)) {
            continue;
        }

        Set<Artifact> artifactsToResolve = new HashSet<Artifact>(1);
        artifactsToResolve.add(directArtifact);

        ArtifactResolutionResult result;
        try {
            result = resolver.resolveTransitively(artifactsToResolve, project.getArtifact(), remoteRepositories,
                    localRepository, artifactMetadataSource);
        } catch (ArtifactResolutionException e) {
            throw new MojoExecutionException("Problem resolving dependencies", e);
        } catch (ArtifactNotFoundException e) {
            throw new MojoExecutionException("Problem resolving dependencies", e);
        }

        // Associate the transitive dependencies with the direct dependency and aggregate all transitives for
        // collection later.

        LinkedHashSet<Artifact> directTransitiveArtifacts = new LinkedHashSet<Artifact>(
                result.getArtifacts().size());
        for (Object o : result.getArtifacts()) {
            Artifact resolvedArtifact = (Artifact) o;
            if (jsArtifactFilter.include(resolvedArtifact) && //
                    !resolvedArtifact.equals(directArtifact)) {
                directTransitiveArtifacts.add(resolvedArtifact);
            }
        }

        directArtifacts.add(directArtifact);
        transitiveArtifacts.addAll(directTransitiveArtifacts);
        directArtifactWithTransitives.put(directArtifact, directTransitiveArtifacts);
    }

    // Resolve the best versions of the transitives to use by asking Maven to collect them.

    Set<Artifact> collectedArtifacts = new HashSet<Artifact>(
            directArtifacts.size() + transitiveArtifacts.size());
    Map<ArtifactId, Artifact> indexedCollectedDependencies = new HashMap<ArtifactId, Artifact>(
            collectedArtifacts.size());
    try {
        // Note that we must pass an insert-order set into the collector. The collector appears to assume that order
        // is significant, even though it is undocumented.
        LinkedHashSet<Artifact> collectableArtifacts = new LinkedHashSet<Artifact>(directArtifacts);
        collectableArtifacts.addAll(transitiveArtifacts);

        ArtifactResolutionResult resolutionResult = artifactCollector.collect(collectableArtifacts,
                project.getArtifact(), localRepository, remoteRepositories, artifactMetadataSource, null, //
                Collections.EMPTY_LIST);
        for (Object o : resolutionResult.getArtifacts()) {
            Artifact collectedArtifact = (Artifact) o;
            collectedArtifacts.add(collectedArtifact);

            // Build up an index of collected transitive dependencies so that we can refer back to them as we
            // process the direct dependencies.
            ArtifactId collectedArtifactId = new ArtifactId(collectedArtifact.getGroupId(),
                    collectedArtifact.getArtifactId());
            indexedCollectedDependencies.put(collectedArtifactId, collectedArtifact);
        }

        if (getLog().isDebugEnabled()) {
            getLog().debug("Dependencies collected: " + collectedArtifacts.toString());
        }
    } catch (ArtifactResolutionException e) {
        throw new MojoExecutionException("Cannot collect dependencies", e);
    }

    // Now go through direct artifacts and process their transitives.

    LocalRepositoryCollector localRepositoryCollector = new LocalRepositoryCollector(project, localRepository,
            new File[] {});

    for (Entry<Artifact, LinkedHashSet<Artifact>> entry : directArtifactWithTransitives.entrySet()) {
        Artifact directArtifact = entry.getKey();
        LinkedHashSet<Artifact> directArtifactTransitives = entry.getValue();

        LinkedHashSet<String> transitivesAsImports = new LinkedHashSet<String>(
                directArtifactTransitives.size());

        for (Object o : directArtifactTransitives) {
            Artifact directTransitiveArtifact = (Artifact) o;

            // Get the transitive artifact that Maven decided was the best to use.

            ArtifactId directTransitiveArtifactId = new ArtifactId(directTransitiveArtifact.getGroupId(),
                    directTransitiveArtifact.getArtifactId());
            Artifact transitiveArtifact = indexedCollectedDependencies.get(directTransitiveArtifactId);

            List<File> transitiveArtifactFiles = getArtifactFiles(transitiveArtifact, targetFolder, workFolder,
                    compileWorkFolder, localRepositoryCollector);

            // Only process this dependency if we've not done so
            // already.
            for (File transitiveArtifactFile : transitiveArtifactFiles) {
                if (!processedFiles.contains(transitiveArtifactFile)) {
                    String localRepository = localRepositoryCollector
                            .findLocalRepository(transitiveArtifactFile.getAbsolutePath());
                    if (localRepository != null) {
                        if (processFileForImportsAndSymbols(new File(localRepository), targetJsFolder,
                                transitiveArtifactFile, fileDependencyGraphModificationTime,
                                directArtifactTransitives)) {

                            processedFiles.add(transitiveArtifactFile);

                            fileDependencyGraphUpdated = true;
                        }
                    } else {
                        throw new MojoExecutionException(
                                "Problem determining local repository for transitive file: "
                                        + transitiveArtifactFile);
                    }
                }

                // Add transitives to the artifacts set of dependencies -
                // as if they were @import statements themselves.
                transitivesAsImports.add(transitiveArtifactFile.getPath());
            }
        }

        // Now deal with the pom specified dependency.
        List<File> artifactFiles = getArtifactFiles(directArtifact, targetFolder, workFolder, compileWorkFolder,
                localRepositoryCollector);
        for (File artifactFile : artifactFiles) {
            String artifactPath = artifactFile.getAbsolutePath();

            // Process imports and symbols of this dependency if we've not
            // already done so.
            if (!processedFiles.contains(artifactFile)) {
                String localRepository = localRepositoryCollector
                        .findLocalRepository(artifactFile.getAbsolutePath());
                if (localRepository != null) {
                    if (processFileForImportsAndSymbols(new File(localRepository), targetJsFolder, artifactFile,
                            fileDependencyGraphModificationTime, null)) {
                        processedFiles.add(artifactFile);

                        fileDependencyGraphUpdated = true;
                    }
                } else {
                    throw new MojoExecutionException(
                            "Problem determining local repository for file: " + artifactFile);
                }
            }

            // Add in our transitives to the dependency graph if they're not
            // already there.
            LinkedHashSet<String> existingImports = fileDependencies.get(artifactPath);
            if (existingImports.addAll(transitivesAsImports)) {
                if (getLog().isDebugEnabled()) {
                    getLog().debug("Using transitives as import: " + transitivesAsImports + " for file: "
                            + artifactPath);
                }
                fileDependencyGraphUpdated = true;
            }
        }

    }

    return fileDependencyGraphUpdated;
}

From source file:pt.lsts.neptus.util.logdownload.LogsDownloaderWorkerActions.java

@SuppressWarnings("serial")
private AbstractAction createDeleteSelectedLogFilesAction() {
    return new AbstractAction() {
        @Override
        public void actionPerformed(ActionEvent e) {
            if (!gui.validateAndSetUI()) {
                gui.popupErrorConfigurationDialog();
                return;
            }
            AsyncTask task = new AsyncTask() {
                @Override
                public Object run() throws Exception {
                    gui.deleteSelectedLogFilesButton.setEnabled(false);

                    Object[] objArray = gui.logFilesList.getSelectedValues();
                    if (objArray.length == 0)
                        return null;

                    JOptionPane jop = new JOptionPane(
                            I18n.text("Are you sure you want to delete selected log files from remote system?"),
                            JOptionPane.QUESTION_MESSAGE, JOptionPane.YES_NO_OPTION);
                    JDialog dialog = jop.createDialog(gui.frameCompHolder,
                            I18n.text("Remote Delete Confirmation"));
                    dialog.setModalityType(ModalityType.DOCUMENT_MODAL);
                    dialog.setVisible(true);
                    Object userChoice = jop.getValue();
                    try {
                        if (((Integer) userChoice) != JOptionPane.YES_OPTION) {
                            return null;
                        }
                    } catch (Exception e2) {
                        NeptusLog.pub().error(e2.getMessage());
                        return null;
                    }
                    gui.deleteSelectedLogFoldersButton.setEnabled(true);

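                    // Collect the log files that were successfully deleted on the server, in selection order.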
                    LinkedHashSet<LogFileInfo> logFiles = new LinkedHashSet<LogFileInfo>();
                    for (Object comp : objArray) {
                        if (resetting)
                            break;

                        try {
                            LogFileInfo lfx = (LogFileInfo) comp;
                            if (worker.deleteLogFileFromServer(lfx))
                                logFiles.add(lfx);
                        } catch (Exception e) {
                            NeptusLog.pub().debug(e.getMessage());
                        }
                    }
                    if (!resetting) {
                        LogsDownloaderWorkerGUIUtil.updateLogFilesStateDeleted(logFiles,
                                gui.downloadWorkersHolder, worker.getDirBaseToStoreFiles(),
                                worker.getLogLabel());

                        worker.updateFilesListGUIForFolderSelected();
                    }
                    return true;
                }

                @Override
                public void finish() {
                    gui.deleteSelectedLogFilesButton.setEnabled(true);
                    gui.logFilesList.revalidate();
                    gui.logFilesList.repaint();
                    gui.logFilesList.setEnabled(true);
                    gui.logFolderList.revalidate();
                    gui.logFolderList.repaint();
                    gui.logFolderList.setEnabled(true);
                    try {
                        this.getResultOrThrow();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            };

            AsyncWorker.getWorkerThread().postTask(task);
        }
    };
}

From source file:org.alfresco.bm.dataload.RMBaseEventProcessor.java

/**
 * Obtains all record folders underneath the specified parent if the parent is a category, or the parent itself if it is a record folder.
 *
 * @param parentFolder the parent folder to retrieve child folders from
 * @return all record folders underneath the specified parent if the parent is a category, or the parent itself if it is a record folder
 */
protected Set<FolderData> getRecordFolders(FolderData parentFolder) {
    LinkedHashSet<FolderData> result = new LinkedHashSet<FolderData>();
    String context = parentFolder.getContext();
    if (RECORD_CATEGORY_CONTEXT.equals(context)) {
        List<FolderData> directCategoryChildren = getDirectChildrenByContext(parentFolder,
                RECORD_CATEGORY_CONTEXT);
        if (directCategoryChildren.size() > 0) {
            for (FolderData childFolder : directCategoryChildren) {
                result.addAll(getRecordFolders(childFolder));
            }
        }

        List<FolderData> directRecordFolderChildren = getDirectChildrenByContext(parentFolder,
                RECORD_FOLDER_CONTEXT);
        if (directRecordFolderChildren.size() > 0) {
            result.addAll(directRecordFolderChildren);
        }
    } else if (RECORD_FOLDER_CONTEXT.equals(context)) {
        result.add(parentFolder);
    }
    return result;
}

From source file:org.dllearner.algorithms.probabilistic.structure.unife.leap.AbstractLEAP.java

/**
 * Replaces the dummy super class in the learned axioms with the class to describe and updates the ontology accordingly.
 *
 * @param finalOntology the ontology to update
 * @param learnedAxioms the axioms produced by the learning step
 * @return the updated ontology
 */
protected OWLOntology replaceDummyClass(OWLOntology finalOntology, Set<OWLAxiom> learnedAxioms)
        throws UnsupportedLearnedAxiom {
    logger.debug("Replacing super class \"dummyClass\" with \"classToDescribe\"");
    ClassLearningProblem clp = (ClassLearningProblem) cela.getLearningProblem();
    OWLOntologyManager man = finalOntology.getOWLOntologyManager();
    OWLDataFactory df = man.getOWLDataFactory();
    int numInitialAxioms = finalOntology.getLogicalAxiomCount();
    // remove the learned Axioms
    //man.removeAxiom(finalOntology, learnedAxioms.iterator().next());
    Set<OWLAxiom> learnedAxiomsCopy = new LinkedHashSet<>(learnedAxioms);
    for (OWLAxiom axiom : finalOntology.getLogicalAxioms(Imports.EXCLUDED)) {
        for (OWLAxiom axiomToRemove : learnedAxiomsCopy) {
            // It is convenient to use a copy of probAddedAxioms so that axioms already
            // found during the search can be removed, thus reducing the number of checks.
            //logger.debug("Learned axiom to remove: " + BundleUtilities.getManchesterSyntaxString(axiomToRemove));
            if (axiomToRemove.equalsIgnoreAnnotations(axiom)) {
                man.removeAxiom(finalOntology, axiom);
                learnedAxiomsCopy.remove(axiomToRemove);
                break;
            }
        }
    }
    int numAxiomsAfterRemove = finalOntology.getLogicalAxiomCount();
    // check if correctly removed
    if (numAxiomsAfterRemove != numInitialAxioms - learnedAxioms.size()) {
        String msg = "Error during the replacement of super class: " + "Axiom remotion was incorrect. "
                + "numAxiomsAfterRemove: " + numAxiomsAfterRemove + " numInitialAxioms: " + numInitialAxioms
                + " numAxioms to remove: " + learnedAxioms.size() + " numAxioms removed: "
                + (numInitialAxioms - numAxiomsAfterRemove);
        logger.error(msg);
        throw new StructureLearningException(msg);
    }
    LinkedHashSet<OWLAxiom> newAxioms = new LinkedHashSet<>();
    for (OWLAxiom axiom : learnedAxioms) {
        OWLAxiom newAxiom;
        if (axiom.isOfType(AxiomType.SUBCLASS_OF)) {
            newAxiom = df.getOWLSubClassOfAxiom(((OWLSubClassOfAxiom) axiom).getSubClass(),
                    clp.getClassToDescribe(), axiom.getAnnotations());
        } else if (axiom.isOfType(AxiomType.EQUIVALENT_CLASSES)) {
            OWLClassExpression clazz = null;
            for (OWLClassExpression c : ((OWLEquivalentClassesAxiom) axiom).getClassExpressions()) {

                if (c.compareTo(getDummyClass()) != 0) {
                    clazz = c;
                    break;
                }
            }
            if (clazz == null) {
                throw new UnsupportedLearnedAxiom("The learned axiom " + axiom + "has a null class");
            }
            newAxiom = df.getOWLEquivalentClassesAxiom(clazz, clp.getClassToDescribe(), axiom.getAnnotations());
        } else {
            throw new UnsupportedLearnedAxiom("The learned axiom " + axiom + "is not supported");
        }
        newAxioms.add(newAxiom);
        logger.info("Learned Axiom: " + newAxiom);
    }
    man.addAxioms(finalOntology, newAxioms);
    // check if correctly added
    if (numInitialAxioms != numAxiomsAfterRemove + learnedAxioms.size()) {
        String msg = "Error during the replacement of super class: " + "Axiom addition was incorrect."
                + " numAxiomsAfterRemove: " + numAxiomsAfterRemove + " numAxioms to add: "
                + learnedAxioms.size() + " numAxioms added: " + (numInitialAxioms - numAxiomsAfterRemove);
        logger.error(msg);
        throw new StructureLearningException(msg);
    }
    logger.debug("Replaced all the super classes");
    return finalOntology;
}

From source file:org.codehaus.mojo.jsimport.AbstractImportMojo.java

/**
 * Builds the dependency graph from the source files.
 *
 * @param fileDependencyGraphModificationTime the time the dependency graph that was read in was last updated; used for comparing file times.
 * @param sourceJsFolder Where the source JS files live.
 * @param targetFolder Where the target files live.
 * @param processedFiles the files that have been processed as a consequence of this method.
 * @return true if the graph has been updated by this method.
 * @throws MojoExecutionException if something goes wrong.
 */
private boolean buildDependencyGraphForChangedSourceFiles(long fileDependencyGraphModificationTime,
        File sourceJsFolder, File targetFolder, LinkedHashSet<File> processedFiles)
        throws MojoExecutionException {
    File targetJsFolder = new File(targetFolder, "js");

    boolean fileDependencyGraphUpdated = false;

    FileCollector fileCollector = new FileCollector(buildContext, new String[] { "**/*.js" }, new String[] {});
    for (String source : fileCollector.collectPaths(sourceJsFolder, includes, excludes)) {
        File sourceFile = new File(sourceJsFolder, source);
        if (processFileForImportsAndSymbols(sourceJsFolder, targetJsFolder, sourceFile,
                fileDependencyGraphModificationTime, null)) {
            processedFiles.add(sourceFile);

            getLog().info("Processed: " + source);

            fileDependencyGraphUpdated = true;
        }
    }

    return fileDependencyGraphUpdated;
}

From source file:org.apache.tajo.engine.planner.rewrite.ProjectionPushDownRule.java

public LogicalNode visitScan(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, ScanNode node,
        Stack<LogicalNode> stack) throws PlanningException {

    Context newContext = new Context(context);

    Target[] targets;
    if (node.hasTargets()) {
        targets = node.getTargets();
    } else {
        targets = PlannerUtil.schemaToTargets(node.getTableSchema());
    }

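    // Collect, in insertion order and without duplicates, the targets that can be evaluated directly at this scan node.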
    LinkedHashSet<Target> projectedTargets = Sets.newLinkedHashSet();
    for (Iterator<Target> it = getFilteredTarget(targets, newContext.requiredSet); it.hasNext();) {
        Target target = it.next();
        newContext.addExpr(target);
    }

    for (Iterator<Target> it = context.targetListMgr.getFilteredTargets(newContext.requiredSet); it
            .hasNext();) {
        Target target = it.next();

        if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, target.getEvalTree(), node)) {
            projectedTargets.add(target);
            newContext.targetListMgr.markAsEvaluated(target);
        }
    }

    node.setTargets(projectedTargets.toArray(new Target[projectedTargets.size()]));
    LogicalPlanner.verifyProjectedFields(block, node);
    return node;
}