Example usage for java.util SortedSet addAll

Introduction

On this page you can find example usages of java.util.SortedSet.addAll, collected from open-source projects.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
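The sketch below is a minimal, self-contained illustration of this behavior; the class name and sample values are illustrative only and do not come from any example on this page. Note that most examples below build a TreeSet with a custom Comparator, in which case "already present" means comparator-equal: elements the Comparator considers equal are silently skipped by addAll.

import java.util.Arrays;
import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetAddAllDemo {
    public static void main(String[] args) {
        // Natural ordering: duplicates in the source collection are dropped.
        SortedSet<String> names = new TreeSet<>();
        names.addAll(Arrays.asList("mango", "apple", "mango", "banana"));
        System.out.println(names); // [apple, banana, mango]

        // With a custom Comparator, "duplicate" means comparator-equal:
        // "APPLE" is skipped because it compares equal to "apple".
        SortedSet<String> ci = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        ci.addAll(Arrays.asList("apple", "APPLE", "Banana"));
        System.out.println(ci); // [apple, Banana]

        // The boolean result reports whether the set changed.
        boolean changed = names.addAll(Arrays.asList("apple", "cherry"));
        System.out.println(changed); // true ("cherry" was added)
    }
}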

Usage

From source file:com.github.FraggedNoob.GitLabTransfer.GitlabRelatedData.java

/**
 * Put all the issues into the project. Note, will skip issues with the same
 * IID.
 * 
 * @param assigneeID
 *            - the assignee ID for all issues
 * @return T=successfully set issues, F=API failure during attempt
 */
public boolean putAllIssues(Integer assigneeID) {

    if (!createApi()) {
        return false;
    }

    // Get the current project's issues, in case we have duplicate IIDs
    SortedSet<GitlabIssue> currIssues = new TreeSet<GitlabIssue>(new IssueOrderByIID());
    try {
        List<GitlabIssue> ci = api.getIssues(project);
        currIssues.addAll(ci);
    } catch (IOException e1) {
        System.out.println("Error while getting current project issues.");
        e1.printStackTrace();
        return false; // per the @return contract above: F = API failure
    }

    // add, or skip, each issue we have
    for (GitlabIssue i : issues) {

        // Note, some issues might not have a milestone:
        GitlabMilestone ims = i.getMilestone();
        Integer mileIID = 0;
        Integer newMileID = 0;
        if (ims != null) {
            mileIID = i.getMilestone().getIid();
            newMileID = newMilestoneIDs.get(mileIID);
        }

        // Check if this issue IID exists in this project
        boolean skipIssue = false;
        int currIssueID = 0;
        for (GitlabIssue ci : currIssues) {
            if (ci.getIid() == i.getIid()) {
                currIssueID = ci.getId();
                skipIssue = true;
                break;
            }
        }

        // skip current issues with matching IIDs
        if (skipIssue) {
            // the issue IID exists
            newIssueIDs.put(i.getIid(), currIssueID); // store the IID-to-new-ID mapping
            System.out.printf("Skipping existing project issue: %s (IID=%d, ID=%d), in project %s\n",
                    i.getTitle(), i.getIid(), currIssueID, project.getName());
        } else {
            // the issue IID doesn't exist, so add it
            try {

                GitlabIssue current = api.createIssue(project.getId(), assigneeID, newMileID,
                        createIssueLabelString(i), i.getDescription(), i.getTitle());
                System.out.printf("Put project issue: %s (IID=%d), into project %s\n", i.getTitle(), i.getIid(),
                        project.getName());
                newIssueIDs.put(i.getIid(), current.getId()); // store the IID-to-new-ID mapping

                // edit the issue and set open/closed
                GitlabIssue.Action act = GitlabIssue.Action.LEAVE;
                if (i.getState().contains("lose")) { // matches "close"/"closed"
                    act = GitlabIssue.Action.CLOSE;
                }

                api.editIssue(project.getId(), current.getId(), assigneeID, newMileID,
                        createIssueLabelString(i), i.getDescription(), i.getTitle(), act);
            } catch (IOException e) {
                System.out.printf("Error while putting project issue: %s (IID=%d), into project %s\n",
                        i.getTitle(), i.getIid(), project.getName());
                e.printStackTrace();
                return false;
            }
        }
    }

    System.out.println("Final Issue IID-to-newID mapping:");
    for (Integer IID : newIssueIDs.keySet()) {
        System.out.printf("IID: %d\t = ID: %d \n", IID, newIssueIDs.get(IID));
    }

    return true;
}

From source file:org.apache.atlas.hive.hook.HiveHook.java

private void handleExternalTables(final HiveMetaStoreBridge dgiBridge, final HiveEventContext event,
        final LinkedHashMap<Type, Referenceable> tables) throws HiveException, MalformedURLException {
    List<Referenceable> entities = new ArrayList<>();
    final WriteEntity hiveEntity = (WriteEntity) getEntityByType(event.getOutputs(), Type.TABLE);

    Table hiveTable = hiveEntity == null ? null : hiveEntity.getTable();

    //Refresh to get the correct location
    if (hiveTable != null) {
        hiveTable = dgiBridge.hiveClient.getTable(hiveTable.getDbName(), hiveTable.getTableName());
    }

    if (hiveTable != null && TableType.EXTERNAL_TABLE.equals(hiveTable.getTableType())) {
        LOG.info("Registering external table process {} ", event.getQueryStr());
        final String location = lower(hiveTable.getDataLocation().toString());
        final ReadEntity dfsEntity = new ReadEntity();
        dfsEntity.setTyp(Type.DFS_DIR);
        dfsEntity.setD(new Path(location));

        SortedMap<ReadEntity, Referenceable> hiveInputsMap = new TreeMap<ReadEntity, Referenceable>(
                entityComparator) {
            {
                put(dfsEntity, dgiBridge.fillHDFSDataSet(location));
            }
        };

        SortedMap<WriteEntity, Referenceable> hiveOutputsMap = new TreeMap<WriteEntity, Referenceable>(
                entityComparator) {
            {
                put(hiveEntity, tables.get(Type.TABLE));
            }
        };

        SortedSet<ReadEntity> sortedIps = new TreeSet<>(entityComparator);
        sortedIps.addAll(hiveInputsMap.keySet());
        SortedSet<WriteEntity> sortedOps = new TreeSet<>(entityComparator);
        sortedOps.addAll(hiveOutputsMap.keySet());

        Referenceable processReferenceable = getProcessReferenceable(dgiBridge, event, sortedIps, sortedOps,
                hiveInputsMap, hiveOutputsMap);

        entities.addAll(tables.values());
        entities.add(processReferenceable);
        event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
    }
}

From source file:org.codehaus.mojo.license.DefaultThirdPartyTool.java

/**
 * {@inheritDoc}
 */
@Override
public void mergeLicenses(LicenseMap licenseMap, String... licenses) {
    if (licenses.length == 0) {
        return;
    }

    String mainLicense = licenses[0].trim();
    SortedSet<MavenProject> mainSet = licenseMap.get(mainLicense);
    if (mainSet == null) {
        getLogger().debug("No license [" + mainLicense + "] found, will create it.");
        mainSet = new TreeSet<MavenProject>(projectComparator);
        licenseMap.put(mainLicense, mainSet);
    }
    int size = licenses.length;
    for (int i = 1; i < size; i++) {
        String license = licenses[i].trim();
        SortedSet<MavenProject> set = licenseMap.get(license);
        if (set == null) {
            getLogger().debug("No license [" + license + "] found, skip this merge.");
            continue;
        }
        getLogger().debug("Merge license [" + license + "] (" + set.size() + " depedencies).");
        mainSet.addAll(set);
        set.clear();
        licenseMap.remove(license);
    }
}

From source file:de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

private void removeSubSequences(SortedSet<IOBehaviour> sequencesFront, SortedSet<IOBehaviour> sequencesBack,
        Set<IOBehaviour> newSequences, Set<IOBehaviour> rmSequences) {
    rmSequences.clear();
    sequencesFront.addAll(newSequences);
    Iterator<IOBehaviour> it = sequencesFront.iterator();
    if (!it.hasNext()) {
        //TODO: why?
        return;
    }
    IOBehaviour curr = it.next();
    while (it.hasNext()) {
        IOBehaviour next = it.next();
        if (newSequences.contains(curr)) {
            if (curr.getStart().compareTo(next.getStart()) == 0) {
                int i = 0;
                while (true) {
                    if (curr.getSequence().size() == i) {
                        rmSequences.add(curr);
                        break;
                    }
                    //System.out.println(curr.toString() + " vs " + next.toString());
                    int cmpT = curr.getSequence().get(i).compareTo(next.getSequence().get(i));
                    if (cmpT != 0) {
                        break;
                    }
                    // equal, check the next transition
                    i++;
                }
            }
        }
        curr = next;
    }
    newSequences.removeAll(rmSequences);
    sequencesBack.addAll(newSequences);
    it = sequencesBack.iterator();
    if (!it.hasNext()) {
        // guard against an empty set, mirroring the first pass above
        return;
    }
    curr = it.next();
    while (it.hasNext()) {
        IOBehaviour next = it.next();
        if (newSequences.contains(curr)) {
            if (curr.getEnd().compareTo(next.getEnd()) == 0) {
                int i = 0;
                while (true) {
                    if (curr.getSequence().size() == i) {
                        rmSequences.add(curr);
                        break;
                    }
                    int cmpT = curr.getSequence().get(curr.getSequence().size() - i - 1)
                            .compareTo(next.getSequence().get(next.getSequence().size() - i - 1));
                    if (cmpT != 0) {
                        break;
                    }
                    // equal, check the next transition
                    i++;
                }
            }
        }
        curr = next;
    }
}

From source file:com.fluidops.iwb.server.SparqlServlet.java

/**
 * Handle legacy aggregation of tuple queries. Prints the aggregated result as CSV!
 *
 * Condition: tuple query + parameter "input" and "output" are set
 * 
 * Also deals with (optional) parameter "datasets"
 * 
 * This method MUST return true if standard SPARQL processing shall continue. If the legacy
 * code path is applied, results may be written to the output stream directly (and false is returned).
 *  
 * @param res
 * @param req
 * @return
 *          true if the standard SPARQL processing should continue, false otherwise
 * @throws QueryEvaluationException 
 * @throws IOException 
 */
private boolean handleAggregationLegacy(TupleQueryResult res, HttpServletRequest req, ReadDataManager queryDM,
        OutputStream outputStream) throws QueryEvaluationException, IOException {

    String input = req.getParameter("input");
    String output = req.getParameter("output");

    // check the condition
    if (StringUtil.isNullOrEmpty(input) || StringUtil.isNullOrEmpty(output))
        return true;

    String datasets = req.getParameter("datasets");
    String aggregation = req.getParameter("aggregation");

    String[] outputs = output.split(",");

    AggregationType aggType = AggregationType.NONE; // DEFAULT
    if (aggregation != null) {
        if (aggregation.equals("COUNT"))
            aggType = AggregationType.COUNT;
        else if (aggregation.equals("SUM"))
            aggType = AggregationType.SUM;
        else if (aggregation.equals("NONE"))
            aggType = AggregationType.NONE;
        else if (aggregation.equals("AVG"))
            aggType = AggregationType.AVG;
    }

    Map<Value, Vector<Number>> valueMap;
    if (datasets == null) {
        valueMap = queryDM.aggregateQueryResult(res, aggType, input, outputs);
    } else {
        // special handling: we must first group by the values
        // of the datasets parameter before aggregating; this
        // processing scheme supports only a single output variable
        String[] splittedDatasets = datasets.split(",");
        valueMap = queryDM.aggregateQueryResultWrtDatasets(res, aggType, input, outputs[0], splittedDatasets);
    }

    // We need to sort the input again, as the order gets lost when accessing the valueMap
    Set<Value> keySet = valueMap.keySet();
    SortedSet<Value> sortedSet = new TreeSet<Value>(new ValueComparator());
    sortedSet.addAll(keySet);

    // need to write at least one space, as empty results cause errors in charts
    if (sortedSet.isEmpty())
        outputStream.write(" ".getBytes("UTF-8"));

    for (Value val : sortedSet) {
        Vector<Number> vals = valueMap.get(val);
        outputStream.write(val.stringValue().getBytes("UTF-8"));
        for (int i = 0; i < vals.size(); i++) {
            Number n = vals.elementAt(i);
            if (n == null || n.toString() == null)
                outputStream.write(";".getBytes("UTF-8"));
            else
                outputStream.write((";" + n.toString()).getBytes("UTF-8"));
        }
        outputStream.write("\n".getBytes("UTF-8"));
    }

    return false;
}

From source file:net.sourceforge.fenixedu.domain.Lesson.java

private SortedSet<Summary> getSummaries(Comparator<Summary> comparator) {
    SortedSet<Summary> lessonSummaries = new TreeSet<Summary>(comparator);
    lessonSummaries.addAll(getAssociatedSummaries());
    return lessonSummaries;
}

From source file:net.sourceforge.fenixedu.domain.ExecutionDegree.java

public java.util.SortedSet<net.sourceforge.fenixedu.domain.SchoolClass> getSortedSchoolClasses() {
    final SortedSet<SchoolClass> result = new TreeSet<>(SchoolClass.COMPARATOR_BY_NAME);
    result.addAll(getSchoolClassesSet());
    return result;
}

From source file:net.sourceforge.fenixedu.domain.Lesson.java

public SortedSet<YearMonthDay> getAllLessonDatesUntil(YearMonthDay day) {
    SortedSet<YearMonthDay> result = new TreeSet<YearMonthDay>();
    if (day != null) {
        result.addAll(getAllLessonInstanceDatesUntil(day));
        if (!wasFinished()) {
            YearMonthDay startDateToSearch = getLessonStartDay();
            YearMonthDay lessonEndDay = getLessonEndDay();
            YearMonthDay endDateToSearch = (lessonEndDay.isAfter(day)) ? day : lessonEndDay;
            result.addAll(getAllValidLessonDatesWithoutInstancesDates(startDateToSearch, endDateToSearch));
        }
    }
    return result;
}

From source file:org.apache.atlas.hive.hook.HiveHook.java

private void registerProcess(HiveMetaStoreBridge dgiBridge, HiveEventContext event) throws AtlasHookException {
    try {
        Set<ReadEntity> inputs = event.getInputs();
        Set<WriteEntity> outputs = event.getOutputs();

        //Even explain CTAS has operation name as CREATETABLE_AS_SELECT
        if (inputs.isEmpty() && outputs.isEmpty()) {
            LOG.info("Explain statement. Skipping...");
            return;
        }

        if (event.getQueryId() == null) {
            LOG.info("Query id/plan is missing for {}", event.getQueryStr());
        }

        final SortedMap<ReadEntity, Referenceable> source = new TreeMap<>(entityComparator);
        final SortedMap<WriteEntity, Referenceable> target = new TreeMap<>(entityComparator);

        final Set<String> dataSets = new HashSet<>();
        final Set<Referenceable> entities = new LinkedHashSet<>();

        boolean isSelectQuery = isSelectQuery(event);

        // filter out select queries which do not modify data
        if (!isSelectQuery) {

            SortedSet<ReadEntity> sortedHiveInputs = new TreeSet<>(entityComparator);
            if (event.getInputs() != null) {
                sortedHiveInputs.addAll(event.getInputs());
            }

            SortedSet<WriteEntity> sortedHiveOutputs = new TreeSet<>(entityComparator);
            if (event.getOutputs() != null) {
                sortedHiveOutputs.addAll(event.getOutputs());
            }

            for (ReadEntity readEntity : sortedHiveInputs) {
                processHiveEntity(dgiBridge, event, readEntity, dataSets, source, entities);
            }

            for (WriteEntity writeEntity : sortedHiveOutputs) {
                processHiveEntity(dgiBridge, event, writeEntity, dataSets, target, entities);
            }

            if (source.size() > 0 || target.size() > 0) {
                Referenceable processReferenceable = getProcessReferenceable(dgiBridge, event, sortedHiveInputs,
                        sortedHiveOutputs, source, target);
                // setup Column Lineage
                List<Referenceable> sourceList = new ArrayList<>(source.values());
                List<Referenceable> targetList = new ArrayList<>(target.values());
                List<Referenceable> colLineageProcessInstances = new ArrayList<>();
                try {
                    Map<String, Referenceable> columnQNameToRef = ColumnLineageUtils
                            .buildColumnReferenceableMap(sourceList, targetList);
                    colLineageProcessInstances = createColumnLineageProcessInstances(processReferenceable,
                            event.lineageInfo, columnQNameToRef);
                } catch (Exception e) {
                    LOG.warn("Column lineage process setup failed with exception {}", e);
                }
                colLineageProcessInstances.add(0, processReferenceable);
                entities.addAll(colLineageProcessInstances);
                event.addMessage(
                        new HookNotification.EntityUpdateRequest(event.getUser(), new ArrayList<>(entities)));
            } else {
                LOG.info("Skipped query {} since it has no getInputs() or resulting getOutputs()",
                        event.getQueryStr());
            }
        } else {
            LOG.info("Skipped query {} for processing since it is a select query ", event.getQueryStr());
        }
    } catch (Exception e) {
        throw new AtlasHookException("HiveHook.registerProcess() failed.", e);
    }
}

From source file:org.apache.accumulo.core.util.shell.commands.CreateTableCommand.java

@Override
public int execute(final String fullCommand, final CommandLine cl, final Shell shellState)
        throws AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException,
        IOException, ClassNotFoundException {

    final String testTableName = cl.getArgs()[0];

    if (!testTableName.matches(Tables.VALID_NAME_REGEX)) {
        shellState.getReader()
                .println("Only letters, numbers and underscores are allowed for use in table names.");
        throw new IllegalArgumentException();
    }

    final String tableName = cl.getArgs()[0];
    if (shellState.getConnector().tableOperations().exists(tableName)) {
        throw new TableExistsException(null, tableName, null);
    }
    final SortedSet<Text> partitions = new TreeSet<Text>();
    final boolean decode = cl.hasOption(base64Opt.getOpt());

    if (cl.hasOption(createTableOptSplit.getOpt())) {
        final String f = cl.getOptionValue(createTableOptSplit.getOpt());

        String line;
        Scanner file = new Scanner(new File(f), Constants.UTF8.name());
        try {
            while (file.hasNextLine()) {
                line = file.nextLine();
                if (!line.isEmpty())
                    partitions.add(decode ? new Text(Base64.decodeBase64(line.getBytes(Constants.UTF8)))
                            : new Text(line));
            }
        } finally {
            file.close();
        }
    } else if (cl.hasOption(createTableOptCopySplits.getOpt())) {
        final String oldTable = cl.getOptionValue(createTableOptCopySplits.getOpt());
        if (!shellState.getConnector().tableOperations().exists(oldTable)) {
            throw new TableNotFoundException(null, oldTable, null);
        }
        partitions.addAll(shellState.getConnector().tableOperations().listSplits(oldTable));
    }

    if (cl.hasOption(createTableOptCopyConfig.getOpt())) {
        final String oldTable = cl.getOptionValue(createTableOptCopyConfig.getOpt());
        if (!shellState.getConnector().tableOperations().exists(oldTable)) {
            throw new TableNotFoundException(null, oldTable, null);
        }
    }

    TimeType timeType = TimeType.MILLIS;
    if (cl.hasOption(createTableOptTimeLogical.getOpt())) {
        timeType = TimeType.LOGICAL;
    }

    // create table
    shellState.getConnector().tableOperations().create(tableName, true, timeType);
    if (partitions.size() > 0) {
        shellState.getConnector().tableOperations().addSplits(tableName, partitions);
    }

    shellState.setTableName(tableName); // switch shell to new table context

    if (cl.hasOption(createTableNoDefaultIters.getOpt())) {
        for (String key : IteratorUtil.generateInitialTableProperties(true).keySet()) {
            shellState.getConnector().tableOperations().removeProperty(tableName, key);
        }
    }

    // Copy options if flag was set
    if (cl.hasOption(createTableOptCopyConfig.getOpt())) {
        if (shellState.getConnector().tableOperations().exists(tableName)) {
            final Iterable<Entry<String, String>> configuration = shellState.getConnector().tableOperations()
                    .getProperties(cl.getOptionValue(createTableOptCopyConfig.getOpt()));
            for (Entry<String, String> entry : configuration) {
                if (Property.isValidTablePropertyKey(entry.getKey())) {
                    shellState.getConnector().tableOperations().setProperty(tableName, entry.getKey(),
                            entry.getValue());
                }
            }
        }
    }

    if (cl.hasOption(createTableOptEVC.getOpt())) {
        try {
            shellState.getConnector().tableOperations().addConstraint(tableName,
                    VisibilityConstraint.class.getName());
        } catch (AccumuloException e) {
            Shell.log.warn(e.getMessage() + " while setting visibility constraint, but table was created");
        }
    }

    // Load custom formatter if set
    if (cl.hasOption(createTableOptFormatter.getOpt())) {
        final String formatterClass = cl.getOptionValue(createTableOptFormatter.getOpt());

        shellState.getConnector().tableOperations().setProperty(tableName,
                Property.TABLE_FORMATTER_CLASS.toString(), formatterClass);
    }

    return 0;
}