Example usage for java.util Set isEmpty

List of usage examples for java.util Set isEmpty

Introduction

On this page you can find example usages of java.util Set isEmpty.

Prototype

boolean isEmpty();

Document

Returns true if this set contains no elements.
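
A minimal standalone sketch of the method in action (the class name is illustrative):

import java.util.HashSet;
import java.util.Set;

public class SetIsEmptyDemo {
    public static void main(String[] args) {
        Set<String> names = new HashSet<String>();
        System.out.println(names.isEmpty()); // true: no elements yet
        names.add("alice");
        System.out.println(names.isEmpty()); // false: one element now
    }
}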

Usage

From source file:com.tesora.dve.sql.statement.ddl.PEDropTableStatement.java

protected static void compute(SchemaContext pc, Set<CatalogEntity> deletes, Set<CatalogEntity> updates,
        List<TableKey> keys, Set<PEAbstractTable<?>> referencedTables, boolean ignoreFKChecks)
        throws PEException {
    if (keys.isEmpty())
        return;
    if (!deletes.isEmpty())
        return;
    for (TableKey tk : keys) {
        PETable tab = tk.getAbstractTable().asTable();
        appendChildTriggersToCollection(pc, tab, deletes);
        List<PEForeignKey> effectedForeignKeys = new ArrayList<PEForeignKey>();
        if (!tk.isUserlandTemporaryTable())
            checkForeignKeys(pc, tab, effectedForeignKeys, referencedTables, ignoreFKChecks);
        pc.beginSaveContext();
        try {
            if (tk.isUserlandTemporaryTable()) {
                // for userland temp tables, we only need to toss out the temporary table record
                deletes.addAll(pc.getCatalog().findUserlandTemporaryTable(pc.getConnection().getConnectionId(),
                        tab.getDatabaseName(pc).getUnquotedName().get(),
                        tab.getName().getUnquotedName().get()));
            } else {
                deletes.add(tab.persistTree(pc));
                if (tab.isContainerBaseTable(pc)) {
                    PEContainer cont = tab.getDistributionVector(pc).getContainer(pc);
                    List<UserTable> anyTabs = pc.getCatalog().findContainerMembers(cont.getName().get());
                    if (anyTabs.size() == 1) {
                        // last table - this is ok
                        cont.setBaseTable(pc, null);
                        updates.add(cont.persistTree(pc));
                        // we're also going to delete all of the container tenants
                        try {
                            deletes.addAll(
                                    pc.getCatalog().getDAO().findContainerTenants(cont.getPersistent(pc)));
                        } catch (Exception e) {
                            throw new PEException(
                                    "Unable to find container tenants of container " + cont.getName(), e);
                        }
                    } else {
                        // more than one table left - not ok
                        throw new SchemaException(Pass.PLANNER,
                                "Unable to drop table " + tab.getName().getSQL()
                                        + " because it is the base table to container "
                                        + cont.getName().getSQL() + " which is not empty");
                    }
                }
                for (PEForeignKey pefk : effectedForeignKeys) {
                    // this should be persisting the whole table, not the key
                    updates.add(pefk.persistTree(pc));
                }
            }
        } finally {
            pc.endSaveContext();
        }
    }

}

From source file:com.tech.utils.CustomCollectionUtil.java

/**
 * Method to calculate the standard set intersection operation. Example:
 * consider set1 = {1,2,3,4,5} and set2 = {2,4,5,6,7}; then the output of
 * this method will be setIntersection = {2,4,5}.
 *
 * @param itemSet1 the first set
 * @param itemSet2 the second set
 * @return setIntersection
 */
public static Set<Long> setIntersection(Set<Long> itemSet1, Set<Long> itemSet2) {
    Set<Long> setIntersection = new HashSet<Long>();
    /*
     * Perform the set intersection operation only if both sets are
     * non-null and non-empty.
     */
    if (itemSet1 != null && !itemSet1.isEmpty() && itemSet2 != null && !itemSet2.isEmpty()) {
        for (Long item : itemSet1) {
            if (itemSet2.contains(item)) {
                setIntersection.add(item);
            }
        }
    }
    return setIntersection;
}
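
A short driver for the method above, reproducing the javadoc example; it assumes com.tech.utils.CustomCollectionUtil is on the classpath:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import com.tech.utils.CustomCollectionUtil;

public class SetIntersectionDemo {
    public static void main(String[] args) {
        Set<Long> set1 = new HashSet<Long>(Arrays.asList(1L, 2L, 3L, 4L, 5L));
        Set<Long> set2 = new HashSet<Long>(Arrays.asList(2L, 4L, 5L, 6L, 7L));
        // Prints the elements 2, 4 and 5 (HashSet iteration order is unspecified).
        System.out.println(CustomCollectionUtil.setIntersection(set1, set2));
    }
}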

From source file:com.bibisco.manager.SceneTagsManager.java

/**
 * @return a Map as
 * 
 *                 Chapter.1 Chapter.2 Chapter.3
 * - strand.1 -     X         X
 * - strand.2 -               X
 * - strand.3 -     X         X          
 * 
 */
public static Map<String, List<Boolean>> getStrandsChaptersPresence() {

    Map<String, List<Boolean>> lMapStrandChapterPresence = new HashMap<String, List<Boolean>>();

    mLog.debug("Start getStrandsChaptersDistribution()");

    List<com.bibisco.bean.StrandDTO> lListStrandDTO = StrandManager.loadAll();
    List<ChapterDTO> lListChapters = ChapterManager.loadAll();

    if (CollectionUtils.isEmpty(lListStrandDTO) || CollectionUtils.isEmpty(lListChapters)) {
        mLog.debug("End getStrandsChaptersDistribution()");
        return lMapStrandChapterPresence;
    }

    SqlSessionFactory lSqlSessionFactory = SqlSessionFactoryManager.getInstance().getSqlSessionFactoryProject();
    SqlSession lSqlSession = lSqlSessionFactory.openSession();
    try {

        VSceneTagsMapper lVSceneTagsMapper = lSqlSession.getMapper(VSceneTagsMapper.class);
        VSceneTagsExample lVSceneTagsExample = new VSceneTagsExample();
        lVSceneTagsExample.setOrderByClause("chapter_position, id_strand");
        List<VSceneTags> lListVSceneTags = lVSceneTagsMapper.selectByExample(lVSceneTagsExample);

        if (lListVSceneTags != null && lListVSceneTags.size() > 0) {

            Map<Integer, Set<Integer>> lMapStrandsChaptersDistribution = new HashMap<Integer, Set<Integer>>();
            int lIntLastChapter = -1;
            Set<Integer> lSetChapterStrands = null;

            // filter duplicate items using a set
            for (VSceneTags lVSceneTags : lListVSceneTags) {
                if (lVSceneTags.getChapterPosition().intValue() != lIntLastChapter) {
                    lSetChapterStrands = new HashSet<Integer>();
                    lMapStrandsChaptersDistribution.put(lVSceneTags.getChapterPosition(), lSetChapterStrands);
                    lIntLastChapter = lVSceneTags.getChapterPosition();
                }
                if (lVSceneTags.getIdStrand() != null) {
                    lSetChapterStrands.add(lVSceneTags.getIdStrand().intValue());
                }

            }

            // populate result map
            for (StrandDTO lStrandDTO : lListStrandDTO) {
                List<Boolean> lListStrandChapterPresence = new ArrayList<Boolean>();
                lMapStrandChapterPresence.put(lStrandDTO.getIdStrand().toString(), lListStrandChapterPresence);
                for (ChapterDTO lChapterDTO : lListChapters) {
                    Set<Integer> lSetStrands = lMapStrandsChaptersDistribution.get(lChapterDTO.getPosition());
                    if (lSetStrands != null && !lSetStrands.isEmpty()
                            && lSetStrands.contains(lStrandDTO.getIdStrand())) {
                        lListStrandChapterPresence.add(Boolean.TRUE);
                    } else {
                        lListStrandChapterPresence.add(Boolean.FALSE);
                    }
                }
            }
        }

    } catch (Throwable t) {
        mLog.error(t);
        throw new BibiscoException(t, BibiscoException.SQL_EXCEPTION);
    } finally {
        lSqlSession.close();
    }

    mLog.debug("End getStrandsChaptersDistribution()");

    return lMapStrandChapterPresence;
}

From source file:com.hp.octane.integrations.uft.UftTestDispatchUtils.java

public static Map<String, Entity> getDataTablesFromServer(EntitiesService entitiesService, long workspaceId,
        long scmRepositoryId, Set<String> allNames) {
    List<String> conditions = new ArrayList<>();
    if (allNames != null && !allNames.isEmpty()) {
        String byPathCondition = QueryHelper.conditionIn(EntityConstants.ScmResourceFile.NAME_FIELD, allNames,
                false);

        //The query string is part of the URL, and some servers limit request size to 4K.
        //Here we cap the name condition at 3K; if it exceeds that, we fetch all entries instead.
        if (byPathCondition.length() < QUERY_CONDITION_SIZE_THRESHOLD) {
            conditions.add(byPathCondition);
        }
    }

    String conditionByScmRepository = QueryHelper
            .conditionRef(EntityConstants.ScmResourceFile.SCM_REPOSITORY_FIELD, scmRepositoryId);
    conditions.add(conditionByScmRepository);

    List<String> dataTablesFields = Arrays.asList(EntityConstants.ScmResourceFile.ID_FIELD,
            EntityConstants.ScmResourceFile.NAME_FIELD, EntityConstants.ScmResourceFile.RELATIVE_PATH_FIELD);
    List<Entity> octaneDataTables = entitiesService.getEntities(workspaceId,
            EntityConstants.ScmResourceFile.COLLECTION_NAME, conditions, dataTablesFields);

    Map<String, Entity> octaneDataTablesMap = new HashMap<>();
    for (Entity dataTable : octaneDataTables) {
        octaneDataTablesMap.put(dataTable.getStringValue(EntityConstants.ScmResourceFile.RELATIVE_PATH_FIELD),
                dataTable);
    }

    return octaneDataTablesMap;
}

From source file:com.netflix.genie.server.repository.jpa.CommandSpecs.java

/**
 * Get all the commands given the specified parameters.
 *
 * @param applicationId The id of the application that is registered with these commands
 * @param statuses The status of the commands
 * @return The specification
 */
public static Specification<Command> findCommandsForApplication(final String applicationId,
        final Set<CommandStatus> statuses) {
    return new Specification<Command>() {
        @Override
        public Predicate toPredicate(final Root<Command> root, final CriteriaQuery<?> cq,
                final CriteriaBuilder cb) {
            final List<Predicate> predicates = new ArrayList<>();
            final Join<Command, Application> application = root.join(Command_.application);

            predicates.add(cb.equal(application.get(Application_.id), applicationId));

            if (statuses != null && !statuses.isEmpty()) {
                //Could optimize this: the size is known, so a native array could be used
                final List<Predicate> orPredicates = new ArrayList<>();
                for (final CommandStatus status : statuses) {
                    orPredicates.add(cb.equal(root.get(Command_.status), status));
                }
                predicates.add(cb.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
            }

            return cb.and(predicates.toArray(new Predicate[predicates.size()]));
        }
    };
}
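
A usage sketch with Spring Data JPA: the CommandRepository interface is hypothetical (assumed to extend JpaSpecificationExecutor<Command>), and CommandStatus is assumed to define an ACTIVE constant:

import java.util.EnumSet;
import java.util.List;

public class CommandLookupDemo {

    private final CommandRepository commandRepository; // hypothetical repository type

    public CommandLookupDemo(CommandRepository commandRepository) {
        this.commandRepository = commandRepository;
    }

    public List<Command> activeCommandsFor(String applicationId) {
        // The specification translates to roughly:
        // WHERE application.id = :applicationId AND status = ACTIVE
        return commandRepository.findAll(
                CommandSpecs.findCommandsForApplication(applicationId,
                        EnumSet.of(CommandStatus.ACTIVE)));
    }
}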

From source file:com.reactive.hzdfs.utils.EntityFinder.java

/**
 * Scan the classpath for candidate component classes.
 *
 * @param provider the configured classpath scanner
 * @param basePkg the base package to scan; falls back to this class's package if blank
 * @return the set of classes found
 * @throws ClassNotFoundException if scanning fails or a candidate class cannot be loaded
 */
private static Set<Class<?>> findComponents(ClassPathScanningCandidateComponentProvider provider,
        String basePkg) throws ClassNotFoundException {
    Set<BeanDefinition> beans = null;
    String pkg = "";
    try {
        pkg = StringUtils.hasText(basePkg) ? basePkg : EntityFinder.class.getPackage().getName();
        beans = provider.findCandidateComponents(pkg);
    } catch (Exception e) {
        throw new ClassNotFoundException("Unable to scan for classes under given base package",
                new IllegalArgumentException("Package=> " + pkg, e));
    }

    Set<Class<?>> classes = new HashSet<>();
    if (beans != null && !beans.isEmpty()) {
        classes = new HashSet<>(beans.size());
        for (BeanDefinition bd : beans) {
            classes.add(Class.forName(bd.getBeanClassName()));
        }
    } else {
        log.warn(">> Did not find any classes under the given base package [" + basePkg + "]");
    }
    return classes;
}
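
For context, a sketch of configuring the scanner that gets passed in, using standard Spring APIs; the include filter and base package here are illustrative, not taken from the original source:

import java.util.Set;

import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.type.filter.AnnotationTypeFilter;

public class ComponentScanDemo {
    public static void main(String[] args) {
        // A provider with no default filters; include only @Deprecated-annotated classes.
        ClassPathScanningCandidateComponentProvider provider =
                new ClassPathScanningCandidateComponentProvider(false);
        provider.addIncludeFilter(new AnnotationTypeFilter(Deprecated.class));
        Set<BeanDefinition> beans = provider.findCandidateComponents("com.example");
        for (BeanDefinition bd : beans) {
            System.out.println(bd.getBeanClassName());
        }
    }
}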

From source file:net.lldp.checksims.util.PairGenerator.java

/**
 * Generate all pairs for normal submissions, and pairs for archive submissions to compare to normal submissions.
 *
 * @param submissions Normal submissions - compared to each other and archive submissions
 * @param archiveSubmissions Archive submissions - only compared to normal submissions, not each other
 * @return Set of all unordered pairs required for comparison with archive directory
 */
public static Set<Pair<Submission, Submission>> generatePairsWithArchive(Set<Submission> submissions,
        Set<Submission> archiveSubmissions) {
    checkNotNull(submissions);
    checkNotNull(archiveSubmissions);

    // TODO it may be desirable to allow comparison of a single submission to an archive
    // However, generatePairs fails if only 1 submission is given
    // (This would also require tweaks in the frontend)
    Set<Pair<Submission, Submission>> basePairs = generatePairs(submissions);

    // If we have no archive submissions, just return the same result generatePairs would
    if (archiveSubmissions.isEmpty()) {
        return basePairs;
    }

    // Now we need to add pairs for the archive submissions
    List<Submission> remaining = new ArrayList<>();
    remaining.addAll(archiveSubmissions);

    // Loop through each archive submission
    while (!remaining.isEmpty()) {
        Submission first = remaining.get(0);
        remaining.remove(0);

        // For each archive submission, generate pairs for each normal submission
        for (Submission s : submissions) {
            Pair<Submission, Submission> pair = Pair.of(first, s);
            Pair<Submission, Submission> reversed = Pair.of(s, first);

            // Something's wrong, we've made a duplicate pair (but reversed)
            // Should never happen
            if (basePairs.contains(reversed)) {
                throw new RuntimeException("Internal error in pair generation: duplicate pair produced!");
            }

            // One pair for each normal submission, consisting of the archive submission and the normal submission
            basePairs.add(pair);
        }
    }

    return basePairs;
}

From source file:com.google.publicalerts.cap.CapUtil.java

private static void getRepeatedFieldNamesInternal(Descriptor d, Map<String, Set<String>> result) {
    Set<String> repeatedFields = Sets.newHashSet();
    for (FieldDescriptor fd : d.getFields()) {
        if (fd.isRepeated()) {
            repeatedFields.add(javaCase(fd.getName()));
        }
        if (fd.getType() == FieldDescriptor.Type.MESSAGE) {
            getRepeatedFieldNamesInternal(fd.getMessageType(), result);
        }
    }
    if (!repeatedFields.isEmpty()) {
        result.put(javaCase(d.getName()), repeatedFields);
    }
}

From source file:com.ibm.bi.dml.runtime.controlprogram.parfor.opt.OptimizationWrapper.java

/**
 * 
 * @param otype
 * @param ck
 * @param cm
 * @param sb
 * @param pb
 * @param ec
 * @param monitor
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
@SuppressWarnings("unused")
private static void optimize(POptMode otype, int ck, double cm, ParForStatementBlock sb, ParForProgramBlock pb,
        ExecutionContext ec, boolean monitor) throws DMLRuntimeException, DMLUnsupportedOperationException {
    Timing time = new Timing(true);

    //maintain statistics
    if (DMLScript.STATISTICS)
        Statistics.incrementParForOptimCount();

    //create specified optimizer
    Optimizer opt = createOptimizer(otype);
    CostModelType cmtype = opt.getCostModelType();
    LOG.trace("ParFOR Opt: Created optimizer (" + otype + "," + opt.getPlanInputType() + ","
            + opt.getCostModelType());

    if (cmtype == CostModelType.RUNTIME_METRICS //TODO remove check when perftesttool supported
            && !ALLOW_RUNTIME_COSTMODEL) {
        throw new DMLRuntimeException(
                "ParFOR Optimizer " + otype + " requires cost model " + cmtype + " that is not suported yet.");
    }

    OptTree tree = null;

    //recompile parfor body 
    if (OptimizerUtils.ALLOW_DYN_RECOMPILATION) {
        ForStatement fs = (ForStatement) sb.getStatement(0);

        //debug output before recompilation
        if (LOG.isDebugEnabled()) {
            try {
                tree = OptTreeConverter.createOptTree(ck, cm, opt.getPlanInputType(), sb, pb, ec);
                LOG.debug("ParFOR Opt: Input plan (before recompilation):\n" + tree.explain(false));
                OptTreeConverter.clear();
            } catch (Exception ex) {
                throw new DMLRuntimeException("Unable to create opt tree.", ex);
            }
        }

        //constant propagation into parfor body
        //(input scalars to parfor are guaranteed read-only, but we need to ensure safe replacement across multiple reoptimizations;
        //separate propagation is required because we recompile in-place without literal replacement)
        try {
            LocalVariableMap constVars = ProgramRecompiler.getReusableScalarVariables(sb.getDMLProg(), sb,
                    ec.getVariables());
            ProgramRecompiler.replaceConstantScalarVariables(sb, constVars);
        } catch (Exception ex) {
            throw new DMLRuntimeException(ex);
        }

        //program rewrites (e.g., constant folding, branch removal) according to replaced literals
        try {
            ProgramRewriter rewriter = createProgramRewriterWithRuleSets();
            ProgramRewriteStatus state = new ProgramRewriteStatus();
            rewriter.rewriteStatementBlockHopDAGs(sb, state);
            fs.setBody(rewriter.rewriteStatementBlocks(fs.getBody(), state));
            if (state.getRemovedBranches()) {
                LOG.debug("ParFOR Opt: Removed branches during program rewrites, rebuilding runtime program");
                pb.setChildBlocks(
                        ProgramRecompiler.generatePartitialRuntimeProgram(pb.getProgram(), fs.getBody()));
            }
        } catch (Exception ex) {
            throw new DMLRuntimeException(ex);
        }

        //recompilation of parfor body and called functions (if safe)
        try {
            //core parfor body recompilation (based on symbol table entries)
            //* clone of variables in order to allow for statistics propagation across DAGs
            //(tid=0, because deep copies created after opt)
            LocalVariableMap tmp = (LocalVariableMap) ec.getVariables().clone();
            Recompiler.recompileProgramBlockHierarchy(pb.getChildBlocks(), tmp, 0, true);

            //inter-procedural optimization (based on previous recompilation)
            if (pb.hasFunctions()) {
                InterProceduralAnalysis ipa = new InterProceduralAnalysis();
                Set<String> fcand = ipa.analyzeSubProgram(sb);

                if (!fcand.isEmpty()) {
                    //regenerate runtime program of modified functions
                    for (String func : fcand) {
                        String[] funcparts = DMLProgram.splitFunctionKey(func);
                        FunctionProgramBlock fpb = pb.getProgram().getFunctionProgramBlock(funcparts[0],
                                funcparts[1]);
                        //reset recompilation flags according to recompileOnce: this is only safe if the function is recompileOnce,
                        //because it is then recompiled for every execution (otherwise there are potential issues if the function is also called outside the parfor)
                        Recompiler.recompileProgramBlockHierarchy(fpb.getChildBlocks(), new LocalVariableMap(),
                                0, fpb.isRecompileOnce());
                    }
                }
            }
        } catch (Exception ex) {
            throw new DMLRuntimeException(ex);
        }
    }

    //create opt tree (before optimization)
    try {
        tree = OptTreeConverter.createOptTree(ck, cm, opt.getPlanInputType(), sb, pb, ec);
        LOG.debug("ParFOR Opt: Input plan (before optimization):\n" + tree.explain(false));
    } catch (Exception ex) {
        throw new DMLRuntimeException("Unable to create opt tree.", ex);
    }

    //create cost estimator
    CostEstimator est = createCostEstimator(cmtype);
    LOG.trace("ParFOR Opt: Created cost estimator (" + cmtype + ")");

    //core optimize
    opt.optimize(sb, pb, tree, est, ec);
    LOG.debug("ParFOR Opt: Optimized plan (after optimization): \n" + tree.explain(false));

    //assert plan correctness
    if (CHECK_PLAN_CORRECTNESS && LOG.isDebugEnabled()) {
        try {
            OptTreePlanChecker.checkProgramCorrectness(pb, sb, new HashSet<String>());
            LOG.debug("ParFOR Opt: Checked plan and program correctness.");
        } catch (Exception ex) {
            throw new DMLRuntimeException("Failed to check program correctness.", ex);
        }
    }

    long ltime = (long) time.stop();
    LOG.trace("ParFOR Opt: Optimized plan in " + ltime + "ms.");
    if (DMLScript.STATISTICS)
        Statistics.incrementParForOptimTime(ltime);

    //cleanup phase
    OptTreeConverter.clear();

    //monitor stats
    if (monitor) {
        StatisticMonitor.putPFStat(pb.getID(), Stat.OPT_OPTIMIZER, otype.ordinal());
        StatisticMonitor.putPFStat(pb.getID(), Stat.OPT_NUMTPLANS, opt.getNumTotalPlans());
        StatisticMonitor.putPFStat(pb.getID(), Stat.OPT_NUMEPLANS, opt.getNumEvaluatedPlans());
    }
}

From source file:com.impetus.kundera.metadata.MetadataUtils.java

/**
 * Creates a string representation of a set of foreign keys by joining
 * them with the "~" character.
 * 
 * Note: the assumption is that @Id will never contain the "~" character.
 * Checks for this are not added yet.
 * 
 * @param foreignKeys
 *            the foreign keys
 * @return the string
 */
public static String serializeKeys(Set<String> foreignKeys) {
    if (null == foreignKeys || foreignKeys.isEmpty()) {
        return null;
    }

    StringBuilder sb = new StringBuilder();
    for (String key : foreignKeys) {
        if (sb.length() > 0) {
            sb.append(Constants.FOREIGN_KEY_SEPARATOR);
        }
        sb.append(key);
    }
    return sb.toString();
}
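
A usage sketch for the method above, assuming Constants.FOREIGN_KEY_SEPARATOR is the "~" character mentioned in the javadoc and that com.impetus.kundera is on the classpath:

import java.util.LinkedHashSet;
import java.util.Set;

import com.impetus.kundera.metadata.MetadataUtils;

public class SerializeKeysDemo {
    public static void main(String[] args) {
        Set<String> keys = new LinkedHashSet<String>(); // keeps insertion order
        keys.add("order-1");
        keys.add("order-2");
        System.out.println(MetadataUtils.serializeKeys(keys)); // "order-1~order-2"
        System.out.println(MetadataUtils.serializeKeys(null)); // null for empty input
    }
}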