Example usage for java.util HashSet isEmpty

List of usage examples for java.util HashSet isEmpty

Introduction

On this page you can find example usage for java.util HashSet isEmpty.

Prototype

public boolean isEmpty() 

Document

Returns true if this set contains no elements.
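
Before the full examples, here is a minimal, self-contained sketch of the call (the class and variable names are illustrative, not taken from the source files below):

import java.util.HashSet;

public class IsEmptyDemo {
    public static void main(String[] args) {
        HashSet<String> tags = new HashSet<>();
        System.out.println(tags.isEmpty()); // true: the set has no elements

        tags.add("alpha");
        System.out.println(tags.isEmpty()); // false: the set now has one element

        tags.clear();
        System.out.println(tags.isEmpty()); // true again after clear()
    }
}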

Usage

From source file:amie.keys.CombinationsExplorationNew.java

public static void discoverConditionalKeysForCondition(GraphNew newGraph, GraphNew graph,
        HashSet<Node> candidateKeys, Rule conditionRule) {
    HashSet<Node> newCandidateKeys = new HashSet<>();
    for (Node candidateKey : candidateKeys) {
        if (candidateKey.toExplore) {
            List<String> properties = candidateKey.mapToString(id2Property);
            Rule amieRule = buildAMIERule(properties, conditionRule);
            boolean isConditionalKey = isConditionaKey(amieRule);

            if (amieRule.getSupport() >= support && !isConditionalKey) {
                if (!newGraph.graph.containsKey(candidateKey)) {
                    Node newCandidateKey = candidateKey.clone();
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(newCandidateKey, children);
                    newGraph.nodes.put(newCandidateKey, newCandidateKey);
                    newCandidateKeys.add(newCandidateKey);
                } else {
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(candidateKey, children);
                    newCandidateKeys.add(candidateKey);
                }
            }

            // If the rule is a conditional key above the support threshold
            // and no simpler key has already been discovered,
            // then output it.
            if (isConditionalKey && amieRule.getSupport() >= support
                    && !isSubsumedByKey(amieRule, conditionRule, conditions2Keys)) {
                if (!newGraph.graph.containsKey(candidateKey)) {
                    Node newCandidateKey = candidateKey.clone();
                    System.out.println(Utilities.formatKey(amieRule));
                    conditions2Keys.put(conditionRule, amieRule);
                    newCandidateKey.toExplore = false;
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(newCandidateKey, children);
                    newGraph.nodes.put(newCandidateKey, newCandidateKey);
                    newCandidateKeys.add(newCandidateKey);
                } else {
                    System.out.println(Utilities.formatKey(amieRule));
                    conditions2Keys.put(conditionRule, amieRule);
                    candidateKey.toExplore = false;
                    HashSet<Node> children = new HashSet<>();
                    newGraph.graph.put(candidateKey, children);
                    newGraph.nodes.put(candidateKey, candidateKey);
                    newCandidateKeys.add(candidateKey);
                }
            }
        } else {
            //System.out.println("Case 5");
            newCandidateKeys.add(candidateKey);
        }
    }
    //   System.out.println("newGraphBefore:" + newGraph);

    // createChildren
    HashSet<Node> allChildren = new HashSet<>();
    // System.out.println("newCandidateKeys:"+newCandidateKeys);
    for (Node parent1 : newCandidateKeys) {
        //    System.out.println("parent1:"+parent1);
        for (Node parent2 : newCandidateKeys) {
            if (parent1 != parent2 && parent1.toExplore && parent2.toExplore) {
                HashSet<Integer> newSet = new HashSet<>();
                newSet.addAll(parent1.set);
                newSet.addAll(parent2.set);
                HashSet<Integer> condProp_KeyProp = new HashSet<>();
                condProp_KeyProp.addAll(newSet);
                condProp_KeyProp.addAll(getRelations(conditionRule, property2Id));
                //     System.out.println("newSet:" + newSet);
                if ((newSet.size() == parent1.set.size() + 1) && (getSupport(newSet, conditionRule, support))
                        && GraphNew.containsASuperSetOf(CombinationsExplorationNew.nonKeysInt,
                                condProp_KeyProp) != -1) {
                    //      System.out.println("enters");
                    Node child = new Node(newSet);
                    if (hasFalseParent(newSet, newCandidateKeys)) {
                        child.toExplore = false;
                    }
                    HashSet<Node> children1 = newGraph.graph.get(parent1);
                    children1.add(child);
                    newGraph.graph.put(parent1, children1);
                    newGraph.nodes.put(child, child);
                    HashSet<Node> grandChildren = new HashSet<>();
                    newGraph.graph.put(child, grandChildren);
                    HashSet<Node> children2 = newGraph.graph.get(parent2);
                    children2.add(child);
                    newGraph.graph.put(parent2, children2);
                    allChildren.add(child);
                }
            }
        }
    }
    if (!allChildren.isEmpty()) {
        discoverConditionalKeysForCondition(newGraph, newGraph, allChildren, conditionRule);
    }
}

From source file:it.iit.genomics.cru.igb.bundles.mi.business.MIWorker.java

/**
 * Get the symmetries at the same position on the main sequence, i.e. get
 * the genes corresponding to the selected symmetries.
 *
 * @param selectedSymmetries
 * @param seq
 * @param ignoreIfNoProtein
 */
private void getGenes(ArrayList<SeqSymmetry> selectedSymmetries, BioSeq seq, boolean ignoreIfNoProtein) {

    // Don't map residues if they cover a full gene
    boolean skipSearchResidues = false;

    MapOfMap<String, MoleculeEntry> proteins = new MapOfMap<>();

    HashSet<String> searchGeneNames = new HashSet<>();
    HashSet<String> searchRefSeq = new HashSet<>();
    HashSet<String> searchEnsembl = new HashSet<>();

    MapOfMap<SeqSymmetry, MIGene> candidates = geneManager.getBySymList(seq, selectedSymmetries);

    /**
     * TODO : may be more than one!!!
     */
    for (SeqSymmetry sym : candidates.keySet()) {
        logAndPublish("map " + sym.getID());
        for (MIGene gene : candidates.get(sym)) {

            String refseqPattern = "[A-Z]{2}_[0-9]+";
            if (gene.getID().matches(refseqPattern)) {
                searchRefSeq.add(gene.getID());
            } else if (gene.getID().startsWith("ENS")) {
                searchEnsembl.add(gene.getID());
            } else {
                searchGeneNames.add(gene.getID());
            }
        }

        try {
            if (!searchRefSeq.isEmpty()) {
                proteins.merge(UniprotkbUtils.getInstance(query.getTaxid())
                        .getUniprotEntriesFromRefSeqs(searchRefSeq));

            }

            if (!searchEnsembl.isEmpty()) {
                proteins.merge(UniprotkbUtils.getInstance(query.getTaxid())
                        .getUniprotEntriesFromEnsembl(searchEnsembl));
            }

            if (!searchGeneNames.isEmpty()) {
                proteins.merge(UniprotkbUtils.getInstance(query.getTaxid())
                        .getUniprotEntriesFromGenes(searchGeneNames));
            }
        } catch (BridgesRemoteAccessException be) {
            igbLogger.severe("Cannot access Uniprot!");
        }

        for (MIGene gene : candidates.get(sym)) {
            // Try to find the best protein,
            // e.g. Swissprot rather than Trembl
            MoleculeEntry protein = null;

            for (MoleculeEntry uniprotProtein : proteins.get(gene.getID())) {
                if (protein == null) {
                    protein = uniprotProtein;
                } else {
                    if (!protein.isSwissprot() && uniprotProtein.isSwissprot()) {
                        protein = uniprotProtein;
                    }
                }
            }

            if (protein == null) {
                igbLogger.warning("No protein for gene " + gene.getID());
            } else {
                miGene2selectedSyms.add(gene, sym);

                gene.getUniprotAcs().add(protein.getUniprotAc());
                gene.setProtein(protein);

            }
        }

    }

}

From source file:nl.umcg.westrah.binarymetaanalyzer.BinaryMetaAnalysis.java

private void createProbeIndex(String outdir) throws IOException {

    HashSet<String> confineToTheseProbes = null;

    HashSet<String> probePreselection = null;
    if (settings.getSNPProbeSelection() != null) {
        System.out.println(
                "Getting Probes from SNP/Probe selection file: " + settings.getSNPProbeSelection());
        probePreselection = new HashSet<String>();
        TextFile tf = new TextFile(settings.getSNPProbeSelection(), TextFile.R);
        String[] elems = tf.readLineElems(TextFile.tab);
        while (elems != null) {
            if (elems.length >= 2) {
                String probe = elems[1];
                probePreselection.add(probe);
            }
            elems = tf.readLineElems(TextFile.tab);
        }
        tf.close();
        System.out.println(
                "Found " + probePreselection.size() + " unique probes in SNP/Probe selection file.");
        if (probePreselection.isEmpty()) {
            System.err.println("Error: SNP/Probe selection file defined, but no Probes found.");
            System.exit(-1);
        }
    }

    if (settings.getProbeselection() != null) {
        System.out.println("Selecting Probes from file: " + settings.getProbeselection());
        confineToTheseProbes = new HashSet<String>();
        TextFile tf = new TextFile(settings.getProbeselection(), TextFile.R);

        if (probePreselection == null) {
            confineToTheseProbes.addAll(tf.readAsArrayList());
        } else {
            ArrayList<String> confineTMP = tf.readAsArrayList();
            for (String p : confineTMP) {
                if (probePreselection.contains(p)) {
                    confineToTheseProbes.add(p);
                }
            }
        }
        tf.close();
        System.out.println(confineToTheseProbes.size() + " Probes loaded.");
    } else if (probePreselection != null) {
        confineToTheseProbes = probePreselection;
    }

    System.out.println("");

    // TODO: write probe list of probes that we didn't find in the annotation

    probeIndex = new Integer[traitList.length][datasets.length];

    for (int d = 0; d < datasets.length; d++) {
        String[] probes = datasets[d].getProbeList();
        int platformId = probeAnnotation.getPlatformId(datasets[d].getPlatform());

        HashMap<String, MetaQTL4MetaTrait> traitHashForPlatform = probeAnnotation
                .getTraitHashForPlatform(platformId);
        for (int p = 0; p < probes.length; p++) {

            MetaQTL4MetaTrait t = traitHashForPlatform.get(probes[p]);
            if (t != null) {
                int index = traitMap.get(t);
                //               if (confineToTheseProbes == null || confineToTheseProbes.contains(probes[p]) || confineToTheseProbes.contains(t.getMetaTraitName())) {
                if (confineToTheseProbes == null || confineToTheseProbes.contains(t.getMetaTraitName())) {
                    // TODO: was there a reason we selected specific platform probes/identifiers?
                    probeIndex[index][d] = p;
                }
            }
        }
    }

    System.out.println("");

    TextFile out = new TextFile(outdir + "probeindex.txt", TextFile.W);

    String header = "metaID";
    for (int d = 0; d < datasets.length; d++) {
        header += "\t" + datasets[d].getName() + "-pid\t" + datasets[d].getName() + "-probename";
    }
    out.writeln(header);
    for (int p = 0; p < probeIndex.length; p++) {

        String lnout = "" + traitList[p].getMetaTraitId();
        for (int d = 0; d < datasets.length; d++) {
            Integer pid = probeIndex[p][d];
            String probeName = null;
            if (pid != null) {
                probeName = datasets[d].getProbeList()[pid];
            }
            lnout += "\t" + pid + "\t" + probeName;
        }

        out.writeln(lnout);
    }

    out.close();
}

From source file:xc.mst.services.marcaggregation.MarcAggregationService.java

/**
 * Search to see if there are multiple input records for this given output
 * record; in any event, add the predecessor(s) to the output record.
 */
@Override
protected void addPredecessor(Record in, Record out) {
    HashSet<Long> set = allBibRecordsO2Imap.get(out.getId());
    if (set == null || set.isEmpty()) {
        // picks up the holding records
        LOG.debug("addPredecessor, empty or null set for record id: " + out.getId());
        out.addPredecessor(in);
    } else {
        LOG.debug("addPredecessor, will add, for record id: " + out.getId());
        for (Long in_rec : set) {
            Record r = inputRepo.getRecord(in_rec);
            if (r != null) {
                out.addPredecessor(r);
            }
        }
    }
}
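
Note the set == null || set.isEmpty() guard above: calling isEmpty() alone would throw a NullPointerException when the map has no entry for the output record's id, so the null check must come first.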

From source file:it.iit.genomics.cru.structures.bridges.uniprot.UniprotkbUtils.java

/**
 *
 * @param xrefs
 * @param filterTaxid
 * @return
 * @throws BridgesRemoteAccessException
 */
public HashMap<String, MoleculeEntry> getUniprotEntriesFromUniprotAccessions(Collection<String> xrefs,
        boolean filterTaxid) throws BridgesRemoteAccessException {
    String tool = UNIPROT_TOOL;

    // remove xrefs that are not uniprotAcs
    Collection<String> uniprotAcs = getUniprotAcs(xrefs);

    HashMap<String, MoleculeEntry> results = new HashMap<>();

    HashSet<String> ref2get = new HashSet<>();

    try {
        for (String ref : uniprotAcs) {
            if (cache.containsKey(ref.toUpperCase())) {
                results.put(ref, cache.get(ref.toUpperCase()).iterator().next());
            } else {
                ref2get.add(ref);

                // if size == limit, do query
                if (ref2get.size() == maxQueries) {
                    String location = UNIPROT_SERVER + tool + "/?" + "query=(accession:"
                            + URLEncoder.encode(StringUtils.join(ref2get, " OR accession:") + "", "UTF-8")
                            + ")";
                    if (filterTaxid) {
                        location += "+AND+keyword:181+AND+organism:"
                                + URLEncoder.encode("\"" + taxid + "\"", "UTF-8");
                    }

                    Collection<MoleculeEntry> uniprotEntries = getUniprotEntriesXML(location);

                    for (MoleculeEntry entry : uniprotEntries) {
                        results.put(entry.getUniprotAc(), entry);
                    }
                    ref2get.clear();
                }
            }
        }

        if (ref2get.isEmpty()) {
            return results;
        }

        String location = UNIPROT_SERVER + tool + "/?" + "query=(accession:"
                + URLEncoder.encode(StringUtils.join(ref2get, " OR accession:") + "", "UTF-8") + ")";
        if (filterTaxid) {
            location += "+AND+keyword:181+AND+organism:" + URLEncoder.encode("\"" + taxid + "\"", "UTF-8");
        }
        Collection<MoleculeEntry> uniprotEntries = getUniprotEntriesXML(location);

        for (MoleculeEntry entry : uniprotEntries) {
            results.put(entry.getUniprotAc(), entry);
        }

    } catch (UnsupportedEncodingException e) {
        logger.error("cannot get proteins for " + StringUtils.join(xrefs, ", "), e);
    }

    return results;
}
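
The ref2get set above implements a batch-and-flush pattern: accessions accumulate until the batch is full, each full batch is sent as one query, and isEmpty() decides at the end whether a final partial batch still needs to be sent. A minimal sketch of the same idea follows; BATCH_SIZE and flush are illustrative stand-ins, not part of the UniprotkbUtils API:

import java.util.HashSet;
import java.util.List;

public class BatchFlushSketch {
    static final int BATCH_SIZE = 3; // illustrative limit, plays the role of maxQueries

    public static void process(List<String> ids) {
        HashSet<String> batch = new HashSet<>();
        for (String id : ids) {
            batch.add(id);
            if (batch.size() == BATCH_SIZE) {
                flush(batch);  // send a full batch
                batch.clear(); // start accumulating the next one
            }
        }
        if (!batch.isEmpty()) { // a final, partially filled batch remains
            flush(batch);
        }
    }

    private static void flush(HashSet<String> batch) {
        System.out.println("querying batch " + batch);
    }
}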

From source file:org.unitime.timetable.solver.TimetableSolver.java

public Hashtable conflictInfo(Collection hints) {
    Hashtable conflictTable = new Hashtable();
    Lock lock = currentSolution().getLock().readLock();
    lock.lock();
    try {
        HashSet done = new HashSet();
        for (Iterator i = hints.iterator(); i.hasNext();) {
            Hint hint = (Hint) i.next();
            Placement p = hint.getPlacement((TimetableModel) currentSolution().getModel());
            if (p == null)
                continue;
            for (Constraint constraint : p.variable().hardConstraints()) {
                HashSet conflicts = new HashSet();
                constraint.computeConflicts(currentSolution().getAssignment(), p, conflicts);
                if (conflicts != null && !conflicts.isEmpty()) {
                    for (Iterator j = conflicts.iterator(); j.hasNext();) {
                        Placement conflict = (Placement) j.next();
                        Hint confHint = new Hint(this, conflict);
                        if (done.contains(confHint))
                            continue;
                        if (!conflictTable.containsKey(confHint)) {
                            String name = constraint.getName();
                            if (constraint instanceof RoomConstraint) {
                                name = "Room " + constraint.getName();
                            } else if (constraint instanceof InstructorConstraint) {
                                name = "Instructor " + constraint.getName();
                            } else if (constraint instanceof GroupConstraint) {
                                name = "Distribution " + constraint.getName();
                            } else if (constraint instanceof DepartmentSpreadConstraint) {
                                name = "Balancing of department " + constraint.getName();
                            } else if (constraint instanceof SpreadConstraint) {
                                name = "Same subpart spread " + constraint.getName();
                            } else if (constraint instanceof ClassLimitConstraint) {
                                name = "Class limit " + constraint.getName();
                            }
                            conflictTable.put(confHint, name);
                        }
                    }
                }
            }
            done.add(hint);
        }
    } finally {
        lock.unlock();
    }
    return conflictTable;
}

From source file:org.apache.hadoop.hdfs.DataStreamer.java

private boolean[] getPinnings(DatanodeInfo[] nodes, boolean shouldLog) {
    if (favoredNodes == null) {
        return null;
    } else {
        boolean[] pinnings = new boolean[nodes.length];
        HashSet<String> favoredSet = new HashSet<String>(Arrays.asList(favoredNodes));
        for (int i = 0; i < nodes.length; i++) {
            pinnings[i] = favoredSet.remove(nodes[i].getXferAddrWithHostname());
            if (LOG.isDebugEnabled()) {
                LOG.debug(nodes[i].getXferAddrWithHostname() + " was chosen by name node (favored="
                        + pinnings[i] + ").");
            }
        }
        if (shouldLog && !favoredSet.isEmpty()) {
            // There are one or more favored nodes that were not allocated.
            LOG.warn("These favored nodes were specified but not chosen: " + favoredSet
                    + " Specified favored nodes: " + Arrays.toString(favoredNodes));

        }
        return pinnings;
    }
}
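
Here isEmpty() serves as a leftover check: every favored node that was actually chosen is removed from favoredSet, so a non-empty set after the loop means some favored nodes were specified but never allocated, which is exactly when the warning is logged.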

From source file:org.apache.hadoop.hive.llap.cli.LlapServiceDriver.java

private int run(String[] args) throws Exception {
    LlapOptionsProcessor optionsProcessor = new LlapOptionsProcessor();
    final LlapOptions options = optionsProcessor.processOptions(args);

    final Properties propsDirectOptions = new Properties();

    if (options == null) {
        // help
        return 1;
    }

    // Working directory.
    Path tmpDir = new Path(options.getDirectory());

    if (conf == null) {
        throw new Exception("Cannot load any configuration to run command");
    }

    final long t0 = System.nanoTime();

    final FileSystem fs = FileSystem.get(conf);
    final FileSystem lfs = FileSystem.getLocal(conf).getRawFileSystem();

    int threadCount = Math.max(1, Runtime.getRuntime().availableProcessors() / 2);
    final ExecutorService executor = Executors.newFixedThreadPool(threadCount,
            new ThreadFactoryBuilder().setNameFormat("llap-pkg-%d").build());
    final CompletionService<Void> asyncRunner = new ExecutorCompletionService<Void>(executor);

    int rc = 0;
    try {

        // needed so that the file is actually loaded into configuration.
        for (String f : NEEDED_CONFIGS) {
            conf.addResource(f);
            if (conf.getResource(f) == null) {
                throw new Exception("Unable to find required config file: " + f);
            }
        }
        for (String f : OPTIONAL_CONFIGS) {
            conf.addResource(f);
        }

        conf.reloadConfiguration();

        populateConfWithLlapProperties(conf, options.getConfig());

        if (options.getName() != null) {
            // update service registry configs - caveat: this has nothing to do with the actual settings
            // as read by the AM
            // if needed, use --hiveconf llap.daemon.service.hosts=@llap0 to dynamically switch between
            // instances
            conf.set(ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname, "@" + options.getName());
            propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_SERVICE_HOSTS.varname, "@" + options.getName());
        }

        if (options.getLogger() != null) {
            HiveConf.setVar(conf, ConfVars.LLAP_DAEMON_LOGGER, options.getLogger());
            propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_LOGGER.varname, options.getLogger());
        }
        boolean isDirect = HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_ALLOCATOR_DIRECT);

        if (options.getSize() != -1) {
            if (options.getCache() != -1) {
                if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_ALLOCATOR_MAPPED)) {
                    // direct heap allocations need to be safer
                    Preconditions.checkArgument(options.getCache() < options.getSize(),
                            "Cache size (" + LlapUtil.humanReadableByteCount(options.getCache())
                                    + ") has to be smaller" + " than the container sizing ("
                                    + LlapUtil.humanReadableByteCount(options.getSize()) + ")");
                } else if (options.getCache() < options.getSize()) {
                    LOG.warn("Note that this might need YARN physical memory monitoring to be turned off "
                            + "(yarn.nodemanager.pmem-check-enabled=false)");
                }
            }
            if (options.getXmx() != -1) {
                Preconditions.checkArgument(options.getXmx() < options.getSize(),
                        "Working memory (Xmx=" + LlapUtil.humanReadableByteCount(options.getXmx())
                                + ") has to be" + " smaller than the container sizing ("
                                + LlapUtil.humanReadableByteCount(options.getSize()) + ")");
            }
            if (isDirect && !HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_ALLOCATOR_MAPPED)) {
                // direct and not memory mapped
                Preconditions.checkArgument(options.getXmx() + options.getCache() <= options.getSize(),
                        "Working memory (Xmx=" + LlapUtil.humanReadableByteCount(options.getXmx())
                                + ") + cache size (" + LlapUtil.humanReadableByteCount(options.getCache())
                                + ") has to be smaller than the container sizing ("
                                + LlapUtil.humanReadableByteCount(options.getSize()) + ")");
            }
        }

        if (options.getExecutors() != -1) {
            conf.setLong(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname, options.getExecutors());
            propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_NUM_EXECUTORS.varname,
                    String.valueOf(options.getExecutors()));
            // TODO: vcpu settings - possibly when DRFA works right
        }

        if (options.getIoThreads() != -1) {
            conf.setLong(ConfVars.LLAP_IO_THREADPOOL_SIZE.varname, options.getIoThreads());
            propsDirectOptions.setProperty(ConfVars.LLAP_IO_THREADPOOL_SIZE.varname,
                    String.valueOf(options.getIoThreads()));
        }

        long cache = -1, xmx = -1;
        if (options.getCache() != -1) {
            cache = options.getCache();
            conf.set(HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname, Long.toString(cache));
            propsDirectOptions.setProperty(HiveConf.ConfVars.LLAP_IO_MEMORY_MAX_SIZE.varname,
                    Long.toString(cache));
        }

        if (options.getXmx() != -1) {
            // Needs more explanation here
            // Xmx is not the max heap value in JDK8. You need to subtract 50% of the survivor fraction
            // from this, to get actual usable memory before it goes into GC
            xmx = options.getXmx();
            long xmxMb = (xmx / (1024L * 1024L));
            conf.setLong(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname, xmxMb);
            propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_MEMORY_PER_INSTANCE_MB.varname,
                    String.valueOf(xmxMb));
        }

        long size = options.getSize();
        if (size == -1) {
            long heapSize = xmx;
            if (!isDirect) {
                heapSize += cache;
            }
            size = Math.min((long) (heapSize * 1.2), heapSize + 1024L * 1024 * 1024);
            if (isDirect) {
                size += cache;
            }
        }
        long containerSize = size / (1024 * 1024);
        final long minAlloc = conf.getInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, -1);
        Preconditions.checkArgument(containerSize >= minAlloc,
                "Container size (" + LlapUtil.humanReadableByteCount(options.getSize()) + ") should be greater"
                        + " than minimum allocation("
                        + LlapUtil.humanReadableByteCount(minAlloc * 1024L * 1024L) + ")");
        conf.setLong(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname, containerSize);
        propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_YARN_CONTAINER_MB.varname,
                String.valueOf(containerSize));

        LOG.info("Memory settings: container memory: {} executor memory: {} cache memory: {}",
                LlapUtil.humanReadableByteCount(options.getSize()),
                LlapUtil.humanReadableByteCount(options.getXmx()),
                LlapUtil.humanReadableByteCount(options.getCache()));

        if (options.getLlapQueueName() != null && !options.getLlapQueueName().isEmpty()) {
            conf.set(ConfVars.LLAP_DAEMON_QUEUE_NAME.varname, options.getLlapQueueName());
            propsDirectOptions.setProperty(ConfVars.LLAP_DAEMON_QUEUE_NAME.varname, options.getLlapQueueName());
        }

        final URL logger = conf.getResource(LlapConstants.LOG4j2_PROPERTIES_FILE);

        if (null == logger) {
            throw new Exception("Unable to find required config file: llap-daemon-log4j2.properties");
        }

        Path home = new Path(System.getenv("HIVE_HOME"));
        Path scriptParent = new Path(new Path(home, "scripts"), "llap");
        Path scripts = new Path(scriptParent, "bin");

        if (!lfs.exists(home)) {
            throw new Exception("Unable to find HIVE_HOME:" + home);
        } else if (!lfs.exists(scripts)) {
            LOG.warn("Unable to find llap scripts:" + scripts);
        }

        final Path libDir = new Path(tmpDir, "lib");
        final Path tezDir = new Path(libDir, "tez");
        final Path udfDir = new Path(libDir, "udfs");
        final Path confPath = new Path(tmpDir, "conf");
        if (!lfs.mkdirs(confPath)) {
            LOG.warn("mkdirs for " + confPath + " returned false");
        }
        if (!lfs.mkdirs(tezDir)) {
            LOG.warn("mkdirs for " + tezDir + " returned false");
        }
        if (!lfs.mkdirs(udfDir)) {
            LOG.warn("mkdirs for " + udfDir + " returned false");
        }

        NamedCallable<Void> downloadTez = new NamedCallable<Void>("downloadTez") {
            @Override
            public Void call() throws Exception {
                synchronized (fs) {
                    String tezLibs = conf.get(TezConfiguration.TEZ_LIB_URIS);
                    if (tezLibs == null) {
                        LOG.warn("Missing tez.lib.uris in tez-site.xml");
                    }
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Copying tez libs from " + tezLibs);
                    }
                    lfs.mkdirs(tezDir);
                    fs.copyToLocalFile(new Path(tezLibs), new Path(libDir, "tez.tar.gz"));
                    CompressionUtils.unTar(new Path(libDir, "tez.tar.gz").toString(), tezDir.toString(), true);
                    lfs.delete(new Path(libDir, "tez.tar.gz"), false);
                }
                return null;
            }
        };

        NamedCallable<Void> copyLocalJars = new NamedCallable<Void>("copyLocalJars") {
            @Override
            public Void call() throws Exception {
                Class<?>[] dependencies = new Class<?>[] { LlapDaemonProtocolProtos.class, // llap-common
                        LlapTezUtils.class, // llap-tez
                        LlapInputFormat.class, // llap-server
                        HiveInputFormat.class, // hive-exec
                        SslContextFactory.class, // hive-common (https deps)
                        Rule.class, // Jetty rewrite class
                        RegistryUtils.ServiceRecordMarshal.class, // ZK registry
                        // log4j2
                        com.lmax.disruptor.RingBuffer.class, // disruptor
                        org.apache.logging.log4j.Logger.class, // log4j-api
                        org.apache.logging.log4j.core.Appender.class, // log4j-core
                        org.apache.logging.slf4j.Log4jLogger.class, // log4j-slf4j
                        // log4j-1.2-API needed for NDC
                        org.apache.log4j.NDC.class, };

                for (Class<?> c : dependencies) {
                    Path jarPath = new Path(Utilities.jarFinderGetJar(c));
                    lfs.copyFromLocalFile(jarPath, libDir);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Copying " + jarPath + " to " + libDir);
                    }
                }
                return null;
            }
        };

        // copy default aux classes (json/hbase)

        NamedCallable<Void> copyAuxJars = new NamedCallable<Void>("copyAuxJars") {
            @Override
            public Void call() throws Exception {
                for (String className : DEFAULT_AUX_CLASSES) {
                    localizeJarForClass(lfs, libDir, className, false);
                }
                Collection<String> codecs = conf.getStringCollection("io.compression.codecs");
                if (codecs != null) {
                    for (String codecClassName : codecs) {
                        localizeJarForClass(lfs, libDir, codecClassName, false);
                    }
                }

                if (options.getIsHBase()) {
                    try {
                        localizeJarForClass(lfs, libDir, HBASE_SERDE_CLASS, true);
                        Job fakeJob = new Job(new JobConf()); // HBase API is convoluted.
                        TableMapReduceUtil.addDependencyJars(fakeJob);
                        Collection<String> hbaseJars = fakeJob.getConfiguration()
                                .getStringCollection("tmpjars");
                        for (String jarPath : hbaseJars) {
                            if (!jarPath.isEmpty()) {
                                lfs.copyFromLocalFile(new Path(jarPath), libDir);
                            }
                        }
                    } catch (Throwable t) {
                        String err = "Failed to add HBase jars. Use --auxhbase=false to avoid localizing them";
                        LOG.error(err);
                        System.err.println(err);
                        throw new RuntimeException(t);
                    }
                }

                HashSet<String> auxJars = new HashSet<>();
                // There are many ways to have AUX jars in Hive... sigh
                if (options.getIsHiveAux()) {
                    // Note: we don't add ADDED jars, RELOADABLE jars, etc. That is by design; there are too many ways
                    // to add jars in Hive, some of which are session/etc. specific. Env + conf + arg should be enough.
                    addAuxJarsToSet(auxJars, conf.getAuxJars());
                    addAuxJarsToSet(auxJars, System.getenv("HIVE_AUX_JARS_PATH"));
                    LOG.info("Adding the following aux jars from the environment and configs: " + auxJars);
                }

                addAuxJarsToSet(auxJars, options.getAuxJars());
                for (String jarPath : auxJars) {
                    lfs.copyFromLocalFile(new Path(jarPath), libDir);
                }
                return null;
            }

            private void addAuxJarsToSet(HashSet<String> auxJarSet, String auxJars) {
                if (auxJars != null && !auxJars.isEmpty()) {
                    // TODO: transitive dependencies warning?
                    String[] jarPaths = auxJars.split(",");
                    for (String jarPath : jarPaths) {
                        if (!jarPath.isEmpty()) {
                            auxJarSet.add(jarPath);
                        }
                    }
                }
            }
        };

        NamedCallable<Void> copyUdfJars = new NamedCallable<Void>("copyUdfJars") {
            @Override
            public Void call() throws Exception {
                // UDFs
                final Set<String> allowedUdfs;

                if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_ALLOW_PERMANENT_FNS)) {
                    synchronized (fs) {
                        allowedUdfs = downloadPermanentFunctions(conf, udfDir);
                    }
                } else {
                    allowedUdfs = Collections.emptySet();
                }

                PrintWriter udfStream = new PrintWriter(lfs
                        .create(new Path(confPath, StaticPermanentFunctionChecker.PERMANENT_FUNCTIONS_LIST)));
                for (String udfClass : allowedUdfs) {
                    udfStream.println(udfClass);
                }

                udfStream.close();
                return null;
            }
        };

        String java_home;
        if (options.getJavaPath() == null || options.getJavaPath().isEmpty()) {
            java_home = System.getenv("JAVA_HOME");
            String jre_home = System.getProperty("java.home");
            if (java_home == null) {
                java_home = jre_home;
            } else if (!java_home.equals(jre_home)) {
                LOG.warn("Java versions might not match : JAVA_HOME=[{}],process jre=[{}]", java_home,
                        jre_home);
            }
        } else {
            java_home = options.getJavaPath();
        }
        if (java_home == null || java_home.isEmpty()) {
            throw new RuntimeException(
                    "Could not determine JAVA_HOME from command line parameters, environment or system properties");
        }
        LOG.info("Using [{}] for JAVA_HOME", java_home);

        NamedCallable<Void> copyConfigs = new NamedCallable<Void>("copyConfigs") {
            @Override
            public Void call() throws Exception {
                // Copy over the mandatory configs for the package.
                for (String f : NEEDED_CONFIGS) {
                    copyConfig(lfs, confPath, f);
                }
                for (String f : OPTIONAL_CONFIGS) {
                    try {
                        copyConfig(lfs, confPath, f);
                    } catch (Throwable t) {
                        LOG.info("Error getting an optional config " + f + "; ignoring: " + t.getMessage());
                    }
                }
                createLlapDaemonConfig(lfs, confPath, conf, propsDirectOptions, options.getConfig());
                setUpLogAndMetricConfigs(lfs, logger, confPath);
                return null;
            }
        };

        @SuppressWarnings("unchecked")
        final NamedCallable<Void>[] asyncWork = new NamedCallable[] { downloadTez, copyUdfJars, copyLocalJars,
                copyAuxJars, copyConfigs };
        @SuppressWarnings("unchecked")
        final Future<Void>[] asyncResults = new Future[asyncWork.length];
        for (int i = 0; i < asyncWork.length; i++) {
            asyncResults[i] = asyncRunner.submit(asyncWork[i]);
        }

        // TODO: need to move from Python to Java for the rest of the script.
        JSONObject configs = createConfigJson(containerSize, cache, xmx, java_home);
        writeConfigJson(tmpDir, lfs, configs);

        if (LOG.isDebugEnabled()) {
            LOG.debug("Config generation took " + (System.nanoTime() - t0) + " ns");
        }
        for (int i = 0; i < asyncWork.length; i++) {
            final long t1 = System.nanoTime();
            asyncResults[i].get();
            final long t2 = System.nanoTime();
            if (LOG.isDebugEnabled()) {
                LOG.debug(asyncWork[i].getName() + " waited for " + (t2 - t1) + " ns");
            }
        }
        if (options.isStarting()) {
            String version = System.getenv("HIVE_VERSION");
            if (version == null || version.isEmpty()) {
                version = DateTime.now().toString("ddMMMyyyy");
            }

            String outputDir = options.getOutput();
            Path packageDir = null;
            if (outputDir == null) {
                outputDir = OUTPUT_DIR_PREFIX + version;
                packageDir = new Path(Paths.get(".").toAbsolutePath().toString(), OUTPUT_DIR_PREFIX + version);
            } else {
                packageDir = new Path(outputDir);
            }
            rc = runPackagePy(args, tmpDir, scriptParent, version, outputDir);
            if (rc == 0) {
                LlapSliderUtils.startCluster(conf, options.getName(), "llap-" + version + ".zip", packageDir,
                        HiveConf.getVar(conf, ConfVars.LLAP_DAEMON_QUEUE_NAME));
            }
        } else {
            rc = 0;
        }
    } finally {
        executor.shutdown();
        lfs.close();
        fs.close();
    }

    if (rc == 0) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Exiting successfully");
        }
    } else {
        LOG.info("Exiting with rc = " + rc);
    }
    return rc;
}

From source file:com.rinke.solutions.pinball.PinDmdEditor.java

void importProject(String filename) {
    log.info("importing project from {}", filename);
    Project projectToImport = (Project) fileHelper.loadObject(filename);
    // merge into existing Project
    HashSet<String> collisions = new HashSet<>();
    for (String key : projectToImport.frameSeqMap.keySet()) {
        if (project.frameSeqMap.containsKey(key)) {
            collisions.add(key);
        } else {
            project.frameSeqMap.put(key, projectToImport.frameSeqMap.get(key));
        }
    }
    if (!collisions.isEmpty()) {
        MessageBox messageBox = new MessageBox(shell, SWT.ICON_WARNING | SWT.OK | SWT.IGNORE | SWT.ABORT);

        messageBox.setText("Override warning");
        messageBox.setMessage("the following frame seq have NOT been \nimported due to name collisions: "
                + collisions + "\n");
        messageBox.open();
    }

    for (String inputFile : projectToImport.inputFiles) {
        aniAction.loadAni(buildRelFilename(filename, inputFile), true, true);
    }
    for (PalMapping palMapping : projectToImport.palMappings) {
        project.palMappings.add(palMapping);
    }
}