Example usage for java.util TreeSet size

List of usage examples for java.util TreeSet size

Introduction

On this page you can find usage examples for java.util.TreeSet.size().

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
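
A minimal, self-contained sketch of the prototype above (class name and values are illustrative, not taken from any project below):

import java.util.TreeSet;

public class TreeSetSizeDemo {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<>();
        set.add("alpha");
        set.add("beta");
        set.add("alpha"); // a duplicate element is rejected by the set

        // size() returns the cardinality: 2, not 3
        System.out.println(set.size());
    }
}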

Usage

From source file:org.commoncrawl.mapred.ec2.parser.EC2ParserTask.java

public EC2ParserTask(Configuration conf) throws Exception {

    super(conf);

    if (!conf.getBoolean(CONF_PARAM_TEST_MODE, false)) {
        conf.set(VALID_SEGMENTS_PATH_PROPERTY, VALID_SEGMENTS_PATH);
        conf.set(SEGMENT_PATH_PROPERTY, SEGMENTS_PATH);
        conf.set(JOB_LOGS_PATH_PROPERTY, JOB_LOGS_PATH);
        conf.set(CHECKPOIINTS_PATH_PROPERTY, CHECKPOINTS_PATH);

        // negative initial permits: a single acquire() succeeds only after
        // MAX_SIMULTANEOUS_JOBS release() calls
        jobThreadSemaphore = new Semaphore(-(MAX_SIMULTANEOUS_JOBS - 1));

    } else {
        conf.set(VALID_SEGMENTS_PATH_PROPERTY, TEST_VALID_SEGMENTS_PATH);
        conf.set(SEGMENT_PATH_PROPERTY, TEST_SEGMENTS_PATH);
        conf.set(JOB_LOGS_PATH_PROPERTY, TEST_JOB_LOGS_PATH);

        jobThreadSemaphore = new Semaphore(0);
        maxSimultaneousJobs = 1;
    }

    FileSystem fs = FileSystem.get(new URI("s3n://aws-publicdatasets"), conf);
    LOG.info(
            "FileSystem is:" + fs.getUri() + " Scanning for candidates at path:" + CRAWL_LOG_INTERMEDIATE_PATH);
    TreeSet<Path> candidateSet = buildCandidateList(fs, new Path(CRAWL_LOG_INTERMEDIATE_PATH));
    LOG.info("Scanning for completed segments");
    List<Path> processedLogs = scanForCompletedSegments(fs, conf);
    LOG.info("Found " + processedLogs.size() + " processed logs");
    // remove processed from candidate set ... 
    candidateSet.removeAll(processedLogs);
    // ok we are ready to go .. 
    LOG.info("There are: " + candidateSet.size() + " logs in need of parsing");
    while (candidateSet.size() != 0) {
        ImmutableList.Builder<Path> pathBuilder = new ImmutableList.Builder<Path>();
        Iterator<Path> iterator = Iterators.limit(candidateSet.iterator(), LOGS_PER_ITERATION);
        while (iterator.hasNext()) {
            pathBuilder.add(iterator.next());
            iterator.remove();
        }
        LOG.info("Queueing Parse");
        queue(fs, conf, pathBuilder.build());
        LOG.info("Queued Parse");

        // in test mode, queue only a single segment's worth of data 
        if (conf.getBoolean(CONF_PARAM_TEST_MODE, false)) {
            LOG.info("Test Mode - Queueing only a single Item");
            break;
        }
    }

    // queue shutdown items 
    for (int i = 0; i < maxSimultaneousJobs; ++i) {
        _queue.put(new QueueItem());
    }
}

From source file:org.apache.hadoop.dfs.NamenodeFsck.java

private DatanodeInfo bestNode(DFSClient dfs, DatanodeInfo[] nodes, TreeSet<DatanodeInfo> deadNodes)
        throws IOException {
    if ((nodes == null) || (nodes.length - deadNodes.size() < 1)) {
        throw new IOException("No live nodes contain current block");
    }
    DatanodeInfo chosenNode;
    do {
        chosenNode = nodes[r.nextInt(nodes.length)];
    } while (deadNodes.contains(chosenNode));
    return chosenNode;
}

From source file:net.opentsdb.tools.ConfigArgP.java

/**
 * Creates a new ConfigArgP
 * @param args The command line arguments
 */
public ConfigArgP(String... args) {
    InputStream is = null;

    try {
        final Config loadConfig = new NoLoadConfig();
        is = ConfigArgP.class.getClassLoader().getResourceAsStream("opentsdb.conf.json");
        ObjectMapper jsonMapper = new ObjectMapper();
        JsonNode root = jsonMapper.reader().readTree(is);
        JsonNode configRoot = root.get("config-items");
        scriptEngine.eval("var config = " + configRoot.toString() + ";");
        processBindings(jsonMapper, root);
        final ConfigurationItem[] loadedItems = jsonMapper.reader(ConfigurationItem[].class)
                .readValue(configRoot);
        final TreeSet<ConfigurationItem> items = new TreeSet<ConfigurationItem>(Arrays.asList(loadedItems));
        LOG.info("Loaded [{}] Configuration Items from opentsdb.conf.json", items.size());
        //         if(LOG.isDebugEnabled()) {
        StringBuilder b = new StringBuilder("Configs:");
        for (ConfigurationItem ci : items) {
            b.append("\n\t").append(ci.toString());
        }
        b.append("\n");
        LOG.info(b.toString());
        //         }
        for (ConfigurationItem ci : items) {
            LOG.debug("Processing CI [{}]", ci.getKey());
            if (ci.meta != null) {
                argp.addOption(ci.clOption, ci.meta, ci.description);
                if ("default".equals(ci.help))
                    dargp.addOption(ci.clOption, ci.meta, ci.description);
            } else {
                argp.addOption(ci.clOption, ci.description);
                if ("default".equals(ci.help))
                    dargp.addOption(ci.clOption, ci.description);
            }
            if (!configItemsByKey.add(ci)) {
                throw new RuntimeException("Duplicate configuration key [" + ci.key
                        + "] in opentsdb.conf.json. Programmer Error.");
            }
            if (!configItemsByCl.add(ci)) {
                throw new RuntimeException("Duplicate configuration command line option [" + ci.clOption
                        + "] in opentsdb.conf.json. Programmer Error.");
            }
            if (ci.getDefaultValue() != null) {
                ci.setValue(processConfigValue(ci.getDefaultValue()));
                loadConfig.overrideConfig(ci.key, processConfigValue(ci.getValue()));
            }
        }
        //loadConfig.loadStaticVariables();
        // find --config and --include-config in argp and load into config 
        //      validate
        //argp.parse(args);
        this.config = new Config(loadConfig);
        nonOptionArgs = applyArgs(args);
    } catch (Exception ex) {
        if (ex instanceof IllegalArgumentException) {
            throw (IllegalArgumentException) ex;
        }
        throw new RuntimeException("Failed to read opentsdb.conf.json", ex);
    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (Exception x) {
                /* No Op */
            }
        }
    }
}
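
The duplicate checks above work because TreeSet.add() returns false when an equal element is already present, leaving size() unchanged. A minimal sketch of that idiom (the key string is illustrative):

import java.util.TreeSet;

public class DuplicateKeyDemo {
    public static void main(String[] args) {
        TreeSet<String> configItemsByKey = new TreeSet<>();
        configItemsByKey.add("tsd.network.port");
        if (!configItemsByKey.add("tsd.network.port")) { // second add returns false
            System.out.println("Duplicate key; size is still " + configItemsByKey.size()); // prints 1
        }
    }
}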

From source file:org.apache.hadoop.fs.DistributedFSCheck.java

private void analyzeResult(long execTime, String resFileName, boolean viewStats) throws IOException {
    Path reduceFile = new Path(READ_DIR, "part-00000");
    DataInputStream in = new DataInputStream(fs.open(reduceFile));
    BufferedReader lines = new BufferedReader(new InputStreamReader(in));
    long blocks = 0;
    long size = 0;
    long time = 0;
    float rate = 0;
    StringTokenizer badBlocks = null;
    long nrBadBlocks = 0;
    String line;
    while ((line = lines.readLine()) != null) {
        StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%");
        String attr = tokens.nextToken();
        if (attr.endsWith("blocks"))
            blocks = Long.parseLong(tokens.nextToken());
        else if (attr.endsWith("size"))
            size = Long.parseLong(tokens.nextToken());
        else if (attr.endsWith("time"))
            time = Long.parseLong(tokens.nextToken());
        else if (attr.endsWith("rate"))
            rate = Float.parseFloat(tokens.nextToken());
        else if (attr.endsWith("badBlocks")) {
            badBlocks = new StringTokenizer(tokens.nextToken(), ";");
            nrBadBlocks = badBlocks.countTokens();
        }
    }

    Vector<String> resultLines = new Vector<String>();
    resultLines.add("----- DistributedFSCheck ----- : ");
    resultLines.add("               Date & time: " + new Date(System.currentTimeMillis()));
    resultLines.add("    Total number of blocks: " + blocks);
    resultLines.add("    Total number of  files: " + nrFiles);
    resultLines.add("Number of corrupted blocks: " + nrBadBlocks);

    int nrBadFilesPos = resultLines.size();
    TreeSet<String> badFiles = new TreeSet<String>();
    long nrBadFiles = 0;
    if (nrBadBlocks > 0) {
        resultLines.add("");
        resultLines.add("----- Corrupted Blocks (file@offset) ----- : ");
        while (badBlocks.hasMoreTokens()) {
            String curBlock = badBlocks.nextToken();
            resultLines.add(curBlock);
            badFiles.add(curBlock.substring(0, curBlock.indexOf('@')));
        }
        nrBadFiles = badFiles.size();
    }

    resultLines.insertElementAt(" Number of corrupted files: " + nrBadFiles, nrBadFilesPos);

    if (viewStats) {
        resultLines.add("");
        resultLines.add("-----   Performance  ----- : ");
        resultLines.add("         Total MBytes read: " + size / MEGA);
        resultLines.add("         Throughput mb/sec: " + (float) size * 1000.0 / (time * MEGA));
        resultLines.add("    Average IO rate mb/sec: " + rate / 1000 / blocks);
        resultLines.add("        Test exec time sec: " + (float) execTime / 1000);
    }

    PrintStream res = new PrintStream(new FileOutputStream(new File(resFileName), true));
    for (int i = 0; i < resultLines.size(); i++) {
        String cur = resultLines.get(i);
        LOG.info(cur);
        res.println(cur);
    }
}

From source file:org.atricore.idbus.kernel.main.databinding.JAXBUtils.java

/**
 * Get a JAXBContext for the class
 *
 * Note: The contextPackage object is used by multiple threads.  It should be considered immutable
 * and not altered by this method.
 *
 * @param contextPackages  Set<Package>
 * @param constructionType (output value that indicates how the context was constructed)
 * @param forceArrays (forces the returned JAXBContext to include the array types)
 * @param cacheKey ClassLoader
 * @return JAXBContext
 * @throws javax.xml.bind.JAXBException
 */
public static JAXBContext getJAXBContext(TreeSet<String> contextPackages,
        Holder<CONSTRUCTION_TYPE> constructionType, boolean forceArrays, String key, ClassLoader cacheKey,
        Map<String, ?> properties) throws JAXBException {
    // JAXBContexts for the same class can be reused and are supposed to be thread-safe
    if (log.isDebugEnabled()) {
        log.debug("Following packages are in this batch of getJAXBContext() :");
        for (String pkg : contextPackages) {
            log.debug(pkg);
        }
    }
    if (JAXBUtilsMonitor.isMonitoring()) {
        JAXBUtilsMonitor.addPackageKey(contextPackages.toString());
    }

    // Get or Create The InnerMap using the package key
    ConcurrentHashMap<ClassLoader, JAXBContextValue> innerMap = null;
    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef = jaxbMap.get(key);

    if (softRef != null) {
        innerMap = softRef.get();
    }

    if (innerMap == null) {
        synchronized (jaxbMap) {
            softRef = jaxbMap.get(key);
            if (softRef != null) {
                innerMap = softRef.get();
            }
            if (innerMap == null) {
                innerMap = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                softRef = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(innerMap);
                jaxbMap.put(key, softRef);
            }
        }
    }

    // Now get the contextValue using either the classloader key or
    // the current Classloader
    ClassLoader cl = getContextClassLoader();
    JAXBContextValue contextValue = null;
    if (cacheKey != null) {
        if (log.isDebugEnabled()) {
            log.debug("Using supplied classloader to retrieve JAXBContext: " + cacheKey);
        }
        contextValue = innerMap.get(cacheKey);
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Using classloader from Thread to retrieve JAXBContext: " + cl);
        }
        contextValue = innerMap.get(cl);
    }

    // If the context value is found, but the caller requested that the JAXBContext
    // contain arrays, then rebuild the JAXBContext value
    if (forceArrays && contextValue != null
            && contextValue.constructionType != JAXBUtils.CONSTRUCTION_TYPE.BY_CLASS_ARRAY_PLUS_ARRAYS) {
        if (log.isDebugEnabled()) {
            log.debug("Found a JAXBContextValue with constructionType=" + contextValue.constructionType
                    + "  but the caller requested a JAXBContext "
                    + " that includes arrays.  A new JAXBContext will be built");
        }
        contextValue = null;
    }

    if (contextPackages == null) {
        contextPackages = new TreeSet<String>();
    }
    if (contextValue == null) {
        synchronized (innerMap) {
            // Try to get the contextValue once more since sync was temporarily exited.
            ClassLoader clKey = (cacheKey != null) ? cacheKey : cl;
            contextValue = innerMap.get(clKey);
            adjustPoolSize(innerMap);
            if (forceArrays && contextValue != null
                    && contextValue.constructionType != JAXBUtils.CONSTRUCTION_TYPE.BY_CLASS_ARRAY_PLUS_ARRAYS) {
                contextValue = null;
            }
            if (contextValue == null) {
                // Create a copy of the contextPackages.  This new TreeSet will
                // contain only the valid contextPackages.
                // Note: The original contextPackage set is accessed by multiple
                // threads and should not be altered.

                TreeSet<String> validContextPackages = new TreeSet<String>(contextPackages);

                List<String> classRefs = pruneDirectives(validContextPackages);

                int numPackages = validContextPackages.size();

                contextValue = createJAXBContextValue(validContextPackages, clKey, forceArrays, properties,
                        classRefs);

                synchronized (jaxbMap) {
                    // Add the context value with the original package set
                    ConcurrentHashMap<ClassLoader, JAXBContextValue> map1 = null;
                    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef1 = jaxbMap.get(key);
                    if (softRef1 != null) {
                        map1 = softRef1.get();
                    }
                    if (map1 == null) {
                        map1 = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                        softRef1 = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(map1);
                        jaxbMap.put(key, softRef1);
                    }
                    map1.put(clKey, contextValue);

                    String validPackagesKey = validContextPackages.toString();

                    // Add the context value with the new package set
                    ConcurrentHashMap<ClassLoader, JAXBContextValue> map2 = null;
                    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef2 = jaxbMap
                            .get(validPackagesKey);
                    if (softRef2 != null) {
                        map2 = softRef2.get();
                    }
                    if (map2 == null) {
                        map2 = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                        softRef2 = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(map2);
                        jaxbMap.put(validPackagesKey, softRef2);
                    }
                    map2.put(clKey, contextValue);

                    if (log.isDebugEnabled()) {
                        log.debug("JAXBContext [created] for " + key);
                        log.debug("JAXBContext also stored by the list of valid packages:" + validPackagesKey);
                    }
                }
            }
        }
    } else {
        if (log.isDebugEnabled()) {
            log.debug("JAXBContext [from pool] for " + key);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("JAXBContext constructionType= " + contextValue.constructionType);
        log.debug("JAXBContextValue = " + JavaUtils.getObjectIdentity(contextValue));
        log.debug("JAXBContext = " + JavaUtils.getObjectIdentity(contextValue.jaxbContext));
    }
    constructionType.value = contextValue.constructionType;
    return contextValue.jaxbContext;
}
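
The caching above uses a check/lock/re-check sequence around SoftReference-wrapped maps, so cached contexts can be reclaimed under memory pressure. A stripped-down sketch of that pattern, assuming only that the value is expensive to build (all names here are illustrative):

import java.lang.ref.SoftReference;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

class SoftValueCache<K, V> {
    private final ConcurrentHashMap<K, SoftReference<V>> map = new ConcurrentHashMap<>();

    V getOrCreate(K key, Function<K, V> factory) {
        SoftReference<V> ref = map.get(key);
        V value = (ref != null) ? ref.get() : null;
        if (value == null) {
            synchronized (map) {
                // re-check: another thread may have built the value while we waited,
                // or the soft reference may have been cleared by the garbage collector
                ref = map.get(key);
                value = (ref != null) ? ref.get() : null;
                if (value == null) {
                    value = factory.apply(key);
                    map.put(key, new SoftReference<>(value));
                }
            }
        }
        return value;
    }
}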

From source file:gov.nih.nci.firebird.data.CredentialTest.java

@Test
public void testBoardCertifiedSpecialty() throws CredentialAlreadyExistsException {
    CertifiedSpecialtyBoard board = new CertifiedSpecialtyBoard("American Board of Anesthesiology");
    CertifiedSpecialtyBoard board2 = new CertifiedSpecialtyBoard("American Board of Allergy and Immunology");
    CertifiedSpecialtyType type1 = new CertifiedSpecialtyType("Anesthesiology", board);
    CertifiedSpecialtyType type2 = new CertifiedSpecialtyType("Critical Care Medicine", board,
            SpecialtyDesignation.SUBSPECIALTY);
    CertifiedSpecialtyType type3 = new CertifiedSpecialtyType("Allergy and Immunology", board);
    type1.setName(type1.getName()); // Coverage
    assertNotNull(type1.toString()); // Coverage
    type1.setId(null); // Coverage
    assertFalse(type1.equals(board2));

    assertEquals(type1.getName() + " (" + type1.getDesignation().getDisplay() + ")", type1.getDisplay());

    board.getTypes().add(type1);
    save(board, type1, type2, type3);

    getCurrentSession().save(profile);
    Calendar effectiveDate2 = Calendar.getInstance();
    effectiveDate2.setTime(new Date());
    Calendar expirationDate2 = Calendar.getInstance();
    expirationDate2.setTime(new Date());
    expirationDate2.add(Calendar.YEAR, 1);
    BoardCertifiedSpecialty specialty1 = new BoardCertifiedSpecialty(profile, effectiveDate2.getTime(),
            expirationDate2.getTime(), CertificationStatus.CERTIFIED, type1);
    BoardCertifiedSpecialty specialty2 = new BoardCertifiedSpecialty(profile, effectiveDate, expirationDate,
            CertificationStatus.ELIGIBLE, type2);
    BoardCertifiedSpecialty specialty3 = new BoardCertifiedSpecialty(profile, effectiveDate, expirationDate,
            CertificationStatus.ELIGIBLE, type3);
    assertEquals(CredentialType.SPECIALTY, specialty1.getType());
    assertEquals(CredentialType.SPECIALTY.name(), getDiscriminator(BoardCertifiedSpecialty.class));
    assertFalse(specialty1.equals(null));

    profile.addCredential(specialty1);
    profile.addCredential(specialty2);
    profile.addCredential(specialty3);
    getCurrentSession().save(profile);

    flushAndClearSession();

    assertNotNull(specialty1.getId());
    specialty1 = loadObject(BoardCertifiedSpecialty.class, specialty1.getId());
    specialty2 = loadObject(BoardCertifiedSpecialty.class, specialty2.getId());
    specialty3 = loadObject(BoardCertifiedSpecialty.class, specialty3.getId());

    @SuppressWarnings("unchecked")
    TreeSet<BoardCertifiedSpecialty> specialtyList = new TreeSet<BoardCertifiedSpecialty>(
            getCurrentSession().createCriteria(BoardCertifiedSpecialty.class).list());
    assertEquals(3, specialtyList.size());

    for (BoardCertifiedSpecialty specialty : specialtyList) {
        if (specialty.getSpecialtyType().equals(specialty1.getSpecialtyType())) {
            assertSame(specialty1, specialty);
        } else if (specialty.getSpecialtyType().equals(specialty2.getSpecialtyType())) {
            assertSame(specialty2, specialty);
        } else {
            assertSame(specialty3, specialty);
        }

    }

    profile = (InvestigatorProfile) getCurrentSession().get(InvestigatorProfile.class, profile.getId());
    assertEquals(3, profile.getCredentials().size());

    for (BoardCertifiedSpecialty specialty : profile.getCredentials(BoardCertifiedSpecialty.class)) {
        assertSame(profile, specialty.getProfile());
        if (specialty.getSpecialtyType().equals(specialty1.getSpecialtyType())) {
            assertDateEquals(effectiveDate2, specialty.getEffectiveDate());
            assertDateEquals(expirationDate2, specialty.getExpirationDate());
            assertEquals(CertificationStatus.CERTIFIED, specialty.getStatus());
            assertEquals(type1.getId(), specialty.getSpecialtyType().getId());
        } else if (specialty.getSpecialtyType().equals(specialty2.getSpecialtyType())) {
            assertDateEquals(effectiveDateCalendar, specialty.getEffectiveDate());
            assertDateEquals(expirationDateCalendar, specialty.getExpirationDate());
            assertEquals(CertificationStatus.ELIGIBLE, specialty.getStatus());
            assertEquals(type2.getId(), specialty.getSpecialtyType().getId());
        } else {
            assertDateEquals(effectiveDateCalendar, specialty.getEffectiveDate());
            assertDateEquals(expirationDateCalendar, specialty.getExpirationDate());
            assertEquals(CertificationStatus.ELIGIBLE, specialty.getStatus());
            assertEquals(type3.getId(), specialty.getSpecialtyType().getId());
        }

    }
}

From source file:org.apache.axis2.jaxws.message.databinding.JAXBUtils.java

/**
 * Get a JAXBContext for the class
 *
 * Note: The contextPackage object is used by multiple threads.  It should be considered immutable
 * and not altered by this method.
 * 
 * @param contextPackages  Set<Package>
 * @param constructionType (output value that indicates how the context was constructed)
 * @param forceArrays (forces the returned JAXBContext to include the array types)
 * @param cacheKey ClassLoader
 * @return JAXBContext
 * @throws JAXBException
 */
public static JAXBContext getJAXBContext(TreeSet<String> contextPackages,
        Holder<CONSTRUCTION_TYPE> constructionType, boolean forceArrays, String key, ClassLoader cacheKey,
        Map<String, ?> properties) throws JAXBException {
    // JAXBContexts for the same class can be reused and are supposed to be thread-safe
    if (log.isDebugEnabled()) {
        log.debug("Following packages are in this batch of getJAXBContext() :");
        for (String pkg : contextPackages) {
            log.debug(pkg);
        }
    }
    if (JAXBUtilsMonitor.isMonitoring()) {
        JAXBUtilsMonitor.addPackageKey(contextPackages.toString());
    }

    // Get or Create The InnerMap using the package key
    ConcurrentHashMap<ClassLoader, JAXBContextValue> innerMap = null;
    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef = jaxbMap.get(key);

    if (softRef != null) {
        innerMap = softRef.get();
    }

    if (innerMap == null) {
        synchronized (jaxbMap) {
            softRef = jaxbMap.get(key);
            if (softRef != null) {
                innerMap = softRef.get();
            }
            if (innerMap == null) {
                innerMap = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                softRef = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(innerMap);
                jaxbMap.put(key, softRef);
            }
        }
    }

    // Now get the contextValue using either the classloader key or 
    // the current Classloader
    ClassLoader cl = getContextClassLoader();
    JAXBContextValue contextValue = null;
    if (cacheKey != null) {
        if (log.isDebugEnabled()) {
            log.debug("Using supplied classloader to retrieve JAXBContext: " + cacheKey);
        }
        contextValue = innerMap.get(cacheKey);
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Using classloader from Thread to retrieve JAXBContext: " + cl);
        }
        contextValue = innerMap.get(cl);
    }

    // If the context value is found, but the caller requested that the JAXBContext
    // contain arrays, then rebuild the JAXBContext value
    if (forceArrays && contextValue != null
            && contextValue.constructionType != JAXBUtils.CONSTRUCTION_TYPE.BY_CLASS_ARRAY_PLUS_ARRAYS) {
        if (log.isDebugEnabled()) {
            log.debug("Found a JAXBContextValue with constructionType=" + contextValue.constructionType
                    + "  but the caller requested a JAXBContext "
                    + " that includes arrays.  A new JAXBContext will be built");
        }
        contextValue = null;
    }

    if (contextPackages == null) {
        contextPackages = new TreeSet<String>();
    }
    if (contextValue == null) {
        synchronized (innerMap) {
            // Try to get the contextValue once more since sync was temporarily exited.
            ClassLoader clKey = (cacheKey != null) ? cacheKey : cl;
            contextValue = innerMap.get(clKey);
            adjustPoolSize(innerMap);
            if (forceArrays && contextValue != null
                    && contextValue.constructionType != JAXBUtils.CONSTRUCTION_TYPE.BY_CLASS_ARRAY_PLUS_ARRAYS) {
                contextValue = null;
            }
            if (contextValue == null) {
                // Create a copy of the contextPackages.  This new TreeSet will
                // contain only the valid contextPackages.
                // Note: The original contextPackage set is accessed by multiple 
                // threads and should not be altered.

                TreeSet<String> validContextPackages = new TreeSet<String>(contextPackages);

                List<String> classRefs = pruneDirectives(validContextPackages);

                int numPackages = validContextPackages.size();

                contextValue = createJAXBContextValue(validContextPackages, clKey, forceArrays, properties,
                        classRefs);

                synchronized (jaxbMap) {
                    // Add the context value with the original package set
                    ConcurrentHashMap<ClassLoader, JAXBContextValue> map1 = null;
                    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef1 = jaxbMap.get(key);
                    if (softRef1 != null) {
                        map1 = softRef1.get();
                    }
                    if (map1 == null) {
                        map1 = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                        softRef1 = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(map1);
                        jaxbMap.put(key, softRef1);
                    }
                    map1.put(clKey, contextValue);

                    String validPackagesKey = validContextPackages.toString();

                    // Add the context value with the new package set
                    ConcurrentHashMap<ClassLoader, JAXBContextValue> map2 = null;
                    SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>> softRef2 = jaxbMap
                            .get(validPackagesKey);
                    if (softRef2 != null) {
                        map2 = softRef2.get();
                    }
                    if (map2 == null) {
                        map2 = new ConcurrentHashMap<ClassLoader, JAXBContextValue>();
                        softRef2 = new SoftReference<ConcurrentHashMap<ClassLoader, JAXBContextValue>>(map2);
                        jaxbMap.put(validPackagesKey, softRef2);
                    }
                    map2.put(clKey, contextValue);

                    if (log.isDebugEnabled()) {
                        log.debug("JAXBContext [created] for " + key);
                        log.debug("JAXBContext also stored by the list of valid packages:" + validPackagesKey);
                    }
                }
            }
        }
    } else {
        if (log.isDebugEnabled()) {
            log.debug("JAXBContext [from pool] for " + key);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("JAXBContext constructionType= " + contextValue.constructionType);
        log.debug("JAXBContextValue = " + JavaUtils.getObjectIdentity(contextValue));
        log.debug("JAXBContext = " + JavaUtils.getObjectIdentity(contextValue.jaxbContext));
    }
    constructionType.value = contextValue.constructionType;
    return contextValue.jaxbContext;
}

From source file:org.paxle.parser.impl.SubParserManager.java

public Collection<ISubParser> getSubParsers(String mimeType) {
    if (mimeType == null)
        return null;

    mimeType = mimeType.trim();
    final TreeSet<ServiceReference> refs = this.subParserList.get(mimeType);
    if (refs == null)
        return null;

    final ArrayList<ISubParser> list = new ArrayList<ISubParser>(refs.size());
    for (final ServiceReference ref : refs) {
        if (isEnabled(mimeType, ref)) {
            final ISubParser parser = (ISubParser) context.getService(ref);
            if (parser != null)
                list.add(parser);
        }
    }

    return list;
}

From source file:com.offbynull.voip.kademlia.model.RouteTreeNode.java

public void dumpAllNodesUnderTreeNode(Id id, TreeSet<Activity> output, int max, boolean includeStale,
        Set<BitString> skipPrefixes) {
    Validate.notNull(id);
    Validate.notNull(output); // technically shouldn't contain any null elements, but we don't care since we're just adding to this
    Validate.notNull(skipPrefixes);
    Validate.noNullElements(skipPrefixes);
    Validate.isTrue(max >= 0); // why would anyone want 0 here? let it through anyway

    // No more room in bucket? just leave right away.
    if (output.size() >= max) {
        return;
    }

    // Sort branches at this treenode by how close they are to the ID we're searching for... Go through the sorted branches in
    // order...
    //
    //   If it's a bucket: dump it.
    //   If it's a branch: recurse in to the branch and repeat
    //
    ArrayList<RouteTreeBranch> sortedBranches = new ArrayList<>(branches);
    Collections.sort(sortedBranches, new PrefixClosenessComparator(id, prefix.getBitLength(), suffixLen));

    // What is the point of taking in an ID and sorting the branches in this tree node such that we access the "closer" prefixes
    // first? We want to access the branches that are closer to the suffix of the ID first because ...
    //
    //
    // 1. Given the same prefix, we don't end up accessing the exact same set of nodes every time. For example...
    //
    //      0/\1
    //      /  EMPTY
    //    0/\1
    //    /  FULL
    //  0/\1
    // ME  FULL
    //
    // Assume the routing tree above. We want to route to node 111, but bucket 1xx is empty. We then go down the other branch and
    // start grabbing nodes starting with prefix 0xx. We then use the suffix of 111 (x11) to determine which branches to traverse
    // down first for our 0xx nodes to return. We do this because we don't want to return the same set of nodes every time someone
    // tries to access a 1xx node and we have an empty branch.
    //
    // For example...
    // if someone wanted 111 and 1xx was empty, path to search under 0xx would be 011, then 001, then 000.
    // if someone wanted 101 and 1xx was empty, path to search under 0xx would be 001, then 000, then 011.
    //
    // If we did something like a depth-first search, we'd always target 000 first, then 001, then 011. We don't want to do that
    // because we don't want to return the same set of nodes every time. It would end up being an undue burden on those nodes.
    //
    //
    //
    // 2. Remember our notion of closeness: XOR and normal integer less-than to see which is closer. So for example, let's say we're
    // looking for ID 111110 and the prefix at this point in the tree is 110xxx. Even though the prefix 110 doesn't match, we
    // still want to match the remaining suffix as closely as possible, because the extra 0's that the XOR produces at the
    // beginning of the suffix mean that we're closer.
    //
    // For example...
    //
    // This tree node has the prefix 110xxx and the ID we're searching for is 111110. There are 2 branches at this tree node:
    // 1100xx and 1101xx
    //
    //      110xxx
    //        /\
    //       /  \
    //      /    \
    //   0 /      \ 1
    //    /        \
    // 1100xx    1101xx
    //
    // We know that for ID 111110, the IDs under 1101xx WILL ALWAYS BE CLOSER than the IDs at 1100xx.
    //
    // XORing with the 1100xx bucket ... XOR(111110, 1100xx) = 0011xx
    // 
    // XORing with the 1101xx bucket ... XOR(111110, 1101xx) = 0010xx
    //
    //
    //     Remember how < works... go compare each single bit from the beginning until you come across a pair of bits that aren't
    //     equal (one is 0 and the other is 1). The ID with 0 at that position is less-than the other one.
    //
    //
    // The one on the bottom (1101xx) will ALWAYS CONTAIN CLOSER IDs...
    //
    // An example ID in top:    110011 ... XOR(111110, 110011) = 001101 = 13
    // An example ID in bottom: 110100 ... XOR(111110, 110100) = 001010 = 10
    // 

    for (RouteTreeBranch sortedBranch : sortedBranches) {
        if (skipPrefixes.contains(sortedBranch.getPrefix())) {
            continue;
        }

        if (sortedBranch instanceof RouteTreeNodeBranch) {
            RouteTreeNode node = sortedBranch.getItem();
            node.dumpAllNodesUnderTreeNode(id, output, max, includeStale, emptySet()); // don't propagate skipPrefixes (not relevant)

            // Bucket's full after dumping nodes in that branch. No point in continued processing.
            if (output.size() >= max) {
                return;
            }
        } else if (sortedBranch instanceof RouteTreeBucketBranch) {
            KBucket bucket = sortedBranch.getItem();

            // don't bother with locked nodes for now, we're not supporting them
            output.addAll(bucket.dumpBucket(true, includeStale, false));

            // Bucket's full after that add. No point in continued processing.
            if (output.size() >= max) {
                // If we have more than max elements from that last add, start evicting farthest away nodes
                while (output.size() > max) {
                    output.pollLast();
                }
                return;
            }
        } else {
            throw new IllegalStateException(); // should never happen
        }
    }
}
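
The output.size() / pollLast() combination above caps a TreeSet at max entries by evicting the largest (farthest) elements. A minimal sketch of that bounded-set idiom (distance values are illustrative):

import java.util.TreeSet;

public class BoundedSetDemo {
    public static void main(String[] args) {
        final int max = 3;
        TreeSet<Integer> output = new TreeSet<>();
        for (int distance : new int[] { 9, 13, 2, 7, 11 }) {
            output.add(distance);
            // evict the largest elements once over capacity
            while (output.size() > max) {
                output.pollLast();
            }
        }
        System.out.println(output); // [2, 7, 9]
    }
}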

From source file:com.clust4j.algo.MeanShiftTests.java

@Test
public void testAutoEstimation() {
    Array2DRowRealMatrix iris = data_;
    final double[][] X = iris.getData();

    // MS estimates bw at 1.2032034114912584
    final double bandwidth = 1.2032034114912584;
    assertTrue(MeanShift.autoEstimateBW(iris, 0.3, Distance.EUCLIDEAN, GlobalState.DEFAULT_RANDOM_STATE,
            false) == bandwidth);

    // Asserting fit works without breaking things...
    RadiusNeighbors r = new RadiusNeighbors(iris, new RadiusNeighborsParameters(bandwidth)).fit();

    TreeSet<MeanShiftSeed> centers = new TreeSet<>();
    for (double[] seed : X)
        centers.add(MeanShift.singleSeed(seed, r, X, 300));

    assertTrue(centers.size() == 7);

    double[][] expected_dists = new double[][] {
            new double[] { 6.2114285714285691, 2.8928571428571428, 4.8528571428571423, 1.6728571428571426 },
            new double[] { 6.1927536231884037, 2.8768115942028984, 4.8188405797101437, 1.6463768115942023 },
            new double[] { 6.1521739130434767, 2.850724637681159, 4.7405797101449272, 1.6072463768115937 },
            new double[] { 6.1852941176470564, 2.8705882352941177, 4.8058823529411754, 1.6397058823529407 },
            new double[] { 6.1727272727272711, 2.874242424242424, 4.7757575757575745, 1.6287878787878785 },
            new double[] { 5.0163265306122451, 3.440816326530614, 1.46734693877551, 0.24285714285714283 },
            new double[] { 5.0020833333333341, 3.4208333333333356, 1.4666666666666668, 0.23958333333333334 } };

    int[] expected_centers = new int[] { 70, 69, 69, 68, 66, 49, 48 };

    int idx = 0;
    for (MeanShiftSeed seed : centers) {
        assertTrue(VecUtils.equalsWithTolerance(seed.dists, expected_dists[idx], 1e-1));
        assertTrue(seed.count == expected_centers[idx]);
        idx++;
    }
}
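
Note that centers.size() can be smaller than the number of seeds fed in: a TreeSet defines "duplicate" by its ordering, so seeds that compare equal collapse into one entry. A small illustration of that behavior with a custom comparator (not MeanShift code):

import java.util.TreeSet;

public class ComparatorUniquenessDemo {
    public static void main(String[] args) {
        TreeSet<String> seeds = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        seeds.add("Seed");
        seeds.add("seed"); // compares equal under the comparator, so rejected
        System.out.println(seeds.size()); // prints 1
    }
}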