Example usage for java.util TreeSet contains

List of usage examples for java.util TreeSet contains

Introduction

On this page you can find example usage for java.util TreeSet contains.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
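
As a quick reference before the project examples below, a minimal sketch (not taken from any of the sources listed here): on a TreeSet, contains performs an ordered lookup in O(log n) time, and membership is decided by compareTo or by the set's Comparator rather than by equals.

import java.util.TreeSet;

public class TreeSetContainsDemo {
    public static void main(String[] args) {
        // Case-insensitive ordering: membership is decided by the comparator, not by equals()
        TreeSet<String> names = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
        names.add("Alice");
        names.add("Bob");

        System.out.println(names.contains("alice")); // true  (compares equal ignoring case)
        System.out.println(names.contains("Carol")); // false
    }
}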

Usage

From source file:gr.cti.android.experimentation.controller.api.HistoryController.java

private void fillMissingIntervals(TreeSet<Long> treeSet, String rollup, long toLong) {

    //TODO: add non existing intervals
    if (rollup.endsWith("d")) {
        DateTime firstDate = new DateTime(treeSet.iterator().next());

        while (firstDate.isBefore(toLong)) {
            firstDate = firstDate.plusDays(1);
            if (!treeSet.contains(firstDate.getMillis())) {
                treeSet.add(firstDate.getMillis());
            }
        }
    } else if (rollup.endsWith("h")) {
        DateTime firstDate = new DateTime(treeSet.iterator().next());

        while (firstDate.isBefore(toLong)) {
            firstDate = firstDate.plusHours(1);
            if (!treeSet.contains(firstDate.getMillis())) {
                treeSet.add(firstDate.getMillis());
            }
        }
    } else if (rollup.endsWith("m")) {
        DateTime firstDate = new DateTime(treeSet.iterator().next());

        while (firstDate.isBefore(toLong)) {
            firstDate = firstDate.plusMinutes(1);
            if (!treeSet.contains(firstDate.getMillis())) {
                treeSet.add(firstDate.getMillis());
            }
        }
    }
}
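
The three rollup branches above differ only in the increment applied to the time cursor. Below is a minimal consolidated sketch of the same idea (assuming Joda-Time, as in the original; the class and method names here are hypothetical and not part of the HistoryController):

import java.util.TreeSet;
import java.util.function.UnaryOperator;

import org.joda.time.DateTime;

class FillMissingIntervalsSketch {

    static void fillMissingIntervals(TreeSet<Long> treeSet, String rollup, long toLong) {
        // Pick the increment once, instead of duplicating the loop per rollup unit
        final UnaryOperator<DateTime> step;
        if (rollup.endsWith("d")) {
            step = d -> d.plusDays(1);
        } else if (rollup.endsWith("h")) {
            step = d -> d.plusHours(1);
        } else if (rollup.endsWith("m")) {
            step = d -> d.plusMinutes(1);
        } else {
            return; // unknown rollup unit: nothing to fill
        }

        DateTime cursor = new DateTime(treeSet.iterator().next());
        while (cursor.isBefore(toLong)) {
            cursor = step.apply(cursor);
            // contains() is an O(log n) ordered lookup on the TreeSet<Long>
            if (!treeSet.contains(cursor.getMillis())) {
                treeSet.add(cursor.getMillis());
            }
        }
    }
}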

From source file:com.qpark.maven.plugin.flowmapper.AbstractMappingOperationGenerator.java

protected String getMapperDefinitionSetter(final List<Entry<ComplexTypeChild, List<ComplexTypeChild>>> children,
        final Set<String> importedClasses) {
    StringBuffer sb = new StringBuffer(1024);
    ComplexContent cc = null;
    TreeSet<String> usedInterfaces = new TreeSet<String>();
    for (Entry<ComplexTypeChild, List<ComplexTypeChild>> child : children) {
        for (ComplexTypeChild grandchild : child.getValue()) {
            cc = this.getMapperDefinition(grandchild.getComplexType());
            if (cc != null && !usedInterfaces.contains(cc.getFQInterfaceName())) {
                usedInterfaces.add(cc.getFQInterfaceName());
                String varName = Util.lowerize(cc.interfaceClassName);
                String className = cc.interfaceClassName;
                if (!importedClasses.contains(cc.getFQInterfaceName())) {
                    className = cc.getFQInterfaceName();
                }
                sb.append("\t/**\n\t * Set the {@link ");
                sb.append(className);
                sb.append("}.\n\t * @param ");
                sb.append(varName);
                sb.append(" the {@ link ");
                sb.append(className);
                sb.append("}.\n\t */\n ");
                sb.append("\tpublic void set");
                sb.append(cc.interfaceClassName);
                sb.append("(");
                sb.append(className);
                sb.append(" ");
                sb.append(varName);
                sb.append(") {\n\t\tthis.");
                sb.append(varName);
                sb.append(" = ");
                sb.append(varName);
                sb.append(";\n\t}\n\n");
            }

        }
    }
    return sb.toString();
}

From source file:org.kepler.sms.NamedOntModel.java

/**
 * Gets the named classes in the ontology.
 *
 * @return an iterator over the named ontology classes, in ascending natural order
 */
public Iterator<NamedOntClass> getNamedClasses() {

    //use TreeSet instead of Vector so contains() is fast. TreeSet also
    //gives ascending natural ordering sorting for free. fix for bug #4539
    //Vector<NamedOntClass> results = new Vector<NamedOntClass>();
    TreeSet<NamedOntClass> results = new TreeSet<NamedOntClass>();
    NamedOntClass noc = null;
    for (OWLClass c : _ontology.getReferencedClasses()) {
        noc = new NamedOntClass(c, _ontology);

        if (!results.contains(noc))
            results.add(noc);
    }

    return results.iterator();
}
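
A note on the pattern above: because a TreeSet is a Set, the contains() guard before add() is largely a readability choice; add() performs the same comparator-based ordered lookup and leaves the set unchanged when an equal element is already present. A minimal sketch of that behavior (hypothetical class name):

import java.util.TreeSet;

public class TreeSetAddVsContains {
    public static void main(String[] args) {
        TreeSet<String> results = new TreeSet<String>();
        results.add("ClassA");

        // add() already performs the ordered lookup that contains() would do
        boolean added = results.add("ClassA");
        System.out.println(added);                      // false: duplicate, set unchanged
        System.out.println(results.contains("ClassA")); // true
    }
}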

From source file:ro.agrade.jira.qanda.dao.GenericDelegatorLoader.java

/**
 * For the delegator instance, initialize helpers by group and do the data
 * source check. The code is taken from the GenericDelegator(String
 * delegatorName) constructor.
 *
 * @throws OfbizDataException if an error occurs
 */
private void initializeHelpersAndDatasourceCheck() throws OfbizDataException {
    GenericDelegator delegator = GenericDelegator.getGenericDelegator(delegatorName);
    if (delegator == null) {
        LOG.warn("Null delegator in initializeHelpersAndDatasourceCheck().");
        return;
    }
    // initialize helpers by group
    Iterator<?> groups = UtilMisc.toIterator(delegator.getModelGroupReader().getGroupNames());

    // each helper should only be initialized once, so track them across all groups
    TreeSet<String> helpersDone = new TreeSet<String>();
    while (groups != null && groups.hasNext()) {
        String groupName = (String) groups.next();
        String helperName = delegator.getGroupHelperName(groupName);
        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Delegator %s initializing helper %s " + "for entity group %s ",
                    delegator.getDelegatorName(), helperName, groupName));
        }

        if (helperName != null && helperName.length() > 0) {
            // make sure each helper is only loaded once
            if (helpersDone.contains(helperName)) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(String.format("Helper %s already initialized," + " not re-initializing.",
                            helperName));
                }
                continue;
            }
            helpersDone.add(helperName);
            // pre-load field type defs, the return value is ignored
            ModelFieldTypeReader.getModelFieldTypeReader(helperName);
            // get the helper and if configured, do the datasource check
            GenericHelper helper = GenericHelperFactory.getHelper(helperName);

            try {
                helper.checkDataSource(delegator.getModelEntityMapByGroup(groupName), null, true);
            } catch (GenericEntityException e) {
                LOG.warn(e);
            }
        }
    }
}

From source file:org.deegree.enterprise.WebUtils.java

private static void handleProxies(String protocol, HttpClient client, String host) {
    TreeSet<String> nops = new TreeSet<String>();

    String proxyHost = getProperty((protocol == null ? "" : protocol + ".") + "proxyHost");

    String proxyUser = getProperty((protocol == null ? "" : protocol + ".") + "proxyUser");
    String proxyPass = getProperty((protocol == null ? "" : protocol + ".") + "proxyPassword");

    if (proxyHost != null) {
        String nop = getProperty((protocol == null ? "" : protocol + ".") + "noProxyHosts");
        if (nop != null && !nop.equals("")) {
            nops.addAll(asList(nop.split("\\|")));
        }
        nop = getProperty((protocol == null ? "" : protocol + ".") + "nonProxyHosts");
        if (nop != null && !nop.equals("")) {
            nops.addAll(asList(nop.split("\\|")));
        }

        int proxyPort = parseInt(getProperty((protocol == null ? "" : protocol + ".") + "proxyPort"));

        HostConfiguration hc = client.getHostConfiguration();

        if (LOG.isDebug()) {
            LOG.logDebug("Found the following no- and nonProxyHosts", nops);
        }

        if (proxyUser != null) {
            Credentials creds = new UsernamePasswordCredentials(proxyUser, proxyPass);
            client.getState().setProxyCredentials(AuthScope.ANY, creds);
            client.getParams().setAuthenticationPreemptive(true);
        }

        if (!nops.contains(host)) {
            if (LOG.isDebug()) {
                LOG.logDebug("Using proxy " + proxyHost + ":" + proxyPort);
                if (protocol == null) {
                    LOG.logDebug("This overrides the protocol specific settings, if there were any.");
                }
            }
            hc.setProxy(proxyHost, proxyPort);
            client.setHostConfiguration(hc);
        } else {
            if (LOG.isDebug()) {
                LOG.logDebug("Proxy was set, but " + host + " was contained in the no-/nonProxyList!");
                if (protocol == null) {
                    LOG.logDebug("If a protocol specific proxy has been set, it will be used anyway!");
                }
            }
        }
    }

    if (protocol != null) {
        handleProxies(null, client, host);
    }
}

From source file:org.opendatakit.aggregate.odktables.impl.api.TableServiceImpl.java

@Override
public Response createTable(TableDefinition definition) throws ODKDatastoreException,
        TableAlreadyExistsException, PermissionDeniedException, ODKTaskLockException, IOException {

    TreeSet<GrantedAuthorityName> ui = SecurityServiceUtil.getCurrentUserSecurityInfo(cc);
    if (!ui.contains(GrantedAuthorityName.ROLE_ADMINISTER_TABLES)) {
        throw new PermissionDeniedException("User does not belong to the 'Administer Tables' group");
    }

    TablesUserPermissions userPermissions = new TablesUserPermissionsImpl(cc);

    TableManager tm = new TableManager(appId, userPermissions, cc);
    // NOTE: the only access control restriction for
    // creating the table is the Administer Tables role.
    List<Column> columns = definition.getColumns();

    TableEntry entry = tm.createTable(tableId, columns);
    TableResource resource = getResource(info, appId, entry);

    // set the table-level manifest ETag if known...
    try {
        resource.setTableLevelManifestETag(
                FileManifestServiceImpl.getTableLevelManifestETag(entry.getTableId(), cc));
    } catch (ODKDatastoreException e) {
        // ignore
    }

    logger.info(String.format("createTable: tableId: %s, definition: %s", tableId, definition));

    return Response.ok(resource)
            .header(ApiConstants.OPEN_DATA_KIT_VERSION_HEADER, ApiConstants.OPEN_DATA_KIT_VERSION)
            .header("Access-Control-Allow-Origin", "*").header("Access-Control-Allow-Credentials", "true")
            .build();
}

From source file:com.cloudera.recordbreaker.analyzer.FSCrawler.java

/**
 * <code>getStartNonblockingCrawl</code> traverses a given filesystem.  It returns immediately
 * and does not wait for the crawl to complete.
 * If the crawl is created or is already ongoing, it returns true.
 * If the crawl is not currently running and cannot be started, it returns false.
 */
public synchronized boolean getStartNonblockingCrawl(final URI fsURI) {
    try {
        final int subdirDepth = INFINITE_CRAWL_DEPTH;
        long fsId = analyzer.getCreateFilesystem(fsURI, true);
        if (fsId < 0) {
            return false;
        }
        LOG.info("Grabbing filesystem: " + fsURI);
        final FileSystem fs = FileSystem.get(fsURI, new Configuration());
        final Path startDir = fs.makeQualified(new Path(fsURI.getPath()));

        final long crawlid = analyzer.getCreatePendingCrawl(fsId, true);
        Thread pendingThread = pendingCrawls.get(crawlid);
        if (pendingThread == null) {
            Thread t = new Thread() {
                public void run() {
                    try {
                        synchronized (pendingCrawls) {
                            pendingCrawls.put(crawlid, this);
                        }
                        synchronized (crawlStatusInfo) {
                            crawlStatusInfo.put(crawlid, new CrawlRuntimeStatus("Initializing crawl"));
                        }
                        // Build the file and dir-level todo lists
                        List<Path> todoFileList = new ArrayList<Path>();
                        List<Path> todoDirList = new ArrayList<Path>();
                        recursiveCrawlBuildList(fs, startDir, subdirDepth, crawlid, todoFileList, todoDirList);

                        // Get the files to process
                        TreeSet<String> observedFilenames = new TreeSet<String>();
                        for (Path p : analyzer.getFilesForCrawl(crawlid)) {
                            observedFilenames.add(p.toString());
                        }
                        for (Iterator<Path> it = todoFileList.iterator(); it.hasNext();) {
                            Path p = it.next();
                            if (observedFilenames.contains(p.toString())) {
                                it.remove();
                            }
                        }

                        // Get the dirs to process
                        TreeSet<String> observedDirnames = new TreeSet<String>();
                        for (Path p : analyzer.getDirsForCrawl(crawlid)) {
                            observedDirnames.add(p.toString());
                        }
                        for (Iterator<Path> it = todoDirList.iterator(); it.hasNext();) {
                            Path p = it.next();
                            if (observedDirnames.contains(p.toString())) {
                                it.remove();
                            }
                        }

                        synchronized (crawlStatusInfo) {
                            CrawlRuntimeStatus cstatus = crawlStatusInfo.get(crawlid);
                            cstatus.setMessage("Processing files");
                            cstatus.setNumToProcess(todoFileList.size());
                            cstatus.setNumDone(0);
                        }

                        int numDone = 0;
                        for (Path p : todoDirList) {
                            try {
                                analyzer.addSingleFile(fs, p, crawlid);
                            } catch (IOException iex) {
                                iex.printStackTrace();
                            }
                        }
                        for (Path p : todoFileList) {
                            synchronized (crawlStatusInfo) {
                                CrawlRuntimeStatus cstatus = crawlStatusInfo.get(crawlid);
                                cstatus.setMessage("Processing file " + p.toString());
                            }
                            try {
                                analyzer.addSingleFile(fs, p, crawlid);
                            } catch (Exception iex) {
                                iex.printStackTrace();
                            }
                            numDone++;
                            synchronized (crawlStatusInfo) {
                                CrawlRuntimeStatus cstatus = crawlStatusInfo.get(crawlid);
                                cstatus.setNumDone(numDone);
                                if (cstatus.shouldFinish()) {
                                    break;
                                }
                            }
                        }
                    } catch (IOException iex) {
                        iex.printStackTrace();
                    } finally {
                        try {
                            synchronized (pendingCrawls) {
                                pendingCrawls.remove(crawlid);
                                analyzer.completeCrawl(crawlid);
                            }
                        } catch (SQLiteException sle) {
                        }
                    }
                }
            };
            t.start();
        }
        return true;
    } catch (Exception iex) {
        iex.printStackTrace();
    }
    return false;
}

From source file:com.symbian.utils.config.ConfigUtils.java

/**
 * completeConfigFromStore adds any extra elements from the store. For example, if the plugins have
 * been activated before, their settings would have been loaded by their activators.
 *
 */
protected void completeConfigFromStore() {
    try {
        TreeSet<String> storeKeys = new TreeSet<String>();
        storeKeys.addAll(Arrays.asList(iPrefrences.keys()));
        TreeSet<String> localKeys = new TreeSet<String>();
        localKeys.addAll(Arrays.asList(iPreferenceLiterals));

        for (String key : storeKeys) {
            if (!localKeys.contains(key)) {
                addConfig(key, iPrefrences.get(key, ""), new CheckGetConfig(), new CheckSetConfig(),
                        String.class);
            }
        }
    } catch (BackingStoreException e) {
        LOGGER.log(Level.SEVERE, "Could not initialise the config " + e.getMessage());
    }
}

From source file:org.opendatakit.persistence.table.GrantedAuthorityHierarchyTable.java

public static final void assertGrantedAuthorityHierarchy(GrantedAuthority dominantGrant,
        Collection<String> desiredGrants, CallingContext cc) throws ODKDatastoreException {

    if (!GrantedAuthorityName.permissionsCanBeAssigned(dominantGrant.getAuthority())) {
        throw new IllegalArgumentException("Dominant grant must be permissions-assignable!");
    }

    Datastore ds = cc.getDatastore();
    User user = cc.getCurrentUser();

    boolean hasNotChanged = true;

    try {
        GrantedAuthorityHierarchyTable relation = GrantedAuthorityHierarchyTable.assertRelation(ds, user);

        TreeSet<String> groups = new TreeSet<String>();
        TreeSet<String> roles = new TreeSet<String>();
        for (String grant : desiredGrants) {
            if (!GrantedAuthorityName.permissionsCanBeAssigned(grant)) {
                roles.add(grant);
            } else {
                groups.add(grant);
            }
        }

        // get the hierarchy as currently defined for this group
        List<? extends CommonFieldsBase> groupsList;
        relation = GrantedAuthorityHierarchyTable.assertRelation(ds, user);
        Query query = ds.createQuery(relation, "GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy",
                user);
        query.addFilter(GrantedAuthorityHierarchyTable.DOMINATING_GRANTED_AUTHORITY, FilterOperation.EQUAL,
                dominantGrant.getAuthority());
        groupsList = query.executeQuery();

        // OK we have the groups and roles to establish for this dominantGrant.
        // AND we have the groupsList of groups and roles already established for dominantGrant.
        List<EntityKey> deleted = new ArrayList<EntityKey>();
        for (CommonFieldsBase b : groupsList) {
            GrantedAuthorityHierarchyTable t = (GrantedAuthorityHierarchyTable) b;
            String authority = t.getSubordinateGrantedAuthority().getAuthority();
            if (groups.contains(authority)) {
                groups.remove(authority);
            } else if (roles.contains(authority)) {
                roles.remove(authority);
            } else {
                deleted.add(t.getEntityKey());
            }
        }
        // we now have the list of groups and roles to insert, and the list of
        // existing records to delete...
        List<GrantedAuthorityHierarchyTable> added = new ArrayList<GrantedAuthorityHierarchyTable>();
        for (String group : groups) {
            GrantedAuthorityHierarchyTable t = ds.createEntityUsingRelation(relation, user);
            t.setDominatingGrantedAuthority(dominantGrant.getAuthority());
            t.setSubordinateGrantedAuthority(group);
            added.add(t);
        }

        for (String role : roles) {
            GrantedAuthorityHierarchyTable t = ds.createEntityUsingRelation(relation, user);
            t.setDominatingGrantedAuthority(dominantGrant.getAuthority());
            t.setSubordinateGrantedAuthority(role);
            added.add(t);
        }

        hasNotChanged = added.isEmpty() && deleted.isEmpty();

        // we now have the list of EntityKeys to delete, and the list of records to add -- do it.
        ds.putEntities(added, user);
        ds.deleteEntities(deleted, user);
    } finally {
        if (!hasNotChanged) {
            // finally, since we mucked with the group hierarchies, flag that
            // the cache of those hierarchies has changed.
            SecurityRevisionsTable.setLastRoleHierarchyRevisionDate(ds, user);
        }
    }
}

From source file:org.opendatakit.aggregate.odktables.impl.api.TableServiceImpl.java

public Response putInternalTableProperties(PropertyEntryXmlList propertiesList)
        throws ODKDatastoreException, PermissionDeniedException, ODKTaskLockException, TableNotFoundException {

    TreeSet<GrantedAuthorityName> ui = SecurityServiceUtil.getCurrentUserSecurityInfo(cc);
    if (!ui.contains(GrantedAuthorityName.ROLE_ADMINISTER_TABLES)) {
        throw new PermissionDeniedException("User does not belong to the 'Administer Tables' group");
    }

    TablesUserPermissions userPermissions = new TablesUserPermissionsImpl(cc);

    String appRelativePath = FileManager.getPropertiesFilePath(tableId);

    String contentType = com.google.common.net.MediaType.CSV_UTF_8.toString();

    // DbTableFileInfo.NO_TABLE_ID -- means that we are working with app-level
    // permissions
    userPermissions.checkPermission(appId, tableId, TablePermission.WRITE_PROPERTIES);

    ByteArrayOutputStream bas = new ByteArrayOutputStream();
    Writer wtr = null;
    RFC4180CsvWriter csvWtr = null;

    try {
        wtr = new OutputStreamWriter(bas, CharEncoding.UTF_8);
        csvWtr = new RFC4180CsvWriter(wtr);
        String[] entry = new String[5];
        entry[0] = "_partition";
        entry[1] = "_aspect";
        entry[2] = "_key";
        entry[3] = "_type";
        entry[4] = "_value";
        csvWtr.writeNext(entry);
        for (PropertyEntryXml e : propertiesList.getProperties()) {
            entry[0] = e.getPartition();
            entry[1] = e.getAspect();
            entry[2] = e.getKey();
            entry[3] = e.getType();
            entry[4] = e.getValue();
            csvWtr.writeNext(entry);
        }
        csvWtr.flush();
    } catch (UnsupportedEncodingException ex) {
        ex.printStackTrace();
        throw new IllegalStateException("Unrecognized UTF-8 charset!");
    } catch (IOException ex) {
        ex.printStackTrace();
        throw new IllegalStateException("Unable to write into a byte array!");
    } finally {
        if (csvWtr != null) {
            try {
                csvWtr.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else if (wtr != null) {
            try {
                wtr.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    byte[] content = bas.toByteArray();

    FileManager fm = new FileManager(appId, cc);

    FileContentInfo fi = new FileContentInfo(contentType, Long.valueOf(content.length), null, content);

    @SuppressWarnings("unused")
    FileChangeDetail outcome = fm.putFile("1", tableId, appRelativePath, userPermissions, fi);
    return Response.status(Status.ACCEPTED)
            .header(ApiConstants.OPEN_DATA_KIT_VERSION_HEADER, ApiConstants.OPEN_DATA_KIT_VERSION)
            .header("Access-Control-Allow-Origin", "*").header("Access-Control-Allow-Credentials", "true")
            .build();
}