Example usage for java.util HashSet size

List of usage examples for java.util HashSet size

Introduction

On this page you can find example usages of java.util.HashSet.size().

Prototype

public int size() 

Document

Returns the number of elements in this set (its cardinality).
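
As a quick, self-contained illustration (not taken from the projects below), size() reports the set's cardinality; re-adding an element that is already present does not change it:

import java.util.HashSet;

public class HashSetSizeDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<>();
        names.add("alice");
        names.add("bob");
        names.add("alice"); // duplicate, the set keeps a single "alice"
        // Prints 2: the duplicate is counted only once
        System.out.println(names.size());
    }
}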

Usage

From source file:org.mskcc.pathdb.action.web_api.NeighborhoodMapRetriever.java

/**
 * Gets neighborhood map size.
 *
 * @param xdebug   XDebug
 * @param protocolRequest ProtocolRequest
 * @return NeighborhoodMapSize
 * @throws DaoException
 */
public NeighborhoodMapSize getNeighborhoodMapSize(XDebug xdebug, ProtocolRequest protocolRequest)
        throws DaoException {

    NeighborhoodMapSize toReturn = new NeighborhoodMapSize();
    HashSet<String> filteredBinaryInteractionParticipants = new HashSet<String>();
    try {
        // get list of neighbor ids
        if (!MEMBER_VARS_SET)
            setMemberVars(xdebug, protocolRequest);
        long[] neighborIDs = getNeighborIDs();

        log.info("NeighborhoodMapRetriever.getNeighborMapSize(), before sif conversion: "
                + Long.toString(neighborIDs.length));
        if (neighborIDs.length == 0)
            return toReturn;

        // create sif assembly
        XmlAssembly biopaxAssembly = XmlAssemblyFactory.createXmlAssembly(neighborIDs, XmlRecordType.BIO_PAX, 1,
                XmlAssemblyFactory.XML_FULL, true, new XDebug());
        toReturn.biopaxAssembly = biopaxAssembly;

        WebUIBean webUIBean = new WebUIBean();
        webUIBean.setConverterThreshold(SIF_CONVERTER_THRESHOLD);
        BinaryInteractionUtil binaryInteractionUtil = new BinaryInteractionUtil(webUIBean);
        BinaryInteractionAssembly sifAssembly = BinaryInteractionAssemblyFactory.createAssembly(
                BinaryInteractionAssemblyFactory.AssemblyType.SIF, binaryInteractionUtil,
                binaryInteractionUtil.getRuleTypes(), biopaxAssembly.getXmlString());

        // filter out unwanted interactions
        String[] binaryInteractionStringArray = sifAssembly.getBinaryInteractionString().split("\n");
        for (String binaryInteractionString : binaryInteractionStringArray) {
            if (binaryInteractionString != null) {
                // sif format:  ID\tINTERACTION_TYPE\tID
                String[] components = binaryInteractionString.split("\t");
                if (components.length == 3) {
                    // populate filteredBinaryInteractionParticipants (neighbors in map)
                    if (!UNWANTED_INTERACTIONS.contains(components[1])) {
                        filteredBinaryInteractionParticipants.add(components[0]);
                        filteredBinaryInteractionParticipants.add(components[2]);
                    }
                }
            }
        }
    } catch (Exception e) {
        log.info("NeighborhoodMapRetriever.getNeighborMapSize(), Exception caught: " + e.getMessage()
                + ", PHYSICAL_ENTITY_RECORD_ID: " + Long.toString(PHYSICAL_ENTITY_RECORD_ID));
        if (e instanceof MaximumInteractionThresholdExceedException) {
            toReturn.sifNeighborhoodSize = Integer.MAX_VALUE;
        }
        return toReturn;
    }

    log.info("NeighborhoodMapRetriever.getNeighborMapSize(), after sif conversion: "
            + filteredBinaryInteractionParticipants.size());

    // outta here
    toReturn.sifNeighborhoodSize = filteredBinaryInteractionParticipants.size();
    return toReturn;
}
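
The core of the example above is collecting the two participant columns of each retained SIF line into a HashSet and reporting its size(). A minimal, self-contained sketch of that pattern (the class name and sample data below are hypothetical; the pathway-specific classes are left out):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SifParticipantCount {
    public static void main(String[] args) {
        String sif = "A\tINTERACTS_WITH\tB\nB\tIN_SAME_COMPONENT\tC\nA\tINTERACTS_WITH\tC";
        Set<String> unwanted = new HashSet<>(Arrays.asList("IN_SAME_COMPONENT"));
        HashSet<String> participants = new HashSet<>();
        for (String line : sif.split("\n")) {
            String[] cols = line.split("\t"); // sif format: ID\tINTERACTION_TYPE\tID
            if (cols.length == 3 && !unwanted.contains(cols[1])) {
                participants.add(cols[0]);
                participants.add(cols[2]);
            }
        }
        // size() gives the neighborhood size: 3 distinct participants (A, B, C)
        System.out.println(participants.size());
    }
}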

From source file:org.apache.hadoop.hdfs.server.blockmanagement.TestBlockReportRateLimiting.java

@Test(timeout = 180000)
public void testRateLimitingDuringDataNodeStartup() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(DFS_NAMENODE_MAX_FULL_BLOCK_REPORT_LEASES, 1);
    conf.setLong(DFS_NAMENODE_FULL_BLOCK_REPORT_LEASE_LENGTH_MS, 20L * 60L * 1000L);

    final Semaphore fbrSem = new Semaphore(0);
    final HashSet<DatanodeID> expectedFbrDns = new HashSet<>();
    final HashSet<DatanodeID> fbrDns = new HashSet<>();
    final AtomicReference<String> failure = new AtomicReference<String>("");

    final BlockManagerFaultInjector injector = new BlockManagerFaultInjector() {
        private int numLeases = 0;

        @Override
        public void incomingBlockReportRpc(DatanodeID nodeID, BlockReportContext context) throws IOException {
            LOG.info("Incoming full block report from " + nodeID + ".  Lease ID = 0x"
                    + Long.toHexString(context.getLeaseId()));
            if (context.getLeaseId() == 0) {
                setFailure(failure,
                        "Got unexpected rate-limiting-" + "bypassing full block report RPC from " + nodeID);
            }
            fbrSem.acquireUninterruptibly();
            synchronized (this) {
                fbrDns.add(nodeID);
                if (!expectedFbrDns.remove(nodeID)) {
                    setFailure(failure, "Got unexpected full block report " + "RPC from " + nodeID
                            + ".  expectedFbrDns = " + Joiner.on(", ").join(expectedFbrDns));
                }
                LOG.info("Proceeding with full block report from " + nodeID + ".  Lease ID = 0x"
                        + Long.toHexString(context.getLeaseId()));
            }
        }

        @Override
        public void requestBlockReportLease(DatanodeDescriptor node, long leaseId) {
            if (leaseId == 0) {
                return;
            }
            synchronized (this) {
                numLeases++;
                expectedFbrDns.add(node);
                LOG.info("requestBlockReportLease(node=" + node + ", leaseId=0x" + Long.toHexString(leaseId)
                        + ").  " + "expectedFbrDns = " + Joiner.on(", ").join(expectedFbrDns));
                if (numLeases > 1) {
                    setFailure(failure, "More than 1 lease was issued at once.");
                }
            }
        }

        @Override
        public void removeBlockReportLease(DatanodeDescriptor node, long leaseId) {
            LOG.info("removeBlockReportLease(node=" + node + ", leaseId=0x" + Long.toHexString(leaseId) + ")");
            synchronized (this) {
                numLeases--;
            }
        }
    };
    BlockManagerFaultInjector.instance = injector;

    final int NUM_DATANODES = 5;
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATANODES).build();
    cluster.waitActive();
    for (int n = 1; n <= NUM_DATANODES; n++) {
        LOG.info("Waiting for " + n + " datanode(s) to report in.");
        fbrSem.release();
        Uninterruptibles.sleepUninterruptibly(20, TimeUnit.MILLISECONDS);
        final int currentN = n;
        GenericTestUtils.waitFor(new Supplier<Boolean>() {
            @Override
            public Boolean get() {
                synchronized (injector) {
                    if (fbrDns.size() > currentN) {
                        setFailure(failure,
                                "Expected at most " + currentN
                                        + " datanodes to have sent a block report, but actually "
                                        + fbrDns.size() + " have.");
                    }
                    return (fbrDns.size() >= currentN);
                }
            }
        }, 25, 50000);
    }
    cluster.shutdown();
    Assert.assertEquals("", failure.get());
}
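
The test waits by polling the size of a HashSet that is only written to under a lock. A stripped-down sketch of that idea, outside of the Hadoop test harness (the class, thread and node names below are made up):

import java.util.HashSet;

public class WaitForReporters {
    public static void main(String[] args) throws InterruptedException {
        final HashSet<String> reported = new HashSet<>();
        final Object lock = new Object();
        final int expected = 3;

        // Simulate reporters checking in from other threads.
        for (String node : new String[] { "dn1", "dn2", "dn2", "dn3" }) {
            new Thread(() -> {
                synchronized (lock) {
                    reported.add(node); // duplicates collapse, so size() counts distinct nodes
                }
            }).start();
        }

        // Poll size() under the same lock until all distinct reporters have arrived.
        while (true) {
            synchronized (lock) {
                if (reported.size() >= expected) {
                    System.out.println("All " + expected + " distinct nodes reported");
                    break;
                }
            }
            Thread.sleep(10);
        }
    }
}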

From source file:es.tekniker.framework.ktek.questionnaire.mng.server.QuestionnaireMngServer.java

private KtekQuestionnaireModelEntity[] getQuestionnaires4TypeMonitoringActivityPlannedData(int idUser,
        String codtelecareprogram, int idLang, short typeMonitoringActivity, short status)
        throws KtekExceptionEntity {
    KtekQuestionnaireModelEntity[] array = null;
    Ktek_questionnaire[] arrayInstance = null;
    KtekQuestionnaireModelEntity instance = null;
    QuestionnaireManagerDB db = null;
    Ktek_user user = null;
    HashSet<Ktek_questionnaire> questionnaireIdHash;
    Ktek_questionnaire instanceQuestionaire = null;

    db = new QuestionnaireManagerDB();
    try {
        user = UtilsQuestionnaire.getUserByIdUser(idUser);
        arrayInstance = db.getQuestionnaires4TypeMonitoringActivityPlanned(typeMonitoringActivity,
                user.getKtek_uk_coduser(), codtelecareprogram, status);
    } catch (Exception e) {
        e.printStackTrace();
    }

    if (arrayInstance != null && arrayInstance.length > 0) {
        array = new KtekQuestionnaireModelEntity[arrayInstance.length];
        for (int i = 0; i < arrayInstance.length; i++) {
            instance = BO2Entity.QuestionnaireModelBaseBO2Entity(idLang, arrayInstance[i]);
            array[i] = instance;
        }

        questionnaireIdHash = new HashSet<Ktek_questionnaire>();
        for (int i = 0; i < arrayInstance.length; i++) {
            if (questionnaireIdHash.contains(arrayInstance[i]) == false) {
                questionnaireIdHash.add(arrayInstance[i]);
            }
        }

        array = new KtekQuestionnaireModelEntity[questionnaireIdHash.size()];

        Iterator<Ktek_questionnaire> it = questionnaireIdHash.iterator();
        int i = 0;
        while (it.hasNext()) {
            instanceQuestionaire = it.next();
            instance = BO2Entity.QuestionnaireModelBaseBO2Entity(idLang, instanceQuestionaire);
            array[i] = instance;
            i = i + 1;
        }
    } else {
        //array = new KtekQuestionnaireModelEntity[1];
    }

    return array;
}

From source file:org.rhq.enterprise.server.resource.ResourceManagerBean.java

public Map<Integer, String> getResourcesAncestry(Subject subject, Integer[] resourceIds,
        ResourceAncestryFormat format) {
    Map<Integer, String> result = new HashMap<Integer, String>(resourceIds.length);

    if (resourceIds.length == 0) {
        return result;
    }

    ResourceCriteria resourceCriteria = new ResourceCriteria();
    resourceCriteria.addFilterIds(resourceIds);
    resourceCriteria.fetchResourceType(true);
    List<Resource> resources = findResourcesByCriteria(subject, resourceCriteria);

    if (ResourceAncestryFormat.RAW == format) {
        for (Resource resource : resources) {
            result.put(resource.getId(), resource.getAncestry());
        }
        return result;
    }

    HashSet<Integer> typesSet = new HashSet<Integer>();
    HashSet<String> ancestries = new HashSet<String>();
    for (Resource resource : resources) {
        ResourceType type = resource.getResourceType();
        if (type != null) {
            typesSet.add(type.getId());
        }
        ancestries.add(resource.getAncestry());
    }

    // In addition to the types of the result resources, get the types of their ancestry
    typesSet.addAll(getAncestryTypeIds(ancestries));

    ResourceTypeCriteria resourceTypeCriteria = new ResourceTypeCriteria();
    resourceTypeCriteria.addFilterIds(typesSet.toArray(new Integer[typesSet.size()]));
    List<ResourceType> types = typeManager.findResourceTypesByCriteria(subject, resourceTypeCriteria);

    for (Resource resource : resources) {
        String decodedAncestry = getDecodedAncestry(resource, types, format);
        result.put(resource.getId(), decodedAncestry);
    }
    return result;
}
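
The toArray call above sizes its target array with typesSet.size(), a common HashSet idiom for getting back an exactly-sized, correctly-typed array. The same idiom in isolation (the ids are invented):

import java.util.HashSet;

public class ToArraySizing {
    public static void main(String[] args) {
        HashSet<Integer> typeIds = new HashSet<>();
        typeIds.add(10);
        typeIds.add(20);
        typeIds.add(10); // duplicate, not stored twice

        // size() supplies the exact length, so toArray fills and returns this array
        Integer[] ids = typeIds.toArray(new Integer[typeIds.size()]);
        System.out.println(ids.length); // 2
    }
}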

From source file:org.openanzo.datasource.nodecentric.internal.NodeCentricDatasource.java

private boolean checkIfTablesExists(Connection connection, boolean none) throws AnzoException {
    ResultSet rs = null;
    try {
        long currentVersion = none ? SCHEMA_VERSION : getCurrentVersion(connection);

        boolean tables = true;
        boolean sequences = false;
        boolean views = false;
        try {
            rs = connection.getMetaData().getTableTypes();
            while (rs.next() && (!tables || !sequences || !views)) {
                String type = rs.getString(1);
                if (type.toUpperCase().equals(table)) {
                    tables = true;
                } else if (type.toUpperCase().equals(seq)) {
                    sequences = true;
                } else if (type.toUpperCase().equals(view)) {
                    views = true;
                }
            }
        } finally {
            if (rs != null) {
                rs.close();
            }
        }
        if (tables) {
            try {
                rs = connection.getMetaData().getTables(null, null, null, new String[] { table });

                HashSet<String> requiredTables = new HashSet<String>();
                requiredTables.add(serverUpper);
                java.util.Collections.addAll(requiredTables, resetService.getRequiredTables());
                java.util.Collections.addAll(requiredTables, resetService.getNodeCentricTables());
                while (rs.next()) {
                    String tbl = rs.getString(3);
                    if (requiredTables.remove(tbl.toUpperCase()) && none) {
                        throw new AnzoException(ExceptionConstants.RDB.INCOMPLETE_DATABASE);
                    }
                    if (tbl.toUpperCase().equals("ANZO_U")) {
                        ResultSet metadata = connection.getMetaData().getColumns(null, null, tbl, null);
                        while (metadata.next()) {
                            String name = metadata.getString(4);
                            if (name.toUpperCase().equals("VALUE")) {
                                int size = metadata.getInt(7);
                                configuration.setMaxLongObjectLength(size);
                                nodeLayout.setMaxLength(size);
                                break;
                            }
                        }
                    }
                }
                if (!none && requiredTables.size() > 0) {
                    throw new AnzoException(ExceptionConstants.RDB.FAILED_GETTING_TABLE_STATUS,
                            Arrays.toString(requiredTables.toArray()));
                }
            } finally {
                if (rs != null) {
                    rs.close();
                }
            }
        }
        if (sequences) {
            String seqs[][] = resetService.getRequiredSequences();
            for (int i = 0; i < currentVersion; i++) {
                String vseq[] = seqs[i];
                if (vseq != null && vseq.length > 0) {
                    try {
                        rs = connection.getMetaData().getTables(null, null, null, new String[] { seq });
                        HashSet<String> requiredSeq = new HashSet<String>();
                        java.util.Collections.addAll(requiredSeq, vseq);
                        while (rs.next()) {
                            String tbl = rs.getString(3);
                            if (requiredSeq.remove(tbl.toUpperCase()) && none) {
                                throw new AnzoException(ExceptionConstants.RDB.INCOMPLETE_DATABASE);
                            }
                        }
                        if (!none && requiredSeq.size() > 0) {
                            throw new AnzoException(ExceptionConstants.RDB.FAILED_GETTING_TABLE_STATUS,
                                    Arrays.toString(requiredSeq.toArray()));
                        }
                    } finally {
                        if (rs != null) {
                            rs.close();
                        }
                    }
                }
            }
        }
        if (views) {
            try {
                rs = connection.getMetaData().getTables(null, null, null, new String[] { view });

                HashSet<String> required = new HashSet<String>();
                if (currentVersion < 12) {
                    required.add("ALL_STMTS_VIEW");
                } else {
                    java.util.Collections.addAll(required, resetService.getRequiredViews());
                }
                while (rs.next()) {
                    String tbl = rs.getString(3);
                    if (required.remove(tbl.toUpperCase()) && none) {
                        throw new AnzoException(ExceptionConstants.RDB.INCOMPLETE_DATABASE);
                    }
                }
                if (!none && required.size() > 0) {
                    throw new AnzoException(ExceptionConstants.RDB.FAILED_GETTING_TABLE_STATUS,
                            Arrays.toString(required.toArray()));
                }
            } finally {
                if (rs != null) {
                    rs.close();
                }
            }
        }
    } catch (SQLException e) {
        log.error(LogUtils.RDB_MARKER, "Error checking if statements exist", e);
        throw new AnzoException(ExceptionConstants.RDB.FAILED_INITIALZE_DB, e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.debug(LogUtils.RDB_MARKER, "Error closing result set", e);

            }
        }
    }
    return true;
}
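
The schema check removes every table, sequence and view it encounters from a set of required names and then treats a non-zero size() as "something required is missing". The same pattern in isolation (the names below are invented):

import java.util.Arrays;
import java.util.HashSet;

public class RequiredItemsCheck {
    public static void main(String[] args) {
        HashSet<String> required = new HashSet<>(Arrays.asList("SERVER", "GRAPHS", "NODES"));
        String[] found = { "SERVER", "NODES", "EXTRA_TABLE" };

        for (String name : found) {
            required.remove(name.toUpperCase()); // remove() is a no-op for names we don't require
        }

        // Anything still in the set was required but never found
        if (required.size() > 0) {
            System.out.println("Missing: " + required); // Missing: [GRAPHS]
        }
    }
}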

From source file:com.emc.storageos.volumecontroller.impl.plugins.IsilonCommunicationInterface.java

/**
 * Generate Export Map for UnManagedFileSystem
 * Ignore exports with multiple exports for the same path
 * Ignore exports that have multiple security flavors
 * Ignore exports with multiple paths
 * Ignore exports not found on the array
 * Ignore exports which have the same internal export key ( <sec, perm, root-mapping>)
 * 
 * @param umfs
 * @param isilonExportIds
 * @param storagePort
 * @param fsPath
 * @param isilonApi
 * @return list of UnManagedFileExportRule objects
 */
private List<UnManagedFileExportRule> getUnManagedFSExportRules(UnManagedFileSystem umfs,
        StoragePort storagePort, HashSet<Integer> isilonExportIds, String fsPath, String zoneName,
        IsilonApi isilonApi) {

    List<UnManagedFileExportRule> expRules = new ArrayList<UnManagedFileExportRule>();
    ArrayList<IsilonExport> isilonExports = new ArrayList<IsilonExport>();

    if (isilonExportIds != null && isilonExportIds.size() > 1) {
        _log.info("Ignoring file system {}, Multiple exports found {} ", fsPath, isilonExportIds.size());
    }

    for (Integer expId : isilonExportIds) {
        IsilonExport exp = getIsilonExport(isilonApi, expId, zoneName);
        if (exp == null) {
            _log.info("Ignoring file system {}, export {} not found", fsPath, expId);
        } else if (exp.getSecurityFlavors().size() > 1) {
            _log.info("Ignoring file system {}, multiple security flavors {} found", fsPath,
                    exp.getSecurityFlavors().toString());
        } else if (exp.getPaths().size() > 1) {
            _log.info("Ignoring file system {}, multiple paths {} found", fsPath, exp.getPaths().toString());
        } else {
            isilonExports.add(exp);
        }
    }

    for (IsilonExport exp : isilonExports) {
        String securityFlavor = exp.getSecurityFlavors().get(0);
        // Isilon Maps sys to unix and we do this conversion during export from ViPR
        if (securityFlavor.equalsIgnoreCase(UNIXSECURITY)) {
            securityFlavor = SYSSECURITY;
        }

        String path = exp.getPaths().get(0);

        // Get User
        String rootUserMapping = "";
        String mapAllUserMapping = "";
        if (exp.getMap_root() != null && exp.getMap_root().getUser() != null) {
            rootUserMapping = exp.getMap_root().getUser();
        } else if (exp.getMap_all() != null && exp.getMap_all().getUser() != null) {
            mapAllUserMapping = exp.getMap_all().getUser();
        }

        String resolvedUser = (rootUserMapping != null && (!rootUserMapping.isEmpty())) ? rootUserMapping
                : mapAllUserMapping;

        UnManagedFileExportRule expRule = new UnManagedFileExportRule();
        expRule.setExportPath(path);
        expRule.setSecFlavor(securityFlavor);
        expRule.setAnon(resolvedUser);
        expRule.setDeviceExportId(exp.getId().toString());
        expRule.setFileSystemId(umfs.getId());
        expRule.setMountPoint(storagePort.getPortNetworkId() + ":" + fsPath);

        if (exp != null && exp.getReadOnlyClients() != null && !exp.getReadOnlyClients().isEmpty()) {
            expRule.setReadOnlyHosts(new StringSet(exp.getReadOnlyClients()));
        }

        if (exp != null && exp.getReadWriteClients() != null && !exp.getReadWriteClients().isEmpty()) {
            expRule.setReadWriteHosts(new StringSet(exp.getReadWriteClients()));
        }

        if (exp != null && exp.getRootClients() != null && !exp.getRootClients().isEmpty()) {
            expRule.setRootHosts(new StringSet(exp.getRootClients()));
        }

        if (exp.getReadOnlyClients() != null && exp.getReadWriteClients() != null
                && exp.getRootClients() != null) {
            // Check Clients size
            if (exp.getReadOnlyClients().isEmpty() && exp.getReadWriteClients().isEmpty()
                    && exp.getRootClients().isEmpty()) {
                // All hosts case. Check whether it is RO/RW/ROOT

                if (exp.getReadOnly()) {
                    // This is a read only export for all hosts
                    expRule.setReadOnlyHosts(new StringSet(exp.getClients()));
                } else {
                    // Not read Only case
                    if (exp.getMap_all() != null && exp.getMap_all().getUser() != null
                            && exp.getMap_all().getUser().equalsIgnoreCase(ROOT)) {
                        // All hosts with root permission
                        expRule.setRootHosts(new StringSet(exp.getClients()));

                    } else if (exp.getMap_all() != null) {
                        // All hosts with RW permission
                        expRule.setReadWriteHosts(new StringSet(exp.getClients()));
                    }
                }
            }
        }
        expRules.add(expRule);
    }

    return expRules;
}

From source file:net.ravendb.client.connection.ServerClient.java

private MultiLoadResult completeMultiGet(final OperationMetadata operationMetadata, final String[] keys,
        final String[] includes, final String transformer, final Map<String, RavenJToken> transformerParameters,
        RavenJToken result) {
    ErrorResponseException responseException;
    try {

        HashSet<String> uniqueKeys = new HashSet<>(Arrays.asList(keys));

        List<RavenJObject> results = new ArrayList<>();
        for (RavenJToken token : result.value(RavenJArray.class, "Results")) {
            if (token instanceof RavenJObject) {
                results.add((RavenJObject) token);
            }
        }

        Map<String, RavenJObject> documents = new HashMap<>();
        for (RavenJObject doc : results) {
            if (doc.containsKey("@metadata") && doc.get("@metadata").value(String.class, "@id") != null) {
                documents.put(doc.get("@metadata").value(String.class, "@id"), doc);
            }
        }

        if (results.size() >= uniqueKeys.size()) {
            for (int i = 0; i < uniqueKeys.size(); i++) {
                String key = keys[i];
                if (documents.containsKey(key)) {
                    continue;
                }
                documents.put(key, results.get(i));
            }
        }

        MultiLoadResult multiLoadResult = new MultiLoadResult();

        List<RavenJObject> includesList = new ArrayList<>();
        for (RavenJToken token : result.value(RavenJArray.class, "Includes")) {
            includesList.add((RavenJObject) token);
        }
        multiLoadResult.setIncludes(includesList);

        List<RavenJObject> resultsList = new ArrayList<>();
        for (String key : keys) {
            if (documents.containsKey(key)) {
                resultsList.add(documents.get(key));
            } else {
                resultsList.add(null);
            }
        }
        multiLoadResult.setResults(resultsList);

        List<RavenJObject> docResults = new ArrayList<>();
        docResults.addAll(resultsList);
        docResults.addAll(includesList);

        return retryOperationBecauseOfConflict(operationMetadata, docResults, multiLoadResult,
                new Function0<MultiLoadResult>() {
                    @Override
                    public MultiLoadResult apply() {
                        return directGet(keys, operationMetadata, includes, transformer, transformerParameters,
                                false);
                    }
                }, null);
    } catch (ErrorResponseException e) {
        if (e.getStatusCode() != HttpStatus.SC_CONFLICT) {
            throw e;
        }
        responseException = e;
    }
    throw fetchConcurrencyException(responseException);
}
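
Here the HashSet is built straight from the key array via Arrays.asList, so uniqueKeys.size() is the number of distinct keys and can be compared with the number of results. The same step in isolation (the keys are hypothetical):

import java.util.Arrays;
import java.util.HashSet;

public class UniqueKeyCount {
    public static void main(String[] args) {
        String[] keys = { "users/1", "users/2", "users/1" };

        // The constructor deduplicates, so size() is the distinct-key count
        HashSet<String> uniqueKeys = new HashSet<>(Arrays.asList(keys));

        System.out.println(keys.length);       // 3 requested keys
        System.out.println(uniqueKeys.size()); // 2 distinct keys
    }
}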

From source file:com.emc.storageos.volumecontroller.impl.plugins.IsilonCommunicationInterface.java

/**
 * Generate Export Map for UnManagedFileSystem
 * Ignore exports with multiple exports for the same path
 * Ignore exports that have multiple security flavors
 * Ignore exports with multiple paths
 * Ignore exports not found on the array
 * Ignore exports which have the same internal export key ( <sec, perm, root-mapping>)
 * 
 * @param umfs
 * @param isilonExportIds
 * @param storagePort
 * @param fsPath
 * @param isilonApi
 * @return boolean
 */
private boolean getUnManagedFSExportMap(UnManagedFileSystem umfs, HashSet<Integer> isilonExportIds,
        StoragePort storagePort, String fsPath, String zoneName, IsilonApi isilonApi) {

    UnManagedFSExportMap exportMap = new UnManagedFSExportMap();

    int generatedExportCount = 0;

    ArrayList<IsilonExport> isilonExports = new ArrayList<IsilonExport>();

    if (isilonExportIds != null && isilonExportIds.size() > 1) {
        _log.info("Ignoring file system {}, Multiple exports found {} ", fsPath, isilonExportIds.size());
        return false;
    }

    for (Integer expId : isilonExportIds) {
        IsilonExport exp = getIsilonExport(isilonApi, expId, zoneName);
        if (exp == null) {
            _log.info("Ignoring file system {}, export {} not found", fsPath, expId);
            return false;
        } else if (exp.getSecurityFlavors().size() > 1) {
            _log.info("Ignoring file system {}, multiple security flavors {} found", fsPath,
                    exp.getSecurityFlavors().toString());
            return false;
        } else if (exp.getPaths().size() > 1) {
            _log.info("Ignoring file system {}, multiple paths {} found", fsPath, exp.getPaths().toString());
            return false;
        } else {
            isilonExports.add(exp);
        }
    }

    for (IsilonExport exp : isilonExports) {
        String securityFlavor = exp.getSecurityFlavors().get(0);
        // Isilon Maps sys to unix and we do this conversion during export from ViPR
        if (securityFlavor.equalsIgnoreCase(UNIXSECURITY)) {
            securityFlavor = SYSSECURITY;
        }

        String path = exp.getPaths().get(0);

        // Get User
        String rootUserMapping = "";
        String mapAllUserMapping = "";
        if (exp.getMap_root() != null && exp.getMap_root().getUser() != null) {
            rootUserMapping = exp.getMap_root().getUser();
        } else if (exp.getMap_all() != null && exp.getMap_all().getUser() != null) {
            mapAllUserMapping = exp.getMap_all().getUser();
        }

        String resolvedUser = (rootUserMapping != null && (!rootUserMapping.isEmpty())) ? rootUserMapping
                : mapAllUserMapping;

        if (exp != null && exp.getReadOnlyClients() != null && !exp.getReadOnlyClients().isEmpty()) {
            UnManagedFSExport unManagedROFSExport = new UnManagedFSExport(exp.getReadOnlyClients(),
                    storagePort.getPortName(), storagePort.getPortName() + ":" + path, securityFlavor, RO,
                    resolvedUser, NFS, storagePort.getPortName(), path, exp.getPaths().get(0));
            unManagedROFSExport.setIsilonId(exp.getId().toString());
            exportMap.put(unManagedROFSExport.getFileExportKey(), unManagedROFSExport);
            generatedExportCount++;
        }

        if (exp != null && exp.getReadWriteClients() != null && !exp.getReadWriteClients().isEmpty()) {
            UnManagedFSExport unManagedRWFSExport = new UnManagedFSExport(exp.getReadWriteClients(),
                    storagePort.getPortName(), storagePort.getPortName() + ":" + path, securityFlavor, RW,
                    resolvedUser, NFS, storagePort.getPortName(), path, exp.getPaths().get(0));
            unManagedRWFSExport.setIsilonId(exp.getId().toString());
            exportMap.put(unManagedRWFSExport.getFileExportKey(), unManagedRWFSExport);
            generatedExportCount++;
        }

        if (exp != null && exp.getRootClients() != null && !exp.getRootClients().isEmpty()) {
            UnManagedFSExport unManagedROOTFSExport = new UnManagedFSExport(exp.getRootClients(),
                    storagePort.getPortName(), storagePort.getPortName() + ":" + path, securityFlavor, ROOT,
                    resolvedUser, NFS, storagePort.getPortName(), path, path);
            unManagedROOTFSExport.setIsilonId(exp.getId().toString());
            exportMap.put(unManagedROOTFSExport.getFileExportKey(), unManagedROOTFSExport);
            generatedExportCount++;
        }

        if (exp.getReadOnlyClients() != null && exp.getReadWriteClients() != null
                && exp.getRootClients() != null) {
            // Check Clients size
            if (exp.getReadOnlyClients().isEmpty() && exp.getReadWriteClients().isEmpty()
                    && exp.getRootClients().isEmpty()) {
                // All hosts case. Check whether it is RO/RW/ROOT

                if (exp.getReadOnly()) {
                    // This is a read only export for all hosts
                    UnManagedFSExport unManagedROFSExport = new UnManagedFSExport(exp.getClients(),
                            storagePort.getPortName(), storagePort.getPortName() + ":" + path, securityFlavor,
                            RO, rootUserMapping, NFS, storagePort.getPortName(), path, path);
                    unManagedROFSExport.setIsilonId(exp.getId().toString());
                    exportMap.put(unManagedROFSExport.getFileExportKey(), unManagedROFSExport);
                    generatedExportCount++;
                } else {
                    // Not read Only case
                    if (exp.getMap_all() != null && exp.getMap_all().getUser() != null
                            && exp.getMap_all().getUser().equalsIgnoreCase(ROOT)) {
                        // All hosts with root permission
                        UnManagedFSExport unManagedROOTFSExport = new UnManagedFSExport(exp.getClients(),
                                storagePort.getPortName(), storagePort.getPortName() + ":" + path,
                                securityFlavor, ROOT, mapAllUserMapping, NFS, storagePort.getPortName(), path,
                                path);
                        unManagedROOTFSExport.setIsilonId(exp.getId().toString());
                        exportMap.put(unManagedROOTFSExport.getFileExportKey(), unManagedROOTFSExport);
                        generatedExportCount++;

                    } else if (exp.getMap_all() != null) {
                        // All hosts with RW permission
                        UnManagedFSExport unManagedRWFSExport = new UnManagedFSExport(exp.getClients(),
                                storagePort.getPortName(), storagePort.getPortName() + ":" + path,
                                securityFlavor, RW, rootUserMapping, NFS, storagePort.getPortName(), path,
                                path);
                        unManagedRWFSExport.setIsilonId(exp.getId().toString());
                        exportMap.put(unManagedRWFSExport.getFileExportKey(), unManagedRWFSExport);
                        generatedExportCount++;
                    }
                }
            }
        }
    }

    if (exportMap.values().size() < generatedExportCount) {
        // The keys are not unique and so all the exports are not valid
        _log.info(
                "Ignoring Exports because they have multiple exports with the same internal export key <sec, perm, root-mapping>. Expected {} got {}",
                generatedExportCount, exportMap.values().size());
        return false;
    }

    // Return valid
    UnManagedFSExportMap allExportMap = umfs.getFsUnManagedExportMap();
    if (allExportMap == null) {
        allExportMap = new UnManagedFSExportMap();
    }
    allExportMap.putAll(exportMap);
    umfs.setFsUnManagedExportMap(allExportMap);

    return true;
}

From source file:edu.ucla.cs.scai.canali.core.index.BuildIndex.java

private void loadTriples() throws Exception {
    HashMap<String, Integer> propertyFrequency = new HashMap<>();
    HashSet<String> shortProperties = new HashSet<>();
    if (minPropertyLength > 1) {
        System.out.println(
                "Finding propertys to be ignored because they have lenght less than " + minPropertyLength);
        int i = 0;
        try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "property_labels"))) {
            String l;
            while ((l = in.readLine()) != null) {
                i++;
                if (l.length() > 0) {
                    try {
                        StringTokenizer st = new StringTokenizer(l, "\t<> ");
                        String uri = st.nextToken().trim();
                        if (uri.startsWith("http")) {
                            String label = st.hasMoreTokens() ? st.nextToken().trim() : "";
                            if (label.length() < minPropertyLength && !shortProperties.contains(uri)) {
                                shortProperties.add(uri);
                                System.out
                                        .println("Property " + uri + " will be ignored, having label " + label);
                                propertyFrequency.put(uri, 0);
                            }
                        }
                    } catch (Exception e) {
                        System.out.println("Error at line " + i + ": " + l);
                        e.printStackTrace();
                    }
                }
            }
        }
        System.out.println(shortProperties.size() + " properties will be ignored, having length less than "
                + minPropertyLength);
    }
    int maxNumberOfProperties = 100000;
    System.out.println("Finding the the " + maxNumberOfProperties
            + " most frequent propertys of the propertys whose label has at least two characters");
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            if (l.contains("classDegree")) {
                System.out.print("");
            }
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String subject = st.nextToken();
            String property = st.nextToken();
            String value = st.nextToken();
            if (subject.startsWith("http") && property.startsWith("http")
                    && !shortProperties.contains(property)) {
                if (value.startsWith("http") || value.startsWith("ftp:")) { //it is an entity
                    Integer c = propertyFrequency.get(property);
                    if (c == null) {
                        propertyFrequency.put(property, 1);
                    } else {
                        propertyFrequency.put(property, 1 + c);
                    }
                } else { //it is a literal
                    if (value.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            Integer c = propertyFrequency.get(property);
                            if (c == null) {
                                propertyFrequency.put(property, 1);
                            } else {
                                propertyFrequency.put(property, 1 + c);
                            }
                        } else {
                            System.out.println("Basic type not recognized in " + l);
                        }
                    } else {
                        if (value.startsWith("\"")) { //it is a String
                            Integer c = propertyFrequency.get(property);
                            if (c == null) {
                                propertyFrequency.put(property, 1);
                            } else {
                                propertyFrequency.put(property, 1 + c);
                            }
                        } else {
                            System.out.println("Basic type not recognized in " + l);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Scanned " + (n / 1000000) + "M triples");
                }
            } else {
                //System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    shortProperties = null;
    System.gc();
    ArrayList<Map.Entry<String, Integer>> f = new ArrayList<>(propertyFrequency.entrySet());
    Collections.sort(f, new Comparator<Map.Entry<String, Integer>>() {
        @Override
        public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
            return Integer.compare(o2.getValue(), o1.getValue());
        }
    });
    int minFreq = 1;
    if (f.size() > maxNumberOfProperties) {
        minFreq = f.get(maxNumberOfProperties - 1).getValue();
        if (f.get(maxNumberOfProperties).equals(f.get(maxNumberOfProperties - 1))) {
            minFreq++;
        }
    }
    for (Map.Entry<String, Integer> e : f) {
        System.out.println(e.getKey() + "\t" + e.getValue());
    }
    System.out.println("Keeping propertys with at least " + minFreq + " occurrences");
    HashSet<String> acceptedProperties = new HashSet<>();
    for (Map.Entry<String, Integer> e : propertyFrequency.entrySet()) {
        if (e.getValue() >= minFreq) {
            acceptedProperties.add(e.getKey());
        }
    }
    System.out.println(acceptedProperties.size() + " properties kept out of " + f.size());
    f = null;
    propertyFrequency = null;
    System.gc();
    System.out.println("Mapping entities and property URIs to ids");
    int nEntityTriples = 0;
    HashMap<String, Integer> nLiteralTriples = new HashMap<>();
    for (String type : literalTypes) {
        nLiteralTriples.put(type, 0);
    }
    HashSet<String> unrecognizedBasicTypes = new HashSet<>();
    //count entity-valued and literal-valued triples
    //and
    //create the association between uris and ids for entities        
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String subject = st.nextToken();
            String property = st.nextToken();
            if (!acceptedProperties.contains(property)) {
                l = in.readLine();
                continue;
            }
            String value = st.nextToken();
            if (subject.startsWith("http") && property.startsWith("http")) {
                Integer idSbj = getEntityIdFromUri(subject); //entityIdFromUri.get(subject);
                if (idSbj == null) {
                    idSbj = entityIdFromUriWithPrefix.size() + 1;//entityIdFromUri.size() + 1;
                    putEntityIdFromUri(subject, idSbj); //entityIdFromUri.put(subject, idSbj);
                }
                Integer idAttr = propertyIdFromUri.get(property);
                if (idAttr == null) {
                    idAttr = propertyIdFromUri.size() + 1;
                    propertyIdFromUri.put(property, idAttr);
                }
                if (value.startsWith("http") || value.startsWith("ftp:")) { //it is an entity
                    Integer idVal = getEntityIdFromUri(value); //entityIdFromUri.get(value);
                    if (idVal == null) {
                        idVal = entityIdFromUriWithPrefix.size() + 1;//entityIdFromUri.size() + 1;
                        putEntityIdFromUri(value, idVal);//entityIdFromUri.put(value, idVal);
                    }
                    Integer idInvAttr = propertyIdFromUri.get(property + "Inv");
                    if (idInvAttr == null) {
                        idInvAttr = propertyIdFromUri.size() + 1;
                        propertyIdFromUri.put(property + "Inv", idInvAttr);
                    }
                    nEntityTriples += 2;
                } else { //it is a literal
                    if (value.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            nLiteralTriples.put(literalType, nLiteralTriples.get(literalType) + 1);
                        } else {
                            if (!unrecognizedBasicTypes.contains(type)) {
                                System.out.println("Unrecognized type: " + type);
                                System.out.println("in line: " + l);
                                unrecognizedBasicTypes.add(type);
                            }
                        }
                    } else {
                        if (value.startsWith("\"")) { //it is a String
                            nLiteralTriples.put(STRING, nLiteralTriples.get(STRING) + 1);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Loaded " + (n / 1000000) + "M triples");
                }
            } else {
                System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    System.out.println("Number of triples with entity value: " + nEntityTriples);
    for (String type : literalTypes) {
        System.out.println("Number of triples with " + type + " value: " + nLiteralTriples.get(type));
    }
    entityTriplesSubjects = new int[nEntityTriples];
    entityTriplesProperties = new int[nEntityTriples];
    entityTriplesValues = new int[nEntityTriples];
    for (String type : literalTypes) {
        literalTriplesSubjects.put(type, new int[nLiteralTriples.get(type)]);
        literalTriplesProperties.put(type, new int[nLiteralTriples.get(type)]);
    }
    // load the triples into the arrays created above
    System.out.println("Loading triples");
    try (BufferedReader in = new BufferedReader(new FileReader(basePathInput + "triples"))) {
        String l = in.readLine();
        int n = 0;
        while (l != null && l.length() > 0) {
            StringTokenizer st = new StringTokenizer(l, "<> \t");
            String sbj = st.nextToken();
            String attr = st.nextToken();
            if (!acceptedProperties.contains(attr)) {
                l = in.readLine();
                continue;
            }
            String val = st.nextToken();
            if (sbj.startsWith("http") && attr.startsWith("http")) {
                if (val.startsWith("http") || val.startsWith("ftp:")) { //it is an entity
                    updateTriples(sbj, attr, val, null);
                } else { //it is a literal
                    if (val.endsWith("^^")) { //it is a basic type
                        String type = StringEscapeUtils.unescapeJava(st.nextToken());
                        String literalType = basicTypesMapping.get(type);
                        if (literalType != null) {
                            updateTriples(sbj, attr, null, literalType);
                        } else {
                            if (!unrecognizedBasicTypes.contains(type)) {
                                System.out.println("Unrecognized type: " + type);
                                System.out.println("in line: " + l);
                                unrecognizedBasicTypes.add(type);
                            }
                        }
                    } else {
                        if (val.startsWith("\"")) { //it is a String
                            updateTriples(sbj, attr, null, STRING);
                        } else {
                            System.out.println("Unexpected line: " + l);
                        }
                    }
                }
                n++;
                if (n % 1000000 == 0) {
                    System.out.println("Loaded " + (n / 1000000) + "M triples");
                }
            } else {
                System.out.println("Invalid triple: " + l);
            }
            l = in.readLine();
        }
    }
    System.out.println("Entity value triples: " + entityTriplesSubjects.length);
    for (String type : literalTriplesSubjects.keySet()) {
        System.out.println(type + " value triples: " + literalTriplesSubjects.get(type).length);
    }
    propertyUri = new String[propertyIdFromUri.size() + 1];
    for (Map.Entry<String, Integer> e : propertyIdFromUri.entrySet()) {
        propertyUri[e.getValue()] = e.getKey();
    }
    entityUriWithPrefix = new String[entityIdFromUriWithPrefix.size() + 1];
    for (Map.Entry<String, Integer> e : entityIdFromUriWithPrefix.entrySet()) {
        entityUriWithPrefix[e.getValue()] = e.getKey();
    }
    //entityUri = new String[entityIdFromUri.size() + 1];
    //for (Map.Entry<String, Integer> e : entityIdFromUri.entrySet()) {
    //    entityUri[e.getValue()] = e.getKey();
    //}
    entityLabels = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityLabels = new HashSet[entityIdFromUri.size() + 1];
    entityClasses = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityClasses = new HashSet[entityIdFromUri.size() + 1];
    propertyLabels = new HashSet[propertyIdFromUri.size() + 1];
    entityOutProperties = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityOutProperties = new HashSet[entityIdFromUri.size() + 1];
    entityInProperties = new HashSet[entityIdFromUriWithPrefix.size() + 1]; //entityInProperties = new HashSet[entityIdFromUri.size() + 1];
    propertyOutProperties = new HashSet[propertyIdFromUri.size() + 1];
    propertyInProperties = new HashSet[propertyIdFromUri.size() + 1];
    propertyHasLiteralRange = new boolean[propertyIdFromUri.size() + 1];
    propertyCount = new int[propertyIdFromUri.size() + 1];
}

From source file:eionet.cr.harvest.scheduled.HarvestingJob.java

/**
 *
 * @throws DAOException
 */
private void handleBatchQueue() throws DAOException {

    // Even if it is not currently a batch harvesting hour, we still get the list of next scheduled sources and
    // loop over them, because there are specific sources for which the batch-harvesting hours should be ignored.
    // Currently these are sources whose harvest interval is less than 8 hours.

    if (isBatchHarvestingHour()) {
        LOGGER.trace("Handling batch queue...");
    }

    // Initialize batch queue collection.
    batchQueue = Collections.synchronizedList(new ArrayList<HarvestSourceDTO>());

    // Initialize collection for sources that will have to be deleted.
    HashSet<String> sourcesToDelete = new HashSet<String>();

    // Initialize harvest source DAO.
    HarvestSourceDAO sourceDao = DAOFactory.get().getDao(HarvestSourceDAO.class);

    // Get next scheduled sources.
    List<HarvestSourceDTO> nextScheduledSources = getNextScheduledSources();
    if (isBatchHarvestingHour()) {
        LOGGER.trace(nextScheduledSources.size() + " next scheduled sources found");
    }

    // Loop over next scheduled sources.
    for (HarvestSourceDTO sourceDTO : nextScheduledSources) {

        // If the source is marked with a permanent error, then increase its unavailability count if it's a
        // priority source, or simply delete it if it's not a priority source.
        // If the source is not marked with a permanent error, its unavailability count is >= 5, and it's a
        // non-priority source, then delete it.
        // In all other cases, add the harvest source to the batch-harvest queue.
        if (sourceDTO.isPermanentError()) {
            if (sourceDTO.isPrioritySource()) {
                LOGGER.trace("Increasing unavailability count of permanent-error priority source "
                        + sourceDTO.getUrl());
                sourceDao.increaseUnavailableCount(sourceDTO.getUrl());
            } else {
                LOGGER.debug(
                        sourceDTO.getUrl() + "  will be deleted as a non-priority source with permanent error");
                sourcesToDelete.add(sourceDTO.getUrl());
            }
        } else if (sourceDTO.getCountUnavail() >= 5) {
            if (!sourceDTO.isPrioritySource()) {
                LOGGER.debug(sourceDTO.getUrl()
                        + "  will be deleted as a non-priority source with unavailability >= 5");
                sourcesToDelete.add(sourceDTO.getUrl());
            }
        } else {
            batchQueue.add(sourceDTO);
        }
    }

    // Harvest the batch harvest queue (if anything added to it).
    for (Iterator<HarvestSourceDTO> iter = batchQueue.iterator(); iter.hasNext();) {

        HarvestSourceDTO sourceDTO = iter.next();

        // For sources whose interval is less than 8 hours, the batch harvesting hours don't apply.
        // They are always harvested.
        boolean ignoreBatchHarvestingHour = sourceDTO.getIntervalMinutes().intValue() < 480;
        if (isBatchHarvestingHour() || ignoreBatchHarvestingHour) {

            // Remove source from batch harvest queue before starting its harvest.
            iter.remove();

            LOGGER.trace("Going to batch-harvest " + sourceDTO.getUrl());
            pullHarvest(sourceDTO, false);
        }
    }

    // Delete sources that were found necessary to delete (if any).
    if (!sourcesToDelete.isEmpty()) {

        LOGGER.debug("Deleting " + sourcesToDelete.size() + " sources found above");
        for (Iterator<String> iter = sourcesToDelete.iterator(); iter.hasNext();) {

            String sourceUrl = iter.next();
            if (CurrentHarvests.contains(sourceUrl)) {
                iter.remove();
                LOGGER.debug("Skipping deletion of " + sourceUrl + " because it is currently being harvested");
            }
        }
        sourceDao.removeHarvestSources(sourcesToDelete);
    }
}