Example usage for java.util Hashtable size

List of usage examples for java.util Hashtable size

Introduction

On this page you can find example usage of java.util.Hashtable#size().

Prototype

public synchronized int size() 

Source Link

Document

Returns the number of keys in this hashtable.

Usage

From source file:corelyzer.ui.CorelyzerGLCanvas.java

private void handleAnnotationEvent(final String annotationTypeName, final String sessionname,
        final String trackname, final String secname, final float[] upperLeft, final float[] lowerRight)
        throws ClassNotFoundException, IllegalAccessException, InstantiationException {
    // A "cancel" pseudo-type means the user dismissed the annotation chooser.
    if (annotationTypeName.equalsIgnoreCase("cancel")) {
        return;
    }

    // Resolve the annotation type from the local type directory.
    AnnotationTypeDirectory typeDirectory = AnnotationTypeDirectory.getLocalAnnotationTypeDirectory();
    if (typeDirectory == null) {
        System.out.println("Null AnnotationTypeDirectory");
        return;
    }

    AnnotationType annotationType = typeDirectory.getAnnotationType(annotationTypeName);
    if (annotationType == null) {
        System.out.println("Null AnnotationType");
        return;
    }

    // Instantiate the dialog class registered for this annotation type.
    AbstractAnnotationDialog dialog = (AbstractAnnotationDialog) Class.forName(annotationType.getFormName())
            .newInstance();

    // Timestamp recorded with the new annotation.
    SimpleDateFormat timestampFormat = new SimpleDateFormat("MM/dd/yyy hh:mm:ss a z");
    String timestamp = timestampFormat.format(new Date(System.currentTimeMillis()));

    // Seed the dialog's attribute dictionary from the type's dictionary file.
    File dictFile = new File("resources/annotations/" + annotationType.getDictFilename());
    Hashtable<String, String> attributes = PropertyListUtility.generateHashtableFromFile(dictFile);

    System.out.println("- [DEBUG] Init empty dictionary with file: " + dictFile.getAbsolutePath() + ", size: "
            + attributes.size());

    dialog.setAttributes(attributes);
    dialog.setValueForKey("sessionname", sessionname);
    dialog.setValueForKey("trackname", trackname);
    dialog.setValueForKey("corename", secname);
    dialog.setValueForKey("username", System.getProperty("user.name"));
    dialog.setValueForKey("date", timestamp);

    dialog.setTrackId(selectedTrack);
    dialog.setSectionId(selectedTrackSection);
    dialog.setRange(upperLeft[0], upperLeft[1], lowerRight[0], lowerRight[1]);

    // Place the dialog next to the last mouse position and show it.
    dialog.pack();
    dialog.setLocation(prePos.x + 20, prePos.y - dialog.getSize().height / 2);

    dialog.setVisible(true);
}

From source file:org.lexevs.system.ResourceManager.java

/**
 * Removes the code system: drops its SQL resources and indexes, cleans the
 * local-name maps and LRU cache, and removes the registry entry. The write
 * lock for the coding scheme is held for the duration of the operation.
 * 
 * @param codingSchemeReference the coding scheme reference
 * 
 * @throws LBInvocationException the LB invocation exception
 * @throws LBParameterException the LB parameter exception
 */
public void removeCodeSystem(AbsoluteCodingSchemeVersionReference codingSchemeReference)
        throws LBInvocationException, LBParameterException {
    try {
        WriteLockManager.instance().acquireLock(codingSchemeReference.getCodingSchemeURN(),
                codingSchemeReference.getCodingSchemeVersion());
        try {
            // garbage collect to ensure any unreferenced ResourceManagers
            // get removed (and the finalize method gets called to drop
            // unused db connections)
            System.gc();

            // start by collecting necessary info about this code system
            LocalCodingScheme lcs = new LocalCodingScheme();
            lcs.codingSchemeName = getInternalCodingSchemeNameForUserCodingSchemeName(
                    codingSchemeReference.getCodingSchemeURN(), codingSchemeReference.getCodingSchemeVersion());
            lcs.version = codingSchemeReference.getCodingSchemeVersion();

            String lcsKey = lcs.getKey();

            String serverId = codingSchemeToServerMap_.get(lcsKey);
            String indexId = codingSchemeToIndexMap_.get(lcsKey);

            // clean out the first two maps..
            codingSchemeToServerMap_.remove(lcsKey);
            codingSchemeToIndexMap_.remove(lcsKey);

            boolean singleDBMode = ResourceManager.instance().getSystemVariables().getAutoLoadSingleDBMode();

            // close down the sql statements, remove it from them server
            // map.
            SQLInterface si = sqlServerInterfaces_.get(serverId);

            sqlServerInterfaces_.remove(serverId);

            // drop the tables if we are in single db mode.
            if (singleDBMode) {
                si.dropTables();
            }

            String dbName = registry_.getDBCodingSchemeEntry(codingSchemeReference).dbName;

            //This is for backwards compatiblity. Since multi-db mode is now deprecated,
            //this enables us to still drop a database that has been previously loaded.
            //We detect a multi-db load by detecting if the 'dbName' is not blank in the
            //registry. We then reconstruct the jdbc url from the registry.
            if (!singleDBMode || StringUtils.isNotBlank(dbName)) {
                String url = registry_.getDBCodingSchemeEntry(codingSchemeReference).dbURL;
                url = this.constructJdbcUrlForDeprecatedMultiDbMode(url, dbName);
                DBUtility.dropDatabase(url, systemVars_.getAutoLoadDBDriver(), dbName,
                        systemVars_.getAutoLoadDBUsername(), systemVars_.getAutoLoadDBPassword());
            }

            // all automatic code systems are in a single index interface -
            // so need to clean that up from within.
            // mturk 1/8/2009 -- added check for null indexId value
            if (indexId != null) {
                indexInterfaces_.get(indexId).deleteIndex(lcs.codingSchemeName, lcs.version);
            }

            // clean up the localName - internal name / version map

            removeInternalMap(lcs);

            Hashtable<String, String> temp = codingSchemeLocalNamesToInternalNameMap_
                    .get(codingSchemeReference.getCodingSchemeURN());

            // if the hashtable is now empty, we should remove any key that
            // maps to this hashtable in the local name map
            if (temp == null || temp.size() == 0) {
                // also, if this hashtable was empty, it means that no other
                // code systems exist with the same UID - so I can also
                // clear the internalCodingSchemeNameUIDMap
                Enumeration<String> e = internalCodingSchemeNameUIDMap_.keys();
                while (e.hasMoreElements()) {
                    String key = (String) e.nextElement();
                    if (internalCodingSchemeNameUIDMap_.get(key)
                            .equals(codingSchemeReference.getCodingSchemeURN())) {
                        internalCodingSchemeNameUIDMap_.remove(key);
                    }
                }
            }

            // The supportecCodingSchemeToInternalMap_ should be cleaned
            // here, but it isn't structured in a way that makes that easy.
            // So skip it - shouldn't cause any harm

            // clear the lru cache.

            cache_.clear();
            registry_.remove(codingSchemeReference);
            // NOTE(review): on success the lock is released here AND again in
            // the finally block below, i.e. releaseLock runs twice. This
            // assumes releaseLock tolerates a redundant release - confirm
            // against WriteLockManager before changing.
            WriteLockManager.instance().releaseLock(codingSchemeReference.getCodingSchemeURN(),
                    codingSchemeReference.getCodingSchemeVersion());
        } catch (Exception e) {
            String id = logger_.error("Unexpected error while removing a coding scheme", e);
            throw new LBInvocationException("There was an unexpected error while removing the coding scheme",
                    id);
        }
    } finally {
        WriteLockManager.instance().releaseLock(codingSchemeReference.getCodingSchemeURN(),
                codingSchemeReference.getCodingSchemeVersion());
    }

}

From source file:helma.objectmodel.db.NodeManager.java

/**
 *  Updates a modified node in the embedded db or an external relational database, depending
 * on its database mapping.
 *
 * @param db   the embedded database, used when the node has no relational mapping
 * @param txn  the transaction the embedded update takes part in
 * @param node the node to persist
 *
 * @return true if the DbMapping of the updated Node is to be marked as updated via
 *              DbMapping.setLastDataChange
 */
public boolean updateNode(IDatabase db, ITransaction txn, Node node)
        throws IOException, SQLException, ClassNotFoundException {

    // presumably fires the node's persistence callback before writing - confirm
    invokeOnPersist(node);
    DbMapping dbm = node.getDbMapping();
    boolean markMappingAsUpdated = false;

    if ((dbm == null) || !dbm.isRelational()) {
        // non-relational node: hand it to the embedded database as-is
        db.updateNode(txn, node.getID(), node);
    } else {
        // relational node: build an UPDATE statement covering only dirty columns
        Hashtable propMap = node.getPropMap();
        Property[] props;

        if (propMap == null) {
            props = new Property[0];
        } else {
            props = new Property[propMap.size()];
            propMap.values().toArray(props);
        }

        // make sure table meta info is loaded by dbmapping
        dbm.getColumns();

        StringBuffer b = dbm.getUpdate();

        // comma flag set after the first dirty column, also tells as
        // if there are dirty columns at all
        boolean comma = false;

        for (int i = 0; i < props.length; i++) {
            // skip clean properties
            if ((props[i] == null) || !props[i].dirty) {
                // null out clean property so we don't consider it later
                props[i] = null;
                continue;
            }

            Relation rel = dbm.propertyToRelation(props[i].getName());

            // skip readonly, virtual and collection relations
            if ((rel == null) || rel.readonly || rel.virtual || (!rel.isPrimitiveOrReference())) {
                // null out property so we don't consider it later
                props[i] = null;
                continue;
            }

            if (comma) {
                b.append(", ");
            } else {
                comma = true;
            }

            b.append(rel.getDbField());
            b.append(" = ?");
        }

        // if no columns were updated, return false
        if (!comma) {
            return false;
        }

        // restrict the UPDATE to this node's row
        b.append(" WHERE ");
        dbm.appendCondition(b, dbm.getIDField(), node.getID());

        Connection con = dbm.getConnection();
        // set connection to write mode
        if (con.isReadOnly())
            con.setReadOnly(false);
        PreparedStatement stmt = con.prepareStatement(b.toString());

        int stmtNumber = 0;
        long logTimeStart = logSql ? System.currentTimeMillis() : 0;

        try {
            // bind a parameter for each remaining (dirty, writable) property,
            // in the same order the columns were appended above
            for (int i = 0; i < props.length; i++) {
                Property p = props[i];

                if (p == null) {
                    continue;
                }

                Relation rel = dbm.propertyToRelation(p.getName());

                stmtNumber++;
                setStatementValue(stmt, stmtNumber, p, rel.getColumnType());

                p.dirty = false;

                // private relations don't count as data changes for the mapping
                if (!rel.isPrivate()) {
                    markMappingAsUpdated = true;
                }
            }

            stmt.executeUpdate();

        } finally {
            if (logSql) {
                long logTimeStop = System.currentTimeMillis();
                logSqlStatement("SQL UPDATE", dbm.getTableName(), logTimeStart, logTimeStop, b.toString());
            }
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (Exception ignore) {
                    // best-effort close; nothing useful to do on failure
                }
            }
        }
    }

    // update may cause changes in the node's parent subnode array
    // TODO: is this really needed anymore?
    if (markMappingAsUpdated && node.isAnonymous()) {
        Node parent = node.getCachedParent();

        if (parent != null) {
            parent.markSubnodesChanged();
        }
    }

    return markMappingAsUpdated;
}

From source file:com.netscape.kra.StorageKeyUnit.java

/**
 * Reconstructs the storage password from recovery agent credentials.
 *
 * Each configured agent (PROP_UID + i) whose credential is present in
 * {@code creds} contributes one decrypted share; the shares are then joined
 * by the configured IJoinShares plugin to recover the secret.
 *
 * @param creds credentials supplied by the recovery agents
 * @return the reconstructed password
 * @throws EBaseException if any share is missing or invalid, the join
 *         plugin cannot be loaded/initialized, or recovery fails
 */
private String constructPassword(Credential creds[]) throws EBaseException {
    // sort the credential according to the order in
    // configuration file
    Hashtable<String, byte[]> v = new Hashtable<String, byte[]>();

    for (int i = 0;; i++) {
        String uid = null;

        try {
            uid = mStorageConfig.getString(PROP_UID + i);
            if (uid == null)
                break;
        } catch (Exception e) {
            // no more configured agents
            break;
        }
        for (int j = 0; j < creds.length; j++) {
            if (uid.equals(creds[j].getIdentifier())) {
                byte pwd[] = decryptShareWithInternalStorage(mStorageConfig.getString(PROP_SHARE + i),
                        creds[j].getPassword());
                if (pwd == null) {
                    throw new EBaseException(CMS.getUserMessage("CMS_AUTHENTICATION_INVALID_CREDENTIAL"));
                }

                // FIX: store a copy of the share before obscuring. The original
                // put pwd itself into the table and then called obscureBytes(pwd),
                // which would wipe the stored share if obscureBytes zeroes the
                // array in place - TODO confirm obscureBytes semantics.
                v.put(Integer.toString(i), pwd.clone());
                JssSubsystem jssSubsystem = (JssSubsystem) CMS.getSubsystem(JssSubsystem.ID);
                jssSubsystem.obscureBytes(pwd);
                break;
            }
        }
    }

    // FIX: the original tested v.size() < 0, which is always false (dead
    // code). An empty share table means no credential matched any agent.
    if (v.size() == 0) {
        throw new EBaseException(CMS.getUserMessage("CMS_AUTHENTICATION_INVALID_CREDENTIAL"));
    }

    // every supplied credential must have produced exactly one share
    if (v.size() != creds.length) {
        throw new EBaseException(CMS.getUserMessage("CMS_AUTHENTICATION_INVALID_CREDENTIAL"));
    }

    // load the configured share-joining plugin
    IJoinShares j = null;
    try {
        String className = mConfig.getString("joinshares_class", "com.netscape.cms.shares.OldJoinShares");
        j = (IJoinShares) Class.forName(className).newInstance();
    } catch (Exception e) {
        CMS.debug("JoinShares error " + e);
    }
    if (j == null) {
        CMS.debug("JoinShares plugin is not found");
        throw new EBaseException(CMS.getUserMessage("CMS_AUTHENTICATION_INVALID_CREDENTIAL"));
    }

    try {
        j.initialize(v.size());
    } catch (Exception e) {
        CMS.debug("Failed to initialize JoinShares");
        throw new EBaseException(CMS.getUserMessage("CMS_AUTHENTICATION_INVALID_CREDENTIAL"));
    }
    Enumeration<String> e = v.keys();

    // feed each share to the plugin; share numbers are 1-based
    while (e.hasMoreElements()) {
        String next = e.nextElement();

        j.addShare(Integer.parseInt(next) + 1, v.get(next));
    }
    try {
        byte secret[] = j.recoverSecret();
        String pwd = new String(secret);

        // wipe the recovered secret bytes once copied into the String
        JssSubsystem jssSubsystem = (JssSubsystem) CMS.getSubsystem(JssSubsystem.ID);
        jssSubsystem.obscureBytes(secret);

        return pwd;
    } catch (Exception ee) {
        mKRA.log(ILogger.LL_FAILURE, CMS.getLogMessage("CMSCORE_KRA_STORAGE_RECONSTRUCT", e.toString()));
        throw new EBaseException(CMS.getUserMessage("CMS_KRA_INVALID_PASSWORD", ee.toString()));
    }
}

From source file:org.pdfsam.console.business.pdf.handlers.SplitCmdExecutor.java

/**
 * Execute the split of a pdf document when split type is S_BLEVEL
 * 
 * @param inputCommand
 * @param bookmarksTable
 *            bookmarks table. It's populated only when splitting by bookmarks. If null or empty it's ignored
 * @throws Exception
 */
private void executeSplit(SplitParsedCommand inputCommand, Hashtable bookmarksTable) throws Exception {
    pdfReader = PdfUtility.readerFor(inputCommand.getInputFile());
    pdfReader.removeUnusedObjects();
    pdfReader.consolidateNamedDestinations();

    int n = pdfReader.getNumberOfPages();
    BookmarksProcessor bookmarkProcessor = new BookmarksProcessor(SimpleBookmark.getBookmark(pdfReader), n);
    int fileNum = 0;
    LOG.info("Found " + n + " pages in input pdf document.");

    Integer[] limits = inputCommand.getSplitPageNumbers();
    // limits list validation end clean
    TreeSet limitsList = validateSplitLimits(limits, n);
    if (limitsList.isEmpty()) {
        throw new SplitException(SplitException.ERR_NO_PAGE_LIMITS);
    }

    // HERE I'M SURE I'VE A LIMIT LIST WITH VALUES, I CAN START BOOKMARKS
    int currentPage;
    Document currentDocument = new Document(pdfReader.getPageSizeWithRotation(1));
    int relativeCurrentPage = 0;
    int endPage = n;
    int startPage = 1;
    PdfImportedPage importedPage;
    File tmpFile = null;
    File outFile = null;

    // endPage tracks the last page of the current output chunk; it advances
    // through the sorted limit list as chunks are closed
    Iterator itr = limitsList.iterator();
    if (itr.hasNext()) {
        endPage = ((Integer) itr.next()).intValue();
    }
    for (currentPage = 1; currentPage <= n; currentPage++) {
        relativeCurrentPage++;
        // check if i've to read one more page or to open a new doc
        if (relativeCurrentPage == 1) {
            LOG.debug("Creating a new document.");
            fileNum++;
            tmpFile = FileUtility.generateTmpFile(inputCommand.getOutputFile());
            String bookmark = null;
            // when splitting by bookmarks, the chunk's first page may carry a
            // bookmark title used to name the output file
            if (bookmarksTable != null && bookmarksTable.size() > 0) {
                bookmark = (String) bookmarksTable.get(new Integer(currentPage));
            }
            FileNameRequest request = new FileNameRequest(currentPage, fileNum, bookmark);
            outFile = new File(inputCommand.getOutputFile(), prefixParser.generateFileName(request));
            startPage = currentPage;
            currentDocument = new Document(pdfReader.getPageSizeWithRotation(currentPage));

            pdfWriter = new PdfSmartCopy(currentDocument, new FileOutputStream(tmpFile));

            // set creator
            currentDocument.addCreator(ConsoleServicesFacade.CREATOR);

            setCompressionSettingOnWriter(inputCommand, pdfWriter);
            setPdfVersionSettingOnWriter(inputCommand, pdfWriter, Character.valueOf(pdfReader.getPdfVersion()));

            currentDocument.open();
        }

        importedPage = pdfWriter.getImportedPage(pdfReader, currentPage);
        pdfWriter.addPage(importedPage);

        // if it's time to close the document
        if (currentPage == endPage) {
            LOG.info("Temporary document " + tmpFile.getName() + " done, now adding bookmarks...");
            // manage bookmarks: keep only the outline entries that fall inside
            // this chunk's page range
            List bookmarks = bookmarkProcessor.processBookmarks(startPage, endPage);
            if (bookmarks != null) {
                pdfWriter.setOutlines(bookmarks);
            }
            relativeCurrentPage = 0;
            currentDocument.close();
            FileUtility.renameTemporaryFile(tmpFile, outFile, inputCommand.isOverwrite());
            LOG.debug("File " + outFile.getCanonicalPath() + " created.");
            endPage = (itr.hasNext()) ? ((Integer) itr.next()).intValue() : n;
        }
        setPercentageOfWorkDone((currentPage * WorkDoneDataModel.MAX_PERGENTAGE) / n);
    }
    pdfReader.close();
    LOG.info("Split " + inputCommand.getSplitType() + " done.");
}

From source file:edu.mayo.informatics.lexgrid.convert.directConversions.UMLSToSQL.java

/**
 * Populates supportedAssociations_ with the distinct RELA (descriptive) and
 * REL (base UMLS) association names found in MRREL for the given source.
 *
 * @param UMLSCodingSchemeName the SAB value identifying the coding scheme
 * @throws SQLException if either query against MRREL fails
 */
private void populateSupportedAssociations(String UMLSCodingSchemeName) throws SQLException {
    // accumulates Association objects keyed by the helper
    Hashtable relationsHolder = new Hashtable();

    messages_.info("Getting the descriptive associations");
    PreparedStatement getRelations = umlsConnection_
            .prepareStatement("SELECT DISTINCT RELA, DIR FROM MRREL WHERE SAB = ?");
    try {
        getRelations.setString(1, UMLSCodingSchemeName);

        ResultSet relations = getRelations.executeQuery();
        while (relations.next()) {
            String temp = relations.getString("RELA");
            String tempDirFlag = relations.getString("DIR");
            // skip rows with no RELA value
            if (temp == null || temp.length() == 0) {
                continue;
            }
            mapSupportedAssociationsHelper(temp, UMLSCodingSchemeName, tempDirFlag, "RELA", relationsHolder);
        }
    } finally {
        // FIX: the original leaked this statement by reassigning the variable
        // for the second query without closing it first. Closing the statement
        // also closes its ResultSet.
        getRelations.close();
    }

    messages_.info("Getting the base umls associations");
    getRelations = umlsConnection_.prepareStatement("SELECT DISTINCT REL, DIR FROM MRREL WHERE SAB = ?");
    try {
        getRelations.setString(1, UMLSCodingSchemeName);

        ResultSet relations = getRelations.executeQuery();
        while (relations.next()) {
            String temp = relations.getString("REL");
            String tempDirFlag = relations.getString("DIR");
            // skip rows with no REL value
            if (temp == null || temp.length() == 0) {
                continue;
            }
            mapSupportedAssociationsHelper(temp, UMLSCodingSchemeName, tempDirFlag, "REL", relationsHolder);
        }
    } finally {
        getRelations.close();
    }

    // flatten the collected associations into the supportedAssociations_ array
    supportedAssociations_ = new Association[relationsHolder.size()];
    Enumeration elements = relationsHolder.elements();
    int i = 0;
    while (elements.hasMoreElements()) {
        Association temp = (Association) elements.nextElement();
        supportedAssociations_[i++] = temp;
    }
}

From source file:com.flexive.core.security.UserTicketImpl.java

/**
 * {@inheritDoc}/*from w w w.  j  a v a 2s. co  m*/
 */
@Override
public Long[] getACLsId(long ownerId, ACLCategory category, ACLPermission... perms) {
    Boolean mayCreate = null;
    Boolean mayRead = null;
    Boolean mayEdit = null;
    Boolean mayDelete = null;
    Boolean mayRelate = null;
    Boolean mayExport = null;
    for (ACLPermission perm : perms) {
        switch (perm) {
        case CREATE:
            mayCreate = true;
            break;
        case NOT_CREATE:
            mayCreate = false;
            break;
        case READ:
            mayRead = true;
            break;
        case NOT_READ:
            mayRead = false;
            break;
        case EDIT:
            mayEdit = true;
            break;
        case NOT_EDIT:
            mayEdit = false;
            break;
        case DELETE:
            mayDelete = true;
            break;
        case NOT_DELETE:
            mayDelete = false;
            break;
        case RELATE:
            mayRelate = true;
            break;
        case NOT_RELATE:
            mayRelate = false;
            break;
        case EXPORT:
            mayExport = true;
            break;
        case NOT_EXPORT:
            mayExport = false;
            break;
        }
    }
    Hashtable<Long, boolean[]> hlp = new Hashtable<Long, boolean[]>(this.assignments.length);

    // Condense the ACL right informations
    // If a ACL is assigned via groupX and groupY the rights are taken from both assignments.
    for (ACLAssignment acl : this.assignments) {
        if (acl.isOwnerGroupAssignment() && ownerId != userId)
            continue;
        if (category != null && acl.getACLCategory() != category)
            continue;
        Long key = acl.getAclId();
        boolean[] rights = hlp.get(key);
        if (rights == null) {
            rights = new boolean[] { false, false, false, false, false, false, false };
        }
        if (acl.getMayRead())
            rights[ACLPermission.READ.ordinal()] = true;
        if (acl.getMayEdit())
            rights[ACLPermission.EDIT.ordinal()] = true;
        if (acl.getMayDelete())
            rights[ACLPermission.DELETE.ordinal()] = true;
        if (acl.getMayRelate())
            rights[ACLPermission.RELATE.ordinal()] = true;
        if (acl.getMayExport())
            rights[ACLPermission.EXPORT.ordinal()] = true;
        if (acl.getMayCreate() && !acl.isOwnerGroupAssignment())
            rights[ACLPermission.CREATE.ordinal()] = true;
        hlp.put(key, rights);
    }

    // Return matching ACLs
    Enumeration keys = hlp.keys();
    List<Long> result = new ArrayList<Long>(hlp.size());
    while (keys.hasMoreElements()) {
        Long aclId = (Long) keys.nextElement();
        boolean[] rights = hlp.get(aclId);
        if (mayRead != null && mayRead != rights[ACLPermission.READ.ordinal()])
            continue;
        if (mayEdit != null && mayEdit != rights[ACLPermission.EDIT.ordinal()])
            continue;
        if (mayDelete != null && mayDelete != rights[ACLPermission.DELETE.ordinal()])
            continue;
        if (mayRelate != null && mayRelate != rights[ACLPermission.RELATE.ordinal()])
            continue;
        if (mayExport != null && mayExport != rights[ACLPermission.EXPORT.ordinal()])
            continue;
        if (mayCreate != null && mayCreate != rights[ACLPermission.CREATE.ordinal()])
            continue;
        result.add(aclId);
    }
    return result.toArray(new Long[result.size()]);
}

From source file:com.clustercontrol.nodemap.session.NodeMapControllerBean.java

/**
 * Pings every facility in the given list using fping (IPv4) / fping6 (IPv6)
 * and returns one result line per facility.
 *
 * @param facilityList facility ids to ping
 * @return list of "facilityId : result message" strings, or null when the
 *         fping invocation itself failed
 * @throws HinemosUnknown
 * @throws NodeMapException
 */
public List<String> pingToFacilityList(List<String> facilityList) throws HinemosUnknown, NodeMapException {

    String message = "";
    String messageOrg = "";
    // hosts    - IPv4 addresses to ping (as strings)
    // hostsv6  - IPv6 addresses to ping (as strings)
    // node     - [0] = resolved IP address, [1] = node name
    HashSet<String> hosts = new HashSet<String>();
    HashSet<String> hostsv6 = new HashSet<String>();
    // maps IP address -> facility ids that resolve to it
    Hashtable<String, List<String>> facilityNameTable = new Hashtable<>();

    String facilityId = null;
    int version = 4;
    String[] node;
    for (int index = 0; index < facilityList.size(); index++) {
        facilityId = facilityList.get(index);
        if (facilityId != null && !"".equals(facilityId)) {
            node = new String[2];
            try {

                // look up the node information from the repository
                NodeInfo info = new RepositoryControllerBean().getNode(facilityId);

                if (info.getIpAddressVersion() != null) {
                    version = info.getIpAddressVersion();
                } else {
                    version = 4;
                }

                if (version == 6) {

                    InetAddress[] ip = InetAddress.getAllByName(info.getIpAddressV6());

                    if (ip.length != 1) {
                        // the IPv6 address is expected to resolve to exactly one
                        // InetAddress; anything else is treated as unknown host
                        UnknownHostException e = new UnknownHostException();
                        m_log.info("pingToFacilityList() : " + e.getClass().getSimpleName() + ", "
                                + e.getMessage());
                        throw e;
                    }

                    node[0] = ip[0].getHostAddress();
                    if (node[0] != null && !node[0].equals("")) {
                        // collect the IPv6 address
                        hostsv6.add(node[0]);
                    }
                } else {
                    node[0] = info.getIpAddressV4();
                    if (node[0] != null && !node[0].equals("")) {

                        // collect the IPv4 address
                        hosts.add(node[0]);
                    }
                }
                if (node[0] != null && !node[0].equals("")) {
                    node[1] = info.getNodeName();
                    // remember which facilities map to this address
                    List<String> facilitys = facilityNameTable.get(node[0]);
                    if (facilitys == null) {
                        facilitys = new ArrayList<>();
                    }
                    facilitys.add(facilityId);
                    facilityNameTable.put(node[0], facilitys);
                }
            } catch (FacilityNotFound e) {
                message = MessageConstant.MESSAGE_COULD_NOT_GET_NODE_ATTRIBUTES_PING.getMessage() + ","
                        + facilityId;
                messageOrg = e.getMessage();
                throw new NodeMapException(message + ", " + messageOrg, e);
            } catch (UnknownHostException e) {
                // unresolvable hosts are skipped silently
            }
        }
    }

    int runCount = 0;
    int runInterval = 0;
    int pingTimeout = 0;
    try {
        // ping run count (default 1, valid range 1-9)
        String runCountKey = "nodemap.ping.runcount";
        runCount = HinemosPropertyUtil
                .getHinemosPropertyNum(runCountKey, Long.valueOf(PingRunCountConstant.TYPE_COUNT_01))
                .intValue();
        CommonValidator.validateInt(runCountKey, runCount, 1, 9);

        // interval between runs in ms (default 1000, valid range 0-5000)
        String runIntervalKey = "nodemap.ping.runinterval";
        runInterval = HinemosPropertyUtil
                .getHinemosPropertyNum(runIntervalKey, Long.valueOf(PingRunIntervalConstant.TYPE_SEC_02))
                .intValue();
        CommonValidator.validateInt(runIntervalKey, runInterval, 0, 5 * 1000);

        // ping timeout in ms (default 5000, valid range 1-3600000)
        String pintTimeoutKey = "nodemap.ping.timeout";
        pingTimeout = HinemosPropertyUtil
                .getHinemosPropertyNum(pintTimeoutKey, Long.valueOf(PingRunIntervalConstant.TYPE_SEC_05))
                .intValue();
        CommonValidator.validateInt(pintTimeoutKey, pingTimeout, 1, 60 * 60 * 1000);
    } catch (Exception e) {
        m_log.warn("pingToFacilityList() : " + e.getClass().getSimpleName() + ", " + e.getMessage(), e);
        throw new HinemosUnknown(e.getMessage(), e);
    }

    ReachAddressFping reachabilityFping = new ReachAddressFping(runCount, runInterval, pingTimeout);

    boolean result = true;
    boolean resultTmp = true;
    ArrayList<String> msgErr = new ArrayList<>();
    ArrayList<String> msgErrV6 = new ArrayList<>();
    Hashtable<String, PingResult> fpingResultSet = new Hashtable<String, PingResult>();
    Hashtable<String, PingResult> fpingResultSetV6 = new Hashtable<String, PingResult>();

    RunMonitorPing monitorPing = new RunMonitorPing();
    // run fping against the IPv4 hosts
    if (hosts.size() != 0) {
        result = reachabilityFping.isReachable(hosts, 4);
        msgErr = reachabilityFping.getM_errMsg();
    }
    // run fping6 against the IPv6 hosts
    if (hostsv6.size() != 0) {
        resultTmp = reachabilityFping.isReachable(hostsv6, 6);
        msgErrV6 = reachabilityFping.getM_errMsg();
    }

    // either fping invocation failing aborts the whole operation
    if (!result || !resultTmp) {
        return null;
    }
    List<String> retList = new ArrayList<>();
    fpingResultSet = monitorPing.wrapUpFping(msgErr, runCount, 4);
    fpingResultSetV6 = monitorPing.wrapUpFping(msgErrV6, runCount, 6);
    // if there were no IPv4 results, use the IPv6 results alone
    m_log.debug("pingToFacilityList(): before fpingResultSet check");
    if (fpingResultSet.size() == 0) {
        m_log.debug("pingToFacilityList(): after fpingResultSet check");
        fpingResultSet = fpingResultSetV6;
    }
    // otherwise merge the IPv6 results into the IPv4 set
    else if (fpingResultSetV6.size() != 0) {
        fpingResultSet.putAll(fpingResultSetV6);
    }
    for (Map.Entry<String, List<String>> ipAdd : facilityNameTable.entrySet()) {
        // NOTE(review): pingResult may be null if this address produced no
        // fping result, which would NPE below - confirm wrapUpFping always
        // covers every pinged address.
        PingResult pingResult = fpingResultSet.get(ipAdd.getKey());
        for (String facility : ipAdd.getValue()) {
            retList.add(facility + " : " + pingResult.getMesseageOrg());
        }
    }
    return retList;
}

From source file:org.auscope.portal.server.web.controllers.GridSubmitController.java

/**
 * Processes a job submission request.
 *
 * @param request The servlet request
 * @param response The servlet response
 * @param job The job descriptor bound from the request parameters
 *
 * @return A JSON object with a success attribute that indicates whether
 *         the job was successfully submitted.
 */
@RequestMapping("/submitJob.do")
public ModelAndView submitJob(HttpServletRequest request, HttpServletResponse response, GeodesyJob job) {

    logger.debug("Job details:\n" + job.toString());

    GeodesySeries series = null;
    boolean success = true;
    final String user = request.getRemoteUser();
    String jobInputDir = (String) request.getSession().getAttribute("jobInputDir");
    String newSeriesName = request.getParameter("seriesName");
    String seriesIdStr = request.getParameter("seriesId");
    ModelAndView mav = new ModelAndView("jsonView");
    Object credential = request.getSession().getAttribute("userCred");
    String localJobInputDir = (String) request.getSession().getAttribute("localJobInputDir");

    // Used to store Job Submission status, because there will be another request checking this.
    GridTransferStatus gridStatus = new GridTransferStatus();

    if (credential == null) {
        // No grid credential in the session: fail fast and record the status
        // so the polling request can report it.
        logger.error(GridSubmitController.CREDENTIAL_ERROR);
        gridStatus.currentStatusMsg = GridSubmitController.CREDENTIAL_ERROR;
        gridStatus.jobSubmissionStatus = JobSubmissionStatus.Failed;

        // Save in session for status update request for this job.
        request.getSession().setAttribute("gridStatus", gridStatus);
        mav.addObject("success", false);
        return mav;
    }

    // If seriesName parameter was provided then we create a new series,
    // otherwise seriesId contains the id of the series to use.
    // FIX: the original compared with != "" (reference identity), which is
    // effectively always true for request parameters; use isEmpty() instead.
    if (newSeriesName != null && !newSeriesName.isEmpty()) {
        String newSeriesDesc = request.getParameter("seriesDesc");

        logger.debug("Creating new series '" + newSeriesName + "'.");
        series = new GeodesySeries();
        series.setUser(user);
        series.setName(newSeriesName);
        if (newSeriesDesc != null) {
            series.setDescription(newSeriesDesc);
        }
        jobManager.saveSeries(series);
        // Note that we can now access the series' new ID

    } else if (seriesIdStr != null && !seriesIdStr.isEmpty()) {
        try {
            int seriesId = Integer.parseInt(seriesIdStr);
            series = jobManager.getSeriesById(seriesId);
        } catch (NumberFormatException e) {
            // FIX: include the exception so the bad value's stack trace is kept.
            logger.error("Error parsing series ID!", e);
        }
    }

    if (series == null) {
        success = false;
        final String msg = "No valid series found. NOT submitting job!";
        logger.error(msg);
        gridStatus.currentStatusMsg = msg;
        gridStatus.jobSubmissionStatus = JobSubmissionStatus.Failed;

    } else {
        // Reduce our list of input files to an array of urls.
        // FIX: null-check BEFORE dereferencing — the original called
        // gpsFiles.toString() first and threw NullPointerException whenever
        // the "gridInputFiles" session attribute was absent.
        List<GeodesyGridInputFile> gpsFiles = (List<GeodesyGridInputFile>) request.getSession()
                .getAttribute("gridInputFiles");
        if (gpsFiles == null) {
            logger.warn("gridInputFiles is null, using empty list instead");
            gpsFiles = new ArrayList<GeodesyGridInputFile>();
        }
        logger.debug("gpsFiles: " + gpsFiles.toString());

        String[] urlArray = new String[gpsFiles.size()];
        int urlArrayIndex = 0;
        for (GeodesyGridInputFile ggif : gpsFiles) {
            urlArray[urlArrayIndex++] = ggif.getFileUrl();
        }

        if (gridStatus.jobSubmissionStatus != JobSubmissionStatus.Failed) {

            job.setSeriesId(series.getId());
            job.setJobType(job.getJobType().replace(",", ""));
            JSONArray args = JSONArray.fromObject(request.getParameter("arguments"));
            logger.info(
                    "Args count: " + job.getArguments().length + " | Args in Json : " + args.toArray().length);
            job.setArguments((String[]) args.toArray(new String[args.toArray().length]));

            // Create a new directory for the output files of this job,
            // namespaced by the certificate DN of the submitting user.
            String certDN_DIR = "";
            try {
                certDN_DIR = generateCertDNDirectory(credential);
                logger.debug("certDN_DIR: " + certDN_DIR);
            } catch (GSSException e) {
                logger.error(FaultHelper.getMessage(e));
            }

            success = createGridDir(request, gridAccess.getGridFtpStageOutDir() + certDN_DIR + File.separator);
            SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss");
            String dateFmt = sdf.format(new Date());
            String jobID = user + "-" + job.getName() + "-" + dateFmt + File.separator;
            String jobOutputDir = gridAccess.getGridFtpStageOutDir() + certDN_DIR + File.separator + jobID;

            // Add grid stage-in directory and local stage-in directory.
            String stageInURL = gridAccess.getGridFtpServer() + jobInputDir;
            logger.debug("stagInURL: " + stageInURL);

            if (job.getJobType().equals("single")) {

                // Transfer job input files to Grid StageInURL
                if (urlArray.length > 0) {
                    // Full URL
                    // e.g. "gsiftp://pbstore.ivec.org:2811//pbstore/au01/grid-auscope/Abdi.Jama@csiro.au-20091103_163322/"
                    //       +"rinex/" + filename
                    String toURL = gridAccess.getGridFtpServer() + File.separator + jobInputDir
                            + GridSubmitController.RINEX_DIR + File.separator;
                    gridStatus = urlCopy(urlArray, request, toURL);
                }

                String localStageInURL = gridAccess.getLocalGridFtpServer()
                        + (String) request.getSession().getAttribute("localJobInputDir");
                job.setInTransfers(new String[] { stageInURL, localStageInURL });

                logger.debug("localStagInURL: " + localStageInURL);
            } else {
                // Multi job: a date range (if supplied) takes precedence over
                // the parameter list from the GUI.
                String strDateFrom = (String) request.getSession().getAttribute("dateFrom");
                String strDateTo = (String) request.getSession().getAttribute("dateTo");
                if (strDateFrom != null && strDateTo != null) {
                    String[] params = createSubjobs(strDateFrom, strDateTo, job.getArguments()[0], request,
                            gpsFiles);

                    // Overwrite job args with the per-subjob parameters.
                    job.setArguments(params);
                    String localStageInURL = gridAccess.getLocalGridFtpServer() + localJobInputDir;
                    job.setInTransfers(new String[] { localStageInURL });
                    gridStatus = (GridTransferStatus) request.getSession().getAttribute("gridStatus");
                } else {
                    if (urlArray.length > 0) {
                        // Full URL
                        // e.g. "gsiftp://pbstore.ivec.org:2811//pbstore/au01/grid-auscope/Abdi.Jama@csiro.au-20091103_163322/"
                        //       +"rinex/" + filename
                        String toURL = gridAccess.getGridFtpServer() + File.separator + jobInputDir
                                + GridSubmitController.RINEX_DIR + File.separator;
                        gridStatus = urlCopy(urlArray, request, toURL);
                    }
                    String localStageInURL = gridAccess.getLocalGridFtpServer() + localJobInputDir;
                    job.setInTransfers(new String[] { stageInURL, localStageInURL });
                }

                // Create the base directory for multi job, because this fails on stage out.
                success = createGridDir(request, jobOutputDir);

                // Add subJobStageIns
                // NOTE(review): raw Hashtable kept — job.setSubJobStageIn's declared
                // parameter type is not visible here; confirm before genericizing.
                Hashtable localSubJobDir = (Hashtable) request.getSession().getAttribute("localSubJobDir");
                if (localSubJobDir == null)
                    localSubJobDir = new Hashtable();
                job.setSubJobStageIn(localSubJobDir);
                request.getSession().removeAttribute("localSubJobDir");
                logger.debug("localSubJobDir size: " + localSubJobDir.size());

                // Add gridSubJobStageIns
                Hashtable gridSubJobStageInDir = (Hashtable) request.getSession()
                        .getAttribute("subJobStageInDir");
                if (gridSubJobStageInDir == null)
                    gridSubJobStageInDir = new Hashtable();
                job.setGridSubJobStageIn(gridSubJobStageInDir);
                request.getSession().removeAttribute("subJobStageInDir");
                logger.debug("gridSubJobStageInDir size: " + gridSubJobStageInDir.size());
            }

            String submitEPR = null;
            job.setEmailAddress(user);
            job.setOutputDir(jobOutputDir);
            job.setOutTransfers(new String[] { gridAccess.getGridFtpServer() + jobOutputDir });

            logger.info("Submitting job with name " + job.getName() + " to " + job.getSite());
            // ACTION!
            if (success && gridStatus.jobSubmissionStatus != JobSubmissionStatus.Failed)
                submitEPR = gridAccess.submitJob(job, credential);

            if (submitEPR == null) {
                success = false;
                gridStatus.jobSubmissionStatus = JobSubmissionStatus.Failed;
                gridStatus.currentStatusMsg = GridSubmitController.INTERNAL_ERROR;
            } else {
                logger.info("SUCCESS! EPR: " + submitEPR);
                String status = gridAccess.retrieveJobStatus(submitEPR, credential);
                job.setReference(submitEPR);
                job.setStatus(status);
                job.setSubmitDate(dateFmt);
                jobSupplementInfo(job);
                jobManager.saveJob(job);
                request.getSession().removeAttribute("jobInputDir");
                request.getSession().removeAttribute("localJobInputDir");

                // This means job submission to the grid is done.
                gridStatus.jobSubmissionStatus = JobSubmissionStatus.Done;
                gridStatus.currentStatusMsg = GridSubmitController.TRANSFER_COMPLETE;
            }
        } else {
            success = false;
            logger.error(GridSubmitController.FILE_COPY_ERROR);
            gridStatus.currentStatusMsg = GridSubmitController.FILE_COPY_ERROR;
            gridStatus.jobSubmissionStatus = JobSubmissionStatus.Failed;
            mav.addObject("error", GridSubmitController.FILE_COPY_ERROR);
        }
    }
    // Save in session for status update request for this job.
    request.getSession().setAttribute("gridStatus", gridStatus);

    // Reset the date range for the next job.
    request.getSession().removeAttribute("dateTo");
    request.getSession().removeAttribute("dateFrom");

    mav.addObject("success", success);

    return mav;
}

From source file:de.juwimm.cms.remote.UserServiceSpringImpl.java

/**
 * Returns all groups users of this unit are member of.<br>
 * /*ww  w .j  a  v a2 s.  com*/
 * @return Returns all GroupValue Objects in an Array. Is empty if nobody
 *         was found.
 * 
 * @see de.juwimm.cms.remote.UserServiceSpring#getAllGroupsUsedInUnit(java.lang.Integer)
 */
@Override
protected GroupValue[] handleGetAllGroupsUsedInUnit(Integer unitId) throws Exception {
    if (log.isDebugEnabled()) {
        log.debug("begin getAllGroupsUsedInUnit");
    }
    Hashtable<Integer, GroupValue> groupsTable = new Hashtable<Integer, GroupValue>();
    try {
        UnitHbm unit = super.getUnitHbmDao().load(unitId);
        Iterator<UserHbm> usIt = unit.getUsers().iterator();
        UserHbm principal = super.getUserHbmDao().load(AuthenticationHelper.getUserName());
        if (log.isDebugEnabled()) {
            log.debug("Principal: " + principal);
        }
        UserHbm user = null;
        while (usIt.hasNext()) {
            user = usIt.next();
            Iterator<GroupHbm> grpIt = super.getUserHbmDao().getGroups4ActiveSite(user).iterator();
            GroupHbm grp = null;
            GroupValue gv = null;
            while (grpIt.hasNext()) {
                grp = grpIt.next();
                gv = grp.getGroupValue();
                if (gv != null) {
                    groupsTable.put(gv.getGroupId(), gv);
                }
            }
        }
    } catch (Exception e) {
        log.error("Error while getting groups for unit", e);
        throw new UserException(e.getMessage());
    }
    return groupsTable.values().toArray(new GroupValue[groupsTable.size()]);
}