Example usage for java.util HashMap isEmpty

Introduction

On this page you can find example usage for java.util HashMap isEmpty.

Prototype

public boolean isEmpty() 

Document

Returns true if this map contains no key-value mappings.
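
A minimal, self-contained sketch of this behavior (the class and variable names are illustrative):

import java.util.HashMap;

public class IsEmptyExample {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        System.out.println(counts.isEmpty()); // true: no key-value mappings yet
        counts.put("apples", 3);
        System.out.println(counts.isEmpty()); // false: one mapping present
        counts.clear();
        System.out.println(counts.isEmpty()); // true again once all mappings are removed
    }
}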

Usage

From source file:org.archive.crawler.framework.CrawlJob.java

/**
 * Did the ApplicationContext self-validate?
 *
 * @return true if validation passed without errors
 */
public synchronized boolean hasValidApplicationContext() {
    if (ac == null) {
        return false;
    }
    HashMap<String, Errors> allErrors = ac.getAllErrors();
    return allErrors != null && allErrors.isEmpty();
}

From source file:org.wso2.carbon.device.mgt.core.service.DeviceManagementProviderServiceTest.java

@Test(dependsOnMethods = { "testSuccessfulDeviceEnrollment" })
public void testGetTenantedDevice() throws DeviceManagementException {
    HashMap<Integer, Device> deviceMap = deviceMgtService
            .getTenantedDevice(new DeviceIdentifier(DEVICE_ID, DEVICE_TYPE));
    if (!isMock()) {
        Assert.assertFalse(deviceMap.isEmpty());
    }
}

From source file:com.makotosan.vimeodroid.vimeo.Methods.java

private XmlPullParser makeRequest(String method, HashMap<String, String> parameters) {
    final StringBuilder urlStringBuilder = new StringBuilder();
    try {
        urlStringBuilder.append(VimeoUrls.STANDARD_API + "?method=vimeo." + method);
        final ConsumerInfo info = getConsumerInfo();

        if (parameters != null && !parameters.isEmpty()) {
            for (String key : parameters.keySet()) {
                urlStringBuilder.append("&" + key + "=" + parameters.get(key));
            }
            if (!parameters.containsKey("user_id")) {
                urlStringBuilder.append("&user_id=" + info.getConsumerToken());
            }
        }

        final HttpGet request = new HttpGet(urlStringBuilder.toString());
        request.addHeader("Accept-Encoding", "gzip");

        final HttpClient client = this.app.getHttpClient();

        Authentication.signRequest(info, request);

        final org.apache.http.HttpResponse response = client.execute(request);
        final int statusCode = response.getStatusLine().getStatusCode();
        // Log.d("HTTP method : " + method, "return statusCode : " +
        // statusCode);

        if (statusCode != 200) {
            throw new HttpResponseException(statusCode, "HTTP Error");
        }

        final HttpEntity entity = response.getEntity();
        if (entity != null) {
            InputStream inputStream = null;
            try {
                inputStream = entity.getContent();
                final Header contentEncoding = response.getFirstHeader("Content-Encoding");
                if (contentEncoding != null && contentEncoding.getValue().equalsIgnoreCase("gzip")) {
                    inputStream = new GZIPInputStream(inputStream);
                }

                final XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
                final XmlPullParser xpp = factory.newPullParser();
                final String rawXml = IOUtils.toString(inputStream);
                xpp.setInput(new StringReader(rawXml));
                int eventType = xpp.getEventType();
                while (eventType != XmlPullParser.END_DOCUMENT) {
                    switch (eventType) {
                    case XmlPullParser.START_TAG:
                        if ("rsp".equals(xpp.getName())) {
                            String status = xpp.getAttributeValue(XmlPullParser.NO_NAMESPACE, "stat");
                            if ("fail".equals(status)) {
                                ErrorInfo error = VimeoXmlParser.parse(ErrorInfo.class, xpp);
                                Log.e(TAG, error.getExplanation());
                                Toast.makeText(context, error.getExplanation(), Toast.LENGTH_LONG).show();
                                return null;
                                // throw new
                                // Exception(error.getExplanation());
                            }
                            return xpp;
                        }
                        break;
                    }

                    eventType = xpp.next();
                }
                return xpp;
            } finally {
                if (inputStream != null) {
                    inputStream.close();
                }
                entity.consumeContent();
            }
        }
    } catch (Exception e) {
        Log.e(TAG, e.getMessage(), e);
        // Toast.makeText(context, e.getMessage(),
        // Toast.LENGTH_SHORT).show();
    }

    return null;
}

From source file:gov.llnl.lc.smt.command.port.SmtPort.java

/**
 * Returns the first OSM_Port matching the given string.
 *
 * @param subCommandArg the string used to look up matching ports
 * @return the first matching OSM_Port, or null if no port matches
 */
private OSM_Port getOSM_PortByString(String subCommandArg) {
    // return the first match
    HashMap<String, OSM_Port> ports = getOSM_PortsByString(subCommandArg);
    if (ports.isEmpty())
        return null;

    return ports.values().iterator().next();
}

From source file:eionet.cr.api.xmlrpc.XmlRpcServices.java

@Override
public List getResourcesSinceTimestamp(Date timestamp) throws CRException {

    if (logger.isInfoEnabled()) {
        logger.info("Entered " + Thread.currentThread().getStackTrace()[1].getMethodName());
    }

    List<Map<String, String[]>> result = new ArrayList<Map<String, String[]>>();
    if (timestamp != null) {

        // given timestamp must be less than current time (in seconds)
        long curTimeSeconds = Util.currentTimeSeconds();
        long givenTimeSeconds = Util.getSeconds(timestamp.getTime());
        if (givenTimeSeconds < curTimeSeconds) {

            try {
                Collection<SubjectDTO> subjects = DAOFactory.get().getDao(HelperDAO.class)
                        .getSubjectsNewerThan(timestamp, MAX_RESULTS);

                for (Iterator<SubjectDTO> subjectsIter = subjects.iterator(); subjectsIter.hasNext();) {

                    SubjectDTO subjectDTO = subjectsIter.next();
                    HashMap<String, String[]> map = new HashMap<String, String[]>();
                    for (Iterator<String> predicatesIter = subjectDTO.getPredicates().keySet()
                            .iterator(); predicatesIter.hasNext();) {

                        String predicate = predicatesIter.next();
                        map.put(predicate, toStringArray(subjectDTO.getObjects(predicate)));
                    }

                    // if map not empty and the subject has a URL (i.e. getUrl() is not blank)
                    // then add the map to result
                    if (!map.isEmpty()) {
                        String url = subjectDTO.getUrl();
                        if (!StringUtils.isBlank(url)) {
                            String[] arr = new String[1];
                            arr[0] = url;
                            map.put(Predicates.CR_URL, arr); // QAW needs this special reserved predicate
                            result.add(map);
                        }
                    }
                }
            } catch (Throwable t) {
                t.printStackTrace();
                if (t instanceof CRException) {
                    throw (CRException) t;
                } else {
                    throw new CRException(t.toString(), t);
                }
            }

        }
    }

    return result;
}

From source file:org.apache.hadoop.hive.ql.parse.NewGroupByUtils1.java

private void genDistTag2Aggr(QB qb, String dest, ArrayList<ArrayList<Integer>> tag2AggrPos,
        ArrayList<ArrayList<ASTNode>> distTag2AggrParamAst,
        HashMap<Integer, ArrayList<Integer>> nonDistPos2TagOffs) {

    HashMap<String, ASTNode> aggregationTrees = qb.getParseInfo().getAggregationExprsForClause(dest);
    if (aggregationTrees == null || aggregationTrees.isEmpty()) {
        return;
    }
    tag2AggrPos.clear();
    distTag2AggrParamAst.clear();
    tag2AggrPos.add(new ArrayList<Integer>());
    distTag2AggrParamAst.add(new ArrayList<ASTNode>());
    HashMap<String, Integer> treeidx = new HashMap<String, Integer>();
    HashMap<Integer, HashSet<String>> tag2paramaststr = new HashMap<Integer, HashSet<String>>();
    int pos = 0;
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
        ASTNode value = entry.getValue();
        String[] params = new String[value.getChildCount() - 1];
        for (int i = 1; i < value.getChildCount(); i++) {
            params[i - 1] = value.getChild(i).toStringTree();
        }
        Arrays.sort(params);
        ArrayList<String> params1 = new ArrayList<String>();
        params1.add(params[0]);
        String curr = params[0];
        for (int i = 1; i < params.length; i++) {
            if (!curr.equalsIgnoreCase(params[i])) {
                params1.add(params[i]);
                curr = params[i];
            }
        }

        StringBuffer sb = new StringBuffer();
        sb.append(value.getToken().getType());
        for (int i = 0; i < params1.size(); i++) {
            sb.append(params1.get(i));
        }
        String asttree = sb.toString();

        if (!treeidx.containsKey(asttree)) {
            if (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI) {
                int disttag = tag2AggrPos.size();
                treeidx.put(asttree, disttag);
                tag2AggrPos.add(new ArrayList<Integer>());
                distTag2AggrParamAst.add(new ArrayList<ASTNode>());
                if (!tag2paramaststr.containsKey(disttag)) {
                    tag2paramaststr.put(disttag, new HashSet<String>());
                }
                for (int i = 1; i < value.getChildCount(); i++) {
                    ASTNode param = (ASTNode) value.getChild(i);
                    if (!tag2paramaststr.get(disttag).contains(param.toStringTree())) {
                        tag2paramaststr.get(disttag).add(param.toStringTree());
                        distTag2AggrParamAst.get(distTag2AggrParamAst.size() - 1).add(param);
                    }
                }
            } else {
                if (!tag2paramaststr.containsKey(0)) {
                    tag2paramaststr.put(0, new HashSet<String>());
                }
                treeidx.put(asttree, 0);
                for (int i = 1; i < value.getChildCount(); i++) {
                    ASTNode param = (ASTNode) value.getChild(i);
                    if (!tag2paramaststr.get(0).contains(param.toStringTree())) {
                        tag2paramaststr.get(0).add(param.toStringTree());
                        distTag2AggrParamAst.get(0).add(param);
                    }
                }
            }
        }
        if (value.getToken().getType() != HiveParser.TOK_FUNCTIONDI) {
            nonDistPos2TagOffs.put(pos, new ArrayList<Integer>());
            for (int i = 1; i < value.getChildCount(); i++) {
                String param = value.getChild(i).toStringTree();
                int idx = -1;
                for (int j = 0; j < distTag2AggrParamAst.get(0).size(); j++) {
                    if (distTag2AggrParamAst.get(0).get(j).toStringTree().equals(param)) {
                        idx = j;
                        break;
                    }
                }
                nonDistPos2TagOffs.get(pos).add(idx);
            }
        }

        tag2AggrPos.get(treeidx.get(asttree)).add(pos);
        pos++;
    }

    LOG.debug("distTag2AggrPos:\t" + tag2AggrPos);
    for (int i = 0; i < tag2AggrPos.size(); i++) {
        LOG.debug("distTag2AggrPos[" + i + "]:\t" + tag2AggrPos.get(i));
    }
    LOG.debug("distTag2AggrParamAst:\t" + distTag2AggrParamAst);
    for (int i = 0; i < distTag2AggrParamAst.size(); i++) {
        StringBuffer sb = new StringBuffer();
        for (int j = 0; j < distTag2AggrParamAst.get(i).size(); j++) {
            sb.append(distTag2AggrParamAst.get(i).get(j).toStringTree()).append("\t");
        }
        LOG.debug("distTag2AggrParamAst[" + i + "]:\t" + sb.toString());
    }
    LOG.debug("nonDistPos2TagOffs:\t" + nonDistPos2TagOffs);
    for (Integer key : nonDistPos2TagOffs.keySet()) {
        LOG.debug("nonDistPos2TagOffs[" + key + "]:\t" + nonDistPos2TagOffs.get(key));
    }
}

From source file:org.apache.hadoop.hbase.backup.impl.IncrementalBackupManager.java

/**
 * Get list of WAL files eligible for incremental backup
 * @return list of WAL files
 * @throws IOException
 */
public List<String> getIncrBackupLogFileList() throws IOException {
    List<String> logList;
    HashMap<String, Long> newTimestamps;
    HashMap<String, Long> previousTimestampMins;

    String savedStartCode = readBackupStartCode();

    // key: tableName
    // value: <RegionServer,PreviousTimeStamp>
    HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();

    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);

    if (LOG.isDebugEnabled()) {
        LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());
    }
    // get all new log files from .logs and .oldlogs after last TS and before new timestamp
    if (savedStartCode == null || previousTimestampMins == null || previousTimestampMins.isEmpty()) {
        throw new IOException("Cannot read any previous back up timestamps from backup system table. "
                + "In order to create an incremental backup, at least one full backup is needed.");
    }

    newTimestamps = readRegionServerLastLogRollResult();

    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);
    List<WALItem> logFromSystemTable = getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps,
            getBackupInfo().getBackupRootDir());

    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
    backupInfo.setIncrBackupFileList(logList);

    return logList;
}

From source file:org.apache.hadoop.hbase.backup.impl.IncrementalBackupManager.java

/**
 * Obtain the list of logs that need to be copied out for this incremental backup. The list is set
 * in BackupInfo.
 * @return The new HashMap of RS log time stamps after the log roll for this incremental backup.
 * @throws IOException exception
 */
public HashMap<String, Long> getIncrBackupLogFileMap() throws IOException {
    List<String> logList;
    HashMap<String, Long> newTimestamps;
    HashMap<String, Long> previousTimestampMins;

    String savedStartCode = readBackupStartCode();

    // key: tableName
    // value: <RegionServer,PreviousTimeStamp>
    HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();

    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);

    if (LOG.isDebugEnabled()) {
        LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());
    }
    // get all new log files from .logs and .oldlogs after last TS and before new timestamp
    if (savedStartCode == null || previousTimestampMins == null || previousTimestampMins.isEmpty()) {
        throw new IOException("Cannot read any previous back up timestamps from backup system table. "
                + "In order to create an incremental backup, at least one full backup is needed.");
    }

    LOG.info("Execute roll log procedure for incremental backup ...");
    HashMap<String, String> props = new HashMap<String, String>();
    props.put("backupRoot", backupInfo.getBackupRootDir());

    try (Admin admin = conn.getAdmin()) {

        admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
                LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);

    }
    newTimestamps = readRegionServerLastLogRollResult();

    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);
    List<WALItem> logFromSystemTable = getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps,
            getBackupInfo().getBackupRootDir());
    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
    backupInfo.setIncrBackupFileList(logList);

    return newTimestamps;
}

From source file:org.apache.phoenix.mapreduce.OrphanViewTool.java

/**
 * Starting from the child views of the base tables in baseSet, visit views level by level, identify
 * missing or broken links, and thereby identify orphan views.
 */
private void visitViewsLevelByLevelAndIdentifyOrphanViews() {
    if (baseSet.isEmpty())
        return;
    HashMap<Key, View> viewSet = new HashMap<>();
    viewSetArray.add(0, viewSet);
    // Remove the child views of the tables of baseSet from orphanViewSet and add them to viewSetArray[0]
    // if these views have the correct physical link
    for (Map.Entry<Key, Base> baseEntry : baseSet.entrySet()) {
        for (Key child : baseEntry.getValue().childViews) {
            View childView = orphanViewSet.get(child);
            if (childView != null && childView.base != null && childView.base.equals(baseEntry.getKey())) {
                orphanViewSet.remove(child);
                viewSet.put(child, childView);
            }
        }
    }
    HashMap<Key, View> parentViewSet = viewSet;
    // Remove the child views of viewSetArray[N] from orphanViewSet and add them to viewSetArray[N+1]
    // if these views have the correct physical link and parent link
    maxViewLevel = 1;
    for (int i = 1; !parentViewSet.isEmpty(); i++) {
        HashMap<Key, View> childViewSet = new HashMap<>();
        viewSetArray.add(i, childViewSet);
        for (Map.Entry<Key, View> viewEntry : parentViewSet.entrySet()) {
            View parentView = viewEntry.getValue();
            Key parentKey = viewEntry.getKey();
            if (parentView.isParent()) {
                for (Key child : parentView.childViews) {
                    View childView = orphanViewSet.get(child);
                    if (childView != null && childView.parent != null && childView.parent.equals(parentKey)
                            && childView.base != null && childView.base.equals(parentView.base)) {
                        orphanViewSet.remove(child);
                        childViewSet.put(child, childView);
                    }
                }
            }
        }
        parentViewSet = childViewSet;
        maxViewLevel += 1;
    }
}

From source file:org.dcm4chex.archive.hsm.VerifyTar.java

public static Map<String, byte[]> verify(InputStream in, String tarname, byte[] buf,
        ArrayList<String> objectNames) throws IOException, VerifyTarException {
    TarInputStream tar = new TarInputStream(in);
    try {
        log.debug("Verify tar file: {}", tarname);
        TarEntry entry = tar.getNextEntry();
        if (entry == null)
            throw new VerifyTarException("No entries in " + tarname);
        String entryName = entry.getName();
        if (!"MD5SUM".equals(entryName))
            throw new VerifyTarException("Missing MD5SUM entry in " + tarname);
        BufferedReader dis = new BufferedReader(new InputStreamReader(tar));

        HashMap<String, byte[]> md5sums = new HashMap<String, byte[]>();
        String line;
        while ((line = dis.readLine()) != null) {
            char[] c = line.toCharArray();
            byte[] md5sum = new byte[16];
            // each byte of the checksum is encoded as two hex digits
            for (int i = 0, j = 0; i < md5sum.length; i++, j += 2) {
                md5sum[i] = (byte) ((fromHexDigit(c[j]) << 4) | fromHexDigit(c[j + 1]));
            }
            md5sums.put(line.substring(34), md5sum);
        }
        Map<String, byte[]> entries = new HashMap<String, byte[]>(md5sums.size());
        entries.putAll(md5sums);
        MessageDigest digest;
        try {
            digest = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
        while ((entry = tar.getNextEntry()) != null) {
            entryName = entry.getName();
            log.debug("START: Check MD5 of entry: {}", entryName);
            if (objectNames != null && !objectNames.remove(entryName))
                throw new VerifyTarException(
                        "TAR " + tarname + " contains entry: " + entryName + " not in file list");
            byte[] md5sum = md5sums.remove(entryName);
            if (md5sum == null)
                throw new VerifyTarException("Unexpected TAR entry: " + entryName + " in " + tarname);
            digest.reset();
            in = new DigestInputStream(tar, digest);
            // drain the entry through the digest stream to accumulate its MD5
            while (in.read(buf) > 0)
                ;
            if (!Arrays.equals(digest.digest(), md5sum)) {
                throw new VerifyTarException("Failed MD5 check of TAR entry: " + entryName + " in " + tarname);
            }
            log.debug("DONE: Check MD5 of entry: {}", entryName);
        }
        if (!md5sums.isEmpty())
            throw new VerifyTarException("Missing TAR entries: " + md5sums.keySet() + " in " + tarname);
        if (objectNames != null && !objectNames.isEmpty())
            throw new VerifyTarException(
                    "Missing TAR entries from object list: " + objectNames.toString() + " in " + tarname);
        return entries;
    } finally {
        tar.close();
    }
}