Example usage for java.util.HashMap size()

List of usage examples for java.util.HashMap.size()

Introduction

On this page you can find example usage for java.util.HashMap.size().

Prototype

public int size()

Document

Returns the number of key-value mappings contained in this map.
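
Before the project examples below, here is a minimal self-contained sketch (class and variable names are illustrative, not taken from any of the source files) showing what size() reports and the non-empty guard pattern that recurs throughout the usage examples:

import java.util.HashMap;

public class HashMapSizeDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> wordCounts = new HashMap<>();

        // A freshly created map has size 0
        System.out.println("Initial size: " + wordCounts.size());

        wordCounts.put("alpha", 1);
        wordCounts.put("beta", 2);
        wordCounts.put("alpha", 3); // overwrites an existing mapping; size does not grow

        // size() counts distinct keys, not the number of put() calls
        System.out.println("Size after puts: " + wordCounts.size()); // prints 2

        // A guard seen in several examples below: only proceed when the map is non-empty
        if (wordCounts.size() > 0) {
            System.out.println("Map holds " + wordCounts.size() + " entries");
        }
    }
}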

Usage

From source file:com.globalsight.machineTranslation.domt.DoMTProxy.java

/**
 * Send plain text segments to the DoMT engine for translation.
 */
private String[] translatePureText(Locale sourceLocale, Locale targetLocale, String[] segments) {
    String[] results = new String[segments.length];
    try {
        // Ensure the sequence will be unchanged after translation.
        HashMap<Integer, String> id2Segs = new HashMap<Integer, String>();
        for (int i = 0; i < segments.length; i++) {
            String[] segmentsFromGxml = MTHelper.getSegmentsInGxml(segments[i]);
            if (segmentsFromGxml == null || segmentsFromGxml.length < 1) {
                results[i] = segments[i];
            } else {
                for (int count = 0; count < segmentsFromGxml.length; count++) {
                    id2Segs.put(composeKey(i, count), EditUtil.encodeXmlEntities(segmentsFromGxml[count]));
                }
            }
        }

        if (id2Segs.size() > 0) {
            String srcXlf = getDoMtXliff(id2Segs, sourceLocale, targetLocale);
            if (MTHelper.isLogDetailedInfo(ENGINE_DOMT)) {
                logger.info("Segments in XLF sending to DoMT:" + srcXlf);
            }

            String translatedXlf = hitDoMt(sourceLocale, targetLocale, srcXlf);
            if (MTHelper.isLogDetailedInfo(ENGINE_DOMT)) {
                logger.info("Segments in XLF returned from DoMT:" + translatedXlf);
            }

            // id :: translated targets
            HashMap<Integer, String> targets = extractDoMtReturning(translatedXlf);

            HashMap<Integer, HashMap<Integer, String>> targetGroups = getTargetGroups(targets);

            for (int mainIndex = 0; mainIndex < segments.length; mainIndex++) {
                HashMap<Integer, String> subSet = targetGroups.get(mainIndex);
                GxmlElement gxmlRoot = MTHelper.getGxmlElement(segments[mainIndex]);
                List items = MTHelper.getImmediateAndSubImmediateTextNodes(gxmlRoot);
                for (int subIndex = 0; subIndex < items.size(); subIndex++) {
                    TextNode textNode = (TextNode) items.get(subIndex);
                    if (subSet == null) {
                        textNode.setTextBuffer(new StringBuffer(""));
                    } else {
                        String seg = subSet.get(subIndex);
                        // if DoMT fails to translate this, it returns -1.
                        if (seg == null || "-1".equals(seg)) {
                            seg = "";
                        }
                        textNode.setTextBuffer(new StringBuffer(seg));
                    }
                }
                results[mainIndex] = gxmlRoot.toGxml();
            }
        }
    } catch (MachineTranslationException e) {
        logger.error(e.getMessage());
    }

    return results;
}

From source file:eu.dety.burp.joseph.attacks.key_confusion.KeyConfusionInfo.java

@Override
public HashMap<String, String> updateValuesByPayload(Enum payloadTypeId, String header, String payload,
        String signature) throws AttackPreparationFailedException {
    String publicKeyValue = publicKey.getText();
    int publicKeyFormat = publicKeySelection.getSelectedIndex();

    String modifiedKey;

    switch (publicKeyFormat) {
    // JWK (JSON)
    case 1:
        loggerInstance.log(getClass(), "Key format is JWK:  " + publicKeyValue, Logger.LogLevel.DEBUG);

        HashMap<String, PublicKey> publicKeys;
        PublicKey selectedPublicKey;

        try {
            Object publickKeyValueJson = new JSONParser().parse(publicKeyValue);

            publicKeys = Converter.getRsaPublicKeysByJwkWithId(publickKeyValueJson);
        } catch (Exception e) {
            loggerInstance.log(getClass(), "Error in updateValuesByPayload (JWK):  " + e.getMessage(),
                    Logger.LogLevel.ERROR);
            throw new AttackPreparationFailedException(bundle.getString("NOT_VALID_JWK"));
        }

        switch (publicKeys.size()) {
        // No suitable JWK in JWK Set found
        case 0:
            loggerInstance.log(getClass(), "Error in updateValuesByPayload (JWK): No suitable JWK",
                    Logger.LogLevel.ERROR);
            throw new AttackPreparationFailedException(bundle.getString("NO_SUITABLE_JWK"));

        // Exactly one suitable JWK found
        case 1:
            selectedPublicKey = publicKeys.entrySet().iterator().next().getValue();
            break;

        // More than one suitable JWK found. Provide dialog to select one.
        default:
            selectedPublicKey = Converter.getRsaPublicKeyByJwkSelectionPanel(publicKeys);
        }

        try {
            modifiedKey = transformKeyByPayload(payloadTypeId, selectedPublicKey);
        } catch (Exception e) {
            loggerInstance.log(getClass(), "Error in updateValuesByPayload (JWK):  " + e.getMessage(),
                    Logger.LogLevel.ERROR);
            throw new AttackPreparationFailedException(bundle.getString("ATTACK_PREPARATION_FAILED"));
        }

        break;
    // PEM (String)
    default:
        loggerInstance.log(getClass(), "Key format is PEM:  " + publicKeyValue, Logger.LogLevel.DEBUG);

        // Simple check if String has valid format
        if (!publicKeyValue.trim().startsWith("-----BEGIN") && !publicKeyValue.trim().startsWith("MI")) {
            throw new AttackPreparationFailedException(bundle.getString("NOT_VALID_PEM"));
        }

        try {
            modifiedKey = transformKeyByPayload(payloadTypeId, publicKeyValue);

        } catch (Exception e) {
            loggerInstance.log(getClass(), "Error in updateValuesByPayload (PEM):  " + e.getMessage(),
                    Logger.LogLevel.ERROR);
            throw new AttackPreparationFailedException(bundle.getString("NOT_VALID_PEM"));
        }

    }

    Pattern jwsPattern = Pattern.compile("\"alg\":\"(.+?)\"", Pattern.CASE_INSENSITIVE);
    Matcher jwsMatcher = jwsPattern.matcher(header);

    String algorithm = (jwsMatcher.find()) ? jwsMatcher.group(1) : "HS256";

    String macAlg = Crypto.getMacAlgorithmByJoseAlgorithm(algorithm, "HmacSHA256");

    if (!Crypto.JWS_HMAC_ALGS.contains(algorithm))
        algorithm = "HS256";

    header = header.replaceFirst("\"alg\":\"(.+?)\"", "\"alg\":\"" + algorithm + "\"");

    HashMap<String, String> result = new HashMap<>();
    result.put("header", header);
    result.put("payload", payload);
    result.put(
            "signature", Decoder
                    .getEncoded(
                            Crypto.generateMac(macAlg, helpers.stringToBytes(modifiedKey),
                                    helpers.stringToBytes(Decoder.concatComponents(new String[] {
                                            Decoder.base64UrlEncode(helpers.stringToBytes(header)),
                                            Decoder.base64UrlEncode(helpers.stringToBytes(payload)) })))));

    if (publicKeyValue.isEmpty()) {
        return result;
    }

    return result;
}

From source file:edu.csupomona.nlp.tool.crawler.Facebook.java

/**
 * Get all the Comments for the Post.
 * The replies to the comments are not included, because Facebook does not
 * provide such an API query.
 * @param post              Post to be parsed
 * @return                  HashMap of Comments
 */
public HashMap<String, Comment> getComments(Post post) {
    HashMap<String, Comment> fullComments = new HashMap<>();

    // get first few comments using getComments from post
    PagableList<Comment> comments = post.getComments();
    Paging<Comment> paging;
    do {
        // NOTE: so far we haven't figured out how to fetch replies
        // to the comments
        for (Comment comment : comments)
            fullComments.put(comment.getId(), comment);

        // get next page
        // NOTE: somehow a few comments will not be included;
        // however, this won't affect our research much
        paging = comments.getPaging();

        // throttle the request rate
        pause(1);

        // trace
        System.out.println("Getting comments... " + fullComments.size());

        // get next page
        if (paging != null)
            for (int n = 1; n <= maxRetries_; ++n) {
                try {
                    comments = fb_.fetchNext(paging);
                } catch (FacebookException ex) { // exception & retry
                    Logger.getLogger(Facebook.class.getName()).log(Level.SEVERE, null, ex);
                    pause(STEP_SEC_ * n);
                    System.out.println("Starting retry... " + n + "/" + maxRetries_);
                    continue;
                }
                break;
            }
    } while ((paging != null) && (comments != null));

    return fullComments;
}

From source file:au.org.paperminer.main.LocationFilter.java

/**
 * Returns JSON struct for a given set of TROVE ids:
 *  refs: [{troveId:[locnId,freq]*]*
 * or blank if none.
 * @param req
 * @param resp
 */
private void getReferences(HttpServletRequest req, HttpServletResponse resp) {
    HashMap<String, ArrayList<ArrayList<String>>> map = new HashMap<String, ArrayList<ArrayList<String>>>();
    try {
        String arg = req.getParameter("lst");
        if ((arg != null) && (arg.length() > 0)) {
            String[] refs = arg.split(",");
            m_logger.debug("locationFilter getReferences: " + arg + " length:" + refs.length);
            for (int i = 0; i < refs.length; i++) {
                ArrayList<ArrayList<String>> tmp = m_helper.getLocationsForRef(refs[i]);
                if (tmp != null) {
                    map.put(refs[i], tmp);
                    m_logger.debug("locationFilter ref: " + refs[i] + " is " + tmp);
                }
            }
        }
        resp.setContentType("text/json");
        PrintWriter pm = resp.getWriter();
        String jsonStr = "";
        if (map.size() > 0) {
            jsonStr = "{\"refs\":" + JSONValue.toJSONString(map) + "}";
        }
        pm.write(jsonStr);
        pm.close();
        m_logger.debug("locationFilter getReferences JSON: " + jsonStr);

    } catch (PaperMinerException ex) {
        req.setAttribute(PaperMinerConstants.ERROR_PAGE, "e301");
    } catch (IOException ex) {
        req.setAttribute(PaperMinerConstants.ERROR_PAGE, "e114");
    }
}

From source file:net.pms.PMS.java

private void displayBanner() throws IOException {
    logger.info("Starting " + PropertiesUtil.getProjectProperties().get("project.name") + " " + getVersion());
    logger.info("by shagrath / 2008-2013");
    logger.info("http://ps3mediaserver.org");
    logger.info("https://github.com/ps3mediaserver/ps3mediaserver");
    logger.info("");

    String commitId = PropertiesUtil.getProjectProperties().get("git.commit.id");
    String commitTime = PropertiesUtil.getProjectProperties().get("git.commit.time");
    String shortCommitId = commitId.substring(0, 9);

    logger.info("Build: " + shortCommitId + " (" + commitTime + ")");

    // Log system properties
    logSystemInfo();

    String cwd = new File("").getAbsolutePath();
    logger.info("Working directory: " + cwd);

    logger.info("Temp directory: " + configuration.getTempFolder());

    // Verify the java.io.tmpdir is writable; JNA requires it.
    // Note: the configured tempFolder has already been checked, but it
    // may differ from the java.io.tmpdir so double check to be sure.
    File javaTmpdir = new File(System.getProperty("java.io.tmpdir"));

    if (!FileUtil.isDirectoryWritable(javaTmpdir)) {
        logger.error("The Java temp directory \"{}\" is not writable for PMS!", javaTmpdir.getAbsolutePath());
        logger.error("Please make sure the directory is writable for user \"{}\"",
                System.getProperty("user.name"));
        throw new IOException("Cannot write to Java temp directory");
    }

    logger.info("Logging config file: {}", LoggingConfigFileLoader.getConfigFilePath());

    HashMap<String, String> lfps = LoggingConfigFileLoader.getLogFilePaths();

    // debug.log filename(s) and path(s)
    if (lfps != null && lfps.size() > 0) {
        if (lfps.size() == 1) {
            Entry<String, String> entry = lfps.entrySet().iterator().next();
            logger.info(String.format("%s: %s", entry.getKey(), entry.getValue()));
        } else {
            logger.info("Logging to multiple files:");
            Iterator<Entry<String, String>> logsIterator = lfps.entrySet().iterator();
            Entry<String, String> entry;
            while (logsIterator.hasNext()) {
                entry = logsIterator.next();
                logger.info(String.format("%s: %s", entry.getKey(), entry.getValue()));
            }
        }
    }

    String profilePath = configuration.getProfilePath();
    String profileDirectoryPath = configuration.getProfileDirectory();
    logger.info("");
    logger.info("Profile directory: {}", profileDirectoryPath);
    logger.info("Profile directory permissions: {}", getPathPermissions(profileDirectoryPath));
    logger.info("Profile path: {}", profilePath);
    logger.info("Profile permissions: {}", getPathPermissions(profilePath));
    logger.info("Profile name: {}", configuration.getProfileName());

    String webConfPath = configuration.getWebConfPath();
    logger.info("");
    logger.info("Web conf path: {}", webConfPath);
    logger.info("Web conf permissions: {}", getPathPermissions(webConfPath));

    logger.info("");
}

From source file:com.google.gwt.emultest.java.util.HashMapTest.java

public void testEntrySetEntrySetterString() {
    HashMap<String, String> hashMap = new HashMap<String, String>();
    hashMap.put("A", "B");
    Set<Map.Entry<String, String>> entrySet = hashMap.entrySet();
    Map.Entry<String, String> entry = entrySet.iterator().next();

    entry.setValue("C");
    assertEquals("C", hashMap.get("A"));

    hashMap.put("A", "D");
    assertEquals("D", entry.getValue());

    assertEquals(1, hashMap.size());
}

From source file:terse.vm.Terp.java

public Dict handleUrl(String url, HashMap<String, String> query) {
    say("runUrl: %s", url);
    query = (query == null) ? new HashMap<String, String>() : query;
    Ur[] queryArr = new Ur[query.size()];
    int i = 0;
    for (String k : query.keySet()) {
        String v = query.get(k);
        if (k == null)
            k = "HOW_DID_WE_GET_A_NULL_KEY";
        if (v == null)
            v = "HOW_DID_WE_GET_A_NULL_VALUE";
        Ur queryKey = newStr(k);
        Ur queryValue = newStr(v.replaceAll("\r\n", "\n"));
        queryArr[i] = new Vec(this, urs(queryKey, queryValue));
        ++i;
    }
    Dict qDict = newDict(queryArr);
    assert url.startsWith("/");
    if (url.equals("/")) {
        url = "/Top";
    }

    // To get app name, skip the initial '/', and split on dots.
    String[] word = url.substring(1).split("[.]");
    assert word.length > 0;
    String appName = word[0];

    Dict result = null;
    try {
        Cls cls = getTerp().clss.get(appName.toLowerCase());
        if (cls == null) {
            toss("Rendering class does not exist: <%s>", appName);
        }

        String urlRepr = newStr(url).repr();
        String qDictRepr = qDict.repr(); // Inefficient. TODO.
        Ur result_ur = instNil;
        int id = 0;

        Obj inst = null;
        try {
            id = Integer.parseInt(word[1]);
            if (cls instanceof Usr.UsrCls) {
                inst = ((Usr.UsrCls) cls).cache.find(id);
            }
        } catch (Exception _) {
            // pass.
        }
        long before = tickCounter;
        long nanosBefore = System.nanoTime();
        if (inst != null) {
            result_ur = inst.eval(fmt("self handle: (%s) query: (%s)", urlRepr, qDictRepr));
        } else if (Send.understands(cls, "handle:query:")) {
            say("CLS <%s> understands handle:query: so sending to class.", cls);
            // First try sending to the class.
            result_ur = cls.eval(fmt("self handle: (%s) query: (%s)", urlRepr, qDictRepr));
        } else {
            Ur instance = cls.eval("self new");
            Usr usrInst = instance.asUsr();
            // TODO: LRU & mention() conflict with Cls.insts map.
            id = usrInst == null ? 0 : usrInst.omention(); // LRU Cache

            // Next try creating new instance, and send to it.
            result_ur = instance.asObj()
                    .eval(fmt("self handle: (%s) query: (%s)", newStr(url).repr(), qDict.repr()));
        }
        result = result_ur.asDict();
        if (result == null) {
            toss("Sending <handle:query:> to instance of <%s> did not return a Dict: <%s>", appName, result_ur);
        }
        result.dict.put(newStr("id"), newStr(Integer.toString(id)));
        long after = tickCounter;
        long nanosAfter = System.nanoTime();
        result.dict.put(newStr("ticks"), newNum(before - after));
        result.dict.put(newStr("nanos"), newNum(nanosAfter - nanosBefore));
        say("<handle:query:> used %d ticks and %.3f secs.", before - after,
                (double) (nanosAfter - nanosBefore) / 1000000000.0);

    } catch (Exception ex) {
        ex.printStackTrace();
        StringBuffer sb = new StringBuffer(ex.toString());
        StackTraceElement[] elems = ex.getStackTrace();
        for (StackTraceElement e : elems) {
            sb.append("\n  * ");
            sb.append(e.toString());
        }
        Ur[] dict_arr = urs(new Vec(this, urs(newStr("type"), newStr("text"))),
                new Vec(this, urs(newStr("title"), newStr(ex.toString()))),
                new Vec(this, urs(newStr("value"), newStr(sb.toString()))));
        result = newDict(dict_arr);
    } catch (TooManyTicks err) {
        err.printStackTrace();
        String s = fmt("TOO_MANY_TICKS_IN_handleUrl <%s> qdict <%s>", url, qDict);
        Ur[] dict_arr = urs(new Vec(this, urs(newStr("type"), newStr("text"))),
                new Vec(this, urs(newStr("title"), newStr(err.toString()))),
                new Vec(this, urs(newStr("value"), newStr(s))));
        result = newDict(dict_arr);
    } catch (Error err) {
        err.printStackTrace();
        Ur[] dict_arr = urs(new Vec(this, urs(newStr("type"), newStr("text"))),
                new Vec(this, urs(newStr("title"), newStr(err.toString()))),
                new Vec(this, urs(newStr("value"), newStr(err.toString()))));
        result = newDict(dict_arr);
    }
    return result;
}

From source file:fr.cirad.mgdb.exporting.individualoriented.PLinkExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles,
        boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor,
        Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();

    String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size()
            + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".ped"));

    TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>();
    short nProgress = 0, nPreviousProgress = 0;
    int i = 0;
    for (File f : individualExportFiles) {
        BufferedReader in = new BufferedReader(new FileReader(f));
        try {
            String individualId, line = in.readLine(); // read sample id
            if (line != null) {
                individualId = line;
                String population = getIndividualPopulation(sModule, line);
                String individualInfo = (population == null ? "." : population) + " " + individualId;
                zos.write((individualInfo + " 0 0 0 " + getIndividualGenderCode(sModule, individualId))
                        .getBytes());
            } else
                throw new Exception("Unable to read first line of temp export file " + f.getName());

            int nMarkerIndex = 0;
            while ((line = in.readLine()) != null) {
                List<String> genotypes = MgdbDao.split(line, "|");
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                for (String genotype : genotypes) {
                    if (genotype.length() == 0)
                        continue; /* skip missing genotypes */

                    int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                    if (gtCount > highestGenotypeCount) {
                        highestGenotypeCount = gtCount;
                        mostFrequentGenotype = genotype;
                    }
                    genotypeCounts.put(genotype, gtCount);
                }

                if (genotypeCounts.size() > 1) {
                    warningFileWriter.write("- Dissimilar genotypes found for variant " + nMarkerIndex
                            + ", individual " + individualId + ". Exporting most frequent: "
                            + mostFrequentGenotype + "\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String[] alleles = mostFrequentGenotype == null ? new String[0]
                        : mostFrequentGenotype.split(" ");
                if (alleles.length > 2) {
                    warningFileWriter.write("- More than 2 alleles found for variant " + nMarkerIndex
                            + ", individual " + individualId + ". Exporting only the first 2 alleles.\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                }

                String all1 = alleles.length == 0 ? "0" : alleles[0];
                String all2 = alleles.length == 0 ? "0" : alleles[alleles.length == 1 ? 0 : 1];
                if (all1.length() != 1 || all2.length() != 1) {
                    warningFileWriter
                            .write("- SNP expected, but alleles are not coded on a single char for variant "
                                    + nMarkerIndex + ", individual " + individualId
                                    + ". Ignoring this genotype.\n");
                    problematicMarkerIndexToNameMap.put(nMarkerIndex, "");
                } else
                    zos.write((" " + all1 + " " + all2).getBytes());

                nMarkerIndex++;
            }
        } catch (Exception e) {
            LOG.error("Error exporting data", e);
            progress.setError("Error exporting data: " + e.getClass().getSimpleName()
                    + (e.getMessage() != null ? " - " + e.getMessage() : ""));
            return;
        } finally {
            in.close();
        }

        if (progress.hasAborted())
            return;

        nProgress = (short) (++i * 100 / individualExportFiles.size());
        if (nProgress > nPreviousProgress) {
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
        zos.write('\n');

        if (!f.delete()) {
            f.deleteOnExit();
            LOG.info("Unable to delete tmp export file " + f.getAbsolutePath());
        }
    }
    warningFileWriter.close();

    zos.putNextEntry(new ZipEntry(exportName + ".map"));

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    markerCursor.batchSize(nChunkSize);
    int nMarkerIndex = 0;
    while (markerCursor.hasNext()) {
        DBObject exportVariant = markerCursor.next();
        DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
        Comparable markerId = (Comparable) exportVariant.get("_id");
        String chrom = (String) refPos.get(ReferencePosition.FIELDNAME_SEQUENCE);
        Long pos = ((Number) refPos.get(ReferencePosition.FIELDNAME_START_SITE)).longValue();

        if (chrom == null)
            LOG.warn("Chromosomal position not found for marker " + markerId);
        Comparable exportedId = markerSynonyms == null ? markerId : markerSynonyms.get(markerId);
        zos.write(((chrom == null ? "0" : chrom) + " " + exportedId + " " + 0 + " " + (pos == null ? 0 : pos)
                + LINE_SEPARATOR).getBytes());

        if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) { // we are going to need this marker's name for the warning file
            Comparable variantName = markerId;
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(markerId);
                if (syn != null)
                    variantName = syn;
            }
            problematicMarkerIndexToNameMap.put(nMarkerIndex, variantName);
        }
        nMarkerIndex++;
    }

    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet())
                sLine = sLine.replaceAll("__" + aMarkerIndex + "__",
                        problematicMarkerIndexToNameMap.get(aMarkerIndex).toString());
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:org.openmeetings.app.ldap.LdapLoginManagement.java

/**
 * Ldap Login
 * 
 * Connection Data is retrieved from ConfigurationFile
 * 
 */
// ----------------------------------------------------------------------------------------
public Object doLdapLogin(String user, String passwd, RoomClient currentClient, String SID, String domain) {
    log.debug("LdapLoginmanagement.doLdapLogin");

    // Retrieve Configuration Data
    HashMap<String, String> configData;

    try {
        configData = getLdapConfigData(domain);
    } catch (Exception e) {
        log.error("Error on LdapAuth : " + e.getMessage());
        return null;
    }

    if (configData == null || configData.size() < 1) {
        log.error("Error on LdapLogin : Configurationdata couldnt be retrieved!");
        return null;
    }

    // Connection URL
    String ldap_url = configData.get(CONFIGKEY_LDAP_URL);

    // for OpenLDAP only
    // LDAP SERVER TYPE to search accordingly
    String ldap_server_type = configData.get(CONFIGKEY_LDAP_SERVER_TYPE);

    // Username for the LDAP server itself
    String ldap_admin_dn = configData.get(CONFIGKEY_LDAP_ADMIN_DN);

    // Password for the LDAP server itself
    String ldap_passwd = configData.get(CONFIGKEY_LDAP_ADMIN_PASSWD);

    // SearchScope for retrieval of user data
    String ldap_search_scope = configData.get(CONFIGKEY_LDAP_SEARCH_SCOPE);

    // FieldName for the user's Principal Name
    String ldap_fieldname_user_principal = configData.get(CONFIGKEY_LDAP_FIELDNAME_USER_PRINCIPAL);

    // Whether or not we'll store the Ldap passwd in the OM db
    String ldap_sync_passwd_to_om = configData.get(CONFIGKEY_LDAP_SYNC_PASSWD_OM);

    /***
     * for future use (lemeur) // Ldap user filter to refine the search
     * String ldap_user_extrafilter =
     * configData.get(CONFIGKEY_LDAP_USER_EXTRAFILTER);
     * 
     * // Count of Ldap group filters String ldap_group_filter_num =
     * configData.get(CONFIGKEY_LDAP_GROUP_FILTER_NUM);
     * 
     * // Prefix name of Ldap group filter name String
     * ldap_group_filter_name_prefix =
     * configData.get(CONFIGKEY_LDAP_GROUP_FILTER_NAME_PREFIX);
     * 
     * // Prefix name of Ldap group filter base String
     * ldap_group_filter_base_prefix =
     * configData.get(CONFIGKEY_LDAP_GROUP_FILTER_NAME_PREFIX);
     * 
     * // Prefix name of Ldap group filter type String
     * ldap_group_filter_type_prefix =
     * configData.get(CONFIGKEY_LDAP_GROUP_FILTER_TYPE_PREFIX);
     * 
     * // Prefix name of Ldap group filter text String
     * ldap_group_filter_text_prefix =
     * configData.get(CONFIGKEY_LDAP_GROUP_FILTER_TEXT_PREFIX);
     ***/

    // Get custom Ldap attributes mapping
    String ldap_user_attr_lastname = configData.get(CONFIGKEY_LDAP_KEY_LASTNAME);
    String ldap_user_attr_firstname = configData.get(CONFIGKEY_LDAP_KEY_FIRSTNAME);
    String ldap_user_attr_mail = configData.get(CONFIGKEY_LDAP_KEY_MAIL);
    String ldap_user_attr_street = configData.get(CONFIGKEY_LDAP_KEY_STREET);
    String ldap_user_attr_additional_name = configData.get(CONFIGKEY_LDAP_KEY_ADDITIONAL_NAME);
    String ldap_user_attr_fax = configData.get(CONFIGKEY_LDAP_KEY_FAX);
    String ldap_user_attr_zip = configData.get(CONFIGKEY_LDAP_KEY_ZIP);
    String ldap_user_attr_country = configData.get(CONFIGKEY_LDAP_KEY_COUNTRY);
    String ldap_user_attr_town = configData.get(CONFIGKEY_LDAP_KEY_TOWN);
    String ldap_user_attr_phone = configData.get(CONFIGKEY_LDAP_KEY_PHONE);
    String ldap_user_attr_timezone = configData.get(CONFIGKEY_LDAP_TIMEZONE_NAME);
    String ldap_use_lower_case = configData.get(CONFIGKEY_LDAP_USE_LOWER_CASE);

    if (ldap_use_lower_case != null && ldap_use_lower_case.equals("true")) {
        user = user.toLowerCase();
    }

    if (ldap_user_attr_lastname == null) {
        ldap_user_attr_lastname = LDAP_KEY_LASTNAME;
    }
    if (ldap_user_attr_firstname == null) {
        ldap_user_attr_firstname = LDAP_KEY_FIRSTNAME;
    }
    if (ldap_user_attr_mail == null) {
        ldap_user_attr_mail = LDAP_KEY_MAIL;
    }
    if (ldap_user_attr_street == null) {
        ldap_user_attr_street = LDAP_KEY_STREET;
    }
    if (ldap_user_attr_additional_name == null) {
        ldap_user_attr_additional_name = LDAP_KEY_ADDITIONAL_NAME;
    }
    if (ldap_user_attr_fax == null) {
        ldap_user_attr_fax = LDAP_KEY_FAX;
    }
    if (ldap_user_attr_zip == null) {
        ldap_user_attr_zip = LDAP_KEY_ZIP;
    }
    if (ldap_user_attr_country == null) {
        ldap_user_attr_country = LDAP_KEY_COUNTRY;
    }
    if (ldap_user_attr_town == null) {
        ldap_user_attr_town = LDAP_KEY_TOWN;
    }
    if (ldap_user_attr_phone == null) {
        ldap_user_attr_phone = LDAP_KEY_PHONE;
    }
    if (ldap_user_attr_timezone == null) {
        ldap_user_attr_timezone = LDAP_KEY_TIMEZONE;
    }

    // Auth Type
    String ldap_auth_type = configData.get(CONFIGKEY_LDAP_AUTH_TYPE);

    if (ldap_auth_type == null)
        ldap_auth_type = "";

    if (!isValidAuthType(ldap_auth_type)) {
        log.error("ConfigKey in Ldap Config contains invalid auth type : '" + ldap_auth_type
                + "' -> Defaulting to " + LdapAuthBase.LDAP_AUTH_TYPE_SIMPLE);
        ldap_auth_type = LdapAuthBase.LDAP_AUTH_TYPE_SIMPLE;
    }

    // Filter for Search of UserData
    String ldap_search_filter = "(" + ldap_fieldname_user_principal + "=" + user + ")";

    log.debug("Searching userdata with LDAP Search Filter :" + ldap_search_filter);

    // In the config, '=' characters are stored as ':'; convert them back here
    // so we can build valid key=value pairs
    ldap_search_scope = ldap_search_scope.replaceAll(":", "=");
    ldap_admin_dn = ldap_admin_dn.replaceAll(":", "=");

    LdapAuthBase lAuth = new LdapAuthBase(ldap_url, ldap_admin_dn, ldap_passwd, ldap_auth_type);

    log.debug("authenticating admin...");
    lAuth.authenticateUser(ldap_admin_dn, ldap_passwd);

    log.debug("Checking server type...");
    // for OpenLDAP only
    if (ldap_server_type.equalsIgnoreCase("OpenLDAP")) {
        String ldapUserDN = user;
        log.debug("LDAP server is OpenLDAP");
        log.debug("LDAP search base: " + ldap_search_scope);
        HashMap<String, String> uidCnDictionary = lAuth.getUidCnHashMap(ldap_search_scope, ldap_search_filter,
                ldap_fieldname_user_principal);
        if (uidCnDictionary.get(user) != null) {
            ldapUserDN = uidCnDictionary.get(user) + "," + ldap_search_scope;
            log.debug("Authentication with DN: " + ldapUserDN);
        }
        try {
            if (!lAuth.authenticateUser(ldapUserDN, passwd)) {
                log.error(ldapUserDN + " not authenticated.");
                return new Long(-11);
            }
        } catch (Exception e) {
            log.error("Error on LdapAuth : " + e.getMessage());
            return null;
        }
    } else {
        try {
            if (!lAuth.authenticateUser(user, passwd))
                return new Long(-11);
        } catch (Exception e) {
            log.error("Error on LdapAuth : " + e.getMessage());
            return null;
        }
    }

    // check if user already exists

    Users u = null;

    try {
        u = userManagement.getUserByLogin(user);

    } catch (Exception e) {
        log.error("Error retrieving Userdata : " + e.getMessage());
    }

    // User does not exist in the local database -> copy the data over for
    // referential integrity
    if (u == null) {
        log.debug("user doesnt exist local -> create new");

        // Attributes to retrieve from ldap
        List<String> attributes = new ArrayList<String>();
        attributes.add(ldap_user_attr_lastname); // Lastname
        attributes.add(ldap_user_attr_firstname); // Firstname
        attributes.add(ldap_user_attr_mail);// mail
        attributes.add(ldap_user_attr_street); // Street
        attributes.add(ldap_user_attr_additional_name); // Additional name
        attributes.add(ldap_user_attr_fax); // Fax
        attributes.add(ldap_user_attr_zip); // ZIP
        attributes.add(ldap_user_attr_country); // Country
        attributes.add(ldap_user_attr_town); // Town
        attributes.add(ldap_user_attr_phone); // Phone
        attributes.add(ldap_user_attr_timezone); // Timezone

        HashMap<String, String> ldapAttrs = new HashMap<String, String>();
        ldapAttrs.put("lastnameAttr", ldap_user_attr_lastname);
        ldapAttrs.put("firstnameAttr", ldap_user_attr_firstname);
        ldapAttrs.put("mailAttr", ldap_user_attr_mail);
        ldapAttrs.put("streetAttr", ldap_user_attr_street);
        ldapAttrs.put("additionalNameAttr", ldap_user_attr_additional_name);
        ldapAttrs.put("faxAttr", ldap_user_attr_fax);
        ldapAttrs.put("zipAttr", ldap_user_attr_zip);
        ldapAttrs.put("countryAttr", ldap_user_attr_country);
        ldapAttrs.put("townAttr", ldap_user_attr_town);
        ldapAttrs.put("phoneAttr", ldap_user_attr_phone);
        ldapAttrs.put("phoneAttr", ldap_user_attr_phone);
        ldapAttrs.put("timezoneAttr", ldap_user_attr_timezone);

        Vector<HashMap<String, String>> result = lAuth.getData(ldap_search_scope, ldap_search_filter,
                attributes);

        if (result == null || result.size() < 1) {
            log.error("Error on Ldap request - no result for user " + user);
            return new Long(-10);
        }

        if (result.size() > 1) {
            log.error("Error on Ldap request - more than one result for user " + user);
            return null;
        }

        HashMap<String, String> userData = result.get(0);

        try {
            // Create User with LdapData
            Long userid;
            if (ldap_sync_passwd_to_om != null && ldap_sync_passwd_to_om.equals("no")) {
                Random r = new Random();
                String token = Long.toString(Math.abs(r.nextLong()), 36);
                log.debug("Synching Ldap user to OM DB with RANDOM password: " + token);
                userid = createUserFromLdapData(userData, token, user, ldapAttrs);
            } else {
                log.debug("Synching Ldap user to OM DB with password");
                userid = createUserFromLdapData(userData, passwd, user, ldapAttrs);
            }
            log.debug("New User ID : " + userid);

            // If invoked via SOAP this is NULL
            if (currentClient != null) {
                currentClient.setUser_id(userid);
            }

            // Update Session
            Boolean bool = sessionManagement.updateUser(SID, userid);

            if (bool == null) {
                // Exception
                log.error("Error on Updating Session");
                return new Long(-1);
            } else if (!bool) {
                // invalid Session-Object
                log.error("Invalid Session Object");
                return new Long(-35);
            }

            // Return UserObject
            Users u2 = userManagement.getUserById(userid);

            if (u2 == null)
                return new Long(-1);

            u2.setExternalUserType(EXTERNAL_USER_TYPE_LDAP); // TIBO

            // initialize lazy collection
            userManagement.refreshUserObject(u2);

            log.debug("getUserbyId : " + userid + " : " + u2.getLogin());

            return u2;

        } catch (Exception e) {
            log.error("Error on Working Userdata : ", e);
            return new Long(-1);
        }
    } else {
        // User exists, just update necessary values
        log.debug("User already exists -> Update of current passwd");

        // If invoked via SOAP this is NULL
        if (currentClient != null) {
            currentClient.setUser_id(u.getUser_id());
        }

        // Update Session
        Boolean bool = sessionManagement.updateUser(SID, u.getUser_id());

        if (bool == null) {
            // Exception
            log.error("Error on Updating Session");
            return new Long(-1);
        } else if (!bool) {
            // invalid Session-Object
            log.error("Invalid Session Object");
            return new Long(-35);
        }

        // Update password (could have changed in LDAP)
        if (ldap_sync_passwd_to_om == null || !ldap_sync_passwd_to_om.equals("no")) {
            u.setPassword(passwd);
        }
        try {
            userManagement.updateUserObject(u, true);
        } catch (Exception e) {
            log.error("Error updating user : " + e.getMessage());
            return new Long(-1);
        }

        return u;

    }
}

From source file:ca.uhn.fhir.jpa.term.TerminologyLoaderSvc.java

UploadStatistics processSnomedCtFiles(List<byte[]> theZipBytes, RequestDetails theRequestDetails) {
    final TermCodeSystemVersion codeSystemVersion = new TermCodeSystemVersion();
    final Map<String, TermConcept> id2concept = new HashMap<String, TermConcept>();
    final Map<String, TermConcept> code2concept = new HashMap<String, TermConcept>();
    final Set<String> validConceptIds = new HashSet<String>();

    IRecordHandler handler = new SctHandlerConcept(validConceptIds);
    iterateOverZipFile(theZipBytes, SCT_FILE_CONCEPT, handler, '\t', null);

    ourLog.info("Have {} valid concept IDs", validConceptIds.size());

    handler = new SctHandlerDescription(validConceptIds, code2concept, id2concept, codeSystemVersion);
    iterateOverZipFile(theZipBytes, SCT_FILE_DESCRIPTION, handler, '\t', null);

    ourLog.info("Got {} concepts, cloning map", code2concept.size());
    final HashMap<String, TermConcept> rootConcepts = new HashMap<String, TermConcept>(code2concept);

    handler = new SctHandlerRelationship(codeSystemVersion, rootConcepts, code2concept);
    iterateOverZipFile(theZipBytes, SCT_FILE_RELATIONSHIP, handler, '\t', null);

    theZipBytes.clear();

    ourLog.info("Looking for root codes");
    for (Iterator<Entry<String, TermConcept>> iter = rootConcepts.entrySet().iterator(); iter.hasNext();) {
        if (iter.next().getValue().getParents().isEmpty() == false) {
            iter.remove();
        }
    }

    ourLog.info("Done loading SNOMED CT files - {} root codes, {} total codes", rootConcepts.size(),
            code2concept.size());

    Counter circularCounter = new Counter();
    for (TermConcept next : rootConcepts.values()) {
        long count = circularCounter.getThenAdd();
        float pct = ((float) count / rootConcepts.size()) * 100.0f;
        ourLog.info(" * Scanning for circular refs - have scanned {} / {} codes ({}%)", count,
                rootConcepts.size(), pct);
        dropCircularRefs(next, new ArrayList<String>(), code2concept, circularCounter);
    }

    codeSystemVersion.getConcepts().addAll(rootConcepts.values());
    String url = SCT_URL;
    storeCodeSystem(theRequestDetails, codeSystemVersion, url);

    return new UploadStatistics(code2concept.size());
}