Example usage for java.util LinkedHashMap get

Introduction

This page collects example usages of java.util.LinkedHashMap.get, drawn from the source files listed below.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
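
Before the project examples, here is a minimal self-contained sketch of the behaviour described above; the class name, map contents and keys are purely illustrative.

import java.util.LinkedHashMap;

public class LinkedHashMapGetExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> ages = new LinkedHashMap<>();
        ages.put("alice", 30);
        ages.put("bob", 25);

        Integer known = ages.get("alice");   // 30: the value the key is mapped to
        Integer missing = ages.get("carol"); // null: this map contains no mapping for the key

        System.out.println(known + " / " + missing);
    }
}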

Usage

From source file:geogebra.io.MyI2GHandler.java

private void handleConstraintsStart(String eName, LinkedHashMap attrs, String outputType, int outputQuantity,
        String[] inputType, int[] inputQuantity) {
    if (inputType.length != inputQuantity.length) {
        Application
                .debug("call of handleConstraintsStart with invalid arguments, this should never happen :-(");
        return;
    }

    label = null;

    if (eName.equals(outputType)) {
        if (inputType.length > 0 && outputType.equals(inputType[0])) {
            if ("true".equals((String) attrs.get("out"))) {
                if (cmd.labelCount() >= outputQuantity) {
                    Application.debug("more than " + outputQuantity + " <" + eName
                            + " out=\"true\"> specified for <" + cmdName + ">");
                    return;
                }
                lastType = outputType;
                subMode = subMode + 1;
                return;
            } else if (handleConstraintsCheck(inputType[0], inputQuantity[0])) {
                lastType = inputType[0];
                subMode = subMode + 2;
                return;
            }
        } else {
            if (!"true".equals((String) attrs.get("out"))) {
                Application.debug("tag <" + eName + "> not set as output tag");
            }
            if (cmd.labelCount() >= outputQuantity) {
                Application.debug(
                        "more than " + outputQuantity + " <" + eName + "> specified for <" + cmdName + ">");
                return;
            }
            lastType = outputType;
            subMode = subMode + 1;
            return;
        }
    } else {
        for (int i = 0; i < inputType.length; i++) {
            if (eName.equals(inputType[i])) {
                if (handleConstraintsCheck(inputType[i], inputQuantity[i])) {
                    lastType = inputType[i];
                    subMode = subMode + 2;
                }
                return;
            }
        }
    }
    Application.debug("unknown tag in <" + cmdName + ">: " + eName);
}
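
A small aside on the attrs.get("out") calls above: attrs is declared as a raw LinkedHashMap, so get returns Object and every result has to be cast to String. With a parameterized map the casts disappear; a minimal sketch (the class, method and key are illustrative, not taken from MyI2GHandler):

import java.util.LinkedHashMap;

class TypedAttributesSketch {
    static boolean isOutputTag(LinkedHashMap<String, String> attrs) {
        // get returns String here, so no cast is needed
        return "true".equals(attrs.get("out"));
    }
}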

From source file:fingerprint.MyStructureFingerprint.java

private LinkedHashMap<PhiThetaInterval, List<PointIfc>> groupResiduesAccordingToSolidAngleAccordingToLocalStructureBarycenter(
        PointIfc barycenterShape, MyStructureIfc myStructureHere) { // largely reuses the code of groupPoints

    List<PointIfc> listRepresentativePoints = new ArrayList<>();

    for (MyChainIfc chain : myStructureHere.getAllChainsRelevantForShapeBuilding()) {
        for (MyMonomerIfc monomer : chain.getMyMonomers()) {
            float[] coords = ToolsMathAppliedToMyStructure.getCoordinatesOfRepresentativeAtom(monomer);
            PointIfc point = new Point(coords);
            listRepresentativePoints.add(point);
        }
    }

    // defining 36 zones in solid space
    double deltaOnlyForTheta = Math.PI / 8.0;
    int countOfIncrementAngle = 8;
    // group monomers

    EquidistributionPhi equidistributionPhi = new EquidistributionPhi();
    List<Double> phiValues = equidistributionPhi.getMapCountOfIntervalsAndPointValues()
            .get(countOfIncrementAngle);
    // theta in the map ranges from -pi to +pi, in agreement with Apache spherical coordinates
    List<Double> tethaValues = ShapeReductorTools.doBinningThetaValues(deltaOnlyForTheta,
            countOfIncrementAngle);

    List<PhiThetaInterval> sectors = generateSector(deltaOnlyForTheta, phiValues, tethaValues);

    // create the Map to return
    LinkedHashMap<PhiThetaInterval, List<PointIfc>> groupPoints = new LinkedHashMap<>();

    Iterator<PhiThetaInterval> it = sectors.iterator();
    while (it.hasNext()) {
        PhiThetaInterval sector = it.next();
        List<PointIfc> listPoints = new ArrayList<>();
        groupPoints.put(sector, listPoints);
    }

    for (PointIfc point : listRepresentativePoints) {

        float[] pointRelativeToBarycenter = MathTools.v1minusV2(point.getCoords(), barycenterShape.getCoords());
        Vector3D pointRelativeToBarycenterV3d = new Vector3D(pointRelativeToBarycenter[0],
                pointRelativeToBarycenter[1], pointRelativeToBarycenter[2]);

        SphericalCoordinates pointShericalRelative = new SphericalCoordinates(pointRelativeToBarycenterV3d);

        PhiThetaInterval intervalForThisPoint = getIntervalFromSphericalCoordinates(pointShericalRelative,
                sectors);

        if (intervalForThisPoint == null) { // some points may not fit into any sector; the binning may need to be made slightly larger
            continue;
        }
        groupPoints.get(intervalForThisPoint).add(point);
    }

    return groupPoints;
}

From source file:com.simiacryptus.mindseye.applications.ObjectLocationBase.java

/**
 * Run.
 *
 * @param log the log
 */
public void run(@Nonnull final NotebookOutput log) {
    //    @Nonnull String logName = "cuda_" + log.getName() + ".log";
    //    log.p(log.file((String) null, logName, "GPU Log"));
    //    CudaSystem.addLog(new PrintStream(log.file(logName)));

    ImageClassifierBase classifier = getClassifierNetwork();
    Layer classifyNetwork = classifier.getNetwork();

    ImageClassifierBase locator = getLocatorNetwork();
    Layer locatorNetwork = locator.getNetwork();
    ArtistryUtil.setPrecision((DAGNetwork) classifyNetwork, Precision.Float);
    ArtistryUtil.setPrecision((DAGNetwork) locatorNetwork, Precision.Float);

    Tensor[][] inputData = loadImages_library();
    //    Tensor[][] inputData = loadImage_Caltech101(log);
    double alphaPower = 0.8;

    final AtomicInteger index = new AtomicInteger(0);
    Arrays.stream(inputData).limit(10).forEach(row -> {
        log.h3("Image " + index.getAndIncrement());
        final Tensor img = row[0];
        log.p(log.image(img.toImage(), ""));
        Result classifyResult = classifyNetwork.eval(new MutableResult(row));
        Result locationResult = locatorNetwork.eval(new MutableResult(row));
        Tensor classification = classifyResult.getData().get(0);
        List<CharSequence> categories = classifier.getCategories();
        int[] sortedIndices = IntStream.range(0, categories.size()).mapToObj(x -> x)
                .sorted(Comparator.comparing(i -> -classification.get(i))).mapToInt(x -> x).limit(10).toArray();
        logger.info(Arrays.stream(sortedIndices)
                .mapToObj(
                        i -> String.format("%s: %s = %s%%", i, categories.get(i), classification.get(i) * 100))
                .reduce((a, b) -> a + "\n" + b).orElse(""));
        LinkedHashMap<CharSequence, Tensor> vectors = new LinkedHashMap<>();
        List<CharSequence> predictionList = Arrays.stream(sortedIndices).mapToObj(categories::get)
                .collect(Collectors.toList());
        Arrays.stream(sortedIndices).limit(6).forEach(category -> {
            CharSequence name = categories.get(category);
            log.h3(name);
            Tensor alphaTensor = renderAlpha(alphaPower, img, locationResult, classification, category);
            log.p(log.image(img.toRgbImageAlphaMask(0, 1, 2, alphaTensor), ""));
            vectors.put(name, alphaTensor.unit());
        });

        Tensor avgDetection = vectors.values().stream().reduce((a, b) -> a.add(b)).get()
                .scale(1.0 / vectors.size());
        Array2DRowRealMatrix covarianceMatrix = new Array2DRowRealMatrix(predictionList.size(),
                predictionList.size());
        for (int x = 0; x < predictionList.size(); x++) {
            for (int y = 0; y < predictionList.size(); y++) {
                Tensor l = vectors.get(predictionList.get(x));
                Tensor r = vectors.get(predictionList.get(y));

                covarianceMatrix.setEntry(x, y,
                        null == l || null == r ? 0 : (l.minus(avgDetection)).dot(r.minus(avgDetection)));
            }
        }
        @Nonnull
        final EigenDecomposition decomposition = new EigenDecomposition(covarianceMatrix);

        for (int objectVector = 0; objectVector < 10; objectVector++) {
            log.h3("Eigenobject " + objectVector);
            double eigenvalue = decomposition.getRealEigenvalue(objectVector);
            RealVector eigenvector = decomposition.getEigenvector(objectVector);
            Tensor detectionRegion = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                Tensor tensor = vectors.get(predictionList.get(i));
                return null == tensor ? null : tensor.scale(eigenvector.getEntry(i));
            }).filter(x -> null != x).reduce((a, b) -> a.add(b)).get();
            detectionRegion = detectionRegion.scale(255.0 / detectionRegion.rms());
            CharSequence categorization = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                CharSequence category = predictionList.get(i);
                double component = eigenvector.getEntry(i);
                return String.format("<li>%s = %.4f</li>", category, component);
            }).reduce((a, b) -> a + "" + b).get();
            log.p(String.format("Object Detected: <ol>%s</ol>", categorization));
            log.p("Object Eigenvalue: " + eigenvalue);
            log.p("Object Region: " + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion), ""));
            log.p("Object Region Compliment: "
                    + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion.scale(-1)), ""));
        }

        //      final int[] orderedVectors = IntStream.range(0, 10).mapToObj(x -> x)
        //        .sorted(Comparator.comparing(x -> -decomposition.getRealEigenvalue(x))).mapToInt(x -> x).toArray();
        //      IntStream.range(0, orderedVectors.length)
        //        .mapToObj(i -> {
        //            //double realEigenvalue = decomposition.getRealEigenvalue(orderedVectors[i]);
        //            return decomposition.getEigenvector(orderedVectors[i]).toArray();
        //          }
        //        ).toArray(i -> new double[i][]);

        log.p(String.format(
                "<table><tr><th>Cosine Distance</th>%s</tr>%s</table>", Arrays.stream(sortedIndices).limit(10)
                        .mapToObj(col -> "<th>" + categories.get(col) + "</th>").reduce((a, b) -> a + b).get(),
                Arrays.stream(sortedIndices).limit(10).mapToObj(r -> {
                    return String.format("<tr><td>%s</td>%s</tr>", categories.get(r),
                            Arrays.stream(sortedIndices).limit(10).mapToObj(col -> {
                                Tensor l = vectors.get(categories.get(r));
                                Tensor r2 = vectors.get(categories.get(col));
                                return String.format("<td>%.4f</td>",
                                        (null == l || null == r2) ? 0 : Math.acos(l.dot(r2)));
                            }).reduce((a, b) -> a + b).get());
                }).reduce((a, b) -> a + b).orElse("")));
    });

    log.setFrontMatterProperty("status", "OK");
}

From source file:org.lokra.seaweedfs.core.FileTemplate.java

/**
 * Save files by stream map.
 *
 * @param streamMap   Map of file names to file streams.
 * @param contentType File content type.
 * @return Files status.
 * @throws IOException if the HTTP connection fails or the server responds with an error message.
 */
public LinkedHashMap<String, FileHandleStatus> saveFilesByStreamMap(
        LinkedHashMap<String, InputStream> streamMap, ContentType contentType) throws IOException {
    // Assign file key
    final AssignFileKeyParams params = new AssignFileKeyParams(assignFileKeyParams.getReplication(),
            streamMap.size(), assignFileKeyParams.getDataCenter(), assignFileKeyParams.getTtl(),
            assignFileKeyParams.getCollection());

    final AssignFileKeyResult assignFileKeyResult = masterWrapper.assignFileKey(params);
    String uploadUrl;
    if (usingPublicUrl)
        uploadUrl = assignFileKeyResult.getPublicUrl();
    else
        uploadUrl = assignFileKeyResult.getUrl();

    // Upload file
    LinkedHashMap<String, FileHandleStatus> resultMap = new LinkedHashMap<String, FileHandleStatus>();
    int index = 0;
    for (String fileName : streamMap.keySet()) {
        if (index == 0)
            resultMap.put(fileName,
                    new FileHandleStatus(assignFileKeyResult.getFid(),
                            volumeWrapper.uploadFile(uploadUrl, assignFileKeyResult.getFid(), fileName,
                                    streamMap.get(fileName), timeToLive, contentType)));
        else
            resultMap.put(fileName,
                    new FileHandleStatus(assignFileKeyResult.getFid() + "_" + String.valueOf(index),
                            volumeWrapper.uploadFile(uploadUrl,
                                    assignFileKeyResult.getFid() + "_" + String.valueOf(index), fileName,
                                    streamMap.get(fileName), timeToLive, contentType)));
        index++;
    }
    return resultMap;
}
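
A short note on the loop above: iterating streamMap.keySet() and calling streamMap.get(fileName) for each key performs an extra lookup per entry; iterating entrySet() yields the same insertion order without the second lookup. A minimal sketch, with an illustrative class and method that are not part of FileTemplate:

import java.io.InputStream;
import java.util.LinkedHashMap;
import java.util.Map;

class StreamMapIterationSketch {
    static void forEachFile(LinkedHashMap<String, InputStream> streamMap) {
        for (Map.Entry<String, InputStream> entry : streamMap.entrySet()) {
            String fileName = entry.getKey();
            InputStream stream = entry.getValue(); // same value streamMap.get(fileName) would return
            // upload fileName / stream here, as saveFilesByStreamMap does for each entry
        }
    }
}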

From source file:gate.util.reporting.PRTimeReporter.java

/**
 * Sorts the processing element entries inside a tree-like structure made up of
 * LinkedHashMaps. Entries will be sorted in descending order of time taken.
 *
 * @param gStore
 *          An Object of type LinkedHashMap<String, Object> containing the
 *          processing elements (with time in milliseconds) in hierarchical
 *          structure.
 *
 * @return An Object of type LinkedHashMap<String, Object> containing the
 *         processing elements sorted in descending order of processing time
 *         taken.
 */
@SuppressWarnings("unchecked")
private LinkedHashMap<String, Object> sortReport(LinkedHashMap<String, Object> gStore) {
    Iterator<String> i = gStore.keySet().iterator();
    LinkedHashMap<String, Object> sortedReport = new LinkedHashMap<String, Object>();
    LinkedHashMap<String, Object> mapperReport = new LinkedHashMap<String, Object>();
    LinkedHashMap<String, String> unsortedReport = new LinkedHashMap<String, String>();
    while (i.hasNext()) {
        Object key = i.next();
        if (gStore.get(key) instanceof LinkedHashMap) {
            int systotal = 0;
            if (((LinkedHashMap<String, Object>) (gStore.get(key))).get("systotal") != null) {
                systotal = Integer
                        .parseInt((String) ((LinkedHashMap<String, Object>) (gStore.get(key))).get("systotal"));
            }
            if (systotal >= 0) {
                unsortedReport.put((String) key, Integer.toString(systotal));
            }
            mapperReport.put((String) key, sortReport((LinkedHashMap<String, Object>) (gStore.get(key))));

        } else {
            if (!(key.equals("total") || key.equals("systotal"))) {
                if (Integer.parseInt((String) (gStore.get(key))) >= 0) {
                    unsortedReport.put((String) key, new Integer((String) gStore.get(key)).toString());
                }
            }
        }
    }
    LinkedHashMap<String, String> tempOutLHM = sortHashMapByValues(unsortedReport);

    Iterator<String> itr = tempOutLHM.keySet().iterator();
    while (itr.hasNext()) {
        Object tempKey = itr.next();
        sortedReport.put((String) tempKey, tempOutLHM.get(tempKey));
        if (mapperReport.containsKey(tempKey)) {
            sortedReport.put((String) tempKey, mapperReport.get(tempKey));
        }
    }
    sortedReport.put("total", gStore.get("total"));
    if (gStore.get("systotal") != null) {
        sortedReport.put("systotal", gStore.get("systotal"));
    }
    return sortedReport;
}

From source file:de.ingrid.importer.udk.strategy.v33.IDCStrategyDefault3_3.java

/**
 * Also drops all old values (if syslist already exists) !
 * @param listId id of syslist
 * @param deleteOldValues pass true if all old syslist values should be deleted before adding new ones !
 * @param syslistMap_de german entries
 * @param syslistMap_en english entries
 * @param defaultEntry_de pass key of GERMAN default entry or -1 if no default entry !
 * @param defaultEntry_en pass key of ENGLISH default entry or -1 if no default entry !
 * @param syslistMap_descr_de pass null if no GERMAN description available
 * @param syslistMap_descr_en pass null if no ENGLISH description available
 * @throws Exception
 */
protected void writeNewSyslist(int listId, boolean deleteOldValues,
        LinkedHashMap<Integer, String> syslistMap_de, LinkedHashMap<Integer, String> syslistMap_en,
        int defaultEntry_de, int defaultEntry_en, LinkedHashMap<Integer, String> syslistMap_descr_de,
        LinkedHashMap<Integer, String> syslistMap_descr_en) throws Exception {

    if (syslistMap_descr_de == null) {
        syslistMap_descr_de = new LinkedHashMap<Integer, String>();
    }
    if (syslistMap_descr_en == null) {
        syslistMap_descr_en = new LinkedHashMap<Integer, String>();
    }

    if (deleteOldValues) {
        // clean up, to guarantee no old values !
        sqlStr = "DELETE FROM sys_list where lst_id = " + listId;
        jdbc.executeUpdate(sqlStr);
    }

    String psSql = "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default, description) "
            + "VALUES (?,?,?,?,?,?,?,?)";
    PreparedStatement psInsert = jdbc.prepareStatement(psSql);

    Iterator<Integer> itr = syslistMap_de.keySet().iterator();
    while (itr.hasNext()) {
        int key = itr.next();
        // german version
        String isDefault = "N";
        if (key == defaultEntry_de) {
            isDefault = "Y";
        }
        psInsert.setLong(1, getNextId());
        psInsert.setInt(2, listId);
        psInsert.setInt(3, key);
        psInsert.setString(4, "de");
        psInsert.setString(5, syslistMap_de.get(key));
        psInsert.setInt(6, 0);
        psInsert.setString(7, isDefault);
        psInsert.setString(8, syslistMap_descr_de.get(key));
        psInsert.executeUpdate();

        // english version
        isDefault = "N";
        if (key == defaultEntry_en) {
            isDefault = "Y";
        }
        psInsert.setLong(1, getNextId());
        psInsert.setString(4, "en");
        psInsert.setString(5, syslistMap_en.get(key));
        psInsert.setString(7, isDefault);
        psInsert.setString(8, syslistMap_descr_en.get(key));
        psInsert.executeUpdate();
    }

    psInsert.close();
}

From source file:edu.harvard.i2b2.analysis.dataModel.ConceptTableModel.java

public void fillDataFromTable(ArrayList<ArrayList<ConceptTableRow>> list) {
    list.clear();
    ConceptTableRow row = null;
    ArrayList<ConceptTableRow> group = null;
    Integer curRow = null;
    LinkedHashMap<Integer, ArrayList<ConceptTableRow>> rowMap = new LinkedHashMap<Integer, ArrayList<ConceptTableRow>>();

    for (int i = 1; i < rowCount; i++) {
        row = new ConceptTableRow();
        curRow = new Integer((String) content.get("0/" + i));
        row.rowNumber = curRow.intValue();
        if (!rowMap.containsKey(curRow)) {
            group = new ArrayList<ConceptTableRow>();
            list.add(group);
            rowMap.put(curRow, group);
        } else {
            group = rowMap.get(curRow);
        }
        row.conceptName = (String) content.get("1/" + i);
        row.dateText = (String) content.get("2/" + i);
        row.valueText = (String) content.get("3/" + i);
        row.height = (String) content.get("4/" + i);
        row.color = (RGB) content.get("5/" + i);
        row.conceptXml = (String) content.get("6/" + i);
        row.data = (QueryModel) content.get("7/" + i);
        row.rowId = i;
        group.add(row);
    }
}
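
An aside on the containsKey/put/get sequence above: on Java 8 and later, Map.computeIfAbsent expresses the "create the group on first sight, otherwise reuse it" step in a single call (the original also appends a newly created group to list, which would still need to be handled separately). A minimal sketch with illustrative names that are not part of ConceptTableModel:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

class RowGroupingSketch {
    static List<String> groupFor(LinkedHashMap<Integer, List<String>> rowMap, Integer curRow) {
        // Returns the existing group for curRow, or creates, stores and returns a new one.
        return rowMap.computeIfAbsent(curRow, k -> new ArrayList<>());
    }
}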

From source file:gate.util.reporting.DocTimeReporter.java

/**
 * Prints the document level statistics report in HTML format.
 *
 * @param reportSource
 *          An Object of type LinkedHashMap<String, Object> containing the
 *          document names (with time in milliseconds).
 * @param outputFile
 *          An object of type File representing the output report file to
 *          which the HTML report is to be written.
 */
private void printToHTML(LinkedHashMap<String, Object> reportSource, File outputFile) {
    String htmlReport = "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"" + NL
            + "\"http://www.w3.org/TR/html4/loose.dtd\">" + NL
            + "<html><head><title>Benchmarking Report</title>" + NL + "<meta http-equiv=\"Content-Type\""
            + " content=\"text/html; charset=utf-8\">" + NL + "<style type=\"text/css\">" + NL
            + "div { font-size:12px; margin-top: 4; }" + NL + "</style>" + NL + "</head>" + NL
            + "<body style=\"font-family:Verdana; color:navy;\">" + NL;
    String hTrace = "<div style=\"right: 0pt; border-top:1px solid #C9D7F1;" + " font-size:1px;\" ></div>" + NL;
    String reportTitle = hTrace;
    String docs = "";
    if (maxDocumentInReport != ALL_DOCS) {
        if (allDocs.size() < maxDocumentInReport) {
            docs = Integer.toString(allDocs.size());
        } else {
            docs = Integer.toString(maxDocumentInReport);
        }
    } else {
        docs = "All";
    }
    if (PRMatchingRegex.equals(MATCH_ALL_PR_REGEX)) {
        reportTitle = reportTitle + "<div style=\"font-size:15px;font-family:Verdana; color:navy;\">Top " + docs
                + " expensive documents matching All PRs in <b>" + pipelineName + "</b></div>" + NL;
    } else {
        if (matchingPRs.size() > 0) {
            reportTitle = reportTitle + "<div style=\"font-size:15px;font-family:Verdana; color:navy;\">Top "
                    + docs + " expensive documents matching following PRs in <b>" + pipelineName + "</b> <ul>"
                    + NL;
            for (String pr : matchingPRs) {
                reportTitle = reportTitle + "<li>" + pr + "</li>";
            }
            reportTitle = reportTitle + "</ul></div>";
        } else {
            reportTitle += "<div style=\"font-size:15px;font-family:Verdana; color:navy;\">"
                    + "No PRs matched to search string \"" + getPRMatchingRegex() + " \" in " + pipelineName
                    + "</div>";
        }
    }
    reportTitle = reportTitle + hTrace;

    if (allDocs.size() > 0) {
        String htmlReportTitle = reportTitle + "<table><tr bgcolor=\"#eeeeff\">"
                + "<td><b>Document Name</b></td>" + "<td><b>Time in seconds</b></td>"
                + "<td><b>% Time taken</b></td>" + "</tr><tr>" + NL;
        String documentNameHTMLString = "<td rowspan = '112' width = '550'>";
        String timeTakenHTMLString = "<td width = '100'>";
        String timeInPercentHTMLString = "<td width = '100'>";
        LinkedHashMap<String, Object> rcHash = reportSource;
        rcHash.remove("total");
        Iterator<String> i = rcHash.keySet().iterator();
        int count = 0;
        while (i.hasNext()) {
            Object key = i.next();
            if (!((String) key).equals("total")) {
                int value = Integer.parseInt((String) rcHash.get(key));
                if (maxDocumentInReport == ALL_DOCS) {
                    documentNameHTMLString += "<div>" + key + "</div>";
                    timeTakenHTMLString += "<div>" + value / 1000.0 + "</div>";
                    timeInPercentHTMLString += "<div>" + Math.round(((value / globalTotal) * 100) * 10) / 10.0
                            + "</div>" + NL;
                } else if (count < maxDocumentInReport) {
                    documentNameHTMLString += "<div>" + key + "</div>";
                    timeTakenHTMLString += "<div>" + value / 1000.0 + "</div>";
                    timeInPercentHTMLString += "<div>" + Math.round(((value / globalTotal) * 100) * 10) / 10.0
                            + "</div>" + NL;
                }
            }
            count++;
        }
        documentNameHTMLString += "<div bgcolor=\"#eeeeff\" style = \"font-size:15px;margin-left:400px;\">"
                + "<b>Total</b></div></td>" + NL;
        timeTakenHTMLString += "<div bgcolor=\"#eeeeff\" style = \"font-size:15px;\"><b>" + globalTotal / 1000.0
                + "</b></div></td>" + NL;
        timeInPercentHTMLString += "<div bgcolor=\"#eeeeff\" style = \"font-size:15px;\">"
                + "<b>100</b></div></td>" + NL;

        if (!outputFile.exists()) {
            htmlReport += htmlReportTitle + documentNameHTMLString + timeTakenHTMLString
                    + timeInPercentHTMLString + "</tr></table>";
        } else {
            htmlReport = "<br/><br/>" + htmlReportTitle + documentNameHTMLString + timeTakenHTMLString
                    + timeInPercentHTMLString + "</tr></table></body></html>";
        }
    } else {
        htmlReport += reportTitle + "</body></html>";
    }

    BufferedWriter out = null;
    try {
        out = new BufferedWriter(new FileWriter(outputFile));
        out.write(htmlReport);

    } catch (IOException e) {
        e.printStackTrace();

    } finally {
        try {
            if (out != null) {
                out.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

From source file:aldenjava.opticalmapping.data.mappingresult.OptMapResultNode.java

public List<OptMapResultNode> getBreakResult(LinkedHashMap<String, DataNode> optrefmap, int meas, double ear) {
    if (parentFrag == null || optrefmap == null)
        return null;
    List<OptMapResultNode> resultList = new ArrayList<OptMapResultNode>();
    if (!isUsed()) {
        resultList.add(new OptMapResultNode(this));
        return resultList;
    }

    DataNode ref = optrefmap.get(mappedRegion.ref);
    String precigar = this.cigar.getPrecigar();
    int lastrefpos = -1;
    int lastfragpos = -1;
    int currentrefpos = subrefstart;
    int currentfragpos = subfragstart;
    int lastrefstart = -1;
    int lastfragstart = -1;
    for (char c : precigar.toCharArray())
        switch (c) {
        case 'M':
            if (lastrefpos != -1) {
                if (lastrefstart == -1) {
                    lastrefstart = lastrefpos;
                    lastfragstart = lastfragpos;
                }
                // start comparison
                long reflen = ref.length(lastrefpos, currentrefpos - 1);
                long fraglen;
                if (mappedstrand == 1)
                    fraglen = parentFrag.length(lastfragpos, currentfragpos - mappedstrand);
                else if (mappedstrand == -1)
                    fraglen = parentFrag.length(currentfragpos - mappedstrand, lastfragpos);
                else
                    fraglen = 0;

                boolean pass = (reflen * (1 - ear) - meas < fraglen && fraglen < reflen * (1 + ear) + meas);
                if (!pass) {
                    if (lastfragstart != lastfragpos) {
                        resultList.add(this.getSubResult(ref, lastfragstart, lastfragpos - mappedstrand)); // the current one should not be included

                    }
                    lastrefstart = -1;
                    lastfragstart = -1;
                } else {
                    if (pass && currentrefpos == subrefstop + 1) {
                        resultList.add(this.getSubResult(ref, lastfragstart, subfragstop)); // the current one should not be included
                        lastrefstart = -1;
                        lastfragstart = -1;
                    }
                }
            }
            lastrefpos = currentrefpos;
            lastfragpos = currentfragpos;
            currentfragpos += mappedstrand;
            currentrefpos += 1;
            break;
        case 'I':
            currentfragpos += mappedstrand;
            break;
        case 'D':
            currentrefpos += 1;
            break;
        default:
            ;
        }
    return resultList;
}

From source file:com.amalto.workbench.utils.XSDAnnotationsStructure.java

/**
 * author: fliu. Sets multilingual facet error messages attached to facets in the schema; please refer to bug 0009157.
 */
public boolean setFactMessage(LinkedHashMap<String, String> facts) {
    Iterator<String> isos = Util.iso2lang.keySet().iterator();
    while (isos.hasNext()) {
        String lang = isos.next();
        removeAppInfos("X_Facet_" + lang.toUpperCase());//$NON-NLS-1$
    }

    Iterator<String> isoIter = facts.keySet().iterator();
    while (isoIter.hasNext()) {
        String iso = isoIter.next();
        removeAppInfos("X_Facet_" + iso.toUpperCase());//$NON-NLS-1$
        addAppInfo("X_Facet_" + iso.toUpperCase(), facts.get(iso));//$NON-NLS-1$
    }

    hasChanged = true;
    return true;
}