Example usage for java.util LinkedHashMap keySet

List of usage examples for java.util LinkedHashMap keySet

Introduction

On this page you can find example usages of java.util.LinkedHashMap.keySet().

Prototype

public Set<K> keySet() 

Document

Returns a Set view of the keys contained in this map. The set is backed by the map, so changes to the map are reflected in the set, and vice-versa; for a LinkedHashMap, the set iterates (by default) in the order in which keys were inserted.
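
As a quick, self-contained illustration (not taken from the projects below) of both properties, insertion-order iteration and the live view:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class KeySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);

        // LinkedHashMap's keySet() iterates in insertion order
        Set<String> keys = map.keySet();
        System.out.println(keys); // [first, second, third]

        // The set is a live view: removing a key also removes the mapping
        keys.remove("second");
        System.out.println(map); // {first=1, third=3}
    }
}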

Usage

From source file:org.ejbca.util.dn.DnComponents.java

/**
 * Load DN ordering used in CertTools.stringToBCDNString etc.
 * Loads from file placed in src/dncomponents.properties
 *
 */
private static void loadOrdering() {
    // Read the file to an array of lines 
    String line;
    LinkedHashMap<String, DERObjectIdentifier> map = new LinkedHashMap<String, DERObjectIdentifier>();
    BufferedReader in = null;
    InputStreamReader inf = null;
    try {
        InputStream is = obj.getClass().getResourceAsStream("/dncomponents.properties");
        //log.info("is is: " + is);
        if (is != null) {
            inf = new InputStreamReader(is);
            //inf = new FileReader("c:\\foo.properties");
            in = new BufferedReader(inf);
            if (!in.ready()) {
                throw new IOException();
            }
            String[] splits = null;
            while ((line = in.readLine()) != null) {
                if (!line.startsWith("#")) { // # is a comment line
                    splits = StringUtils.split(line, '=');
                    if ((splits != null) && (splits.length > 1)) {
                        String name = splits[0].toLowerCase();
                        DERObjectIdentifier oid = new DERObjectIdentifier(splits[1]);
                        map.put(name, oid);
                    }
                }
            }
            in.close();
            // Now we have read it in, transfer it to the main oid map
            log.info("Using DN components from properties file");
            oids.clear();
            oids.putAll(map);
            Set<String> keys = map.keySet();
            // Set the maps to the desired ordering
            dNObjectsForward = (String[]) keys.toArray(new String[0]);
        } else {
            log.debug("Using default values for DN components");
        }
    } catch (IOException e) {
        log.debug("Using default values for DN components");
    } finally {
        try {
            if (inf != null) {
                inf.close();
            }
            if (in != null) {
                in.close();
            }
        } catch (IOException e) {
        }
    }

}
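
The LinkedHashMap matters here: keySet() yields the keys in the order the lines were read from the properties file, so the resulting array preserves the file's DN ordering. A minimal illustration (the values are standard DN component OIDs):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

public class OrderedToArrayDemo {
    public static void main(String[] args) {
        Map<String, String> map = new LinkedHashMap<>();
        map.put("cn", "2.5.4.3");
        map.put("ou", "2.5.4.11");
        map.put("o", "2.5.4.10");

        // toArray on the keySet() view preserves insertion order
        String[] names = map.keySet().toArray(new String[0]);
        System.out.println(Arrays.toString(names)); // [cn, ou, o]
    }
}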

From source file:netbeanstypescript.TSHintsProvider.java

@Override
public void computeSuggestions(HintsManager manager, final RuleContext context, List<Hint> suggestions,
        int caretOffset) {
    // Group the possibly-fixable errors by span and dedupe, as getCodeFixesAtPosition requires
    LinkedHashMap<OffsetRange, LinkedHashSet<Integer>> errsBySpan = new LinkedHashMap<>();
    for (Error err : context.parserResult.getDiagnostics()) {
        int errStart = err.getStartPosition(), errEnd = err.getEndPosition();
        if (err.getKey() != null && caretOffset >= errStart && caretOffset <= errEnd) {
            OffsetRange span = new OffsetRange(errStart, errEnd);
            LinkedHashSet<Integer> errCodes = errsBySpan.get(span);
            if (errCodes == null) {
                errsBySpan.put(span, errCodes = new LinkedHashSet<>());
            }
            errCodes.add(Integer.parseInt(err.getKey()));
        }
    }
    for (OffsetRange span : errsBySpan.keySet()) {
        final FileObject fileObj = context.parserResult.getSnapshot().getSource().getFileObject();
        Object fixes = TSService.call("getCodeFixesAtPosition", fileObj, span.getStart(), span.getEnd(),
                errsBySpan.get(span)); // amazingly, LinkedHashSet<Integer>'s toString is valid JSON
        if (fixes == null) {
            continue;
        }
        List<HintFix> hintFixes = new ArrayList<>();
        final Reformat formatter = Reformat.get(context.doc);
        for (final JSONObject fix : (List<JSONObject>) fixes) {
            hintFixes.add(new HintFix() {
                @Override
                public String getDescription() {
                    return (String) fix.get("description");
                }

                @Override
                public void implement() {
                    for (JSONObject change : (List<JSONObject>) fix.get("changes")) {
                        Object fileName = change.get("fileName");
                        if (!fileName.equals(fileObj.getPath())) {
                            String error = "Unimplemented: code fix involves changes to a different file "
                                    + fileName;
                            DialogDisplayer.getDefault().notify(
                                    new NotifyDescriptor.Message(error, NotifyDescriptor.ERROR_MESSAGE));
                            return;
                        }
                    }
                    formatter.lock();
                    try {
                        context.doc.runAtomic(new Runnable() {
                            @Override
                            public void run() {
                                try {
                                    for (JSONObject change : (List<JSONObject>) fix.get("changes")) {
                                        OffsetRange changed = TSFormatter.applyEdits(context.doc,
                                                change.get("textChanges"));
                                        // Code fixes are badly formatted, so reformat the affected range
                                        // https://github.com/Microsoft/TypeScript/issues/12249
                                        if (changed != null) {
                                            formatter.reformat(changed.getStart(), changed.getEnd());
                                        }
                                    }
                                } catch (BadLocationException ex) {
                                    Exceptions.printStackTrace(ex);
                                }
                            }
                        });
                    } finally {
                        formatter.unlock();
                    }
                }

                @Override
                public boolean isSafe() {
                    return false;
                }

                @Override
                public boolean isInteractive() {
                    return false;
                }
            });
        }
        if (!hintFixes.isEmpty()) {
            Rule rule = new Rule() {
                @Override
                public boolean appliesTo(RuleContext rc) {
                    return true;
                }

                @Override
                public String getDisplayName() {
                    return "TS code fix";
                }

                @Override
                public boolean showInTasklist() {
                    return false;
                }

                @Override
                public HintSeverity getDefaultSeverity() {
                    return HintSeverity.ERROR;
                }
            };
            suggestions.add(new Hint(rule,
                    hintFixes.size() + (hintFixes.size() == 1 ? " code fix" : " code fixes") + " available",
                    fileObj, span, hintFixes, 0));
        }
    }
}
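
A side note on the keySet-plus-get pattern above (errsBySpan.keySet() followed by errsBySpan.get(span)): when both the key and the value are needed, iterating entrySet() does a single lookup per entry. A sketch of the equivalent loop, reusing the method's own names:

    // Equivalent iteration over errsBySpan without the extra get() per key
    for (Map.Entry<OffsetRange, LinkedHashSet<Integer>> entry : errsBySpan.entrySet()) {
        OffsetRange span = entry.getKey();
        LinkedHashSet<Integer> errCodes = entry.getValue();
        // ... same loop body as above, with errCodes in place of errsBySpan.get(span)
    }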

From source file:com.vmware.admiral.request.compute.ComputeReservationTaskService.java

private void selectReservation(ComputeReservationTaskState state,
        LinkedHashMap<String, String> resourcePoolsPerGroupPlacementLinks) {
    if (resourcePoolsPerGroupPlacementLinks.isEmpty()) {
        failTask(null,
                new LocalizableValidationException("resourcePoolsPerGroupPlacementLinks must not be empty",
                        "request.compute.reservation.resource-pools.empty"));
        return;
    }

    Iterator<String> iter = resourcePoolsPerGroupPlacementLinks.keySet().iterator();
    String placementLink = iter.next();
    iter.remove();

    logInfo("Current selected placement: %s", placementLink);
    proceedTo(SubStage.RESERVATION_SELECTED, s -> {
        s.resourcePoolsPerGroupPlacementLinks = resourcePoolsPerGroupPlacementLinks;
        s.groupResourcePlacementLink = placementLink;
    });
}
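
Note that iter comes from a live view: iter.remove() deletes the entry from resourcePoolsPerGroupPlacementLinks itself, not just from the set. Distilled into a standalone sketch of this pop-the-oldest-entry idiom (names hypothetical):

import java.util.Iterator;
import java.util.LinkedHashMap;

public class PopOldestDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, String> placements = new LinkedHashMap<>();
        placements.put("placement-1", "pool-a");
        placements.put("placement-2", "pool-b");

        Iterator<String> iter = placements.keySet().iterator();
        String oldest = iter.next(); // first-inserted key: "placement-1"
        iter.remove();               // removes the mapping from the map as well

        System.out.println(oldest);     // placement-1
        System.out.println(placements); // {placement-2=pool-b}
    }
}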

From source file:org.kutkaitis.timetable2.timetable.MonteCarlo.java

public void optimizeTimetable() {

    long startTime = System.currentTimeMillis();

    Map<Integer, List<LinkedHashMap>> bestResult = new LinkedHashMap<>();
    for (int i = 0; i < properties.getOptimisationIterations(); i++) {

        // Clearing all values for new optimization iteration
        allTeachersListForOptm = null;
        teachersListOfIAndIIForOptm = null;
        teachersListOfIIIAndIVForOptm = null;

        mondayTimeTable = new LinkedHashMap<>();
        tuesdayTimeTable = new LinkedHashMap<>();
        wednesdayTimeTable = new LinkedHashMap<>();
        thursdayTimeTable = new LinkedHashMap<>();
        fridayTimeTable = new LinkedHashMap<>();

        getAllTeachersListForOptm(); // Dirty hack for action listener

        System.out.println("ITERATION: " + i);
        optimizeMondayTimeTableForIIIAndIVGymnasium();
        //            System.out.println("Days time tables for itr: " +daysTimeTablesForItr);
        optimizeMondayTimeTableForIAndIIGymnasium();

        int bestRes = Integer.valueOf(optimizationResults.getTotalPenaltyPoints());
        System.out.println("bestRes: " + bestRes);
        if (bestResult.isEmpty()) {
            bestResult.put(bestRes, daysTimeTablesForItr);
            continue;
        }

        // Iterate over a snapshot of the keys: the loop below removes and re-puts
        // entries, and structurally modifying the map while iterating its live
        // keySet() view would risk a ConcurrentModificationException
        Collection<Integer> res = new ArrayList<>(bestResult.keySet());
        for (Integer resultNumber : res) {
            if (bestRes < resultNumber) {
                bestResult.remove(resultNumber);
                //                    List<LinkedHashMap> daysTimeTablesToStore = new ArrayList<>();
                //                    for (LinkedHashMap teachersTimeTable : daysTimeTablesForItr) {
                //                        for (teachersTimeTable)
                //                    }
                System.out.println("daysTimeTable: " + daysTimeTablesForItr);
                bestResult.put(bestRes, daysTimeTablesForItr); //TODO make object copies, otherwise it will be wiped out

            }
        }

        System.out.println("MondayTM: " + mondayTimeTable);
    }

    // Adding best result
    Collection<Integer> res = bestResult.keySet();
    System.out.println("res: " + res);
    for (Integer resultNumber : res) {
        System.out.println("resultNumber: " + resultNumber);
        setTotalPenaltyPoints(String.valueOf(resultNumber));
        System.out.println("best result hash code: " + bestResult.get(resultNumber));
        optimizationResults.setAllDaysTeacherTimeTable(bestResult.get(resultNumber));
        System.out.println(
                "Total recalcualed current tm penalty points: " + optimizationResults.getTotalPenaltyPoints());

        int counter = 0;
        for (LinkedHashMap allDaysTM : bestResult.get(resultNumber)) {
            if (counter == 0) {
                setMondayTimeTable(allDaysTM);
                Collection<String> teacherNames = allDaysTM.keySet();
                setAllTeachersListForOptm(new ArrayList<>(teacherNames));

            }
            if (counter == 1) {
                setTuesdayTimeTable(allDaysTM);
            }
            if (counter == 2) {
                setWednesdayTimeTable(allDaysTM);
            }
            if (counter == 3) {
                setThursdayTimeTable(allDaysTM);
            }
            if (counter == 4) {
                setFridayTimeTable(allDaysTM);
            }
            counter++;
        }
    }

    long stopTime = System.currentTimeMillis();
    long elapsedTime = stopTime - startTime;
    long timeSeconds = TimeUnit.MILLISECONDS.toSeconds(elapsedTime);
    optimizationResults.setDuration(String.valueOf(timeSeconds));

}
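
Note that bestResult is modified while its keys are being iterated; the snapshot copy above avoids a ConcurrentModificationException. When a snapshot is not wanted, removals (though not additions) can be done safely through the view iterator itself. A minimal runnable sketch (data hypothetical):

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class SafeRemovalDemo {
    public static void main(String[] args) {
        Map<Integer, String> results = new LinkedHashMap<>();
        results.put(10, "slow timetable");
        results.put(7, "better timetable");

        int newPenalty = 5; // lower is better
        Iterator<Integer> it = results.keySet().iterator();
        while (it.hasNext()) {
            if (newPenalty < it.next()) {
                it.remove(); // safe: removal via the view iterator
            }
        }
        System.out.println(results); // {}
    }
}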

From source file:org.tdl.vireo.export.impl.ExcelPackagerImpl.java

/**
 * (OPTIONAL) Set the attachment types which will be included in the package. Since not all attachments should be deposited, this allows the package to filter which files to include. Names must exactly match (all uppercase) the constants listed in the AttachmentType enum.
 *
 * If no types are specified then no attachments will be included.
 * 
 * @param attachmentTypeNames
 *            List of attachment types to include.
 */
public void setAttachmentTypeNames(LinkedHashMap<String, Properties> attachmentTypeNames) {

    this.attachmentTypes = new ArrayList<AttachmentType>();
    this.attachmentAttributes = new LinkedHashMap<String, Properties>();

    if (attachmentTypeNames != null) {
        this.attachmentAttributes = attachmentTypeNames;
        for (String name : attachmentTypeNames.keySet()) {
            AttachmentType type = AttachmentType.valueOf(name);
            this.attachmentTypes.add(type);
        }
    }
}
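
One caveat in the loop above: AttachmentType.valueOf(name) throws an IllegalArgumentException for a name that does not match an enum constant, so a single misspelled entry aborts the whole setter. A defensive variant might look like this (a sketch, reusing the method's own fields):

    for (String name : attachmentTypeNames.keySet()) {
        try {
            this.attachmentTypes.add(AttachmentType.valueOf(name));
        } catch (IllegalArgumentException e) {
            // Unknown attachment type name in configuration; skip it
            // (or log/rethrow, depending on how strict the caller should be)
        }
    }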

From source file:org.cesecore.certificates.util.DnComponents.java

/**
 * Load DN ordering used in CertTools.stringToBCDNString etc.
 * Loads from file placed in src/dncomponents.properties
 *
 */
private static void loadOrdering() {
    // Read the file to an array of lines 
    String line;
    LinkedHashMap<String, ASN1ObjectIdentifier> map = new LinkedHashMap<String, ASN1ObjectIdentifier>();
    BufferedReader in = null;
    InputStreamReader inf = null;
    try {
        InputStream is = obj.getClass().getResourceAsStream("/dncomponents.properties");
        //log.info("is is: " + is);
        if (is != null) {
            inf = new InputStreamReader(is);
            //inf = new FileReader("c:\\foo.properties");
            in = new BufferedReader(inf);
            if (!in.ready()) {
                throw new IOException();
            }
            String[] splits = null;
            while ((line = in.readLine()) != null) {
                if (!line.startsWith("#")) { // # is a comment line
                    splits = StringUtils.split(line, '=');
                    if ((splits != null) && (splits.length > 1)) {
                        String name = splits[0].toLowerCase();
                        ASN1ObjectIdentifier oid = new ASN1ObjectIdentifier(splits[1]);
                        map.put(name, oid);
                    }
                }
            }
            in.close();
            // Now we have read it in, transfer it to the main oid map
            log.info("Using DN components from properties file");
            oids.clear();
            oids.putAll(map);
            Set<String> keys = map.keySet();
            // Set the maps to the desired ordering
            dNObjectsForward = (String[]) keys.toArray(new String[keys.size()]);
        } else {
            log.debug("Using default values for DN components");
        }
    } catch (IOException e) {
        log.debug("Using default values for DN components");
    } finally {
        try {
            if (inf != null) {
                inf.close();
            }
            if (in != null) {
                in.close();
            }
        } catch (IOException e) {
        }
    }

}

From source file:com.fortify.bugtracker.common.tgt.processor.AbstractTargetProcessorUpdateIssues.java

private boolean updateIssueFieldsIfNecessary(Context context,
        TargetIssueLocatorAndFields targetIssueLocatorAndFields, LinkedHashMap<String, Object> issueFields) {
    ITargetIssueFieldsUpdater updater = getTargetIssueFieldsUpdater();
    if (updater != null && MapUtils.isNotEmpty(issueFields)) {
        if (targetIssueLocatorAndFields.canRetrieveFields()) {
            issueFields = removeUnmodifiedFields(targetIssueLocatorAndFields, issueFields);
        }
        if (MapUtils.isNotEmpty(issueFields) && updater.updateIssueFields(context, targetIssueLocatorAndFields,
                new LinkedHashMap<String, Object>(issueFields))) {
            LOG.info(String.format("[%s] Updated field(s) %s for issue %s", getTargetName(),
                    issueFields.keySet().toString(), targetIssueLocatorAndFields.getLocator().getDeepLink()));
            targetIssueLocatorAndFields.resetFields();
            return true;
        }
    }
    return false;
}

From source file:fr.cirad.mgdb.exporting.markeroriented.GFFExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    ArrayList<String> individualList = new ArrayList<String>();
    for (int i = 0; i < sampleIDs.size(); i++) {
        Individual individual = individuals.get(i);
        if (!individualList.contains(individual.getId())) {
            individualList.add(individual.getId());
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".gff3"));
    String header = "##gff-version 3" + LINE_SEPARATOR;
    zos.write(header.getBytes());

    TreeMap<String, String> typeToOntology = new TreeMap<String, String>();
    typeToOntology.put(Type.SNP.toString(), "SO:0000694");
    typeToOntology.put(Type.INDEL.toString(), "SO:1000032");
    typeToOntology.put(Type.MIXED.toString(), "SO:0001059");
    typeToOntology.put(Type.SYMBOLIC.toString(), "SO:0000109");
    typeToOntology.put(Type.MNP.toString(), "SO:0001059");

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    short nProgress = 0, nPreviousProgress = 0;
    long nLoadedMarkerCount = 0;

    while (markerCursor.hasNext()) {
        int nLoadedMarkerCountInLoop = 0;
        Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
        boolean fStartingNewChunk = true;
        markerCursor.batchSize(nChunkSize);
        while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
            DBObject exportVariant = markerCursor.next();
            DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
            markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                    refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                            + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
            nLoadedMarkerCountInLoop++;
            fStartingNewChunk = false;
        }

        List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
        LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                mongoTemplate, sampleIDs, currentMarkers, true,
                null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
        for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
        {
            Comparable variantId = variant.getId();
            List<String> variantDataOrigin = new ArrayList<String>();

            Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
            List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
            if (chromAndPos.size() == 0)
                LOG.warn("Chromosomal position not found for marker " + variantId);
            // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(variantId);
                if (syn != null)
                    variantId = syn;
            }

            Collection<VariantRunData> runs = variantsAndRuns.get(variant);
            if (runs != null)
                for (VariantRunData run : runs)
                    for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                        SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                        String individualId = individuals
                                .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                .getId();

                        Integer gq = null;
                        try {
                            gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                        } catch (Exception ignored) {
                        }
                        if (gq != null && gq < nMinimumGenotypeQuality)
                            continue;

                        Integer dp = null;
                        try {
                            dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                        } catch (Exception ignored) {
                        }
                        if (dp != null && dp < nMinimumReadDepth)
                            continue;

                        String gtCode = sampleGenotype.getCode();
                        List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                        if (storedIndividualGenotypes == null) {
                            storedIndividualGenotypes = new ArrayList<String>();
                            individualGenotypes.put(individualId, storedIndividualGenotypes);
                        }
                        storedIndividualGenotypes.add(gtCode);
                    }

            zos.write((chromAndPos.get(0) + "\t" + StringUtils.join(variantDataOrigin, ";") /*source*/ + "\t"
                    + typeToOntology.get(variant.getType()) + "\t" + Long.parseLong(chromAndPos.get(1)) + "\t"
                    + Long.parseLong(chromAndPos.get(1)) + "\t" + "." + "\t" + "+" + "\t" + "." + "\t")
                            .getBytes());
            Comparable syn = markerSynonyms == null ? null : markerSynonyms.get(variant.getId());
            zos.write(("ID=" + variant.getId() + ";" + (syn != null ? "Name=" + syn + ";" : "") + "alleles="
                    + StringUtils.join(variant.getKnownAlleleList(), "/") + ";" + "refallele="
                    + variant.getKnownAlleleList().get(0) + ";").getBytes());

            for (int j = 0; j < individualList
                    .size(); j++ /* we use this list because it has the proper ordering*/) {

                NumberFormat nf = NumberFormat.getInstance(Locale.US);
                nf.setMaximumFractionDigits(4);
                HashMap<String, Integer> compt1 = new HashMap<String, Integer>();
                int highestGenotypeCount = 0;
                int sum = 0;

                String individualId = individualList.get(j);
                List<String> genotypes = individualGenotypes.get(individualId);
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes

                String mostFrequentGenotype = null;
                if (genotypes != null)
                    for (String genotype : genotypes) {
                        if (genotype.length() == 0)
                            continue; /* skip missing genotypes */

                        int count = 0;
                        for (String t : variant.getAllelesFromGenotypeCode(genotype)) {
                            for (String t1 : variant.getKnownAlleleList()) {
                                if (t.equals(t1) && !(compt1.containsKey(t1))) {
                                    count++;
                                    compt1.put(t1, count);
                                } else if (t.equals(t1) && compt1.containsKey(t1)) {
                                    if (compt1.get(t1) != 0) {
                                        count++;
                                        compt1.put(t1, count);
                                    } else
                                        compt1.put(t1, count);
                                } else if (!(compt1.containsKey(t1))) {
                                    compt1.put(t1, 0);
                                }
                            }
                        }
                        for (int countValue : compt1.values()) {
                            sum += countValue;
                        }

                        int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                        if (gtCount > highestGenotypeCount) {
                            highestGenotypeCount = gtCount;
                            mostFrequentGenotype = genotype;
                        }
                        genotypeCounts.put(genotype, gtCount);
                    }

                List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                        : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                if (alleles.size() != 0) {
                    zos.write(("acounts=" + individualId + ":").getBytes());

                    for (String knowAllelesCompt : compt1.keySet()) {
                        zos.write(
                                (knowAllelesCompt + " " + nf.format(compt1.get(knowAllelesCompt) / (float) sum)
                                        + " " + compt1.get(knowAllelesCompt) + " ").getBytes());
                    }
                    zos.write((alleles.size() + ";").getBytes());
                }
                if (genotypeCounts.size() > 1) {
                    Comparable sVariantId = markerSynonyms != null ? markerSynonyms.get(variant.getId())
                            : variant.getId();
                    warningFileWriter.write("- Dissimilar genotypes found for variant "
                            + (sVariantId == null ? variant.getId() : sVariantId) + ", individual "
                            + individualId + ". Exporting most frequent: " + StringUtils.join(alleles, ",")
                            + "\n");
                }
            }
            zos.write((LINE_SEPARATOR).getBytes());
        }

        if (progress.hasAborted())
            return;

        nLoadedMarkerCount += nLoadedMarkerCountInLoop;
        nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
        if (nProgress > nPreviousProgress) {
            //            if (nProgress%5 == 0)
            //               LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            zos.write((sLine + "\n").getBytes());
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:fr.cirad.mgdb.exporting.markeroriented.VcfExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    Integer projectId = null;
    for (SampleId spId : sampleIDs) {
        if (projectId == null)
            projectId = spId.getProject();
        else if (projectId != spId.getProject()) {
            projectId = 0;
            break; // more than one project are involved: no header will be written
        }
    }

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    LinkedHashMap<SampleId, String> sampleIDToIndividualIdMap = new LinkedHashMap<SampleId, String>();
    ArrayList<String> individualList = new ArrayList<String>();
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    for (int i = 0; i < sampleIDs.size(); i++) {
        String individualId = individuals.get(i).getId();
        sampleIDToIndividualIdMap.put(sampleIDs.get(i), individualId);
        if (!individualList.contains(individualId)) {
            individualList.add(individualId);
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".vcf"));

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nQueryChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    VariantContextWriter writer = null;
    try {
        List<String> distinctSequenceNames = new ArrayList<String>();

        String sequenceSeqCollName = MongoTemplateManager.getMongoCollectionName(Sequence.class);
        if (mongoTemplate.collectionExists(sequenceSeqCollName)) {
            DBCursor markerCursorCopy = markerCursor.copy();
            markerCursorCopy.batchSize(nQueryChunkSize);
            while (markerCursorCopy.hasNext()) {
                int nLoadedMarkerCountInLoop = 0;
                boolean fStartingNewChunk = true;
                while (markerCursorCopy.hasNext()
                        && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                    DBObject exportVariant = markerCursorCopy.next();
                    String chr = (String) ((DBObject) exportVariant
                            .get(VariantData.FIELDNAME_REFERENCE_POSITION))
                                    .get(ReferencePosition.FIELDNAME_SEQUENCE);
                    if (!distinctSequenceNames.contains(chr))
                        distinctSequenceNames.add(chr);
                }
            }
            markerCursorCopy.close();
        }

        Collections.sort(distinctSequenceNames, new AlphaNumericStringComparator());
        SAMSequenceDictionary dict = createSAMSequenceDictionary(sModule, distinctSequenceNames);
        writer = new CustomVCFWriter(null, zos, dict, false, false, true);
        //         VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        //         vcwb.unsetOption(Options.INDEX_ON_THE_FLY);
        //         vcwb.unsetOption(Options.DO_NOT_WRITE_GENOTYPES);
        //         vcwb.setOption(Options.USE_ASYNC_IOINDEX_ON_THE_FLY);
        //         vcwb.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
        //         vcwb.setReferenceDictionary(dict);
        //         writer = vcwb.build();
        //         writer = new AsyncVariantContextWriter(writer, 3000);

        progress.moveToNextStep(); // done with dictionary
        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
        boolean fWriteCommandLine = true, fWriteEngineHeaders = true; // default values

        while (headerCursor.hasNext()) {
            DBVCFHeader dbVcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerLines.addAll(dbVcfHeader.getHeaderLines());

            // Add sequence header lines (not stored in our vcf header collection)
            BasicDBObject projection = new BasicDBObject(SequenceStats.FIELDNAME_SEQUENCE_LENGTH, true);
            int nSequenceIndex = 0;
            for (String sequenceName : distinctSequenceNames) {
                String sequenceInfoCollName = MongoTemplateManager.getMongoCollectionName(SequenceStats.class);
                boolean fCollectionExists = mongoTemplate.collectionExists(sequenceInfoCollName);
                if (fCollectionExists) {
                    DBObject record = mongoTemplate.getCollection(sequenceInfoCollName).findOne(
                            new Query(Criteria.where("_id").is(sequenceName)).getQueryObject(), projection);
                    if (record == null) {
                        LOG.warn("Sequence '" + sequenceName + "' not found in collection "
                                + sequenceInfoCollName);
                        continue;
                    }

                    Map<String, String> sequenceLineData = new LinkedHashMap<String, String>();
                    sequenceLineData.put("ID", (String) record.get("_id"));
                    sequenceLineData.put("length",
                            ((Number) record.get(SequenceStats.FIELDNAME_SEQUENCE_LENGTH)).toString());
                    headerLines.add(new VCFContigHeaderLine(sequenceLineData, nSequenceIndex++));
                }
            }
            fWriteCommandLine = headerCursor.size() == 1 && dbVcfHeader.getWriteCommandLine(); // wouldn't make sense to include command lines for several runs
            if (!dbVcfHeader.getWriteEngineHeaders())
                fWriteEngineHeaders = false;
        }
        headerCursor.close();

        VCFHeader header = new VCFHeader(headerLines, individualList);
        header.setWriteCommandLine(fWriteCommandLine);
        header.setWriteEngineHeaders(fWriteEngineHeaders);
        writer.writeHeader(header);

        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        HashMap<SampleId, Comparable /*phID*/> phasingIDsBySample = new HashMap<SampleId, Comparable>();

        while (markerCursor.hasNext()) {
            if (progress.hasAborted())
                return;

            int nLoadedMarkerCountInLoop = 0;
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nQueryChunkSize);
            List<Comparable> currentMarkers = new ArrayList<Comparable>();
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                currentMarkers.add((Comparable) exportVariant.get("_id"));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) {
                VariantContext vc = variant.toVariantContext(variantsAndRuns.get(variant),
                        !ObjectId.isValid(variant.getId().toString()), sampleIDToIndividualIdMap,
                        phasingIDsBySample, nMinimumGenotypeQuality, nMinimumReadDepth, warningFileWriter,
                        markerSynonyms == null ? variant.getId() : markerSynonyms.get(variant.getId()));
                try {
                    writer.add(vc);
                } catch (Throwable t) {
                    Exception e = new Exception("Unable to convert to VariantContext: " + variant.getId(), t);
                    LOG.debug("error", e);
                    throw e;
                }

                if (nLoadedMarkerCountInLoop > currentMarkers.size())
                    LOG.error("Bug: writing variant number " + nLoadedMarkerCountInLoop + " (only "
                            + currentMarkers.size() + " variants expected)");
            }

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        progress.setCurrentStepProgress((short) 100);

    } catch (Exception e) {
        LOG.error("Error exporting", e);
        progress.setError(e.getMessage());
        return;
    } finally {
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            BufferedReader in = new BufferedReader(new FileReader(warningFile));
            String sLine;
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        if (writer != null)
            try {
                writer.close();
            } catch (Throwable ignored) {
            }
    }
}

From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * Divide the biomaterials up into chunks based on the experimental factor given, keeping everybody in order.
 *
 * @param ef
 * @param bms
 * @return ordered map of fv->bm where fv is of ef, or null if it couldn't be done properly.
 */
private static LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>> chunkOnFactorVO(
        ExperimentalFactor ef, List<BioMaterialValueObject> bms) {

    if (bms == null) {
        return null;
    }

    LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>> chunks = new LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>>();

    /*
     * Get the factor values in the order we have things right now
     */
    Collection<Long> factorValueIds = EntityUtils.getIds(ef.getFactorValues());
    for (BioMaterialValueObject bm : bms) {
        for (FactorValueValueObject fv : bm.getFactorValueObjects()) {
            if (!factorValueIds.contains(fv.getId())) {
                continue;
            }
            if (chunks.keySet().contains(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterialValueObject>());
        }
    }

    /*
     * What if bm doesn't have a value for the factorvalue. Need a dummy value.
     */
    FactorValueValueObject dummy = new FactorValueValueObject();
    dummy.setFactorId(ef.getId());
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterialValueObject>());

    for (BioMaterialValueObject bm : bms) {
        boolean found = false;
        for (FactorValueValueObject fv : bm.getFactorValueObjects()) {
            if (factorValueIds.contains(fv.getId())) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }

        if (!found) {
            if (log.isDebugEnabled())
                log.debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }

    }

    if (chunks.get(dummy).size() == 0) {
        if (log.isDebugEnabled())
            log.debug("removing dummy");
        chunks.remove(dummy);
    }

    log.debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());

    /*
     * Sanity check
     */
    int total = 0;
    for (FactorValueValueObject fv : chunks.keySet()) {
        List<BioMaterialValueObject> chunk = chunks.get(fv);
        total += chunk.size();
    }

    assert total == bms.size() : "expected " + bms.size() + ", got " + total;

    return chunks;
}
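
Incidentally, the sanity check only needs the chunk lists themselves, so iterating values() is equivalent to walking keySet() and calling get() per key:

    int total = 0;
    for (List<BioMaterialValueObject> chunk : chunks.values()) {
        total += chunk.size();
    }
    assert total == bms.size() : "expected " + bms.size() + ", got " + total;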