Example usage for java.util LinkedHashMap keySet

List of usage examples for java.util LinkedHashMap keySet

Introduction

On this page you can find example usages of java.util.LinkedHashMap#keySet.

Prototype

public Set<K> keySet() 

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:org.i4change.app.installation.ImportInitvalues.java

/**
 * Imports all language label files found below {@code filePath} into the database,
 * then loads the help topics from {@code helpTopics.xml}.
 *
 * Field (label) definitions are shared between languages, so they are only inserted
 * while the first language file is processed; subsequent languages only add their
 * translated values for those labels.
 *
 * @param filePath directory containing the country files, the per-language XML
 *                 files (e.g. "english.xml") and "helpTopics.xml"
 * @throws Exception if a file cannot be read or parsed, or a persistence call fails
 */
public void loadInitLanguages(String filePath) throws Exception {

    this.loadCountriesFiles(filePath);

    // Map of language id -> language name, e.g. {1="deutsch", 2="english", ...}
    LinkedHashMap<Integer, String> listlanguages = this.getLanguageFiles(filePath);

    // TODO empty tables before launch
    // Languagemanagement.getInstance().emptyFieldLanguage();

    boolean langFieldIdIsInited = false;

    // Read all languages files. The map keys were only used to look the names up
    // again, so iterating the values directly avoids the double lookup.
    for (String lang : listlanguages.values()) {
        log.debug("loadInitLanguages lang: " + lang);

        Long languages_id = this.languagemanagement.addLanguage(lang);

        SAXReader reader = new SAXReader();
        Document document = reader.read(filePath + lang + ".xml");

        Element root = document.getRootElement();

        for (Iterator it = root.elementIterator("string"); it.hasNext();) {
            Element item = (Element) it.next();

            // Long.valueOf already returns the wrapper; no unbox/rebox needed.
            Long id = Long.valueOf(item.attributeValue("id"));
            String name = item.attributeValue("name");
            String value = "";

            // Use the text of the (last) <value> child element.
            for (Iterator t2 = item.elementIterator("value"); t2.hasNext();) {
                Element val = (Element) t2.next();
                value = val.getText();
            }

            // Only insert the label definitions for the first language file.
            if (!langFieldIdIsInited) {
                this.fieldmanagment.addFieldByLabelNumber(name, id);
            }

            this.fieldmanagment.addFieldValueByLabeldNumberAndLanguage(id, languages_id, value);
        }
        log.debug("Lang ADDED: " + lang);
        langFieldIdIsInited = true;
    }

    // Add help items

    SAXReader readerHelp = new SAXReader();
    Document documentHelp = readerHelp.read(filePath + "helpTopics.xml");
    Element roothelp = documentHelp.getRootElement();

    for (Iterator it = roothelp.elementIterator("item"); it.hasNext();) {
        Element item = (Element) it.next();

        Integer priority = Integer.valueOf(item.attributeValue("priority"));
        Long labelId = Long.valueOf(item.attributeValue("labelid"));
        Long topicLabelId = Long.valueOf(item.attributeValue("topiclabelid"));
        Long helpId = Long.valueOf(item.attributeValue("helpid"));

        String helpName = item.attributeValue("helpname");
        String isAgentHelp = item.attributeValue("isAgentHelp");

        // "true".equals(...) is null-safe in case the attribute is missing.
        boolean isAgentHelpBool = "true".equals(isAgentHelp);

        this.helpTopicServiceDaoImpl.addHelpTopic(labelId, topicLabelId, helpId, helpName, isAgentHelpBool,
                priority);

    }
}

From source file:org.kuali.rice.kew.docsearch.DocumentSearchCustomizationMediatorImpl.java

/**
 * Validates the document search criteria against the customization service of every
 * application that owns at least one of the document type's searchable attributes.
 *
 * Attribute names are grouped per application id first so that each customization
 * service is called exactly once with all of its attribute names.
 *
 * @param documentType the document type whose searchable attributes are examined
 * @param documentSearchCriteria the criteria to validate
 * @return all validation errors reported by the customization services; never null,
 *         empty when no service reported an error
 */
@Override
public List<RemotableAttributeError> validateLookupFieldParameters(DocumentType documentType,
        DocumentSearchCriteria documentSearchCriteria) {

    List<DocumentTypeAttributeBo> searchableAttributes = documentType.getSearchableAttributes();
    // LinkedHashMap keeps application ids in first-seen order.
    LinkedHashMap<String, List<String>> applicationIdToAttributeNameMap = new LinkedHashMap<String, List<String>>();

    for (DocumentTypeAttributeBo searchableAttribute : searchableAttributes) {
        RuleAttribute ruleAttribute = searchableAttribute.getRuleAttribute();
        String attributeName = ruleAttribute.getName();
        String applicationId = ruleAttribute.getApplicationId();
        // Single lookup instead of containsKey + put + get.
        List<String> attributeNames = applicationIdToAttributeNameMap.get(applicationId);
        if (attributeNames == null) {
            attributeNames = new ArrayList<String>();
            applicationIdToAttributeNameMap.put(applicationId, attributeNames);
        }
        attributeNames.add(attributeName);
    }

    List<RemotableAttributeError> errors = new ArrayList<RemotableAttributeError>();
    // entrySet iteration avoids a second map lookup per application id.
    for (java.util.Map.Entry<String, List<String>> entry : applicationIdToAttributeNameMap.entrySet()) {
        DocumentSearchCustomizationHandlerService documentSearchCustomizationService = loadCustomizationService(
                entry.getKey());
        List<RemotableAttributeError> searchErrors = documentSearchCustomizationService
                .validateCriteria(documentSearchCriteria, entry.getValue());
        if (!CollectionUtils.isEmpty(searchErrors)) {
            errors.addAll(searchErrors);
        }
    }

    return errors;
}

From source file:org.envirocar.wps.util.EnviroCarFeatureParser.java

/**
 * Parses an enviroCar track encoded as JSON into GeoTools simple features; a feature is
 * created for each measurement point of the track.
 *
 * @param url
 *          URL of track (e.g. https://envirocar.org/api/stable/tracks/53433169e4b09d7b34fa824a)
 * @return
 *          GeoTools simple features; a feature is created for each measurement point of track
 * @throws IOException
 *          if opening of URL stream fails
 */
public SimpleFeatureCollection createFeaturesFromJSON(URL url) throws IOException {

    InputStream in = url.openStream();

    ObjectMapper objMapper = new ObjectMapper();

    Map<?, ?> map;
    try {
        map = objMapper.readValue(in, Map.class);
    } finally {
        in.close(); // previously this stream was never closed
    }

    // The GeoJSON document keeps the measurements under "features"; fetch the entry
    // directly instead of scanning every key of the map.
    ArrayList<?> features = null;
    Object featuresObject = map.get("features");
    if (featuresObject instanceof ArrayList<?>) {
        features = (ArrayList<?>) featuresObject;
    }

    GeometryFactory geomFactory = new GeometryFactory();

    List<SimpleFeature> simpleFeatureList = new ArrayList<SimpleFeature>();

    // Short random suffix keeps the generated namespace/type name unique per call.
    String uuid = UUID.randomUUID().toString().substring(0, 5);

    String namespace = "http://www.52north.org/" + uuid;

    SimpleFeatureType sft = null;

    SimpleFeatureBuilder sfb = null;

    typeBuilder = new SimpleFeatureTypeBuilder();
    try {
        typeBuilder.setCRS(CRS.decode("EPSG:4326"));
    } catch (NoSuchAuthorityCodeException e) {
        LOGGER.error("Could not decode EPSG:4326", e);
    } catch (FactoryException e) {
        LOGGER.error("Could not decode EPSG:4326", e);
    }

    typeBuilder.setNamespaceURI(namespace);
    Name nameType = new NameImpl(namespace, "Feature-" + uuid);
    typeBuilder.setName(nameType);

    typeBuilder.add(FeatureProperties.GEOMETRY, Point.class);
    typeBuilder.add(FeatureProperties.ID, String.class);
    typeBuilder.add(FeatureProperties.TIME, String.class);

    // All phenomenon names seen anywhere in the track become feature attributes.
    Set<String> distinctPhenomenonNames = gatherPropertiesForFeatureTypeBuilder(features);

    for (Object object : features) {

        if (object instanceof LinkedHashMap<?, ?>) {
            LinkedHashMap<?, ?> featureMap = (LinkedHashMap<?, ?>) object;

            Object geometryObject = featureMap.get("geometry");

            Point point = null;

            // geometry.coordinates -> [x, y] point
            if (geometryObject instanceof LinkedHashMap<?, ?>) {
                LinkedHashMap<?, ?> geometryMap = (LinkedHashMap<?, ?>) geometryObject;

                Object coordinatesObject = geometryMap.get("coordinates");

                if (coordinatesObject instanceof ArrayList<?>) {
                    ArrayList<?> coordinatesList = (ArrayList<?>) coordinatesObject;

                    Object xObj = coordinatesList.get(0);
                    Object yObj = coordinatesList.get(1);

                    point = geomFactory.createPoint(new Coordinate(Double.parseDouble(xObj.toString()),
                            Double.parseDouble(yObj.toString())));

                }
            }

            Object propertiesObject = featureMap.get("properties");

            if (propertiesObject instanceof LinkedHashMap<?, ?>) {
                LinkedHashMap<?, ?> propertiesMap = (LinkedHashMap<?, ?>) propertiesObject;

                /*
                 * get id and time
                 */

                String id = propertiesMap.get("id").toString();
                String time = propertiesMap.get("time").toString();

                Object phenomenonsObject = propertiesMap.get("phenomenons");

                if (phenomenonsObject instanceof LinkedHashMap<?, ?>) {
                    LinkedHashMap<?, ?> phenomenonsMap = (LinkedHashMap<?, ?>) phenomenonsObject;
                    /*
                     * properties are id, time and phenomenons
                     */
                    // Build the feature type lazily, once, from the gathered names.
                    if (sft == null) {
                        sft = buildFeatureType(distinctPhenomenonNames);
                        sfb = new SimpleFeatureBuilder(sft);
                    }
                    sfb.set(FeatureProperties.ID, id);
                    sfb.set(FeatureProperties.TIME, time);
                    sfb.set(FeatureProperties.GEOMETRY, point);

                    for (Object phenomenonKey : phenomenonsMap.keySet()) {

                        Object phenomenonValue = phenomenonsMap.get(phenomenonKey);

                        if (phenomenonValue instanceof LinkedHashMap<?, ?>) {
                            LinkedHashMap<?, ?> phenomenonValueMap = (LinkedHashMap<?, ?>) phenomenonValue;

                            String value = phenomenonValueMap.get("value").toString();
                            String unit = phenomenonValueMap.get("unit").toString();

                            /*
                             * create property name
                             */
                            String propertyName = phenomenonKey.toString() + " (" + unit + ")";
                            if (sfb != null) {
                                sfb.set(propertyName, value);
                            }

                        }

                    }
                    if (sfb != null) {
                        simpleFeatureList.add(sfb.buildFeature(id));
                    }
                }
            }

        }
    }

    return new ListFeatureCollection(sft, simpleFeatureList);
}

From source file:ubic.gemma.visualization.ExperimentalDesignVisualizationServiceImpl.java

/**
 * Test method for now, shows how this can be used: renders the experimental design
 * layout of the given experiment as a color matrix image written to a temporary
 * .png file (rows = experimental factors, columns = bioassays).
 *
 * @param e the expression experiment whose design is plotted
 */
protected void plotExperimentalDesign(ExpressionExperiment e) {
    LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> layout = getExperimentalDesignLayout(
            e);

    List<String> efStrings = new ArrayList<String>();
    List<String> baStrings = new ArrayList<String>();
    List<double[]> rows = new ArrayList<double[]>(); // one row per experimental factor
    boolean first = true;
    int i = 0; // column index = bioassay index
    for (BioAssayValueObject ba : layout.keySet()) {
        baStrings.add(ba.getName());

        // Look up the per-assay factor map once instead of on every access below.
        LinkedHashMap<ExperimentalFactor, Double> factorValues = layout.get(ba);

        int j = 0; // row index = factor index
        for (ExperimentalFactor ef : factorValues.keySet()) {
            if (first) {
                // Allocate the rows and their labels while walking the first bioassay.
                double[] nextRow = new double[layout.size()];
                rows.add(nextRow);
                efStrings.add(ef.getName() + " ( id=" + ef.getId() + ")"); // make sure they are unique.
            }
            rows.get(j)[i] = factorValues.get(ef);
            j++;
        }
        i++;
        first = false;
    }

    double[][] mat = rows.toArray(new double[][] {});

    DoubleMatrix<String, String> data = DoubleMatrixFactory.dense(mat);
    data.setRowNames(efStrings);
    data.setColumnNames(baStrings);

    ColorMatrix<String, String> cm = new ColorMatrix<String, String>(data, ColorMap.GREENRED_COLORMAP,
            Color.GRAY);

    try {
        writeImage(cm, File.createTempFile(e.getShortName() + "_", ".png"));
    } catch (IOException e1) {
        throw new RuntimeException(e1);
    }
}

From source file:fr.cirad.mgdb.exporting.markeroriented.EigenstratExportHandler.java

/**
 * Exports the markers pointed at by {@code markerCursor} for the given samples in
 * Eigenstrat format: writes a zip to {@code outputStream} containing an .ind file
 * (individuals), an .eigenstratgeno file (one genotype code per individual per
 * variant), a .snp file (variant positions) and, when warnings were produced, a
 * -REMARKS.txt file. Entries of {@code readyToExportFiles} are copied into the
 * zip first.
 *
 * @param outputStream destination for the zip archive
 * @param sModule module name; used to resolve the MongoTemplate and in the export name
 * @param sampleIDs samples whose genotypes are exported
 * @param progress progress indicator, updated per processed marker chunk
 * @param markerCursor cursor over the variants to export
 * @param markerSynonyms optional variant-id synonyms (may be null)
 * @param nMinimumGenotypeQuality genotypes with GQ below this value are skipped
 * @param nMinimumReadDepth genotypes with DP below this value are skipped
 * @param readyToExportFiles pre-built files to copy into the zip (may be null)
 * @throws Exception on any I/O or database failure
 */
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {

    // Warnings are accumulated in a temp file and appended to the zip at the end.
    // NOTE(review): warningFileWriter, snpFileWriter and zos are only closed on the
    // happy path — an exception thrown mid-export leaks them (only snpFile is
    // removed in the finally block). Confirm whether that is intentional.
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    File snpFile = null; // deleted in the finally block

    try {
        snpFile = File.createTempFile("snpFile", "");
        FileWriter snpFileWriter = new FileWriter(snpFile);

        ZipOutputStream zos = new ZipOutputStream(outputStream);
        // ZipOutputStream.STORED (= 0) as a level disables compression when the
        // destination is an in-memory buffer.
        if (ByteArrayOutputStream.class.isAssignableFrom(outputStream.getClass()))
            zos.setLevel(ZipOutputStream.STORED);

        // Copy any pre-built files into the archive first.
        if (readyToExportFiles != null)
            for (String readyToExportFile : readyToExportFiles.keySet()) {
                zos.putNextEntry(new ZipEntry(readyToExportFile));
                InputStream inputStream = readyToExportFiles.get(readyToExportFile);
                byte[] dataBlock = new byte[1024];
                int count = inputStream.read(dataBlock, 0, 1024);
                while (count != -1) {
                    zos.write(dataBlock, 0, count);
                    count = inputStream.read(dataBlock, 0, 1024);
                }
            }

        MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        int markerCount = markerCursor.count();

        List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);

        ArrayList<String> individualList = new ArrayList<String>();
        StringBuffer indFileContents = new StringBuffer();

        // Build the .ind contents: one "<id>\t<gender>\t<population>" line per
        // distinct individual, in sample order.
        for (int i = 0; i < sampleIDs.size(); i++) {
            Individual individual = individuals.get(i);
            if (!individualList.contains(individual.getId())) {
                individualList.add(individual.getId());
                indFileContents
                        .append(individual.getId() + "\t" + getIndividualGenderCode(sModule, individual.getId())
                                + "\t" + (individual.getPopulation() == null ? "." : individual.getPopulation())
                                + LINE_SEPARATOR);
            }
        }

        String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
        zos.putNextEntry(new ZipEntry(exportName + ".ind"));
        zos.write(indFileContents.toString().getBytes());

        zos.putNextEntry(new ZipEntry(exportName + ".eigenstratgeno"));

        // Size DB chunks so one chunk is roughly nMaxChunkSizeInMb, based on the
        // collection's average object size.
        int avgObjSize = (Integer) mongoTemplate
                .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats()
                .get("avgObjSize");
        int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;

        while (markerCursor.hasNext()) {
            int nLoadedMarkerCountInLoop = 0;
            // variant id -> "sequence:start" chromosomal position, in cursor order
            Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nChunkSize);
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
                markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                        refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                                + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
            {
                Comparable variantId = variant.getId();

                List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
                if (chromAndPos.size() == 0)
                    LOG.warn("Chromosomal position not found for marker " + variantId);
                // Prefer the synonym (if any) as the exported variant id.
                if (markerSynonyms != null) {
                    Comparable syn = markerSynonyms.get(variantId);
                    if (syn != null)
                        variantId = syn;
                }
                // One .snp line: id, chromosome, genetic distance (0), position.
                snpFileWriter.write(variantId + "\t" + (chromAndPos.size() == 0 ? "0" : chromAndPos.get(0))
                        + "\t" + 0 + "\t" + (chromAndPos.size() == 0 ? 0l : Long.parseLong(chromAndPos.get(1)))
                        + LINE_SEPARATOR);

                // Collect each individual's genotype codes across all runs, after
                // filtering by minimum genotype quality and read depth.
                Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
                Collection<VariantRunData> runs = variantsAndRuns.get(variant);
                if (runs != null)
                    for (VariantRunData run : runs)
                        for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                            SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                            String individualId = individuals
                                    .get(sampleIDs
                                            .indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                    .getId();

                            // GQ/DP may be absent or non-integer; treat as "no filter".
                            Integer gq = null;
                            try {
                                gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                            } catch (Exception ignored) {
                            }
                            if (gq != null && gq < nMinimumGenotypeQuality)
                                continue;

                            Integer dp = null;
                            try {
                                dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                            } catch (Exception ignored) {
                            }
                            if (dp != null && dp < nMinimumReadDepth)
                                continue;

                            String gtCode = sampleGenotype.getCode();
                            List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                            if (storedIndividualGenotypes == null) {
                                storedIndividualGenotypes = new ArrayList<String>();
                                individualGenotypes.put(individualId, storedIndividualGenotypes);
                            }
                            storedIndividualGenotypes.add(gtCode);
                        }

                for (int j = 0; j < individualList
                        .size(); j++ /* we use this list because it has the proper ordering*/) {
                    String individualId = individualList.get(j);
                    List<String> genotypes = individualGenotypes.get(individualId);
                    HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                    int highestGenotypeCount = 0;
                    String mostFrequentGenotype = null;
                    // Pick the most frequent genotype among this individual's runs.
                    if (genotypes != null)
                        for (String genotype : genotypes) {
                            if (genotype.length() == 0)
                                continue; /* skip missing genotypes */

                            int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                            if (gtCount > highestGenotypeCount) {
                                highestGenotypeCount = gtCount;
                                mostFrequentGenotype = genotype;
                            }
                            genotypeCounts.put(genotype, gtCount);
                        }

                    List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                            : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                    // Eigenstrat code: 9 = missing, otherwise the count of reference
                    // ("0") alleles in the chosen genotype.
                    int nOutputCode = 0;
                    if (mostFrequentGenotype == null)
                        nOutputCode = 9;
                    else
                        for (String all : Helper.split(mostFrequentGenotype, "/"))
                            if ("0".equals(all))
                                nOutputCode++;
                    if (j == 0 && variant.getKnownAlleleList().size() > 2)
                        warningFileWriter.write("- Variant " + variant.getId()
                                + " is multi-allelic. Make sure Eigenstrat genotype encoding specifications are suitable for you.\n");
                    zos.write(("" + nOutputCode).getBytes());

                    if (genotypeCounts.size() > 1 || alleles.size() > 2) {
                        if (genotypeCounts.size() > 1)
                            warningFileWriter.write("- Dissimilar genotypes found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting most frequent: " + nOutputCode + "\n");
                        if (alleles.size() > 2)
                            warningFileWriter.write("- More than 2 alleles found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting only the first 2 alleles.\n");
                    }
                }
                zos.write((LINE_SEPARATOR).getBytes());
            }

            if (progress.hasAborted())
                return;

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }

        // Append the .snp file (written alongside the genotype matrix above).
        snpFileWriter.close();
        zos.putNextEntry(new ZipEntry(exportName + ".snp"));
        BufferedReader in = new BufferedReader(new FileReader(snpFile));
        String sLine;
        while ((sLine = in.readLine()) != null)
            zos.write((sLine + "\n").getBytes());
        in.close();

        // Append the warnings, if any were produced.
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            in = new BufferedReader(new FileReader(warningFile));
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();

        zos.close();
        progress.setCurrentStepProgress((short) 100);
    } finally {
        if (snpFile != null && snpFile.exists())
            snpFile.delete();
    }
}

From source file:de.helmholtz_muenchen.ibis.utils.abstractNodes.BinaryWrapperNode.BinaryWrapperNodeModel.java

/**
 * Returns the parameters which were set by the parameter file, the additional parameter
 * field and by the GUI.
 * Override levels: GUI parameter -> additional parameter -> default parameter file.
 * (The parameter-file and additional-parameter sources are currently disabled; only
 * the GUI parameters are merged.)
 *
 * @param inData Input data tables
 * @return one command token per parameter: "key value" when a value is set, just
 *         "key" for flag-style parameters
 */
protected ArrayList<String> getSetParameters(final BufferedDataTable[] inData) {
    LinkedHashMap<String, String> pars = new LinkedHashMap<String, String>(); // merged parameter set
    LinkedHashMap<String, String> parsGUI = getGUIParameters(inData); // get parameter from GUI

    // merge them all together (only the GUI source is active at the moment)
    pars.putAll(parsGUI);

    // build the command list; entrySet avoids a second lookup per key
    ArrayList<String> commands = new ArrayList<String>();
    for (java.util.Map.Entry<String, String> par : pars.entrySet()) {
        String key = par.getKey();

        if (key.length() > 0) {
            // add value, if some is set
            String value = par.getValue();
            if (value.length() != 0)
                commands.add(key + " " + value);
            else
                commands.add(key);
        }
    }

    // return the commands
    return commands;
}

From source file:org.kutkaitis.timetable2.timetable.MonteCarlo.java

/**
 * Builds the per-student timetables for all five week days from the teacher-oriented
 * timetables: for every lecture slot the scheduled group is resolved and the slot is
 * written into the timetable of each student of that group.
 *
 * Results are stored in the studentsTimeTableForMonday..Friday fields.
 */
private void convertTeachersTimeTableToStudents() {
    List<LinkedHashMap> teachersAllDay = addDaysTimeTablesForIteration();
    studentsTimeTableForMonday = new LinkedHashMap();
    studentsTimeTableForTuesday = new LinkedHashMap();
    studentsTimeTableForWednesday = new LinkedHashMap();
    studentsTimeTableForThursday = new LinkedHashMap();
    studentsTimeTableForFriday = new LinkedHashMap();

    List<LinkedHashMap> allDaysStudentsTimeTables = addStudentsTimeTablesToTheList(studentsTimeTableForMonday,
            studentsTimeTableForTuesday, studentsTimeTableForWednesday, studentsTimeTableForThursday,
            studentsTimeTableForFriday);

    for (LinkedHashMap<String, LinkedHashMap> daysTimeTable : teachersAllDay) {
        // Very dirty hack because of rush: the week day is derived from the
        // position of the timetable in the list.
        int weekDayNumber = teachersAllDay.indexOf(daysTimeTable);
        Days weekDay = decideWeekDay(weekDayNumber);
        Collection<String> teacherNames = daysTimeTable.keySet();
        LinkedHashMap<Student, LinkedHashMap<String, String>> studentsTimeTableForDay = allDaysStudentsTimeTables
                .get(weekDayNumber);

        // NOTE(review): this timetable belongs to the FIRST teacher only, yet it is
        // reused inside the teacherName loop below for every teacher — teacherName is
        // never used to fetch that teacher's own timetable. Looks like a bug; confirm.
        LinkedHashMap<String, String> teachersTimeTableForTheDay = daysTimeTable
                .get(teacherNames.iterator().next());
        Collection<String> lectureNumbers = teachersTimeTableForTheDay.keySet();

        for (String teacherName : teacherNames) {

            for (String lectureNumber : lectureNumbers) {
                // Slot entries look like "<prefix>: <groupName>"; the group name part
                // decides which students receive this slot.
                String groupNameToSplit = teachersTimeTableForTheDay.get(lectureNumber);
                String[] splittedGroupNames = groupNameToSplit.split(":");
                String groupName = splittedGroupNames[1].trim();
                if (!StringUtils.equals(groupName, EMPTY_GROUP)) {
                    List<Student> groupsStudentsList = studentsMockDataFiller.getGroups().get(groupName)
                            .getStudents();

                    // Copy the slot into every group member's timetable for this day.
                    for (Student stud : groupsStudentsList) {
                        studentsTimeTableForDay.get(stud).put(lectureNumber, groupNameToSplit);
                    }
                }

            }
        }
    }
    System.out.println("AllDaysTM: " + allDaysStudentsTimeTables);

}

From source file:org.esigate.extension.surrogate.Surrogate.java

/**
 * The current implementation of ESI cannot execute rules partially. For instance if ESI-Inline
 * is requested, ESI, ESI-Inline, X-ESI-Fragment are executed.
 *
 * <p>
 * This method handles this specific case : if one requested capability enables the Esi extension
 * in this instance, all other Esi capabilities are moved to this instance (and removed from all
 * other surrogates). This prevents broken behavior.
 *
 *
 * @see Esi
 * @see org.esigate.extension.Esi
 *
 * @param targetCapabilities
 *            requested capabilities, keyed by surrogate id
 * @param currentSurrogate
 *            the current surrogate id.
 */
private void fixSurrogateMap(LinkedHashMap<String, List<String>> targetCapabilities, String currentSurrogate) {
    // Hoisted: the current surrogate's capability list is used repeatedly.
    List<String> currentCapabilities = targetCapabilities.get(currentSurrogate);

    // Check if Esigate will perform ESI.
    boolean esiEnabledInEsigate = false;
    for (String c : Esi.CAPABILITIES) {
        if (currentCapabilities.contains(c)) {
            esiEnabledInEsigate = true;
            break;
        }
    }

    if (!esiEnabledInEsigate) {
        return;
    }

    // Ensure all Esi capabilities are executed by our instance and by no other one.
    for (String c : Esi.CAPABILITIES) {
        for (String device : targetCapabilities.keySet()) {
            List<String> deviceCapabilities = targetCapabilities.get(device);
            if (device.equals(currentSurrogate)) {
                if (!deviceCapabilities.contains(c)) {
                    deviceCapabilities.add(c);
                }
            } else {
                deviceCapabilities.remove(c);
            }
        }
    }
}

From source file:org.openmeetings.app.data.user.Organisationmanagement.java

/**
 * TODO/* ww  w .  jav a 2  s .  c  o m*/
 * 
 * @param org
 * @param users
 * @return
 */
@SuppressWarnings({ "unused", "rawtypes" })
private Long updateOrganisationUsersByHashMap(Organisation org, LinkedHashMap users, long insertedby) {
    try {
        LinkedList<Long> usersToAdd = new LinkedList<Long>();
        LinkedList<Long> usersToDel = new LinkedList<Long>();

        List usersStored = this.getUsersByOrganisationId(org.getOrganisation_id());

        for (Iterator it = users.keySet().iterator(); it.hasNext();) {
            Integer key = (Integer) it.next();
            Long userIdToAdd = Long.valueOf(users.get(key).toString()).longValue();
            log.error("userIdToAdd: " + userIdToAdd);
            if (!this.checkUserAlreadyStored(userIdToAdd, usersStored))
                usersToAdd.add(userIdToAdd);
        }

        for (Iterator it = usersStored.iterator(); it.hasNext();) {
            Users us = (Users) it.next();
            Long userIdStored = us.getUser_id();
            log.error("userIdStored: " + userIdStored);
            if (!this.checkUserShouldBeStored(userIdStored, users))
                usersToDel.add(userIdStored);
        }

        log.debug("usersToAdd.size " + usersToAdd.size());
        log.debug("usersToDel.size " + usersToDel.size());

        for (Iterator<Long> it = usersToAdd.iterator(); it.hasNext();) {
            Long user_id = it.next();
            this.addUserToOrganisation(user_id, org.getOrganisation_id(), insertedby);
        }

        for (Iterator<Long> it = usersToDel.iterator(); it.hasNext();) {
            Long user_id = it.next();
            this.deleteUserFromOrganisation(new Long(3), user_id, org.getOrganisation_id());
        }

    } catch (Exception err) {
        log.error("updateOrganisationUsersByHashMap", err);
    }
    return null;
}

From source file:de.ingrid.importer.udk.strategy.v33.IDCStrategyDefault3_3.java

/**
 * Writes a complete syslist (German and English rows) into sys_list.
 * Also drops all old values (if syslist already exists) !
 *
 * @param listId id of syslist
 * @param deleteOldValues pass true if all old syslist values should be deleted before adding new ones !
 * @param syslistMap_de german entries
 * @param syslistMap_en english entries
 * @param defaultEntry_de pass key of GERMAN default entry or -1 if no default entry !
 * @param defaultEntry_en pass key of ENGLISH default entry or -1 if no default entry !
 * @param syslistMap_descr_de pass null if no GERMAN description available
 * @param syslistMap_descr_en pass null if no ENGLISH description available
 * @throws Exception
 */
protected void writeNewSyslist(int listId, boolean deleteOldValues,
        LinkedHashMap<Integer, String> syslistMap_de, LinkedHashMap<Integer, String> syslistMap_en,
        int defaultEntry_de, int defaultEntry_en, LinkedHashMap<Integer, String> syslistMap_descr_de,
        LinkedHashMap<Integer, String> syslistMap_descr_en) throws Exception {

    // Fall back to empty maps so get(key) below simply yields null descriptions.
    if (syslistMap_descr_de == null) {
        syslistMap_descr_de = new LinkedHashMap<Integer, String>();
    }
    if (syslistMap_descr_en == null) {
        syslistMap_descr_en = new LinkedHashMap<Integer, String>();
    }

    if (deleteOldValues) {
        // clean up, to guarantee no old values !
        // listId is an int, so concatenation cannot inject SQL here.
        sqlStr = "DELETE FROM sys_list where lst_id = " + listId;
        jdbc.executeUpdate(sqlStr);
    }

    String psSql = "INSERT INTO sys_list (id, lst_id, entry_id, lang_id, name, maintainable, is_default, description) "
            + "VALUES (?,?,?,?,?,?,?,?)";
    PreparedStatement psInsert = jdbc.prepareStatement(psSql);

    // try/finally guarantees the statement is closed even if an insert fails
    // (the original leaked it on exception).
    try {
        for (int key : syslistMap_de.keySet()) {
            // german version
            String isDefault = (key == defaultEntry_de) ? "Y" : "N";
            psInsert.setLong(1, getNextId());
            psInsert.setInt(2, listId);
            psInsert.setInt(3, key);
            psInsert.setString(4, "de");
            psInsert.setString(5, syslistMap_de.get(key));
            psInsert.setInt(6, 0);
            psInsert.setString(7, isDefault);
            psInsert.setString(8, syslistMap_descr_de.get(key));
            psInsert.executeUpdate();

            // english version: parameters 2 (listId), 3 (key) and 6 (maintainable)
            // are intentionally carried over from the german insert above.
            isDefault = (key == defaultEntry_en) ? "Y" : "N";
            psInsert.setLong(1, getNextId());
            psInsert.setString(4, "en");
            psInsert.setString(5, syslistMap_en.get(key));
            psInsert.setString(7, isDefault);
            psInsert.setString(8, syslistMap_descr_en.get(key));
            psInsert.executeUpdate();
        }
    } finally {
        psInsert.close();
    }
}