Example usage for java.util.HashMap.values()

List of usage examples for java.util.HashMap.values()

Introduction

This page collects usage examples for the java.util.HashMap method values().

Prototype

public Collection<V> values() 

Document

Returns a Collection view of the values contained in this map.
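
Before the real-world examples, here is a minimal standalone sketch of what the values() view does (the map contents are illustrative): the returned Collection is backed by the map, so changes to the map show up in the view, and removing through the view removes the corresponding mapping.

import java.util.Collection;
import java.util.HashMap;

public class ValuesViewDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<>();
        scores.put("alice", 3);
        scores.put("bob", 5);

        // values() returns a live view, not a copy
        Collection<Integer> values = scores.values();
        System.out.println(values.size()); // 2

        // Changes to the map are reflected in the view...
        scores.put("carol", 7);
        System.out.println(values.size()); // 3

        // ...and removing through the view removes the mapping
        values.remove(5);
        System.out.println(scores.containsKey("bob")); // false
    }
}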

Usage

From source file:edu.harvard.i2b2.crc.dao.setfinder.querybuilder.temporal.TemporalQuery.java

/**
 * Parse Query
 * 
 * <P> Parses the string representation of the query XML object into building blocks that will be used to generate the SQL statement.
 * In particular, this method establishes the processing order of groups, panels, and items when building the query
 * 
 * @param queryXml            string that contains the query definition xml object received from the query request
 * @throws JAXBUtilException   exception thrown when errors arise from converting string to xml and vice versa
 * @throws I2B2Exception      exception thrown when i2b2 specific error arises
 */
public void parseQuery() throws I2B2Exception {
    //get query timing out - this will be used as the default timing for all groups 
    //and panels. Panel timing values will override this value

    queryTiming = queryDef.getQueryTiming();
    //first step, get the constraints parsed out by groups they reference
    //also parse through the columns that need to be returned by each panel group

    constraintMapping = new TemporalQueryConstraintMapping(queryDef);

    HashMap<String, TemporalSubQuery> subQuerySet = new HashMap<String, TemporalSubQuery>();

    //second step, treat panel object on the query level as panel group with no temporal component

    if (queryDef.getPanel() != null && queryDef.getPanel().size() > 0) {
        TemporalSubQuery mainQuery = new TemporalSubQuery(this, queryDef.getPanel());

        //check to see if the main query is an "everyone" query
        boolean getEveryoneQuery = false;
        if (mainQuery != null && mainQuery.getPanelCount() == 1) {
            TemporalPanel mainPanel = mainQuery.getPanelList().get(0);
            if (mainPanel != null && mainPanel.isPanelInverted()
                    && (mainPanel.getItemList() == null || mainPanel.getItemList().size() == 0)) {
                getEveryoneQuery = true;
            }
        }

        //if there are no subqueries or the main query is not an "everyone" query, include
        //the main query as a special subquery
        if (queryDef.getSubquery() == null || queryDef.getSubquery().size() == 0 || !getEveryoneQuery) {
            subQuerySet.put("**default**", mainQuery);
        }
    }

    //third step, check for query objects and create corresponding temporal query panel groups
    if (queryDef.getSubquery() != null && queryDef.getSubquery().size() > 0) {
        for (QueryDefinitionType query : queryDef.getSubquery()) {
            HashMap<String, List<QueryConstraintType>> constraints = constraintMapping
                    .getConstraintsForQuery(query.getQueryId());
            EnumSet<TemporalSubQuery.TemporalQueryReturnColumns> returnColumns = constraintMapping
                    .getReturnColumnsForQuery(query.getQueryId());
            TemporalSubQuery subQuery = new TemporalSubQuery(this, query, constraints, returnColumns);
            subQuerySet.put(query.getQueryId(), subQuery);
        }
    }

    //last step: the constraint mapping established the order of the groups.  now copy objects
    //into a flat list for easier processing
    subQueryList = new ArrayList<TemporalSubQuery>();
    subQueryMap = new HashMap<String, Integer>();
    int index = 0;
    for (String eventId : constraintMapping.getOrderedQueryList()) {
        TemporalSubQuery subQuery = subQuerySet.get(eventId);
        if (subQuery != null) {
            subQueryList.add(subQuery);
            subQueryMap.put(eventId, index);
            index++;
            subQuerySet.remove(eventId);
        }
    }

    if (subQuerySet.size() > 0) {
        for (TemporalSubQuery grp : subQuerySet.values()) {
            subQueryList.add(grp);
            subQueryMap.put(grp.getQueryId(), index);
            index++;
        }
    }

}
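
The two loops at the end of parseQuery() show a common values() pattern: entries are first pulled out of the map in a prescribed order, and whatever remains is drained through the values() view. A simplified, self-contained sketch of that pattern (the keys and tasks are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class OrderedDrainDemo {
    public static void main(String[] args) {
        HashMap<String, String> pending = new HashMap<>();
        pending.put("a", "task A");
        pending.put("b", "task B");
        pending.put("c", "task C");

        List<String> ordered = new ArrayList<>();
        // First pass: take entries in an externally defined order
        for (String key : Arrays.asList("b", "a")) {
            String task = pending.remove(key); // returns the mapping, or null
            if (task != null) {
                ordered.add(task);
            }
        }
        // Second pass: append whatever remains, in no particular order
        ordered.addAll(pending.values());
        System.out.println(ordered); // [task B, task A, task C]
    }
}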

From source file:com.webcohesion.enunciate.modules.java_xml_client.JavaXMLClientModule.java

protected File generateClientSources() {
    File sourceDir = getSourceDir();
    sourceDir.mkdirs();

    Map<String, Object> model = new HashMap<String, Object>();

    Map<String, String> conversions = getClientPackageConversions();
    EnunciateJaxbContext jaxbContext = this.jaxbModule.getJaxbContext();
    ClientClassnameForMethod classnameFor = new ClientClassnameForMethod(conversions, jaxbContext);
    model.put("packageFor", new ClientPackageForMethod(conversions, this.context));
    model.put("classnameFor", classnameFor);
    model.put("simpleNameFor", new SimpleNameForMethod(classnameFor));
    model.put("file", new FileDirective(sourceDir, this.enunciate.getLogger()));
    model.put("generatedCodeLicense", this.enunciate.getConfiguration().readGeneratedCodeLicenseFile());
    model.put("annotationValue", new AnnotationValueMethod());

    Set<String> facetIncludes = new TreeSet<String>(this.enunciate.getConfiguration().getFacetIncludes());
    facetIncludes.addAll(getFacetIncludes());
    Set<String> facetExcludes = new TreeSet<String>(this.enunciate.getConfiguration().getFacetExcludes());
    facetExcludes.addAll(getFacetExcludes());
    FacetFilter facetFilter = new FacetFilter(facetIncludes, facetExcludes);

    model.put("isFacetExcluded", new IsFacetExcludedMethod(facetFilter));

    boolean upToDate = isUpToDateWithSources(sourceDir);
    if (!upToDate) {
        try {
            debug("Generating the Java client classes...");

            HashMap<String, WebFault> allFaults = new HashMap<String, WebFault>();
            AntPatternMatcher matcher = new AntPatternMatcher();
            matcher.setPathSeparator(".");

            if (this.jaxwsModule != null) {
                Set<String> seeAlsos = new TreeSet<String>();
                // Process the annotations, the request/response beans, and gather the set of web faults
                // for each endpoint interface.
                for (WsdlInfo wsdlInfo : this.jaxwsModule.getJaxwsContext().getWsdls().values()) {
                    for (EndpointInterface ei : wsdlInfo.getEndpointInterfaces()) {
                        if (facetFilter.accept(ei)) {
                            for (WebMethod webMethod : ei.getWebMethods()) {
                                if (facetFilter.accept(webMethod)) {
                                    for (WebMessage webMessage : webMethod.getMessages()) {
                                        if (webMessage instanceof RequestWrapper) {
                                            model.put("message", webMessage);
                                            processTemplate(getTemplateURL("client-request-bean.fmt"), model);
                                            seeAlsos.add(getBeanName(classnameFor,
                                                    ((RequestWrapper) webMessage).getRequestBeanName()));
                                        } else if (webMessage instanceof ResponseWrapper) {
                                            model.put("message", webMessage);
                                            processTemplate(getTemplateURL("client-response-bean.fmt"), model);
                                            seeAlsos.add(getBeanName(classnameFor,
                                                    ((ResponseWrapper) webMessage).getResponseBeanName()));
                                        } else if (webMessage instanceof WebFault) {
                                            WebFault fault = (WebFault) webMessage;
                                            allFaults.put(fault.getQualifiedName().toString(), fault);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

                //gather the annotation information and process the possible beans for each web fault.
                for (WebFault webFault : allFaults.values()) {
                    boolean implicit = webFault.isImplicitSchemaElement();
                    String faultBean = implicit
                            ? getBeanName(classnameFor, webFault.getImplicitFaultBeanQualifiedName())
                            : classnameFor.convert(webFault.getExplicitFaultBeanType());
                    seeAlsos.add(faultBean);

                    if (implicit) {
                        model.put("fault", webFault);
                        processTemplate(getTemplateURL("client-fault-bean.fmt"), model);
                    }
                }

                model.put("seeAlsoBeans", seeAlsos);
                model.put("baseUri", this.enunciate.getConfiguration().getApplicationRoot());
                for (WsdlInfo wsdlInfo : this.jaxwsModule.getJaxwsContext().getWsdls().values()) {
                    if (wsdlInfo.getWsdlFile() == null) {
                        throw new EnunciateException("WSDL " + wsdlInfo.getId() + " doesn't have a filename.");
                    }

                    for (EndpointInterface ei : wsdlInfo.getEndpointInterfaces()) {
                        if (facetFilter.accept(ei)) {
                            model.put("endpointInterface", ei);
                            model.put("wsdlFileName", wsdlInfo.getFilename());

                            processTemplate(getTemplateURL("client-endpoint-interface.fmt"), model);
                            processTemplate(getTemplateURL("client-soap-endpoint-impl.fmt"), model);
                        }
                    }
                }

                for (WebFault webFault : allFaults.values()) {
                    if (useServerSide(webFault, matcher)) {
                        copyServerSideType(sourceDir, webFault);
                    } else {
                        TypeElement superFault = (TypeElement) ((DeclaredType) webFault.getSuperclass())
                                .asElement();
                        if (superFault != null
                                && allFaults.containsKey(superFault.getQualifiedName().toString())
                                && allFaults.get(superFault.getQualifiedName().toString())
                                        .isImplicitSchemaElement()) {
                            model.put("superFault", allFaults.get(superFault.getQualifiedName().toString()));
                        } else {
                            model.remove("superFault");
                        }

                        model.put("fault", webFault);
                        processTemplate(getTemplateURL("client-web-fault.fmt"), model);
                    }
                }
            }

            for (SchemaInfo schemaInfo : this.jaxbModule.getJaxbContext().getSchemas().values()) {
                for (TypeDefinition typeDefinition : schemaInfo.getTypeDefinitions()) {
                    if (facetFilter.accept(typeDefinition)) {
                        if (useServerSide(typeDefinition, matcher)) {
                            copyServerSideType(sourceDir, typeDefinition);
                        } else {
                            model.put("rootEl",
                                    this.jaxbModule.getJaxbContext().findElementDeclaration(typeDefinition));
                            model.put("type", typeDefinition);
                            URL template = typeDefinition.isEnum()
                                    ? typeDefinition instanceof QNameEnumTypeDefinition
                                            ? getTemplateURL("client-qname-enum-type.fmt")
                                            : getTemplateURL("client-enum-type.fmt")
                                    : typeDefinition.isSimple() ? getTemplateURL("client-simple-type.fmt")
                                            : getTemplateURL("client-complex-type.fmt");
                            processTemplate(template, model);
                        }
                    }
                }

                for (Registry registry : schemaInfo.getRegistries()) {
                    model.put("registry", registry);
                    processTemplate(getTemplateURL("client-registry.fmt"), model);
                }
            }
        } catch (IOException e) {
            throw new EnunciateException(e);
        } catch (TemplateException e) {
            throw new EnunciateException(e);
        }
    } else {
        info("Skipping generation of Java client sources as everything appears up-to-date...");
    }

    context.setProperty(LIRBARY_DESCRIPTION_PROPERTY, readLibraryDescription(model));

    return sourceDir;
}

From source file:de.tudarmstadt.ukp.dkpro.core.io.conll.Conll2012Writer.java

private void convert(JCas aJCas, PrintWriter aOut) {
    Map<Token, Collection<SemanticPredicate>> predIdx = indexCovered(aJCas, Token.class,
            SemanticPredicate.class);
    Map<SemanticArgument, Collection<Token>> argIdx = indexCovered(aJCas, SemanticArgument.class, Token.class);
    Map<Token, Collection<NamedEntity>> neIdx = indexCovering(aJCas, Token.class, NamedEntity.class);
    Map<Token, Collection<WordSense>> wordSenseIdx = indexCovered(aJCas, Token.class, WordSense.class);
    Map<Token, Collection<CoreferenceLink>> corefIdx = indexCovering(aJCas, Token.class, CoreferenceLink.class);
    Map<CoreferenceLink, Integer> corefChainIdx = new HashMap<>();

    int chainId = 1;
    for (CoreferenceChain chain : select(aJCas, CoreferenceChain.class)) {
        for (CoreferenceLink link : chain.links()) {
            corefChainIdx.put(link, chainId);
        }
        chainId++;
    }

    for (Sentence sentence : select(aJCas, Sentence.class)) {
        HashMap<Token, Row> ctokens = new LinkedHashMap<Token, Row>();

        // Tokens
        List<Token> tokens = selectCovered(Token.class, sentence);

        List<SemanticPredicate> preds = selectCovered(SemanticPredicate.class, sentence);

        String[] parseFragments = null;
        List<ROOT> root = selectCovered(ROOT.class, sentence);
        if (root.size() == 1) {
            PennTreeNode rootNode = PennTreeUtils.convertPennTree(root.get(0));
            if ("ROOT".equals(rootNode.getLabel())) {
                rootNode.setLabel("TOP");
            }
            parseFragments = toPrettyPennTree(rootNode);
        }

        if (parseFragments != null && parseFragments.length != tokens.size()) {
            throw new IllegalStateException("Parse fragments do not match tokens - tokens: " + tokens
                    + " parse: " + asList(parseFragments));
        }

        for (int i = 0; i < tokens.size(); i++) {
            Row row = new Row();
            row.id = i;
            row.token = tokens.get(i);
            row.args = new SemanticArgument[preds.size()];
            row.parse = parseFragments != null ? parseFragments[i] : UNUSED;

            // If there are multiple semantic predicates for the current token, then 
            // we keep only the first
            Collection<SemanticPredicate> predsForToken = predIdx.get(row.token);
            if (predsForToken != null && !predsForToken.isEmpty()) {
                row.pred = predsForToken.iterator().next();
            }

            // If there are multiple named entities for the current token, we keep only the
            // first
            Collection<NamedEntity> neForToken = neIdx.get(row.token);
            if (neForToken != null && !neForToken.isEmpty()) {
                row.ne = neForToken.iterator().next();
            }

            // If there are multiple word senses for the current token, we keep only the
            // first
            Collection<WordSense> senseForToken = wordSenseIdx.get(row.token);
            if (senseForToken != null && !senseForToken.isEmpty()) {
                row.wordSense = senseForToken.iterator().next();
            }

            row.coref = corefIdx.get(row.token);

            ctokens.put(row.token, row);
        }

        // Semantic arguments
        for (int p = 0; p < preds.size(); p++) {
            FSArray args = preds.get(p).getArguments();
            for (SemanticArgument arg : select(args, SemanticArgument.class)) {
                for (Token t : argIdx.get(arg)) {
                    Row row = ctokens.get(t);
                    row.args[p] = arg;
                }
            }
        }

        // Write sentence in CONLL 2012 format
        for (Row row : ctokens.values()) {
            String documentId = DocumentMetaData.get(aJCas).getDocumentId();
            if (StringUtils.isBlank(documentId)) {
                documentId = UNUSED;
            }

            int partNumber = 0;

            if (documentId.contains("#")) {
                partNumber = Integer.parseInt(StringUtils.substringAfterLast(documentId, "#"));
                documentId = StringUtils.substringBeforeLast(documentId, "#");
            }

            int id = row.id;

            String form = row.token.getCoveredText();

            String lemma = UNUSED + " ";
            if (writeLemma && (row.token.getLemma() != null)) {
                lemma = row.token.getLemma().getValue();
            }

            String pos = UNUSED;
            if (writePos && (row.token.getPos() != null)) {
                POS posAnno = row.token.getPos();
                pos = posAnno.getPosValue();
            }

            String parse = row.parse;
            if (!parse.endsWith(")")) {
                // This is just the curious way that the CoNLL files are encoded...
                parse += " ";
            }

            String wordSense = UNUSED;
            if (row.wordSense != null) {
                wordSense = row.wordSense.getValue();
            }

            String speaker = UNUSED; // FIXME

            String namedEntity = ALT_UNUSED + " ";
            if (row.ne != null) {
                namedEntity = encodeMultiTokenAnnotation(row.token, row.ne, row.ne.getValue());
            }

            String pred = UNUSED;
            StringBuilder apreds = new StringBuilder();
            if (writeSemanticPredicate) {
                if (row.pred != null) {
                    pred = row.pred.getCategory();
                }

                for (SemanticArgument arg : row.args) {

                    if (apreds.length() > 0) {
                        apreds.append("             ");
                    }

                    String value;
                    if (arg == null) {
                        if (row.pred != null && row.pred.getBegin() == row.token.getBegin()
                                && row.pred.getEnd() == row.token.getEnd()) {
                            value = "(V*)";
                        } else {
                            value = ALT_UNUSED + ' ';
                        }
                    } else {
                        value = encodeMultiTokenAnnotation(row.token, arg, arg.getRole());
                    }
                    apreds.append(String.format("%10s", value));
                }
            }

            StringBuilder coref = new StringBuilder();
            if (!row.coref.isEmpty()) {
                for (CoreferenceLink link : row.coref) {
                    if (coref.length() > 0) {
                        coref.append('|');
                    }
                    coref.append(encodeMultiTokenLink(row.token, link, corefChainIdx.get(link)));
                }
            }
            if (coref.length() == 0) {
                coref.append(UNUSED);
            }

            aOut.printf("%s %3d %3d %10s %5s %13s %9s %3s %3s %10s %10s %10s %s\n", documentId, partNumber, id,
                    form, pos, parse, lemma, pred, wordSense, speaker, namedEntity, apreds, coref);
        }

        aOut.println();
    }

    aOut.println("#end document");
}
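
A detail worth noting in convert(): ctokens is declared as a HashMap but instantiated as a LinkedHashMap, so the final for (Row row : ctokens.values()) loop visits rows in token (insertion) order. A small sketch of why that matters (keys and values are illustrative):

import java.util.HashMap;
import java.util.LinkedHashMap;

public class ValuesOrderDemo {
    public static void main(String[] args) {
        // LinkedHashMap extends HashMap, so this assignment is legal
        HashMap<Integer, String> rows = new LinkedHashMap<>();
        rows.put(3, "third");
        rows.put(1, "first");
        rows.put(2, "second");

        // values() iterates in insertion order: third, first, second
        for (String row : rows.values()) {
            System.out.println(row);
        }
        // With a plain HashMap, iteration order would be unspecified
    }
}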

From source file:edu.umass.cs.gigapaxos.PaxosInstanceStateMachine.java

private void getActualDecisions(HashMap<Integer, PValuePacket> missing) {
    if (missing.isEmpty())
        return;
    Integer minSlot = null, maxSlot = null;
    // find meta value commits
    for (Integer slot : missing.keySet()) {
        if (missing.get(slot).isMetaValue()) {
            if (minSlot == null)
                minSlot = (maxSlot = slot);
            if (slot - minSlot < 0)
                minSlot = slot;
            if (slot - maxSlot > 0)
                maxSlot = slot;
        }
    }

    if (!(minSlot == null || (minSlot - this.paxosState.getMaxAcceptedSlot() > 0))) {

        // get logged accepts for meta commit slots
        Map<Integer, PValuePacket> accepts = this.paxosManager.getPaxosLogger()
                .getLoggedAccepts(this.getPaxosID(), this.getVersion(), minSlot, maxSlot + 1);

        // reconstruct decision from accept
        for (PValuePacket pvalue : accepts.values())
            if (missing.containsKey(pvalue.slot) && missing.get(pvalue.slot).isMetaValue())
                missing.put(pvalue.slot, pvalue.makeDecision(pvalue.getMedianCheckpointedSlot()));
    }

    // remove remaining meta value decisions
    for (Iterator<PValuePacket> pvalueIter = missing.values().iterator(); pvalueIter.hasNext();) {
        PValuePacket decision = pvalueIter.next();
        if (decision.isMetaValue()) {
            if (this.paxosState.getSlot() - decision.slot > 0)
                log.log(Level.FINE, "{0} has no body for executed meta-decision {1} "
                        + "(likely because placeholder decision was logged without a corresponding accept)",
                        new Object[] { this, decision });
            pvalueIter.remove();
        }
    }
}
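
The last loop in getActualDecisions() demonstrates the one safe way to delete entries while iterating: through the Iterator of the values() view. Calling remove() on that iterator deletes the current mapping from the underlying map, whereas calling the map's own remove() inside the loop would throw ConcurrentModificationException. A minimal sketch (slot numbers and labels are illustrative):

import java.util.HashMap;
import java.util.Iterator;

public class ValuesIteratorRemoveDemo {
    public static void main(String[] args) {
        HashMap<Integer, String> slots = new HashMap<>();
        slots.put(1, "decision");
        slots.put(2, "meta");
        slots.put(3, "meta");

        // Remove entries through the values() iterator, not via slots.remove(...)
        for (Iterator<String> it = slots.values().iterator(); it.hasNext();) {
            if ("meta".equals(it.next())) {
                it.remove(); // removes the current mapping from the map
            }
        }
        System.out.println(slots); // {1=decision}
    }
}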

From source file:edu.cornell.mannlib.vitro.webapp.dao.jena.IndividualDaoSDB.java

/**
 * In Jena it can be difficult to get an object with a given dataproperty if
 * you do not care about the datatype or lang of the literal.  Use this
 * method if you would like to ignore the lang and datatype.  
 *
 * Note: this method doesn't require that a property be declared in the 
 * ontology as a data property -- only that it behaves as one.
 */
@Override
public List<Individual> getIndividualsByDataProperty(String dataPropertyUri, String value) {
    OntModel fullModel = getOntModelSelector().getFullModel();

    Property prop = null;
    if (RDFS.label.getURI().equals(dataPropertyUri)) {
        prop = RDFS.label;
    } else {
        prop = fullModel.getProperty(dataPropertyUri);
    }

    if (prop == null) {
        log.debug("Could not getIndividualsByDataProperty() " + "because " + dataPropertyUri
                + "was not found in model.");
        return Collections.emptyList();
    }

    if (value == null) {
        log.debug("Could not getIndividualsByDataProperty() " + "because value was null");
        return Collections.emptyList();
    }

    Literal litv1 = fullModel.createLiteral(value);
    Literal litv2 = fullModel.createTypedLiteral(value);

    //warning: this assumes that any language tags will be EN
    Literal litv3 = fullModel.createLiteral(value, "EN");

    HashMap<String, Individual> individualsMap = new HashMap<String, Individual>();

    fullModel.enterCriticalSection(Lock.READ);
    int count = 0;
    try {
        StmtIterator stmts = fullModel.listStatements((Resource) null, prop, litv1);
        while (stmts.hasNext()) {
            count++;
            Statement stmt = stmts.nextStatement();

            RDFNode sub = stmt.getSubject();
            if (sub == null || sub.isAnon() || sub.isLiteral())
                continue;

            RDFNode obj = stmt.getObject();
            if (obj == null || !obj.isLiteral())
                continue;

            Literal literal = (Literal) obj;
            Object v = literal.getValue();
            if (v == null)
                continue;

            String subUri = ((Resource) sub).getURI();
            if (!individualsMap.containsKey(subUri)) {
                individualsMap.put(subUri, makeIndividual(subUri));
            }
        }

        stmts = fullModel.listStatements((Resource) null, prop, litv2);
        while (stmts.hasNext()) {
            count++;
            Statement stmt = stmts.nextStatement();

            RDFNode sub = stmt.getSubject();
            if (sub == null || sub.isAnon() || sub.isLiteral())
                continue;

            RDFNode obj = stmt.getObject();
            if (obj == null || !obj.isLiteral())
                continue;

            Literal literal = (Literal) obj;
            Object v = literal.getValue();
            if (v == null)
                continue;

            String subUri = ((Resource) sub).getURI();
            if (!individualsMap.containsKey(subUri)) {
                individualsMap.put(subUri, makeIndividual(subUri));
            }
        }

        stmts = fullModel.listStatements((Resource) null, prop, litv3);
        while (stmts.hasNext()) {
            count++;
            Statement stmt = stmts.nextStatement();

            RDFNode sub = stmt.getSubject();
            if (sub == null || sub.isAnon() || sub.isLiteral())
                continue;

            RDFNode obj = stmt.getObject();
            if (obj == null || !obj.isLiteral())
                continue;

            Literal literal = (Literal) obj;
            Object v = literal.getValue();
            if (v == null)
                continue;

            String subUri = ((Resource) sub).getURI();
            if (!individualsMap.containsKey(subUri)) {
                individualsMap.put(subUri, makeIndividual(subUri));
            }
        }
    } finally {
        fullModel.leaveCriticalSection();
    }

    List<Individual> rv = new ArrayList<Individual>(individualsMap.size());
    rv.addAll(individualsMap.values());
    return rv;
}
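
getIndividualsByDataProperty() ends with the usual idiom for turning a deduplicating map into a result list: copy values() into a new collection. The copy is a snapshot, so later changes to the map do not affect it. A brief sketch (the URIs and labels are illustrative):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class ValuesSnapshotDemo {
    public static void main(String[] args) {
        HashMap<String, String> byUri = new HashMap<>();
        byUri.put("urn:a", "Individual A");
        byUri.put("urn:b", "Individual B");

        // Copy into a list: later changes to byUri do not affect the snapshot
        List<String> snapshot = new ArrayList<>(byUri.values());
        byUri.put("urn:c", "Individual C");
        System.out.println(snapshot.size()); // still 2
    }
}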

From source file:de.tum.bgu.msm.syntheticPopulationGenerator.kagawa.SyntheticPopJP.java

private void generateHouseholdsPersonsDwellings() {

    //Generate the synthetic population using Monte Carlo (select the households according to the weight)
    //Once the household is selected, all the characteristics of the household will be copied (including the household members)
    logger.info("   Starting to generate households and persons.");

    //List of households of the micro data
    int previousHouseholds = 0;
    int previousPersons = 0;

    //Define income distribution
    double incomeShape = ResourceUtil.getDoubleProperty(rb, PROPERTIES_INCOME_GAMMA_SHAPE);
    double incomeRate = ResourceUtil.getDoubleProperty(rb, PROPERTIES_INCOME_GAMMA_RATE);
    double[] incomeProbability = ResourceUtil.getDoubleArray(rb, PROPERTIES_INCOME_GAMMA_PROBABILITY);
    GammaDistributionImpl gammaDist = new GammaDistributionImpl(incomeShape, 1 / incomeRate);

    //Create a map to store the household IDs by municipality
    HashMap<Integer, HashMap<Integer, Integer>> householdByMunicipality = new HashMap<>();

    generateCountersForValidation();

    RealEstateDataManager realEstate = dataContainer.getRealEstateData();
    HouseholdDataManager householdDataManager = dataContainer.getHouseholdData();
    HouseholdFactory householdFactory = HouseholdUtil.getFactory();

    regionsforFrequencyMatrix = SiloUtil.readCSVfile(rb.getString(PROPERTIES_ATRIBUTES_ZONAL_DATA));
    regionsforFrequencyMatrix.buildIndex(regionsforFrequencyMatrix.getColumnPosition("V1"));
    householdsForFrequencyMatrix = new HashMap<>();
    for (int i = 1; i <= microDataDwelling.getRowCount(); i++) {
        int v2Zone = (int) microDataDwelling.getValueAt(i, "PtResCode");
        int ddID = (int) microDataDwelling.getValueAt(i, "id");
        if (householdsForFrequencyMatrix.containsKey(v2Zone)) {
            householdsForFrequencyMatrix.get(v2Zone).put(ddID, 1);
        } else {
            HashMap<Integer, Integer> map = new HashMap<>();
            map.put(ddID, 1);
            householdsForFrequencyMatrix.put(v2Zone, map);
        }
    }

    //Selection of households, persons, jobs and dwellings per municipality
    for (int municipality = 0; municipality < cityID.length; municipality++) {
        logger.info("   Municipality " + cityID[municipality] + ". Starting to generate households.");

        //-----------***** Data preparation *****-------------------------------------------------------------------
        //Create local variables to avoid accessing to the same variable on the parallel processing
        int municipalityID = cityID[municipality];
        int v2zone = (int) regionsforFrequencyMatrix.getIndexedValueAt(municipalityID, "V2");
        if (householdsForFrequencyMatrix.containsKey(v2zone)) {
            String[] attributesHouseholdIPU = attributesMunicipality;
            TableDataSet rasterCellsMatrix = cellsMatrix;
            TableDataSet microHouseholds = microDataHousehold;
            TableDataSet microPersons = microDataPerson;
            TableDataSet microDwellings = microDataDwelling;
            microHouseholds.buildIndex(microHouseholds.getColumnPosition("id"));
            microDwellings.buildIndex(microDwellings.getColumnPosition("id"));
            int totalHouseholds = (int) marginalsMunicipality.getIndexedValueAt(municipalityID, "hhTotal");
            int[] agePerson = ageBracketsPerson;
            int[] levelEdu = new int[4];
            double[] probEdu = new double[4];
            for (int i = 0; i < levelEdu.length; i++) {
                probEdu[i] = marginalsMunicipality.getIndexedValueAt(municipalityID, "Ed_" + i);
            }
            //Probability of floor size for vacant dwellings
            double[] sizeDistribution = new double[sizeBracketsDwelling.length];
            for (int row = 0; row < sizeBracketsDwelling.length; row++) {
                String name = "HA_LT_" + sizeBracketsDwelling[row] + "sqm";
                sizeDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name);
            }
            //Probability for year and building size for vacant dwellings
            double[] yearDistribution = new double[yearBracketsDwelling.length];
            for (int row = 0; row < yearBracketsDwelling.length; row++) {
                String name = "HY_" + yearBracketsDwelling[row];
                yearDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name)
                        / totalHouseholds;
            }
            //Average price per sqm of the zone according to building type
            float[] averagePriceDistribution = new float[typeBracketsDwelling.length];
            for (int row = 0; row < typeBracketsDwelling.length; row++) {
                String name = "HPrice_" + typeBracketsDwelling[row];
                averagePriceDistribution[row] = marginalsMunicipality.getIndexedValueAt(municipalityID, name);
            }

            HashMap<Integer, Integer> hhs = householdsForFrequencyMatrix.get(v2zone);
            int[] hhFromV2 = hhs.keySet().stream().mapToInt(Integer::intValue).toArray();
            HashMap<Integer, Integer> generatedHouseholds = new HashMap<>();

            //obtain the raster cells of the municipality and their weight within the municipality
            int[] tazInCity = cityTAZ.get(municipalityID);
            double[] probTaz = new double[tazInCity.length];
            double tazRemaining = 0;
            for (int i = 0; i < tazInCity.length; i++) {
                probTaz[i] = rasterCellsMatrix.getIndexedValueAt(tazInCity[i], "Population");
                tazRemaining = tazRemaining + probTaz[i];
            }

            double hhRemaining = 0;
            HashMap<Integer, Double> prob = new HashMap<>();
            for (int row = 0; row < hhFromV2.length; row++) {
                double value = weightsTable.getIndexedValueAt(hhFromV2[row], Integer.toString(municipalityID));
                hhRemaining = hhRemaining + value;
                prob.put(hhFromV2[row], value);
            }

            //marginals for the municipality
            int hhPersons = 0;
            int hhTotal = 0;
            int quartersTotal = 0;
            int id = 0;

            //for all the households that are inside the municipality (we will match perfectly the number of households. The total population will vary compared to the marginals.)
            for (int row = 0; row < totalHouseholds; row++) {

                //select the household to copy from the micro data(with replacement)
                double[] probability = prob.values().stream().mapToDouble(Double::doubleValue).toArray();
                int[] hhIds = prob.keySet().stream().mapToInt(Integer::intValue).toArray();
                int selectedHh = select(probability, hhIds, hhRemaining)[0];
                if (prob.get(selectedHh) > 1) {
                    prob.put(selectedHh, prob.get(selectedHh) - 1);
                    hhRemaining = hhRemaining - 1;
                } else {
                    hhRemaining = hhRemaining - prob.get(selectedHh);
                    prob.remove(selectedHh);
                }

                //Select the taz to allocate the household (without replacement)
                int[] recordsCell = select(probTaz, tazInCity, tazRemaining);
                int selectedTAZ = recordsCell[0];

                //copy the private household characteristics
                int householdSize = (int) microHouseholds.getIndexedValueAt(selectedHh, "HHsize");
                int householdCars = Math.min((int) microHouseholds.getIndexedValueAt(selectedHh, "N_Car"), 3);
                id = householdDataManager.getNextHouseholdId();
                int newDdId = RealEstateDataManager.getNextDwellingId();
                Household household = householdFactory.createHousehold(id, newDdId, householdCars); //(int id, int dwellingID, int homeZone, int hhSize, int autos)
                householdDataManager.addHousehold(household);
                hhTotal++;

                //copy the household members characteristics
                PersonFactory factory = PersonUtils.getFactory();
                for (int rowPerson = 0; rowPerson < householdSize; rowPerson++) {
                    int idPerson = householdDataManager.getNextPersonId();
                    int personCounter = (int) microHouseholds.getIndexedValueAt(selectedHh, "firstPerson")
                            + rowPerson;
                    int age = (int) microPersons.getValueAt(personCounter, "age");
                    Gender gender = Gender.valueOf((int) microDataPerson.getValueAt(personCounter, "gender"));
                    Occupation occupation = Occupation.UNEMPLOYED;
                    int jobType = 1;
                    if ((int) microDataPerson.getValueAt(personCounter, "occupation") == 1) {
                        occupation = Occupation.EMPLOYED;
                        if ((int) microDataPerson.getValueAt(personCounter, "jobType") == 1) {
                            jobType = 1;
                        } else if ((int) microDataPerson.getValueAt(personCounter, "jobType") == 2) {
                            jobType = 2;
                        } else {
                            jobType = 3;
                        }
                    }
                    int income = 0;
                    int education = 0;
                    if (age > 15) {
                        education = SiloUtil.select(probEdu, levelEdu);
                        try {
                            income = (int) translateIncome((int) (Math.random() * 10), incomeProbability,
                                    gammaDist) * 12; //convert monthly income to yearly income
                        } catch (MathException e) {
                            e.printStackTrace();
                        }
                    }
                    Person pers = factory.createPerson(idPerson, age, gender, Race.white, occupation, null, 0,
                            income); //(int id, int hhid, int age, int gender, Race race, int occupation, int workplace, int income)
                    householdDataManager.addPerson(pers);
                    householdDataManager.addPersonToHousehold(pers, household);
                    jobTypeByWorker.put(pers, jobType);
                    PersonRole role = PersonRole.CHILD; //default value = child
                    if ((int) microPersons.getValueAt(personCounter, "personRole") == 1) { //the person is single
                        role = PersonRole.SINGLE;
                    } else if ((int) microPersons.getValueAt(personCounter, "personRole") == 2) { // the person is married
                        role = PersonRole.MARRIED;
                    }
                    pers.setRole(role);
                    pers.setNationality(Nationality.GERMAN);
                    boolean license = false;
                    if (microPersons.getValueAt(personCounter, "DrivLicense") == 1) {
                        license = true;
                    }
                    pers.setDriverLicense(license);
                    pers.setSchoolType((int) microPersons.getValueAt(personCounter, "school"));
                    hhPersons++;
                    //counterMunicipality = updateCountersPerson(pers, counterMunicipality, municipality,ageBracketsPerson);
                }
                //counterMunicipality = updateCountersHousehold(household, counterMunicipality, municipality);

                //Copy the dwelling of that household
                int bedRooms = 1; //Not on the micro data
                int year = select(yearDistribution, yearBracketsDwelling)[0]; //the category
                int floorSpace = select(sizeDistribution, sizeBracketsDwelling)[0];
                int usage = (int) microDwellings.getIndexedValueAt(selectedHh, "H_");
                int buildingSize = (int) microDwellings.getIndexedValueAt(selectedHh, "ddT_");
                DefaultDwellingTypeImpl ddType = translateDwellingType(buildingSize);
                int quality = 1; //depend on year built and type of heating
                year = selectDwellingYear(year); //convert from year class to actual 4-digit year
                int price = estimatePrice(ddType, floorSpace);
                Dwelling dwell = DwellingUtils.getFactory().createDwelling(newDdId, selectedTAZ, null, id,
                        ddType, bedRooms, quality, price, 0, year);
                realEstate.addDwelling(dwell);
                dwell.setFloorSpace(floorSpace);
                dwell.setUsage(DwellingUsage.valueOf(usage));
                dwell.setBuildingSize(buildingSize);
                generatedHouseholds.put(dwell.getId(), 1);
            }
            int households = householdDataManager.getHighestHouseholdIdInUse() - previousHouseholds;
            int persons = householdDataManager.getHighestPersonIdInUse() - previousPersons;
            previousHouseholds = householdDataManager.getHighestHouseholdIdInUse();
            previousPersons = householdDataManager.getHighestPersonIdInUse();

            //Consider if I need to add also the errors from other attributes. They must be at the marginals file, or one extra file
            //For county level they should be calculated on a next step, outside this loop.
            float averageError = 0f;
            /*for (int attribute = 1; attribute < attributesHouseholdIPU.length; attribute++){
            float error = Math.abs((counterMunicipality.getIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute]) -
                    marginalsMunicipality.getIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute])) /
                    marginalsMunicipality.getIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute]));
            errorMunicipality.setIndexedValueAt(municipalityID,attributesHouseholdIPU[attribute],error);
            averageError = averageError + error;
            }
            averageError = averageError / (1 + attributesHouseholdIPU.length) * 100;*/
            householdByMunicipality.put(municipalityID, generatedHouseholds);

            logger.info("   Municipality " + municipalityID + ". Generated " + hhPersons + " persons in "
                    + hhTotal + " households. Average error of " + averageError + " %.");

        } else {
            logger.info("   Municipality " + municipalityID + " has no TAZ assigned.");
        }
    }
    int households = householdDataManager.getHighestHouseholdIdInUse();
    int persons = householdDataManager.getHighestPersonIdInUse();
    logger.info("   Finished generating households and persons. A population of " + persons + " persons in "
            + households + " households was generated.");

    //Vacant dwellings--------------------------------------------
    //They have similar characteristics to the dwellings that are occupied (assume that there is no difference between the occupied and vacant dwellings in terms of quality)
    int vacantCounter = 0;
    for (int municipality = 0; municipality < cityID.length; municipality++) {

        logger.info("   Municipality " + cityID[municipality] + ". Starting to generate vacant dwellings.");
        int municipalityID = cityID[municipality];
        int vacantDwellings = (int) marginalsMunicipality.getIndexedValueAt(cityID[municipality], "dd_Vacant");
        TableDataSet rasterCellsMatrix = cellsMatrix;
        int[] occupiedDwellings = householdByMunicipality.get(municipalityID).keySet().stream()
                .mapToInt(Integer::intValue).toArray();

        //obtain the raster cells of the municipality and their weight within the municipality
        int[] tazInCity = cityTAZ.get(municipalityID);
        double[] probTaz = new double[tazInCity.length];
        double sumProbTaz = 0;
        for (int i = 0; i < tazInCity.length; i++) {
            probTaz[i] = rasterCellsMatrix.getIndexedValueAt(tazInCity[i], "Population");
            sumProbTaz = sumProbTaz + probTaz[i];
        }

        //Select the vacant dwelling and copy characteristics
        for (int row = 0; row < vacantDwellings; row++) {

            //Allocation
            int ddTAZ = select(probTaz, tazInCity, sumProbTaz)[0]; // I allocate vacant dwellings using the same proportion as occupied dwellings.
            //Select one occupied dwelling to copy
            int dd = selectEqualProbability(occupiedDwellings)[0];
            //Copy characteristics
            int newDdId = realEstate.getNextDwellingId();
            Dwelling ddToCopy = realEstate.getDwelling(dd);
            int bedRooms = ddToCopy.getBedrooms();
            int price = ddToCopy.getPrice();
            int quality = ddToCopy.getQuality();
            int year = ddToCopy.getYearBuilt();
            DwellingType type = ddToCopy.getType(); //using always type MF234
            int floorSpaceDwelling = ddToCopy.getFloorSpace();
            Dwelling dwell = DwellingUtils.getFactory().createDwelling(newDdId, ddTAZ, null, -1,
                    DefaultDwellingTypeImpl.MF234, bedRooms, quality, price, 0, year);
            dwell.setUsage(DwellingUsage.VACANT); //vacant dwelling = 3; and hhID is equal to -1
            dwell.setFloorSpace(floorSpaceDwelling);
            vacantCounter++;
        }
        logger.info("   The number of vacant dwellings is: " + vacantCounter);
    }
    //Write the files for all municipalities
    String name = ("microData/interimFiles/totalsSynPop.csv");
    SiloUtil.writeTableDataSet(counterMunicipality, name);
    String name1 = ("microData/interimFiles/errorsSynPop.csv");
    SiloUtil.writeTableDataSet(errorMunicipality, name1);

}

From source file:exm.stc.frontend.ASTWalker.java

/**
 * Check that app output args are not omitted from the command line and warn
 * if they are (unless the unused_output warning is suppressed).
 * @param context
 * @param outArgs
 * @param args
 * @param redirFutures
 * @param suppressions
 * @throws UserException
 */
private void checkAppOutputs(Context context, List<Var> outArgs, List<Var> args, Redirects<Var> redirFutures,
        Set<Suppression> suppressions) throws UserException {
    boolean deferredError = false;
    HashMap<String, Var> outMap = new HashMap<String, Var>();
    for (Var output : outArgs) {
        // Check output types
        if (!Types.isFile(output) && !Types.isVoid(output)) {
            LogHelper.error(context, "Output argument " + output.name() + " has "
                    + " invalid type for app output: " + output.type().typeName());
            deferredError = true;
        }
        outMap.put(output.name(), output);
    }
    if (redirFutures.stdout != null) {
        // Already typechecked
        Var output = redirFutures.stdout;
        outMap.put(output.name(), output);
    }

    for (Var arg : args) {
        if (arg.defType() == DefType.OUTARG) {
            outMap.remove(arg.name());
        }
    }
    for (Var redir : redirFutures.redirections(false, true)) {
        if (redir.defType() == DefType.OUTARG) {
            outMap.remove(redir.name());
        }
    }

    for (Var unreferenced : outMap.values()) {
        if (!Types.isVoid(unreferenced.type()) && !suppressions.contains(Suppression.UNUSED_OUTPUT)) {
            LogHelper.warn(context,
                    "Output argument " + unreferenced.name()
                            + " is not referenced in app command line.  This usually "
                            + "indicates an error.  However, if this is intended, for example "
                            + "if the file location is implicit, you can suppress this warning "
                            + "by annotating the function with @suppress=unused_output");
        }
    }
    if (deferredError) {
        throw new UserException(context, "Compilation failed due to type " + "error in definition of function "
                + context.getFunctionContext().getFunctionName());
    }
}

From source file:com.example.android.basicsyncadapter.SyncAdapter.java

/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult) throws IOException,
        XmlPullParserException, RemoteException, OperationApplicationException, ParseException {
    //final FeedParser feedParser = new FeedParser();
    final CAPFeedParser feedParser = new CAPFeedParser();
    final ContentResolver contentResolver = getContext().getContentResolver();

    //Log.i(TAG, "Parsing stream as Atom feed");
    final List<CAPFeedParser.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, CAPFeedParser.Entry> entryMap = new HashMap<String, CAPFeedParser.Entry>();
    for (CAPFeedParser.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items
    //Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    //Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String title;
    String description;
    String headline;
    String url;
    String areas;
    String issued;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;

        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        title = c.getString(COLUMN_TITLE);
        description = c.getString(COLUMN_DESCRIPTION);
        headline = c.getString(COLUMN_HEADLINE);
        areas = c.getString(COLUMN_AREAS);
        url = c.getString(COLUMN_LINK);
        issued = c.getString(COLUMN_ISSUED);

        CAPFeedParser.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id))
                    .build();
            if ((match.title != null && !match.title.equals(title))
                    || (match.link != null && !match.link.equals(url))
                    || (match.issued != null && !match.issued.equals(issued))) {
                // Update existing record
                //Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, title)
                        .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, description)
                        .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, headline)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, issued)
                        .withValue(FeedContract.Entry.COLUMN_NAME_LINK, url)
                        .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, areas).build());
                syncResult.stats.numUpdates++;
            } else {
                //Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
            //Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (CAPFeedParser.Entry e : entryMap.values()) {
        //Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, e.title)
                .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, e.description)
                .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, e.headline)
                .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, e.issued)
                .withValue(FeedContract.Entry.COLUMN_NAME_LINK, e.link)
                .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, e.areas).build());
        syncResult.stats.numInserts++;
    }
    //Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}
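
The merge strategy documented above boils down to a standard HashMap idiom: index the incoming items by id, consume matches while scanning the existing data, and treat whatever survives in values() as the inserts. A self-contained sketch of that skeleton (the ids and titles are illustrative, not from the sync adapter):

import java.util.Arrays;
import java.util.HashMap;

public class MergeDemo {
    public static void main(String[] args) {
        // Incoming data, indexed by id
        HashMap<String, String> incoming = new HashMap<>();
        incoming.put("e1", "title one");
        incoming.put("e2", "title two");

        // Existing rows: e2 matches an incoming entry, e3 is stale
        for (String existingId : Arrays.asList("e2", "e3")) {
            String match = incoming.remove(existingId);
            if (match != null) {
                System.out.println("update " + existingId + " -> " + match);
            } else {
                System.out.println("delete " + existingId);
            }
        }
        // Whatever is left in the map was never matched: insert it
        for (String title : incoming.values()) {
            System.out.println("insert " + title);
        }
    }
}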

From source file:cz.muni.fi.mir.mathmlunificator.MathMLUnificator.java

/**
 * <p>
 * Implementation of MathML unification. In the given W3C DOM represented
 * XML document find all maths nodes (see
 * {@link DocumentParser#findMathMLNodes(org.w3c.dom.Document)}) and
 * remember links to operator elements and other elements in
 * the {@link #nodesByDepth} data structure. Then gradually substitute them
 * to produce a series of formulae whose leaf elements are replaced by a
 * special unification symbol {@code &#x25CD;} (for Presentation
 * MathML, see {@link Constants#PMATHML_UNIFICATOR}) or {@code &#x25D0;}
 * (for Content MathML, see {@link Constants#CMATHML_UNIFICATOR}).
 * </p>
 * <p>
 * The resulting series of the original and unified MathML nodes is itself
 * encapsulated in a new element &lt;unified-math&gt; (see
 * {@link Constants#UNIFIED_MATHML_ROOT_ELEM}) in XML namespace
 * <code>http://mir.fi.muni.cz/mathml-unification/</code> (see
 * {@link Constants#UNIFIED_MATHML_NS}) and put to the place of the original
 * math element {@link Node} in the XML DOM representation the node is
 * attached to.
 * </p>
 *
 * @param mathNode MathML node to work on, attached to a W3C DOM XML
 * document representation.
 * @param workInPlace If <code>true</code>, given <code>mathNode</code> will
 * be modified in place; if <code>false</code>, <code>mathNode</code> will
 * not be modified and series of modified nodes will be returned.
 * @param operatorUnification If <code>true</code> unify also operator
 * nodes, otherwise keep operator nodes intact.
 * @return <code>null</code> if <code>workInPlace</code> is
 * <code>false</code>; otherwise collection of unified versions of the
 * <code>mathNode</code> with key of the {@link HashMap} describing order
 * (level of unification) of elements in the collection.
 */
private HashMap<Integer, Node> unifyMathMLNodeImpl(Node mathNode, boolean operatorUnification,
        boolean workInPlace) {

    if (mathNode.getOwnerDocument() == null) {
        String msg = "The given node is not attached to any document.";
        if (mathNode.getNodeType() == Node.DOCUMENT_NODE) {
            msg = "The given node is document itself. Call with mathNode.getDocumentElement() instead.";
        }
        throw new IllegalArgumentException(msg);
    }

    nodesByDepth = new HashMap<>();

    Node unifiedMathNode = null;
    HashMap<Integer, Node> unifiedNodesList = null;
    Document unifiedMathDoc = null;

    if (workInPlace) {
        // New element encapsulating the series of unified formulae.
        unifiedMathNode = mathNode.getOwnerDocument().createElementNS(UNIFIED_MATHML_NS,
                UNIFIED_MATHML_ROOT_ELEM);
        mathNode.getParentNode().replaceChild(unifiedMathNode, mathNode);
        unifiedMathNode.appendChild(mathNode.cloneNode(true));
    } else {
        unifiedNodesList = new HashMap<>();
        // Create a new separate DOM to work on, with an imported clone of the node given by the user
        unifiedMathDoc = DOMBuilder.createNewDocWithNodeClone(mathNode, true);
        mathNode = unifiedMathDoc.getDocumentElement();
    }

    // Parse XML subtree starting at mathNode and remember elements by their depth.
    rememberLevelsOfNodes(mathNode, operatorUnification);

    // Build series of formulae of level by level unified MathML.
    NodeLevel<Integer, Integer> level = new NodeLevel<>(getMaxMajorNodesLevel(), NUMOFMINORLEVELS);
    int levelAttrCounter = 0;
    Collection<Attr> maxLevelAttrs = new LinkedList<>();
    while (level.major > 0) {
        if (nodesByDepth.containsKey(level)) {
            if (unifyAtLevel(level)) {
                levelAttrCounter++;

                Node thisLevelMathNode = mathNode.cloneNode(true);
                Attr thisLevelAttr = thisLevelMathNode.getOwnerDocument().createAttributeNS(UNIFIED_MATHML_NS,
                        UNIFIED_MATHML_NS_PREFIX + ":" + UNIFIED_MATHML_LEVEL_ATTR);
                Attr maxLevelAttr = thisLevelMathNode.getOwnerDocument().createAttributeNS(UNIFIED_MATHML_NS,
                        UNIFIED_MATHML_NS_PREFIX + ":" + UNIFIED_MATHML_MAX_LEVEL_ATTR);
                maxLevelAttrs.add(maxLevelAttr);

                thisLevelAttr.setTextContent(String.valueOf(levelAttrCounter));

                ((Element) thisLevelMathNode).setAttributeNodeNS(thisLevelAttr);
                ((Element) thisLevelMathNode).setAttributeNodeNS(maxLevelAttr);

                if (workInPlace) {
                    unifiedMathNode.appendChild(thisLevelMathNode);
                } else {
                    // Create a new document for every node in the collection.
                    unifiedNodesList.put(levelAttrCounter,
                            DOMBuilder.cloneNodeToNewDoc(thisLevelMathNode, true));
                }
            }
        }
        level.minor--;
        if (level.minor <= 0) {
            level.major--;
            level.minor = NUMOFMINORLEVELS;
        }
    }
    for (Attr attr : maxLevelAttrs) {
        attr.setTextContent(String.valueOf(levelAttrCounter));
    }

    if (workInPlace) {
        return null;
    } else {
        for (Node node : unifiedNodesList.values()) {
            Attr maxLevelAttr = (Attr) node.getAttributes().getNamedItemNS(UNIFIED_MATHML_NS,
                    UNIFIED_MATHML_MAX_LEVEL_ATTR);
            if (maxLevelAttr != null) {
                maxLevelAttr.setTextContent(String.valueOf(levelAttrCounter));
            }
        }
        return unifiedNodesList;
    }

}
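
For context, a caller of a method with this contract would typically consume the returned map through HashMap.values(). The sketch below is illustrative only: MathMLUnificator.unifyMathMLNode is a hypothetical public wrapper around the private implementation above, and the class and method names are assumptions, not part of the source shown here.

    import java.util.HashMap;

    import org.w3c.dom.Node;

    public class UnificationCaller {
        public static void printUnifiedLevels(Node mathNode) {
            // Hypothetical wrapper; assumed to delegate to
            // unifyMathMLNodeImpl(mathNode, true /* operatorUnification */, false /* workInPlace */).
            HashMap<Integer, Node> unified = MathMLUnificator.unifyMathMLNode(mathNode, true, false);
            // Keys give the level of unification; values() iterates the unified nodes themselves.
            for (Node node : unified.values()) {
                System.out.println(node.getNodeName());
            }
        }
    }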

From source file:org.apache.openejb.config.DeploymentLoader.java

protected AppModule createAppModule(final File jarFile, final String jarPath) throws OpenEJBException {
    File appDir = unpack(jarFile);
    try {
        appDir = appDir.getCanonicalFile();
    } catch (final IOException e) {
        throw new OpenEJBException("Invalid application directory " + appDir.getAbsolutePath(), e);
    }

    final URL appUrl = getFileUrl(appDir);

    final String appId = appDir.getAbsolutePath();
    final ClassLoader tmpClassLoader = ClassLoaderUtil.createTempClassLoader(appId, new URL[] { appUrl },
            getOpenEJBClassLoader());

    final ResourceFinder finder = new ResourceFinder("", tmpClassLoader, appUrl);
    final Map<String, URL> appDescriptors = getDescriptors(finder);

    try {

        //
        // Find all the modules using either the application xml or by searching for all .jar, .war and .rar files.
        //

        final Map<String, URL> ejbModules = new LinkedHashMap<>();
        final Map<String, URL> clientModules = new LinkedHashMap<>();
        final Map<String, URL> resourceModules = new LinkedHashMap<>();
        final Map<String, URL> webModules = new LinkedHashMap<>();
        final Map<String, String> webContextRoots = new LinkedHashMap<>();

        final URL applicationXmlUrl = appDescriptors.get("application.xml");
        final List<URL> extraLibs = new ArrayList<>();

        final Application application;
        if (applicationXmlUrl != null) {

            application = unmarshal(applicationXmlUrl);
            for (final Module module : application.getModule()) {
                try {
                    if (module.getEjb() != null) {
                        final URL url = finder.find(module.getEjb().trim());
                        ejbModules.put(module.getEjb(), url);
                    } else if (module.getJava() != null) {
                        final URL url = finder.find(module.getJava().trim());
                        clientModules.put(module.getJava(), url);
                        extraLibs.add(url);
                    } else if (module.getConnector() != null) {
                        final URL url = finder.find(module.getConnector().trim());
                        resourceModules.put(module.getConnector(), url);
                    } else if (module.getWeb() != null) {
                        final URL url = finder.find(module.getWeb().getWebUri().trim());
                        webModules.put(module.getWeb().getWebUri(), url);
                        webContextRoots.put(module.getWeb().getWebUri(), module.getWeb().getContextRoot());
                    }
                } catch (final IOException e) {
                    throw new OpenEJBException("Invalid path to module " + e.getMessage(), e);
                }
            }
        } else {
            application = new Application();
            final HashMap<String, URL> files = new HashMap<>();
            scanDir(appDir, files, "", false);
            files.remove("META-INF/MANIFEST.MF");

            // todo we should also filter URLs here using DeploymentsResolver.loadFromClasspath

            createApplicationFromFiles(appId, tmpClassLoader, ejbModules, clientModules, resourceModules,
                    webModules, files);
        }

        final ClassLoaderConfigurer configurer = QuickJarsTxtParser
                .parse(new File(appDir, "META-INF/" + QuickJarsTxtParser.FILE_NAME));
        final Collection<URL> jarsXmlLib = new ArrayList<>();
        if (configurer != null) {
            for (final URL url : configurer.additionalURLs()) {
                try {
                    detectAndAddModuleToApplication(appId, tmpClassLoader, ejbModules, clientModules,
                            resourceModules, webModules,
                            new ImmutablePair<>(URLs.toFile(url).getAbsolutePath(), url));
                } catch (final Exception e) {
                    jarsXmlLib.add(url);
                }
            }
        }

        //
        // Create a class loader for the application
        //

        // lib/*
        if (application.getLibraryDirectory() == null) {
            application.setLibraryDirectory("lib/");
        } else {
            final String dir = application.getLibraryDirectory();
            if (!dir.endsWith("/")) {
                application.setLibraryDirectory(dir + "/");
            }
        }

        try {
            final Map<String, URL> libs = finder.getResourcesMap(application.getLibraryDirectory());
            extraLibs.addAll(libs.values());
        } catch (final IOException e) {
            logger.warning(
                    "Cannot load libs from '" + application.getLibraryDirectory() + "' : " + e.getMessage(), e);
        }

        // APP-INF/lib/*
        try {
            final Map<String, URL> libs = finder.getResourcesMap("APP-INF/lib/");
            extraLibs.addAll(libs.values());
        } catch (final IOException e) {
            logger.warning("Cannot load libs from 'APP-INF/lib/' : " + e.getMessage(), e);
        }

        // META-INF/lib/*
        try {
            final Map<String, URL> libs = finder.getResourcesMap("META-INF/lib/");
            extraLibs.addAll(libs.values());
        } catch (final IOException e) {
            logger.warning("Cannot load libs from 'META-INF/lib/' : " + e.getMessage(), e);
        }

        // All jars nested in the Resource Adapter
        final HashMap<String, URL> rarLibs = new HashMap<>();
        for (final Map.Entry<String, URL> entry : resourceModules.entrySet()) {
            try {
                // unpack the resource adapter archive
                File rarFile = URLs.toFile(entry.getValue());
                rarFile = unpack(rarFile);
                entry.setValue(rarFile.toURI().toURL());

                // Scan the unpacked resource adapter directory for nested jars.
                scanDir(rarFile, rarLibs, "");
            } catch (final MalformedURLException e) {
                throw new OpenEJBException("Malformed URL to app. " + e.getMessage(), e);
            }
        }
        for (final Iterator<Map.Entry<String, URL>> iterator = rarLibs.entrySet().iterator(); iterator
                .hasNext();) {
            // remove all non jars from the rarLibs so only jars reach the classpath
            final Map.Entry<String, URL> fileEntry = iterator.next();
            if (!fileEntry.getKey().endsWith(".jar")) {
                iterator.remove();
            }
        }

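        // Merge the URLs of every discovered module plus the extra libraries into one application classpath.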
        final List<URL> classPath = new ArrayList<>();
        classPath.addAll(ejbModules.values());
        classPath.addAll(clientModules.values());
        classPath.addAll(rarLibs.values());
        classPath.addAll(extraLibs);
        classPath.addAll(jarsXmlLib);
        final URL[] urls = classPath.toArray(new URL[classPath.size()]);

        SystemInstance.get().fireEvent(new BeforeDeploymentEvent(urls));

        final ClassLoader appClassLoader = ClassLoaderUtil.createTempClassLoader(appId, urls,
                getOpenEJBClassLoader());

        //
        // Create the AppModule and all nested module objects
        //

        final AppModule appModule = new AppModule(appClassLoader, appId, application, false);
        appModule.getAdditionalLibraries().addAll(extraLibs);
        appModule.getAltDDs().putAll(appDescriptors);
        appModule.getWatchedResources().add(appId);
        if (applicationXmlUrl != null) {
            appModule.getWatchedResources().add(URLs.toFilePath(applicationXmlUrl));
        }

        // EJB modules
        for (final String moduleName : ejbModules.keySet()) {
            try {
                URL ejbUrl = ejbModules.get(moduleName);
                // we should try to use a reference to the temp classloader
                if (ClassLoaderUtil.isUrlCached(appModule.getJarLocation(), ejbUrl)) {
                    try {
                        ejbUrl = ClassLoaderUtil.getUrlCachedName(appModule.getJarLocation(), ejbUrl).toURI()
                                .toURL();

                    } catch (final MalformedURLException ignore) {
                        // no-op
                    }
                }
                final File ejbFile = URLs.toFile(ejbUrl);
                final String absolutePath = ejbFile.getAbsolutePath();

                final EjbModule ejbModule = createEjbModule(ejbUrl, absolutePath, appClassLoader);
                appModule.getEjbModules().add(ejbModule);
            } catch (final OpenEJBException e) {
                logger.error("Unable to load EJBs from EAR: " + appId + ", module: " + moduleName
                        + ". Exception: " + e.getMessage(), e);
            }
        }

        // Application Client Modules
        for (final String moduleName : clientModules.keySet()) {
            try {
                URL clientUrl = clientModules.get(moduleName);
                // we should try to use a reference to the temp classloader
                if (ClassLoaderUtil.isUrlCached(appModule.getJarLocation(), clientUrl)) {
                    try {
                        clientUrl = ClassLoaderUtil.getUrlCachedName(appModule.getJarLocation(), clientUrl)
                                .toURI().toURL();

                    } catch (final MalformedURLException ignore) {
                        // no-op
                    }
                }
                final File clientFile = URLs.toFile(clientUrl);
                final String absolutePath = clientFile.getAbsolutePath();

                final ClientModule clientModule = createClientModule(clientUrl, absolutePath, appClassLoader,
                        null);

                appModule.getClientModules().add(clientModule);
            } catch (final Exception e) {
                logger.error("Unable to load App Client from EAR: " + appId + ", module: " + moduleName
                        + ". Exception: " + e.getMessage(), e);
            }
        }

        // Resource modules
        for (final String moduleName : resourceModules.keySet()) {
            try {
                URL rarUrl = resourceModules.get(moduleName);
                // we should try to use a reference to the temp classloader
                if (ClassLoaderUtil.isUrlCached(appModule.getJarLocation(), rarUrl)) {
                    try {
                        rarUrl = ClassLoaderUtil.getUrlCachedName(appModule.getJarLocation(), rarUrl).toURI()
                                .toURL();

                    } catch (final MalformedURLException ignore) {
                        // no-op
                    }
                }
                final ConnectorModule connectorModule = createConnectorModule(appId, URLs.toFilePath(rarUrl),
                        appClassLoader, moduleName);

                appModule.getConnectorModules().add(connectorModule);
            } catch (final OpenEJBException e) {
                logger.error("Unable to load RAR: " + appId + ", module: " + moduleName + ". Exception: "
                        + e.getMessage(), e);
            }
        }

        // Web modules
        for (final String moduleName : webModules.keySet()) {
            try {
                final URL warUrl = webModules.get(moduleName);
                addWebModule(appModule, warUrl, appClassLoader, webContextRoots.get(moduleName), null);
            } catch (final OpenEJBException e) {
                logger.error("Unable to load WAR: " + appId + ", module: " + moduleName + ". Exception: "
                        + e.getMessage(), e);
            }
        }

        addBeansXmls(appModule);

        // Persistence Units
        final Properties p = new Properties();
        p.put(appModule.getModuleId(), appModule.getJarLocation());
        final FileUtils base = new FileUtils(appModule.getModuleId(), appModule.getModuleId(), p);
        final List<URL> filteredUrls = new ArrayList<>();
        DeploymentsResolver.loadFromClasspath(base, filteredUrls, appModule.getClassLoader());
        addPersistenceUnits(appModule, filteredUrls.toArray(new URL[filteredUrls.size()]));

        final Object pXmls = appModule.getAltDDs().get("persistence.xml");
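        // A "persistence.xml" alt-DD may hold a Collection of URLs; those located under each webapp are matched below.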

        for (final WebModule webModule : appModule.getWebModules()) {
            final List<URL> foundRootUrls = new ArrayList<>();
            final List<URL> scannableUrls = webModule.getScannableUrls();
            for (final URL url : scannableUrls) {
                if (!addPersistenceUnits(appModule, url).isEmpty()) {
                    foundRootUrls.add(url);
                }
            }

            if (pXmls != null && Collection.class.isInstance(pXmls)) {
                final File webapp = webModule.getFile();
                if (webapp == null) {
                    continue;
                }
                final String webappAbsolutePath = webapp.getAbsolutePath();

                final Collection<URL> list = Collection.class.cast(pXmls);
                for (final URL url : list) {
                    try {
                        final File file = URLs.toFile(url);
                        if (file.getAbsolutePath().startsWith(webappAbsolutePath)) {
                            foundRootUrls.add(url);
                        }
                    } catch (final IllegalArgumentException iae) {
                        // no-op
                    }
                }
            }

            webModule.getAltDDs().put(EAR_WEBAPP_PERSISTENCE_XML_JARS, foundRootUrls);
        }

        for (final DeploymentModule module : appModule.getDeploymentModule()) {
            module.setStandaloneModule(false);
        }

        return appModule;

    } catch (final OpenEJBException e) {
        logger.error("Unable to load EAR: " + jarPath, e);
        throw e;
    }
}
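
The recurring pattern in this example is flattening Map values into plain collections: extraLibs.addAll(libs.values()) for each lib directory, then the classPath.addAll(...) calls over every module map before building the URL[]. Below is a minimal, self-contained sketch of that pattern; the module names and file URLs are made up for illustration.

    import java.net.MalformedURLException;
    import java.net.URL;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ClasspathMerge {
        public static void main(String[] args) throws MalformedURLException {
            // Hypothetical module maps, keyed by module name as in createAppModule.
            Map<String, URL> ejbModules = new HashMap<>();
            ejbModules.put("orders-ejb.jar", new URL("file:///tmp/app/orders-ejb.jar"));
            Map<String, URL> rarLibs = new HashMap<>();
            rarLibs.put("driver.jar", new URL("file:///tmp/app/rar/driver.jar"));

            // Only the URLs matter for the classpath, so merge the values() views of each map.
            List<URL> classPath = new ArrayList<>();
            classPath.addAll(ejbModules.values());
            classPath.addAll(rarLibs.values());

            URL[] urls = classPath.toArray(new URL[0]);
            System.out.println(urls.length + " classpath entries");
        }
    }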