Example usage for java.util LinkedHashMap keySet

List of usage examples for java.util LinkedHashMap keySet

Introduction

On this page you can find usage examples of java.util.LinkedHashMap.keySet.

Prototype

public Set<K> keySet() 

Source Link

Document

Returns a Set view of the keys contained in this map.

Usage

From source file:com.atinternet.tracker.Builder.java

/**
 * Prepares the hit query string: formats every buffered parameter, performs
 * plugin substitution, applies user-id privacy rules, percent-encodes values
 * when requested and merges values that share the same key.
 *
 * @return LinkedHashMap mapping each parameter key to a pair of
 *         { source Param, formatted "key=value" sub-query string }
 */
private LinkedHashMap<String, Object[]> prepareQuery() {
    LinkedHashMap<String, Object[]> formattedParameters = new LinkedHashMap<String, Object[]>();

    ArrayList<Param> completeBuffer = new ArrayList<Param>() {
        {
            addAll(persistentParams);
            addAll(volatileParams);
        }
    };

    ArrayList<Param> params = organizeParameters(completeBuffer);

    for (Param p : params) {
        String value = p.getValue().execute();
        String key = p.getKey();

        HashMap<String, String> plugins = PluginParam.get(tracker);
        if (plugins.containsKey(key)) {
            // Plugin parameter: replace the value with the plugin's JSON response
            // and re-key it as the JSON hit parameter.
            String pluginClass = plugins.get(key);
            try {
                Plugin plugin = (Plugin) Class.forName(pluginClass).newInstance();
                plugin.execute(tracker);
                value = plugin.getResponse();
                p.setType(Param.Type.JSON);
                key = Hit.HitParam.JSON.stringValue();
            } catch (Exception e) {
                e.printStackTrace();
                value = null;
            }
        } else if (key.equals(Hit.HitParam.UserId.stringValue())) {
            // Honor "do not track" first, then the optional user-id hashing setting.
            if (TechnicalContext.doNotTrackEnabled(Tracker.getAppContext())) {
                value = OPT_OUT;
            } else if (((Boolean) configuration.get(TrackerConfigurationKeys.HASH_USER_ID))) {
                value = Tool.SHA_256(value);
            }
        }

        // A closure whose result parses as JSON is promoted to a JSON parameter.
        if (p.getType() == Param.Type.Closure && Tool.parseJSON(value) != null) {
            p.setType(Param.Type.JSON);
        }

        if (value != null) {
            // Referrer processing: '&' would split the query string, and angle
            // brackets are stripped defensively.
            if (key.equals(Hit.HitParam.Referrer.stringValue())) {
                value = value.replace("&", "$").replaceAll("[<>]", "");
            }

            if (p.getOptions() != null && p.getOptions().isEncode()) {
                value = Tool.percentEncode(value);
                p.getOptions().setSeparator(Tool.percentEncode(p.getOptions().getSeparator()));
            }

            // Direct map lookup replaces the original O(n) scan over a keySet copy:
            // a non-null entry means this key was already formatted. Iteration order
            // of LinkedHashMap guarantees this is the same entry the scan found.
            Object[] duplicateEntry = formattedParameters.get(key);

            if (duplicateEntry != null) {
                Param duplicateParam = (Param) duplicateEntry[0];
                String[] subQuery = ((String) duplicateEntry[1]).split("=");
                String str = subQuery[0] + "=";
                String val = subQuery[1];

                if (p.getType() == Param.Type.JSON) {
                    // Merge the two JSON values: object+object or array+array.
                    Object json = Tool.parseJSON(Tool.percentDecode(val));
                    Object newJson = Tool.parseJSON(Tool.percentDecode(value));

                    if (json instanceof JSONObject) {
                        Map dictionary = Tool.toMap((JSONObject) json);

                        if (newJson instanceof JSONObject) {
                            Map newDictionary = Tool.toMap((JSONObject) newJson);
                            dictionary.putAll(newDictionary);

                            JSONObject jsonData = new JSONObject(dictionary);
                            formattedParameters.put(key, new Object[] { duplicateParam,
                                    makeSubQuery(key, Tool.percentEncode(jsonData.toString())) });
                        } else {
                            Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                    "Couldn't append value to a dictionary");
                        }
                    } else if (json instanceof JSONArray) {
                        try {
                            ArrayList<Object> array = new ArrayList<Object>();
                            JSONArray jArray = (JSONArray) json;
                            for (int i = 0; i < jArray.length(); i++) {
                                array.add(jArray.get(i).toString());
                            }
                            if (newJson instanceof JSONArray) {
                                jArray = (JSONArray) newJson;
                                for (int i = 0; i < jArray.length(); i++) {
                                    array.add(jArray.get(i).toString());
                                }
                                JSONObject jsonData = new JSONObject(array.toString());
                                formattedParameters.put(key, new Object[] { duplicateParam,
                                        makeSubQuery(key, Tool.percentEncode(jsonData.toString())) });
                            } else {
                                Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                        "Couldn't append value to an array");
                            }
                        } catch (JSONException e) {
                            Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                    "Couldn't append value to an array");
                        }
                    } else {
                        Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                "Couldn't append value to a JSON Object");
                    }
                } else if (duplicateParam.getType() == Param.Type.JSON) {
                    // A plain value cannot be appended to an existing JSON value.
                    Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                            "Couldn't append value to a JSON Object");
                } else {
                    // Plain duplicate: append with the parameter's separator.
                    formattedParameters.put(key,
                            new Object[] { duplicateParam, str + val + p.getOptions().getSeparator() + value });
                }
            } else {
                formattedParameters.put(key, new Object[] { p, makeSubQuery(key, value) });
            }
        }
    }
    return formattedParameters;
}

From source file:org.orcid.frontend.web.controllers.PublicProfileController.java

/**
 * Renders the public (unauthenticated) view of an ORCID profile.
 *
 * @param request the current HTTP request (used to clear cached work results)
 * @param pageNo requested page number (currently unused here)
 * @param v view version flag (currently unused here)
 * @param maxResults maximum results per page (currently unused here)
 * @param orcid the ORCID identifier extracted from the path
 * @return the public-profile view, or the 404 view when the profile is unknown
 */
@RequestMapping(value = "/{orcid:(?:\\d{4}-){3,}\\d{3}[\\dX]}")
public ModelAndView publicPreview(HttpServletRequest request,
        @RequestParam(value = "page", defaultValue = "1") int pageNo,
        @RequestParam(value = "v", defaultValue = "0") int v,
        @RequestParam(value = "maxResults", defaultValue = "15") int maxResults,
        @PathVariable("orcid") String orcid) {

    OrcidProfile profile = orcidProfileCacheManager.retrievePublic(orcid);

    // Unknown or non-public ORCID iD: render the 404 page.
    if (profile == null) {
        return new ModelAndView("error-404");
    }

    ModelAndView mav = new ModelAndView("public_profile_v3");
    mav.addObject("isPublicProfile", true);

    boolean isProfileEmpty = true;

    request.getSession().removeAttribute(PUBLIC_WORKS_RESULTS_ATTRIBUTE);

    mav.addObject("profile", profile);

    String countryName = getCountryName(profile, true);
    if (!StringUtil.isBlank(countryName))
        mav.addObject("countryName", countryName);

    LinkedHashMap<Long, WorkForm> minimizedWorksMap = new LinkedHashMap<>();
    LinkedHashMap<Long, Affiliation> affiliationMap = new LinkedHashMap<>();
    LinkedHashMap<Long, Funding> fundingMap = new LinkedHashMap<>();
    LinkedHashMap<Long, PeerReview> peerReviewMap = new LinkedHashMap<>();

    // profile is known non-null after the early return above; only the bio
    // fields themselves still need checking.
    if (profile.getOrcidBio() != null && profile.getOrcidBio().getBiography() != null
            && StringUtils.isNotBlank(profile.getOrcidBio().getBiography().getContent())) {
        isProfileEmpty = false;
    }

    if (profile.isLocked()) {
        mav.addObject("locked", true);
    } else if (profile.getOrcidDeprecated() != null) {
        // Deprecated record: point the view at the primary record instead.
        String primaryRecord = profile.getOrcidDeprecated().getPrimaryRecord().getOrcidIdentifier().getPath();
        mav.addObject("deprecated", true);
        mav.addObject("primaryRecord", primaryRecord);
    } else {
        minimizedWorksMap = minimizedWorksMap(orcid);
        if (minimizedWorksMap.size() > 0) {
            mav.addObject("works", minimizedWorksMap.values());
            isProfileEmpty = false;
        } else {
            mav.addObject("worksEmpty", true);
        }

        affiliationMap = affiliationMap(orcid);
        if (affiliationMap.size() > 0) {
            mav.addObject("affilations", affiliationMap.values());
            isProfileEmpty = false;
        } else {
            mav.addObject("affiliationsEmpty", true);
        }

        fundingMap = fundingMap(orcid);
        if (fundingMap.size() > 0)
            isProfileEmpty = false;
        else {
            mav.addObject("fundingEmpty", true);
        }

        peerReviewMap = peerReviewMap(orcid);
        if (peerReviewMap.size() > 0) {
            mav.addObject("peerReviews", peerReviewMap.values());
            isProfileEmpty = false;
        } else {
            mav.addObject("peerReviewsEmpty", true);
        }

    }
    ObjectMapper mapper = new ObjectMapper();

    try {
        // Expose the id sets of each section as escaped JSON for the page scripts.
        String worksIdsJson = mapper.writeValueAsString(minimizedWorksMap.keySet());
        String affiliationIdsJson = mapper.writeValueAsString(affiliationMap.keySet());
        String fundingIdsJson = mapper.writeValueAsString(fundingMap.keySet());
        String peerReviewIdsJson = mapper.writeValueAsString(peerReviewMap.keySet());
        mav.addObject("workIdsJson", StringEscapeUtils.escapeEcmaScript(worksIdsJson));
        mav.addObject("affiliationIdsJson", StringEscapeUtils.escapeEcmaScript(affiliationIdsJson));
        mav.addObject("fundingIdsJson", StringEscapeUtils.escapeEcmaScript(fundingIdsJson));
        mav.addObject("peerReviewIdsJson", StringEscapeUtils.escapeEcmaScript(peerReviewIdsJson));
        mav.addObject("isProfileEmpty", isProfileEmpty);

        // Build the display name: credit name when present, otherwise
        // "<given names> <family name>".
        String creditName = "";
        if (profile.getOrcidBio() != null && profile.getOrcidBio().getPersonalDetails() != null) {
            PersonalDetails personalDetails = profile.getOrcidBio().getPersonalDetails();
            if (personalDetails.getCreditName() != null
                    && !PojoUtil.isEmpty(personalDetails.getCreditName().getContent()))
                creditName = profile.getOrcidBio().getPersonalDetails().getCreditName().getContent();
            else {
                if (personalDetails.getGivenNames() != null
                        && !PojoUtil.isEmpty(personalDetails.getGivenNames().getContent()))
                    creditName += personalDetails.getGivenNames().getContent();
                if (personalDetails.getFamilyName() != null
                        && !PojoUtil.isEmpty(personalDetails.getFamilyName().getContent()))
                    creditName += " " + personalDetails.getFamilyName().getContent();
            }
        }
        if (!PojoUtil.isEmpty(creditName)) {
            // <Published Name> (<ORCID iD>) - ORCID | Connecting Research
            // and Researchers
            mav.addObject("title", getMessage("layout.public-layout.title", creditName.trim(), orcid));
        }

    } catch (IOException e) {
        // JsonGenerationException and JsonMappingException both extend
        // IOException; all three were handled identically before.
        e.printStackTrace();
    }

    // Ask crawlers not to index unreviewed profiles that look auto-created.
    if (!profile.isReviewed()) {
        if (isProfileValidForIndex(profile)) {
            if (profile.isLocked() || profile.getCountTokens() == 0
                    || (!CreationMethod.WEBSITE.equals(profile.getOrcidHistory().getCreationMethod())
                            && !CreationMethod.DIRECT.equals(profile.getOrcidHistory().getCreationMethod()))) {
                mav.addObject("noIndex", true);
            }
        } else {
            mav.addObject("noIndex", true);
        }
    }

    return mav;
}

From source file:com.zenkey.net.prowser.Request.java

/**************************************************************************
 * Returns a <code>java.util.Map</code> of the parameters contained
 * within the query string of the URI for this <code>Request</code>. The
 * keys of the map are parameter names; the values of the map are arrays of
 * parameter values associated with each parameter name.
 * /*  w w w.j  av a2s . c  o  m*/
 * @return A <code>java.util.Map</code> of the URI's query string
 *         parameters, or <code>null</code> if the URI has no query
 *         string.
 */
private Map<String, String[]> getQueryMap() {

    // If the URI is null, don't do anything
    if (uri == null)
        return null;

    // Declare maps for temp work and the return value
    LinkedHashMap<String, ArrayList<String>> tempQueryMap = null;
    LinkedHashMap<String, String[]> queryMap = null;

    // If the URI has a query string, build a temp map of its parameters
    String rawQueryString = uri.getRawQuery();
    if (rawQueryString != null) {

        // Parse out the name value pairs
        String[] queryPairsArray = rawQueryString.split("&");

        // Create a temp map of query parameters
        tempQueryMap = new LinkedHashMap<String, ArrayList<String>>();
        String name = null;
        String value = null;
        for (String rawQueryPairString : queryPairsArray) {

            // Split the query param name-value pair into a name and value
            String[] queryPairArray = rawQueryPairString.split("=", 2);
            name = queryPairArray[0];
            if (queryPairArray.length > 1)
                value = queryPairArray[1];
            else
                value = EMPTY_STRING;
            if (value == null)
                value = EMPTY_STRING;

            // If the query parameter doesn't exist, create it
            ArrayList<String> values = tempQueryMap.get(name);
            if (values == null) {
                values = new ArrayList<String>();
                tempQueryMap.put(name, values);
            }

            // Add the specified value to the list of values for the param
            values.add(value);
        }

        // Build the query parameter map to return
        queryMap = new LinkedHashMap<String, String[]>();
        for (String tempName : tempQueryMap.keySet()) {
            String[] tempValues = tempQueryMap.get(tempName)
                    .toArray(new String[tempQueryMap.get(tempName).size()]);
            queryMap.put(tempName, tempValues);
        }
    }

    // Return a map of the query's parameters (or null if URI has no query)
    return queryMap;
}

From source file:jp.co.iidev.subartifact1.divider1.mojo.ArtifactDividerMojo.java

/**
 * Maven entry point: builds the compile-time classpath for the project
 * artifact, plans its division into sub-artifacts and generates the
 * corresponding POM set.
 *
 * @throws MojoExecutionException when the division or generation fails
 */
public void execute() throws MojoExecutionException {

    Artifact projArt = project.getArtifact();
    Map<Dependency, Artifact> artifactsForDep = Maps.newHashMap();

    {
        // Match each declared dependency to its resolved artifact by
        // groupId / artifactId / classifier.
        List<Dependency> dep = project.getDependencies();
        Set<Artifact> arts = project.getDependencyArtifacts();

        for (Dependency dx : dep) {
            String grpid = dx.getGroupId();
            String artid = dx.getArtifactId();
            String clsf = dx.getClassifier();

            for (Artifact art : arts) {
                boolean a = StringUtils.equals(art.getArtifactId(), artid);
                boolean g = StringUtils.equals(art.getGroupId(), grpid);
                boolean c = StringUtils.equals(art.getClassifier(), clsf);

                if (a && g && c) {
                    artifactsForDep.put(dx, art);
                }
            }
        }
    }

    {
        String version = project.getVersion();
        String groupId = project.getGroupId();

        // Classpath entries in resolution order: rt.jar, the project jar,
        // then every resolved dependency jar.
        LinkedHashMap<File, Dependency> compiletimeClasspath = Maps.newLinkedHashMap();

        // Synthetic dependency standing in for the JRE runtime jar.
        File rtjar = Paths.get(System.getProperty("java.home"), "lib", "rt.jar").toFile();
        Dependency rtjar_dummyDep = new Dependency();
        {
            rtjar_dummyDep.setGroupId(SystemUtils.JAVA_VENDOR.replace(" ", "."));
            rtjar_dummyDep.setVersion(SystemUtils.JAVA_RUNTIME_VERSION);
            rtjar_dummyDep.setArtifactId(SystemUtils.JAVA_RUNTIME_NAME);
        }

        // Dependency describing the project's own artifact (the jar to divide).
        File targetJar = project.getArtifact().getFile();
        Dependency targetJarDep = new Dependency();
        {
            targetJarDep.setArtifactId(project.getArtifactId());
            targetJarDep.setGroupId(project.getGroupId());
            targetJarDep.setVersion(project.getVersion());
            targetJarDep.setClassifier(projArt.getClassifier());
        }

        compiletimeClasspath.put(rtjar, rtjar_dummyDep);
        compiletimeClasspath.put(targetJar, targetJarDep);
        artifactsForDep.forEach((d, a) -> {
            compiletimeClasspath.put(a.getFile(), d);
        });

        // Bridge the divider's logging abstraction onto SLF4J.
        LoggableFactory lf = new LoggableFactory() {
            @Override
            public Loggable createLoggable(Class cx) {
                return new Loggable() {
                    Logger l = LoggerFactory.getLogger(cx);

                    @Override
                    public void warn(String text) {
                        l.warn(text);
                    }

                    @Override
                    public void info(String text) {
                        l.info(text);
                    }

                    @Override
                    public void error(String text) {
                        l.error(text);
                    }

                    @Override
                    public void debug(String text) {
                        l.debug(text);
                    }
                };
            }
        };
        try {
            LinkedHashMap<SubArtifactDefinition, SubArtifactDeployment> buildPlan = new DivisionExecutor(
                    lf.createLoggable(DivisionExecutor.class)).planDivision(targetJar, rootSubArtifactId,
                            Arrays.asList(subartifacts == null ? new SubArtifact[0] : subartifacts),
                            compiletimeClasspath, not(in(ImmutableSet.of(rtjar, targetJar))),
                            defaultRootTransitivePropagations, defaultRootSourceReferencePropagations,
                            defaultSubartifactSourceReferencePropagations, globalReferencePropagations, lf);

            // Jars usable as dependencies of the generated POMs: everything on
            // the classpath except the JRE jar and the jar being divided.
            Set<File> usableJar = Sets.newLinkedHashSet(compiletimeClasspath.keySet());
            usableJar.remove(targetJar);
            usableJar.remove(rtjar);

            // Register extra dependencies under unique placeholder jar names.
            int ix = 0;
            for (SubArtifact s : subartifacts) {
                for (Dependency d : s.getExtraDependencies()) {
                    buildPlan.get(s).getJarDeps().put(new File("x_xx_xyx_duMmy" + (ix++) + ".jar"), d);
                }
            }

            new PomSetGenerator(project.getBasedir().toPath().resolve("pom.xml"), outputDirectory.toPath(),
                    templateOutputDirectory.toPath(), lf).generate(groupId, version,
                            this.subArtifactsParentArtifactId, buildPlan);
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            throw new MojoExecutionException("division process error", e);
        }
    }

}

From source file:com.vmware.bdd.cli.commands.ClusterCommands.java

/**
 * Pretty-prints cluster-level information followed by its node group table.
 *
 * @param cluster the cluster to display
 * @param detail  whether to print detailed node group and user management info
 */
private void prettyOutputClusterInfo(ClusterRead cluster, boolean detail) {
    Map<String, Map<String, String>> infraCfg = cluster.getInfrastructure_config();
    Map<String, String> userMgmtCfg = null;
    if (MapUtils.isNotEmpty(infraCfg)) {
        userMgmtCfg = infraCfg.get(UserMgmtConstants.LDAP_USER_MANAGEMENT);
    }

    TopologyType topology = cluster.getTopologyPolicy();
    printSeperator();

    // Cluster-level parameters, in display order (LinkedHashMap preserves it).
    LinkedHashMap<String, String> clusterParams = new LinkedHashMap<String, String>();
    clusterParams.put("CLUSTER NAME", cluster.getName());
    clusterParams.put("AGENT VERSION", cluster.getVersion());
    clusterParams.put("APP MANAGER", cluster.getAppManager());
    clusterParams.put("DISTRO", cluster.getDistro());
    clusterParams.put("NODE TEMPLATE", cluster.getTemplateName());
    String cloneType = cluster.getClusterCloneType();
    if (!CommandsUtils.isBlank(cloneType)) {
        clusterParams.put("CLUSTER CLONE TYPE", cloneType.toUpperCase());
    }
    if (topology != null && topology != TopologyType.NONE) {
        clusterParams.put("TOPOLOGY", topology.toString());
    }
    clusterParams.put("IO SHARES", cluster.getIoShares() == null ? "" : cluster.getIoShares().toString());
    clusterParams.put("STATUS", cluster.getStatus() == null ? "" : cluster.getStatus().toString());
    if (cluster.getExternalHDFS() != null && !cluster.getExternalHDFS().isEmpty()) {
        clusterParams.put("EXTERNAL HDFS", cluster.getExternalHDFS());
    }
    // Burst out
    if (!CommandsUtils.isBlank(cluster.getExternalMapReduce())) {
        clusterParams.put("EXTERNAL MAPREDUCE", cluster.getExternalMapReduce());
    }

    clusterParams.put("AD/LDAP ENABLED", Boolean.toString(MapUtils.isNotEmpty(userMgmtCfg)));

    // Iterate entries (not keySet + get) to avoid a second lookup per key.
    for (Map.Entry<String, String> entry : clusterParams.entrySet()) {
        System.out.printf(Constants.OUTPUT_INDENT + "%-26s:" + Constants.OUTPUT_INDENT + "%s\n",
                entry.getKey(), entry.getValue());
    }
    System.out.println();

    // Node group table: column header -> getter chain used to extract the cell.
    LinkedHashMap<String, List<String>> ngColumnNamesWithGetMethodNames = new LinkedHashMap<String, List<String>>();
    List<NodeGroupRead> nodegroups = cluster.getNodeGroups();
    if (nodegroups != null) {
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_GROUP_NAME, Arrays.asList("getName"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_ROLES, Arrays.asList("getRoles"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_INSTANCE,
                Arrays.asList("getInstanceNum"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_CPU, Arrays.asList("getCpuNum"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_MEM,
                Arrays.asList("getMemCapacityMB"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_TYPE,
                Arrays.asList("getStorage", "getType"));
        ngColumnNamesWithGetMethodNames.put(Constants.FORMAT_TABLE_COLUMN_SIZE,
                Arrays.asList("getStorage", "getSizeGB"));

        try {
            if (detail) {
                prettyOutputDetailNodegroups(topology, ngColumnNamesWithGetMethodNames, nodegroups);

            } else
                CommandsUtils.printInTableFormat(ngColumnNamesWithGetMethodNames, nodegroups.toArray(),
                        Constants.OUTPUT_INDENT);
        } catch (Exception e) {
            CommandsUtils.printCmdFailure(Constants.OUTPUT_OBJECT_CLUSTER, Constants.OUTPUT_OP_LIST,
                    Constants.OUTPUT_OP_RESULT_FAIL, e.getMessage());
        }

        if (detail) {
            prettyOutputDetailedUserMgmt(cluster.getName(), userMgmtCfg);
        }
    }
}

From source file:org.openmrs.module.chica.DynamicFormAccess.java

/**
 * Consume the information populated on a form.
 *
 * @param formInstance FormInstance object containing the relevant form information.
 * @param patient The patient the form belongs to.
 * @param locationTagId The location tag identifier.
 * @param encounterId The associated encounter identifier.
 * @param fieldMap Map of field name to Field object.
 * @param formFieldToValue Map of FormField to field value.
 * @param parameterHandler The parameter handler used for rule execution.
 * @param form The form containing the data to consume.
 */
private void consume(FormInstance formInstance, Patient patient, Integer locationTagId, Integer encounterId,
        HashMap<String, Field> fieldMap, LinkedHashMap<FormField, String> formFieldToValue,
        ParameterHandler parameterHandler, Form form) {
    ATDService atdService = Context.getService(ATDService.class);
    Integer locationId = formInstance.getLocationId();
    PatientState patientState = org.openmrs.module.atd.util.Util
            .getProducePatientStateByFormInstanceAction(formInstance);
    Integer sessionId = patientState.getSessionId();
    FieldType prioritizedMergeType = getFieldType("Prioritized Merge Field");

    String mode = "CONSUME";
    // field name -> (rule token name -> Rule); order preserved so rules run
    // in form-field order.
    LinkedHashMap<String, LinkedHashMap<String, Rule>> rulesToRunByField = new LinkedHashMap<String, LinkedHashMap<String, Rule>>();
    LogicService logicService = Context.getLogicService();
    FormDatasource formDatasource = (FormDatasource) logicService.getLogicDataSource("form");
    try {
        formInstance = formDatasource.setFormFields(fieldMap, formInstance, locationTagId);
    } catch (Exception e) {
        this.log.error("Error setting form fields to be consumed");
        this.log.error(e.getMessage());
        this.log.error(Util.getStackTrace(e));
        return;
    }

    if (formInstance == null) {
        log.error("Form instance came back null");
        return;
    }

    Encounter encounter = Context.getEncounterService().getEncounter(encounterId);
    locationId = encounter.getLocation().getLocationId();
    Location location = Context.getLocationService().getLocation(locationId);
    String locationName = null;
    if (location != null) {
        locationName = location.getName();
    }

    // First pass: build the rule set and its parameters for each field.
    List<Field> fieldsToAdd = new ArrayList<Field>();
    Map<Integer, PatientATD> fieldIdToPatientAtdMap = new HashMap<Integer, PatientATD>();
    for (FormField currField : formFieldToValue.keySet()) {
        org.openmrs.Field field = currField.getField();
        String fieldName = field.getName();
        Concept currConcept = field.getConcept();
        String ruleName = field.getDefaultValue();
        LinkedHashMap<String, Rule> rulesToRun = null;
        Map<String, Object> parameters = new HashMap<String, Object>();

        FormField parentField = currField.getParent();

        //if parent field is not null look at parent
        //field for rule to execute
        Rule rule = null;
        if (parentField != null) {
            FieldType currFieldType = field.getFieldType();

            if (currFieldType.equals(prioritizedMergeType)) {
                ruleName = null;//no rule to execute unless patientATD finds one   
            }

            Integer fieldId = parentField.getField().getFieldId();
            PatientATD patientATD = fieldIdToPatientAtdMap.get(fieldId);
            if (patientATD == null) {
                patientATD = atdService.getPatientATD(formInstance, fieldId);
            }

            if (patientATD != null) {
                rule = patientATD.getRule();
                ruleName = rule.getTokenName();
                fieldIdToPatientAtdMap.put(fieldId, patientATD);
            }
        }

        // Rules are keyed by the parent field's name when one exists,
        // otherwise by this field's own name.
        String lookupFieldName = null;
        Integer formFieldId = null; // DWE CHICA-437 Get the form field id here so that it can be used to determine if obs records should be voided when rules are evaluated
        if (parentField != null) {
            lookupFieldName = parentField.getField().getName();
            formFieldId = parentField.getFormFieldId();
        } else {
            lookupFieldName = fieldName;
            formFieldId = currField.getFormFieldId();
        }

        if (ruleName != null) {
            rulesToRun = rulesToRunByField.get(lookupFieldName);
            if (rulesToRun == null) {
                rulesToRun = new LinkedHashMap<String, Rule>();
                rulesToRunByField.put(lookupFieldName, rulesToRun);
            }

            Rule ruleLookup = rulesToRun.get(ruleName);
            if (ruleLookup == null) {
                if (rule != null) {
                    ruleLookup = rule;
                } else {
                    ruleLookup = new Rule();
                    ruleLookup.setTokenName(ruleName);
                }
                ruleLookup.setParameters(parameters);
                rulesToRun.put(ruleName, ruleLookup);
            } else {
                // Reuse the existing rule's parameter map so the values
                // added below accumulate on the shared rule instance.
                parameters = ruleLookup.getParameters();
            }
        }

        //------------start set rule parameters
        parameters.put("sessionId", sessionId);
        parameters.put("formInstance", formInstance);
        parameters.put("locationTagId", locationTagId);
        parameters.put("locationId", locationId);
        parameters.put("location", locationName);
        parameters.put("mode", mode);
        parameters.put("encounterId", encounterId);
        if (rule != null) {
            parameters.put("ruleId", rule.getRuleId());
        }

        if (currConcept != null) {
            try {
                String elementString = ((ConceptName) currConcept.getNames().toArray()[0]).getName();
                parameters.put("concept", elementString);
            } catch (Exception e) {
                parameters.put("concept", null);
            }
        } else {
            parameters.put("concept", null);
        }

        if (fieldName != null) {
            parameters.put("fieldName", lookupFieldName);
            String value = formFieldToValue.get(currField);
            parameters.put(lookupFieldName, value);
            Field saveField = new Field();
            saveField.setId(fieldName);
            saveField.setValue(value);
            fieldsToAdd.add(saveField);
        }

        // DWE CHICA-437 
        if (formFieldId != null) {
            parameters.put("formFieldId", formFieldId);
        }

        //----------end set rule parameters
    }

    // Second pass: for child fields, register "childN" parameters on the
    // parent field's rule so the rule can see each child's value.
    HashMap<String, Integer> childIndex = new HashMap<String, Integer>();

    for (FormField currField : formFieldToValue.keySet()) {
        LinkedHashMap<String, Rule> rulesToRun = null;
        Map<String, Object> parameters = new HashMap<String, Object>();
        FormField parentField = currField.getParent();

        //look for parentField
        if (parentField != null) {
            FieldType parentFieldType = parentField.getField().getFieldType();

            String parentRuleName = parentField.getField().getDefaultValue();
            String parentFieldName = parentField.getField().getName();

            if (parentFieldType.equals(prioritizedMergeType)) {
                parentRuleName = null;//no rule to execute unless patientATD finds one   
            }

            Integer fieldId = parentField.getField().getFieldId();
            PatientATD patientATD = fieldIdToPatientAtdMap.get(fieldId);
            if (patientATD == null) {
                patientATD = atdService.getPatientATD(formInstance, fieldId);
            }

            if (patientATD != null) {
                Rule rule = patientATD.getRule();
                parentRuleName = rule.getTokenName();
                fieldIdToPatientAtdMap.put(fieldId, patientATD);
            }
            //if there is a parent rule, add a parameter for the child's fieldname
            //add the parent rule if it is not in rules to run
            if (parentRuleName != null) {
                rulesToRun = rulesToRunByField.get(parentFieldName);
                if (rulesToRun == null) {
                    rulesToRun = new LinkedHashMap<String, Rule>();
                    rulesToRunByField.put(parentFieldName, rulesToRun);
                }

                Rule ruleLookup = rulesToRun.get(parentRuleName);

                if (ruleLookup == null) {
                    ruleLookup = new Rule();
                    ruleLookup.setParameters(parameters);
                    ruleLookup.setTokenName(parentRuleName);
                    rulesToRun.put(parentRuleName, ruleLookup);
                } else {
                    parameters = ruleLookup.getParameters();
                }

                String childFieldName = currField.getField().getName();
                Integer index = childIndex.get(parentFieldName);
                if (index == null) {
                    index = 0;
                }
                parameters.put("child" + index, childFieldName);
                parameters.put(childFieldName, formFieldToValue.get(currField));
                childIndex.put(parentFieldName, ++index);
            }
        }
    }

    //run all the consume rules
    Integer formInstanceId = formInstance.getFormInstanceId();
    String formName = form.getName();
    String formType = org.openmrs.module.chirdlutil.util.Util.getFormType(form.getFormId(), locationTagId,
            locationId); // CHICA-1234 Look up the formType
    for (LinkedHashMap<String, Rule> rulesToRun : rulesToRunByField.values()) {
        for (String currRuleName : rulesToRun.keySet()) {
            Rule rule = rulesToRun.get(currRuleName);
            Map<String, Object> parameters = rule.getParameters();
            parameterHandler.addParameters(parameters, fieldMap, formType); // CHICA-1234 Added formType parameter
            atdService.evaluateRule(currRuleName, patient, parameters);
            setScannedTimestamps(formInstanceId, rule.getRuleId(), formName, locationId);
        }
    }

    // DWE CHICA-430 Now that rules have run and obs records have been added/updated/voided
    // create the list of fields to remove from the xml
    List<String> elementsToRemoveList = createElementsToRemoveList(form, formInstanceId, encounter,
            locationTagId, locationId);

    fieldIdToPatientAtdMap.clear();
    serializeFields(formInstance, locationTagId, fieldsToAdd, elementsToRemoveList); // DWE CHICA-430 Add elementsToRemoveList
}

From source file:jp.or.openid.eiwg.scim.operation.Operation.java

/**
 * /*from  ww  w.j a  va2s. c  om*/
 *
 * @param context
 * @param request
 * @param targetId
 * @param attributes
 * @param filter
 * @param sortBy
 * @param sortOrder
 * @param startIndex
 * @param count
 */
public ArrayList<LinkedHashMap<String, Object>> searchUserInfo(ServletContext context,
        HttpServletRequest request, String targetId, String attributes, String filter, String sortBy,
        String sortOrder, String startIndex, String count) {
    ArrayList<LinkedHashMap<String, Object>> result = null;

    Set<String> returnAttributeNameSet = new HashSet<>();

    // Reset any error state left over from a previous operation.
    setError(0, null, null);

    // Validate the requested attribute names, if any were supplied.
    if (attributes != null && !attributes.isEmpty()) {
        String[] tempList = attributes.split(",");
        for (int i = 0; i < tempList.length; i++) {
            String attributeName = tempList[i].trim();
            // Look the attribute up in the user schema (case-insensitive lookup).
            LinkedHashMap<String, Object> attributeSchema = SCIMUtil.getUserAttributeInfo(context,
                    attributeName, true);
            if (attributeSchema != null && !attributeSchema.isEmpty()) {
                returnAttributeNameSet.add(attributeName);
            } else {
                // Unknown attribute name -> 400 Bad Request.
                String message = String.format(MessageConstants.ERROR_INVALID_ATTRIBUTES, attributeName);
                setError(HttpServletResponse.SC_BAD_REQUEST, null, message);
                return result;
            }
        }
    }
    // NOTE(review): returnAttributeNameSet is validated above but never used to
    // restrict the attributes copied into resultInfo below — confirm whether
    // attribute selection was meant to be applied when building the response.

    // sortBy / sortOrder / startIndex / count are accepted but not implemented.

    result = new ArrayList<LinkedHashMap<String, Object>>();

    // The user store lives in the servlet context under the "Users" attribute.
    @SuppressWarnings("unchecked")
    ArrayList<LinkedHashMap<String, Object>> users = (ArrayList<LinkedHashMap<String, Object>>) context
            .getAttribute("Users");
    if (users != null && !users.isEmpty()) {
        Iterator<LinkedHashMap<String, Object>> usersIt = users.iterator();
        while (usersIt.hasNext()) {
            boolean isMatched = false;
            LinkedHashMap<String, Object> userInfo = usersIt.next();

            if (targetId != null && !targetId.isEmpty()) {
                // A specific resource id was requested: match on "id" first,
                // then apply the filter (if any) to the matching user.
                Object id = SCIMUtil.getAttribute(userInfo, "id");
                if (id != null && id instanceof String) {
                    if (targetId.equals(id.toString())) {
                        if (filter != null && !filter.isEmpty()) {
                            boolean matched = false;
                            try {
                                matched = SCIMUtil.checkUserSimpleFilter(context, userInfo, filter);
                            } catch (SCIMUtilException e) {
                                result = null;
                                setError(e.getCode(), e.getType(), e.getMessage());
                                break;
                            }

                            if (matched) {
                                isMatched = true;
                            }
                        } else {
                            isMatched = true;
                        }
                    }
                }
            } else {
                // No id constraint: every user is a candidate, optionally filtered.
                if (filter != null && !filter.isEmpty()) {
                    boolean matched = false;
                    try {
                        matched = SCIMUtil.checkUserSimpleFilter(context, userInfo, filter);
                    } catch (SCIMUtilException e) {
                        result = null;
                        setError(e.getCode(), e.getType(), e.getMessage());
                        break;
                    }

                    if (matched) {
                        isMatched = true;
                    }
                } else {
                    isMatched = true;
                }
            }

            if (isMatched) {
                // Copy the user's attributes, omitting those whose schema marks
                // them "returned": "never" (e.g. passwords).
                LinkedHashMap<String, Object> resultInfo = new LinkedHashMap<String, Object>();
                for (String attributeName : userInfo.keySet()) {
                    LinkedHashMap<String, Object> attributeSchema = SCIMUtil.getUserAttributeInfo(context,
                            attributeName, true);
                    // Guard against attributes absent from the schema; the original
                    // dereferenced attributeSchema unconditionally and would NPE here.
                    Object returned = (attributeSchema != null) ? attributeSchema.get("returned") : null;

                    if (returned != null && returned.toString().equalsIgnoreCase("never")) {
                        continue;
                    }

                    resultInfo.put(attributeName, userInfo.get(attributeName));
                }

                result.add(resultInfo);
            }
        }
    }

    return result;
}

From source file:com.foo.manager.commonManager.thread.HttpHandleThread.java

/**
 * Builds the TJ (621) request XML for the inventory record identified by {@code id}.
 * Column names coming from the mappers are translated to XML element names via the
 * resource bundle using the TJ_HEAD_ / TJ_LIST_ / TJ_IO_ key prefixes.
 *
 * @param id     inventory record id (numeric string)
 * @param bundle resource bundle mapping DB column names to XML element names
 * @return the generated request XML
 */
private String generalRequestXml4TJ(String id, ResourceBundle bundle) {
    int idInt = Integer.valueOf(id);

    // Step 1: inventory head.
    LinkedHashMap InventoryHead = new LinkedHashMap();
    List<LinkedHashMap> headList = snCommonManagerMapper.selectInventoryHead(idInt);
    // Guard emptiness as well as null: the original called get(0) on a possibly
    // empty list and would throw IndexOutOfBoundsException.
    if (headList != null && !headList.isEmpty()) {
        copyWithBundlePrefix(InventoryHead, headList.get(0), "TJ_HEAD_", bundle);
    }

    // Step 2: inventory detail lines — one output entry per related book-info record.
    List<LinkedHashMap> InventoryList = new ArrayList<LinkedHashMap>();
    List<LinkedHashMap> ItemList = snCommonManagerMapper.selectInventoryList(idInt);
    if (ItemList != null) {
        for (LinkedHashMap item : ItemList) {
            // Book info rows associated with this order item.
            List<LinkedHashMap> bookInfoList = snCommonManagerMapper.selectInventoryListRelateBookInfo(
                    item.get("ORDER_NO").toString(), item.get("ITEM_NO").toString());

            for (LinkedHashMap bookInfo : bookInfoList) {
                // Merge the book-info columns into the item row.
                item.put("RECORD_NO", bookInfo.get("RECORD_NO"));
                item.put("GOODS_SERIALNO", bookInfo.get("GOODS_SERIALNO"));
                item.put("DECL_NO", bookInfo.get("DECL_NO"));

                LinkedHashMap Inventory = new LinkedHashMap();
                // When a second unit (UNIT2) exists, QTY2 mirrors QTY1;
                // otherwise QTY2 is left untouched.
                if (item.get("UNIT2") == null || item.get("UNIT2").toString().isEmpty()) {
                    // no second unit: keep QTY2 as-is
                } else {
                    item.put("QTY2", item.get("QTY1"));
                }
                copyWithBundlePrefix(Inventory, item, "TJ_LIST_", bundle);

                InventoryList.add(Inventory);
            }

        }
    }

    // Step 3: container list and order-relation list share the TJ_IO_ prefix.
    LinkedHashMap IODeclContainerList = new LinkedHashMap();
    List<LinkedHashMap> IODeclContainerListTemp = snCommonManagerMapper.selectIODeclContainerList(idInt);
    if (IODeclContainerListTemp != null && !IODeclContainerListTemp.isEmpty()) {
        copyWithBundlePrefix(IODeclContainerList, IODeclContainerListTemp.get(0), "TJ_IO_", bundle);
    }

    LinkedHashMap IODeclOrderRelationList = new LinkedHashMap();
    List<LinkedHashMap> IODeclOrderRelationListTemp = snCommonManagerMapper
            .selectIODeclOrderRelationList(idInt);
    if (IODeclOrderRelationListTemp != null && !IODeclOrderRelationListTemp.isEmpty()) {
        copyWithBundlePrefix(IODeclOrderRelationList, IODeclOrderRelationListTemp.get(0), "TJ_IO_", bundle);
    }

    LinkedHashMap BaseTransfer = snCommonManagerMapper.selectBaseTransfer();

    String resultXmlString = XmlUtil.generalRequestXml4TJ_621(InventoryHead, InventoryList, IODeclContainerList,
            IODeclOrderRelationList, BaseTransfer, bundle);

    return resultXmlString;
}

/**
 * Copies every entry of {@code source} into {@code target}, renaming each key to the
 * bundle value of {@code prefix + key} when such a bundle entry exists, otherwise
 * keeping the key's string form. No-op when {@code source} is null.
 */
@SuppressWarnings("unchecked")
private static void copyWithBundlePrefix(LinkedHashMap target, LinkedHashMap source, String prefix,
        ResourceBundle bundle) {
    if (source == null) {
        return;
    }
    for (Object key : source.keySet()) {
        String bundleKey = prefix + key.toString();
        if (bundle.containsKey(bundleKey)) {
            target.put(bundle.getObject(bundleKey), source.get(key));
        } else {
            target.put(key.toString(), source.get(key));
        }
    }
}

From source file:com.streamsets.pipeline.stage.processor.hive.HiveMetadataProcessor.java

@Override
protected void process(Record record, BatchMaker batchMaker) throws StageException {
    ELVars variables = getContext().createELVars();
    RecordEL.setRecordInContext(variables, record);
    TimeEL.setCalendarInContext(variables, Calendar.getInstance());
    TimeNowEL.setTimeNowInContext(variables, new Date());

    // Calculate record time for this particular record and persist it in the variables
    Date timeBasis = elEvals.timeDriverElEval.eval(variables, timeDriver, Date.class);
    Calendar calendar = Calendar.getInstance(timeZone);
    calendar.setTime(timeBasis);//ww  w  .j a v a  2 s . co m
    TimeEL.setCalendarInContext(variables, calendar);

    String dbName = HiveMetastoreUtil.resolveEL(elEvals.dbNameELEval, variables, databaseEL);
    String tableName = HiveMetastoreUtil.resolveEL(elEvals.tableNameELEval, variables, tableEL);
    String targetPath;
    String avroSchema;
    String partitionStr = "";
    LinkedHashMap<String, String> partitionValMap;

    if (dbName.isEmpty()) {
        dbName = DEFAULT_DB;
    }
    try {
        // Validate Database and Table names
        if (!HiveMetastoreUtil.validateObjectName(dbName)) {
            throw new HiveStageCheckedException(Errors.HIVE_METADATA_03, "database name", dbName);
        }
        if (!HiveMetastoreUtil.validateObjectName(tableName)) {
            throw new HiveStageCheckedException(Errors.HIVE_METADATA_03, "table name", tableName);
        }

        partitionValMap = getPartitionValuesFromRecord(variables);

        if (partitioned) {
            partitionStr = externalTable
                    ? HiveMetastoreUtil.resolveEL(elEvals.partitionPathTemplateELEval, variables,
                            partitionPathTemplate)
                    : HiveMetastoreUtil.generatePartitionPath(partitionValMap);
            if (!partitionStr.startsWith("/"))
                partitionStr = "/" + partitionStr;
        }
        // First, find out if this record has all necessary data to process
        validateNames(dbName, tableName);
        String qualifiedName = HiveMetastoreUtil.getQualifiedTableName(dbName, tableName);
        LOG.trace("Generated table {} for record {}", qualifiedName, record.getHeader().getSourceId());

        if (externalTable) {
            // External table have location in the resolved EL
            targetPath = HiveMetastoreUtil.resolveEL(elEvals.tablePathTemplateELEval, variables,
                    tablePathTemplate);
        } else {
            // Internal table will be the database location + table name
            String databaseLocation;
            try {
                databaseLocation = databaseCache.get(dbName);
            } catch (ExecutionException e) {
                throw new HiveStageCheckedException(com.streamsets.pipeline.stage.lib.hive.Errors.HIVE_23,
                        e.getMessage());
            }
            targetPath = String.format("%s/%s", databaseLocation, tableName);
        }

        if (targetPath.isEmpty()) {
            throw new HiveStageCheckedException(Errors.HIVE_METADATA_02, targetPath);
        }

        // Obtain the record structure from current record
        LinkedHashMap<String, HiveTypeInfo> recordStructure = HiveMetastoreUtil.convertRecordToHMSType(record,
                elEvals.scaleEL, elEvals.precisionEL, elEvals.commentEL, decimalDefaultsConfig.scaleExpression,
                decimalDefaultsConfig.precisionExpression, commentExpression, variables);

        if (recordStructure.isEmpty()) { // If record has no data to process, No-op
            return;
        }

        TBLPropertiesInfoCacheSupport.TBLPropertiesInfo tblPropertiesInfo = HiveMetastoreUtil
                .getCacheInfo(cache, HMSCacheType.TBLPROPERTIES_INFO, qualifiedName, queryExecutor);

        if (tblPropertiesInfo != null) {
            HiveMetastoreUtil.validateTblPropertiesInfo(dataFormat, tblPropertiesInfo, tableName);

            if (tblPropertiesInfo.isExternal() != externalTable) {
                throw new HiveStageCheckedException(com.streamsets.pipeline.stage.lib.hive.Errors.HIVE_23,
                        "EXTERNAL", externalTable, tblPropertiesInfo.isExternal());
            }
        }

        TypeInfoCacheSupport.TypeInfo tableCache = HiveMetastoreUtil.getCacheInfo(cache, HMSCacheType.TYPE_INFO,
                qualifiedName, queryExecutor);

        if (tableCache != null) {
            //Checks number and name of partitions.
            HiveMetastoreUtil.validatePartitionInformation(tableCache, partitionValMap, qualifiedName);
            //Checks the type of partitions.
            Map<String, HiveTypeInfo> cachedPartitionTypeInfoMap = tableCache.getPartitionTypeInfo();
            for (Map.Entry<String, HiveTypeInfo> cachedPartitionTypeInfo : cachedPartitionTypeInfoMap
                    .entrySet()) {
                String partitionName = cachedPartitionTypeInfo.getKey();
                HiveTypeInfo expectedTypeInfo = cachedPartitionTypeInfo.getValue();
                HiveTypeInfo actualTypeInfo = partitionTypeInfo.get(partitionName);
                if (!expectedTypeInfo.equals(actualTypeInfo)) {
                    throw new HiveStageCheckedException(com.streamsets.pipeline.stage.lib.hive.Errors.HIVE_28,
                            partitionName, qualifiedName, expectedTypeInfo.toString(),
                            actualTypeInfo.toString());
                }
            }
            // Validate that the columns from record itself does not clash with partition columns
            for (String columnName : recordStructure.keySet()) {
                if (cachedPartitionTypeInfoMap.containsKey(columnName)) {
                    throw new HiveStageCheckedException(com.streamsets.pipeline.stage.lib.hive.Errors.HIVE_40,
                            columnName);
                }
            }
        }

        AvroSchemaInfoCacheSupport.AvroSchemaInfo schemaCache = HiveMetastoreUtil.getCacheInfo(cache,
                HMSCacheType.AVRO_SCHEMA_INFO, qualifiedName, queryExecutor);

        // True if there was a schema drift (including detection of new table)
        boolean schemaDrift = false;

        // Build final structure of how the table should look like
        LinkedHashMap<String, HiveTypeInfo> finalStructure;
        if (tableCache != null) {
            // Table already exists in Hive - so it's columns will be preserved and in their original order
            finalStructure = new LinkedHashMap<>();
            finalStructure.putAll(tableCache.getColumnTypeInfo());

            // If there is any diff (any new columns), we will append them at the end of the table
            LinkedHashMap<String, HiveTypeInfo> columnDiff = tableCache.getDiff(recordStructure);
            if (!columnDiff.isEmpty()) {
                LOG.trace("Detected drift for table {} - new columns: {}", qualifiedName,
                        StringUtils.join(columnDiff.keySet(), ","));
                schemaDrift = true;
                finalStructure.putAll(columnDiff);
            }
        } else {
            LOG.trace("{} is a new table", qualifiedName);
            // This table doesn't exists yet, so we'll use record own structure as the final table's structure
            schemaDrift = true;
            finalStructure = recordStructure;
        }

        // Generate schema only if the table do not exist or it's schema is changed.
        if (schemaDrift) {
            avroSchema = HiveMetastoreUtil.generateAvroSchema(finalStructure, qualifiedName);
            LOG.trace("Schema Drift. Generated new Avro schema for table {}: {}", qualifiedName, avroSchema);

            // Add custom metadata attributes if they are specified
            Map<String, String> metadataHeaderAttributeMap = new LinkedHashMap();
            if (metadataHeadersToAddExist) {
                metadataHeaderAttributeMap = generateResolvedHeaderAttributeMap(metadataHeaderAttributeConfigs,
                        variables);
            }

            handleSchemaChange(dbName, tableName, recordStructure, targetPath, avroSchema, batchMaker,
                    qualifiedName, tableCache, schemaCache, metadataHeaderAttributeMap);
        } else {
            if (schemaCache == null) { // Table exists in Hive, but this is cold start so the cache is null
                avroSchema = HiveMetastoreUtil.generateAvroSchema(finalStructure, qualifiedName);
                LOG.trace("Cold Start. Generated new Avro schema for table {}: {}", qualifiedName, avroSchema);
                updateAvroCache(schemaCache, avroSchema, qualifiedName);
            } else // No schema change, table already exists in Hive, and we have avro schema in cache.
                avroSchema = schemaCache.getSchema();
        }

        if (partitioned) {
            PartitionInfoCacheSupport.PartitionInfo pCache = HiveMetastoreUtil.getCacheInfo(cache,
                    HMSCacheType.PARTITION_VALUE_INFO, qualifiedName, queryExecutor);

            PartitionInfoCacheSupport.PartitionValues partitionValues = new PartitionInfoCacheSupport.PartitionValues(
                    partitionValMap);

            // If the partition information exist (thus this is not a cold start)
            if (pCache != null) {
                // If we detected drift, we need to persist that information and "roll" all partitions next time
                // we will see them.
                if (schemaDrift) {
                    pCache.setAllPartitionsToBeRolled();
                }

                // If we performed drift for the table and this is the firs time we see this partition, we need to
                // set the roll flag anyway.
                if (pCache.shouldRoll(partitionValues)) {
                    schemaDrift = true;
                }
            }

            // Append partition path to target path as all paths from now should be with the partition info
            targetPath += partitionStr;

            Map<PartitionInfoCacheSupport.PartitionValues, String> diff = detectNewPartition(partitionValues,
                    pCache, targetPath);

            // Send new partition metadata if new partition is detected.
            if (diff != null) {
                // Add custom metadata attributes if they are specified
                Map<String, String> partitionMetadataHeaderAttributeMap = new LinkedHashMap<>();
                if (metadataHeadersToAddExist) {
                    partitionMetadataHeaderAttributeMap = generateResolvedHeaderAttributeMap(
                            metadataHeaderAttributeConfigs, variables);
                }
                handleNewPartition(partitionValMap, pCache, dbName, tableName, targetPath, batchMaker,
                        qualifiedName, diff, partitionMetadataHeaderAttributeMap);
            }
        }

        // Send record to HDFS target.
        if (dataFormat == HMPDataFormat.PARQUET) {
            targetPath = targetPath + TEMP_AVRO_DIR_NAME;
        }

        changeRecordFieldToLowerCase(record);
        updateRecordForHDFS(record, schemaDrift, avroSchema, targetPath);
        batchMaker.addRecord(record, hdfsLane);
    } catch (HiveStageCheckedException error) {
        LOG.error("Error happened when processing record", error);
        LOG.trace("Record that caused the error: {}", record.toString());
        errorRecordHandler.onError(new OnRecordErrorException(record, error.getErrorCode(), error.getParams()));
    }
}

From source file:net.jradius.client.gui.JRadiusSimulator.java

/**
 * Populates the attribute tree: a "Standard Attributes" branch for the dictionary's
 * standard attributes, and one branch per loaded vendor-specific dictionary under
 * "Vendor Specific Attributes", sorted alphabetically by vendor name.
 *
 * @param top root node to which both branches are attached
 */
private void createAttributeTreeNodes(DefaultMutableTreeNode top) {
    DefaultMutableTreeNode standardTree = new DefaultMutableTreeNode("Standard Attributes");
    DefaultMutableTreeNode vsaTree = new DefaultMutableTreeNode("Vendor Specific Attributes");
    addAttributesToTable(standardTree, AttributeFactory.getAttributeNameMap(), true);
    top.add(standardTree);

    // Collect each vendor's attribute-name map, keyed by the vendor's display name.
    Map<Long, VendorValue> vendors = AttributeFactory.getVendorValueMap();
    LinkedHashMap<String, Map<String, Class<?>>> dictList = new LinkedHashMap<String, Map<String, Class<?>>>();
    for (VendorValue vendor : vendors.values()) {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(), which bypasses compile-time checked-exception
            // handling by rethrowing constructor exceptions unchecked.
            VSADictionary dict = (VSADictionary) vendor.getDictClass().getDeclaredConstructor().newInstance();
            String vendorName = dict.getVendorName();
            Map<String, Class<?>> map = vendor.getAttributeNameMap();
            System.out.println("Loading vendor " + vendorName + " with " + map.size() + " attributes.");
            dictList.put(vendorName, map);
        } catch (Exception e) {
            // A dictionary that fails to load is skipped; the rest still load.
            e.printStackTrace();
        }
    }
    // Attach vendor branches in alphabetical order.
    LinkedList<String> list = new LinkedList<String>(dictList.keySet());
    Collections.sort(list);
    for (String vendorName : list) {
        DefaultMutableTreeNode vsaNode = new DefaultMutableTreeNode(vendorName);
        addAttributesToTable(vsaNode, dictList.get(vendorName), false);
        vsaTree.add(vsaNode);
    }
    top.add(vsaTree);
}