List of usage examples for java.util.LinkedHashMap.values()
public Collection<V> values()
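Before the project examples below, here is a minimal sketch of the two properties every example relies on: the returned Collection iterates in the map's insertion order, and it is a live view backed by the map. The class name and map contents here are illustrative only, not taken from any of the projects below.

import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapValuesSketch {
    public static void main(String[] args) {
        // Illustrative map; LinkedHashMap preserves insertion order.
        Map<String, Integer> wordCounts = new LinkedHashMap<>();
        wordCounts.put("alpha", 1);
        wordCounts.put("beta", 2);
        wordCounts.put("gamma", 3);

        // values() returns a view in insertion order, not a copy.
        Collection<Integer> counts = wordCounts.values();
        System.out.println(counts); // prints [1, 2, 3]

        // The view is live: removing through it removes the backing entry.
        counts.remove(2);
        System.out.println(wordCounts); // prints {alpha=1, gamma=3}
    }
}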
From source file:org.jumpmind.db.platform.AbstractDdlBuilder.java
public void writeCopyDataStatement(Table sourceTable, Table targetTable,
        LinkedHashMap<Column, Column> columnMap, StringBuilder ddl) {
    ddl.append("INSERT INTO ");
    ddl.append(getFullyQualifiedTableNameShorten(targetTable));
    ddl.append(" (");
    for (Iterator<Column> columnIt = columnMap.values().iterator(); columnIt.hasNext();) {
        printIdentifier(getColumnName(columnIt.next()), ddl);
        if (columnIt.hasNext()) {
            ddl.append(",");
        }
    }
    ddl.append(") SELECT ");
    for (Iterator<Map.Entry<Column, Column>> columnsIt = columnMap.entrySet().iterator(); columnsIt.hasNext();) {
        Map.Entry<Column, Column> entry = columnsIt.next();
        writeCastExpression(entry.getKey(), entry.getValue(), ddl);
        if (columnsIt.hasNext()) {
            ddl.append(",");
        }
    }
    ddl.append(" FROM ");
    ddl.append(getFullyQualifiedTableNameShorten(sourceTable));
    printEndOfStatement(ddl);
}
From source file:ca.on.oicr.pde.workflows.GATKGenotypeGVCFsWorkflow.java
@Override
public void buildWorkflow() {
    final String binDir = this.getWorkflowBaseDir() + "/bin/";
    final Boolean manualOutput = BooleanUtils.toBoolean(getProperty("manual_output"), "true", "false");
    final String queue = getOptionalProperty("queue", "");
    final String java = getProperty("java");
    final String gatk = getOptionalProperty("gatk_jar", binDir);
    final String gatkKey = getProperty("gatk_key");
    final String identifier = getProperty("identifier");
    final String refFasta = getProperty("ref_fasta");
    final Double standCallConf = Double.valueOf(getProperty("stand_call_conf"));
    final Double standEmitConf = Double.valueOf(getProperty("stand_emit_conf"));
    final String dbsnpVcf = getOptionalProperty("gatk_dbsnp_vcf", null);
    final Integer gatkGenotypeGvcfsXmx = Integer.parseInt(getProperty("gatk_genotype_gvcfs_xmx"));
    final String gatkGenotypeGvcfsParams = getOptionalProperty("gatk_genotype_gvcfs_params", null);
    final Integer gatkCombineGVCFsXmx = Integer.parseInt(getProperty("gatk_combine_gvcfs_xmx"));
    final Integer gatkOverhead = Integer.parseInt(getProperty("gatk_sched_overhead_mem"));
    final Integer maxGenotypeGVCFsInputFiles = Integer
            .parseInt(getProperty("gatk_genotype_gvcfs_max_input_files"));
    final Integer maxCombineGVCFsInputFiles = Integer
            .parseInt(getProperty("gatk_combine_gvcfs_max_input_files"));

    final List<String> chrSizesList = Arrays.asList(StringUtils.split(getProperty("chr_sizes"), ","));
    final Set<String> chrSizes = new LinkedHashSet<>(chrSizesList);
    if (chrSizes.size() != chrSizesList.size()) {
        throw new RuntimeException("Duplicate chr_sizes detected.");
    }

    // one chrSize record is required; null will result in no parallelization
    if (chrSizes.isEmpty()) {
        chrSizes.add(null);
    }

    List<Pair<String, Job>> combineGvcfs = batchGVCFs(inputFiles, maxGenotypeGVCFsInputFiles,
            maxCombineGVCFsInputFiles, java, gatkCombineGVCFsXmx, gatkOverhead, tmpDir, gatk, gatkKey,
            tmpGVCFsDir, refFasta, queue);

    // use linked hashmap to keep "pairs" in sort order determined by chr_sizes
    LinkedHashMap<String, Pair<GenotypeGVCFs, Job>> vcfs = new LinkedHashMap<>();
    for (String chrSize : chrSizes) {
        // GATK GenotypeGVCFs (https://www.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_variantutils_GenotypeGVCFs.php)
        GenotypeGVCFs.Builder genotypeGvcfsBuilder = new GenotypeGVCFs.Builder(java,
                gatkGenotypeGvcfsXmx + "g", tmpDir, gatk, gatkKey, dataDir)
                        .setReferenceSequence(refFasta)
                        .setOutputFileName(identifier
                                + (chrSize != null ? "." + chrSize.replace(":", "-") : "") + ".raw")
                        .addInterval(chrSize)
                        .setStandardCallConfidence(standCallConf)
                        .setStandardEmitConfidence(standEmitConf)
                        .setDbsnpFilePath(dbsnpVcf)
                        .setExtraParameters(gatkGenotypeGvcfsParams);
        for (String f : getLeftCollection(combineGvcfs)) {
            genotypeGvcfsBuilder.addInputFile(f);
        }
        GenotypeGVCFs genotypeGvcfsCommand = genotypeGvcfsBuilder.build();

        Job genotypeGvcfsJob = getWorkflow().createBashJob("GATKGenotypeGVCFs")
                .setMaxMemory(Integer.toString((gatkGenotypeGvcfsXmx + gatkOverhead) * 1024))
                .setQueue(queue);
        genotypeGvcfsJob.getCommand().setArguments(genotypeGvcfsCommand.getCommand());

        // add parents: null if provision file in, not null if parent is a combine gvcf job
        for (Job j : getRightCollection(combineGvcfs)) {
            if (j != null) {
                genotypeGvcfsJob.addParent(j);
            }
        }

        if (vcfs.put(chrSize, Pair.of(genotypeGvcfsCommand, genotypeGvcfsJob)) != null) {
            throw new RuntimeException("Unexpected state: duplicate vcf.");
        }
    }

    if (vcfs.size() > 1) {
        // GATK CatVariants (https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_CatVariants.php)
        CatVariants.Builder catVariantsBuilder = new CatVariants.Builder(java, gatkCombineGVCFsXmx + "g",
                tmpDir, gatk, gatkKey, dataDir)
                        .setReferenceSequence(refFasta)
                        // individual vcf files sorted by genotype gvcfs; order of input vcf concatenation
                        // is determined by chr_sizes order (assumed to be sorted)
                        .disableSorting()
                        .setOutputFileName(identifier + ".raw");
        for (GenotypeGVCFs cmd : getLeftCollection(vcfs.values())) {
            catVariantsBuilder.addInputFile(cmd.getOutputFile());
        }
        CatVariants catVariantsCommand = catVariantsBuilder.build();

        Job combineGVCFsJob = getWorkflow().createBashJob("GATKCombineGVCFs")
                .setMaxMemory(Integer.toString((gatkCombineGVCFsXmx + gatkOverhead) * 1024))
                .setQueue(queue);
        combineGVCFsJob.getParents().addAll(getRightCollection(vcfs.values()));
        combineGVCFsJob.getCommand().setArguments(catVariantsCommand.getCommand());
        combineGVCFsJob.addFile(
                createOutputFile(catVariantsCommand.getOutputFile(), "application/vcf-gz", manualOutput));
        combineGVCFsJob.addFile(
                createOutputFile(catVariantsCommand.getOutputIndex(), "application/tbi", manualOutput));
    } else if (vcfs.size() == 1) {
        Pair<GenotypeGVCFs, Job> p = Iterables.getOnlyElement(vcfs.values());
        GenotypeGVCFs cmd = p.getLeft();
        Job genotypeGvcfsJob = p.getRight();
        genotypeGvcfsJob.addFile(createOutputFile(cmd.getOutputFile(), "application/vcf-gz", manualOutput));
        genotypeGvcfsJob.addFile(createOutputFile(cmd.getOutputIndex(), "application/tbi", manualOutput));
    } else {
        throw new RuntimeException("Unexpected state: No VCFs");
    }
}
From source file:com.opengamma.analytics.financial.interestrate.MultipleYieldCurveFinderDataBundle.java
public MultipleYieldCurveFinderDataBundle(final List<InstrumentDerivative> derivatives,
        final double[] marketValues, final YieldCurveBundle knownCurves,
        final LinkedHashMap<String, double[]> unknownCurveNodePoints,
        final LinkedHashMap<String, Interpolator1D> unknownCurveInterpolators,
        final boolean useFiniteDifferenceByDefault, final FXMatrix fxMatrix) {
    ArgumentChecker.notNull(derivatives, "derivatives");
    ArgumentChecker.noNulls(derivatives, "derivatives");
    ArgumentChecker.notNull(marketValues, "market values null");
    ArgumentChecker.notNull(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notNull(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.notEmpty(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notEmpty(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.isTrue(derivatives.size() == marketValues.length,
            "marketValues wrong length; must be one par rate per derivative (have {} values for {} derivatives)",
            marketValues.length, derivatives.size());
    ArgumentChecker.notNull(fxMatrix, "FX matrix");
    if (knownCurves != null) {
        for (final String name : knownCurves.getAllNames()) {
            if (unknownCurveInterpolators.containsKey(name)) {
                throw new IllegalArgumentException("Curve name in known set matches one to be solved for");
            }
        }
        _knownCurves = knownCurves;
    } else {
        _knownCurves = null;
    }
    _derivatives = derivatives;
    _marketValues = marketValues;
    if (unknownCurveNodePoints.size() != unknownCurveInterpolators.size()) {
        throw new IllegalArgumentException("Number of unknown curves not the same as curve interpolators");
    }
    final Iterator<Entry<String, double[]>> nodePointsIterator = unknownCurveNodePoints.entrySet().iterator();
    final Iterator<Entry<String, Interpolator1D>> unknownCurvesIterator = unknownCurveInterpolators.entrySet()
            .iterator();
    _names = new ArrayList<>();
    while (nodePointsIterator.hasNext()) {
        final Entry<String, double[]> entry1 = nodePointsIterator.next();
        final Entry<String, Interpolator1D> entry2 = unknownCurvesIterator.next();
        final String name1 = entry1.getKey();
        if (!name1.equals(entry2.getKey())) {
            throw new IllegalArgumentException("Names must be the same");
        }
        ArgumentChecker.notNull(entry1.getValue(), "curve node points for " + name1);
        ArgumentChecker.notNull(entry2.getValue(), "interpolator for " + name1);
        _names.add(name1);
    }
    int nNodes = 0;
    for (final double[] nodes : unknownCurveNodePoints.values()) {
        nNodes += nodes.length;
    }
    if (nNodes > derivatives.size()) {
        throw new IllegalArgumentException("Total number of nodes (" + nNodes
                + ") is greater than the number of instruments (" + derivatives.size() + ")");
    }
    _totalNodes = nNodes;
    _unknownCurveNodePoints = unknownCurveNodePoints;
    _unknownCurveInterpolators = unknownCurveInterpolators;
    _useFiniteDifferenceByDefault = useFiniteDifferenceByDefault;
    _fxMatrix = fxMatrix;
}
From source file:org.apache.atlas.hive.hook.HiveHook.java
private void handleExternalTables(final HiveMetaStoreBridge dgiBridge, final HiveEventContext event,
        final LinkedHashMap<Type, Referenceable> tables) throws HiveException, MalformedURLException {
    List<Referenceable> entities = new ArrayList<>();
    final WriteEntity hiveEntity = (WriteEntity) getEntityByType(event.getOutputs(), Type.TABLE);
    Table hiveTable = hiveEntity == null ? null : hiveEntity.getTable();
    // Refresh to get the correct location
    if (hiveTable != null) {
        hiveTable = dgiBridge.hiveClient.getTable(hiveTable.getDbName(), hiveTable.getTableName());
    }

    if (hiveTable != null && TableType.EXTERNAL_TABLE.equals(hiveTable.getTableType())) {
        LOG.info("Registering external table process {} ", event.getQueryStr());
        final String location = lower(hiveTable.getDataLocation().toString());
        final ReadEntity dfsEntity = new ReadEntity();
        dfsEntity.setTyp(Type.DFS_DIR);
        dfsEntity.setD(new Path(location));

        SortedMap<ReadEntity, Referenceable> hiveInputsMap = new TreeMap<ReadEntity, Referenceable>(
                entityComparator) {
            {
                put(dfsEntity, dgiBridge.fillHDFSDataSet(location));
            }
        };
        SortedMap<WriteEntity, Referenceable> hiveOutputsMap = new TreeMap<WriteEntity, Referenceable>(
                entityComparator) {
            {
                put(hiveEntity, tables.get(Type.TABLE));
            }
        };

        SortedSet<ReadEntity> sortedIps = new TreeSet<>(entityComparator);
        sortedIps.addAll(hiveInputsMap.keySet());
        SortedSet<WriteEntity> sortedOps = new TreeSet<>(entityComparator);
        sortedOps.addAll(hiveOutputsMap.keySet());

        Referenceable processReferenceable = getProcessReferenceable(dgiBridge, event, sortedIps, sortedOps,
                hiveInputsMap, hiveOutputsMap);

        entities.addAll(tables.values());
        entities.add(processReferenceable);
        event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
    }
}
From source file:org.pentaho.reporting.platform.plugin.ParameterXmlContentHandler.java
public void createParameterContent(final OutputStream outputStream, final Serializable fileId,
        final String path, boolean overrideOutputType, MasterReport report) throws Exception {
    final Object rawSessionId = inputs.get(ParameterXmlContentHandler.SYS_PARAM_SESSION_ID);
    if ((rawSessionId instanceof String) == false || "".equals(rawSessionId)) {
        inputs.put(ParameterXmlContentHandler.SYS_PARAM_SESSION_ID, UUIDUtil.getUUIDAsString());
    }

    this.document = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
    final IParameterProvider requestParams = getRequestParameters();

    final SimpleReportingComponent reportComponent = new SimpleReportingComponent();
    reportComponent.setReportFileId(fileId);
    if (report != null) {
        reportComponent.setReport(report);
    }
    reportComponent.setPaginateOutput(true);
    final boolean isMobile = "true".equals(requestParams.getStringParameter("mobile", "false")); //$NON-NLS-1$ //$NON-NLS-2$
    if (isMobile) {
        overrideOutputType = true;
        reportComponent.setForceDefaultOutputTarget(true);
    } else {
        reportComponent.setForceDefaultOutputTarget(overrideOutputType);
    }
    reportComponent.setDefaultOutputTarget(HtmlTableModule.TABLE_HTML_PAGE_EXPORT_TYPE);
    if (path.endsWith(".prpti")) {
        reportComponent.setForceUnlockPreferredOutput(true);
    }
    reportComponent.setInputs(inputs);

    report = reportComponent.getReport();
    final DefaultParameterContext parameterContext = new DefaultParameterContext(report);
    final ValidationResult vr;
    final Element parameters;
    try {
        // apply inputs to parameters
        final ValidationResult validationResult = ReportContentUtil.applyInputsToReportParameters(report,
                parameterContext, inputs, new ValidationResult());
        final ReportParameterDefinition reportParameterDefinition = report.getParameterDefinition();
        vr = reportParameterDefinition.getValidator().validate(validationResult, reportParameterDefinition,
                parameterContext);

        parameters = document.createElement(GROUP_PARAMETERS); //$NON-NLS-1$
        parameters.setAttribute("is-prompt-needed", String.valueOf(vr.isEmpty() == false)); //$NON-NLS-1$ //$NON-NLS-2$
        parameters.setAttribute("ignore-biserver-5538", "true");

        // check if pagination is allowed and turned on
        final Boolean autoSubmitFlag = requestFlag("autoSubmit", report, AttributeNames.Core.NAMESPACE,
                AttributeNames.Core.AUTO_SUBMIT_PARAMETER,
                "org.pentaho.reporting.engine.classic.core.ParameterAutoSubmit");
        if (Boolean.TRUE.equals(autoSubmitFlag)) {
            parameters.setAttribute("autoSubmit", "true");
        } else if (Boolean.FALSE.equals(autoSubmitFlag)) {
            parameters.setAttribute("autoSubmit", "false");
        }

        final Boolean autoSubmitUiFlag = requestFlag("autoSubmitUI", report, // NON-NLS
                AttributeNames.Core.NAMESPACE, AttributeNames.Core.AUTO_SUBMIT_DEFAULT,
                "org.pentaho.reporting.engine.classic.core.ParameterAutoSubmitUI");
        if (Boolean.FALSE.equals(autoSubmitUiFlag)) {
            parameters.setAttribute("autoSubmitUI", "false"); // NON-NLS
        } else {
            parameters.setAttribute("autoSubmitUI", "true"); // NON-NLS
        }

        parameters.setAttribute("layout", requestConfiguration("layout", report, // NON-NLS
                AttributeNames.Core.NAMESPACE, AttributeNames.Core.PARAMETER_UI_LAYOUT,
                "org.pentaho.reporting.engine.classic.core.ParameterUiLayout"));

        final ParameterDefinitionEntry[] parameterDefinitions = reportParameterDefinition
                .getParameterDefinitions();

        // Collect all parameters, but allow user parameters to override system parameters.
        // It is the user's problem if the types do not match and weird errors occur, but
        // there are sensible use cases where this should be allowed.
        // System parameters must come last in the list, as this is how it was done in the
        // original version and this is how people expect it to be now.
        final LinkedHashMap<String, ParameterDefinitionEntry> reportParameters =
                new LinkedHashMap<String, ParameterDefinitionEntry>();
        for (final ParameterDefinitionEntry parameter : parameterDefinitions) {
            reportParameters.put(parameter.getName(), parameter);
        }
        for (final Map.Entry<String, ParameterDefinitionEntry> entry : getSystemParameter().entrySet()) {
            if (reportParameters.containsKey(entry.getKey()) == false) {
                reportParameters.put(entry.getKey(), entry.getValue());
            }
        }

        if (overrideOutputType) {
            final ParameterDefinitionEntry definitionEntry = reportParameters
                    .get(SimpleReportingComponent.OUTPUT_TARGET);
            if (definitionEntry instanceof AbstractParameter) {
                final AbstractParameter parameter = (AbstractParameter) definitionEntry;
                parameter.setHidden(true);
                parameter.setMandatory(false);
            }
        } else {
            hideOutputParameterIfLocked(report, reportParameters);
        }

        final Map<String, Object> inputs = computeRealInput(parameterContext, reportParameters,
                reportComponent.getComputedOutputTarget(), vr);

        final Boolean showParameterUI = requestFlag("showParameters", report, // NON-NLS
                AttributeNames.Core.NAMESPACE, AttributeNames.Core.SHOW_PARAMETER_UI, null);
        if (Boolean.FALSE.equals(showParameterUI)) {
            inputs.put("showParameters", Boolean.FALSE); // NON-NLS
        } else {
            inputs.put("showParameters", Boolean.TRUE); // NON-NLS
        }

        // Adding proportional width config parameter
        String proportionalWidth = report.getReportConfiguration()
                .getConfigProperty(CONFIG_PARAM_HTML_PROPORTIONAL_WIDTH);
        inputs.put(SYS_PARAM_HTML_PROPORTIONAL_WIDTH, Boolean.valueOf(proportionalWidth));

        for (final ParameterDefinitionEntry parameter : reportParameters.values()) {
            final Object selections = inputs.get(parameter.getName());
            final ParameterContextWrapper wrapper = new ParameterContextWrapper(parameterContext,
                    vr.getParameterValues());
            parameters.appendChild(createParameterElement(parameter, wrapper, selections));
        }

        if (vr.isEmpty() == false) {
            parameters.appendChild(createErrorElements(vr));
        }

        final String[] outputParameter = new OutputParameterCollector().collectParameter(report);
        for (int i = 0; i < outputParameter.length; i++) {
            final String outputParameterName = outputParameter[i];
            // <output-parameter displayName="Territory" id="[Markets].[Territory]"/>
            final Element element = document.createElement("output-parameter"); // NON-NLS
            element.setAttribute("displayName", outputParameterName); // NON-NLS
            element.setAttribute("id", outputParameterName); // NON-NLS
            parameters.appendChild(element);
        }

        if (vr.isEmpty() && reportComponent.getComputedOutputTarget()
                .equals(HtmlTableModule.TABLE_HTML_PAGE_EXPORT_TYPE)) { //$NON-NLS-1$ //$NON-NLS-2$
            appendPageCount(reportComponent, parameters);
        }
        document.appendChild(parameters);

        final DOMSource source = new DOMSource(document);
        final StreamResult result = new StreamResult(outputStream);
        final Transformer transformer = TransformerFactory.newInstance().newTransformer();
        transformer.transform(source, result);
        // close parameter context
    } finally {
        parameterContext.close();
    }
}
From source file:org.cerberus.servlet.crud.testexecution.ReadTestCaseExecution.java
private AnswerItem findExecutionListBySystem(String system, ApplicationContext appContext,
        HttpServletRequest request) throws ParseException, JSONException {
    AnswerItem answer = new AnswerItem(new MessageEvent(MessageEventEnum.DATA_OPERATION_OK));

    /**
     * Parse all parameters used in the search.
     */
    String charset = request.getCharacterEncoding();

    /**
     * Parse parameters - list of values
     */
    List<String> testList = ParameterParserUtil.parseListParamAndDecode(request.getParameterValues("test"),
            null, charset);
    List<String> applicationList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("application"), null, charset);
    List<String> projectList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("project"), null, charset);
    List<String> tcstatusList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("tcstatus"), null, charset);
    List<String> groupList = ParameterParserUtil.parseListParamAndDecode(request.getParameterValues("group"),
            null, charset);
    List<String> tcactiveList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("tcactive"), null, charset);
    List<String> priorityList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("priority"), null, charset);
    List<String> targetsprintList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("targetsprint"), null, charset);
    List<String> targetrevisionList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("targetrevision"), null, charset);
    List<String> creatorList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("creator"), null, charset);
    List<String> implementerList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("implementer"), null, charset);
    List<String> environmentList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("environment"), null, charset);
    List<String> buildList = ParameterParserUtil.parseListParamAndDecode(request.getParameterValues("build"),
            null, charset);
    List<String> revisionList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("revision"), null, charset);
    List<String> countryList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("country"), null, charset);
    List<String> browserList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("browser"), null, charset);
    List<String> tcestatusList = ParameterParserUtil
            .parseListParamAndDecode(request.getParameterValues("tcestatus"), null, charset);

    // Sort the lists
    if (countryList != null) {
        Collections.sort(countryList);
    }
    if (browserList != null) {
        Collections.sort(browserList);
    }

    /**
     * Parse parameters - free text
     */
    String bugid = StringEscapeUtils.escapeHtml4(request.getParameter("bugid"));
    String ticket = StringEscapeUtils.escapeHtml4(request.getParameter("ticket"));
    String ip = StringEscapeUtils.escapeHtml4(request.getParameter("ip"));
    String port = StringEscapeUtils.escapeHtml4(request.getParameter("port"));
    String tag = StringEscapeUtils.escapeHtml4(request.getParameter("tag"));
    String browserversion = StringEscapeUtils.escapeHtml4(request.getParameter("browserversion"));
    String comment = StringEscapeUtils.escapeHtml4(request.getParameter("comment"));

    /**
     * Gets regular executions (not in queue)
     */
    AnswerList answerExecutions = testCaseExecutionService.readBySystemByVarious(system, testList,
            applicationList, projectList, tcstatusList, groupList, tcactiveList, priorityList,
            targetsprintList, targetrevisionList, creatorList, implementerList, buildList, revisionList,
            environmentList, countryList, browserList, tcestatusList, ip, port, tag, browserversion,
            comment, bugid, ticket);
    List<TestCaseExecution> testCaseExecutions = (List<TestCaseExecution>) answerExecutions.getDataList();

    /**
     * Get list of Execution in Queue by Tag
     */
    ITestCaseExecutionInQueueService testCaseExecutionInQueueService = appContext
            .getBean(ITestCaseExecutionInQueueService.class);
    AnswerList answerExecutionsInQueue = testCaseExecutionInQueueService.readBySystemByVarious(system,
            testList, applicationList, projectList, tcstatusList, groupList, tcactiveList, priorityList,
            targetsprintList, targetrevisionList, creatorList, implementerList, buildList, revisionList,
            environmentList, countryList, browserList, tcestatusList, ip, port, tag, browserversion,
            comment, bugid, ticket);
    List<TestCaseExecutionInQueue> testCaseExecutionsInQueue = (List<TestCaseExecutionInQueue>) answerExecutionsInQueue
            .getDataList();

    /**
     * Merge Test Case Executions
     */
    List<TestCaseExecution> allTestCaseExecutions = hashExecution(testCaseExecutions,
            testCaseExecutionsInQueue);

    JSONArray executionList = new JSONArray();
    LinkedHashMap<String, JSONObject> ttc = new LinkedHashMap<String, JSONObject>();

    for (TestCaseExecution testCaseExecution : allTestCaseExecutions) {
        try {
            JSONObject execution = testCaseExecutionToJSONObject(testCaseExecution);
            // the key is country and browser
            String execKey = testCaseExecution.getCountry() + " " + testCaseExecution.getBrowser();
            String testCaseKey = testCaseExecution.getTest() + "_" + testCaseExecution.getTestCase();
            JSONObject execTab = new JSONObject();

            executionList.put(testCaseExecutionToJSONObject(testCaseExecution));
            JSONObject ttcObject = new JSONObject();

            if (ttc.containsKey(testCaseKey)) {
                ttcObject = ttc.get(testCaseKey);
                execTab = ttcObject.getJSONObject("execTab");
                execTab.put(execKey, execution);
                ttcObject.put("execTab", execTab);
            } else {
                ttcObject.put("test", testCaseExecution.getTest());
                ttcObject.put("testCase", testCaseExecution.getTestCase());
                ttcObject.put("function", testCaseExecution.getTestCaseObj().getFunction());
                ttcObject.put("shortDesc", testCaseExecution.getTestCaseObj().getDescription());
                ttcObject.put("status", testCaseExecution.getTestCaseObj().getStatus());
                ttcObject.put("application", testCaseExecution.getApplication());
                ttcObject.put("bugId", testCaseExecution.getTestCaseObj().getBugID());
                ttcObject.put("ticket", testCaseExecution.getTestCaseObj().getTicket());
                ttcObject.put("comment", testCaseExecution.getTestCaseObj().getComment());
                ttcObject.put("priority", testCaseExecution.getTestCaseObj().getPriority());
                ttcObject.put("status", testCaseExecution.getStatus());
                ttcObject.put("group", testCaseExecution.getTestCaseObj().getGroup());
                execTab.put(execKey, execution);
                ttcObject.put("execTab", execTab);
            }
            ttc.put(testCaseKey, ttcObject);
        } catch (JSONException ex) {
            Logger.getLogger(ReadTestCaseExecution.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    JSONObject jsonResponse = new JSONObject();
    jsonResponse.put("contentTable", ttc.values());
    jsonResponse.put("iTotalRecords", ttc.size());
    jsonResponse.put("iTotalDisplayRecords", ttc.size());

    answer.setItem(jsonResponse);
    answer.setResultMessage(new MessageEvent(MessageEventEnum.DATA_OPERATION_OK));
    return answer;
}
From source file:com.atinternet.tracker.Builder.java
/**
 * Prepare the hit query string.
 *
 * @return LinkedHashMap
 */
private LinkedHashMap<String, Object[]> prepareQuery() {
    LinkedHashMap<String, Object[]> formattedParameters = new LinkedHashMap<String, Object[]>();

    ArrayList<Param> completeBuffer = new ArrayList<Param>() {
        {
            addAll(persistentParams);
            addAll(volatileParams);
        }
    };
    ArrayList<Param> params = organizeParameters(completeBuffer);

    for (Param p : params) {
        String value = p.getValue().execute();
        String key = p.getKey();
        HashMap<String, String> plugins = PluginParam.get(tracker);
        if (plugins.containsKey(key)) {
            String pluginClass = plugins.get(key);
            Plugin plugin = null;
            try {
                plugin = (Plugin) Class.forName(pluginClass).newInstance();
                plugin.execute(tracker);
                value = plugin.getResponse();
                p.setType(Param.Type.JSON);
                key = Hit.HitParam.JSON.stringValue();
            } catch (Exception e) {
                e.printStackTrace();
                value = null;
            }
        } else if (key.equals(Hit.HitParam.UserId.stringValue())) {
            if (TechnicalContext.doNotTrackEnabled(Tracker.getAppContext())) {
                value = OPT_OUT;
            } else if (((Boolean) configuration.get(TrackerConfigurationKeys.HASH_USER_ID))) {
                value = Tool.SHA_256(value);
            }
        }

        if (p.getType() == Param.Type.Closure && Tool.parseJSON(value) != null) {
            p.setType(Param.Type.JSON);
        }

        if (value != null) {
            // Referrer processing
            if (key.equals(Hit.HitParam.Referrer.stringValue())) {
                value = value.replace("&", "$").replaceAll("[<>]", "");
            }
            if (p.getOptions() != null && p.getOptions().isEncode()) {
                value = Tool.percentEncode(value);
                p.getOptions().setSeparator(Tool.percentEncode(p.getOptions().getSeparator()));
            }

            int duplicateParamIndex = -1;
            String duplicateParamKey = null;
            Set<String> keys = formattedParameters.keySet();
            String[] keySet = keys.toArray(new String[keys.size()]);
            int length = keySet.length;
            for (int i = 0; i < length; i++) {
                if (keySet[i].equals(key)) {
                    duplicateParamIndex = i;
                    duplicateParamKey = key;
                    break;
                }
            }

            if (duplicateParamIndex != -1) {
                List<Object[]> values = new ArrayList<Object[]>(formattedParameters.values());
                Param duplicateParam = (Param) values.get(duplicateParamIndex)[0];
                String str = ((String) formattedParameters.get(duplicateParamKey)[1]).split("=")[0] + "=";
                String val = ((String) formattedParameters.get(duplicateParamKey)[1]).split("=")[1];

                if (p.getType() == Param.Type.JSON) {
                    Object json = Tool.parseJSON(Tool.percentDecode(val));
                    Object newJson = Tool.parseJSON(Tool.percentDecode(value));

                    if (json != null && json instanceof JSONObject) {
                        Map dictionary = Tool.toMap((JSONObject) json);
                        if (newJson instanceof JSONObject) {
                            Map newDictionary = Tool.toMap((JSONObject) newJson);
                            dictionary.putAll(newDictionary);
                            JSONObject jsonData = new JSONObject(dictionary);
                            formattedParameters.put(key, new Object[] { duplicateParam,
                                    makeSubQuery(key, Tool.percentEncode(jsonData.toString())) });
                        } else {
                            Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                    "Couldn't append value to a dictionary");
                        }
                    } else if (json != null && json instanceof JSONArray) {
                        try {
                            ArrayList<Object> array = new ArrayList<Object>();
                            JSONArray jArray = (JSONArray) json;
                            for (int i = 0; i < jArray.length(); i++) {
                                array.add(jArray.get(i).toString());
                            }
                            if (newJson instanceof JSONArray) {
                                jArray = (JSONArray) newJson;
                                for (int i = 0; i < jArray.length(); i++) {
                                    array.add(jArray.get(i).toString());
                                }
                                JSONObject jsonData = new JSONObject(array.toString());
                                formattedParameters.put(key, new Object[] { duplicateParam,
                                        makeSubQuery(key, Tool.percentEncode(jsonData.toString())) });
                            } else {
                                Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                        "Couldn't append value to an array");
                            }
                        } catch (JSONException e) {
                            Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                    "Couldn't append value to an array");
                        }
                    } else {
                        Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                                "Couldn't append value to a JSON Object");
                    }
                } else if (duplicateParam.getType() == Param.Type.JSON) {
                    Tool.executeCallback(tracker.getListener(), CallbackType.warning,
                            "Couldn't append value to a JSON Object");
                } else {
                    formattedParameters.put(key, new Object[] { duplicateParam,
                            str + val + p.getOptions().getSeparator() + value });
                }
            } else {
                formattedParameters.put(key, new Object[] { p, makeSubQuery(key, value) });
            }
        }
    }
    return formattedParameters;
}
From source file:org.apache.fineract.infrastructure.dataexport.service.DataExportReadPlatformServiceImpl.java
@Override
public DataExportEntityData retrieveTemplate(String baseEntityName) {
    DataExportEntityData dataExportEntityData = null;
    DataExportBaseEntity dataExportBaseEntity = DataExportBaseEntity.fromEntityName(baseEntityName);
    DataExportCoreTable dataExportCoreTable = DataExportCoreTable
            .newInstance(dataExportBaseEntity.getTableName());

    if (dataExportBaseEntity.isValid()) {
        Collection<DatatableData> datatables = new ArrayList<>();
        Collection<EntityColumnMetaData> columns = new ArrayList<>();
        LinkedHashMap<String, EntityColumnMetaData> uniqueColumns = new LinkedHashMap<>();
        final Collection<EntityColumnMetaData> nonCoreColumns = DataExportUtils
                .getTableColumnsMetaData(dataExportBaseEntity.getTableName(), jdbcTemplate);
        final Collection<RegisteredTable> registeredTables = this.registeredTableRepository
                .findAllByApplicationTableName(dataExportBaseEntity.getTableName());

        for (RegisteredTable registeredTable : registeredTables) {
            Long category = (registeredTable.getCategory() != null)
                    ? registeredTable.getCategory().longValue() : null;
            String tableName = registeredTable.getRegisteredTableName();

            // only return user-created or musoni system datatables (ml_loan_details, etc.)
            if (StringUtils.startsWithAny(tableName,
                    new String[] { DataExportApiConstants.USER_CREATED_DATATABLE_NAME_PREFIX,
                            DataExportApiConstants.MUSONI_SYSTEM_DATATABLE_NAME_PREFIX })) {
                DatatableData datatableData = DatatableData.create(registeredTable.getApplicationTableName(),
                        tableName, null, category, null, registeredTable.isSystemDefined(),
                        registeredTable.getDisplayName());
                datatables.add(datatableData);
            }
        }

        // add the core datatables to the list of datatables
        for (DataExportCoreDatatable coreDatatable : DataExportCoreDatatable.values()) {
            DataExportBaseEntity baseEntity = coreDatatable.getBaseEntity();
            if (dataExportBaseEntity.equals(baseEntity)) {
                DatatableData datatableData = DatatableData.create(baseEntity.getTableName(),
                        coreDatatable.getTableName(), null, 0L, null, false, coreDatatable.getDisplayName());
                datatables.add(datatableData);
            }
        }

        // add the core columns
        for (DataExportCoreColumn coreColumn : DataExportCoreColumn.values()) {
            if (coreColumn.getBaseEntity() == null || (coreColumn.getBaseEntity() != null
                    && coreColumn.getBaseEntity().equals(dataExportBaseEntity))) {
                String columnLabel = DataExportUtils.createHumanReadableTableColumnLabel(
                        coreColumn.getLabel(), dataExportCoreTable);
                EntityColumnMetaData metaData = EntityColumnMetaData.newInstance(coreColumn.getName(),
                        columnLabel, coreColumn.getType(), coreColumn.isNullable());
                uniqueColumns.put(coreColumn.getName(), metaData);
            }
        }

        // add the non-core columns
        for (EntityColumnMetaData nonCoreColumn : nonCoreColumns) {
            String columnLabel = DataExportUtils.createHumanReadableTableColumnLabel(
                    nonCoreColumn.getLabel(), dataExportCoreTable);
            // update the label property
            nonCoreColumn.updateLabel(columnLabel);
            uniqueColumns.put(nonCoreColumn.getName(), nonCoreColumn);
        }

        // copy the LinkedHashMap values into the ArrayList
        columns.addAll(uniqueColumns.values());

        dataExportEntityData = DataExportEntityData.newInstance(dataExportBaseEntity.getEntityName(),
                dataExportBaseEntity.getTableName(), datatables, columns);
    }
    return dataExportEntityData;
}
From source file:org.apache.asterix.app.translator.QueryTranslator.java
private static ARecordType createEnforcedType(ARecordType initialType, List<Index> indexes)
        throws AlgebricksException {
    ARecordType enforcedType = initialType;
    for (Index index : indexes) {
        if (!index.isSecondaryIndex() || !index.isEnforcingKeyFileds()) {
            continue;
        }
        if (index.hasMetaFields()) {
            throw new AlgebricksException("Indexing an open field is only supported on the record part");
        }
        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
            Deque<Pair<ARecordType, String>> nestedTypeStack = new ArrayDeque<>();
            List<String> splits = index.getKeyFieldNames().get(i);
            ARecordType nestedFieldType = enforcedType;
            boolean openRecords = false;
            String bridgeName = nestedFieldType.getTypeName();
            int j;
            // Build the stack for the enforced type
            for (j = 1; j < splits.size(); j++) {
                nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
                bridgeName = nestedFieldType.getTypeName();
                nestedFieldType = (ARecordType) enforcedType.getSubFieldType(splits.subList(0, j));
                if (nestedFieldType == null) {
                    openRecords = true;
                    break;
                }
            }
            if (openRecords) {
                // create the smallest record
                enforcedType = new ARecordType(splits.get(splits.size() - 2),
                        new String[] { splits.get(splits.size() - 1) },
                        new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) },
                        true);
                // create the open part of the nested field
                for (int k = splits.size() - 3; k > (j - 2); k--) {
                    enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
                            new IAType[] { AUnionType.createUnknownableType(enforcedType) }, true);
                }
                // bridge the gap
                Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
                ARecordType parent = gapPair.first;
                IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
                        new IAType[] { AUnionType.createUnknownableType(enforcedType) });
                enforcedType = new ARecordType(bridgeName,
                        ArrayUtils.addAll(parent.getFieldNames(), enforcedType.getTypeName()),
                        parentFieldTypes, true);
            } else {
                // The schema is closed all the way to the field;
                // enforced fields are either null or strongly typed.
                LinkedHashMap<String, IAType> recordNameTypesMap = createRecordNameTypeMap(nestedFieldType);
                // if an enforced field already exists, check that the type is correct
                IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
                if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
                        && ((AUnionType) enforcedFieldType).isUnknownableType()) {
                    enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
                }
                if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
                        index.getKeyFieldTypes().get(i).getTypeTag())) {
                    throw new AlgebricksException("Cannot enforce field " + index.getKeyFieldNames().get(i)
                            + " to have type " + index.getKeyFieldTypes().get(i));
                }
                if (enforcedFieldType == null) {
                    recordNameTypesMap.put(splits.get(splits.size() - 1),
                            AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
                }
                enforcedType = new ARecordType(nestedFieldType.getTypeName(),
                        recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
                        recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
                        nestedFieldType.isOpen());
            }
            // Create the enforced type for the nested fields in the schema, from the ground up
            while (!nestedTypeStack.isEmpty()) {
                Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
                ARecordType nestedRecType = nestedTypePair.first;
                IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
                nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedTypePair.second)] = enforcedType;
                enforcedType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
                        nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
            }
        }
    }
    return enforcedType;
}
From source file:com.dbmojo.QueryExecutor.java
/**
 * Execute a set of queries/updates encoded in JSON via <b>reqStr</b> in the
 * format <br><br><i>[{query:"select x from y",values:[]},{}...]
 * </i>.<br><br>The <b>update</b> flag determines whether or not to
 * treat each statement in the <b>reqStr</b> as an update or a query.
 */
public String execute(String reqStr, boolean update) throws Exception {
    if (DebugLog.enabled) {
        DebugLog.add(this, "Begin execute");
    }

    String message = "";
    ArrayList<HashMap> resultsList = new ArrayList<HashMap>();
    LinkedHashMap<String, PreparedStatement> bpstmts = null;
    Statement bstmt = null;

    try {
        this.open(update);
        if (update) {
            conn.setAutoCommit(false);
        }

        final JSONArray reqs = new JSONArray(reqStr);
        final boolean batchUpdates = reqs.length() > 1;

        // Connection MUST be ready to go
        if (this.conn == null) {
            throw new QueryExecutorException("Connection could not be checked out");
        }

        final int rLen = reqs.length();
        if (rLen <= 0) {
            throw new QueryExecutorException("No queries specified");
        }

        for (int r = 0; r < rLen; r++) {
            String rMessage = "";
            final JSONObject reqObj = reqs.getJSONObject(r);

            JSONArray tValues = reqObj.optJSONArray("values");
            String[] values = new String[(tValues != null ? tValues.length() : 0)];
            // Convert the JSONArray to a String[]
            for (int v = 0; v < values.length; v++) {
                values[v] = tValues.getString(v);
            }

            String query = reqObj.getString("query");
            final boolean prepared = values != null;

            // Can't move forward without a query!
            if (query == null || query.equals("")) {
                throw new QueryExecutorException("Query is missing");
            }

            // Here's where we need to do either an update or a query
            if (update) {
                if (batchUpdates) {
                    // This is NOT a prepared statement and we need to create a
                    // batch statement to add all non-prepared statements to
                    if (!prepared && bstmt == null) {
                        bstmt = conn.createStatement();
                    // This IS a prepared statement and we need to create an
                    // ordered map of prepared statements so we can execute
                    // these statements together in order (sort of...)
                    } else if (prepared && bpstmts == null) {
                        bpstmts = new LinkedHashMap<String, PreparedStatement>();
                    }
                    addBatchUpdate(this.conn, prepared, query, values, bstmt, bpstmts);
                } else {
                    // Single update query / prepared statement to execute
                    executeUpdate(this.conn, prepared, query, values);
                }
            } else {
                resultsList.add(executeQuery(this.conn, prepared, query, values));
            }
        }

        // Execute batch updates
        if (update && batchUpdates) {
            // Execute any prepared statement batches that have been gathered.
            // If we have an SQL error an exception will be thrown.
            if (bpstmts != null && bpstmts.size() > 0) {
                for (PreparedStatement p : bpstmts.values()) {
                    if (DebugLog.enabled) {
                        DebugLog.add(this, "Executing batch prepared statement");
                    }
                    p.executeBatch();
                }
            }
            // Execute all the standard SQL in a batch.
            // If we have an SQL error an exception will be thrown.
            if (bstmt != null) {
                if (DebugLog.enabled) {
                    DebugLog.add(this, "Executing batch statement");
                }
                bstmt.executeBatch();
            }
        }

        if (update) {
            this.conn.commit();
        }
    } catch (JSONException je) {
        // There was an error parsing the JSON
        final String err = je.toString();
        if (DebugLog.enabled) {
            DebugLog.add(this, err);
        }
        resultsList.add(Util.getError(err));
    } catch (Exception e) {
        if (this.conn == null) {
            // We couldn't connect to the DB
            final String err = e.toString();
            if (ErrorLog.enabled) {
                ErrorLog.add(this, err, false);
            }
            resultsList.add(Util.getError(err));
        } else if (update) {
            // There was an error executing the update(s)
            final String err = "Rolling Back Update(s): " + e;
            if (DebugLog.enabled) {
                DebugLog.add(this, err);
            }
            if (this.conn != null) {
                this.conn.rollback();
            }
            resultsList.add(Util.getError(err));
        } else {
            // There was an error executing the query
            final String err = e.toString();
            if (DebugLog.enabled) {
                DebugLog.add(this, err);
            }
            resultsList.add(Util.getError(err));
        }
    } finally {
        // Clean up batch statement (if applicable)
        if (bstmt != null) {
            try {
                if (DebugLog.enabled) {
                    DebugLog.add(this, "Closing batch statement");
                }
                bstmt.close();
            } catch (Exception se) {
                String err = "Error closing batch statement - " + se.toString();
                if (ErrorLog.enabled) {
                    ErrorLog.add(this, err, false);
                }
                resultsList.add(Util.getError(err));
            }
        }
        // Clean up the batch prepared statements (if applicable)
        if (bpstmts != null) {
            for (PreparedStatement p : bpstmts.values()) {
                try {
                    if (p != null) {
                        p.close();
                        if (DebugLog.enabled) {
                            DebugLog.add(this, "Closing batch prepared stmnt");
                        }
                    }
                } catch (Exception pse) {
                    String err = "Error closing batch prepared stmnt - " + pse.toString();
                    if (ErrorLog.enabled) {
                        ErrorLog.add(this, err, false);
                    }
                    resultsList.add(Util.getError(err));
                }
            }
        }
        if (DebugLog.enabled) {
            DebugLog.add(this, "Closing connection");
        }
        // Clean up DB connection (always applicable)
        this.conn.close();
    }

    if (DebugLog.enabled) {
        DebugLog.add(this, "End execute");
    }

    // UPDATE => [{message:"",status:"success"}]
    if (update && resultsList.size() <= 0) {
        HashMap pObj = new HashMap();
        pObj.put("message", "");
        pObj.put("status", "success");
        pObj.put("rows", new ArrayList());
        pObj.put("types", new ArrayList());
        pObj.put("cols", new ArrayList());
        resultsList.add(pObj);
    }

    // Serialize resultsList into JSON
    return serializeToJson(resultsList);
}