List of usage examples for java.util LinkedHashMap containsKey
boolean containsKey(Object key);
From source file:org.jamwiki.db.AnsiQueryHandler.java
/**
 * Retrieves all group RoleMap objects, aggregating every authority assigned
 * to a group into a single RoleMap per group.
 *
 * @return List of RoleMap objects, one per group, in result-set order
 *  (preserved by the LinkedHashMap).
 * @throws SQLException Thrown if any error occurs during method execution.
 */
public List<RoleMap> getRoleMapGroups() throws SQLException {
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = DatabaseConnection.getConnection();
        stmt = conn.prepareStatement(STATEMENT_SELECT_GROUPS_AUTHORITIES);
        rs = stmt.executeQuery();
        // LinkedHashMap keeps groups in the order the query returned them.
        LinkedHashMap<Integer, RoleMap> roleMaps = new LinkedHashMap<Integer, RoleMap>();
        while (rs.next()) {
            Integer groupId = rs.getInt("group_id");
            // Reuse the RoleMap already created for this group, if any;
            // otherwise initialize it once and register it in the map
            // (avoids the original per-row allocation and redundant put).
            RoleMap roleMap = roleMaps.get(groupId);
            if (roleMap == null) {
                roleMap = new RoleMap();
                roleMap.setGroupId(groupId);
                roleMap.setGroupName(rs.getString("group_name"));
                roleMaps.put(groupId, roleMap);
            }
            roleMap.addRole(rs.getString("authority"));
        }
        return new ArrayList<RoleMap>(roleMaps.values());
    } finally {
        DatabaseConnection.closeConnection(conn, stmt, rs);
    }
}
From source file:org.jamwiki.db.AnsiQueryHandler.java
/** * *///w w w. j a va 2 s .c om public List<RoleMap> getRoleMapByLogin(String loginFragment) throws SQLException { if (StringUtils.isBlank(loginFragment)) { return new ArrayList<RoleMap>(); } Connection conn = null; PreparedStatement stmt = null; ResultSet rs = null; try { conn = DatabaseConnection.getConnection(); stmt = conn.prepareStatement(STATEMENT_SELECT_AUTHORITIES_LOGIN); loginFragment = '%' + loginFragment.toLowerCase() + '%'; stmt.setString(1, loginFragment); rs = stmt.executeQuery(); LinkedHashMap<Integer, RoleMap> roleMaps = new LinkedHashMap<Integer, RoleMap>(); while (rs.next()) { Integer userId = rs.getInt("wiki_user_id"); RoleMap roleMap = new RoleMap(); if (roleMaps.containsKey(userId)) { roleMap = roleMaps.get(userId); } else { roleMap.setUserId(userId); roleMap.setUserLogin(rs.getString("username")); } roleMap.addRole(rs.getString("authority")); roleMaps.put(userId, roleMap); } return new ArrayList<RoleMap>(roleMaps.values()); } finally { DatabaseConnection.closeConnection(conn, stmt, rs); } }
From source file:org.oscarehr.casemgmt.web.CaseManagementEntryAction.java
/**
 * Prints a set of case-management notes (plus optional CPP summary, Rx data
 * and lab reports) for one demographic to a single PDF, streamed to {@code os}.
 * Individual PDFs are first written under DOCUMENT_DIR and then concatenated.
 *
 * @param request carries "notes2print" (comma-separated note ids, remote ones
 *  prefixed with "UUID"), plus "printCPP"/"printRx"/"printLabs" flags and
 *  optional "extPrint*" extension flags.
 * @param os destination stream for the final concatenated PDF.
 */
public void doPrint(HttpServletRequest request, OutputStream os) throws IOException, DocumentException {
    String ids = request.getParameter("notes2print");
    String demono = getDemographicNo(request);
    // Expose demographic details to the downstream PDF renderer via request attributes.
    request.setAttribute("demoName", getDemoName(demono));
    request.setAttribute("demoSex", getDemoSex(demono));
    request.setAttribute("demoAge", getDemoAge(demono));
    request.setAttribute("mrp", getMRP(request));
    String dob = getDemoDOB(demono);
    dob = convertDateFmt(dob, request);
    request.setAttribute("demoDOB", dob);
    String providerNo = getProviderNo(request);
    String[] noteIds;
    if (ids.length() > 0)
        noteIds = ids.split(",");
    else
        noteIds = (String[]) Array.newInstance(String.class, 0);
    List<CaseManagementNote> notes = new ArrayList<CaseManagementNote>();
    List<String> remoteNoteUUIDs = new ArrayList<String>();
    String uuid;
    // Split requested ids into local notes (loaded now) and remote UUIDs
    // (resolved below through the integrator, if enabled).
    for (int idx = 0; idx < noteIds.length; ++idx) {
        if (noteIds[idx].startsWith("UUID")) {
            uuid = noteIds[idx].substring(4);
            remoteNoteUUIDs.add(uuid);
        } else {
            notes.add(this.caseManagementMgr.getNote(noteIds[idx]));
        }
    }
    LoggedInInfo loggedInInfo = LoggedInInfo.loggedInInfo.get();
    // Fetch remote notes from the integrator and wrap each matching one as a
    // local "faked" CaseManagementNote so they print like local notes.
    if (loggedInInfo.currentFacility.isIntegratorEnabled() && remoteNoteUUIDs.size() > 0) {
        DemographicWs demographicWs = CaisiIntegratorManager.getDemographicWs();
        List<CachedDemographicNote> remoteNotes = demographicWs
                .getLinkedCachedDemographicNotes(ConversionUtils.fromIntString(demono));
        for (CachedDemographicNote remoteNote : remoteNotes) {
            for (String remoteUUID : remoteNoteUUIDs) {
                if (remoteUUID.equals(remoteNote.getCachedDemographicNoteCompositePk().getUuid())) {
                    CaseManagementNote fakeNote = getFakedNote(remoteNote);
                    notes.add(fakeNote);
                    break;
                }
            }
        }
    }
    // we're not guaranteed any ordering of notes given to us, so sort by observation date
    oscar.OscarProperties p = oscar.OscarProperties.getInstance();
    String noteSort = p.getProperty("CMESort", "");
    // "UP" means newest-first: sort ascending then reverse.
    if (noteSort.trim().equalsIgnoreCase("UP")) {
        Collections.sort(notes, CaseManagementNote.noteObservationDateComparator);
        Collections.reverse(notes);
    } else
        Collections.sort(notes, CaseManagementNote.noteObservationDateComparator);
    List<CaseManagementNote> issueNotes;
    List<CaseManagementNote> tmpNotes;
    HashMap<String, List<CaseManagementNote>> cpp = null;
    // Optionally gather CPP (cumulative patient profile) notes per issue code,
    // excluding locked notes and notes flagged with the "Hide Cpp"=1 extension.
    if (request.getParameter("printCPP").equalsIgnoreCase("true")) {
        cpp = new HashMap<String, List<CaseManagementNote>>();
        String[] issueCodes = { "OMeds", "SocHistory", "MedHistory", "Concerns", "Reminders", "FamHistory",
                "RiskFactors" };
        for (int j = 0; j < issueCodes.length; ++j) {
            List<Issue> issues = caseManagementMgr.getIssueInfoByCode(providerNo, issueCodes[j]);
            String[] issueIds = getIssueIds(issues);// = new String[issues.size()];
            tmpNotes = caseManagementMgr.getNotes(demono, issueIds);
            issueNotes = new ArrayList<CaseManagementNote>();
            for (int k = 0; k < tmpNotes.size(); ++k) {
                if (!tmpNotes.get(k).isLocked()) {
                    List<CaseManagementNoteExt> exts = caseManagementMgr.getExtByNote(tmpNotes.get(k).getId());
                    boolean exclude = false;
                    for (CaseManagementNoteExt ext : exts) {
                        if (ext.getKeyVal().equals("Hide Cpp")) {
                            if (ext.getValue().equals("1")) {
                                exclude = true;
                            }
                        }
                    }
                    if (!exclude) {
                        issueNotes.add(tmpNotes.get(k));
                    }
                }
            }
            cpp.put(issueCodes[j], issueNotes);
        }
    }
    String demoNo = null;
    List<CaseManagementNote> othermeds = null;
    // Optionally include "other meds"; reuse the CPP "OMeds" list when it was
    // already built above, otherwise query just that issue code.
    if (request.getParameter("printRx").equalsIgnoreCase("true")) {
        demoNo = demono;
        if (cpp == null) {
            List<Issue> issues = caseManagementMgr.getIssueInfoByCode(providerNo, "OMeds");
            String[] issueIds = getIssueIds(issues);// new String[issues.size()];
            othermeds = caseManagementMgr.getNotes(demono, issueIds);
        } else {
            othermeds = cpp.get("OMeds");
        }
    }
    // Create new file to save form to
    String path = OscarProperties.getInstance().getProperty("DOCUMENT_DIR");
    String fileName = path + "EncounterForm-" + UtilDateUtilities.getToday("yyyy-MM-dd.hh.mm.ss") + ".pdf";
    // NOTE(review): 'out' is never explicitly closed here -- presumably
    // printer.finish() closes the underlying stream; confirm.
    FileOutputStream out = new FileOutputStream(fileName);
    CaseManagementPrintPdf printer = new CaseManagementPrintPdf(request, out);
    printer.printDocHeaderFooter();
    printer.printCPP(cpp);
    printer.printRx(demoNo, othermeds);
    printer.printNotes(notes);
    /* check extensions: any request parameter named extPrint* with value
     * "true" resolves to a Spring ExtPrint bean that appends extra content. */
    Enumeration<String> e = request.getParameterNames();
    while (e.hasMoreElements()) {
        String name = e.nextElement();
        if (name.startsWith("extPrint")) {
            if (request.getParameter(name).equals("true")) {
                ExtPrint printBean = (ExtPrint) SpringUtils.getBean(name);
                if (printBean != null) {
                    printBean.printExt(printer, request);
                }
            }
        }
    }
    printer.finish();
    List<Object> pdfDocs = new ArrayList<Object>();
    pdfDocs.add(fileName);
    if (request.getParameter("printLabs") != null && request.getParameter("printLabs").equalsIgnoreCase("true")) {
        // get the labs which fall into the date range which are attached to this patient
        CommonLabResultData comLab = new CommonLabResultData();
        ArrayList<LabResultData> labs = comLab.populateLabResultsData("", demono, "", "", "", "U");
        // De-duplicate HL7 text labs by accession number + lab type, keeping
        // the first occurrence; labs with no accession number get a unique
        // synthetic key so none are dropped. LinkedHashMap preserves order.
        LinkedHashMap<String, LabResultData> accessionMap = new LinkedHashMap<String, LabResultData>();
        for (int i = 0; i < labs.size(); i++) {
            LabResultData result = labs.get(i);
            if (result.isHL7TEXT()) {
                if (result.accessionNumber == null || result.accessionNumber.equals("")) {
                    accessionMap.put("noAccessionNum" + i + result.labType, result);
                } else {
                    if (!accessionMap.containsKey(result.accessionNumber + result.labType))
                        accessionMap.put(result.accessionNumber + result.labType, result);
                }
            }
        }
        // Render each unique lab to its own PDF and queue it for concatenation.
        for (LabResultData result : accessionMap.values()) {
            Date d = result.getDateObj();
            // TODO:filter out the ones which aren't in our date range if there's a date range????
            String segmentId = result.segmentID;
            MessageHandler handler = Factory.getHandler(segmentId);
            String fileName2 = OscarProperties.getInstance().getProperty("DOCUMENT_DIR") + "//"
                    + handler.getPatientName().replaceAll("\\s", "_") + "_" + handler.getMsgDate()
                    + "_LabReport.pdf";
            // NOTE(review): 'os2' is not explicitly closed -- presumably
            // LabPDFCreator.printPdf() closes it; confirm.
            OutputStream os2 = new FileOutputStream(fileName2);
            LabPDFCreator pdfCreator = new LabPDFCreator(os2, segmentId,
                    LoggedInInfo.loggedInInfo.get().loggedInProvider.getProviderNo());
            pdfCreator.printPdf();
            pdfDocs.add(fileName2);
        }
    }
    // Merge everything (encounter form + lab reports) into the caller's stream.
    ConcatPDF.concat(pdfDocs, os);
}
From source file:org.jamwiki.db.AnsiQueryHandler.java
/** * *//*from w ww . j a v a 2 s. c o m*/ public List<RoleMap> getRoleMapByRole(String authority, boolean includeInheritedRoles) throws SQLException { Connection conn = null; PreparedStatement stmt = null; ResultSet rs = null; try { conn = DatabaseConnection.getConnection(); if (includeInheritedRoles) { stmt = conn.prepareStatement(STATEMENT_SELECT_AUTHORITIES_AUTHORITY_ALL); stmt.setString(1, authority); stmt.setString(2, authority); stmt.setString(3, authority); stmt.setString(4, authority); } else { stmt = conn.prepareStatement(STATEMENT_SELECT_AUTHORITIES_AUTHORITY); stmt.setString(1, authority); stmt.setString(2, authority); } rs = stmt.executeQuery(); LinkedHashMap<String, RoleMap> roleMaps = new LinkedHashMap<String, RoleMap>(); while (rs.next()) { int userId = rs.getInt("wiki_user_id"); int groupId = rs.getInt("group_id"); RoleMap roleMap = new RoleMap(); String key = userId + "|" + groupId; if (roleMaps.containsKey(key)) { roleMap = roleMaps.get(key); } else { if (userId > 0) { roleMap.setUserId(userId); roleMap.setUserLogin(rs.getString("username")); } if (groupId > 0) { roleMap.setGroupId(groupId); roleMap.setGroupName(rs.getString("group_name")); } } String roleName = rs.getString("authority"); if (roleName != null) { roleMap.addRole(roleName); } // roleMap.addRole(rs.getString("authority")); roleMaps.put(key, roleMap); } return new ArrayList<RoleMap>(roleMaps.values()); } finally { DatabaseConnection.closeConnection(conn, stmt, rs); } }
From source file:mondrian.rolap.RolapSchemaLoader.java
private String newTableAlias(RolapSchema.PhysRelation physRelation, LinkedHashMap<String, RolapSchema.PhysRelation> tbls) { String alias = physRelation.getAlias(); int i = 1;//from ww w . j av a 2 s . c om while (true) { String candidateAlias = alias + "_" + i; if (!tbls.containsKey(candidateAlias)) { return candidateAlias; } i++; } }
From source file:ubic.gemma.web.controller.expression.experiment.DEDVController.java
/**
 * Prepares the given VisualizationValueObject for front-end display, using
 * the factors and factor values from the supplied layouts.
 *
 * <p>Populates two structures on {@code vvo}: a per-factor summary of value
 * names to legend colours (factorNames), and a per-sample list of
 * factor-name -> [value, colour] pairs (factorValuesToNames).
 *
 * @param vvo Note: This will be modified! It will be updated with the
 *  factorNames and factorValuesToNames.
 * @param eeLayouts per-assay map of factor -> value (measurement for
 *  continuous factors, factor-value id for categorical ones); if null or
 *  empty both fields on vvo are cleared and the method returns.
 */
private void prepareFactorsForFrontEndDisplay(VisualizationValueObject vvo,
        LinkedHashMap<BioAssayValueObject, LinkedHashMap<ExperimentalFactor, Double>> eeLayouts) {
    if (eeLayouts == null || eeLayouts.isEmpty()) {
        log.warn("No layouts, bail");
        vvo.setFactorNames(null);
        vvo.setFactorValuesToNames(null);
        return;
    }
    LinkedHashSet<ExperimentalFactor> factorNames = getFactorNames(eeLayouts);
    // colours for conditions/factor values bar chart FIXME make continuous maps different.
    Map<ExperimentalFactor, Queue<String>> factorColoursMap = createFactorNameToColoursMap(factorNames);
    String missingValueColour = "#DCDCDC";
    Random random = new Random();
    // factorName -> (factor value string -> colour); built up across samples.
    LinkedHashMap<String, LinkedHashMap<String, String>> factorToValueNames = new LinkedHashMap<>();
    // list of maps with entries: key = factorName, value=array of factor values
    // 1 entry per sample
    List<LinkedHashMap<String, String[]>> factorValueMaps = new ArrayList<>();
    Collection<String> factorsMissingValues = new HashSet<>();
    // if same sample was run more than once on diff platforms.
    Collection<BioMaterialValueObject> seenSamples = new HashSet<>();
    Map<Long, FactorValue> fvs = new HashMap<>(); // avoid loading repeatedly.
    // NOTE(review): seenFactors is checked below but never added to, so the
    // contains() check is always false and fvs is refilled per sample -- confirm
    // whether an add() call is missing.
    Collection<ExperimentalFactor> seenFactors = new HashSet<>();
    for (BioAssayValueObject ba : eeLayouts.keySet()) {
        // Process each biomaterial only once even if assayed on several platforms.
        if (seenSamples.contains(ba.getSample())) {
            continue;
        }
        seenSamples.add(ba.getSample());
        // double should be the factorValue id, defined in
        // ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(ExpressionExperiment,
        // BioAssayDimension)
        LinkedHashMap<ExperimentalFactor, Double> factorMap = eeLayouts.get(ba);
        LinkedHashMap<String, String[]> factorNamesToValueColourPairs = new LinkedHashMap<>(factorNames.size());
        // this is defensive, should only come into play when there's something messed up with the data.
        // for every factor, add a missing-value entry (guards against missing data messing up the layout)
        for (ExperimentalFactor factor : factorNames) {
            String[] facValAndColour = new String[] { "No value", missingValueColour };
            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);
        }
        // for each experimental factor, store the name and value
        for (Entry<ExperimentalFactor, Double> pair : factorMap.entrySet()) {
            ExperimentalFactor factor = pair.getKey();
            Double valueOrId = pair.getValue();
            /*
             * the double is only a double because it is meant to hold measurements when the factor is continuous if
             * the factor is categorical, the double value is set to the value's id see
             * ubic.gemma.core.visualization.ExperimentalDesignVisualizationService.getExperimentalDesignLayout(
             * ExpressionExperiment, BioAssayDimension)
             */
            if (valueOrId == null || factor.getType() == null
                    || (factor.getType().equals(FactorType.CATEGORICAL) && factor.getFactorValues().isEmpty())) {
                factorsMissingValues.add(getUniqueFactorName(factor));
                continue;
            }
            // Cache this categorical factor's values by id for getFacValsStr.
            if (!seenFactors.contains(factor) && factor.getType().equals(FactorType.CATEGORICAL)) {
                for (FactorValue fv : factor.getFactorValues()) {
                    fvs.put(fv.getId(), fv);
                }
            }
            String facValsStr = getFacValsStr(fvs, factor, valueOrId);
            if (!factorToValueNames.containsKey(getUniqueFactorName(factor))) {
                factorToValueNames.put(getUniqueFactorName(factor), new LinkedHashMap<String, String>());
            }
            // assign colour if unassigned or fetch it if already assigned
            String colourString = "";
            if (!factorToValueNames.get(getUniqueFactorName(factor)).containsKey(facValsStr)) {
                if (factorColoursMap.containsKey(factor)) {
                    colourString = factorColoursMap.get(factor).poll();
                }
                if (colourString == null || Objects.equals(colourString, "")) { // ran out of predefined colours
                    colourString = getRandomColour(random);
                }
                factorToValueNames.get(getUniqueFactorName(factor)).put(facValsStr, colourString);
            } else {
                colourString = factorToValueNames.get(getUniqueFactorName(factor)).get(facValsStr);
            }
            String[] facValAndColour = new String[] { facValsStr, colourString };
            factorNamesToValueColourPairs.put(getUniqueFactorName(factor), facValAndColour);
        }
        factorValueMaps.add(factorNamesToValueColourPairs);
    }
    // add missing value entries here so they show up at the end of the legend's value lists
    if (!factorsMissingValues.isEmpty()) {
        for (String factorName : factorsMissingValues) {
            if (!factorToValueNames.containsKey(factorName)) {
                factorToValueNames.put(factorName, new LinkedHashMap<String, String>());
            }
            factorToValueNames.get(factorName).put("No value", missingValueColour);
        }
    }
    vvo.setFactorNames(factorToValueNames); // this is summary of values & colours by factor, used for legend
    vvo.setFactorValuesToNames(factorValueMaps); // this is list of maps for each sample
}
From source file:com.espertech.esper.filter.FilterSpecCompiler.java
/**
 * Attempts to compile an in/not-in expression into an optimizable filter
 * parameter (IN_LIST_OF_VALUES / NOT_IN_LIST_OF_VALUES).
 *
 * <p>Each value child must resolve to a constant, a context property, or an
 * event property from another stream. Returns null (caller falls back to a
 * boolean expression) when the left side is not optimizable, a value is a
 * Collection/Map/array context property, or not every child could be
 * resolved (checked via the expected-constant count at the end).
 *
 * @param constituent the in/not-in expression node
 * @param arrayEventTypes tagged array event types by stream name, used to
 *  detect indexed-event property references
 * @param exprEvaluatorContext context for evaluating constant children
 * @param statementName statement name, recorded in indexed-event values
 * @return the compiled filter parameter, or null if not optimizable
 * @throws ExprValidationException if a numeric coercion is required but impossible
 */
private static FilterSpecParam handleInSetNode(ExprInNode constituent,
        LinkedHashMap<String, Pair<EventType, String>> arrayEventTypes,
        ExprEvaluatorContext exprEvaluatorContext, String statementName) throws ExprValidationException {
    ExprNode left = constituent.getChildNodes()[0];
    if (!(left instanceof ExprFilterOptimizableNode)) {
        return null;
    }
    ExprFilterOptimizableNode filterOptimizableNode = (ExprFilterOptimizableNode) left;
    FilterSpecLookupable lookupable = filterOptimizableNode.getFilterLookupable();
    FilterOperator op = FilterOperator.IN_LIST_OF_VALUES;
    if (constituent.isNotIn()) {
        op = FilterOperator.NOT_IN_LIST_OF_VALUES;
    }
    // Count of value children; used at the end to verify every child resolved.
    int expectedNumberOfConstants = constituent.getChildNodes().length - 1;
    List<FilterSpecParamInValue> listofValues = new ArrayList<FilterSpecParamInValue>();
    Iterator<ExprNode> it = Arrays.asList(constituent.getChildNodes()).iterator();
    it.next(); // ignore the first node as it's the identifier
    while (it.hasNext()) {
        ExprNode subNode = it.next();
        if (ExprNodeUtility.isConstantValueExpr(subNode)) {
            ExprConstantNode constantNode = (ExprConstantNode) subNode;
            Object constant = constantNode.evaluate(null, true, exprEvaluatorContext);
            // Collection/Map constants cannot be optimized into a value list.
            if (constant instanceof Collection) {
                return null;
            }
            if (constant instanceof Map) {
                return null;
            }
            if ((constant != null) && (constant.getClass().isArray())) {
                // An array constant expands to one value per element; bump the
                // expected count for every element beyond the first.
                for (int i = 0; i < Array.getLength(constant); i++) {
                    Object arrayElement = Array.get(constant, i);
                    Object arrayElementCoerced = handleConstantsCoercion(lookupable, arrayElement);
                    listofValues.add(new InSetOfValuesConstant(arrayElementCoerced));
                    if (i > 0) {
                        expectedNumberOfConstants++;
                    }
                }
            } else {
                constant = handleConstantsCoercion(lookupable, constant);
                listofValues.add(new InSetOfValuesConstant(constant));
            }
        }
        if (subNode instanceof ExprContextPropertyNode) {
            ExprContextPropertyNode contextPropertyNode = (ExprContextPropertyNode) subNode;
            Class returnType = contextPropertyNode.getType();
            if (JavaClassHelper.isImplementsInterface(contextPropertyNode.getType(), Collection.class)
                    || JavaClassHelper.isImplementsInterface(contextPropertyNode.getType(), Map.class)) {
                return null;
            }
            // NOTE(review): returnType.getClass() is java.lang.Class, which is
            // never an array type -- this looks like it was meant to be
            // returnType.isArray(); confirm intent before relying on it.
            if ((returnType != null) && (returnType.getClass().isArray())) {
                return null;
            }
            SimpleNumberCoercer coercer = getNumberCoercer(left.getExprEvaluator().getType(),
                    contextPropertyNode.getType(), lookupable.getExpression());
            listofValues.add(new InSetOfValuesContextProp(contextPropertyNode.getPropertyName(),
                    contextPropertyNode.getGetter(), coercer));
        }
        if (subNode instanceof ExprIdentNode) {
            ExprIdentNode identNodeInner = (ExprIdentNode) subNode;
            if (identNodeInner.getStreamId() == 0) {
                break; // for same event evals use the boolean expression, via count compare failing below
            }
            boolean isMustCoerce = false;
            Class numericCoercionType = JavaClassHelper.getBoxedType(lookupable.getReturnType());
            if (identNodeInner.getExprEvaluator().getType() != lookupable.getReturnType()) {
                if (JavaClassHelper.isNumeric(lookupable.getReturnType())) {
                    if (!JavaClassHelper.canCoerce(identNodeInner.getExprEvaluator().getType(),
                            lookupable.getReturnType())) {
                        throwConversionError(identNodeInner.getExprEvaluator().getType(),
                                lookupable.getReturnType(), lookupable.getExpression());
                    }
                    isMustCoerce = true;
                } else {
                    break; // assumed not compatible
                }
            }
            FilterSpecParamInValue inValue;
            String streamName = identNodeInner.getResolvedStreamName();
            // Tagged array event types need an indexed-event property value.
            if (arrayEventTypes != null && !arrayEventTypes.isEmpty()
                    && arrayEventTypes.containsKey(streamName)) {
                Pair<Integer, String> indexAndProp = getStreamIndex(identNodeInner.getResolvedPropertyName());
                inValue = new InSetOfValuesEventPropIndexed(identNodeInner.getResolvedStreamName(),
                        indexAndProp.getFirst(), indexAndProp.getSecond(), isMustCoerce, numericCoercionType,
                        statementName);
            } else {
                inValue = new InSetOfValuesEventProp(identNodeInner.getResolvedStreamName(),
                        identNodeInner.getResolvedPropertyName(), isMustCoerce, numericCoercionType);
            }
            listofValues.add(inValue);
        }
    }
    // Fallback if not all values in the in-node can be resolved to properties or constants
    if (listofValues.size() == expectedNumberOfConstants) {
        return new FilterSpecParamIn(lookupable, op, listofValues);
    }
    return null;
}
From source file:com.twinsoft.convertigo.engine.migration.Migration7_0_0.java
/**
 * Migrates a Convertigo project to the 7.0.0 schema layout.
 *
 * <p>Copies the project's XSD files into the project xsd directory, then —
 * when a legacy project-level .xsd exists — loads it, rewrites web-service
 * transactions (doc/literal and RPC response QNames), extracts a standalone
 * per-transaction schema file, collects project references, and migrates
 * requestable variables (including SQL query placeholders {id} -> {{id}}).
 * Finally adds collected references to the project and deletes the legacy
 * project-level .xsd/.wsdl files. Errors are logged, never rethrown.
 *
 * @param projectName name of the project to migrate
 */
public static void migrate(final String projectName) {
    try {
        Map<String, Reference> referenceMap = new HashMap<String, Reference>();
        XmlSchema projectSchema = null;
        Project project = Engine.theApp.databaseObjectsManager.getOriginalProjectByName(projectName, false);
        // Copy all xsd files to project's xsd directory
        File destDir = new File(project.getXsdDirPath());
        copyXsdOfProject(projectName, destDir);
        String projectWsdlFilePath = Engine.PROJECTS_PATH + "/" + projectName + "/" + projectName + ".wsdl";
        File wsdlFile = new File(projectWsdlFilePath);
        String projectXsdFilePath = Engine.PROJECTS_PATH + "/" + projectName + "/" + projectName + ".xsd";
        File xsdFile = new File(projectXsdFilePath);
        if (xsdFile.exists()) {
            // Load project schema from old XSD file
            XmlSchemaCollection collection = new XmlSchemaCollection();
            // Custom resolver: "../otherProject/..." locations refer to sibling
            // c8o projects; already-migrated ones are served from the schema
            // manager instead of disk.
            collection.setSchemaResolver(new DefaultURIResolver() {
                public InputSource resolveEntity(String targetNamespace, String schemaLocation, String baseUri) {
                    // Case of a c8o project location
                    if (schemaLocation.startsWith("../") && schemaLocation.endsWith(".xsd")) {
                        try {
                            String targetProjectName = schemaLocation.substring(3,
                                    schemaLocation.indexOf("/", 3));
                            File pDir = new File(Engine.PROJECTS_PATH + "/" + targetProjectName);
                            if (pDir.exists()) {
                                File pFile = new File(Engine.PROJECTS_PATH + schemaLocation.substring(2));
                                // Case c8o project is already migrated
                                if (!pFile.exists()) {
                                    // Serialize the in-memory schema of the target
                                    // project and hand it back as an InputSource.
                                    Document doc = Engine.theApp.schemaManager
                                            .getSchemaForProject(targetProjectName).getSchemaDocument();
                                    DOMSource source = new DOMSource(doc);
                                    StringWriter writer = new StringWriter();
                                    StreamResult result = new StreamResult(writer);
                                    TransformerFactory.newInstance().newTransformer().transform(source, result);
                                    StringReader reader = new StringReader(writer.toString());
                                    return new InputSource(reader);
                                }
                            }
                            return null;
                        } catch (Exception e) {
                            Engine.logDatabaseObjectManager
                                    .warn("[Migration 7.0.0] Unable to find schema location \"" + schemaLocation
                                            + "\"", e);
                            return null;
                        }
                    } else if (schemaLocation.indexOf("://") == -1 && schemaLocation.endsWith(".xsd")) {
                        // Relative non-project location: resolve against this project's directory.
                        return super.resolveEntity(targetNamespace, schemaLocation,
                                Engine.PROJECTS_PATH + "/" + projectName);
                    }
                    return super.resolveEntity(targetNamespace, schemaLocation, baseUri);
                }
            });
            projectSchema = SchemaUtils.loadSchema(new File(projectXsdFilePath), collection);
            ConvertigoError.updateXmlSchemaObjects(projectSchema);
            SchemaMeta.setCollection(projectSchema, collection);
            for (Connector connector : project.getConnectorsList()) {
                for (Transaction transaction : connector.getTransactionsList()) {
                    try {
                        // Migrate transaction in case of a Web Service consumption project
                        if (transaction instanceof XmlHttpTransaction) {
                            XmlHttpTransaction xmlHttpTransaction = (XmlHttpTransaction) transaction;
                            String reqn = xmlHttpTransaction.getResponseElementQName();
                            if (!reqn.equals("")) {
                                // No ';' separator means a plain element ref (doc/literal).
                                boolean useRef = reqn.indexOf(";") == -1;
                                // Doc/Literal case
                                if (useRef) {
                                    try {
                                        String[] qn = reqn.split(":");
                                        QName refName = new QName(
                                                projectSchema.getNamespaceContext().getNamespaceURI(qn[0]),
                                                qn[1]);
                                        xmlHttpTransaction.setXmlElementRefAffectation(new XmlQName(refName));
                                    } catch (Exception e) {
                                        // NOTE(review): intentionally best-effort; a malformed
                                        // QName leaves the transaction unchanged.
                                    }
                                }
                                // RPC case: "opName;eltName;prefix:type" is rewritten to
                                // "opName;eltName;{namespaceURI}localPart".
                                else {
                                    int index, index2;
                                    try {
                                        index = reqn.indexOf(";");
                                        String opName = reqn.substring(0, index);
                                        if ((index2 = reqn.indexOf(";", index + 1)) != -1) {
                                            String eltName = reqn.substring(index + 1, index2);
                                            String eltType = reqn.substring(index2 + 1);
                                            String[] qn = eltType.split(":");
                                            QName typeName = new QName(
                                                    projectSchema.getNamespaceContext().getNamespaceURI(qn[0]),
                                                    qn[1]);
                                            String responseElementQName = opName + ";" + eltName + ";" + "{"
                                                    + typeName.getNamespaceURI() + "}"
                                                    + typeName.getLocalPart();
                                            xmlHttpTransaction.setResponseElementQName(responseElementQName);
                                        }
                                    } catch (Exception e) {
                                        // NOTE(review): best-effort, as above.
                                    }
                                }
                            }
                        }
                        // Retrieve required XmlSchemaObjects for transaction
                        QName requestQName = new QName(project.getTargetNamespace(),
                                transaction.getXsdRequestElementName());
                        QName responseQName = new QName(project.getTargetNamespace(),
                                transaction.getXsdResponseElementName());
                        LinkedHashMap<QName, XmlSchemaObject> map = new LinkedHashMap<QName, XmlSchemaObject>();
                        XmlSchemaWalker dw = XmlSchemaWalker.newDependencyWalker(map, true, false);
                        dw.walkByElementRef(projectSchema, requestQName);
                        dw.walkByElementRef(projectSchema, responseQName);
                        // Create transaction schema
                        String targetNamespace = projectSchema.getTargetNamespace();
                        String prefix = projectSchema.getNamespaceContext().getPrefix(targetNamespace);
                        XmlSchema transactionSchema = SchemaUtils.createSchema(prefix, targetNamespace,
                                XsdForm.unqualified.name(), XsdForm.unqualified.name());
                        // Add required prefix declarations
                        List<String> nsList = new LinkedList<String>();
                        for (QName qname : map.keySet()) {
                            String nsURI = qname.getNamespaceURI();
                            if (!nsURI.equals(Constants.URI_2001_SCHEMA_XSD)) {
                                if (!nsList.contains(nsURI)) {
                                    nsList.add(nsURI);
                                }
                            }
                            String nsPrefix = qname.getPrefix();
                            if (!nsURI.equals(targetNamespace)) {
                                NamespaceMap nsMap = SchemaUtils.getNamespaceMap(transactionSchema);
                                if (nsMap.getNamespaceURI(nsPrefix) == null) {
                                    nsMap.add(nsPrefix, nsURI);
                                    transactionSchema.setNamespaceContext(nsMap);
                                }
                            }
                        }
                        // Add required imports
                        for (String namespaceURI : nsList) {
                            XmlSchemaObjectCollection includes = projectSchema.getIncludes();
                            for (int i = 0; i < includes.getCount(); i++) {
                                XmlSchemaObject xmlSchemaObject = includes.getItem(i);
                                if (xmlSchemaObject instanceof XmlSchemaImport) {
                                    if (((XmlSchemaImport) xmlSchemaObject).getNamespace()
                                            .equals(namespaceURI)) {
                                        // do not allow import with same ns !
                                        if (namespaceURI.equals(project.getTargetNamespace()))
                                            continue;
                                        String location = ((XmlSchemaImport) xmlSchemaObject)
                                                .getSchemaLocation();
                                        // This is a convertigo project reference
                                        if (location.startsWith("../")) {
                                            // Copy all xsd files to xsd directory
                                            String targetProjectName = location.substring(3,
                                                    location.indexOf("/", 3));
                                            copyXsdOfProject(targetProjectName, destDir);
                                        }
                                        // Add reference
                                        addReferenceToMap(referenceMap, namespaceURI, location);
                                        // Add import
                                        addImport(transactionSchema, namespaceURI, location);
                                    }
                                }
                            }
                        }
                        QName responseTypeQName = new QName(project.getTargetNamespace(),
                                transaction.getXsdResponseTypeName());
                        // Add required schema objects
                        for (QName qname : map.keySet()) {
                            if (qname.getNamespaceURI().equals(targetNamespace)) {
                                XmlSchemaObject ob = map.get(qname);
                                // ConvertigoError objects are added separately below.
                                if (qname.getLocalPart().startsWith("ConvertigoError"))
                                    continue;
                                transactionSchema.getItems().add(ob);
                                // Add missing response error element and attributes
                                if (qname.equals(responseTypeQName)) {
                                    Transaction.addSchemaResponseObjects(transactionSchema,
                                            (XmlSchemaComplexType) ob);
                                }
                            }
                        }
                        // Add missing ResponseType (with document)
                        if (map.containsKey(responseTypeQName)) {
                            Transaction.addSchemaResponseType(transactionSchema, transaction);
                        }
                        // Add everything
                        if (map.isEmpty()) {
                            Transaction.addSchemaObjects(transactionSchema, transaction);
                        }
                        // Add c8o error objects (for internal xsd edition only)
                        ConvertigoError.updateXmlSchemaObjects(transactionSchema);
                        // Save schema to file
                        String transactionXsdFilePath = transaction.getSchemaFilePath();
                        new File(transaction.getSchemaFileDirPath()).mkdirs();
                        SchemaUtils.saveSchema(transactionXsdFilePath, transactionSchema);
                    } catch (Exception e) {
                        Engine.logDatabaseObjectManager
                                .error("[Migration 7.0.0] An error occured while migrating transaction \""
                                        + transaction.getName() + "\"", e);
                    }
                    // Variable migration runs even when the schema migration above failed.
                    if (transaction instanceof TransactionWithVariables) {
                        TransactionWithVariables transactionVars = (TransactionWithVariables) transaction;
                        handleRequestableVariable(transactionVars.getVariablesList());
                        // Change SQLQuery variables : i.e. {id} --> {{id}}
                        if (transaction instanceof SqlTransaction) {
                            String sqlQuery = ((SqlTransaction) transaction).getSqlQuery();
                            sqlQuery = sqlQuery.replaceAll("\\{([a-zA-Z0-9_]+)\\}", "{{$1}}");
                            ((SqlTransaction) transaction).setSqlQuery(sqlQuery);
                        }
                    }
                }
            }
        } else {// Should only happen for projects which version <= 4.6.0
            // No legacy XSD: build a fresh empty project schema and only
            // migrate requestable variables.
            XmlSchemaCollection collection = new XmlSchemaCollection();
            String prefix = project.getName() + "_ns";
            projectSchema = SchemaUtils.createSchema(prefix, project.getNamespaceUri(),
                    XsdForm.unqualified.name(), XsdForm.unqualified.name());
            ConvertigoError.addXmlSchemaObjects(projectSchema);
            SchemaMeta.setCollection(projectSchema, collection);
            for (Connector connector : project.getConnectorsList()) {
                for (Transaction transaction : connector.getTransactionsList()) {
                    if (transaction instanceof TransactionWithVariables) {
                        TransactionWithVariables transactionVars = (TransactionWithVariables) transaction;
                        handleRequestableVariable(transactionVars.getVariablesList());
                    }
                }
            }
        }
        // Handle sequence objects
        for (Sequence sequence : project.getSequencesList()) {
            handleSteps(projectSchema, referenceMap, sequence.getSteps());
            handleRequestableVariable(sequence.getVariablesList());
        }
        // Add all references to project
        if (!referenceMap.isEmpty()) {
            for (Reference reference : referenceMap.values())
                project.add(reference);
        }
        // Delete XSD file
        if (xsdFile.exists())
            xsdFile.delete();
        // Delete WSDL file
        if (wsdlFile.exists())
            wsdlFile.delete();
    } catch (Exception e) {
        Engine.logDatabaseObjectManager
                .error("[Migration 7.0.0] An error occured while migrating project \"" + projectName + "\"", e);
    }
}
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java
/** Creates a mapping for the bucket - columnar elements
 * ALSO INCLUDES THE PER-FIELD CONFIGURATION FROM THE SEARCH_INDEX_SCHEMA AND TEMPORAL_SCHEMA
 *
 * <p>Builds an Elasticsearch mapping fragment: exact field names become
 * "properties" entries, wildcard/type matches become "dynamic_templates"
 * entries. Include/exclude lists from the columnar schema select which
 * fields get the "enabled" (doc-values) templates vs the defaults.
 *
 * @param bucket the data bucket whose schemas drive the mapping
 * @param to_embed optional builder to append to; a fresh object is started otherwise
 * @param field_lookups per-field (name or name/type pair) JSON config
 * @param enabled_not_analyzed / enabled_analyzed templates for columnar-enabled fields
 * @param default_not_analyzed / default_analyzed templates for all other fields
 * @param doc_schema optional extra raw properties merged in first
 * @param search_index_schema_override per-field search index overrides
 * @param mapper JSON object mapper
 * @param index_type the ES index type name
 * @return the builder with "properties" and "dynamic_templates" appended,
 *  or null on (in-practice-impossible) IOException
 */
public static XContentBuilder getColumnarMapping(final DataBucketBean bucket, Optional<XContentBuilder> to_embed,
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups,
        final JsonNode enabled_not_analyzed, final JsonNode enabled_analyzed,
        final JsonNode default_not_analyzed, final JsonNode default_analyzed,
        final Optional<JsonNode> doc_schema,
        final SearchIndexSchemaDefaultBean search_index_schema_override, final ObjectMapper mapper,
        final String index_type) {
    try {
        final XContentBuilder start = to_embed.orElse(XContentFactory.jsonBuilder().startObject());
        // Columnar is on unless the schema explicitly disables it.
        final boolean columnar_enabled = Optional.ofNullable(bucket.data_schema())
                .map(DataSchemaBean::columnar_schema).filter(s -> Optional.ofNullable(s.enabled()).orElse(true))
                .isPresent();
        final Map<Either<String, Tuple2<String, String>>, String> type_override = Optionals
                .of(() -> bucket.data_schema().search_index_schema().type_override()).map(m -> buildTypeMap(m))
                .orElse(Collections.emptyMap());
        // If no columnar settings are specified then go with a sensible default
        final Optional<DataSchemaBean.ColumnarSchemaBean> maybe_user_columnar_schema = Optionals
                .of(() -> bucket.data_schema().columnar_schema());
        final DataSchemaBean.ColumnarSchemaBean columnar_schema = maybe_user_columnar_schema
                .filter(__ -> columnar_enabled)
                .filter(schema -> (null == schema.field_include_list()) && // ie the entire thing is empty
                        (null == schema.field_exclude_list()) && (null == schema.field_include_pattern_list())
                        && (null == schema.field_type_include_list())
                        && (null == schema.field_exclude_pattern_list())
                        && (null == schema.field_type_exclude_list()))
                .map(schema -> BeanTemplateUtils.clone(schema)
                        .with(DataSchemaBean.ColumnarSchemaBean::field_type_include_list,
                                Arrays.asList("string", "number", "date"))
                        .done())
                .orElseGet(() -> maybe_user_columnar_schema.orElse(null)) // (NOTE: can only be null if columnar_enabled is false)
        ;
        // Merge all include/exclude lookups (field names, patterns, types) in
        // priority order; toMap keeps the first entry on key collisions.
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups_pretypes = Stream
                .of(columnar_enabled
                        ? createFieldIncludeLookups(
                                Optionals.ofNullable(columnar_schema.field_include_list()).stream(),
                                fn -> getKey(fn), field_lookups, enabled_not_analyzed, enabled_analyzed, true,
                                search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldExcludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_exclude_list()).stream(),
                                        fn -> getKey(fn), field_lookups, search_index_schema_override,
                                        type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldIncludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_include_pattern_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                                        enabled_not_analyzed, enabled_analyzed, true,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldIncludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_type_include_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T("*", fn)), field_lookups,
                                        enabled_not_analyzed, enabled_analyzed, true,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldExcludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_exclude_pattern_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldExcludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_type_exclude_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T("*", fn)), field_lookups,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        // Finally add the default columnar lookups to the unmentioned strings (ensures that *_* is at the end)
                        field_lookups.entrySet().stream()
                                .flatMap(kv -> createFieldIncludeLookups(Stream.of(kv.getKey().toString()),
                                        __ -> kv.getKey(), field_lookups, default_not_analyzed,
                                        default_analyzed, false, search_index_schema_override, type_override,
                                        mapper, index_type)))
                .flatMap(x -> x)
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2(), (v1, v2) -> v1, // (ie ignore duplicates)
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));
        ;
        // Also any types that didn't map onto one of the fields or tokens:
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups_types = type_override
                .entrySet().stream()
                // (filter - convert name/* to name/type and check if I've already created such an entry using the type map)
                .filter(kv -> !column_lookups_pretypes
                        .containsKey(kv.getKey().either(s -> s, t2 -> Tuples._2T(t2._1(), kv.getValue()))))
                .flatMap(kv -> createFieldIncludeLookups(Stream.of(kv.getKey().toString()),
                        __ -> kv.getKey().<Either<String, Tuple2<String, String>>>either(s -> Either.left(s),
                                t2 -> Either.right(Tuples._2T(t2._1(), kv.getValue()))),
                        field_lookups, default_not_analyzed, default_analyzed, false,
                        search_index_schema_override, type_override, mapper, index_type))
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2(), (v1, v2) -> v1,
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));
        // Combine both lookup maps, sorted so more specific matches come first.
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups = Stream
                .concat(column_lookups_pretypes.entrySet().stream(), column_lookups_types.entrySet().stream())
                .sorted((a, b) -> Integer.compare(sortKey(a.getKey()), sortKey(b.getKey())))
                .collect(Collectors.toMap(t2 -> t2.getKey(), t2 -> t2.getValue(), (v1, v2) -> v1,
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));
        // NOTE(review): toString().getBytes() below uses the platform default
        // charset -- assumed UTF-8 in deployment; confirm.
        final XContentBuilder properties = column_lookups.entrySet().stream()
                // properties not dynamic_templates
                .filter(kv -> kv.getKey().isLeft())
                // overwrite with version built using columns if it exists
                .map(kv -> Tuples._2T(kv.getKey(), column_lookups.getOrDefault(kv.getKey(), kv.getValue())))
                .reduce(Optional.of(start.startObject("properties"))
                        // add doc_schema if it exists
                        .map(props -> doc_schema
                                .map(ds -> Optionals.streamOf(ds.fields(), false)
                                        .reduce(props,
                                                Lambdas.wrap_u((acc, kv) -> acc.rawField(kv.getKey(),
                                                        kv.getValue().toString().getBytes())),
                                                (acc1, acc2) -> acc1 // shouldn't be possible
                                        )).orElse(props)).get(),
                        Lambdas.wrap_u((acc, t2) -> acc.rawField(t2._1().left().value(),
                                t2._2().toString().getBytes())), // (left by construction)
                        (acc1, acc2) -> acc1) // (not actually possible)
                .endObject();
        final XContentBuilder templates = column_lookups.entrySet().stream()
                // properties not dynamic_templates
                .filter(kv -> kv.getKey().isRight())
                // overwrite with version built using columns if it exists
                .map(kv -> Tuples._2T(kv.getKey(), column_lookups.getOrDefault(kv.getKey(), kv.getValue())))
                .reduce(properties.startArray("dynamic_templates"),
                        Lambdas.wrap_u((acc, t2) -> acc.startObject()
                                .rawField(getFieldNameFromMatchPair(t2._1().right().value()),
                                        t2._2().toString().getBytes()) // (right by construction)
                                .endObject()),
                        (acc1, acc2) -> acc1) // (not actually possible)
                .endArray();
        return templates;
    } catch (IOException e) {
        // Handle in-practice-impossible "IOException"
        return null;
    }
}
From source file:com.sonicle.webtop.calendar.CalendarManager.java
@Override
public Map<Integer, ShareFolderCalendar> listIncomingCalendarFolders(String rootShareId) throws WTException {
    // Collects the shared (incoming) calendar folders visible under the given share root,
    // preserving the cache's folder ordering. When the same folder is reachable through
    // more than one share, the folder/element permissions are merged into the first entry.
    CoreManager core = WT.getCoreManager(getTargetProfileId());
    LinkedHashMap<Integer, ShareFolderCalendar> result = new LinkedHashMap<>();
    for (Integer folderId : shareCache.getFolderIdsByShareRoot(rootShareId)) {
        String shareFolderId = shareCache.getShareFolderIdByFolderId(folderId);
        // Skip folders with no share mapping
        if (StringUtils.isBlank(shareFolderId)) continue;
        SharePermsFolder folderPerms = core.getShareFolderPermissions(shareFolderId);
        SharePermsElements elementPerms = core.getShareElementsPermissions(shareFolderId);
        if (!result.containsKey(folderId)) {
            // First sighting: materialize the calendar and create the entry (skip if missing)
            Calendar calendar = getCalendar(folderId);
            if (calendar != null) {
                result.put(folderId, new ShareFolderCalendar(shareFolderId, folderPerms, elementPerms, calendar));
            }
        } else {
            // Seen before: widen the existing entry's permissions with this share's grants
            ShareFolderCalendar existing = result.get(folderId);
            if (existing != null) {
                existing.getPerms().merge(folderPerms);
                existing.getElementsPerms().merge(elementPerms);
            }
        }
    }
    return result;
}