List of usage examples for java.util.LinkedHashMap.keySet()
public Set<K> keySet()
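The returned set is a view of the map's keys in the order entries were inserted (or in access order, if the map was constructed with accessOrder = true). A minimal standalone sketch with made-up data:

import java.util.LinkedHashMap;
import java.util.Map;

public class KeySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);

        // Unlike HashMap, iteration follows insertion order.
        for (String key : map.keySet()) {
            System.out.println(key + " -> " + map.get(key));
        }
        // Prints: first -> 1, second -> 2, third -> 3 (in that order)
    }
}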
From source file:service.EventService.java
public void eventAppointSaveAll(Long campaignId, Long cabinetId, Long[] userIdArray, String[] clientNumArray) {
    if (userIdArray != null && clientNumArray != null) {
        LinkedHashMap<Long, Integer> userIdCountAssignedMap = new LinkedHashMap<>();
        List<Event> events = getUnassignedEvent(campaignId, cabinetId);
        List<Event> eventsForUpdate = new ArrayList<>();
        PersonalCabinet pk = personalCabinetDao.find(cabinetId);
        int summClient = 0;
        if (userIdArray.length > 0 && events.size() > 0 && clientNumArray.length > 0) {
            for (int i = 0; i < userIdArray.length; i++) {
                // Guard against clientNumArray being shorter than userIdArray
                if (clientNumArray.length > i) {
                    int count = StringAdapter.toInteger(clientNumArray[i]);
                    summClient += count;
                    userIdCountAssignedMap.put(userIdArray[i], count);
                } else {
                    userIdCountAssignedMap.put(userIdArray[i], 0);
                }
            }
            int sindx = 0;
            if (summClient <= events.size()) {
                // keySet() preserves insertion order, so users are served in the order given
                for (Long userId : userIdCountAssignedMap.keySet()) {
                    Integer eventsCountToAssign = userIdCountAssignedMap.get(userId);
                    User user = userDao.getUserBelongsPk(pk, userId);
                    if (user != null) {
                        for (int supCount = 0; supCount < eventsCountToAssign; supCount++) {
                            Event ev = events.get(sindx);
                            if (ev != null) {
                                ev.setUser(user);
                                ev.setStatus(Event.ASSIGNED);
                                if (validate(ev)) {
                                    eventsForUpdate.add(ev);
                                    sindx++;
                                }
                            }
                        }
                    } else {
                        // Message reconstructed; the original (Cyrillic) wording was lost in encoding
                        addError("User not found, id: " + userId);
                    }
                }
                for (Event ev : eventsForUpdate) {
                    eventDao.update(ev);
                    User u = ev.getUser();
                    // Comment prefix reconstructed; the original (Cyrillic) wording was lost in encoding
                    addEventComment("Assigned to " + u.getShortName() + " (" + u.getEmail() + ")",
                            EventComment.ASSIGN, ev, cabinetId);
                }
            } else {
                // Message reconstructed; the original (Cyrillic) wording was lost in encoding
                addError("Requested " + summClient + " clients but only " + events.size()
                        + " unassigned events are available");
            }
        }
    }
}
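The example above walks userIdCountAssignedMap.keySet() and then calls get(userId) for each key. When both key and value are needed, iterating entrySet() avoids the second lookup; a minimal sketch with hypothetical ids and counts:

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetSketch {
    public static void main(String[] args) {
        Map<Long, Integer> userIdCountAssignedMap = new LinkedHashMap<>();
        userIdCountAssignedMap.put(101L, 3); // hypothetical user id -> assignment count
        userIdCountAssignedMap.put(102L, 2);

        // Same insertion-order traversal as keySet(), without a get() per key
        for (Map.Entry<Long, Integer> entry : userIdCountAssignedMap.entrySet()) {
            Long userId = entry.getKey();
            int eventsCountToAssign = entry.getValue();
            System.out.println("assign " + eventsCountToAssign + " events to user " + userId);
        }
    }
}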
From source file:gr.seab.r2rml.test.ComplianceTests.java
@Test
public void testAll() {
    log.info("test all");
    LinkedHashMap<String, String[]> tests = new LinkedHashMap<String, String[]>();
    tests.put("D000-1table1column0rows", new String[] { "r2rml.ttl" });
    tests.put("D001-1table1column1row", new String[] { "r2rmla.ttl", "r2rmlb.ttl" });
    tests.put("D002-1table2columns1row", new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl", "r2rmld.ttl",
            "r2rmle.ttl", "r2rmlf.ttl", "r2rmlg.ttl", "r2rmlh.ttl", "r2rmli.ttl", "r2rmlj.ttl" });
    tests.put("D003-1table3columns1row", new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl" });
    tests.put("D004-1table2columns1row", new String[] { "r2rmla.ttl", "r2rmlb.ttl" });
    tests.put("D005-1table3columns3rows2duplicates", new String[] { "r2rmla.ttl", "r2rmlb.ttl" });
    tests.put("D006-1table1primarykey1column1row", new String[] { "r2rmla.ttl" });
    tests.put("D007-1table1primarykey2columns1row", new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl",
            "r2rmld.ttl", "r2rmle.ttl", "r2rmlf.ttl", "r2rmlg.ttl", "r2rmlh.ttl" });
    tests.put("D008-1table1compositeprimarykey3columns1row",
            new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl" });
    tests.put("D009-2tables1primarykey1foreignkey",
            new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl", "r2rmld.ttl" });
    tests.put("D010-1table1primarykey3colums3rows", new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl" });
    tests.put("D011-M2MRelations", new String[] { "r2rmla.ttl", "r2rmlb.ttl" });
    tests.put("D012-2tables2duplicates0nulls",
            new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl", "r2rmld.ttl", "r2rmle.ttl" });
    tests.put("D013-1table1primarykey3columns2rows1nullvalue", new String[] { "r2rmla.ttl" });
    tests.put("D014-3tables1primarykey1foreignkey",
            new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl", "r2rmld.ttl" });
    tests.put("D015-1table3columns1composityeprimarykey3rows2languages",
            new String[] { "r2rmla.ttl", "r2rmlb.ttl" });
    tests.put("D016-1table1primarykey10columns3rowsSQLdatatypes",
            new String[] { "r2rmla.ttl", "r2rmlb.ttl", "r2rmlc.ttl", "r2rmld.ttl", "r2rmle.ttl" });
    tests.put("D017-I18NnoSpecialChars", new String[] {});
    tests.put("D018-1table1primarykey2columns3rows", new String[] { "r2rmla.ttl" });
    tests.put("D019-1table1primarykey3columns3rows", new String[] { "r2rmla.ttl", "r2rmlb.ttl" });
    tests.put("D020-1table1column5rows", new String[] { "r2rmla.ttl", "r2rmlb.ttl" });
    tests.put("D021-2tables2primarykeys1foreignkeyReferencesAllNulls", new String[] {});
    tests.put("D022-2tables1primarykey1foreignkeyReferencesNoPrimaryKey", new String[] {});
    tests.put("D023-2tables2primarykeys2foreignkeysReferencesToNon-primarykeys", new String[] {});
    tests.put("D024-2tables2primarykeys1foreignkeyToARowWithSomeNulls", new String[] {});
    tests.put("D025-3tables3primarykeys3foreignkeys", new String[] {});

    ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext("test-context.xml");
    int counter = 0;
    // keySet() iterates the test folders in the order they were registered above
    for (String key : tests.keySet()) {
        if (counter > 2 && counter < 26) {
            String folder = "src/test/resources/postgres/" + key + "/";
            initialiseSourceDatabase(folder + "create.sql");
            for (String mappingFile : tests.get(key)) {
                // Override property file
                Parser parser = (Parser) context.getBean("parser");
                Properties p = parser.getProperties();
                mappingFile = folder + mappingFile;
                if (new File(mappingFile).exists()) {
                    p.setProperty("mapping.file", mappingFile);
                } else {
                    log.error("File " + mappingFile + " does not exist.");
                }
                p.setProperty("jena.destinationFileName",
                        mappingFile.substring(0, mappingFile.indexOf(".") + 1) + "nq");
                parser.setProperties(p);
                MappingDocument mappingDocument = parser.parse();
                Generator generator = (Generator) context.getBean("generator");
                generator.setProperties(parser.getProperties());
                generator.setResultModel(parser.getResultModel());
                log.info("--- generating " + p.getProperty("jena.destinationFileName") + " from " + mappingFile
                        + " ---");
                generator.createTriples(mappingDocument);
            }
        }
        counter++;
    }
    context.close();
}
From source file:org.apache.asterix.app.translator.QueryTranslator.java
private static ARecordType createEnforcedType(ARecordType initialType, List<Index> indexes)
        throws AlgebricksException {
    ARecordType enforcedType = initialType;
    for (Index index : indexes) {
        if (!index.isSecondaryIndex() || !index.isEnforcingKeyFileds()) {
            continue;
        }
        if (index.hasMetaFields()) {
            throw new AlgebricksException("Indexing an open field is only supported on the record part");
        }
        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
            Deque<Pair<ARecordType, String>> nestedTypeStack = new ArrayDeque<>();
            List<String> splits = index.getKeyFieldNames().get(i);
            ARecordType nestedFieldType = enforcedType;
            boolean openRecords = false;
            String bridgeName = nestedFieldType.getTypeName();
            int j;
            // Build the stack for the enforced type
            for (j = 1; j < splits.size(); j++) {
                nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
                bridgeName = nestedFieldType.getTypeName();
                nestedFieldType = (ARecordType) enforcedType.getSubFieldType(splits.subList(0, j));
                if (nestedFieldType == null) {
                    openRecords = true;
                    break;
                }
            }
            if (openRecords) {
                // Create the smallest record
                enforcedType = new ARecordType(splits.get(splits.size() - 2),
                        new String[] { splits.get(splits.size() - 1) },
                        new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) }, true);
                // Create the open part of the nested field
                for (int k = splits.size() - 3; k > (j - 2); k--) {
                    enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
                            new IAType[] { AUnionType.createUnknownableType(enforcedType) }, true);
                }
                // Bridge the gap
                Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
                ARecordType parent = gapPair.first;
                IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
                        new IAType[] { AUnionType.createUnknownableType(enforcedType) });
                enforcedType = new ARecordType(bridgeName,
                        ArrayUtils.addAll(parent.getFieldNames(), enforcedType.getTypeName()), parentFieldTypes,
                        true);
            } else {
                // The schema is closed all the way to the field;
                // enforced fields are either null or strongly typed
                LinkedHashMap<String, IAType> recordNameTypesMap = createRecordNameTypeMap(nestedFieldType);
                // If an enforced field already exists, check that the type is correct
                IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
                if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
                        && ((AUnionType) enforcedFieldType).isUnknownableType()) {
                    enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
                }
                if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
                        index.getKeyFieldTypes().get(i).getTypeTag())) {
                    throw new AlgebricksException("Cannot enforce field " + index.getKeyFieldNames().get(i)
                            + " to have type " + index.getKeyFieldTypes().get(i));
                }
                if (enforcedFieldType == null) {
                    recordNameTypesMap.put(splits.get(splits.size() - 1),
                            AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
                }
                // keySet() and values() iterate in the same (insertion) order,
                // so the field-name and field-type arrays stay aligned
                enforcedType = new ARecordType(nestedFieldType.getTypeName(),
                        recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
                        recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
                        nestedFieldType.isOpen());
            }
            // Create the enforced type for the nested fields in the schema, from the ground up
            while (!nestedTypeStack.isEmpty()) {
                Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
                ARecordType nestedRecType = nestedTypePair.first;
                IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
                nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedTypePair.second)] = enforcedType;
                enforcedType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
                        nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
            }
        }
    }
    return enforcedType;
}
From source file:net.jradius.client.gui.JRadiusSimulator.java
private void addAttributesToTable(DefaultMutableTreeNode node, Map<String, Class<?>> map, boolean skipVSA) {
    LinkedHashMap<String, String> attributeList = new LinkedHashMap<String, String>();
    for (Map.Entry<String, Class<?>> entry : map.entrySet()) {
        String type = entry.getKey();
        Class<?> clazz = entry.getValue();
        try {
            RadiusAttribute attribute = (RadiusAttribute) clazz.newInstance();
            if (!skipVSA || (!(attribute instanceof VSAttribute) && attribute.getType() <= 255)) {
                String attributeName = attribute.getAttributeName();
                if (attributeName.equals("Vendor-Specific"))
                    continue;
                if (attributeName.startsWith("X-Ascend-"))
                    continue;
                attributeList.put(type, attributeName);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    // Copy the keys (supplied by keySet() in insertion order) and sort them
    LinkedList<String> list = new LinkedList<String>(attributeList.keySet());
    Collections.sort(list);
    for (String type : list) {
        node.add(new DefaultMutableTreeNode(attributeList.get(type)));
    }
}
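The example above copies keySet() into a LinkedList and sorts it before reading the values back. If only the sorted traversal matters, copying the map into a TreeMap is an equivalent sketch (the entries below are made up):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class SortedKeysSketch {
    public static void main(String[] args) {
        Map<String, String> attributeList = new LinkedHashMap<>();
        attributeList.put("b-type", "User-Name");        // hypothetical entries
        attributeList.put("a-type", "Acct-Status-Type");

        // TreeMap orders entries by key, replacing the LinkedList + Collections.sort() step
        for (Map.Entry<String, String> e : new TreeMap<>(attributeList).entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}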
From source file:com.gtwm.pb.model.manageData.DataManagement.java
public int globalEdit(HttpServletRequest request, TableInfo table, LinkedHashMap<BaseField, BaseValue> dataToSave,
        SessionDataInfo sessionData, List<FileItem> multipartItems)
        throws InputRecordException, ObjectNotFoundException, SQLException, CodingErrorException,
        CantDoThatException, DisallowedException, MissingParametersException {
    int affectedFieldCount = dataToSave.size();
    for (BaseField affectedField : dataToSave.keySet()) {
        if (affectedField.getFieldName().equals(HiddenFields.LAST_MODIFIED.getFieldName())) {
            affectedFieldCount--;
        }
        if (affectedField.getFieldName().equals(HiddenFields.MODIFIED_BY.getFieldName())) {
            affectedFieldCount--;
        }
    }
    if (affectedFieldCount > 1) {
        throw new CantDoThatException(
                "Global edits can only apply changes to one field at a time. Requested field changes were "
                        + dataToSave);
    }
    Connection conn = null;
    Set<Integer> rowIds = new HashSet<Integer>();
    BaseReportInfo sessionReport = sessionData.getReport();
    BaseField primaryKey = table.getPrimaryKey();
    try {
        conn = this.dataSource.getConnection();
        conn.setAutoCommit(false);
        ReportDataInfo reportData = new ReportData(conn, sessionReport, false, false);
        // Generates a SELECT DISTINCT on the primary key, including
        // filterValues & rowLimits in the WHERE clause
        Map<BaseField, Boolean> emptySorts = new HashMap<BaseField, Boolean>();
        Map<BaseField, String> filterValues = sessionData.getReportFilterValues();
        PreparedStatement statement = reportData.getReportSqlPreparedStatement(conn, filterValues, false,
                emptySorts, -1, primaryKey, QuickFilterType.AND, false);
        ResultSet results = statement.executeQuery();
        while (results.next()) {
            Integer item = results.getInt(1);
            if (item != null) {
                rowIds.add(item);
            }
        }
        results.close();
        statement.close();
    } catch (SQLException sqlex) {
        // Catch the exception thrown when the field is not included
        // in the report, and simply return an empty tree
        logger.warn(sqlex.toString() + ". Probably occurred because field " + this + " isn't in report "
                + sessionReport + ", in which case it's nothing to worry about");
    } finally {
        if (conn != null) {
            conn.close();
        }
    }
    this.saveRecord(request, table, dataToSave, false, rowIds, sessionData, multipartItems);
    return rowIds.size();
}
From source file:org.deri.iris.rules.compiler.RuleCompiler.java
/**
 * Compile a rule body (or query). The literals are compiled in the order
 * given. However, if a literal cannot be compiled, because one or more of
 * its variables are not bound from the preceding literals, then it is
 * skipped and re-tried later.
 *
 * @param bodyLiterals The list of literals to compile
 * @return The compiled rule elements.
 * @throws EvaluationException If a rule construct can not be compiled (e.g.
 *             a built-in has constructed terms)
 */
private LinkedHashMap<RuleElement, ILiteral> compileBody(Collection<ILiteral> bodyLiterals)
        throws EvaluationException {
    List<ILiteral> literals = new ArrayList<ILiteral>(bodyLiterals);
    LinkedHashMap<RuleElement, ILiteral> elements = new LinkedHashMap<RuleElement, ILiteral>();
    List<IVariable> previousVariables = new ArrayList<IVariable>();
    while (elements.size() < bodyLiterals.size()) {
        EvaluationException lastException = null;
        boolean added = false;
        for (int l = 0; l < literals.size(); ++l) {
            ILiteral literal = literals.get(l);
            IAtom atom = literal.getAtom();
            boolean positive = literal.isPositive();
            RuleElement element;
            try {
                if (atom instanceof IBuiltinAtom) {
                    IBuiltinAtom builtinAtom = (IBuiltinAtom) atom;
                    boolean constructedTerms = false;
                    for (ITerm term : atom.getTuple()) {
                        if (term instanceof IConstructedTerm) {
                            constructedTerms = true;
                            break;
                        }
                    }
                    if (constructedTerms)
                        element = new BuiltinForConstructedTermArguments(previousVariables, builtinAtom, positive,
                                mEquivalentTerms, mConfiguration);
                    else
                        element = new Builtin(atom.getPredicate(), atom.getTuple(), previousVariables, builtinAtom,
                                positive, mEquivalentTerms, mConfiguration);
                } else {
                    IPredicate predicate = atom.getPredicate();
                    IRelation relation = mFacts.get(predicate);
                    ITuple viewCriteria = atom.getTuple();
                    if (positive) {
                        if (previousVariables.size() == 0) {
                            // First sub-goal
                            element = new FirstSubgoal(predicate, relation, viewCriteria, mEquivalentTerms,
                                    mConfiguration);
                        } else {
                            element = new Joiner(previousVariables, predicate, relation, viewCriteria,
                                    mEquivalentTerms, mConfiguration.indexFactory, mConfiguration.relationFactory);
                        }
                    } else {
                        // This *is* allowed to be the first literal for rules such as:
                        //   p('a') :- not q('b')
                        // or even:
                        //   p('a') :- not q(?X)
                        element = new Differ(previousVariables, relation, viewCriteria, mEquivalentTerms,
                                mConfiguration);
                    }
                }
                previousVariables = element.getOutputVariables();
                elements.put(element, literals.get(l));
                literals.remove(l);
                added = true;
                break;
            } catch (EvaluationException e) {
                // Oh dear. Store the exception and try the next literal.
                lastException = e;
            }
        }
        if (!added) {
            // No more literals, so the last error really was serious.
            throw lastException;
        }
    }
    if (elements.size() > 0) {
        // Scan keySet() to find the last (most recently inserted) element
        RuleElement lastElement = null;
        for (RuleElement element : elements.keySet()) {
            lastElement = element;
        }
        assert lastElement != null;
        RuleElement element = new EquivalenceResolver(lastElement.getOutputVariables(), mEquivalentTerms,
                mConfiguration);
        elements.put(element, null);
    }
    return elements;
}
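The final loop above scans keySet() just to reach the most recently inserted element, since LinkedHashMap exposes no direct accessor for its last entry. A minimal sketch of that idiom in isolation:

import java.util.LinkedHashMap;

public class LastKeySketch {
    // LinkedHashMap has no getLast(), so the last key is found by a full scan
    static <K, V> K lastKey(LinkedHashMap<K, V> map) {
        K last = null;
        for (K key : map.keySet()) {
            last = key; // overwritten until the final, most recently inserted key
        }
        return last;
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
        map.put("a", 1);
        map.put("b", 2);
        System.out.println(lastKey(map)); // prints "b"
    }
}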
From source file:au.org.theark.core.dao.DataExtractionDao.java
public File createGenoCSV(Search search, DataExtractionVO devo, FieldCategory fieldCategory,
        Long maxProcessesPerPipeline, Map<Long, Long> maxInputList, Map<Long, Long> maxOutputList) {
    final String tempDir = System.getProperty("java.io.tmpdir");
    String filename = "GENO.csv";
    final java.io.File file = new File(tempDir, filename);
    List<LinkedExtractionVO> genoData = devo.getGenoData();
    OutputStream outputStream;
    try {
        outputStream = new FileOutputStream(file);
        CsvWriter csv = new CsvWriter(outputStream);

        // Header
        csv.write("SUBJECTUID");
        // csv.write("RECORD_DATE_TIME"); ??
        csv.write(Constants.GENO_FIELDS_PIPELINE_ID);
        csv.write(Constants.GENO_FIELDS_PIPELINE_NAME);
        csv.write(Constants.GENO_FIELDS_PIPELINE_DECSRIPTION);
        for (int processIndex = 1; processIndex <= maxProcessesPerPipeline; processIndex++) { // one-based, humanized
            // Process
            csv.write(Constants.GENO_FIELDS_PROCESS_ID + "_forProcess_" + processIndex);
            csv.write(Constants.GENO_FIELDS_PROCESS_NAME + "_forProcess_" + processIndex);
            csv.write(Constants.GENO_FIELDS_PROCESS_DESCRIPTION + "_forProcess_" + processIndex);
            csv.write(Constants.GENO_FIELDS_PROCESS_START_TIME + "_forProcess_" + processIndex);
            csv.write(Constants.GENO_FIELDS_PROCESS_END_TIME + "_forProcess_" + processIndex);
            // Command
            csv.write(Constants.GENO_FIELDS_PROCESS_COMMAND_SERVER_URL + "_" + processIndex);
            csv.write(Constants.GENO_FIELDS_PROCESS_COMMAND_NAME + "_" + processIndex);
            csv.write(Constants.GENO_FIELDS_PROCESS_COMMAND_LOCATION + "_" + processIndex);
            // csv.write(Constants.GENO_FIELDS_PROCESS_COMMAND_INPUT_FILE_FORMAT);
            // csv.write(Constants.GENO_FIELDS_PROCESS_COMMAND_OUTPUT_FILE_FORMAT);

            // Inputs and outputs: each process can have more than one of each
            long maxInputsForThisProcess = 0L;
            if (maxInputList != null && maxInputList.get(new Long(processIndex)) != null) {
                maxInputsForThisProcess = maxInputList.get(new Long(processIndex)).longValue();
            }
            long maxOutputsForThisProcess = 0L;
            if (maxOutputList != null && maxOutputList.get(new Long(processIndex)) != null) {
                maxOutputsForThisProcess = maxOutputList.get(new Long(processIndex)).longValue();
            }
            long inputIndex = 1L;
            while (inputIndex <= maxInputsForThisProcess) {
                csv.write(Constants.GENO_FIELDS_PROCESS_INPUT_SERVER + "_" + inputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_INPUT_LOCATION + "_" + inputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_HASH + "_" + inputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_TYPE + "_" + inputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_INPUT_KEPT + "_" + inputIndex + "_forProcess_" + processIndex);
                inputIndex++;
            }
            long outputIndex = 1L;
            while (outputIndex <= maxOutputsForThisProcess) {
                csv.write(Constants.GENO_FIELDS_PROCESS_OUTPUT_SERVER + "_" + outputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_OUTPUT_LOCATION + "_" + outputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_HASH + "_" + outputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_TYPE + "_" + outputIndex + "_forProcess_" + processIndex);
                csv.write(Constants.GENO_FIELDS_PROCESS_OUTPUT_KEPT + "_" + outputIndex + "_forProcess_" + processIndex);
                outputIndex++;
            }
        }
        csv.endLine();

        // Now for the actual data
        for (LinkedExtractionVO evo : genoData) {
            if (evo != null) {
                csv.write(evo.getSubjectUid());
                LinkedHashMap<String, String> keyValues = evo.getKeyValues();
                // TODO: the key order is built up in the study DAO; check that the values
                // always match the order of the header columns written above
                for (String key : keyValues.keySet()) {
                    log.info(key + keyValues.get(key));
                }
                csv.write(keyValues.get(Constants.GENO_FIELDS_PIPELINE_ID));
                csv.write(keyValues.get(Constants.GENO_FIELDS_PIPELINE_NAME));
                csv.write(keyValues.get(Constants.GENO_FIELDS_PIPELINE_DECSRIPTION));
                for (int processIndex = 1; processIndex <= maxProcessesPerPipeline; processIndex++) { // one-based, humanized
                    if (processIndex == 1) {
                        // Process (the first process's keys carry no index suffix)
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_ID) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_ID));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_NAME) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_NAME));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_DESCRIPTION) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_DESCRIPTION));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_START_TIME) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_START_TIME));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_END_TIME) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_END_TIME));
                        // Command
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_SERVER_URL) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_SERVER_URL));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_NAME) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_NAME));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_LOCATION) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_LOCATION));
                        long maxInputsForThisProcess = 0L;
                        if (maxInputList != null && maxInputList.get(new Long(processIndex)) != null) {
                            maxInputsForThisProcess = maxInputList.get(new Long(processIndex));
                        }
                        long inputIndex = 1L;
                        while (inputIndex <= maxInputsForThisProcess) {
                            // Inputs: each process can have more than one
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_SERVER + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_SERVER + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_LOCATION + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_LOCATION + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_HASH + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_HASH + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_TYPE + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_TYPE + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_KEPT + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_KEPT + "_" + processIndex + "_" + inputIndex));
                            inputIndex++;
                        }
                        long maxOutputsForThisProcess = 0L;
                        if (maxOutputList != null && maxOutputList.get(new Long(processIndex)) != null) {
                            maxOutputsForThisProcess = maxOutputList.get(new Long(processIndex));
                        }
                        for (long outputIndex = 1L; outputIndex <= maxOutputsForThisProcess; outputIndex++) {
                            // Outputs: each process can have more than one
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_SERVER + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_SERVER + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_LOCATION + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_LOCATION + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_HASH + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_HASH + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_TYPE + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_TYPE + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_KEPT + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_KEPT + "_" + processIndex + "_" + outputIndex));
                        }
                        log.info("\n\nprocess=" + processIndex + " \n max in=" + maxInputsForThisProcess
                                + " \n max outs=" + maxOutputsForThisProcess + "\nequation="
                                + (inputIndex < maxInputsForThisProcess));
                    } else {
                        // Process
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_ID + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_ID + "_" + processIndex));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_NAME + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_NAME + "_" + processIndex));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_DESCRIPTION + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_DESCRIPTION + "_" + processIndex));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_START_TIME + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_START_TIME + "_" + processIndex));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_END_TIME + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_END_TIME + "_" + processIndex));
                        // Command
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_SERVER_URL + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_SERVER_URL + "_" + processIndex));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_NAME + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_NAME + "_" + processIndex));
                        csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_LOCATION + "_" + processIndex) == null) ? ""
                                : keyValues.get(Constants.GENO_FIELDS_PROCESS_COMMAND_LOCATION + "_" + processIndex));
                        long maxInputsForThisProcess = 0L;
                        if (maxInputList != null && maxInputList.get(new Long(processIndex)) != null) {
                            maxInputsForThisProcess = maxInputList.get(new Long(processIndex));
                        }
                        long inputIndex = 1L;
                        while (inputIndex <= maxInputsForThisProcess) {
                            log.info("pr=" + processIndex + " in=" + inputIndex);
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_SERVER + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_SERVER + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_LOCATION + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_LOCATION + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_HASH + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_HASH + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_TYPE + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_FILE_TYPE + "_" + processIndex + "_" + inputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_KEPT + "_" + processIndex + "_" + inputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_INPUT_KEPT + "_" + processIndex + "_" + inputIndex));
                            inputIndex++;
                        }
                        long maxOutputsForThisProcess = 0L;
                        if (maxOutputList != null && maxOutputList.get(new Long(processIndex)) != null) {
                            maxOutputsForThisProcess = maxOutputList.get(new Long(processIndex));
                        }
                        log.info("\n\nprocess=" + processIndex + " \n max in=" + maxInputsForThisProcess
                                + " \n max outs=" + maxOutputsForThisProcess + "\nequation="
                                + (inputIndex < maxInputsForThisProcess));
                        for (long outputIndex = 1; outputIndex <= maxOutputsForThisProcess; outputIndex++) {
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_SERVER + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_SERVER + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_LOCATION + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_LOCATION + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_HASH + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_HASH + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_TYPE + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_FILE_TYPE + "_" + processIndex + "_" + outputIndex));
                            csv.write((keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_KEPT + "_" + processIndex + "_" + outputIndex) == null) ? ""
                                    : keyValues.get(Constants.GENO_FIELDS_PROCESS_OUTPUT_KEPT + "_" + processIndex + "_" + outputIndex));
                        }
                    }
                }
            } else {
                // Not sure if we need this: write out a line with no values
                // (no data existed for the subject in question)
            }
            csv.endLine();
        }
        csv.close();
    } catch (FileNotFoundException e) {
        log.error(e.getMessage());
    }
    return file;
}
From source file:org.jumpmind.db.platform.AbstractDdlBuilder.java
/**
 * Processes the changes to the structure of tables.
 *
 * @param currentModel
 *            The current database schema
 * @param desiredModel
 *            The desired database schema
 * @param changes
 *            The change objects
 */
protected void processTableStructureChanges(Database currentModel, Database desiredModel,
        Collection<TableChange> changes, StringBuilder ddl) {
    filterChanges(changes);
    LinkedHashMap<String, List<TableChange>> changesPerTable = new LinkedHashMap<String, List<TableChange>>();
    LinkedHashMap<String, List<ForeignKey>> unchangedFKs = new LinkedHashMap<String, List<ForeignKey>>();
    boolean caseSensitive = delimitedIdentifierModeOn;

    // We first sort the changes for the tables; however, since the changes
    // might contain source or target tables, we use the names rather than
    // the table objects
    for (TableChange change : changes) {
        String name = change.getChangedTable().getName();
        if (!caseSensitive) {
            name = name.toUpperCase();
        }
        List<TableChange> changesForTable = changesPerTable.get(name);
        if (changesForTable == null) {
            changesForTable = new ArrayList<TableChange>();
            changesPerTable.put(name, changesForTable);
            unchangedFKs.put(name, getUnchangedForeignKeys(currentModel, desiredModel, name));
        }
        changesForTable.add(change);
    }

    // We also need to drop the foreign keys of the unchanged tables
    // referencing the changed tables
    addRelevantFKsFromUnchangedTables(currentModel, desiredModel, changesPerTable.keySet(), unchangedFKs);

    // We're dropping the unchanged foreign keys
    for (Map.Entry<String, List<ForeignKey>> entry : unchangedFKs.entrySet()) {
        Table targetTable = desiredModel.findTable(entry.getKey(), caseSensitive);
        for (ForeignKey fk : entry.getValue()) {
            writeExternalForeignKeyDropStmt(targetTable, fk, ddl);
        }
    }

    // We're using a copy of the current model so that the table structure
    // changes can modify it
    Database copyOfCurrentModel = copy(currentModel);

    for (Map.Entry<String, List<TableChange>> entry : changesPerTable.entrySet()) {
        processTableStructureChanges(copyOfCurrentModel, desiredModel, entry.getKey(), entry.getValue(), ddl);
    }

    // And finally we're re-creating the unchanged foreign keys
    for (Map.Entry<String, List<ForeignKey>> entry : unchangedFKs.entrySet()) {
        Table targetTable = desiredModel.findTable(entry.getKey(), caseSensitive);
        for (ForeignKey fk : entry.getValue()) {
            writeExternalForeignKeyCreateStmt(desiredModel, targetTable, fk, ddl);
        }
    }
}
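Note that changesPerTable.keySet() is passed to addRelevantFKsFromUnchangedTables() without copying: keySet() returns a live view backed by the map, so the callee always sees the current keys, and removals through the view remove the mappings themselves. A small sketch of the view behaviour:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class KeySetViewSketch {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("keep", 1);
        map.put("drop", 2);

        Set<String> keys = map.keySet();
        keys.remove("drop");     // removes the entry from the map, too
        System.out.println(map); // prints {keep=1}
    }
}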
From source file:org.opentaps.amazon.product.AmazonProductServices.java
/**
 * This service looks over AmazonProductImage and collects product images that haven't been posted yet,
 * creates an XML document for the ProductImage feed, and posts it to Amazon.com.
 * @param dctx a <code>DispatchContext</code> value
 * @param context the service context <code>Map</code>
 * @return the service response <code>Map</code>
 */
public static Map<String, Object> publishProductImagesToAmazon(DispatchContext dctx, Map<String, Object> context) {
    Delegator delegator = dctx.getDelegator();
    LocalDispatcher dispatcher = dctx.getDispatcher();
    Locale locale = (Locale) context.get("locale");
    GenericValue userLogin = (GenericValue) context.get("userLogin");
    String prodId = (String) context.get("productId");
    try {
        List<EntityCondition> conditions = UtilMisc.<EntityCondition>toList(EntityCondition.makeCondition(
                "statusId", EntityOperator.IN, Arrays.asList(AmazonConstants.statusProductCreated,
                        AmazonConstants.statusProductChanged, AmazonConstants.statusProductError)));
        if (UtilValidate.isNotEmpty(prodId)) {
            conditions.add(EntityCondition.makeCondition("productId", EntityOperator.EQUALS, prodId));
        }
        TransactionUtil.begin();
        EntityListIterator amazonImageIt = delegator.findListIteratorByCondition("AmazonProductImage",
                EntityCondition.makeCondition(conditions, EntityOperator.AND), null, Arrays.asList("productId"));
        TransactionUtil.commit();

        long messageId = 1;
        long processedProductImages = 0;
        Map<GenericValue, String> invalidAmazonImages = new HashMap<GenericValue, String>();
        List<GenericValue> validAmazonImages = new ArrayList<GenericValue>();
        List<GenericValue> amazonProductImageAcks = new ArrayList<GenericValue>();
        Document imageFeed = AmazonConstants.soapClient
                .createDocumentHeader(AmazonConstants.messageTypeProductImage);
        Element root = imageFeed.getDocumentElement();
        GenericValue amazonProductImage = null;
        while ((amazonProductImage = amazonImageIt.next()) != null) {
            String errMessage = null;
            if (AmazonConstants.productPostRetryThreshold <= amazonProductImage.getLong("postFailures")
                    .intValue()) {
                String errorLog = UtilProperties.getMessage(AmazonConstants.errorResource,
                        "AmazonError_PostImageAttemptsOverThreshold",
                        UtilMisc.<String, Object>toMap("productId", amazonProductImage.getString("productId"),
                                "threshold", AmazonConstants.productPostRetryThreshold),
                        locale);
                Debug.logInfo(errorLog, MODULE);
                continue;
            }
            // Check if this product was exported and acknowledged earlier
            if (delegator.findCountByAnd("AmazonProduct",
                    UtilMisc.toMap("productId", amazonProductImage.getString("productId"), "statusId",
                            AmazonConstants.statusProductPosted, "ackStatusId",
                            AmazonConstants.statusProductAckRecv)) != 1) {
                String errorLog = UtilProperties.getMessage(AmazonConstants.errorResource,
                        "AmazonError_PostImageNonExistentProduct",
                        UtilMisc.toMap("productId", amazonProductImage.getString("productId")), locale);
                Debug.logError(errorLog, MODULE);
                continue;
            }
            // Ignore products marked deleted
            if (AmazonUtil.isAmazonProductDeleted(delegator, amazonProductImage.getString("productId"))) {
                String errorLog = UtilProperties.getMessage(AmazonConstants.errorResource,
                        "AmazonError_IgnoringProductImage_ProductDeleted",
                        UtilMisc.toMap("productId", amazonProductImage.getString("productId")), locale);
                Debug.logError(errorLog, MODULE);
                continue;
            }
            String upc = null;
            if (AmazonConstants.requireUpcCodes || AmazonConstants.useUPCAsSKU) {
                // Establish and validate the UPC
                upc = getProductUPC(delegator, amazonProductImage.getString("productId"), locale);
                if (UtilValidate.isEmpty(upc) && AmazonConstants.requireUpcCodes) {
                    errMessage = AmazonUtil.compoundError(errMessage,
                            UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_MissingCodeUPC",
                                    UtilMisc.toMap("productId", amazonProductImage.getString("productId")),
                                    locale));
                } else if (UtilValidate.isNotEmpty(upc) && !UtilProduct.isValidUPC(upc)) {
                    errMessage = AmazonUtil.compoundError(errMessage,
                            UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_InvalidCodeUPC",
                                    UtilMisc.toMap("productId", amazonProductImage.getString("productId")),
                                    locale));
                }
            }
            // Establish and validate the SKU
            String sku = getProductSKU(delegator, amazonProductImage, upc);
            if (UtilValidate.isEmpty(sku) && !AmazonConstants.useUPCAsSKU) {
                errMessage = AmazonUtil.compoundError(errMessage,
                        UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_NoRequiredParameter",
                                UtilMisc.toMap("parameterName", "SKU", "productName",
                                        amazonProductImage.getString("productId")),
                                locale));
            }
            ProductContentWrapper contentWrapper = UtilProduct.getProductContentWrapper(delegator, dispatcher,
                    amazonProductImage.getString("productId"), locale);
            // Products must have a Main image
            if (UtilValidate.isEmpty(AmazonConstants.imageTypes.get("Main"))
                    || UtilValidate.isEmpty(contentWrapper.get(AmazonConstants.imageTypes.get("Main")))) {
                errMessage = AmazonUtil.compoundError(errMessage,
                        UtilProperties.getMessage(AmazonConstants.errorResource, "AmazonError_MissingProductImage",
                                UtilMisc.toMap("productId", amazonProductImage.getString("productId"),
                                        "productContentTypeId", AmazonConstants.imageTypes.get("Main")),
                                locale));
            }
            // Collect the image URL for each image type, preserving the configured order
            LinkedHashMap<String, URI> imageTypeUrls = new LinkedHashMap<String, URI>();
            for (String imageType : AmazonConstants.imageTypes.keySet()) {
                String fieldName = AmazonConstants.imageTypes.get(imageType);
                StringUtil.StringWrapper imageUrlWrapper = contentWrapper.get(fieldName);
                if (imageUrlWrapper == null) {
                    Debug.logInfo("No image url found for product [" + prodId + "] and field [" + fieldName + "]",
                            MODULE);
                    continue;
                }
                String imageUrlString = imageUrlWrapper.toString();
                if (UtilValidate.isEmpty(imageUrlString)) {
                    imageTypeUrls.put(imageType, null);
                    continue;
                }
                try {
                    // URL-encode the file name part of the image URL
                    int slashPos = imageUrlString.lastIndexOf("/");
                    if (slashPos != -1 && slashPos + 1 != imageUrlString.length()) {
                        String filePath = imageUrlString.substring(0, slashPos + 1);
                        String fileName = imageUrlString.substring(slashPos + 1, imageUrlString.length());
                        imageUrlString = filePath + URLEncoder.encode(fileName, "UTF-8");
                    }
                    imageTypeUrls.put(imageType, new URI(AmazonConstants.imageUrlPrefix + imageUrlString));
                } catch (URISyntaxException e) {
                    errMessage = AmazonUtil.compoundError(errMessage,
                            UtilProperties.getMessage(AmazonConstants.errorResource,
                                    "AmazonError_InvalidProductImageUri",
                                    UtilMisc.toMap("productId", amazonProductImage.getString("productId"),
                                            "fieldName", fieldName),
                                    locale));
                    break;
                } catch (UnsupportedEncodingException e) {
                    errMessage = AmazonUtil.compoundError(errMessage,
                            UtilProperties.getMessage(AmazonConstants.errorResource,
                                    "AmazonError_InvalidProductImageUri",
                                    UtilMisc.toMap("productId", amazonProductImage.getString("productId"),
                                            "fieldName", fieldName),
                                    locale));
                    break;
                }
            }
            if (UtilValidate.isNotEmpty(errMessage)) {
                invalidAmazonImages.put(amazonProductImage, errMessage);
                continue;
            }
            for (String imageType : imageTypeUrls.keySet()) {
                URI imageUri = imageTypeUrls.get(imageType);
                String operationType = UtilValidate.isEmpty(imageUri) ? "Delete" : "Update";
                Element message = imageFeed.createElement("Message");
                root.appendChild(message);
                UtilXml.addChildElementValue(message, "MessageID", "" + messageId, imageFeed);
                UtilXml.addChildElementValue(message, "OperationType", operationType, imageFeed);
                Element productImage = imageFeed.createElement("ProductImage");
                message.appendChild(productImage);
                UtilXml.addChildElementValue(productImage, "SKU", sku, imageFeed);
                UtilXml.addChildElementValue(productImage, "ImageType", imageType, imageFeed);
                if (UtilValidate.isNotEmpty(imageUri)) {
                    UtilXml.addChildElementValue(productImage, "ImageLocation", imageUri.toString(), imageFeed);
                }
                amazonProductImageAcks.add(delegator.makeValue("AmazonProductImageAck",
                        UtilMisc.toMap("productId", amazonProductImage.getString("productId"),
                                "productContentTypeId", AmazonConstants.imageTypes.get(imageType),
                                "acknowledgeMessageId", "" + messageId, "ackStatusId",
                                AmazonConstants.statusProductNotAcked, "acknowledgeTimestamp",
                                UtilDateTime.nowTimestamp())));
                messageId++;
            }
            validAmazonImages.add(amazonProductImage);
            processedProductImages++;
            if (processedProductImages % 500 == 0) {
                Debug.logInfo(
                        UtilProperties.getMessage(AmazonConstants.errorResource,
                                "AmazonError_Processed_Records_Image",
                                UtilMisc.toMap("count", processedProductImages), locale),
                        MODULE);
            }
        }
        amazonImageIt.close();

        LinkedHashMap<GenericValue, String> emailErrorMessages = new LinkedHashMap<GenericValue, String>();
        if (UtilValidate.isEmpty(validAmazonImages)) {
            String infoMessage = UtilProperties.getMessage(AmazonConstants.errorResource,
                    "AmazonError_PostNoNewImages", locale);
            Debug.logInfo(infoMessage, MODULE);
        } else {
            boolean success = true;
            String postErrorMessage = null;
            long processingDocumentId = -1;
            try {
                String xml = UtilXml.writeXmlDocument(imageFeed);
                Debug.logVerbose(xml, MODULE);
                Writer writer = new OutputStreamWriter(new FileOutputStream(AmazonConstants.xmlOutputLocation
                        + "AmazonImageFeed_" + AmazonConstants.xmlOutputDateFormat.format(new Date()) + ".xml"),
                        "UTF-8");
                writer.write(xml);
                writer.close();
                processingDocumentId = AmazonConstants.soapClient.postProductImages(xml);
                Debug.logInfo(
                        UtilProperties.getMessage(AmazonConstants.errorResource,
                                "AmazonError_ProcessingDocumentId_Image",
                                UtilMisc.toMap("processingDocumentId", processingDocumentId), locale),
                        MODULE);
            } catch (RemoteException e) {
                success = false;
                postErrorMessage = e.getMessage();
                List<String> productIds = EntityUtil.getFieldListFromEntityList(validAmazonImages, "productId",
                        true);
                String errorLog = UtilProperties.getMessage(AmazonConstants.errorResource,
                        "AmazonError_PostImageError",
                        UtilMisc.toMap("productIds", productIds, "errorMessage", postErrorMessage), locale);
                Debug.logError(errorLog, MODULE);
            }
            // Store operational data of the post attempt
            for (GenericValue validAmazonImage : validAmazonImages) {
                validAmazonImage.set("statusId",
                        success ? AmazonConstants.statusProductPosted : AmazonConstants.statusProductError);
                validAmazonImage.set("postTimestamp", UtilDateTime.nowTimestamp());
                validAmazonImage.set("postErrorMessage", success ? null : postErrorMessage);
                if (!success) {
                    validAmazonImage.set("postFailures", validAmazonImage.getLong("postFailures") + 1);
                }
                validAmazonImage.set("processingDocumentId", success ? processingDocumentId : null);
                validAmazonImage.store();
                if (AmazonConstants.sendErrorEmails && !success) {
                    emailErrorMessages.put(validAmazonImage, postErrorMessage);
                }
                // Remove the old AmazonProductImageAcks from the database and store the new ones
                delegator.removeRelated("AmazonProductImageAck", validAmazonImage);
                delegator.storeAll(EntityUtil.filterByCondition(amazonProductImageAcks,
                        EntityCondition.makeCondition("productId", EntityOperator.EQUALS,
                                validAmazonImage.getString("productId"))));
            }
        }
        for (GenericValue invalidAmazonImage : invalidAmazonImages.keySet()) {
            String errorMessage = invalidAmazonImages.get(invalidAmazonImage);
            invalidAmazonImage.set("statusId", AmazonConstants.statusProductError);
            invalidAmazonImage.set("postTimestamp", UtilDateTime.nowTimestamp());
            invalidAmazonImage.set("postErrorMessage", errorMessage);
            invalidAmazonImage.set("postFailures", invalidAmazonImage.getLong("postFailures") + 1);
            invalidAmazonImage.set("processingDocumentId", null);
            invalidAmazonImage.store();
            delegator.removeRelated("AmazonProductImageAck", invalidAmazonImage);
            if (AmazonConstants.sendErrorEmails) {
                emailErrorMessages.put(invalidAmazonImage, errorMessage);
            }
        }
        if (AmazonConstants.sendErrorEmails && UtilValidate.isNotEmpty(emailErrorMessages)) {
            AmazonUtil.sendBulkErrorEmail(dispatcher, userLogin, emailErrorMessages,
                    UtilProperties.getMessage(AmazonConstants.errorResource,
                            "AmazonError_ErrorEmailSubject_PostImage", AmazonConstants.errorEmailLocale),
                    AmazonConstants.errorEmailScreenUriProducts);
        }
    } catch (GenericEntityException gee) {
        UtilMessage.createAndLogServiceError(gee, locale, MODULE);
    } catch (IOException ioe) {
        UtilMessage.createAndLogServiceError(ioe, locale, MODULE);
    } catch (GenericServiceException gse) {
        UtilMessage.createAndLogServiceError(gse, locale, MODULE);
    }
    return ServiceUtil.returnSuccess();
}