List of usage examples for java.util HashMap values
public Collection<V> values()
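values() returns a Collection view backed by the map: changes to the map show up in the collection, and removing elements through the view (via Iterator.remove, removeAll, etc.) removes the corresponding entries from the map. Before the full examples below, a minimal self-contained sketch of that view behavior (names are illustrative, not taken from the examples):

import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;

public class ValuesViewDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> stock = new HashMap<>();
        stock.put("apples", 4);
        stock.put("pears", 0);
        stock.put("plums", 7);

        // The returned Collection is a live view, not a copy.
        Collection<Integer> counts = stock.values();

        // Removing through the view's iterator removes the mapping itself.
        for (Iterator<Integer> it = counts.iterator(); it.hasNext(); ) {
            if (it.next() == 0) {
                it.remove(); // drops the "pears" entry from the map
            }
        }
        System.out.println(stock); // {apples=4, plums=7} (iteration order not guaranteed)
    }
}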
From source file:de.static_interface.sinksql.SqlDatabase.java
@SuppressWarnings("deprecation") @Override//w w w. ja v a 2s . co m public <T extends Row> void createTable(AbstractTable<T> abstractTable) { char bt = getBacktick(); String sql = "CREATE TABLE IF NOT EXISTS " + bt + abstractTable.getName() + bt + " ("; List<String> primaryKeys = new ArrayList<>(); List<String> uniqueKeys = new ArrayList<>(); List<Field> foreignKeys = new ArrayList<>(); List<Field> indexes = new ArrayList<>(); HashMap<Integer, List<String>> combinedUniqueKeys = new HashMap<>(); Class foreignOptionsTable = null; if (abstractTable instanceof OptionsTable) { foreignOptionsTable = ((OptionsTable) abstractTable).getForeignTable(); } for (Field f : abstractTable.getRowClass() . getFields() ) { Column column = FieldCache.getAnnotation(f, Column.class); if (column == null) { continue; } String name = StringUtil.isEmptyOrNull(column.name()) ? f.getName() : column.name(); sql += bt + name + bt + " " + toDatabaseType(f); if (column.zerofill()) { if (!ReflectionUtil.isNumber(f.getType())) { throw new InvalidSqlColumnException(abstractTable, f, name, "column was annotated as ZEROFILL but wrapper type is not a number"); } sql += " ZEROFILL"; } if (column.unsigned()) { if (!ReflectionUtil.isNumber(f.getType())) { throw new InvalidSqlColumnException(abstractTable, f, name, "column was annotated as UNSIGNED but wrapper type is not a number"); } sql += " UNSIGNED"; } if (column.autoIncrement()) { if (!ReflectionUtil.isNumber(f.getType())) { throw new InvalidSqlColumnException(abstractTable, f, name, "column was annotated as AUTO_INCREMENT but wrapper type is not a number"); } sql += " AUTO_INCREMENT"; } if (column.uniqueKey()) { uniqueKeys.add(name); } UniqueKey uniqueKey = FieldCache.getAnnotation(f, UniqueKey.class); if (uniqueKey != null) { if (uniqueKey.combinationId() == Integer.MAX_VALUE) { uniqueKeys.add(name); } else { List<String> keys = combinedUniqueKeys.get(uniqueKey.combinationId()); if (keys == null) { keys = new ArrayList<>(); } keys.add(name); combinedUniqueKeys.put(uniqueKey.combinationId(), keys); } } if (column.primaryKey()) { primaryKeys.add(name); } if (FieldCache.getAnnotation(f, Nullable.class) == null) { sql += " NOT NULL"; } else if (ReflectionUtil.isPrimitiveClass(f.getType())) { // The column is nullable but the wrapper type is a primitive value, which can't be null throw new InvalidSqlColumnException(abstractTable, f, name, "column was annotated as NULLABLE but wrapper type is a primitive type"); } if (!StringUtil.isEmptyOrNull(column.defaultValue())) { sql += " DEFAULT " + column.defaultValue(); } if (!StringUtil.isEmptyOrNull(column.comment())) { sql += " COMMENT '" + column.comment() + "'"; } if (FieldCache.getAnnotation(f, ForeignKey.class) != null) { foreignKeys.add(f); } if (FieldCache.getAnnotation(f, Index.class) != null) { indexes.add(f); } sql += ","; } if (primaryKeys.size() > 0) { String columns = ""; for (String f : primaryKeys) { if (!columns.equals("")) { columns += ", "; } columns += bt + f + bt; } sql += "PRIMARY KEY (" + columns + "),"; } if (uniqueKeys.size() > 0) { for (String s : uniqueKeys) { sql += "UNIQUE (" + bt + s + bt + "),"; } } if (combinedUniqueKeys.size() > 0) { for (List<String> columnsList : combinedUniqueKeys.values()) { String columns = ""; String first = null; for (String f : columnsList) { if (!columns.equals("")) { columns += ", "; } if (first == null) { first = f; } columns += bt + f + bt; } sql += "UNIQUE KEY " + bt + first + "_uk" + bt + " (" + columns + "),"; } } for (Field f : foreignKeys) { Column column = 
FieldCache.getAnnotation(f, Column.class); String name = StringUtil.isEmptyOrNull(column.name()) ? f.getName() : column.name(); ForeignKey foreignKey = FieldCache.getAnnotation(f, ForeignKey.class); sql = addForeignKey(sql, name, foreignKey.table(), foreignKey.column(), foreignKey.onUpdate(), foreignKey.onDelete()); } if (foreignOptionsTable != null) { String column = ((OptionsTable) abstractTable).getForeignColumn(); CascadeAction onUpdate = ((OptionsTable) abstractTable).getForeignOnUpdateAction(); CascadeAction onDelete = ((OptionsTable) abstractTable).getForeignOnDeleteAction(); sql = addForeignKey(sql, "foreignTarget", foreignOptionsTable, column, onUpdate, onDelete); } for (Field f : indexes) { if (abstractTable.getEngine().equalsIgnoreCase("InnoDB") && foreignKeys.contains(f)) { continue; //InnoDB already creates indexes for foreign keys, so skip these... } Column column = FieldCache.getAnnotation(f, Column.class); String name = StringUtil.isEmptyOrNull(column.name()) ? f.getName() : column.name(); Index index = FieldCache.getAnnotation(f, Index.class); String indexName = StringUtil.isEmptyOrNull(index.name()) ? name + "_I_" + abstractTable.getName().toLowerCase() : index.name(); sql += "INDEX " + bt + indexName + bt + " (" + bt + name + bt + ")"; sql += ","; } if (sql.endsWith(",")) { sql = sql.substring(0, sql.length() - 1); } sql += ")"; if (supportsEngines()) { //Todo: do other SQL databases support engines? sql += " ENGINE=" + abstractTable.getEngine(); } sql += ";"; abstractTable.executeUpdate(sql); }
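The combinedUniqueKeys map above illustrates a common idiom: group items into lists keyed by some id, then iterate values() to consume each group without caring about the keys. A minimal sketch of the same idiom, with hypothetical data in place of the annotated fields:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class GroupByDemo {
    public static void main(String[] args) {
        // Pairs of (combination id, column name), standing in for the annotated fields.
        String[][] columns = { { "1", "a" }, { "2", "b" }, { "1", "c" } };

        // Group column names by combination id.
        HashMap<String, List<String>> groups = new HashMap<>();
        for (String[] col : columns) {
            List<String> keys = groups.get(col[0]);
            if (keys == null) {
                keys = new ArrayList<>();
                groups.put(col[0], keys);
            }
            keys.add(col[1]);
        }

        // The keys have served their purpose; values() yields just the groups.
        for (List<String> group : groups.values()) {
            System.out.println(group); // [a, c] and [b], in unspecified order
        }
    }
}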
From source file:de.dfki.km.leech.parser.wikipedia.WikipediaDumpParser.java
protected void parseInfoBox(String strText, Metadata metadata, ContentHandler handler) throws SAXException {
    // Attribute-value pairs are separated by '|'. Values may also contain line breaks
    // ('<br>' / '<br />'), which count as enumerations, e.g.:
    // |Single1 |Datum1 , Besetzung1a Besetzung1b, Sonstiges1Titel |Sonstiges1Inhalt , Coverversion3 |Jahr3
    // | 1Option = 3
    // | 1Option Name = Demos
    // | 1Option Link = Demos
    // | 1Option Color =

    // First, cut the infobox out of the text. (?m) is multiline and (?s) is dotall
    // ('.' also matches line breaks).
    int iStartInfoBox = -1;
    int iEndInfoBox = -1;
    MatchResult infoMatch = StringUtils.findFirst("\\{\\{\\s*Infobox", strText);
    if (infoMatch != null) {
        iStartInfoBox = infoMatch.start();
        iEndInfoBox = StringUtils.findMatchingBracket(iStartInfoBox, strText) + 1;
    } else
        return;
    if (strText.length() < 3 || strText.length() < iEndInfoBox || iEndInfoBox <= 0
            || (iStartInfoBox + 2) > iEndInfoBox)
        return;
    String strInfoBox = strText.substring(iStartInfoBox + 2, iEndInfoBox);
    if (strInfoBox.length() < 5)
        return;
    // Normalize '<br>' to '<br />' so the value split below matches. (The original listing
    // showed a no-op replaceAll here, an apparent HTML-extraction artifact.)
    String strCleanedInfoBox = m_wikiModel.render(new PlainTextConverter(),
            strInfoBox.replaceAll("<br>", "<br />"));

    // Since these are essentially relational data records, we turn them into individual,
    // separate documents.
    // System.out.println(strCleanedInfoBox);
    // System.out.println(strCleanedInfoBox.substring(0, strCleanedInfoBox.indexOf("\n")).trim());

    // The first line names the infobox.
    int iIndex = strCleanedInfoBox.indexOf("|");
    if (iIndex == -1)
        iIndex = strCleanedInfoBox.indexOf("\n");
    if (iIndex == -1)
        return;
    String strInfoBoxName = strCleanedInfoBox.substring(7, iIndex).trim();
    metadata.add(infobox, strInfoBoxName);
    String[] straCleanedInfoBoxSplit = strCleanedInfoBox.split("\\s*\\|\\s*");
    HashMap<String, MultiValueHashMap<String, String>> hsSubDocId2AttValuePairsOfSubDoc =
            new HashMap<String, MultiValueHashMap<String, String>>();
    for (String strAttValuePair : straCleanedInfoBoxSplit) {
        // System.out.println("\nattValPair unsplittet " + strAttValuePair);

        // The pairs are separated by '='.
        String[] straAtt2Value = strAttValuePair.split("=");
        if (straAtt2Value.length == 0 || straAtt2Value[0] == null)
            continue;
        if (straAtt2Value.length < 2 || straAtt2Value[1] == null)
            continue;
        String strAttName = straAtt2Value[0].trim();
        String strAttValues = straAtt2Value[1];
        if (StringUtils.nullOrWhitespace(strAttValues))
            continue;

        // Values may also contain line breaks ('<br>' / '<br />'), which count as enumerations.
        String[] straAttValues = strAttValues.split(Pattern.quote("<br />"));

        // XXX For now we discard additional information given in parentheses - it could also be
        // stored as attnameAddInfo in an extra attribute - but then we would again have to check
        // whether a subdocument must be created (e.g. several genre entries, each with its own year).

        // The attribute name now decides whether the entry is split out into a separate document.
        // If it contains a number, we remove the number and group all attribute-value pairs with
        // that number into an extra record (MultiValueHashMap).
        Matcher numberMatcher = Pattern.compile("([\\D]*)(\\d+)([\\D]*)").matcher(strAttName);
        if (!numberMatcher.find()) {
            // No number in the attribute name - we simply add this value to the metadata.
            for (String strAttValue : straAttValues) {
                String strCleanedAttValue = cleanAttValue(strAttName, strAttValue);
                if (strCleanedAttValue != null)
                    metadata.add(strAttName, strCleanedAttValue);
            }
        } else {
            // The name contains a number - we store the value in a subdocument under the id <number>.
            String strPrefix = numberMatcher.group(1);
            String strNumber = numberMatcher.group(2);
            String strSuffix = numberMatcher.group(3);
            String strDataSetId = strPrefix + strNumber;
            String strFinalAttName = strPrefix + strSuffix;
            // If there are even more numbers, we have lost - we just add the value directly.
            if (numberMatcher.find()) {
                for (String strAttValue : straAttValues) {
                    String strCleanedAttValue = cleanAttValue(strFinalAttName, strAttValue);
                    if (strCleanedAttValue != null)
                        metadata.add(strFinalAttName, strCleanedAttValue);
                }
            }
            // System.out.println("prefix " + strPrefix);
            // System.out.println("num " + strDataSetId);
            // System.out.println("suffix " + strSuffix);
            MultiValueHashMap<String, String> hsAttname2ValueOfSubDoc =
                    hsSubDocId2AttValuePairsOfSubDoc.get(strDataSetId);
            if (hsAttname2ValueOfSubDoc == null) {
                hsAttname2ValueOfSubDoc = new MultiValueHashMap<String, String>();
                hsSubDocId2AttValuePairsOfSubDoc.put(strDataSetId, hsAttname2ValueOfSubDoc);
            }
            for (String strAttValue : straAttValues)
                hsAttname2ValueOfSubDoc.add(strFinalAttName,
                        strAttValue.replaceAll("\\(.*?\\)", "").trim());
        }
    }
    String strPageId = new UID().toString();
    metadata.add(LeechMetadata.id, strPageId);
    // We have to use the same Metadata object.
    Metadata metadataBackup4ParentPage = TikaUtils.copyMetadata(metadata);
    for (MultiValueHashMap<String, String> hsAttValuePairsOfSubDoc : hsSubDocId2AttValuePairsOfSubDoc
            .values()) {
        TikaUtils.clearMetadata(metadata);
        // The reference to our parent.
        metadata.add(LeechMetadata.parentId, strPageId);
        metadata.add(infobox, strInfoBoxName);
        String strChildId = new UID().toString();
        metadata.add(LeechMetadata.id, strChildId);
        // For back references, we also give the parent our id.
        metadataBackup4ParentPage.add(LeechMetadata.childId, strChildId);
        for (Entry<String, String> attName2Value4SubDoc : hsAttValuePairsOfSubDoc.entryList()) {
            String strAttName = attName2Value4SubDoc.getKey();
            String strAttValue = attName2Value4SubDoc.getValue();
            String strCleanedAttValue = cleanAttValue(strAttName, strAttValue);
            if (strCleanedAttValue != null)
                metadata.add(strAttName, strCleanedAttValue);
        }
        metadata.add(Metadata.CONTENT_TYPE, "application/wikipedia-meta+xml");
        // This way, several docs arrive at the enclosing ContentHandler :)
        XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
        xhtml.startDocument();
        xhtml.endDocument();
    }
    TikaUtils.clearMetadata(metadata);
    TikaUtils.copyMetadataFromTo(metadataBackup4ParentPage, metadata);
}
From source file:com.vgi.mafscaling.ClosedLoop.java
private void calculateCorrectedGS() {
    double time;
    double load;
    double rpm;
    double dvdt;
    double afr;
    double mafv;
    double stft;
    double ltft;
    double iat;
    double corr;
    double val1;
    double val2;
    String timeStr;
    String loadStr;
    String rpmStr;
    String mafvStr;
    String afrStr;
    String stftStr;
    String ltftStr;
    String dvdtStr;
    String iatStr;
    int closestMafIdx;
    int closestRmpIdx;
    int closestLoadIdx;
    int i;
    String tableName = "Log Data";
    ArrayList<Integer> temp = new ArrayList<Integer>(gsArray.size());
    correctionMeanArray = new ArrayList<Double>(gsArray.size());
    correctionModeArray = new ArrayList<Double>(gsArray.size());
    ArrayList<HashMap<Double, Integer>> modeCalcArray = new ArrayList<HashMap<Double, Integer>>();
    for (i = 0; i < gsArray.size(); ++i) {
        temp.add(0);
        correctionMeanArray.add(0.0);
        correctionModeArray.add(0.0);
        modeCalcArray.add(new HashMap<Double, Integer>());
    }
    ArrayList<Double> afrRpmArray = new ArrayList<Double>();
    for (i = 1; i < polfTable.getRowCount(); ++i) {
        afrRpmArray.add(Double.valueOf(polfTable.getValueAt(i, 0).toString()));
        Utils.ensureRowCount(i + 1, afr1Table);
        Utils.ensureRowCount(i + 1, afr2Table);
        afr1Table.setValueAt(polfTable.getValueAt(i, 0), i, 0);
        afr2Table.setValueAt(polfTable.getValueAt(i, 0), i, 0);
    }
    ArrayList<Double> afrLoadArray = new ArrayList<Double>();
    for (i = 1; i < polfTable.getColumnCount(); ++i) {
        afrLoadArray.add(Double.valueOf(polfTable.getValueAt(0, i).toString()));
        Utils.ensureColumnCount(i + 1, afr1Table);
        Utils.ensureColumnCount(i + 1, afr2Table);
        afr1Table.setValueAt(polfTable.getValueAt(0, i), 0, i);
        afr2Table.setValueAt(polfTable.getValueAt(0, i), 0, i);
    }
    Integer val;
    HashMap<Double, Integer> modeCountMap;
    for (i = 0; i < logDataTable.getRowCount(); ++i) {
        timeStr = logDataTable.getValueAt(i, 0).toString();
        loadStr = logDataTable.getValueAt(i, 1).toString();
        rpmStr = logDataTable.getValueAt(i, 2).toString();
        mafvStr = logDataTable.getValueAt(i, 3).toString();
        afrStr = logDataTable.getValueAt(i, 4).toString();
        stftStr = logDataTable.getValueAt(i, 5).toString();
        ltftStr = logDataTable.getValueAt(i, 6).toString();
        dvdtStr = logDataTable.getValueAt(i, 7).toString();
        iatStr = logDataTable.getValueAt(i, 8).toString();
        if (timeStr.isEmpty() || loadStr.isEmpty() || rpmStr.isEmpty() || mafvStr.isEmpty()
                || afrStr.isEmpty() || stftStr.isEmpty() || ltftStr.isEmpty() || dvdtStr.isEmpty()
                || iatStr.isEmpty())
            break;
        if (!Utils.validateDouble(timeStr, i, 0, tableName)
                || !Utils.validateDouble(loadStr, i, 1, tableName)
                || !Utils.validateDouble(rpmStr, i, 2, tableName)
                || !Utils.validateDouble(mafvStr, i, 3, tableName)
                || !Utils.validateDouble(afrStr, i, 4, tableName)
                || !Utils.validateDouble(stftStr, i, 5, tableName)
                || !Utils.validateDouble(ltftStr, i, 6, tableName)
                || !Utils.validateDouble(dvdtStr, i, 7, tableName)
                || !Utils.validateDouble(iatStr, i, 8, tableName))
            return;
        time = Double.valueOf(timeStr);
        load = Double.valueOf(loadStr);
        rpm = Double.valueOf(rpmStr);
        mafv = Double.valueOf(mafvStr);
        afr = Double.valueOf(afrStr);
        stft = Double.valueOf(stftStr);
        ltft = Double.valueOf(ltftStr);
        dvdt = Double.valueOf(dvdtStr);
        iat = Double.valueOf(iatStr);
        corr = ltft + stft;
        trimArray.add(corr);
        rpmArray.add(rpm);
        timeArray.add(time);
        iatArray.add(iat);
        mafvArray.add(mafv);
        dvdtArray.add(dvdt);
        closestMafIdx = Utils.closestValueIndex(load * rpm / 60.0, gsArray);
        correctionMeanArray.set(closestMafIdx,
                (correctionMeanArray.get(closestMafIdx) * temp.get(closestMafIdx) + corr)
                        / (temp.get(closestMafIdx) + 1));
        temp.set(closestMafIdx, temp.get(closestMafIdx) + 1);
        modeCountMap = modeCalcArray.get(closestMafIdx);
        double roundedCorr = ((double) Math.round(corr * 10.0)) / 10.0;
        val = modeCountMap.get(roundedCorr);
        if (val == null)
            modeCountMap.put(roundedCorr, 1);
        else
            modeCountMap.put(roundedCorr, val + 1);
        closestRmpIdx = Utils.closestValueIndex(rpm, afrRpmArray) + 1;
        closestLoadIdx = Utils.closestValueIndex(load, afrLoadArray) + 1;
        val1 = (afr1Table.getValueAt(closestRmpIdx, closestLoadIdx).toString().isEmpty()) ? 0
                : Double.valueOf(afr1Table.getValueAt(closestRmpIdx, closestLoadIdx).toString());
        val2 = (afr2Table.getValueAt(closestRmpIdx, closestLoadIdx).toString().isEmpty()) ? 0
                : Double.valueOf(afr2Table.getValueAt(closestRmpIdx, closestLoadIdx).toString());
        afr1Table.setValueAt((val1 * val2 + afr) / (val2 + 1.0), closestRmpIdx, closestLoadIdx);
        afr2Table.setValueAt(val2 + 1.0, closestRmpIdx, closestLoadIdx);
    }
    for (i = 0; i < modeCalcArray.size(); ++i) {
        modeCountMap = modeCalcArray.get(i);
        if (modeCountMap.size() > 0) {
            int maxValueInMap = (Collections.max(modeCountMap.values()));
            double sum = 0;
            int count = 0;
            for (Entry<Double, Integer> entry : modeCountMap.entrySet()) {
                if (entry.getValue() == maxValueInMap) {
                    sum += entry.getKey();
                    count += 1;
                }
            }
            correctionModeArray.set(i, sum / count);
        }
    }
    int size = afrRpmArray.size() + 1;
    while (size < afr1Table.getRowCount())
        Utils.removeRow(size, afr1Table);
    while (size < afr2Table.getRowCount())
        Utils.removeRow(size, afr2Table);
    Utils.colorTable(afr1Table);
    Utils.colorTable(afr2Table);
    int firstCorrIndex = 0;
    double firstCorr = 1;
    for (i = 0; i < correctionMeanArray.size(); ++i) {
        corr = 1;
        if (temp.get(i) > minCellHitCount) {
            corr = 1.0 + (correctionMeanArray.get(i) + correctionModeArray.get(i)) / 200.00;
            if (firstCorrIndex == 0) {
                firstCorrIndex = i;
                firstCorr = corr;
            }
        }
        gsCorrected.add(i, gsArray.get(i) * corr);
    }
    for (i = firstCorrIndex - 1; i > 0; --i)
        gsCorrected.set(i, gsArray.get(i) * firstCorr);
}
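The mode computation above relies on Collections.max(map.values()) to find the highest occurrence count in each cell's HashMap<Double, Integer>. A minimal sketch of that frequency-count pattern, assuming simple double samples rounded to one decimal place:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class ModeDemo {
    public static void main(String[] args) {
        double[] samples = { 1.04, 1.12, 1.06, 0.98, 1.11 };

        // Count occurrences of each value rounded to one decimal place.
        HashMap<Double, Integer> counts = new HashMap<>();
        for (double s : samples) {
            double rounded = Math.round(s * 10.0) / 10.0;
            Integer n = counts.get(rounded);
            counts.put(rounded, n == null ? 1 : n + 1);
        }

        // The mode is any key whose count equals the maximum over values().
        int max = Collections.max(counts.values());
        for (Map.Entry<Double, Integer> e : counts.entrySet()) {
            if (e.getValue() == max) {
                System.out.println("mode candidate: " + e.getKey()); // prints 1.1
            }
        }
    }
}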
From source file:ffx.potential.parameters.ForceField.java
/**
 * Patches that add new atom classes/types that bond to existing atom
 * classes/types require "hybrid" force field types that include a mixture
 * of new and existing types.
 *
 * @param typeMap A look-up from new types to existing types.
 * @param patchTypes A map from atom type names to the new atom types introduced by the patch.
 */
public void patchClassesAndTypes(HashMap<AtomType, AtomType> typeMap,
        HashMap<String, AtomType> patchTypes) {
    for (BondType bondType : bondTypes.values().toArray(new BondType[0])) {
        BondType newType = bondType.patchClasses(typeMap);
        if (newType != null) {
            logger.info(" " + newType.toString());
            addForceFieldType(newType);
        }
    }
    for (AngleType angleType : angleTypes.values().toArray(new AngleType[0])) {
        AngleType newType = angleType.patchClasses(typeMap);
        if (newType != null) {
            logger.info(" " + newType.toString());
            addForceFieldType(newType);
        }
    }
    for (OutOfPlaneBendType outOfPlaneBendType : outOfPlaneBendTypes.values()
            .toArray(new OutOfPlaneBendType[0])) {
        OutOfPlaneBendType newType = outOfPlaneBendType.patchClasses(typeMap);
        if (newType != null) {
            logger.info(" " + newType.toString());
            addForceFieldType(newType);
        }
    }
    for (PiTorsionType piTorsionType : piTorsionTypes.values().toArray(new PiTorsionType[0])) {
        PiTorsionType newType = piTorsionType.patchClasses(typeMap);
        if (newType != null) {
            logger.info(" " + newType.toString());
            addForceFieldType(newType);
        }
    }
    for (StretchBendType stretchBendType : stretchBendTypes.values().toArray(new StretchBendType[0])) {
        StretchBendType newType = stretchBendType.patchClasses(typeMap);
        if (newType != null) {
            logger.info(" " + newType.toString());
            addForceFieldType(newType);
        }
    }
    /*
    for (TorsionTorsionType torsionTorsionType : torsionTorsionTypes.values().toArray(new TorsionTorsionType[0])) {
        String currentKey = torsionTorsionType.key;
        torsionTorsionType.patchClasses(typeMap);
        if (!torsionTorsionType.key.equals(currentKey)) {
            torsionTorsionTypes.remove(currentKey);
            addForceFieldType(torsionTorsionType);
        }
    }
    */
    for (TorsionType torsionType : torsionTypes.values().toArray(new TorsionType[0])) {
        TorsionType newType = torsionType.patchClasses(typeMap);
        if (newType != null) {
            logger.info(" " + newType.toString());
            addForceFieldType(newType);
        }
    }
    /*
    for (ImproperTorsionType improperType : imptorsTypes.values().toArray(new ImproperTorsionType[0])) {
        String currentKey = improperType.key;
        improperType.patchClasses(typeMap);
        if (!improperType.key.equals(currentKey)) {
            torsionTypes.remove(currentKey);
            addForceFieldType(improperType);
        }
    }
    for (UreyBradleyType ureyBradleyType : ureyBradleyTypes.values().toArray(new UreyBradleyType[0])) {
        String currentKey = ureyBradleyType.key;
        ureyBradleyType.patchClasses(typeMap);
        if (!ureyBradleyType.key.equals(currentKey)) {
            ureyBradleyTypes.remove(currentKey);
            addForceFieldType(ureyBradleyType);
        }
    }
    */
    for (MultipoleType multipoleType : multipoleTypes.values().toArray(new MultipoleType[0])) {
        MultipoleType newType = multipoleType.patchTypes(typeMap);
        if (newType != null) {
            logger.info(" " + newType.toString());
            addForceFieldType(newType);
        }
    }
    try {
        for (AtomType atomType : patchTypes.values()) {
            PolarizeType polarizeType = getPolarizeType(atomType.key);
            if (polarizeType != null && polarizeType.patchTypes(typeMap)) {
                logger.info(" " + polarizeType.toString());
            }
        }
    } catch (Exception e) {
        // Inefficient hack. Should actually check if polarizeTypes are necessary.
    }
}
From source file:act.installer.metacyc.OrganismCompositionMongoWriter.java
/**
 * Each Metacyc biopax file contains collections of reactions and chemicals, organized by organism.
 * The reactions reference the chemicals using biopax-specific (or Metacyc-specific?) identifiers that don't match
 * our internal id scheme (for good reason--our identifier approach is far less complex!). This method writes the
 * contents of one organism's reactions and chemicals to the DB. The chemicals are written first so that we can
 * accumulate a mapping of Metacyc small molecule reference ids to our DB's chemical ids. The reactions' substrates
 * and products are then written to the DB using our internal chemical IDs, allowing us to unify Metacyc's chemical
 * and reaction data with whatever has already been written.
 */
public void write() {
    if (false)
        writeStdout(); // for debugging, if you need a full copy of the data in stdout

    // While going through this organism's chemicals (optionally installing them into the DB if
    // required), we map each rdfID to the InChI (in the DB).
    HashMap<String, Long> rdfID2MongoID = new HashMap<String, Long>();
    // For debugging, we log only the number of new reactions with sequences seen.
    int newRxns = 0;
    int resolvedViaDirectInChISpecified = 0;
    int resolvedViaSmallMoleculeRelationship = 0;

    // Stores chemical strings derived from CML to avoid repeated processing for reused small molecule references.
    HashMap<Resource, ChemInfoContainer> smRefsCollections = new HashMap<>();

    for (Resource id : smallmolecules.keySet()) {
        SmallMolecule sm = (SmallMolecule) smallmolecules.get(id);
        SmallMoleculeRef smref = (SmallMoleculeRef) this.src.resolve(sm.getSMRef());
        if (smref == null) {
            continue; // only happens in one case standardName="a ribonucleic acid"
        }

        /* De-duplicate structureToChemStrs calls by storing already accessed small molecule structures in a hash.
         * If we find the same molecule in our hash, we don't need to process it again! */
        ChemInfoContainer chemInfoContainer = smRefsCollections.get(sm.getSMRef());
        if (chemInfoContainer == null) {
            ChemicalStructure c = (ChemicalStructure) this.src.resolve(smref.getChemicalStructure());
            ChemStrs chemStrs = null;
            if (c != null) { // Only produce ChemStrs if we have a chemical structure to store.
                String lookupInChI;
                if (c.getInChI() != null) {
                    chemStrs = new ChemStrs(c.getInChI(), null, null);
                    resolvedViaDirectInChISpecified++;
                } else if ((lookupInChI = lookupInChIByXRefs(sm)) != null) {
                    // TODO: should we track these? They could just be bogus compounds or compound classes.
                    chemStrs = new ChemStrs(lookupInChI, null, null);
                    resolvedViaSmallMoleculeRelationship++;
                } else {
                    // Extract various canonical representations (like InChI) for this molecule based on the structure.
                    chemStrs = structureToChemStrs(c);
                }
            } else {
                /* This occurs for Metacyc entries that are treated as classes of molecules rather than individual
                 * molecules. See https://github.com/20n/act/issues/40. */
                System.out.format("--- warning, null ChemicalStructure for %s; %s; %s\n",
                        smref.getStandardName(), smref.getID(), smref.getChemicalStructure());
                // TODO: we could probably call `continue` here safely.
            }

            // Wrap all of the nominal/structural information for this molecule together for de-duplication.
            chemInfoContainer = new ChemInfoContainer(smref, chemStrs, c);
            smRefsCollections.put(sm.getSMRef(), chemInfoContainer);
        }

        if (chemInfoContainer.c == null) {
            if (debugFails)
                System.out.println("No structure: " + smref.expandedJSON(this.src).toString(2));
            continue; // mostly big molecules (e.g., a ureido compound, a sulfhydryl reagent, a macrolide
                      // antibiotic), but sometimes complexes (their members field has small molecule
                      // structures), and sometimes just no structure given (colanic acid, a reduced
                      // nitroaromatic compound)
        }

        SmallMolMetaData meta = getSmallMoleculeMetaData(sm, smref);
        chemInfoContainer.addSmallMolMetaData(meta);
    }

    System.out.format("*** Resolved %d of %d small molecules' InChIs via InChI structures.\n",
            resolvedViaDirectInChISpecified, smallmolecules.size());
    System.out.format("*** Resolved %d of %d small molecules' InChIs via compounds.dat lookup.\n",
            resolvedViaSmallMoleculeRelationship, smallmolecules.size());
    System.out.format("--- writing chemicals for %d collections from %d molecules\n",
            smRefsCollections.size(), smallmolecules.size());

    // Write all referenced small molecules only once. We de-duplicated while reading, so we should be ready to go!
    for (ChemInfoContainer cic : smRefsCollections.values()) {
        // actually add chemical to DB
        Long dbId = writeChemicalToDB(cic.structure, cic.c, cic.metas);
        if (dbId == null) {
            System.err.format("ERROR: unable to find/write chemical '%s'\n",
                    cic.smRef == null ? null : cic.smRef.getStandardName());
            continue;
        }
        /* Put rdfID -> mongodb ID in rdfID2MongoID map. These ids will be used to reference the chemicals in Metacyc
         * substrates/products entries, so it's important to get them right (and for the mapping to be complete). */
        rdfID2MongoID.put(cic.c.getID().getLocal(), dbId);
    }

    /* It appears that Catalysis objects can appear outside of BiochemicalPathwaySteps in biopax files. Record which
     * catalyses we've installed from BiochemicalPathwaySteps so that we can ensure full coverage without duplicating
     * reactions in the DB. */
    Set<Resource> seenCatalyses = new HashSet<>(this.enzyme_catalysis.size());

    // Iterate over the BiochemicalPathwaySteps, extracting either Catalyses if available or the raw Conversion if not.
    for (Map.Entry<Resource, BiochemicalPathwayStep> entry : this.biochemicalPathwaySteps.entrySet()) {
        BiochemicalPathwayStep bps = entry.getValue();
        // TODO: does this correctly handle the case where the process consists only of Modulations? Is that possible?
        Set<Resource> catalyses = bps.getProcess();
        if (catalyses == null || catalyses.size() == 0) {
            System.out.format("%s: No catalyses, falling back to conversion %s\n",
                    bps.getID(), bps.getConversion());
            Conversion c = (Conversion) this.src.resolve(bps.getConversion());
            if (c == null) {
                System.err.format("ERROR: could not find expected conversion %s for %s\n",
                        bps.getConversion(), bps.getID());
            } else {
                addReaction(c, rdfID2MongoID, bps.getDirection());
            }
        } else {
            System.out.format("%s: Found %d catalyses\n", bps.getID(), catalyses.size());
            for (Resource res : catalyses) {
                Catalysis c = this.enzyme_catalysis.get(res);
                // Don't warn here, as the stepProcess could be a Modulation and we don't necessarily care about those.
                if (c != null) {
                    seenCatalyses.add(res);
                    addReaction(c, rdfID2MongoID, bps.getDirection());
                }
            }
            newRxns++;
        }
    }

    /* Some Catalysis objects exist outside BiochemicalPathwaySteps, so iterate over all the Catalyses in this file
     * and install any we haven't already seen. */
    for (Map.Entry<Resource, Catalysis> entry : enzyme_catalysis.entrySet()) {
        // Don't re-install Catalysis objects that were part of BiochemicalPathwaySteps, but make sure we get 'em all.
        if (seenCatalyses.contains(entry.getKey())) {
            continue;
        }
        // actually add reaction to DB
        addReaction(entry.getValue(), rdfID2MongoID, null);
        newRxns++;
    }

    // Output stats:
    System.out.format("New writes: %s (%d) :: (rxns)\n", this.originDBSubID, newRxns);
    System.out.format("Ignored %d of %d small molecules with multiple chemical structures\n",
            ignoredMoleculesWithMultipleStructures, totalSmallMolecules);
}
From source file:com.emc.storageos.volumecontroller.impl.plugins.IsilonCommunicationInterface.java
private IsilonApi.IsilonList<FileShare> discoverAllFileSystem(StorageSystem storageSystem, String resumeToken)
        throws IsilonCollectionException {
    // Discover all file systems
    List<FileShare> discoveredFS = new ArrayList<FileShare>();
    URI storageSystemId = storageSystem.getId();
    try {
        _log.info("discoverAllFileSystem for storage system {} - start", storageSystemId);
        IsilonApi isilonApi = getIsilonDevice(storageSystem);
        HashMap<String, FileShare> fsWithQuotaMap = new HashMap<String, FileShare>();

        // Get the first page of quota data, process it, and insert it into the database.
        IsilonApi.IsilonList<IsilonSmartQuota> quotas = isilonApi.listQuotas(null);
        boolean qualified = false;
        for (IsilonSmartQuota quota : quotas.getList()) {
            String fsNativeId = quota.getPath();
            qualified = isUnderUnmanagedDiscoveryPath(fsNativeId);
            if (qualified) {
                FileShare fs = extractFileShare(fsNativeId, quota, storageSystem);
                _log.debug("quota id {} with capacity {}", fsNativeId + ":QUOTA:" + quota.getId(),
                        fs.getCapacity() + " used capacity " + fs.getUsedCapacity());
                fsWithQuotaMap.put(fsNativeId, fs);
            } else {
                _log.debug("quota id {} no FileSystem or directory", fsNativeId);
            }
        }
        // Get all remaining pages of quota data and process them page by page.
        while (quotas.getToken() != null && !quotas.getToken().isEmpty()) {
            quotas = isilonApi.listQuotas(quotas.getToken());
            for (IsilonSmartQuota quota : quotas.getList()) {
                String fsNativeId = quota.getPath();
                qualified = isUnderUnmanagedDiscoveryPath(fsNativeId);
                if (qualified) {
                    FileShare fs = extractFileShare(fsNativeId, quota, storageSystem);
                    _log.debug("quota id {} with capacity {}", fsNativeId + ":QUOTA:" + quota.getId(),
                            fs.getCapacity() + " used capacity " + fs.getUsedCapacity());
                    fsWithQuotaMap.put(fsNativeId, fs);
                } else {
                    _log.debug("quota id {} no FileSystem or directory", fsNativeId);
                }
            }
        }
        _log.info("NativeGUIDGenerator for storage system {} - complete", storageSystemId);

        // Filter out file systems with no quota associated with them.
        discoveredFS = new ArrayList<FileShare>(fsWithQuotaMap.values());
        IsilonApi.IsilonList<FileShare> isilonFSList = new IsilonApi.IsilonList<FileShare>();
        isilonFSList.addList(discoveredFS);
        // isilonFSList.setToken(isilonFileSystems.getToken());
        return isilonFSList;
    } catch (IsilonException ie) {
        _log.error("discoverAllFileSystem failed. Storage system: {}", storageSystemId, ie);
        IsilonCollectionException ice = new IsilonCollectionException(
                "discoverAllFileSystem failed. Storage system: " + storageSystemId);
        ice.initCause(ie);
        throw ice;
    } catch (Exception e) {
        _log.error("discoverAllFileSystem failed. Storage system: {}", storageSystemId, e);
        IsilonCollectionException ice = new IsilonCollectionException(
                "discoverAllFileSystem failed. Storage system: " + storageSystemId);
        ice.initCause(e);
        throw ice;
    }
}
From source file:com.weimed.app.sync.SyncAdapter.java
/**
 * Read JSON from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *       database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalJSONData(final InputStream stream, final SyncResult syncResult)
        throws IOException, JSONException, RemoteException, OperationApplicationException, ParseException {
    final JSONParser JSONParser = new JSONParser();
    final ContentResolver contentResolver = getContext().getContentResolver();
    Log.i(TAG, "Parsing stream as JSON Array");
    final JSONObject json = JSONParser.parseJSONObject(stream);
    Log.i(TAG, "Parsing complete. Found " + json.getInt("total_rows") + " entries");
    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, JSONObject> entryMap = new HashMap<String, JSONObject>();
    final JSONArray entries = json.getJSONArray("rows");
    for (int i = 0; i < json.getInt("total_rows"); i++) {
        JSONObject e = entries.getJSONObject(i).getJSONObject("value");
        entryMap.put(e.getString("_id"), e);
    }

    // Get list of all items
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = NewsContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String title;
    String content;
    String publisher;
    String picurl;
    String originalurl;
    String createdat;
    String updatedat;
    String publishedat;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;
        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        title = c.getString(COLUMN_TITLE);
        content = c.getString(COLUMN_CONTENT);
        publisher = c.getString(COLUMN_PUBLISHER);
        picurl = c.getString(COLUMN_PICURL);
        originalurl = c.getString(COLUMN_ORIGINALURL);
        createdat = c.getString(COLUMN_CREATEDAT);
        updatedat = c.getString(COLUMN_UPDATEDAT);
        publishedat = c.getString(COLUMN_PUBLISHEDAT);
        JSONObject match = entryMap.get(entryId);
        // if (match != null) {
        //     // Entry exists. Remove from entry map to prevent insert later.
        //     entryMap.remove(entryId);
        //     // Check to see if the entry needs to be updated.
        //     // How to know whether to update local or remote? updatedAt! Whichever is newer updates the other.
        //     Uri existingUri = NewsContract.Entry.CONTENT_URI.buildUpon()
        //             .appendPath(Integer.toString(id)).build();
        //     if ((match.getString("title") != null && !match.getString("title").equals(title)) ||
        //             (match.getString("content") != null && !match.getString("content").equals(content)) ||
        //             (match.getString("publisher") != null && !match.getString("publisher").equals(publisher)) ||
        //             (match.getString("picurl") != null && !match.getString("picurl").equals(picurl)) ||
        //             (match.getString("originalurl") != null && !match.getString("originalurl").equals(originalurl)) ||
        //             (match.getString("createdat") != null && !match.getString("createdat").equals(createdat)) ||
        //             (match.getString("updatedat") != null && !match.getString("updatedat").equals(updatedat)) ||
        //             (match.getString("publishedat") != null && !match.getString("publishedat").equals(publishedat))) {
        //         // Update existing record
        //         Log.i(TAG, "Scheduling update: " + existingUri);
        //         batch.add(ContentProviderOperation.newUpdate(existingUri)
        //                 .withValue(NewsContract.Entry.COLUMN_TITLE, title)
        //                 .withValue(NewsContract.Entry.COLUMN_CONTENT, content)
        //                 .withValue(NewsContract.Entry.COLUMN_PUBLISHER, publisher)
        //                 .withValue(NewsContract.Entry.COLUMN_PICURL, picurl)
        //                 .withValue(NewsContract.Entry.COLUMN_ORIGINALURL, originalurl)
        //                 .withValue(NewsContract.Entry.COLUMN_CREATEDAT, createdat)
        //                 .withValue(NewsContract.Entry.COLUMN_UPDATEDAT, updatedat)
        //                 .withValue(NewsContract.Entry.COLUMN_PUBLISHEDAT, publishedat)
        //                 .build());
        //         syncResult.stats.numUpdates++;
        //     } else {
        //         Log.i(TAG, "No action: " + existingUri);
        //     }
        // } else {
        // Entry doesn't exist. Remove it from the database.
        Uri deleteUri = NewsContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
        Log.i(TAG, "Scheduling delete: " + deleteUri);
        batch.add(ContentProviderOperation.newDelete(deleteUri).build());
        syncResult.stats.numDeletes++;
        // }
    }
    c.close();

    // Add new items
    for (JSONObject e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.getString("_id"));
        batch.add(ContentProviderOperation.newInsert(NewsContract.Entry.CONTENT_URI)
                .withValue(NewsContract.Entry.COLUMN_ENTRY_ID, e.getString("_id"))
                .withValue(NewsContract.Entry.COLUMN_TITLE, e.getString("title"))
                .withValue(NewsContract.Entry.COLUMN_CONTENT,
                        fetchTextFileToString(NEWS_URL_BASE + '/' + e.getString("_id") + "/content.md"))
                .withValue(NewsContract.Entry.COLUMN_PUBLISHER, e.getString("publisher"))
                .withValue(NewsContract.Entry.COLUMN_PICURL, e.has("pic_link") ? e.getString("pic_link") : null)
                .withValue(NewsContract.Entry.COLUMN_ORIGINALURL, e.getString("origin_link"))
                .withValue(NewsContract.Entry.COLUMN_CREATEDAT, e.getString("created_at"))
                .withValue(NewsContract.Entry.COLUMN_UPDATEDAT, e.getString("updated_at"))
                .withValue(NewsContract.Entry.COLUMN_PUBLISHEDAT, e.getString("publish_at")).build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(NewsContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(NewsContract.Entry.CONTENT_URI, // URI where data was modified
            null,   // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}
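The merge follows a standard shape: index incoming records in a HashMap, remove each one that matches a local record (updating or deleting as you go), and finally iterate values() to insert whatever remains. A stripped-down sketch of the flow the Javadoc describes (note the update branch is commented out in the example itself), using plain strings in place of JSON objects and the content provider:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class MergeDemo {
    public static void main(String[] args) {
        List<String> local = Arrays.asList("a", "b", "c");
        List<String> incoming = Arrays.asList("b", "c", "d");

        // 1. Index incoming records by id.
        HashMap<String, String> entryMap = new HashMap<>();
        for (String id : incoming) {
            entryMap.put(id, "record-" + id);
        }

        // 2. Walk local records: matches are updates, misses are deletes.
        for (String id : local) {
            if (entryMap.remove(id) != null) {
                System.out.println("update " + id);
            } else {
                System.out.println("delete " + id);
            }
        }

        // 3. Whatever survives in values() is new and gets inserted.
        for (String record : entryMap.values()) {
            System.out.println("insert " + record); // insert record-d
        }
    }
}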
From source file:org.apache.carbondata.spark.partition.api.impl.CSVFilePartitioner.java
@Deprecated
public void splitFile(String databaseName, String tableName, List<String> sourceFilePath, String targetFolder,
        List<String> nodes, int partitionCount, String[] partitionColumn, String[] requiredColumns,
        String delimiter, String quoteChar, String fileHeader, String escapeChar, boolean multiLine)
        throws Exception {
    LOGGER.info("Processing file split: " + sourceFilePath);
    // Create the target folder
    FileFactory.mkdirs(targetFolder, FileFactory.getFileType(targetFolder));
    String[] headerColumns = null;
    HashMap<Partition, CSVWriter> outputStreamsMap = new HashMap<Partition, CSVWriter>(
            CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    String key = databaseName + '_' + tableName;
    badRecordslogger = new BadRecordslogger(key, "Partition_" + System.currentTimeMillis() + ".log",
            getBadLogStoreLocation("partition/" + databaseName + '/' + tableName));
    CSVReader dataInputStream = null;
    long recordCounter = 0;
    CSVParser customParser = getCustomParser(delimiter, quoteChar, escapeChar);
    for (int i = 0; i < sourceFilePath.size(); i++) {
        try {
            CarbonFile file = FileFactory.getCarbonFile(sourceFilePath.get(i),
                    FileFactory.getFileType(sourceFilePath.get(i)));
            // File file = new File(sourceFilePath);
            String fileAbsolutePath = file.getAbsolutePath();
            String fileName = null;
            if (!sourceFilesBasePath.endsWith(".csv") && fileAbsolutePath.startsWith(sourceFilesBasePath)) {
                if (sourceFilesBasePath.endsWith(File.separator)) {
                    fileName = fileAbsolutePath.substring(sourceFilesBasePath.length())
                            .replace(File.separator, "_");
                } else {
                    fileName = fileAbsolutePath.substring(sourceFilesBasePath.length() + 1)
                            .replace(File.separator, "_");
                }
            } else {
                fileName = file.getName();
            }
            // Read and prepare columns from first row in file
            DataInputStream inputStream = FileFactory.getDataInputStream(sourceFilePath.get(i),
                    FileFactory.getFileType(sourceFilePath.get(i)));
            if (fileName.endsWith(".gz")) {
                GZIPInputStream gzipInputStream = new GZIPInputStream(inputStream);
                dataInputStream = new CSVReader(
                        new InputStreamReader(gzipInputStream, Charset.defaultCharset()),
                        CSVReader.DEFAULT_SKIP_LINES, customParser);
                fileName = fileName.substring(0, fileName.indexOf(".gz"));
            } else if (fileName.endsWith(".bz2")) {
                BZip2CompressorInputStream stream = new BZip2CompressorInputStream(inputStream);
                dataInputStream = new CSVReader(new InputStreamReader(stream, Charset.defaultCharset()),
                        CSVReader.DEFAULT_SKIP_LINES, customParser);
                fileName = fileName.substring(0, fileName.indexOf(".bz2"));
            } else if (fileName.endsWith(".csv")) {
                dataInputStream = new CSVReader(new InputStreamReader(inputStream, Charset.defaultCharset()),
                        CSVReader.DEFAULT_SKIP_LINES, customParser);
                fileName = fileName.substring(0, fileName.indexOf(".csv"));
            } else {
                LOGGER.info("Processing file split: Unsupported File Extension: Skipping File : "
                        + file.getAbsolutePath());
                partialSuccess = true;
                return;
            }
            dataInputStream.setBadRecordsLogger(badRecordslogger);
            if (fileHeader == null || fileHeader.length() == 0) {
                headerColumns = dataInputStream.readNext();
            } else {
                headerColumns = fileHeader.split(",");
            }
            if (null == headerColumns) {
                LOGGER.info("Csv file does not contain the header column neither the headers are "
                        + "passed in DDL or API. Skipping file :: " + sourceFilePath);
                partialSuccess = true;
                return;
            }
            int[] indexes = pruneColumnsAndGetIndexes(headerColumns, requiredColumns);
            // In case there is a dummy measure, required columns length and
            // header columns length will not be equal
            if ((null == fileHeader || 0 == fileHeader.length()) && (0 == indexes.length)
                    && (fileHeader.length() != indexes.length)) {
                LOGGER.info("Column headers are invalid. They do not match with the schema headers."
                        + "Skipping file :: " + sourceFilePath);
                partialSuccess = true;
                return;
            }
            partitionData(targetFolder, nodes, partitionCount, partitionColumn, headerColumns,
                    outputStreamsMap, dataInputStream, recordCounter, fileName, indexes, fileAbsolutePath);
        } catch (IOException e) {
            LOGGER.error(e, e.getMessage());
        } finally {
            CarbonUtil.closeStreams(dataInputStream);
            for (CSVWriter dataOutStream : outputStreamsMap.values()) {
                CarbonUtil.closeStreams(dataOutStream);
            }
            badRecordslogger.closeStreams();
        }
    }
}
From source file:edu.isi.wings.catalog.component.api.impl.kb.ComponentReasoningKB.java
/**
 * <b>Query 4.2</b><br/>
 * This function is supposed to <b>SET</b> the DataSet Metrics, or Parameter
 * Values for the Variables that are passed in via the input/output maps as
 * part of details.<br/>
 * Variables will already be bound to dataObjects, so the function will have
 * to do something like the following:
 *
 * <pre>
 * If Variable.isParameterVariable() Variable.setParameterValue(value)
 * If Variable.isDataVariable() Variable.getDataObjectBinding().setDataMetrics(xml)
 * </pre>
 *
 * @param details
 *            A ComponentDetails Object which contains:
 *            <ul>
 *            <li>component,
 *            <li>maps of component input arguments to template variables,
 *            <li>maps of component output arguments to template variables,
 *            <li>template variable descriptions (dods) - list of triples
 *            </ul>
 * @return List of extra template variable descriptions (will mostly be
 *         empty in Q4.2 though)
 */
public ArrayList<ComponentPacket> findOutputDataPredictedDescriptions(ComponentPacket details) {
    ArrayList<ComponentPacket> list = new ArrayList<ComponentPacket>();
    HashMap<String, KBObject> omap = this.objPropMap;
    HashMap<String, KBObject> dmap = this.dataPropMap;

    // If the component has no rules, then simplify !!

    // Extract info from details object
    ComponentVariable c = details.getComponent();
    HashMap<String, Variable> sRoleMap = details.getStringRoleMaps();
    HashMap<String, Boolean> noParamBindings = new HashMap<String, Boolean>();
    ArrayList<KBTriple> redbox = details.getRequirements();
    Component comp = this.getCachedComponent(c.getBinding().getID());
    if (comp == null) {
        logger.debug(c.getBinding().getID() + " is not a valid component");
        details.addExplanations(c.getBinding().getID() + " is not a valid component");
        details.setInvalidFlag(true);
        list.add(details);
        return list;
    }
    c.setRequirements(comp.getComponentRequirement());
    boolean typesOk = true;

    // Set default parameter values (if not already set)
    // - Also recheck type compatibility
    ArrayList<String> inputRoles = new ArrayList<String>();
    for (ComponentRole role : comp.getInputs()) {
        inputRoles.add(role.getRoleName());
        Variable v = sRoleMap.get(role.getRoleName());
        if (role.isParam()) {
            if (v.getBinding() == null) {
                v.setBinding(new ValueBinding(role.getParamDefaultalue()));
                noParamBindings.put(v.getID(), true);
            } else if (v.getBinding().getValue() == null) {
                v.getBinding().setValue(role.getParamDefaultalue());
                noParamBindings.put(v.getID(), true);
            }
        } else {
            ArrayList<String> varclassids = new ArrayList<String>();
            ArrayList<Metric> vartypes = v.getBinding().getMetrics().getMetrics().get(KBUtils.RDF + "type");
            if (vartypes != null) {
                for (Metric m : vartypes) {
                    varclassids.add(m.getValueAsString());
                }
                // Check type compatibility of roles
                if (!checkTypeCompatibility(varclassids, role.getID())) {
                    details.addExplanations("INFO " + comp + " is not selectable because " + role.getID()
                            + " is not type compatible with variable binding: " + v.getBinding());
                    typesOk = false;
                    break;
                }
            }
        }
    }
    details.setInputRoles(inputRoles);

    if (!typesOk) {
        details.setInvalidFlag(true);
        list.add(details);
        return list;
    }

    if (!comp.hasRules()) {
        // No rules. Just set default parameter values (if not already set)
        list.add(details);
        return list;
    }

    // Create a new temporary KB store to run rules on
    KBAPI tkb = this.ontologyFactory.getKB(OntSpec.PLAIN);
    KBObject compobj = this.kb.getIndividual(comp.getID());

    // Add component to the temporary KB store (add all its classes explicitly)
    KBObject tcomp = this.copyObjectIntoKB(comp.getID(), compobj, tkb, this.pcdomns, null, false);

    // Keep a map of variable object to variable name
    HashMap<Variable, String> variableNameMap = new HashMap<Variable, String>();
    for (String rolestr : sRoleMap.keySet()) {
        Variable var = sRoleMap.get(rolestr);
        // Map template variable to a temporary variable for running rules
        // - Reason is that the same variable may be used in multiple roles
        //   and we want to distinguish them
        String variableName = var.getID() + "_" + rolestr;
        variableNameMap.put(var, variableName);
    }

    // Add the information from redbox to the temporary KB store

    // Cache varid to varobj
    HashMap<String, KBObject> varIDObjMap = new HashMap<String, KBObject>();
    for (Variable var : sRoleMap.values()) {
        KBObject varobj = tkb.getResource(variableNameMap.get(var));
        varIDObjMap.put(var.getID(), varobj);
    }
    // Add information from redbox
    for (KBTriple t : redbox) {
        KBObject subj = varIDObjMap.get(t.getSubject().getID());
        KBObject obj = varIDObjMap.get(t.getObject().getID());
        if (subj == null)
            subj = t.getSubject();
        if (obj == null)
            obj = t.getObject();
        tkb.addTriple(subj, t.getPredicate(), obj);
    }

    // Get a mapping of ArgID's to arg for the Component
    // Also note which roles are inputs
    HashMap<String, ComponentRole> argMaps = new HashMap<String, ComponentRole>();
    HashMap<String, Boolean> sInputRoles = new HashMap<String, Boolean>();
    for (ComponentRole role : comp.getInputs()) {
        argMaps.put(role.getRoleName(), role);
        sInputRoles.put(role.getRoleName(), true);
    }
    for (ComponentRole role : comp.getOutputs()) {
        argMaps.put(role.getRoleName(), role);
    }

    // Convert metrics to Property assertions in the Temporary KB
    for (String rolestr : sRoleMap.keySet()) {
        Variable var = sRoleMap.get(rolestr);
        ComponentRole arg = argMaps.get(rolestr);
        if (arg == null) {
            details.addExplanations("ERROR Component catalog cannot recognize role id " + rolestr);
            continue;
        }
        String variableName = variableNameMap.get(var);

        // Get a KBObject for the temporary variable
        KBObject varobj = tkb.getResource(variableName);

        if (var.isDataVariable()) {
            // If the variable is a data variable (& is bound)
            if (var.getBinding() != null) {
                // Convert Metrics to PC properties in order to run rules
                Metrics metrics = var.getBinding().getMetrics();
                HashMap<String, ArrayList<Metric>> propValMap = metrics.getMetrics();
                for (String propid : propValMap.keySet()) {
                    for (Metric tmp : propValMap.get(propid)) {
                        Object val = tmp.getValue();
                        String valstring = tmp.getValueAsString();
                        int type = tmp.getType();
                        String dtype = tmp.getDatatype();
                        KBObject metricProp = this.kb.getProperty(propid);
                        if (metricProp != null) {
                            //System.out.println(var.getName()+": " + propid + " = " +valstring);
                            if (type == Metric.URI) {
                                // Object Property
                                KBObject valobj = this.kb.getResource(valstring);
                                if (valobj == null) {
                                    // TODO: Log and explain (make a utility function)
                                    details.addExplanations("ERROR Cannot Recognize Metrics Value " + valstring);
                                    continue;
                                }
                                // Copy over the object class into kb as well
                                // (except where the object itself is a class)
                                if (!metricProp.getID().equals(KBUtils.RDF + "type")) {
                                    valobj = this.copyObjectIntoKB(valobj.getID(), valobj, tkb, null, null, true);
                                    // Remove any existing values first
                                    for (KBTriple t : tkb.genericTripleQuery(varobj, metricProp, null))
                                        tkb.removeTriple(t);
                                }
                                // Add a Triple for the metric property value
                                tkb.addTriple(varobj, metricProp, valobj);
                            } else if (type == Metric.LITERAL && val != null) {
                                // Literal value
                                KBObject tobj = dtype != null ? tkb.createXSDLiteral(valstring, dtype)
                                        : tkb.createLiteral(val);
                                if (tobj != null) {
                                    // Remove any existing values first
                                    for (KBTriple t : tkb.genericTripleQuery(varobj, metricProp, null))
                                        tkb.removeTriple(t);
                                    // Add a Triple for the metric property value
                                    tkb.addTriple(varobj, metricProp, tobj);
                                } else {
                                    details.addExplanations("ERROR Cannot Convert Metrics Value " + valstring);
                                    continue;
                                }
                            }
                        } else {
                            // TODO: Log and explain (make a utility function)
                            details.addExplanations(
                                    "ERROR No Such Metrics Property Known to Component Catalog : " + propid);
                            continue;
                        }
                    }
                }
                // Create other standard PC properties on variable
                // - hasDimensionSizes
                // - hasBindingID
                if (var.getBinding().isSet()) {
                    String dimensionSizes = "";
                    ArrayList<Binding> vbs = new ArrayList<Binding>();
                    vbs.add(var.getBinding());
                    while (!vbs.isEmpty()) {
                        Binding vb = vbs.remove(0);
                        if (vb.isSet()) {
                            for (WingsSet vs : vb) {
                                vbs.add((Binding) vs);
                            }
                            if (!dimensionSizes.equals(""))
                                dimensionSizes += ",";
                            dimensionSizes += vb.getSize();
                        }
                    }
                    tkb.setPropertyValue(varobj, dmap.get("hasDimensionSizes"),
                            tkb.createLiteral(dimensionSizes));
                }
                if (var.getBinding().getID() != null)
                    tkb.addTriple(varobj, dmap.get("hasBindingID"),
                            tkb.createLiteral(var.getBinding().getName()));
                else
                    tkb.addTriple(varobj, dmap.get("hasBindingID"), tkb.createLiteral(""));
                // end if (var.getDataBinding() != null)
            }
            // end if (var.isDataVariable())
        } else if (var.isParameterVariable()) {
            // If the Variable/Argument is a Parameter
            ValueBinding parambinding = (ValueBinding) var.getBinding();
            if (parambinding != null && parambinding.getValue() != null) {
                // If the template has any value specified, use that instead
                //arg_value = tkb.createLiteral(var.getBinding().getValue());
                KBObject arg_value = tkb.createXSDLiteral(parambinding.getValueAsString(),
                        parambinding.getDatatype());
                tkb.setPropertyValue(varobj, dmap.get("hasValue"), arg_value);
            }
            if (dmap.containsKey("hasBindingID"))
                // Set the hasBindingID term
                tkb.addTriple(varobj, dmap.get("hasBindingID"), tkb.createLiteral("Param" + arg.getName()));
        }

        // Copy argument classes from Catalog as classes for the temporary
        // variable in the temporary kb store
        KBObject argobj = kb.getIndividual(arg.getID());
        this.copyObjectClassesIntoKB(varobj.getID(), argobj, tkb, null, null, true);

        // Set the temporary variable's argumentID so rules can get/set
        // triples based on the argument
        tkb.addTriple(varobj, dmap.get("hasArgumentID"), tkb.createLiteral(rolestr));

        // Set hasInput or hasOutput for the temporary Variable
        if (sInputRoles.containsKey(rolestr)) {
            tkb.addTriple(tcomp, omap.get("hasInput"), varobj);
        } else {
            tkb.addTriple(tcomp, omap.get("hasOutput"), varobj);
        }
    } // end of for (String rolestr : sRoleMap.keySet())

    // Add all metrics and datametrics properties to temporary store
    tkb.addTriples(metricTriples);

    // Set current output variable metrics to do a diff with later
    for (String rolestr : sRoleMap.keySet()) {
        Variable var = sRoleMap.get(rolestr);
        if (var.isDataVariable() && !sInputRoles.containsKey(rolestr)) {
            Metrics metrics = new Metrics();
            KBObject varobj = tkb.getResource(variableNameMap.get(var));
            // Create Metrics from PC Properties
            for (KBObject metricProp : metricProps) {
                KBObject val = tkb.getPropertyValue(varobj, metricProp);
                if (val == null)
                    continue;
                // Add value
                if (val.isLiteral())
                    metrics.addMetric(metricProp.getID(),
                            new Metric(Metric.LITERAL, val.getValue(), val.getDataType()));
                else
                    metrics.addMetric(metricProp.getID(), new Metric(Metric.URI, val.getID()));
            }
            var.getBinding().setMetrics(metrics);
        }
    }

    KBRuleList rules = this.getCachedComponentRules(comp);
    if (rules.getRules().size() > 0) {
        // Redirect Standard output to a byte stream
        ByteArrayOutputStream bost = new ByteArrayOutputStream();
        PrintStream oldout = System.out;
        System.setOut(new PrintStream(bost, true));

        // *** Run propagation rules on the temporary ontmodel ***
        tkb.setRulePrefixes(this.rulePrefixes);
        tkb.applyRules(rules);
        //tkb.applyRulesFromString(allrules);

        // Add printouts from rules as explanations
        if (!bost.toString().equals("")) {
            for (String exp : bost.toString().split("\\n")) {
                details.addExplanations(exp);
            }
        }

        // Reset the Standard output
        System.setOut(oldout);
    }

    // Check if the rules marked this component as invalid for
    // the current component details packet
    KBObject invalidProp = this.dataPropMap.get("isInvalid");
    KBObject isInvalid = tkb.getPropertyValue(tcomp, invalidProp);
    if (isInvalid != null && (Boolean) isInvalid.getValue()) {
        details.addExplanations("INFO " + tcomp + " is not valid for its inputs");
        logger.debug(tcomp + " is not valid for its inputs");
        details.setInvalidFlag(true);
        list.add(details);
        return list;
    }

    // Check component dependencies
    // If set, overwrite the component dependencies with these
    ComponentRequirement req = this.getComponentRequirements(tcomp, tkb);
    if (req != null) {
        if (req.getMemoryGB() != 0)
            c.getRequirements().setMemoryGB(req.getMemoryGB());
        if (req.getStorageGB() != 0)
            c.getRequirements().setStorageGB(req.getStorageGB());
    }

    // Set values of variables by looking at values set by rules
    // in temporary kb store
    // - Only set if there isn't already a binding value for the variable
    for (Variable var : sRoleMap.values()) {
        if (var.isParameterVariable() && (noParamBindings.containsKey(var.getID()) || var.getBinding() == null
                || var.getBinding().getValue() == null)) {
            KBObject varobj = tkb.getResource(variableNameMap.get(var));
            KBObject origvarobj = tkb.getResource(var.getID());
            KBObject val = tkb.getPropertyValue(varobj, dmap.get("hasValue"));
            if (val != null && val.getValue() != null) {
                tkb.addTriple(origvarobj, tkb.getResource(this.wflowns + "hasParameterValue"), val);
                var.setBinding(new ValueBinding(val.getValue(), val.getDataType()));
            }
        }
    }

    // To create the output Variable metrics, we go through the metrics
    // property of the output data variables and get their metrics property
    // values
    for (String rolestr : sRoleMap.keySet()) {
        Variable var = sRoleMap.get(rolestr);
        if (var.isDataVariable() && !sInputRoles.containsKey(rolestr)) {
            Metrics curmetrics = var.getBinding().getMetrics();
            Metrics metrics = new Metrics();
            KBObject varobj = tkb.getResource(variableNameMap.get(var));
            // Create Metrics from PC Properties
            for (KBObject metricProp : metricProps) {
                ArrayList<KBObject> vals = tkb.getPropertyValues(varobj, metricProp);
                if (vals == null)
                    continue;
                for (KBObject val : vals) {
                    if (vals.size() > 1) {
                        if (!curmetrics.getMetrics().containsKey(metricProp.getID()))
                            continue;
                        // If multiple values present, ignore value that is equal to current value
                        for (Metric mval : curmetrics.getMetrics().get(metricProp.getID())) {
                            if (!val.isLiteral() && val.getID().equals(mval.getValue()))
                                continue;
                            else if (val.isLiteral() && val.getValue().equals(mval.getValue()))
                                continue;
                        }
                    }
                    // Add value
                    if (val.isLiteral())
                        metrics.addMetric(metricProp.getID(),
                                new Metric(Metric.LITERAL, val.getValue(), val.getDataType()));
                    else
                        metrics.addMetric(metricProp.getID(), new Metric(Metric.URI, val.getID()));
                }
            }
            ArrayList<KBObject> clses = this.getAllClassesOfInstance(tkb, varobj.getID());
            for (KBObject cls : clses)
                metrics.addMetric(KBUtils.RDF + "type", new Metric(Metric.URI, cls.getID()));

            // Set metrics for the Binding
            if (var.getBinding() != null)
                var.getBinding().setMetrics(metrics);

            // -- Dealing with Collections --
            // Use other Properties for creating output binding collections
            // and setting the collection item metrics as well.
            // PC Properties used:
            // - hasDimensionSizes
            // - hasDimensionIndexProperties
            int dim = 0;
            final int maxdims = 10; // not more than 10 dimensions
            int[] dimSizes = new int[maxdims];
            String[] dimIndexProps = new String[maxdims];
            KBObject dimSizesObj = tkb.getPropertyValue(varobj, dmap.get("hasDimensionSizes"));
            KBObject dimIndexPropsObj = tkb.getPropertyValue(varobj, dmap.get("hasDimensionIndexProperties"));

            // Parse dimension sizes string (can be given as a comma-separated list)
            // Example: 2,3
            // - This will create a 2x3 matrix
            if (dimSizesObj != null && dimSizesObj.getValue() != null) {
                if (dimSizesObj.getValue().getClass().getName().equals("java.lang.Integer")) {
                    dimSizes[0] = (Integer) dimSizesObj.getValue();
                    dim = 1;
                } else {
                    String dimSizesStr = (String) dimSizesObj.getValue();
                    for (String dimSize : dimSizesStr.split(",")) {
                        try {
                            int size = Integer.parseInt(dimSize);
                            dimSizes[dim] = size;
                            dim++;
                        } catch (Exception e) {
                        }
                    }
                }
            }

            // Parse dimension index string (can be given as a comma-separated list)
            // Example: hasXIndex, hasYIndex
            // - This will set each output item's
            //   - first dimension index using property hasXIndex
            //   - second dimension index using property hasYIndex
            // Example output:
            // - output
            //   - output0 (hasXIndex 0)
            //     - output00 (hasXIndex 0, hasYIndex 0)
            //     - output01 (hasXIndex 0, hasYIndex 1)
            //   - output0 (hasXIndex 1)
            //     - output10 (hasXIndex 1, hasYIndex 0)
            //     - output11 (hasXIndex 1, hasYIndex 1)
            if (dimIndexPropsObj != null && dimIndexPropsObj.getValue() != null) {
                int xdim = 0;
                String dimIndexPropsStr = (String) dimIndexPropsObj.getValue();
                for (String dimIndexProp : dimIndexPropsStr.split(",")) {
                    try {
                        dimIndexProps[xdim] = dimIndexProp;
                        xdim++;
                    } catch (Exception e) {
                    }
                }
            }

            // If the output is a collection:
            // dim = 1 is a List
            // dim = 2 is a Matrix
            // dim = 3 is a Cube
            // .. and so on
            if (dim > 0) {
                int[] dimCounters = new int[dim];
                dimCounters[0] = 1;
                for (int k = 1; k < dim; k++) {
                    int perms = 1;
                    for (int l = k - 1; l >= 0; l--)
                        perms *= dimSizes[l];
                    dimCounters[k] = dimCounters[k - 1] + perms;
                }

                Binding b = var.getBinding();
                ArrayList<Binding> vbs = new ArrayList<Binding>();
                vbs.add(b);
                int counter = 0;
                while (!vbs.isEmpty()) {
                    Binding vb = vbs.remove(0);
                    if (vb.getMetrics() == null)
                        continue;
                    int vdim = 0;
                    for (vdim = 0; vdim < dim; vdim++) {
                        if (counter < dimCounters[vdim])
                            break;
                    }
                    if (vdim < dim) {
                        for (int i = 0; i < dimSizes[vdim]; i++) {
                            Binding cvb = new Binding(b.getNamespace() + UuidGen.generateAUuid("" + i));
                            // Copy over metrics from parent variable binding
                            Metrics tmpMetrics = new Metrics(vb.getMetrics());
                            // Add dimension index (if property set)
                            String prop = dimIndexProps[vdim];
                            if (prop != null && !prop.equals("")) {
                                Metric nm = new Metric(Metric.LITERAL, i, KBUtils.XSD + "integer");
                                tmpMetrics.addMetric(this.dcdomns + prop, nm);
                            }
                            cvb.setMetrics(tmpMetrics);
                            vb.add(cvb);
                            vbs.add(cvb);
                        }
                    }
                    counter++;
                }
            } // end if (dim > 0)
        }
    }

    // FIXME: Handle multiple configurations
    list.add(details);
    return list;
}
From source file:it.prato.comune.sit.LayerTerritorio.java
/**
 * Modifies a territory object contained in this layer. The changes may
 * affect both the alphanumeric part and the geometry. The object passed
 * as a parameter (oggettoTer) replaces the one currently present that is
 * identified by the same ID.
 *
 * @param oggettoTer Modified object to write
 * @param transaction Transaction
 * @throws IOException Generic error while accessing the DBStore
 * @throws SITException
 * @throws SQLException
 * @throws SITParseException Error parsing the WKT string
 */
public void modifyFeature(OggettoTerritorio oggettoTer, SITTransaction transaction)
        throws IOException, SITException, SQLException {
    String ID = null;
    try {
        ID = getFIDScrittura(oggettoTer);
    } catch (SITFIDMapperException e) {
        logger.warn(this.getClass().getName() + " SITFIDMapperException durante recupero FIDScrittura", e);
        throw e;
    } catch (IOException e) {
        logger.warn(this.getClass().getName() + " IOException durante recupero FIDScrittura", e);
        throw e;
    } catch (SITException e) {
        logger.warn(this.getClass().getName() + " SITException durante recupero FIDScrittura", e);
        throw e;
    } catch (SQLException e) {
        logger.warn(this.getClass().getName() + " SQLException durante recupero FIDScrittura", e);
        throw e;
    }

    // TOB
    // FilterFactory ff = FilterFactory.createFilterFactory();
    // FidFilter fidFilter = ff.createFidFilter(ID);
    Id idFilter = SITFilterFactory.id(ID);

    HashMap<String, String> nch = oggettoTer.getFlagVariazione();
    ArrayList<Object> objAttrs = new ArrayList<Object>();
    ArrayList<String> nc = new ArrayList<String>();
    Iterator<String> iter = nch.values().iterator();
    DataStore ds = null;
    try {
        while (iter.hasNext()) {
            String nome = (String) iter.next();
            if (nome.equals("##GEOM##")) {
                // TOB
                // nc.add(getDataStore().getSchema(configBean.getTypeName()).getDefaultGeometry().getName());
                ds = getDataStore();
                nc.add(ds.getSchema(configBean.getTypeName()).getGeometryDescriptor().getLocalName());
                objAttrs.add(oggettoTer.getFeat().getDefaultGeometry());
            } else {
                objAttrs.add(oggettoTer.getAttribute(nome));
                nc.add(nome);
            }
        }
    } catch (IOException e) {
        logger.warn(this.getClass().getName() + " IOException durante recupero campi", e);
        throw e;
    } catch (SITException e) {
        logger.warn(this.getClass().getName() + " SITException durante recupero campi", e);
        throw e;
    }

    // ////////////////////////////////////////////////////////
    // WKTString is passed as null because the Geometry field
    // is already included in nc when needed.
    // ////////////////////////////////////////////////////////
    try {
        if (transaction == null) {
            modifyFeatureWKT(null, nc, objAttrs, idFilter);
        } else {
            modifyFeatureWKT(null, nc, objAttrs, idFilter, transaction);
        }
    } catch (SITParseException e) {
        // ///////////////////////////////////////////////////////////////
        // This error cannot occur because the call uses null as the
        // WKTString, since the geometry is already present.
        // ///////////////////////////////////////////////////////////////
    } catch (SITIllegalAttributeException e) {
        logger.warn(this.getClass().getName() + " SITIllegalAttributeException durante modifyFeatureWKT", e);
        throw e;
    } catch (IOException e) {
        logger.warn(this.getClass().getName() + " IOException durante modifyFeatureWKT", e);
        throw e;
    } catch (SITException e) {
        logger.warn(this.getClass().getName() + " SITException durante modifyFeatureWKT", e);
        throw e;
    }
}