List of usage examples for java.util TreeMap containsKey
public boolean containsKey(Object key)
From source file:org.apache.hadoop.hbase.backup.util.RestoreServerUtil.java
/** * Calculate region boundaries and add all the column families to the table descriptor * @param regionDirList region dir list/*from www .ja v a2s .c o m*/ * @return a set of keys to store the boundaries */ byte[][] generateBoundaryKeys(ArrayList<Path> regionDirList) throws FileNotFoundException, IOException { TreeMap<byte[], Integer> map = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR); // Build a set of keys to store the boundaries byte[][] keys = null; // calculate region boundaries and add all the column families to the table descriptor for (Path regionDir : regionDirList) { LOG.debug("Parsing region dir: " + regionDir); Path hfofDir = regionDir; if (!fs.exists(hfofDir)) { LOG.warn("HFileOutputFormat dir " + hfofDir + " not found"); } FileStatus[] familyDirStatuses = fs.listStatus(hfofDir); if (familyDirStatuses == null) { throw new IOException("No families found in " + hfofDir); } for (FileStatus stat : familyDirStatuses) { if (!stat.isDirectory()) { LOG.warn("Skipping non-directory " + stat.getPath()); continue; } boolean isIgnore = false; String pathName = stat.getPath().getName(); for (String ignore : ignoreDirs) { if (pathName.contains(ignore)) { LOG.warn("Skipping non-family directory" + pathName); isIgnore = true; break; } } if (isIgnore) { continue; } Path familyDir = stat.getPath(); LOG.debug("Parsing family dir [" + familyDir.toString() + " in region [" + regionDir + "]"); // Skip _logs, etc if (familyDir.getName().startsWith("_") || familyDir.getName().startsWith(".")) { continue; } // start to parse hfile inside one family dir Path[] hfiles = FileUtil.stat2Paths(fs.listStatus(familyDir)); for (Path hfile : hfiles) { if (hfile.getName().startsWith("_") || hfile.getName().startsWith(".") || StoreFileInfo.isReference(hfile.getName()) || HFileLink.isHFileLink(hfile.getName())) { continue; } HFile.Reader reader = HFile.createReader(fs, hfile, new CacheConfig(conf), conf); final byte[] first, last; try { reader.loadFileInfo(); first = 
reader.getFirstRowKey(); last = reader.getLastRowKey(); LOG.debug("Trying to figure out region boundaries hfile=" + hfile + " first=" + Bytes.toStringBinary(first) + " last=" + Bytes.toStringBinary(last)); // To eventually infer start key-end key boundaries Integer value = map.containsKey(first) ? (Integer) map.get(first) : 0; map.put(first, value + 1); value = map.containsKey(last) ? (Integer) map.get(last) : 0; map.put(last, value - 1); } finally { reader.close(); } } } } keys = LoadIncrementalHFiles.inferBoundaries(map); return keys; }
From source file:org.dllearner.reasoning.SPARQLReasoner.java
@Override public ObjectPropertyHierarchy prepareObjectPropertyHierarchy() throws ReasoningMethodUnsupportedException { // if(precomputeObjectPropertyHierarchy) { logger.info("Preparing object property subsumption hierarchy ..."); long startTime = System.currentTimeMillis(); TreeMap<OWLObjectProperty, SortedSet<OWLObjectProperty>> subsumptionHierarchyUp = new TreeMap<>(); TreeMap<OWLObjectProperty, SortedSet<OWLObjectProperty>> subsumptionHierarchyDown = new TreeMap<>(); String query = "SELECT * WHERE {" + "?sub a <http://www.w3.org/2002/07/owl#ObjectProperty> . " + "FILTER NOT EXISTS{?sub a <http://www.w3.org/2002/07/owl#DatatypeProperty>}" // TODO remove workaround + "FILTER(?sub != <http://www.w3.org/2002/07/owl#bottomObjectProperty> && ?sub != <http://www.w3.org/2002/07/owl#topObjectProperty>)" + "OPTIONAL {" + "?sub <http://www.w3.org/2000/01/rdf-schema#subPropertyOf> ?sup ." + "?sup a <http://www.w3.org/2002/07/owl#ObjectProperty> . " + "FILTER(?sup != ?sub && ?sup != <http://www.w3.org/2002/07/owl#topObjectProperty>)" + "}" + "}"; ResultSet rs = executeSelectQuery(query); while (rs.hasNext()) { QuerySolution qs = rs.next();/*from ww w . 
j a va 2s .c o m*/ if (qs.get("sub").isURIResource()) { IRI iri = IRI.create(qs.get("sub").asResource().getURI()); if (!iri.isReservedVocabulary()) { OWLObjectProperty sub = df.getOWLObjectProperty(iri); // add sub properties entry if (!subsumptionHierarchyDown.containsKey(sub)) { subsumptionHierarchyDown.put(sub, new TreeSet<>()); } // add super properties entry if (!subsumptionHierarchyUp.containsKey(sub)) { subsumptionHierarchyUp.put(sub, new TreeSet<>()); } // if there is a super property if (qs.get("sup") != null && qs.get("sup").isURIResource()) { OWLObjectProperty sup = df .getOWLObjectProperty(IRI.create(qs.get("sup").asResource().getURI())); // add sub properties entry if (!subsumptionHierarchyDown.containsKey(sup)) { subsumptionHierarchyDown.put(sup, new TreeSet<>()); } // add super properties entry if (!subsumptionHierarchyUp.containsKey(sup)) { subsumptionHierarchyUp.put(sup, new TreeSet<>()); } // add super properties entry SortedSet<OWLObjectProperty> superClasses = subsumptionHierarchyUp.get(sub); if (superClasses == null) { superClasses = new TreeSet<>(); subsumptionHierarchyUp.put(sub, superClasses); } superClasses.add(sup); // add sub properties entry SortedSet<OWLObjectProperty> subProperties = subsumptionHierarchyDown.get(sup); if (subProperties == null) { subProperties = new TreeSet<>(); subsumptionHierarchyDown.put(sup, subProperties); } subProperties.add(sub); } } } } logger.info("... done in {}ms", (System.currentTimeMillis() - startTime)); roleHierarchy = new ObjectPropertyHierarchy(subsumptionHierarchyUp, subsumptionHierarchyDown); // } return roleHierarchy; }
From source file:com.google.gwt.emultest.java.util.TreeMapTest.java
public void testContainsKey_ComparableKey() { TreeMap<String, Object> map = new TreeMap<String, Object>(); ConflictingKey conflictingKey = new ConflictingKey("conflictingKey"); assertFalse(map.containsKey(conflictingKey)); map.put("something", "value"); assertFalse(map.containsKey(conflictingKey)); }
From source file:com.projity.server.data.Serializer.java
/**
 * Serializes a {@link Project} into a transferable {@code ProjectData}, including
 * resources, tasks, links, field values and (optionally incremental) time
 * distributions.
 * <p>
 * When {@code incremental} serialization is enabled, the previously stored
 * distribution map is diffed against the freshly computed distributions and each
 * entry is tagged INSERT/UPDATE/REMOVE (or dropped when unchanged).
 *
 * @param project         the project to serialize
 * @param flatAssignments collector for flattened assignments (passed to saveTasks)
 * @param flatLinks       collector for flattened links (passed to saveTasks)
 * @param incremental     whether to serialize only changes since the last run
 * @param options         serialization options forwarded to saveTasks
 * @return the populated ProjectData transfer object
 * @throws Exception propagated from the underlying serialization steps
 */
public ProjectData serializeProject(Project project, Collection flatAssignments, Collection flatLinks,
        boolean incremental, SerializeOptions options) throws Exception {
    if (TMP_FILES)
        initTmpDir();
    // The project can force a full (non-incremental) serialization.
    if (project.isForceNonIncremental())
        incremental = false;
    // Distributions may opt out of incremental mode independently.
    boolean incrementalDistributions = incremental && !project.isForceNonIncrementalDistributions();
    // calendars.clear();
    Count projectCount = new Count("Project");
    //if (globalIdsOnly) makeGLobal(project);
    ProjectData projectData = (ProjectData) serialize(project, ProjectData.FACTORY, projectCount);
    // Version 0 signals a full snapshot to the receiving side.
    if (project.isForceNonIncremental())
        projectData.setVersion(0);
    projectData.setMaster(project.isMaster());
    // projectData.setExternalId(project.getExternalId());
    projectCount.dump();
    // resources
    Map resourceMap = saveResources(project, projectData);
    // tasks (also fills flatAssignments / flatLinks)
    saveTasks(project, projectData, resourceMap, flatAssignments, flatLinks, incremental, options);
    // distributions
    long t = System.currentTimeMillis();
    Collection<DistributionData> dist = (Collection<DistributionData>) (new DistributionConverter())
            .createDistributionData(project, incrementalDistributions);
    if (dist == null) {
        dist = new ArrayList<DistributionData>();
    }
    projectData.setDistributions(dist);
    projectData.setIncrementalDistributions(incrementalDistributions);
    // distMap holds the distributions from the previous serialization run;
    // lazily created so first-time projects start with an empty baseline.
    TreeMap<DistributionData, DistributionData> distMap = project.getDistributionMap();
    if (distMap == null) {
        distMap = new TreeMap<DistributionData, DistributionData>(new DistributionComparator());
        project.setDistributionMap(distMap);
    }
    // newDistMap becomes the baseline for the next run.
    TreeMap<DistributionData, DistributionData> newDistMap = new TreeMap<DistributionData, DistributionData>(
            new DistributionComparator());
    // insert, update dist: tag each fresh distribution against the old baseline
    for (Iterator<DistributionData> i = dist.iterator(); i.hasNext();) {
        DistributionData d = i.next();
        if (incrementalDistributions) {
            DistributionData oldD = distMap.get(d);
            if (oldD == null) {
                d.setStatus(DistributionData.INSERT);
            } else {
                if (oldD.getWork() == d.getWork() && oldD.getCost() == d.getCost()) {
                    // unchanged: keep in the new baseline but drop from the payload
                    d.setStatus(0);
                    i.remove();
                } else
                    d.setStatus(DistributionData.UPDATE);
            }
        } else {
            d.setStatus(DistributionData.INSERT);
        }
        newDistMap.put(d, d);
    }
    // remove dist: old baseline entries not regenerated this run
    if (incrementalDistributions && distMap.size() > 0) {
        Set<Long> noChangeTaskIds = new HashSet<Long>();
        Task task;
        for (Iterator i = project.getTaskOutlineIterator(); i.hasNext();) {
            task = (Task) i.next();
            if (incremental && !task.isDirty())
                noChangeTaskIds.add(task.getUniqueId());
        }
        for (Iterator<DistributionData> i = distMap.values().iterator(); i.hasNext();) {
            DistributionData d = i.next();
            if (newDistMap.containsKey(d))
                continue;
            if (noChangeTaskIds.contains(d.getTaskId())) {
                // task untouched: the missing distribution was simply not recomputed,
                // so carry it forward unchanged rather than deleting it
                d.setStatus(0);
                newDistMap.put(d, d);
            } else {
                d.setStatus(DistributionData.REMOVE);
                dist.add(d);
            }
        }
    }
    project.setNewDistributionMap(newDistMap);
    System.out.println("Distributions generated in " + (System.currentTimeMillis() - t) + " ms");
    // send project field values to server too
    HashMap fieldValues = FieldValues.getValues(FieldDictionary.getInstance().getProjectFields(), project);
    if (project.getContainingSubprojectTask() != null) {
        // special case in which we want to use the duration from subproject task
        Object durationFieldValue = Configuration.getFieldFromId("Field.duration")
                .getValue(project.getContainingSubprojectTask(), null);
        fieldValues.put("Field.duration", durationFieldValue);
    }
    projectData.setFieldValues(fieldValues);
    // copy the remaining scalar project attributes
    projectData.setGroup(project.getGroup());
    projectData.setDivision(project.getDivision());
    projectData.setExpenseType(project.getExpenseType());
    projectData.setProjectType(project.getProjectType());
    projectData.setProjectStatus(project.getProjectStatus());
    projectData.setExtraFields(project.getExtraFields());
    projectData.setAccessControlPolicy(project.getAccessControlPolicy());
    projectData.setCreationDate(project.getCreationDate());
    projectData.setLastModificationDate(project.getLastModificationDate());
    // (a large block of commented-out legacy new-task-id / new-link-id tracking
    // code previously lived here; removed for readability)
    //project.setNewIds(); //claur - useful ?
    return projectData;
}
From source file:org.apache.accumulo.shell.commands.ConfigCommand.java
/**
 * Executes the shell {@code config} command in one of three modes:
 * delete a property ({@code -d}), set a property ({@code -s}), or display
 * properties (default). The target scope is a table ({@code -t}), a namespace
 * ({@code -ns}), or the system otherwise.
 *
 * @param fullCommand the raw command line, used for error offsets
 * @param cl          parsed options
 * @param shellState  shell context providing the connector
 * @return 0 on success
 * @throws TableNotFoundException     if {@code -t} names an unknown table
 * @throws NamespaceNotFoundException if {@code -ns} names an unknown namespace
 */
@Override
public int execute(final String fullCommand, final CommandLine cl, final Shell shellState)
        throws AccumuloException, AccumuloSecurityException, TableNotFoundException, IOException,
        ClassNotFoundException, NamespaceNotFoundException {
    reader = shellState.getReader();
    final String tableName = cl.getOptionValue(tableOpt.getOpt());
    if (tableName != null && !shellState.getConnector().tableOperations().exists(tableName)) {
        throw new TableNotFoundException(null, tableName, null);
    }
    final String namespace = cl.getOptionValue(namespaceOpt.getOpt());
    if (namespace != null && !shellState.getConnector().namespaceOperations().exists(namespace)) {
        throw new NamespaceNotFoundException(null, namespace, null);
    }
    if (cl.hasOption(deleteOpt.getOpt())) {
        // delete property from table
        String property = cl.getOptionValue(deleteOpt.getOpt());
        if (property.contains("=")) {
            throw new BadArgumentException("Invalid '=' operator in delete operation.", fullCommand,
                    fullCommand.indexOf('='));
        }
        if (tableName != null) {
            // Invalid keys are still removed in case a stale value exists in zookeeper.
            if (!Property.isValidTablePropertyKey(property)) {
                Shell.log.warn("Invalid per-table property : " + property
                        + ", still removing from zookeeper if it's there.");
            }
            shellState.getConnector().tableOperations().removeProperty(tableName, property);
            Shell.log.debug("Successfully deleted table configuration option.");
        } else if (namespace != null) {
            if (!Property.isValidTablePropertyKey(property)) {
                Shell.log.warn("Invalid per-table property : " + property
                        + ", still removing from zookeeper if it's there.");
            }
            shellState.getConnector().namespaceOperations().removeProperty(namespace, property);
            Shell.log.debug("Successfully deleted namespace configuration option.");
        } else {
            if (!Property.isValidZooPropertyKey(property)) {
                Shell.log.warn("Invalid per-table property : " + property
                        + ", still removing from zookeeper if it's there.");
            }
            shellState.getConnector().instanceOperations().removeProperty(property);
            Shell.log.debug("Successfully deleted system configuration option");
        }
    } else if (cl.hasOption(setOpt.getOpt())) {
        // set property on table: argument is of the form key=value
        String property = cl.getOptionValue(setOpt.getOpt()), value = null;
        if (!property.contains("=")) {
            throw new BadArgumentException("Missing '=' operator in set operation.", fullCommand,
                    fullCommand.indexOf(property));
        }
        // split on the first '=' only so values may themselves contain '='
        final String pair[] = property.split("=", 2);
        property = pair[0];
        value = pair[1];
        if (tableName != null) {
            if (!Property.isValidTablePropertyKey(property)) {
                throw new BadArgumentException("Invalid per-table property.", fullCommand,
                        fullCommand.indexOf(property));
            }
            if (property.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) {
                new ColumnVisibility(value); // validate that it is a valid expression
            }
            shellState.getConnector().tableOperations().setProperty(tableName, property, value);
            Shell.log.debug("Successfully set table configuration option.");
        } else if (namespace != null) {
            if (!Property.isValidTablePropertyKey(property)) {
                throw new BadArgumentException("Invalid per-table property.", fullCommand,
                        fullCommand.indexOf(property));
            }
            if (property.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) {
                new ColumnVisibility(value); // validate that it is a valid expression
            }
            shellState.getConnector().namespaceOperations().setProperty(namespace, property, value);
            Shell.log.debug("Successfully set table configuration option.");
        } else {
            if (!Property.isValidZooPropertyKey(property)) {
                throw new BadArgumentException("Property cannot be modified in zookeeper", fullCommand,
                        fullCommand.indexOf(property));
            }
            shellState.getConnector().instanceOperations().setProperty(property, value);
            Shell.log.debug("Successfully set system configuration option");
        }
    } else {
        // display properties: gather system, site, default and namespace views
        // into sorted maps so output is ordered and overrides can be detected
        final TreeMap<String, String> systemConfig = new TreeMap<String, String>();
        systemConfig.putAll(shellState.getConnector().instanceOperations().getSystemConfiguration());
        final String outputFile = cl.getOptionValue(outputFileOpt.getOpt());
        final PrintFile printFile = outputFile == null ? null : new PrintFile(outputFile);
        final TreeMap<String, String> siteConfig = new TreeMap<String, String>();
        siteConfig.putAll(shellState.getConnector().instanceOperations().getSiteConfiguration());
        final TreeMap<String, String> defaults = new TreeMap<String, String>();
        for (Entry<String, String> defaultEntry : AccumuloConfiguration.getDefaultConfiguration()) {
            defaults.put(defaultEntry.getKey(), defaultEntry.getValue());
        }
        final TreeMap<String, String> namespaceConfig = new TreeMap<String, String>();
        if (tableName != null) {
            // a table's effective config includes its enclosing namespace's properties
            String n = Namespaces.getNamespaceName(shellState.getInstance(), Tables.getNamespaceId(
                    shellState.getInstance(), Tables.getTableId(shellState.getInstance(), tableName)));
            for (Entry<String, String> e : shellState.getConnector().namespaceOperations().getProperties(n)) {
                namespaceConfig.put(e.getKey(), e.getValue());
            }
        }
        // choose which property set to display based on the requested scope
        Iterable<Entry<String, String>> acuconf = shellState.getConnector().instanceOperations()
                .getSystemConfiguration().entrySet();
        if (tableName != null) {
            acuconf = shellState.getConnector().tableOperations().getProperties(tableName);
        } else if (namespace != null) {
            acuconf = shellState.getConnector().namespaceOperations().getProperties(namespace);
        }
        final TreeMap<String, String> sortedConf = new TreeMap<String, String>();
        for (Entry<String, String> propEntry : acuconf) {
            sortedConf.put(propEntry.getKey(), propEntry.getValue());
        }
        // first pass: compute the display column width (COL2) over visible keys
        for (Entry<String, String> propEntry : acuconf) {
            final String key = propEntry.getKey();
            // only show properties with similar names to that
            // specified, or all of them if none specified
            if (cl.hasOption(filterOpt.getOpt()) && !key.contains(cl.getOptionValue(filterOpt.getOpt()))) {
                continue;
            }
            if ((tableName != null || namespace != null) && !Property.isValidTablePropertyKey(key)) {
                continue;
            }
            COL2 = Math.max(COL2, propEntry.getKey().length() + 3);
        }
        final ArrayList<String> output = new ArrayList<String>();
        printConfHeader(output);
        // second pass: print each key with its default/site/system/namespace/table
        // values, marking overridden levels
        for (Entry<String, String> propEntry : sortedConf.entrySet()) {
            final String key = propEntry.getKey();
            // only show properties with similar names to that
            // specified, or all of them if none specified
            if (cl.hasOption(filterOpt.getOpt()) && !key.contains(cl.getOptionValue(filterOpt.getOpt()))) {
                continue;
            }
            if ((tableName != null || namespace != null) && !Property.isValidTablePropertyKey(key)) {
                continue;
            }
            String siteVal = siteConfig.get(key);
            String sysVal = systemConfig.get(key);
            String curVal = propEntry.getValue();
            String dfault = defaults.get(key);
            String nspVal = namespaceConfig.get(key);
            boolean printed = false;
            // mask password-like values before display
            if (dfault != null && key.toLowerCase().contains("password")) {
                siteVal = sysVal = dfault = curVal = curVal.replaceAll(".", "*");
            }
            if (sysVal != null) {
                if (defaults.containsKey(key) && !Property.getPropertyByKey(key).isExperimental()) {
                    printConfLine(output, "default", key, dfault);
                    printed = true;
                }
                if (!defaults.containsKey(key) || !defaults.get(key).equals(siteVal)) {
                    printConfLine(output, "site", printed ? " @override" : key, siteVal == null ? "" : siteVal);
                    printed = true;
                }
                if (!siteConfig.containsKey(key) || !siteVal.equals(sysVal)) {
                    printConfLine(output, "system", printed ? " @override" : key, sysVal);
                    printed = true;
                }
            }
            if (nspVal != null) {
                if (!systemConfig.containsKey(key) || !sysVal.equals(nspVal)) {
                    printConfLine(output, "namespace", printed ? " @override" : key, nspVal);
                    printed = true;
                }
            }
            // show per-table value only if it is different (overridden)
            if (tableName != null && !curVal.equals(nspVal)) {
                printConfLine(output, "table", printed ? " @override" : key, curVal);
            } else if (namespace != null && !curVal.equals(sysVal)) {
                printConfLine(output, "namespace", printed ? " @override" : key, curVal);
            }
        }
        printConfFooter(output);
        shellState.printLines(output.iterator(), !cl.hasOption(disablePaginationOpt.getOpt()), printFile);
        if (printFile != null) {
            printFile.close();
        }
    }
    return 0;
}
From source file:org.apache.accumulo.core.util.shell.commands.ConfigCommand.java
/**
 * Executes the shell {@code config} command in one of three modes:
 * delete a property ({@code -d}), set a property ({@code -s}), or display
 * properties (default). The target scope is a table ({@code -t}), a namespace
 * ({@code -ns}), or the system otherwise.
 *
 * @param fullCommand the raw command line, used for error offsets
 * @param cl          parsed options
 * @param shellState  shell context providing the connector
 * @return 0 on success
 * @throws TableNotFoundException     if {@code -t} names an unknown table
 * @throws NamespaceNotFoundException if {@code -ns} names an unknown namespace
 */
@Override
public int execute(final String fullCommand, final CommandLine cl, final Shell shellState)
        throws AccumuloException, AccumuloSecurityException, TableNotFoundException, IOException,
        ClassNotFoundException, NamespaceNotFoundException {
    reader = shellState.getReader();
    final String tableName = cl.getOptionValue(tableOpt.getOpt());
    if (tableName != null && !shellState.getConnector().tableOperations().exists(tableName)) {
        throw new TableNotFoundException(null, tableName, null);
    }
    final String namespace = cl.getOptionValue(namespaceOpt.getOpt());
    if (namespace != null && !shellState.getConnector().namespaceOperations().exists(namespace)) {
        throw new NamespaceNotFoundException(null, namespace, null);
    }
    if (cl.hasOption(deleteOpt.getOpt())) {
        // delete property from table
        String property = cl.getOptionValue(deleteOpt.getOpt());
        if (property.contains("=")) {
            throw new BadArgumentException("Invalid '=' operator in delete operation.", fullCommand,
                    fullCommand.indexOf('='));
        }
        if (tableName != null) {
            // Invalid keys are still removed in case a stale value exists in zookeeper.
            if (!Property.isValidTablePropertyKey(property)) {
                Shell.log.warn("Invalid per-table property : " + property
                        + ", still removing from zookeeper if it's there.");
            }
            shellState.getConnector().tableOperations().removeProperty(tableName, property);
            Shell.log.debug("Successfully deleted table configuration option.");
        } else if (namespace != null) {
            if (!Property.isValidTablePropertyKey(property)) {
                Shell.log.warn("Invalid per-table property : " + property
                        + ", still removing from zookeeper if it's there.");
            }
            shellState.getConnector().namespaceOperations().removeProperty(namespace, property);
            Shell.log.debug("Successfully deleted namespace configuration option.");
        } else {
            if (!Property.isValidZooPropertyKey(property)) {
                Shell.log.warn("Invalid per-table property : " + property
                        + ", still removing from zookeeper if it's there.");
            }
            shellState.getConnector().instanceOperations().removeProperty(property);
            Shell.log.debug("Successfully deleted system configuration option");
        }
    } else if (cl.hasOption(setOpt.getOpt())) {
        // set property on table: argument is of the form key=value
        String property = cl.getOptionValue(setOpt.getOpt()), value = null;
        if (!property.contains("=")) {
            throw new BadArgumentException("Missing '=' operator in set operation.", fullCommand,
                    fullCommand.indexOf(property));
        }
        // split on the first '=' only so values may themselves contain '='
        final String pair[] = property.split("=", 2);
        property = pair[0];
        value = pair[1];
        if (tableName != null) {
            if (!Property.isValidTablePropertyKey(property)) {
                throw new BadArgumentException("Invalid per-table property.", fullCommand,
                        fullCommand.indexOf(property));
            }
            if (property.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) {
                new ColumnVisibility(value); // validate that it is a valid expression
            }
            shellState.getConnector().tableOperations().setProperty(tableName, property, value);
            Shell.log.debug("Successfully set table configuration option.");
        } else if (namespace != null) {
            if (!Property.isValidTablePropertyKey(property)) {
                throw new BadArgumentException("Invalid per-table property.", fullCommand,
                        fullCommand.indexOf(property));
            }
            if (property.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) {
                new ColumnVisibility(value); // validate that it is a valid expression
            }
            shellState.getConnector().namespaceOperations().setProperty(namespace, property, value);
            Shell.log.debug("Successfully set table configuration option.");
        } else {
            if (!Property.isValidZooPropertyKey(property)) {
                throw new BadArgumentException("Property cannot be modified in zookeeper", fullCommand,
                        fullCommand.indexOf(property));
            }
            shellState.getConnector().instanceOperations().setProperty(property, value);
            Shell.log.debug("Successfully set system configuration option");
        }
    } else {
        // display properties: gather system, site, default and namespace views
        // into sorted maps so output is ordered and overrides can be detected
        final TreeMap<String, String> systemConfig = new TreeMap<String, String>();
        systemConfig.putAll(shellState.getConnector().instanceOperations().getSystemConfiguration());
        final String outputFile = cl.getOptionValue(outputFileOpt.getOpt());
        final PrintFile printFile = outputFile == null ? null : new PrintFile(outputFile);
        final TreeMap<String, String> siteConfig = new TreeMap<String, String>();
        siteConfig.putAll(shellState.getConnector().instanceOperations().getSiteConfiguration());
        final TreeMap<String, String> defaults = new TreeMap<String, String>();
        for (Entry<String, String> defaultEntry : AccumuloConfiguration.getDefaultConfiguration()) {
            defaults.put(defaultEntry.getKey(), defaultEntry.getValue());
        }
        final TreeMap<String, String> namespaceConfig = new TreeMap<String, String>();
        if (tableName != null) {
            // a table's effective config includes its enclosing namespace's properties
            String n = Namespaces.getNamespaceName(shellState.getInstance(), Tables.getNamespaceId(
                    shellState.getInstance(), Tables.getTableId(shellState.getInstance(), tableName)));
            for (Entry<String, String> e : shellState.getConnector().namespaceOperations().getProperties(n)) {
                namespaceConfig.put(e.getKey(), e.getValue());
            }
        }
        // choose which property set to display based on the requested scope
        Iterable<Entry<String, String>> acuconf = shellState.getConnector().instanceOperations()
                .getSystemConfiguration().entrySet();
        if (tableName != null) {
            acuconf = shellState.getConnector().tableOperations().getProperties(tableName);
        } else if (namespace != null) {
            acuconf = shellState.getConnector().namespaceOperations().getProperties(namespace);
        }
        final TreeMap<String, String> sortedConf = new TreeMap<String, String>();
        for (Entry<String, String> propEntry : acuconf) {
            sortedConf.put(propEntry.getKey(), propEntry.getValue());
        }
        // first pass: compute the display column width (COL2) over visible keys
        for (Entry<String, String> propEntry : acuconf) {
            final String key = propEntry.getKey();
            // only show properties with similar names to that
            // specified, or all of them if none specified
            if (cl.hasOption(filterOpt.getOpt()) && !key.contains(cl.getOptionValue(filterOpt.getOpt()))) {
                continue;
            }
            if ((tableName != null || namespace != null) && !Property.isValidTablePropertyKey(key)) {
                continue;
            }
            COL2 = Math.max(COL2, propEntry.getKey().length() + 3);
        }
        final ArrayList<String> output = new ArrayList<String>();
        printConfHeader(output);
        // second pass: print each key with its default/site/system/namespace/table
        // values, marking overridden levels
        for (Entry<String, String> propEntry : sortedConf.entrySet()) {
            final String key = propEntry.getKey();
            // only show properties with similar names to that
            // specified, or all of them if none specified
            if (cl.hasOption(filterOpt.getOpt()) && !key.contains(cl.getOptionValue(filterOpt.getOpt()))) {
                continue;
            }
            if ((tableName != null || namespace != null) && !Property.isValidTablePropertyKey(key)) {
                continue;
            }
            String siteVal = siteConfig.get(key);
            String sysVal = systemConfig.get(key);
            String curVal = propEntry.getValue();
            String dfault = defaults.get(key);
            String nspVal = namespaceConfig.get(key);
            boolean printed = false;
            // mask password-like values before display
            if (dfault != null && key.toLowerCase().contains("password")) {
                siteVal = sysVal = dfault = curVal = curVal.replaceAll(".", "*");
            }
            if (sysVal != null) {
                if (defaults.containsKey(key)) {
                    printConfLine(output, "default", key, dfault);
                    printed = true;
                }
                if (!defaults.containsKey(key) || !defaults.get(key).equals(siteVal)) {
                    printConfLine(output, "site", printed ? " @override" : key, siteVal == null ? "" : siteVal);
                    printed = true;
                }
                if (!siteConfig.containsKey(key) || !siteVal.equals(sysVal)) {
                    printConfLine(output, "system", printed ? " @override" : key, sysVal == null ? "" : sysVal);
                    printed = true;
                }
            }
            if (nspVal != null) {
                if (!systemConfig.containsKey(key) || !sysVal.equals(nspVal)) {
                    printConfLine(output, "namespace", printed ? " @override" : key, nspVal == null ? "" : nspVal);
                    printed = true;
                }
            }
            // show per-table value only if it is different (overridden)
            if (tableName != null && !curVal.equals(nspVal)) {
                printConfLine(output, "table", printed ? " @override" : key, curVal);
            } else if (namespace != null && !curVal.equals(sysVal)) {
                printConfLine(output, "namespace", printed ? " @override" : key, curVal);
            }
        }
        printConfFooter(output);
        shellState.printLines(output.iterator(), !cl.hasOption(disablePaginationOpt.getOpt()), printFile);
        if (printFile != null) {
            printFile.close();
        }
    }
    return 0;
}
From source file:org.exoplatform.addon.pulse.service.ws.RestActivitiesStatistic.java
/**
 * Builds the {@link ChartData} time series backing the activity-statistic chart.
 *
 * <p>The series always contains {@code maxColumn} buckets (default 5 when the
 * value is not a parseable integer), starting at {@code fromDate}. The bucket
 * granularity is selected by {@code filter}: day, week or month. Buckets with
 * no stored statistics are emitted as zero-valued columns so the chart has no
 * gaps.
 *
 * @param maxColumn number of buckets to chart, as a string; non-numeric input
 *                  silently falls back to 5
 * @param filter    compared case-insensitively against FILTER_BY_DAY,
 *                  FILTER_BY_WEEK and FILTER_BY_MONTH; any other value yields
 *                  {@code null}
 * @param fromDate  start of the charted range
 * @return the populated chart data, or {@code null} for an unknown filter
 * @throws Exception propagated from the statistics service or date parsing
 */
private ChartData buildStatisticByFilter(String maxColumn, String filter, Date fromDate) throws Exception {
    int totalDataCoulumn = 5;
    try {
        totalDataCoulumn = Integer.parseInt(maxColumn);
    } catch (Exception e) {
        // do nothing: keep the default bucket count of 5
    }

    if (filter.equalsIgnoreCase(FILTER_BY_DAY)) {
        // One bucket per calendar day in [fromDate, fromDate + n - 1].
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(fromDate);
        calendar.add(Calendar.DATE, totalDataCoulumn - 1);
        Date toDate = calendar.getTime();
        List<ActivityStatisticBean> list = service.getListActivityStatisticByDate(fromDate, toDate);
        TreeMap<Date, ActivityStatisticBean> dateData = new TreeMap<Date, ActivityStatisticBean>();
        // init empty-data: pre-seed every day with null so days without
        // statistics still produce a (zeroed) column. The round-trip through
        // partString/parseDate truncates the time-of-day component.
        for (int i = 0; i < totalDataCoulumn; i++) {
            calendar.clear();
            calendar.setTime(fromDate);
            calendar.add(Calendar.DATE, i);
            Date nextDate = parseDate(partString(calendar.getTime(), "dd/MM/yyyy"), "dd/MM/yyyy");
            dateData.put(nextDate, null);
        }
        List<String> listTitle = new ArrayList<String>();
        List<Long> newUsersData = new ArrayList<Long>();
        List<Long> loginCountData = new ArrayList<Long>();
        List<Long> forumActiveUsersData = new ArrayList<Long>();
        List<Long> newForumPostsData = new ArrayList<Long>();
        List<Long> userConnectionData = new ArrayList<Long>();
        List<Long> socialPostData = new ArrayList<Long>();
        List<Long> emailNotificationData = new ArrayList<Long>();
        ChartData chartData = new ChartData();
        // Overwrite the null placeholders with the beans that actually exist.
        for (ActivityStatisticBean bean : list) {
            dateData.put(parseDate(partString(bean.getCreatedDate(), "dd/MM/yyyy"), "dd/MM/yyyy"), bean);
        }
        // TreeMap iterates in date order, so columns come out chronologically.
        for (Date key : dateData.keySet()) {
            ActivityStatisticBean bean = dateData.get(key);
            if (bean != null) {
                listTitle.add(partString(bean.getCreatedDate(), "dd-MM-yyyy"));
                newUsersData.add(bean.getNewUserToday());
                loginCountData.add(bean.getLoginCountToday());
                forumActiveUsersData.add(bean.getForumActiveUserToday());
                newForumPostsData.add(bean.getForumPostToday());
                userConnectionData.add(bean.getUserConnectionCountToday());
                socialPostData.add(bean.getSocialPostCountToday());
                emailNotificationData.add(bean.getEmailNotificationCountToday());
            } else {
                // No statistics stored for this day: chart an all-zero column.
                listTitle.add(partString(key, "dd-MM-yyyy"));
                newUsersData.add(0L);
                loginCountData.add(0L);
                forumActiveUsersData.add(0L);
                newForumPostsData.add(0L);
                userConnectionData.add(0L);
                socialPostData.add(0L);
                emailNotificationData.add(0L);
            }
        }
        chartData.setListTitle(listTitle);
        chartData.setNewUsersData(newUsersData);
        chartData.setLoginCountData(loginCountData);
        chartData.setForumActiveUsersData(forumActiveUsersData);
        chartData.setNewForumPostsData(newForumPostsData);
        chartData.setUserConnectionData(userConnectionData);
        chartData.setSocialPostData(socialPostData);
        chartData.setEmailNotificationData(emailNotificationData);
        return chartData;
    }

    if (filter.equalsIgnoreCase(FILTER_BY_WEEK)) {
        // One bucket per calendar week (Calendar.WEEK_OF_YEAR, whose first
        // day is locale-dependent). Bucket keys have the form
        // "yyyy-WW-yyyy" (zero-padded week number, year prefixed) so the
        // TreeMap orders them chronologically by year, then week.
        Calendar calendar = Calendar.getInstance();
        calendar.clear();
        calendar.setTime(fromDate);
        calendar.add(Calendar.WEEK_OF_YEAR, totalDataCoulumn - 1);
        Date nextFewWeek = calendar.getTime();
        List<ActivityStatisticBean> list = service.getListActivityStatisticByDate(fromDate, nextFewWeek);
        List<String> listTitle = new ArrayList<String>();
        List<Long> newUsersData = new ArrayList<Long>();
        List<Long> loginCountData = new ArrayList<Long>();
        List<Long> forumActiveUsersData = new ArrayList<Long>();
        List<Long> newForumPostsData = new ArrayList<Long>();
        List<Long> userConnectionData = new ArrayList<Long>();
        List<Long> socialPostData = new ArrayList<Long>();
        List<Long> emailNotificationData = new ArrayList<Long>();
        ChartData chartData = new ChartData();
        TreeMap<String, List<ActivityStatisticBean>> weekData = new TreeMap<String, List<ActivityStatisticBean>>();
        // init empty-data: pre-seed each of the n weeks with an empty list.
        for (int i = 0; i < totalDataCoulumn; i++) {
            calendar.clear();
            calendar.setTime(fromDate);
            calendar.add(Calendar.WEEK_OF_YEAR, i);
            int weekIndex = calendar.get(Calendar.WEEK_OF_YEAR);
            int monthIndex = calendar.get(Calendar.MONTH);
            // A December date reported as week 1 belongs to next year's first
            // week; re-label it week 53 of the current year.
            if (monthIndex == Calendar.DECEMBER && weekIndex == 1)
                weekIndex = 53;
            int year = calendar.get(Calendar.YEAR);
            // goto begin of week
            calendar.clear();
            calendar.set(Calendar.WEEK_OF_YEAR, weekIndex);
            calendar.set(Calendar.YEAR, year);
            // goto end of week
            calendar.add(Calendar.DATE, 6);
            String week = "";
            if (calendar.get(Calendar.MONTH) == Calendar.DECEMBER && calendar.get(Calendar.WEEK_OF_YEAR) == 1) {
                week = 53 + "-" + calendar.get(Calendar.YEAR);
            } else {
                week = calendar.get(Calendar.WEEK_OF_YEAR) + "-" + calendar.get(Calendar.YEAR);
            }
            // Zero-pad single-digit weeks ("5-2016" -> "05-2016") and prefix
            // the year so the keys sort chronologically.
            week = week.length() < 7 ? calendar.get(Calendar.YEAR) + "-" + "0" + week
                    : calendar.get(Calendar.YEAR) + "-" + week;
            weekData.put(week, new ArrayList<ActivityStatisticBean>());
        }
        // Distribute each stored bean into its week bucket, rebuilding the
        // key with exactly the same rules as above.
        for (ActivityStatisticBean bean : list) {
            calendar.clear();
            calendar.setTime(bean.getCreatedDate());
            int weekIndex = calendar.get(Calendar.WEEK_OF_YEAR);
            int monthIndex = calendar.get(Calendar.MONTH);
            if (monthIndex == Calendar.DECEMBER && weekIndex == 1)
                weekIndex = 53;
            int year = calendar.get(Calendar.YEAR);
            // goto begin of week
            calendar.clear();
            calendar.set(Calendar.WEEK_OF_YEAR, weekIndex);
            calendar.set(Calendar.YEAR, year);
            // goto end of week
            calendar.add(Calendar.DATE, 6);
            String week = "";
            if (calendar.get(Calendar.MONTH) == Calendar.DECEMBER && calendar.get(Calendar.WEEK_OF_YEAR) == 1) {
                week = 53 + "-" + calendar.get(Calendar.YEAR);
            } else {
                week = calendar.get(Calendar.WEEK_OF_YEAR) + "-" + calendar.get(Calendar.YEAR);
            }
            week = week.length() < 7 ? calendar.get(Calendar.YEAR) + "-" + "0" + week
                    : calendar.get(Calendar.YEAR) + "-" + week;
            if (weekData.containsKey(week)) {
                List<ActivityStatisticBean> listValueOfNode = weekData.get(week);
                listValueOfNode.add(bean);
            } else {
                List<ActivityStatisticBean> listValueOfNode = new ArrayList<ActivityStatisticBean>();
                listValueOfNode.add(bean);
                weekData.put(week, listValueOfNode);
            }
        }
        // Aggregate each week bucket into one chart column.
        for (String key : weekData.keySet()) {
            List<ActivityStatisticBean> listValueOfNode = weekData.get(key);
            Long weekNewUsersValue = 0L;
            Long weekLoginCountValue = 0L;
            Long weekForumActiveUsersValue = 0L;
            Long weekNewForumPostsValue = 0L;
            Long weekUserConnectionValue = 0L;
            Long weekSocialPostsValue = 0L;
            Long weekEmailNotificationValue = 0L;
            for (ActivityStatisticBean obj : listValueOfNode) {
                weekNewUsersValue = weekNewUsersValue + obj.getNewUserToday();
                weekLoginCountValue = weekLoginCountValue + obj.getLoginCountToday();
                weekForumActiveUsersValue = weekForumActiveUsersValue + obj.getForumActiveUserToday();
                weekNewForumPostsValue = weekNewForumPostsValue + obj.getForumPostToday();
                weekUserConnectionValue = weekUserConnectionValue + obj.getUserConnectionCountToday();
                weekSocialPostsValue = weekSocialPostsValue + obj.getSocialPostCountToday();
                weekEmailNotificationValue = weekEmailNotificationValue + obj.getEmailNotificationCountToday();
            }
            // Strip the sort prefix: key "2016-05-2016" -> title "W05-2016".
            String weekTitle = "W" + key.substring(5, key.length());
            listTitle.add(weekTitle);
            newUsersData.add(weekNewUsersValue);
            loginCountData.add(weekLoginCountValue);
            // Forum activity is averaged over the number of beans in the
            // bucket; all other metrics are plain sums.
            forumActiveUsersData.add(weekForumActiveUsersValue > 0
                    ? (Long) (weekForumActiveUsersValue / listValueOfNode.size())
                    : 0L);
            newForumPostsData.add(weekNewForumPostsValue);
            userConnectionData.add(weekUserConnectionValue);
            socialPostData.add(weekSocialPostsValue);
            emailNotificationData.add(weekEmailNotificationValue);
        }
        chartData.setListTitle(listTitle);
        chartData.setNewUsersData(newUsersData);
        chartData.setLoginCountData(loginCountData);
        chartData.setForumActiveUsersData(forumActiveUsersData);
        chartData.setNewForumPostsData(newForumPostsData);
        chartData.setUserConnectionData(userConnectionData);
        chartData.setSocialPostData(socialPostData);
        chartData.setEmailNotificationData(emailNotificationData);
        return chartData;
    }

    if (filter.equalsIgnoreCase(FILTER_BY_MONTH)) {
        // One bucket per calendar month. Keys look like "yyyy-MM-MMM-yyyy":
        // the leading "yyyy-MM" makes TreeMap ordering chronological, the
        // trailing "MMM-yyyy" is later reused as the column title.
        Calendar calendar = Calendar.getInstance();
        calendar.clear();
        calendar.setTime(fromDate);
        calendar.add(Calendar.MONTH, totalDataCoulumn - 1);
        Date nextFewMonth = calendar.getTime();
        List<ActivityStatisticBean> list = service.getListActivityStatisticByDate(fromDate, nextFewMonth);
        List<String> listTitle = new ArrayList<String>();
        List<Long> newUsersData = new ArrayList<Long>();
        List<Long> loginCountData = new ArrayList<Long>();
        List<Long> forumActiveUsersData = new ArrayList<Long>();
        List<Long> newForumPostsData = new ArrayList<Long>();
        List<Long> userConnectionData = new ArrayList<Long>();
        List<Long> socialPostData = new ArrayList<Long>();
        List<Long> emailNotificationData = new ArrayList<Long>();
        ChartData chartData = new ChartData();
        TreeMap<String, List<ActivityStatisticBean>> monthData = new TreeMap<String, List<ActivityStatisticBean>>();
        // init empty-data: pre-seed each of the n months with an empty list.
        for (int i = 0; i < totalDataCoulumn; i++) {
            calendar.clear();
            calendar.setTime(fromDate);
            calendar.add(Calendar.MONTH, i);
            String month = calendar.get(Calendar.YEAR) + "-" + partString(calendar.getTime(), "MM") + "-"
                    + partString(calendar.getTime(), "MMM") + "-" + calendar.get(Calendar.YEAR); // get name of Month
            monthData.put(month, new ArrayList<ActivityStatisticBean>());
        }
        // Distribute each stored bean into its month bucket.
        for (ActivityStatisticBean bean : list) {
            calendar.clear();
            calendar.setTime(bean.getCreatedDate());
            String month = calendar.get(Calendar.YEAR) + "-" + partString(calendar.getTime(), "MM") + "-"
                    + partString(calendar.getTime(), "MMM") + "-" + calendar.get(Calendar.YEAR); // get name of Month
            if (monthData.containsKey(month)) {
                List<ActivityStatisticBean> listValueOfNode = monthData.get(month);
                listValueOfNode.add(bean);
            } else {
                List<ActivityStatisticBean> listValueOfNode = new ArrayList<ActivityStatisticBean>();
                listValueOfNode.add(bean);
                monthData.put(month, listValueOfNode);
            }
        }
        // Aggregate each month bucket into one chart column.
        for (String key : monthData.keySet()) {
            List<ActivityStatisticBean> listValueOfNode = monthData.get(key);
            Long monthNewUsersValue = 0L;
            Long monthLoginCountValue = 0L;
            Long monthForumActiveUsersValue = 0L;
            Long monthNewForumPostsValue = 0L;
            Long monthUserConnectionValue = 0L;
            Long monthSocialPostsValue = 0L;
            Long monthEmailNotificationValue = 0L;
            for (ActivityStatisticBean obj : listValueOfNode) {
                monthNewUsersValue = monthNewUsersValue + obj.getNewUserToday();
                monthLoginCountValue = monthLoginCountValue + obj.getLoginCountToday();
                monthForumActiveUsersValue = monthForumActiveUsersValue + obj.getForumActiveUserToday();
                monthNewForumPostsValue = monthNewForumPostsValue + obj.getForumPostToday();
                monthUserConnectionValue = monthUserConnectionValue + obj.getUserConnectionCountToday();
                monthSocialPostsValue = monthSocialPostsValue + obj.getSocialPostCountToday();
                // NOTE(review): the leading unary '+' below is a typo but
                // harmless (+x == x); behaviour is a plain sum.
                monthEmailNotificationValue = +monthEmailNotificationValue + obj.getEmailNotificationCountToday();
            }
            // Key "2016-05-May-2016" -> column title "May-2016".
            listTitle.add(key.substring(8, key.length()));
            newUsersData.add(monthNewUsersValue);
            loginCountData.add(monthLoginCountValue);
            // Forum activity is averaged over the number of beans in the
            // bucket; all other metrics are plain sums.
            forumActiveUsersData.add(monthForumActiveUsersValue > 0
                    ? (Long) (monthForumActiveUsersValue / listValueOfNode.size())
                    : 0L);
            newForumPostsData.add(monthNewForumPostsValue);
            userConnectionData.add(monthUserConnectionValue);
            socialPostData.add(monthSocialPostsValue);
            emailNotificationData.add(monthEmailNotificationValue);
        }
        chartData.setListTitle(listTitle);
        chartData.setNewUsersData(newUsersData);
        chartData.setLoginCountData(loginCountData);
        chartData.setForumActiveUsersData(forumActiveUsersData);
        chartData.setNewForumPostsData(newForumPostsData);
        chartData.setUserConnectionData(userConnectionData);
        chartData.setSocialPostData(socialPostData);
        chartData.setEmailNotificationData(emailNotificationData);
        return chartData;
    }
    // Unknown filter value.
    return null;
}
From source file:org.biomart.configurator.controller.MartController.java
/**
 * Synchronises foreign keys and relations for a source schema against the
 * live database, using JDBC {@link DatabaseMetaData} as the source of truth.
 *
 * <p>For every table with a primary key, the exported keys reported by the
 * driver are rebuilt into {@code ForeignKey} objects. Existing matching FKs
 * and relations are reused (and upgraded from HANDMADE to INFERRED where
 * appropriate), missing ones are created, and relations no longer present in
 * the database are dropped — except handmade ones, which are preserved.
 *
 * @param ss             the source schema whose tables are scanned
 * @param fksToBeDropped foreign keys remaining from a previous run; every key
 *                       confirmed by the database is removed from this
 *                       collection so the caller can drop the remainder
 * @param dmd            database metadata used to look up exported keys
 * @param schema         schema name passed to {@code getExportedKeys}
 * @param catalog        catalog name passed to {@code getExportedKeys}
 * @throws SQLException       on JDBC metadata access errors
 * @throws DataModelException on data-model inconsistencies
 */
public void synchroniseKeysUsingDMD(final SourceSchema ss, final Collection<ForeignKey> fksToBeDropped,
        final DatabaseMetaData dmd, final String schema, final String catalog)
        throws SQLException, DataModelException {
    Log.debug("Running DMD key synchronisation");
    // Loop through all the tables in the database, which is the same as
    // looping through all the primary keys.
    Log.debug("Finding tables");
    for (final Iterator<Table> i = ss.getTables().iterator(); i.hasNext();) {
        // Obtain the table and its primary key.
        final SourceTable pkTable = (SourceTable) i.next();
        final PrimaryKey pk = pkTable.getPrimaryKey();
        // Skip all tables which have no primary key.
        if (pk == null)
            continue;
        Log.debug("Processing primary key " + pk);
        // Relations that already exist from a previous run. Anything left in
        // this set at the end of the loop for this table no longer exists in
        // the database and will be dropped. (TreeSet for stable ordering.)
        final Collection<Relation> relationsToBeDropped = new TreeSet<Relation>(pk.getRelations());
        // Identify all foreign keys in the database metadata that refer to
        // the current primary key.
        Log.debug("Finding referring foreign keys");
        String searchCatalog = catalog;
        String searchSchema = schema;
        final ResultSet dbTblFKCols = dmd.getExportedKeys(searchCatalog, searchSchema, pkTable.getName());
        // One result row per column per key, accumulated into a 2-D view of
        // the referring FKs: the map key (Y axis) is the column position
        // within a key, each map value (X axis) lists that column for every
        // individual FK. FK columns are assumed to be in the same order as
        // the PK columns, and the driver is assumed to return all position-1
        // rows before position-2 rows, in a consistent FK order.
        final TreeMap<Short, List<Column>> dbFKs = new TreeMap<Short, List<Column>>();
        while (dbTblFKCols.next()) {
            final String fkTblName = dbTblFKCols.getString("FKTABLE_NAME");
            final String fkColName = dbTblFKCols.getString("FKCOLUMN_NAME");
            final Short fkColSeq = new Short(dbTblFKCols.getShort("KEY_SEQ"));
            if (fkTblName != null && fkTblName.contains("$")) {
                // Exclude ORACLE's temporary tables (unlikely to be found
                // here though).
                continue;
            }
            // Note the column.
            if (!dbFKs.containsKey(fkColSeq))
                dbFKs.put(fkColSeq, new ArrayList<Column>());
            // In some dbs, FKs can be invalid, so we need to check them.
            final Table fkTbl = ss.getTableByName(fkTblName);
            if (fkTbl != null) {
                final Column fkCol = (Column) fkTbl.getColumnByName(fkColName);
                if (fkCol != null)
                    (dbFKs.get(fkColSeq)).add(fkCol);
            }
        }
        dbTblFKCols.close();
        // Sort foreign keys by name (case insensitive).
        for (List<Column> columnList : dbFKs.values()) {
            Collections.sort(columnList);
        }
        // Only construct FKs if we actually found any.
        if (!dbFKs.isEmpty()) {
            // Identify the sequence of the first column, which may be 0 or
            // 1, depending on database implementation.
            final int firstColSeq = ((Short) dbFKs.firstKey()).intValue();
            // How many columns are in the PK?
            // NOTE(review): pkColCount is computed but never used below.
            final int pkColCount = pkTable.getPrimaryKey().getColumns().size();
            // How many FKs do we have?
            final int fkCount = dbFKs.get(dbFKs.firstKey()).size();
            // Loop through the FKs, and construct each one at a time.
            for (int j = 0; j < fkCount; j++) {
                // Set up an array to hold the FK columns.
                final List<Column> candidateFKColumns = new ArrayList<Column>();
                // For each FK column name, look up the actual column in the
                // table.
                for (final Iterator<Map.Entry<Short, List<Column>>> k = dbFKs.entrySet().iterator(); k
                        .hasNext();) {
                    final Map.Entry<Short, List<Column>> entry = k.next();
                    final Short keySeq = (Short) entry.getKey();
                    // Convert the db-specific column index to a 0-indexed
                    // figure for the array of fk columns.
                    // NOTE(review): fkColSeq is computed but never used — the
                    // column is selected by FK position j instead; confirm
                    // this is intended.
                    final int fkColSeq = keySeq.intValue() - firstColSeq;
                    candidateFKColumns.add((Column) (entry.getValue()).get(j));
                }
                // Create a template foreign key based around the set of
                // candidate columns we found.
                ForeignKey fkObject;
                try {
                    List<Column> columns = new ArrayList<Column>();
                    for (int k = 0; k < candidateFKColumns.size(); k++) {
                        columns.add(candidateFKColumns.get(k));
                    }
                    fkObject = new ForeignKey(columns);
                    // new KeyController(fkObject);
                } catch (final Throwable t) {
                    throw new BioMartError(t);
                }
                final Table fkTable = fkObject.getTable();
                // If any FK already exists on the target table with the same
                // columns in the same order, then reuse it.
                boolean fkAlreadyExists = false;
                for (final Iterator<ForeignKey> f = fkTable.getForeignKeys().iterator(); f.hasNext()
                        && !fkAlreadyExists;) {
                    final ForeignKey candidateFK = f.next();
                    if (candidateFK.equals(fkObject)) {
                        // Found one. Reuse it!
                        fkObject = candidateFK;
                        // Update the status to indicate that the FK is
                        // backed by the database, if previously it was
                        // handmade.
                        if (fkObject.getStatus().equals(ComponentStatus.HANDMADE))
                            fkObject.setStatus(ComponentStatus.INFERRED);
                        // Remove the FK from the list to be dropped later,
                        // as it definitely exists now.
                        fksToBeDropped.remove(candidateFK);
                        // Flag the key as existing.
                        fkAlreadyExists = true;
                    }
                }
                // Has the key been reused, or is it a new one?
                if (!fkAlreadyExists)
                    try {
                        fkTable.getForeignKeys().add(fkObject);
                        // fkTable.getForeignKeys().add(fk);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }
                // Work out whether the relation from the FK to the PK should
                // be 1:M or 1:1. It is 1:M in all cases except where the FK
                // table has a PK with identical columns to the FK, in which
                // case the FK is unique and the relation is 1:1.
                Cardinality card = Cardinality.MANY_A;
                final PrimaryKey fkPK = fkTable.getPrimaryKey();
                if (fkPK != null && fkObject.getColumns().equals(fkPK.getColumns()))
                    card = Cardinality.ONE;
                // Check to see if it already has a relation.
                boolean relationExists = false;
                for (final Iterator<Relation> f = fkObject.getRelations().iterator(); f.hasNext();) {
                    // Obtain the next relation.
                    final Relation candidateRel = f.next();
                    // a) a relation already exists between the FK and the PK.
                    if (candidateRel.getOtherKey(fkObject).equals(pk)) {
                        // If cardinality matches, make it inferred. If it
                        // doesn't match, make it modified and update the
                        // original cardinality.
                        try {
                            if (card.equals(candidateRel.getCardinality())) {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.INFERRED);
                            } else {
                                if (!candidateRel.getStatus().equals(ComponentStatus.INFERRED_INCORRECT))
                                    candidateRel.setStatus(ComponentStatus.MODIFIED);
                                candidateRel.setOriginalCardinality(card);
                            }
                        } catch (final AssociationException ae) {
                            throw new BioMartError(ae);
                        }
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                        // Say we've found it.
                        relationExists = true;
                    }
                    // b) a handmade relation exists elsewhere which should
                    // not be dropped. All other relations elsewhere will be
                    // dropped.
                    else if (candidateRel.getStatus().equals(ComponentStatus.HANDMADE))
                        // Don't drop it at the end of the loop.
                        relationsToBeDropped.remove(candidateRel);
                }
                // If relation did not already exist, create it.
                if (!relationExists && !pk.equals(fkObject)) {
                    // Establish the relation.
                    try {
                        new RelationSource(pk, fkObject, card);
                        // pk.getObject().addRelation(relation);
                        // fk.getObject().addRelation(relation);
                    } catch (final Throwable t) {
                        throw new BioMartError(t);
                    }
                }
            }
        }
        // Remove any relations that we didn't find in the database (but
        // leave the handmade ones behind).
        for (final Iterator<Relation> j = relationsToBeDropped.iterator(); j.hasNext();) {
            final Relation r = j.next();
            if (r.getStatus().equals(ComponentStatus.HANDMADE))
                continue;
            r.getFirstKey().removeRelation(r);
            r.getSecondKey().removeRelation(r);
        }
    }
}
From source file:com.sfs.whichdoctor.dao.AccreditationDAOImpl.java
/** * Gets the training summary.//from www .j ava 2s.com * * @param guid the guid * @param type the type * * @return the training summary * * @throws WhichDoctorDaoException the which doctor dao exception */ @SuppressWarnings("unchecked") public final TreeMap<String, AccreditationBean[]> getTrainingSummary(final int guid, final String type) throws WhichDoctorDaoException { if (type == null) { throw new NullPointerException("Training type cannot be null"); } dataLogger.info("Getting " + type + " Training Summary for Member GUID: " + guid); TreeMap<String, AccreditationBean[]> summary = new TreeMap<String, AccreditationBean[]>(); Collection<AccreditationBean> accreditations = new ArrayList<AccreditationBean>(); try { accreditations = this.getJdbcTemplateReader().query(this.getSQL().getValue("accreditation/loadSummary"), new Object[] { guid, type }, new RowMapper() { public Object mapRow(final ResultSet rs, final int rowNum) throws SQLException { AccreditationBean accreditation = new AccreditationBean(); accreditation.setAbbreviation(rs.getString("AccreditationTypeAbbreviation")); accreditation.setAccreditationType(rs.getString("AccreditationType")); accreditation.setSpecialtyType(rs.getString("SpecialtyTypeClass")); accreditation.setSpecialtySubType(rs.getString("SpecialtyTypeName")); accreditation.setSpecialtyTypeAbbreviation(rs.getString("SpecialtyTypeAbbreviation")); accreditation.setCore(rs.getBoolean("Core")); accreditation.setWeeksApproved(rs.getInt("WeeksApproved")); accreditation.setWeeksCertified(rs.getInt("WeeksCertified")); // The active flag holds whether the accreditation is excess boolean active = true; String trainingClass = rs.getString("TrainingClass"); if (StringUtils.contains(trainingClass, "nterrupted") || StringUtils.contains(trainingClass, "ontinuing")) { active = false; } accreditation.setActive(active); return accreditation; } }); } catch (IncorrectResultSizeDataAccessException ie) { dataLogger.debug("No results found for search: " + 
ie.getMessage()); } for (AccreditationBean acrd : accreditations) { if (acrd.getActive()) { // Generate index key String specialtyAbbreviation = acrd.getAccreditationType(); String specialtyTypeName = acrd.getSpecialtyType(); if (StringUtils.isNotBlank(acrd.getAbbreviation())) { specialtyAbbreviation = acrd.getAbbreviation(); } if (StringUtils.isNotBlank(acrd.getSpecialtySubType())) { specialtyTypeName = acrd.getSpecialtyType() + " - " + acrd.getSpecialtySubType(); } String specialtyKey = specialtyAbbreviation + ": " + specialtyTypeName; AccreditationBean core = new AccreditationBean(); core.setAbbreviation(acrd.getAbbreviation()); core.setAccreditationType(acrd.getAccreditationType()); core.setCore(true); core.setSpecialtyType(acrd.getSpecialtyType()); core.setSpecialtySubType(acrd.getSpecialtySubType()); core.setSpecialtyTypeAbbreviation(acrd.getSpecialtyTypeAbbreviation()); AccreditationBean nonCore = new AccreditationBean(); nonCore.setAbbreviation(acrd.getAbbreviation()); nonCore.setAccreditationType(acrd.getAccreditationType()); nonCore.setCore(false); nonCore.setSpecialtyType(acrd.getSpecialtyType()); nonCore.setSpecialtySubType(acrd.getSpecialtySubType()); nonCore.setSpecialtyTypeAbbreviation(acrd.getSpecialtyTypeAbbreviation()); if (summary.containsKey(specialtyKey)) { // Specialty exists in TreeMap -> Get array and modify try { AccreditationBean[] existing = summary.get(specialtyKey); core = existing[0]; nonCore = existing[1]; } catch (Exception e) { dataLogger.error("Error loading existing training summary item: " + e.getMessage()); } } // Add to the relevant core/nonCore running totals if (acrd.getCore()) { core.setWeeksApproved(core.getWeeksApproved() + acrd.getWeeksApproved()); core.setWeeksCertified(core.getWeeksCertified() + acrd.getWeeksCertified()); } else { nonCore.setWeeksApproved(nonCore.getWeeksApproved() + acrd.getWeeksApproved()); nonCore.setWeeksCertified(nonCore.getWeeksCertified() + acrd.getWeeksCertified()); } // Set accreditation 
details AccreditationBean[] details = new AccreditationBean[] { core, nonCore }; // Add accreditation to map summary.put(specialtyKey, details); } } return summary; }
From source file:fr.cirad.mgdb.exporting.individualoriented.DARwinExportHandler.java
@Override public void exportData(OutputStream outputStream, String sModule, Collection<File> individualExportFiles, boolean fDeleteSampleExportFilesOnExit, ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms, Map<String, InputStream> readyToExportFiles) throws Exception { MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule); GenotypingProject aProject = mongoTemplate.findOne( new Query(Criteria.where(GenotypingProject.FIELDNAME_PLOIDY_LEVEL).exists(true)), GenotypingProject.class); if (aProject == null) LOG.warn("Unable to find a project containing ploidy level information! Assuming ploidy level is 2."); int ploidy = aProject == null ? 2 : aProject.getPloidyLevel(); File warningFile = File.createTempFile("export_warnings_", ""); FileWriter warningFileWriter = new FileWriter(warningFile); int markerCount = markerCursor.count(); ZipOutputStream zos = new ZipOutputStream(outputStream); if (readyToExportFiles != null) for (String readyToExportFile : readyToExportFiles.keySet()) { zos.putNextEntry(new ZipEntry(readyToExportFile)); InputStream inputStream = readyToExportFiles.get(readyToExportFile); byte[] dataBlock = new byte[1024]; int count = inputStream.read(dataBlock, 0, 1024); while (count != -1) { zos.write(dataBlock, 0, count); count = inputStream.read(dataBlock, 0, 1024); }/*from w w w . 
j a v a 2 s.co m*/ } String exportName = sModule + "_" + markerCount + "variants_" + individualExportFiles.size() + "individuals"; StringBuffer donFileContents = new StringBuffer( "@DARwin 5.0 - DON -" + LINE_SEPARATOR + individualExportFiles.size() + "\t" + 1 + LINE_SEPARATOR + "N" + "\t" + "individual" + LINE_SEPARATOR); int count = 0; String missingGenotype = ""; for (int j = 0; j < ploidy; j++) missingGenotype += "\tN"; zos.putNextEntry(new ZipEntry(exportName + ".var")); zos.write(("@DARwin 5.0 - ALLELIC - " + ploidy + LINE_SEPARATOR + individualExportFiles.size() + "\t" + markerCount * ploidy + LINE_SEPARATOR + "N").getBytes()); DBCursor markerCursorCopy = markerCursor.copy(); // dunno how expensive this is, but seems safer than keeping all IDs in memory at any time short nProgress = 0, nPreviousProgress = 0; int avgObjSize = (Integer) mongoTemplate .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize"); int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize; markerCursorCopy.batchSize(nChunkSize); int nMarkerIndex = 0; while (markerCursorCopy.hasNext()) { DBObject exportVariant = markerCursorCopy.next(); Comparable markerId = (Comparable) exportVariant.get("_id"); if (markerSynonyms != null) { Comparable syn = markerSynonyms.get(markerId); if (syn != null) markerId = syn; } for (int j = 0; j < ploidy; j++) zos.write(("\t" + markerId).getBytes()); } TreeMap<Integer, Comparable> problematicMarkerIndexToNameMap = new TreeMap<Integer, Comparable>(); ArrayList<String> distinctAlleles = new ArrayList<String>(); // the index of each allele will be used as its code int i = 0; for (File f : individualExportFiles) { BufferedReader in = new BufferedReader(new FileReader(f)); try { String individualId, line = in.readLine(); // read sample id if (line != null) individualId = line; else throw new Exception("Unable to read first line of temp export file " + f.getName()); donFileContents.append(++count + "\t" + individualId 
+ LINE_SEPARATOR); zos.write((LINE_SEPARATOR + count).getBytes()); nMarkerIndex = 0; while ((line = in.readLine()) != null) { List<String> genotypes = MgdbDao.split(line, "|"); HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes int highestGenotypeCount = 0; String mostFrequentGenotype = null; for (String genotype : genotypes) { if (genotype.length() == 0) continue; /* skip missing genotypes */ int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype); if (gtCount > highestGenotypeCount) { highestGenotypeCount = gtCount; mostFrequentGenotype = genotype; } genotypeCounts.put(genotype, gtCount); } if (genotypeCounts.size() > 1) { warningFileWriter.write("- Dissimilar genotypes found for variant __" + nMarkerIndex + "__, individual " + individualId + ". Exporting most frequent: " + mostFrequentGenotype + "\n"); problematicMarkerIndexToNameMap.put(nMarkerIndex, ""); } String codedGenotype = ""; if (mostFrequentGenotype != null) for (String allele : mostFrequentGenotype.split(" ")) { if (!distinctAlleles.contains(allele)) distinctAlleles.add(allele); codedGenotype += "\t" + distinctAlleles.indexOf(allele); } else codedGenotype = missingGenotype.replaceAll("N", "-1"); // missing data is coded as -1 zos.write(codedGenotype.getBytes()); nMarkerIndex++; } } catch (Exception e) { LOG.error("Error exporting data", e); progress.setError("Error exporting data: " + e.getClass().getSimpleName() + (e.getMessage() != null ? 
" - " + e.getMessage() : "")); return; } finally { in.close(); } if (progress.hasAborted()) return; nProgress = (short) (++i * 100 / individualExportFiles.size()); if (nProgress > nPreviousProgress) { // LOG.debug("============= doDARwinExport (" + i + "): " + nProgress + "% ============="); progress.setCurrentStepProgress(nProgress); nPreviousProgress = nProgress; } if (!f.delete()) { f.deleteOnExit(); LOG.info("Unable to delete tmp export file " + f.getAbsolutePath()); } } zos.putNextEntry(new ZipEntry(exportName + ".don")); zos.write(donFileContents.toString().getBytes()); // now read variant names for those that induced warnings nMarkerIndex = 0; markerCursor.batchSize(nChunkSize); while (markerCursor.hasNext()) { DBObject exportVariant = markerCursor.next(); if (problematicMarkerIndexToNameMap.containsKey(nMarkerIndex)) { Comparable markerId = (Comparable) exportVariant.get("_id"); if (markerSynonyms != null) { Comparable syn = markerSynonyms.get(markerId); if (syn != null) markerId = syn; } for (int j = 0; j < ploidy; j++) zos.write(("\t" + markerId).getBytes()); problematicMarkerIndexToNameMap.put(nMarkerIndex, markerId); } } warningFileWriter.close(); if (warningFile.length() > 0) { zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt")); int nWarningCount = 0; BufferedReader in = new BufferedReader(new FileReader(warningFile)); String sLine; while ((sLine = in.readLine()) != null) { for (Integer aMarkerIndex : problematicMarkerIndexToNameMap.keySet()) sLine = sLine.replaceAll("__" + aMarkerIndex + "__", problematicMarkerIndexToNameMap.get(aMarkerIndex).toString()); zos.write((sLine + "\n").getBytes()); in.readLine(); nWarningCount++; } LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount); in.close(); } warningFile.delete(); zos.close(); progress.setCurrentStepProgress((short) 100); }