List of usage examples for java.util TreeMap entrySet
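Every example below turns on the same idiom: TreeMap.entrySet() returns a Set<Map.Entry<K, V>> view whose iterator walks the entries in ascending key order. Before the real-world usages, here is a minimal, self-contained sketch of the method (the map contents are made up):

import java.util.Map;
import java.util.TreeMap;

public class EntrySetBasics {
    public static void main(String[] args) {
        TreeMap<String, Integer> scores = new TreeMap<>();
        scores.put("charlie", 7);
        scores.put("alice", 9);
        scores.put("bob", 5);

        // entrySet() is a live, key-ordered view: alice, bob, charlie.
        for (Map.Entry<String, Integer> e : scores.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}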
From source file:com.zimbra.cs.account.ProvUtil.java
private void dumpAttrs(Map<String, Object> attrsIn, Set<String> specificAttrs) throws ServiceException {
    TreeMap<String, Object> attrs = new TreeMap<String, Object>(attrsIn);

    Map<String, Set<String>> specificAttrValues = null;

    if (specificAttrs != null) {
        specificAttrValues = new HashMap<String, Set<String>>();
        for (String specificAttr : specificAttrs) {
            int colonAt = specificAttr.indexOf("=");
            String attrName = null;
            String attrValue = null;
            if (colonAt == -1) {
                attrName = specificAttr;
            } else {
                attrName = specificAttr.substring(0, colonAt);
                attrValue = specificAttr.substring(colonAt + 1);
                if (attrValue.length() < 1) {
                    throw ServiceException.INVALID_REQUEST("missing value for " + specificAttr, null);
                }
            }

            attrName = attrName.toLowerCase();
            Set<String> values = specificAttrValues.get(attrName);
            if (values == null) { // haven't seen the attr yet
                values = new HashSet<String>();
            }
            if (attrValue != null) {
                values.add(attrValue);
            }
            specificAttrValues.put(attrName, values);
        }
    }

    AttributeManager attrMgr = AttributeManager.getInstance();

    SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMddHHmmss");
    String timestamp = dateFmt.format(new Date());

    for (Map.Entry<String, Object> entry : attrs.entrySet()) {
        String name = entry.getKey();

        boolean isBinary = needsBinaryIO(attrMgr, name);

        Set<String> specificValues = null;
        if (specificAttrValues != null) {
            specificValues = specificAttrValues.get(name.toLowerCase());
        }
        if (specificAttrValues == null || specificAttrValues.keySet().contains(name.toLowerCase())) {
            Object value = entry.getValue();

            if (value instanceof String[]) {
                String sv[] = (String[]) value;
                for (int i = 0; i < sv.length; i++) {
                    String aSv = sv[i];
                    // don't print permission denied attr
                    if (this.forceDisplayAttrValue || aSv.length() > 0
                            && (specificValues == null || specificValues.isEmpty()
                                    || specificValues.contains(aSv))) {
                        printAttr(name, aSv, i, isBinary, timestamp);
                    }
                }
            } else if (value instanceof String) {
                // don't print permission denied attr
                if (this.forceDisplayAttrValue || ((String) value).length() > 0
                        && (specificValues == null || specificValues.isEmpty()
                                || specificValues.contains(value))) {
                    printAttr(name, (String) value, null, isBinary, timestamp);
                }
            }
        }
    }

    // force display empty value attribute
    if (this.forceDisplayAttrValue) {
        for (String attr : specificAttrs) {
            if (!attrs.containsKey(attr)) {
                AttributeInfo ai = attrMgr.getAttributeInfo(attr);
                if (ai != null) {
                    printAttr(attr, "", null, false, timestamp);
                }
            }
        }
    }
}
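The idiom to note above is the copy constructor: an unsorted Map is wrapped in a TreeMap purely so that entrySet() yields the attributes in key order. A minimal sketch of that pattern, with made-up attribute names in place of Zimbra's:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class SortedDump {
    public static void main(String[] args) {
        Map<String, Object> unsorted = new LinkedHashMap<>();
        unsorted.put("zimbraMailHost", "mail01");
        unsorted.put("cn", "jdoe");
        unsorted.put("displayName", "Jane Doe");

        // Copying into a TreeMap sorts by key; entrySet() then iterates alphabetically.
        for (Map.Entry<String, Object> e : new TreeMap<>(unsorted).entrySet()) {
            System.out.println(e.getKey() + ": " + e.getValue());
        }
    }
}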
From source file:org.xwiki.repository.internal.RepositoryManager.java
public DocumentReference importExtension(String extensionId, ExtensionRepository repository, Type type)
        throws QueryException, XWikiException, ResolveException {
    TreeMap<Version, String> versions = new TreeMap<Version, String>();

    Version lastVersion = getVersions(extensionId, repository, type, versions);

    if (lastVersion == null) {
        throw new ExtensionNotFoundException(
                "Can't find any version for the extension [" + extensionId + "] on repository [" + repository + "]");
    } else if (versions.isEmpty()) {
        // If there is no valid version, import the last version
        versions.put(lastVersion, extensionId);
    } else {
        // Select the last valid version
        lastVersion = versions.lastKey();
    }

    Extension extension = repository.resolve(new ExtensionId(extensionId, lastVersion));

    // Get the versions published under former ids
    Collection<ExtensionId> features = extension.getExtensionFeatures();
    for (ExtensionId feature : features) {
        try {
            getVersions(feature.getId(), repository, type, versions);
        } catch (ResolveException e) {
            // Ignore
        }
    }

    XWikiContext xcontext = this.xcontextProvider.get();

    boolean needSave = false;

    XWikiDocument document = getExistingExtensionDocumentById(extensionId);

    if (document == null) {
        // Create document
        document = xcontext.getWiki().getDocument(
                new DocumentReference(xcontext.getWikiId(), Arrays.asList("Extension", extension.getName()),
                        "WebHome"),
                xcontext);

        for (int i = 1; !document.isNew(); ++i) {
            document = xcontext.getWiki().getDocument(
                    new DocumentReference(xcontext.getWikiId(),
                            Arrays.asList("Extension", extension.getName() + ' ' + i), "WebHome"),
                    xcontext);
        }

        document.readFromTemplate(
                this.currentResolver.resolve(XWikiRepositoryModel.EXTENSION_TEMPLATEREFERENCE), xcontext);

        needSave = true;
    }

    // Update document

    BaseObject extensionObject = document.getXObject(XWikiRepositoryModel.EXTENSION_CLASSREFERENCE);
    if (extensionObject == null) {
        extensionObject = document.newXObject(XWikiRepositoryModel.EXTENSION_CLASSREFERENCE, xcontext);
        needSave = true;
    }

    if (!StringUtils.equals(extensionId,
            getValue(extensionObject, XWikiRepositoryModel.PROP_EXTENSION_ID, (String) null))) {
        extensionObject.set(XWikiRepositoryModel.PROP_EXTENSION_ID, extensionId, xcontext);
        needSave = true;
    }

    // Update extension information
    needSave |= updateExtension(extension, extensionObject, xcontext);

    // Proxy marker

    BaseObject extensionProxyObject = document.getXObject(XWikiRepositoryModel.EXTENSIONPROXY_CLASSREFERENCE);
    if (extensionProxyObject == null) {
        extensionProxyObject = document.newXObject(XWikiRepositoryModel.EXTENSIONPROXY_CLASSREFERENCE, xcontext);
        extensionProxyObject.setIntValue(XWikiRepositoryModel.PROP_PROXY_AUTOUPDATE, 1);
        needSave = true;
    }

    needSave |= update(extensionProxyObject, XWikiRepositoryModel.PROP_PROXY_REPOSITORYID,
            repository.getDescriptor().getId());
    needSave |= update(extensionProxyObject, XWikiRepositoryModel.PROP_PROXY_REPOSITORYTYPE,
            repository.getDescriptor().getType());
    needSave |= update(extensionProxyObject, XWikiRepositoryModel.PROP_PROXY_REPOSITORYURI,
            repository.getDescriptor().getURI().toString());

    // Remove nonexistent versions

    Set<String> validVersions = new HashSet<String>();

    List<BaseObject> versionObjects = document.getXObjects(XWikiRepositoryModel.EXTENSIONVERSION_CLASSREFERENCE);
    if (versionObjects != null) {
        for (BaseObject versionObject : versionObjects) {
            if (versionObject != null) {
                String version = getValue(versionObject, XWikiRepositoryModel.PROP_VERSION_VERSION);

                if (StringUtils.isBlank(version) || (isVersionProxyingEnabled(document)
                        && !new DefaultVersion(version).equals(extension.getId().getVersion()))) {
                    // Empty version OR old versions should be proxied
                    document.removeXObject(versionObject);
                    needSave = true;
                } else {
                    if (!versions.containsKey(new DefaultVersion(version))) {
                        // The version does not exist on the remote repository
                        if (!isVersionValid(document, versionObject, xcontext)) {
                            // The version is invalid, remove it so it does not make the whole extension invalid
                            document.removeXObject(versionObject);
                            needSave = true;
                        } else {
                            // The version is valid, keep it
                            validVersions.add(version);
                        }
                    } else {
                        // This version exists on the remote repository
                        validVersions.add(version);
                    }
                }
            }
        }
    }

    List<BaseObject> dependencyObjects = document
            .getXObjects(XWikiRepositoryModel.EXTENSIONDEPENDENCY_CLASSREFERENCE);
    if (dependencyObjects != null) {
        for (BaseObject dependencyObject : dependencyObjects) {
            if (dependencyObject != null) {
                String version = getValue(dependencyObject, XWikiRepositoryModel.PROP_DEPENDENCY_EXTENSIONVERSION);

                if (!validVersions.contains(version)) {
                    // The version is invalid, remove it so it does not make the whole extension invalid
                    document.removeXObject(dependencyObject);
                    needSave = true;
                }
            }
        }
    }

    // Update versions

    for (Map.Entry<Version, String> entry : versions.entrySet()) {
        Version version = entry.getKey();
        String id = entry.getValue();

        try {
            Extension versionExtension;
            if (version.equals(extension.getId().getVersion())) {
                versionExtension = extension;
            } else if (isVersionProxyingEnabled(document)) {
                continue;
            } else {
                versionExtension = repository.resolve(new ExtensionId(id, version));
            }

            // Update version related information
            needSave |= updateExtensionVersion(document, versionExtension);
        } catch (Exception e) {
            this.logger.error("Failed to resolve extension with id [" + id + "] and version [" + version
                    + "] on repository [" + repository + "]", e);
        }
    }

    if (needSave) {
        document.setAuthorReference(xcontext.getUserReference());
        if (document.isNew()) {
            document.setContentAuthorReference(xcontext.getUserReference());
            document.setCreatorReference(xcontext.getUserReference());
        }
        xcontext.getWiki().saveDocument(document,
                "Imported extension [" + extensionId + "] from repository [" + repository.getDescriptor() + "]",
                true, xcontext);
    }

    return document.getDocumentReference();
}
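Here the TreeMap is keyed by Version, so lastKey() picks the newest valid version and the entrySet() loop visits the versions in ascending order. A stripped-down sketch of the same idea, using plain integer keys instead of XWiki's Version type (a real version scheme would need a semantic Comparable, not ints):

import java.util.Map;
import java.util.TreeMap;

public class VersionIndex {
    public static void main(String[] args) {
        // Keys sort by their natural order, so lastKey() is the highest version.
        TreeMap<Integer, String> versions = new TreeMap<>();
        versions.put(2, "my-extension");
        versions.put(1, "my-extension-old-id");
        versions.put(3, "my-extension");

        System.out.println("latest = " + versions.lastKey()); // 3

        // entrySet() walks versions in ascending order, like the import loop above.
        for (Map.Entry<Integer, String> e : versions.entrySet()) {
            System.out.println("process version " + e.getKey() + " under id " + e.getValue());
        }
    }
}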
From source file:org.opendatakit.services.database.utilities.ODKDatabaseImplUtils.java
/**
 * If the caller specified a complex json value for a structured type, flush
 * the value through to the individual columns.
 *
 * @param orderedColumns
 * @param values
 */
private void cleanUpValuesMap(OrderedColumns orderedColumns, Map<String, Object> values) {
    TreeMap<String, String> toBeResolved = new TreeMap<String, String>();

    for (String key : values.keySet()) {
        if (DataTableColumns.CONFLICT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.DEFAULT_ACCESS.equals(key)) {
            continue;
        } else if (DataTableColumns.ROW_OWNER.equals(key)) {
            continue;
        } else if (DataTableColumns.GROUP_READ_ONLY.equals(key)) {
            continue;
        } else if (DataTableColumns.GROUP_MODIFY.equals(key)) {
            continue;
        } else if (DataTableColumns.GROUP_PRIVILEGED.equals(key)) {
            continue;
        } else if (DataTableColumns.FORM_ID.equals(key)) {
            continue;
        } else if (DataTableColumns.ID.equals(key)) {
            continue;
        } else if (DataTableColumns.LOCALE.equals(key)) {
            continue;
        } else if (DataTableColumns.ROW_ETAG.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_CREATOR.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TIMESTAMP.equals(key)) {
            continue;
        } else if (DataTableColumns.SAVEPOINT_TYPE.equals(key)) {
            continue;
        } else if (DataTableColumns.SYNC_STATE.equals(key)) {
            continue;
        } else if (DataTableColumns._ID.equals(key)) {
            continue;
        }
        // OK it is one of the data columns
        ColumnDefinition cp = orderedColumns.find(key);
        if (!cp.isUnitOfRetention()) {
            toBeResolved.put(key, (String) values.get(key));
        }
    }

    // remove these non-retained values from the values set...
    for (String key : toBeResolved.keySet()) {
        values.remove(key);
    }

    while (!toBeResolved.isEmpty()) {
        TreeMap<String, String> moreToResolve = new TreeMap<String, String>();

        for (Map.Entry<String, String> entry : toBeResolved.entrySet()) {
            String key = entry.getKey();
            String json = entry.getValue();
            if (json == null) {
                // don't need to do anything since the value is null
                continue;
            }
            ColumnDefinition cp = orderedColumns.find(key);
            try {
                TypeReference<Map<String, Object>> reference = new TypeReference<Map<String, Object>>() {
                };
                Map<String, Object> struct = ODKFileUtils.mapper.readValue(json, reference);

                for (ColumnDefinition child : cp.getChildren()) {
                    String subkey = child.getElementKey();
                    ColumnDefinition subcp = orderedColumns.find(subkey);
                    if (subcp.isUnitOfRetention()) {
                        ElementType subtype = subcp.getType();
                        ElementDataType type = subtype.getDataType();
                        if (type == ElementDataType.integer) {
                            values.put(subkey, (Integer) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.number) {
                            values.put(subkey, (Double) struct.get(subcp.getElementName()));
                        } else if (type == ElementDataType.bool) {
                            values.put(subkey, ((Boolean) struct.get(subcp.getElementName())) ? 1 : 0);
                        } else {
                            values.put(subkey, (String) struct.get(subcp.getElementName()));
                        }
                    } else {
                        // this must be a javascript structure... re-JSON it and save (for next round).
                        moreToResolve.put(subkey,
                                ODKFileUtils.mapper.writeValueAsString(struct.get(subcp.getElementName())));
                    }
                }
            } catch (JsonParseException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (JsonMappingException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            } catch (IOException e) {
                e.printStackTrace();
                throw new IllegalStateException("should not be happening");
            }
        }

        toBeResolved = moreToResolve;
    }
}
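The loop above is a worklist: each round iterates the entrySet() of the current map while collecting follow-up work into a fresh TreeMap, then swaps the reference. That avoids mutating a map during its own iteration. A minimal sketch with a hypothetical one-level nesting scheme (the key names are invented):

import java.util.Map;
import java.util.TreeMap;

public class Worklist {
    public static void main(String[] args) {
        // Each value names the key of a deeper level; "" means nothing further to resolve.
        TreeMap<String, String> toResolve = new TreeMap<>();
        toResolve.put("geopoint", "latitude");

        while (!toResolve.isEmpty()) {
            // Never mutate the map being iterated; collect next-round work separately and swap.
            TreeMap<String, String> next = new TreeMap<>();
            for (Map.Entry<String, String> e : toResolve.entrySet()) {
                System.out.println("resolved " + e.getKey());
                if (!e.getValue().isEmpty()) {
                    next.put(e.getValue(), ""); // schedule the child for the next round
                }
            }
            toResolve = next;
        }
    }
}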
From source file:io.warp10.continuum.gts.GTSHelper.java
public static String buildSelector(Metadata metadata) {
    StringBuilder sb = new StringBuilder();

    encodeName(sb, metadata.getName());
    sb.append("{");
    TreeMap<String, String> labels = new TreeMap<String, String>(metadata.getLabels());
    boolean first = true;
    for (Entry<String, String> entry : labels.entrySet()) {
        if (!first) {
            sb.append(",");
        }
        encodeName(sb, entry.getKey());
        sb.append("=");
        encodeName(sb, entry.getValue());
        first = false;
    }
    sb.append("}");

    return sb.toString();
}
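Copying the labels into a TreeMap before serializing makes the selector canonical: two equal label sets always produce the same string, whatever their insertion order. A small sketch of the same trick with made-up labels and a fixed "metric" name:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringJoiner;
import java.util.TreeMap;

public class Selector {
    public static void main(String[] args) {
        Map<String, String> labels = new LinkedHashMap<>();
        labels.put("dc", "paris");
        labels.put("app", "billing");

        // Sorting first makes the output deterministic regardless of insertion order.
        StringJoiner joiner = new StringJoiner(",", "metric{", "}");
        for (Map.Entry<String, String> e : new TreeMap<>(labels).entrySet()) {
            joiner.add(e.getKey() + "=" + e.getValue());
        }
        System.out.println(joiner); // metric{app=billing,dc=paris}
    }
}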
From source file:org.dasein.cloud.aws.compute.EC2Instance.java
@Override
public @Nonnull Iterable<VmStatistics> getVMStatisticsForPeriod(@Nonnull String instanceId, long startTimestamp,
        long endTimestamp) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "getVMStatisticsForPeriod");
    try {
        if (endTimestamp < 1L) {
            endTimestamp = System.currentTimeMillis() + 1000L;
        }
        if (startTimestamp < (System.currentTimeMillis() - CalendarWrapper.DAY)) {
            startTimestamp = System.currentTimeMillis() - CalendarWrapper.DAY;
            if (startTimestamp > (endTimestamp - (2L * CalendarWrapper.MINUTE))) {
                endTimestamp = startTimestamp + (2L * CalendarWrapper.MINUTE);
            }
        } else if (startTimestamp > (endTimestamp - (2L * CalendarWrapper.MINUTE))) {
            startTimestamp = endTimestamp - (2L * CalendarWrapper.MINUTE);
        }

        TreeMap<Integer, VmStatistics> statMap = new TreeMap<Integer, VmStatistics>();
        int minutes = (int) ((endTimestamp - startTimestamp) / CalendarWrapper.MINUTE);

        for (int i = 1; i <= minutes; i++) {
            statMap.put(i, new VmStatistics());
        }

        Set<Metric> metrics = calculate("CPUUtilization", "Percent", instanceId, false, startTimestamp,
                endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageCpuUtilization(m.average);
            stats.setMaximumCpuUtilization(m.maximum);
            stats.setMinimumCpuUtilization(m.minimum);
            stats.setStartTimestamp(m.timestamp);
            stats.setEndTimestamp(m.timestamp);
            stats.setSamples(m.samples);
        }

        String id = instanceId;
        boolean idIsVolumeId = false;
        VirtualMachine vm = getVirtualMachine(instanceId);
        if (vm != null && vm.isPersistent()) {
            if (vm.getProviderVolumeIds(getProvider()).length > 0) {
                id = vm.getProviderVolumeIds(getProvider())[0];
                idIsVolumeId = true;
            }
        }

        metrics = calculate(idIsVolumeId ? "VolumeReadBytes" : "DiskReadBytes", "Bytes", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskReadBytes(m.average);
            stats.setMinimumDiskReadBytes(m.minimum);
            stats.setMaximumDiskReadBytes(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }

        metrics = calculate(idIsVolumeId ? "VolumeReadOps" : "DiskReadOps", "Count", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskReadOperations(m.average);
            stats.setMinimumDiskReadOperations(m.minimum);
            stats.setMaximumDiskReadOperations(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }

        metrics = calculate(idIsVolumeId ? "VolumeWriteBytes" : "DiskWriteBytes", "Bytes", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskWriteBytes(m.average);
            stats.setMinimumDiskWriteBytes(m.minimum);
            stats.setMaximumDiskWriteBytes(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }

        metrics = calculate(idIsVolumeId ? "VolumeWriteOps" : "DiskWriteOps", "Count", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskWriteOperations(m.average);
            stats.setMinimumDiskWriteOperations(m.minimum);
            stats.setMaximumDiskWriteOperations(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }

        metrics = calculate("NetworkIn", "Bytes", instanceId, false, startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageNetworkIn(m.average);
            stats.setMinimumNetworkIn(m.minimum);
            stats.setMaximumNetworkIn(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }

        metrics = calculate("NetworkOut", "Bytes", instanceId, false, startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageNetworkOut(m.average);
            stats.setMinimumNetworkOut(m.minimum);
            stats.setMaximumNetworkOut(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }

        ArrayList<VmStatistics> list = new ArrayList<VmStatistics>();
        for (Map.Entry<Integer, VmStatistics> entry : statMap.entrySet()) {
            VmStatistics stats = entry.getValue();
            if (stats != null && stats.getSamples() > 0) {
                list.add(stats);
            }
        }
        return list;
    } finally {
        APITrace.end();
    }
}
From source file:io.warp10.continuum.gts.GTSHelper.java
public static List<GeoTimeSerie> chunk(GeoTimeSerie gts, long lastchunk, long chunkwidth, long chunkcount,
        String chunklabel, boolean keepempty, long overlap) throws WarpScriptException {

    if (overlap < 0 || overlap > chunkwidth) {
        throw new WarpScriptException("Overlap cannot exceed chunk width.");
    }

    //
    // Check if 'chunklabel' exists in the GTS labels
    //

    Metadata metadata = gts.getMetadata();

    if (metadata.getLabels().containsKey(chunklabel)) {
        throw new WarpScriptException(
                "Cannot operate on Geo Time Series which already have a label named '" + chunklabel + "'");
    }

    TreeMap<Long, GeoTimeSerie> chunks = new TreeMap<Long, GeoTimeSerie>();

    //
    // If GTS is bucketized, make sure bucketspan is less than boxwidth
    //

    boolean bucketized = GTSHelper.isBucketized(gts);

    if (bucketized) {
        if (gts.bucketspan > chunkwidth) {
            throw new WarpScriptException(
                    "Cannot operate on Geo Time Series with a bucketspan greater than the chunk width.");
        }
    } else {
        // GTS is not bucketized and has 0 values; if lastchunk was 0, return an empty list as we
        // are unable to produce chunks
        if (0 == gts.values && 0L == lastchunk) {
            return new ArrayList<GeoTimeSerie>();
        }
    }

    //
    // Set chunkcount to Integer.MAX_VALUE if it's 0
    //

    boolean zeroChunkCount = false;

    if (0 == chunkcount) {
        chunkcount = Integer.MAX_VALUE;
        zeroChunkCount = true;
    }

    //
    // Sort timestamps in reverse order so we can produce all chunks in O(n)
    //

    GTSHelper.sort(gts, true);

    //
    // Loop on the chunks
    //

    // Index in the timestamp array
    int idx = 0;

    long bucketspan = gts.bucketspan;
    int bucketcount = gts.bucketcount;
    long lastbucket = gts.lastbucket;

    //
    // If lastchunk is 0, use lastbucket or the most recent tick
    //

    if (0 == lastchunk) {
        if (isBucketized(gts)) {
            lastchunk = lastbucket;
        } else {
            // Use the most recent tick
            lastchunk = gts.ticks[0];
            // Make sure lastchunk is aligned on 'chunkwidth' boundary
            if (0 != (lastchunk % chunkwidth)) {
                lastchunk = lastchunk - (lastchunk % chunkwidth) + chunkwidth;
            }
        }
    }

    for (long i = 0; i < chunkcount; i++) {
        // If we have no more values and were not specified a chunk count, exit the loop, we're done
        if (idx >= gts.values && zeroChunkCount) {
            break;
        }

        // Compute chunk bounds
        long chunkend = lastchunk - i * chunkwidth;
        long chunkstart = chunkend - chunkwidth + 1;

        GeoTimeSerie chunkgts = new GeoTimeSerie(lastbucket, bucketcount, bucketspan, 16);

        // Set metadata for the GTS
        chunkgts.setMetadata(metadata);
        // Add 'chunklabel'
        chunkgts.getMetadata().putToLabels(chunklabel, Long.toString(chunkend));

        if (bucketized) {
            // Chunk is outside the GTS, it will be empty
            if (lastbucket < chunkstart || chunkend <= lastbucket - (bucketcount * bucketspan)) {
                // Add the (empty) chunk if keepempty is true
                if (keepempty || overlap > 0) {
                    chunks.put(chunkend, chunkgts);
                }
                continue;
            }

            // Set the bucketized parameters in the GTS

            // If bucketspan does not divide chunkwidth, chunks won't be bucketized
            if (0 == chunkwidth % bucketspan) {
                chunkgts.bucketspan = bucketspan;
                chunkgts.lastbucket = chunkend;
                chunkgts.bucketcount = (int) ((chunkend - chunkstart + 1) / bucketspan);
            } else {
                chunkgts.bucketspan = 0L;
                chunkgts.lastbucket = 0L;
                chunkgts.bucketcount = 0;
            }
        }

        //
        // Add the datapoints which fall within the current chunk
        //

        // Advance until the current tick is before 'chunkend'
        while (idx < gts.values && gts.ticks[idx] > chunkend) {
            idx++;
        }

        // We've exhausted the values
        if (idx >= gts.values) {
            // only add chunk if it's not empty or empty with 'keepempty' set to true
            if (0 != chunkgts.values || (keepempty || overlap > 0)) {
                chunks.put(chunkend, chunkgts);
            }
            continue;
        }

        // The current tick is before the beginning of the current chunk
        if (gts.ticks[idx] < chunkstart) {
            // only add chunk if it's not empty or empty with 'keepempty' set to true
            if (0 != chunkgts.values || (keepempty || overlap > 0)) {
                chunks.put(chunkend, chunkgts);
            }
            continue;
        }

        while (idx < gts.values && gts.ticks[idx] >= chunkstart) {
            GTSHelper.setValue(chunkgts, GTSHelper.tickAtIndex(gts, idx), GTSHelper.locationAtIndex(gts, idx),
                    GTSHelper.elevationAtIndex(gts, idx), GTSHelper.valueAtIndex(gts, idx), false);
            idx++;
        }

        // only add chunk if it's not empty or empty with 'keepempty' set to true
        if (0 != chunkgts.values || (keepempty || overlap > 0)) {
            chunks.put(chunkend, chunkgts);
        }
    }

    //
    // Handle overlapping if need be.
    // We need to iterate over all ticks and add datapoints to each GTS they belong to
    //

    if (overlap > 0) {

        //
        // Check if we need to add a first and a last chunk
        //

        long ts = GTSHelper.tickAtIndex(gts, 0);

        if (ts <= chunks.firstKey() - chunkwidth) {
            Entry<Long, GeoTimeSerie> currentFirst = chunks.firstEntry();
            GeoTimeSerie firstChunk = currentFirst.getValue().cloneEmpty();
            if (GTSHelper.isBucketized(currentFirst.getValue())) {
                firstChunk.lastbucket = firstChunk.lastbucket - firstChunk.bucketspan;
            }
            chunks.put(currentFirst.getKey() - chunkwidth, firstChunk);
        }

        ts = GTSHelper.tickAtIndex(gts, gts.values - 1);

        if (ts >= chunks.lastKey() - chunkwidth + 1 - overlap) {
            Entry<Long, GeoTimeSerie> currentLast = chunks.lastEntry();
            GeoTimeSerie lastChunk = currentLast.getValue().cloneEmpty();
            if (GTSHelper.isBucketized(currentLast.getValue())) {
                lastChunk.lastbucket = lastChunk.lastbucket + lastChunk.bucketspan;
            }
            chunks.put(currentLast.getKey() + chunkwidth, lastChunk);
        }

        //
        // Put all entries in a list so we can access them randomly
        //

        List<Entry<Long, GeoTimeSerie>> allchunks = new ArrayList<Entry<Long, GeoTimeSerie>>(chunks.entrySet());

        int[] currentSizes = new int[allchunks.size()];

        for (int i = 0; i < currentSizes.length; i++) {
            currentSizes[i] = allchunks.get(i).getValue().values;
        }

        //
        // Iterate over chunks, completing with prev and next overlaps
        // Remember the timestamps are in reverse order so far.
        //

        for (int i = 0; i < allchunks.size(); i++) {
            GeoTimeSerie current = allchunks.get(i).getValue();
            long lowerBound = allchunks.get(i).getKey() - chunkwidth + 1 - overlap;
            long upperBound = allchunks.get(i).getKey() + overlap;
            if (i > 0) {
                GeoTimeSerie prev = allchunks.get(i - 1).getValue();
                for (int j = 0; j < currentSizes[i - 1]; j++) {
                    long timestamp = GTSHelper.tickAtIndex(prev, j);
                    if (timestamp < lowerBound) {
                        break;
                    }
                    GTSHelper.setValue(current, timestamp, GTSHelper.locationAtIndex(prev, j),
                            GTSHelper.elevationAtIndex(prev, j), GTSHelper.valueAtIndex(prev, j), false);
                }
            }
            if (i < allchunks.size() - 1) {
                GeoTimeSerie next = allchunks.get(i + 1).getValue();
                for (int j = currentSizes[i + 1] - 1; j >= 0; j--) {
                    long timestamp = GTSHelper.tickAtIndex(next, j);
                    if (timestamp > upperBound) {
                        break;
                    }
                    GTSHelper.setValue(current, timestamp, GTSHelper.locationAtIndex(next, j),
                            GTSHelper.elevationAtIndex(next, j), GTSHelper.valueAtIndex(next, j), false);
                }
            }
        }
    }

    List<GeoTimeSerie> result = new ArrayList<GeoTimeSerie>();

    for (GeoTimeSerie g : chunks.values()) {
        if (!keepempty && 0 == g.values) {
            continue;
        }
        result.add(g);
    }

    return result;
}
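Besides entrySet(), this example leans on TreeMap's navigation methods (firstKey, lastKey, firstEntry, lastEntry) and then copies entrySet() into an ArrayList so each chunk can reach its previous and next neighbor by index. A minimal sketch of that neighbor-access pattern with invented chunk data:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class NeighborAccess {
    public static void main(String[] args) {
        TreeMap<Long, String> chunks = new TreeMap<>();
        chunks.put(100L, "chunk-a");
        chunks.put(200L, "chunk-b");
        chunks.put(300L, "chunk-c");

        // Navigation calls give the boundary chunks directly.
        System.out.println("first = " + chunks.firstEntry() + ", last = " + chunks.lastEntry());

        // Copying entrySet() into a list enables index arithmetic (prev/next),
        // which the map view itself does not offer.
        List<Map.Entry<Long, String>> ordered = new ArrayList<>(chunks.entrySet());
        for (int i = 0; i < ordered.size(); i++) {
            String prev = i > 0 ? ordered.get(i - 1).getValue() : "none";
            String next = i < ordered.size() - 1 ? ordered.get(i + 1).getValue() : "none";
            System.out.println(ordered.get(i).getValue() + ": prev=" + prev + ", next=" + next);
        }
    }
}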
From source file:org.sakaiproject.tool.assessment.facade.AssessmentGradingFacadeQueries.java
public List getExportResponsesData(String publishedAssessmentId, boolean anonymous, String audioMessage,
        String fileUploadMessage, String noSubmissionMessage, boolean showPartAndTotalScoreSpreadsheetColumns,
        String poolString, String partString, String questionString, String textString, String rationaleString,
        String itemGradingCommentsString, Map useridMap, String responseCommentString) {
    ArrayList dataList = new ArrayList();
    ArrayList headerList = new ArrayList();
    ArrayList finalList = new ArrayList(2);
    PublishedAssessmentService pubService = new PublishedAssessmentService();

    HashSet publishedAssessmentSections = pubService
            .getSectionSetForAssessment(Long.valueOf(publishedAssessmentId));
    Double zeroDouble = new Double(0.0);
    HashMap publishedAnswerHash = pubService
            .preparePublishedAnswerHash(pubService.getPublishedAssessment(publishedAssessmentId));
    HashMap publishedItemTextHash = pubService
            .preparePublishedItemTextHash(pubService.getPublishedAssessment(publishedAssessmentId));
    HashMap publishedItemHash = pubService
            .preparePublishedItemHash(pubService.getPublishedAssessment(publishedAssessmentId));

    // Get this sorted to add the blank gradings for the questions not answered later.
    Set publishItemSet = new TreeSet(new ItemComparator());
    publishItemSet.addAll(publishedItemHash.values());

    int numSubmission = 1;
    String numSubmissionText = noSubmissionMessage;
    String lastAgentId = "";
    String agentEid = "";
    String firstName = "";
    String lastName = "";
    Set useridSet = new HashSet(useridMap.keySet());
    ArrayList responseList = null;
    boolean canBeExported = false;
    boolean firstItemGradingData = true;
    List list = getAllOrderedSubmissions(publishedAssessmentId);
    Iterator assessmentGradingIter = list.iterator();
    while (assessmentGradingIter.hasNext()) {

        // create new section-item-scores structure for this assessmentGrading
        Iterator sectionsIter = publishedAssessmentSections.iterator();
        HashMap sectionItems = new HashMap();
        TreeMap sectionScores = new TreeMap();
        while (sectionsIter.hasNext()) {
            PublishedSectionData publishedSection = (PublishedSectionData) sectionsIter.next();
            ArrayList itemsArray = publishedSection.getItemArraySortedForGrading();
            Iterator itemsIter = itemsArray.iterator();
            // Iterate through the assessment questions (items)
            HashMap itemsForSection = new HashMap();
            while (itemsIter.hasNext()) {
                ItemDataIfc item = (ItemDataIfc) itemsIter.next();
                itemsForSection.put(item.getItemId(), item.getItemId());
            }
            sectionItems.put(publishedSection.getSequence(), itemsForSection);
            sectionScores.put(publishedSection.getSequence(), zeroDouble);
        }

        AssessmentGradingData assessmentGradingData = (AssessmentGradingData) assessmentGradingIter.next();
        String agentId = assessmentGradingData.getAgentId();
        responseList = new ArrayList();
        canBeExported = false;
        if (anonymous) {
            canBeExported = true;
            responseList.add(assessmentGradingData.getAssessmentGradingId());
        } else {
            if (useridMap.containsKey(assessmentGradingData.getAgentId())) {
                useridSet.remove(assessmentGradingData.getAgentId());
                canBeExported = true;
                try {
                    agentEid = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getEid();
                    firstName = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getFirstName();
                    lastName = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getLastName();
                } catch (Exception e) {
                    log.error("Cannot get user");
                }
                responseList.add(lastName);
                responseList.add(firstName);
                responseList.add(agentEid);
                if (assessmentGradingData.getForGrade()) {
                    if (lastAgentId.equals(agentId)) {
                        numSubmission++;
                    } else {
                        numSubmission = 1;
                        lastAgentId = agentId;
                    }
                } else {
                    numSubmission = 0;
                    lastAgentId = agentId;
                }
                if (numSubmission == 0) {
                    numSubmissionText = noSubmissionMessage;
                } else {
                    numSubmissionText = String.valueOf(numSubmission);
                }
                responseList.add(numSubmissionText);
            }
        }

        if (canBeExported) {
            int sectionScoreColumnStart = responseList.size();
            if (showPartAndTotalScoreSpreadsheetColumns) {
                Double finalScore = assessmentGradingData.getFinalScore();
                if (finalScore != null) {
                    responseList.add((Double) finalScore.doubleValue()); // cast for spreadsheet numerics
                } else {
                    log.debug("finalScore is NULL");
                    responseList.add(0d);
                }
            }

            String assessmentGradingComments = "";
            if (assessmentGradingData.getComments() != null) {
                assessmentGradingComments = assessmentGradingData.getComments().replaceAll("<br\\s*/>", "");
            }
            responseList.add(assessmentGradingComments);

            Long assessmentGradingId = assessmentGradingData.getAssessmentGradingId();

            HashMap studentGradingMap = getStudentGradingData(
                    assessmentGradingData.getAssessmentGradingId().toString(), false);
            ArrayList grades = new ArrayList();
            grades.addAll(studentGradingMap.values());

            Collections.sort(grades, new QuestionComparator(publishedItemHash));

            // Add the blank gradings for the questions not answered in random pools.
            if (grades.size() < publishItemSet.size()) {
                int index = -1;
                for (Object pido : publishItemSet) {
                    index++;
                    PublishedItemData pid = (PublishedItemData) pido;
                    if (index == grades.size() || ((ItemGradingData) ((List) grades.get(index)).get(0))
                            .getPublishedItemId().longValue() != pid.getItemId().longValue()) {
                        // have to add the placeholder
                        List newList = new ArrayList();
                        newList.add(new EmptyItemGrading(pid.getSection().getSequence(), pid.getItemId(),
                                pid.getSequence()));
                        grades.add(index, newList);
                    }
                }
            }

            int questionNumber = 0;
            for (Object oo : grades) {
                // There can be more than one answer to a question, e.g. for
                // FIB with more than one blank or matching questions. So sort
                // by sequence number of answer. (don't bother to sort if just 1)
                List l = (List) oo;
                if (l.size() > 1)
                    Collections.sort(l, new AnswerComparator(publishedAnswerHash));

                String maintext = "";
                String rationale = "";
                String responseComment = "";

                boolean addRationale = false;
                boolean addResponseComment = false;

                boolean matrixChoices = false;
                TreeMap responsesMap = new TreeMap();
                // loop over answers per question
                int count = 0;
                ItemGradingData grade = null;
                // boolean isAudioFileUpload = false;
                boolean isFinFib = false;

                double itemScore = 0.0d;

                // Add the missing sequences!
                // To manage emi answers, could help with others too
                Map<Long, String> emiAnswerText = new TreeMap<Long, String>();
                for (Object ooo : l) {
                    grade = (ItemGradingData) ooo;
                    if (grade == null || EmptyItemGrading.class.isInstance(grade)) {
                        continue;
                    }
                    if (grade != null && grade.getAutoScore() != null) {
                        itemScore += grade.getAutoScore().doubleValue();
                    }

                    // now print answer data
                    log.debug("<br> " + grade.getPublishedItemId() + " " + grade.getRationale() + " "
                            + grade.getAnswerText() + " " + grade.getComments() + " " + grade.getReview());
                    Long publishedItemId = grade.getPublishedItemId();
                    ItemDataIfc publishedItemData = (ItemDataIfc) publishedItemHash.get(publishedItemId);
                    Long typeId = publishedItemData.getTypeId();
                    questionNumber = publishedItemData.getSequence();
                    if (typeId.equals(TypeIfc.FILL_IN_BLANK) || typeId.equals(TypeIfc.FILL_IN_NUMERIC)
                            || typeId.equals(TypeIfc.CALCULATED_QUESTION)) {
                        log.debug("FILL_IN_BLANK, FILL_IN_NUMERIC");
                        isFinFib = true;
                        String thistext = "";

                        Long answerid = grade.getPublishedAnswerId();
                        Long sequence = null;
                        if (answerid != null) {
                            AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid);
                            if (answer != null) {
                                sequence = answer.getSequence();
                            }
                        }

                        String temptext = grade.getAnswerText();
                        if (temptext == null) {
                            temptext = "No Answer";
                        }
                        thistext = sequence + ": " + temptext;

                        if (count == 0)
                            maintext = thistext;
                        else
                            maintext = maintext + "|" + thistext;

                        count++;
                    } else if (typeId.equals(TypeIfc.MATCHING)) {
                        log.debug("MATCHING");
                        String thistext = "";

                        // for some question types we have another text field
                        Long answerid = grade.getPublishedAnswerId();
                        String temptext = "No Answer";
                        Long sequence = null;

                        if (answerid != null) {
                            AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid);
                            if (answer != null) {
                                temptext = answer.getText();
                                if (temptext == null) {
                                    temptext = "No Answer";
                                }
                                sequence = answer.getItemText().getSequence();
                            } else if (answerid == -1) {
                                temptext = "None of the Above";
                                ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash
                                        .get(grade.getPublishedItemTextId());
                                sequence = itemTextIfc.getSequence();
                            }
                        } else {
                            ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash
                                    .get(grade.getPublishedItemTextId());
                            sequence = itemTextIfc.getSequence();
                        }
                        thistext = sequence + ": " + temptext;

                        if (count == 0)
                            maintext = thistext;
                        else
                            maintext = maintext + "|" + thistext;

                        count++;
                    } else if (typeId.equals(TypeIfc.IMAGEMAP_QUESTION)) {
                        log.debug("IMAGEMAP_QUESTION");

                        ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash
                                .get(grade.getPublishedItemTextId());
                        Long sequence = itemTextIfc.getSequence();
                        String temptext = (grade.getIsCorrect()) ? "OK" : "No OK";

                        String thistext = sequence + ": " + temptext;

                        if (count == 0)
                            maintext = thistext;
                        else
                            maintext = maintext + "|" + thistext;

                        count++;
                    } else if (typeId.equals(TypeIfc.EXTENDED_MATCHING_ITEMS)) {
                        log.debug("EXTENDED_MATCHING_ITEMS");
                        String thistext = "";

                        // for some question types we have another text field
                        Long answerid = grade.getPublishedAnswerId();
                        String temptext = "No Answer";
                        Long sequence = null;

                        if (answerid != null) {
                            AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid);
                            if (answer != null) {
                                temptext = answer.getLabel();
                                if (temptext == null) {
                                    temptext = "No Answer";
                                }
                                sequence = answer.getItemText().getSequence();
                            }
                        }

                        if (sequence == null) {
                            ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash
                                    .get(grade.getPublishedItemTextId());
                            if (itemTextIfc != null) {
                                sequence = itemTextIfc.getSequence();
                            }
                        }

                        if (sequence != null) {
                            thistext = emiAnswerText.get(sequence);
                            if (thistext == null) {
                                thistext = temptext;
                            } else {
                                thistext = thistext + temptext;
                            }
                            emiAnswerText.put(sequence, thistext);
                        } else {
                            // Orphaned answer: the answer item to which it refers was removed after the
                            // assessment was taken, as a result of editing the published assessment.
                            // This behaviour should be fixed, i.e. it should not be possible to get
                            // orphaned answer item references in the database.
                            sequence = new Long(99);
                            emiAnswerText.put(sequence, "Item Removed");
                        }
                    } else if (typeId.equals(TypeIfc.MATRIX_CHOICES_SURVEY)) {
                        log.debug("MATRIX_CHOICES_SURVEY");
                        // for this kind of question a responsesMap is generated
                        matrixChoices = true;
                        Long answerid = grade.getPublishedAnswerId();
                        String temptext = "No Answer";
                        Long sequence = null;
                        if (answerid != null) {
                            AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid);
                            temptext = answer.getText();
                            if (temptext == null) {
                                temptext = "No Answer";
                            }
                            sequence = answer.getItemText().getSequence();
                        } else {
                            ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash
                                    .get(grade.getPublishedItemTextId());
                            sequence = itemTextIfc.getSequence();
                            log.debug("Answerid null for " + grade.getPublishedItemId() + ". Adding " + sequence);
                            temptext = "No Answer";
                        }
                        responsesMap.put(sequence, temptext);
                    } else if (typeId.equals(TypeIfc.AUDIO_RECORDING)) {
                        log.debug("AUDIO_RECORDING");
                        maintext = audioMessage;
                        // isAudioFileUpload = true;
                    } else if (typeId.equals(TypeIfc.FILE_UPLOAD)) {
                        log.debug("FILE_UPLOAD");
                        maintext = fileUploadMessage;
                        // isAudioFileUpload = true;
                    } else if (typeId.equals(TypeIfc.ESSAY_QUESTION)) {
                        log.debug("ESSAY_QUESTION");
                        if (grade.getAnswerText() != null) {
                            maintext = grade.getAnswerText();
                        }
                    } else {
                        log.debug("other type");
                        String thistext = "";

                        // for some question types we have another text field
                        Long answerid = grade.getPublishedAnswerId();
                        if (answerid != null) {
                            AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid);
                            if (answer != null) {
                                String temptext = answer.getText();
                                if (temptext != null)
                                    thistext = temptext;
                            } else {
                                log.warn("Published answer for " + answerid + " is null");
                            }
                        }

                        if (count == 0)
                            maintext = thistext;
                        else
                            maintext = maintext + "|" + thistext;

                        count++;
                    }

                    // taking care of rationale
                    if (!addRationale && (typeId.equals(TypeIfc.MULTIPLE_CHOICE)
                            || typeId.equals(TypeIfc.MULTIPLE_CORRECT)
                            || typeId.equals(TypeIfc.MULTIPLE_CORRECT_SINGLE_SELECTION)
                            || typeId.equals(TypeIfc.TRUE_FALSE))) {
                        log.debug(
                                "MULTIPLE_CHOICE or MULTIPLE_CORRECT or MULTIPLE_CORRECT_SINGLE_SELECTION or TRUE_FALSE");
                        if (publishedItemData.getHasRationale() != null && publishedItemData.getHasRationale()) {
                            addRationale = true;
                            rationale = grade.getRationale();
                            if (rationale == null) {
                                rationale = "";
                            }
                        }
                    }

                    // Survey - Matrix of Choices - Add Comment Field
                    if (typeId.equals(TypeIfc.MATRIX_CHOICES_SURVEY)) {
                        PublishedItemData pid = (PublishedItemData) publishedItemData;
                        if (pid.getAddCommentFlag()) {
                            addResponseComment = true;
                            if (responseComment.equals("") && grade.getAnswerText() != null) {
                                responseComment = grade.getAnswerText();
                            }
                        }
                    }
                } // inner for - answers

                if (!emiAnswerText.isEmpty()) {
                    if (maintext == null) {
                        maintext = "";
                    }
                    for (Entry<Long, String> entry : emiAnswerText.entrySet()) {
                        maintext = maintext + "|" + entry.getKey().toString() + ":" + entry.getValue();
                    }
                    if (maintext.startsWith("|")) {
                        maintext = maintext.substring(1);
                    }
                }

                Integer sectionSequenceNumber = null;
                if (grade == null || EmptyItemGrading.class.isInstance(grade)) {
                    sectionSequenceNumber = EmptyItemGrading.class.cast(grade).getSectionSequence();
                    questionNumber = EmptyItemGrading.class.cast(grade).getItemSequence();
                    // indicate that the student was not presented with this question
                    maintext = "-";
                } else {
                    sectionSequenceNumber = updateSectionScore(sectionItems, sectionScores,
                            grade.getPublishedItemId(), itemScore);
                }

                if (isFinFib && maintext.indexOf("No Answer") >= 0 && count == 1) {
                    maintext = "No Answer";
                } else if ("".equals(maintext)) {
                    maintext = "No Answer";
                }

                String itemGradingComments = "";
                // if question type is not matrix choices apply the original code
                if (!matrixChoices) {
                    responseList.add(maintext);
                    if (grade.getComments() != null) {
                        itemGradingComments = grade.getComments().replaceAll("<br\\s*/>", "");
                    }
                    responseList.add(itemGradingComments);
                } else {
                    // if there are questions not answered, a no answer response is added to the map
                    ItemDataIfc correspondingPublishedItemData = (ItemDataIfc) publishedItemHash
                            .get(grade.getPublishedItemId());
                    List correspondingItemTextArray = correspondingPublishedItemData.getItemTextArray();
                    log.debug("publishedItem is " + correspondingPublishedItemData.getText()
                            + " and number of rows " + correspondingItemTextArray.size());
                    if (responsesMap.size() < correspondingItemTextArray.size()) {
                        Iterator itItemTextHash = correspondingItemTextArray.iterator();
                        while (itItemTextHash.hasNext()) {
                            ItemTextIfc itemTextIfc = (ItemTextIfc) itItemTextHash.next();
                            if (!responsesMap.containsKey(itemTextIfc.getSequence())) {
                                log.debug("responsesMap does not contain answer to " + itemTextIfc.getText());
                                responsesMap.put(itemTextIfc.getSequence(), "No Answer");
                            }
                        }
                    }
                    Iterator it = responsesMap.entrySet().iterator();
                    while (it.hasNext()) {
                        Map.Entry e = (Map.Entry) it.next();
                        log.debug("Adding to response list " + e.getKey() + " and " + e.getValue());
                        responseList.add(e.getValue());
                        if (grade.getComments() != null) {
                            itemGradingComments = grade.getComments().replaceAll("<br\\s*/>", "");
                        }
                        responseList.add(itemGradingComments);
                        itemGradingComments = "";
                    }
                }

                if (addRationale) {
                    responseList.add(rationale);
                }

                if (addResponseComment) {
                    responseList.add(responseComment);
                }

                // Only set header based on the first item grading data
                if (firstItemGradingData) {
                    // get the pool name
                    String poolName = null;
                    for (Iterator i = publishedAssessmentSections.iterator(); i.hasNext();) {
                        PublishedSectionData psd = (PublishedSectionData) i.next();
                        if (psd.getSequence().intValue() == sectionSequenceNumber) {
                            poolName = psd.getSectionMetaDataByLabel(SectionDataIfc.POOLNAME_FOR_RANDOM_DRAW);
                        }
                    }
                    if (!matrixChoices) {
                        headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, textString,
                                questionNumber, poolString, poolName));
                        if (addRationale) {
                            headerList.add(makeHeader(partString, sectionSequenceNumber, questionString,
                                    rationaleString, questionNumber, poolString, poolName));
                        }
                        if (addResponseComment) {
                            headerList.add(makeHeader(partString, sectionSequenceNumber, questionString,
                                    responseCommentString, questionNumber, poolString, poolName));
                        }
                        headerList.add(makeHeader(partString, sectionSequenceNumber, questionString,
                                itemGradingCommentsString, questionNumber, poolString, poolName));
                    } else {
                        int numberRows = responsesMap.size();
                        for (int i = 0; i < numberRows; i = i + 1) {
                            headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString,
                                    textString, questionNumber, i + 1, poolString, poolName));
                            if (addRationale) {
                                headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString,
                                        rationaleString, questionNumber, i + 1, poolString, poolName));
                            }
                            if (addResponseComment) {
                                headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString,
                                        responseCommentString, questionNumber, i + 1, poolString, poolName));
                            }
                            headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString,
                                    itemGradingCommentsString, questionNumber, i + 1, poolString, poolName));
                        }
                    }
                }
            } // outer for - questions

            if (showPartAndTotalScoreSpreadsheetColumns) {
                if (sectionScores.size() > 1) {
                    Iterator keys = sectionScores.keySet().iterator();
                    while (keys.hasNext()) {
                        Double partScore = (Double) ((Double) sectionScores.get(keys.next())).doubleValue();
                        responseList.add(sectionScoreColumnStart++, partScore);
                    }
                }
            }

            dataList.add(responseList);

            if (firstItemGradingData) {
                firstItemGradingData = false;
            }
        }
    } // while

    if (!anonymous && useridSet.size() != 0) {
        Iterator iter = useridSet.iterator();
        while (iter.hasNext()) {
            String id = (String) iter.next();
            try {
                agentEid = userDirectoryService.getUser(id).getEid();
                firstName = userDirectoryService.getUser(id).getFirstName();
                lastName = userDirectoryService.getUser(id).getLastName();
            } catch (Exception e) {
                log.error("Cannot get user");
            }
            responseList = new ArrayList();
            responseList.add(lastName);
            responseList.add(firstName);
            responseList.add(agentEid);
            responseList.add(noSubmissionMessage);
            dataList.add(responseList);
        }
    }
    Collections.sort(dataList, new ResponsesComparator(anonymous));
    finalList.add(dataList);
    finalList.add(headerList);
    return finalList;
}
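Both sectionScores and responsesMap are TreeMaps, so part scores and matrix rows land in the spreadsheet in sequence order; missing rows are backfilled before the entrySet() loop emits the columns. A small sketch of that backfill-then-emit pattern with invented row data:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class ExportRows {
    public static void main(String[] args) {
        // Answers arrive keyed by row sequence, in arbitrary order.
        TreeMap<Long, String> responses = new TreeMap<>();
        responses.put(3L, "Agree");
        responses.put(1L, "Disagree");

        // Backfill unanswered rows, mirroring the "No Answer" handling above.
        for (long seq = 1; seq <= 3; seq++) {
            responses.putIfAbsent(seq, "No Answer");
        }

        // entrySet() then emits the columns in row order, ready for a spreadsheet row.
        List<String> row = new ArrayList<>();
        for (Map.Entry<Long, String> e : responses.entrySet()) {
            row.add(e.getKey() + "=" + e.getValue());
        }
        System.out.println(row); // [1=Disagree, 2=No Answer, 3=Agree]
    }
}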
From source file:net.massbank.validator.RecordValidator.java
/**
 * Validates the uploaded record files and reports a status for each file.
 *
 * @param db database accessor
 * @param op output stream for progress and error messages
 * @param dataPath directory containing the files to validate
 * @param registPath directory containing records pending registration
 * @param ver record format version (1 or 2)
 * @return map of file name to "status TAB details"
 * @throws IOException on I/O failure
 */
private static TreeMap<String, String> validationRecordOnline(DatabaseAccess db, PrintStream op, String dataPath,
        String registPath, int ver) throws IOException {

    op.println(msgInfo("validation archive is [" + UPLOAD_RECDATA_ZIP + "] or [" + UPLOAD_RECDATA_MSBK + "]."));
    if (ver == 1) {
        op.println(msgInfo("check record format version is [version 1]."));
    }

    final String[] dataList = (new File(dataPath)).list();
    TreeMap<String, String> validationMap = new TreeMap<String, String>();
    if (dataList.length == 0) {
        op.println(msgWarn("no file for validation."));
        return validationMap;
    }

    // ----------------------------------------------------
    // Required items per record format version
    // ----------------------------------------------------
    String[] requiredList = new String[] { // Ver.2
            "ACCESSION: ", "RECORD_TITLE: ", "DATE: ", "AUTHORS: ", "LICENSE: ", "CH$NAME: ",
            "CH$COMPOUND_CLASS: ", "CH$FORMULA: ", "CH$EXACT_MASS: ", "CH$SMILES: ", "CH$IUPAC: ",
            "AC$INSTRUMENT: ", "AC$INSTRUMENT_TYPE: ", "AC$MASS_SPECTROMETRY: MS_TYPE ",
            "AC$MASS_SPECTROMETRY: ION_MODE ", "PK$NUM_PEAK: ", "PK$PEAK: " };
    if (ver == 1) { // Ver.1
        requiredList = new String[] { "ACCESSION: ", "RECORD_TITLE: ", "DATE: ", "AUTHORS: ", "COPYRIGHT: ",
                "CH$NAME: ", "CH$COMPOUND_CLASS: ", "CH$FORMULA: ", "CH$EXACT_MASS: ", "CH$SMILES: ",
                "CH$IUPAC: ", "AC$INSTRUMENT: ", "AC$INSTRUMENT_TYPE: ", "AC$ANALYTICAL_CONDITION: MODE ",
                "PK$NUM_PEAK: ", "PK$PEAK: " };
    }
    for (int i = 0; i < dataList.length; i++) {
        String name = dataList[i];
        String status = "";
        StringBuilder detailsErr = new StringBuilder();
        StringBuilder detailsWarn = new StringBuilder();

        // Basic file checks
        File file = new File(dataPath + name);
        if (file.isDirectory()) {
            // directories cannot be validated
            status = STATUS_ERR;
            detailsErr.append("[" + name + "] is directory.");
            validationMap.put(name, status + "\t" + detailsErr.toString());
            continue;
        } else if (file.isHidden()) {
            // hidden files cannot be validated
            status = STATUS_ERR;
            detailsErr.append("[" + name + "] is hidden.");
            validationMap.put(name, status + "\t" + detailsErr.toString());
            continue;
        } else if (name.lastIndexOf(REC_EXTENSION) == -1) {
            // wrong file extension
            status = STATUS_ERR;
            detailsErr.append("file extension of [" + name + "] is not [" + REC_EXTENSION + "].");
            validationMap.put(name, status + "\t" + detailsErr.toString());
            continue;
        }

        // Read the record file
        boolean isEndTagRead = false;
        boolean isInvalidInfo = false;
        boolean isDoubleByte = false;
        ArrayList<String> fileContents = new ArrayList<String>();
        boolean existLicense = false; // LICENSE tag found (not allowed in Ver.1)
        ArrayList<String> workChName = new ArrayList<String>(); // CH$NAME values, checked against RECORD_TITLE
        String workAcInstrumentType = ""; // AC$INSTRUMENT_TYPE value, checked against RECORD_TITLE
        String workAcMsType = ""; // AC$MASS_SPECTROMETRY: MS_TYPE value, checked against RECORD_TITLE (Ver.2)
        String line = "";
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            while ((line = br.readLine()) != null) {
                if (isEndTagRead) {
                    if (!line.equals("")) {
                        isInvalidInfo = true;
                    }
                }
                if (line.startsWith("//")) {
                    isEndTagRead = true;
                }
                fileContents.add(line);

                // LICENSE (not allowed in Ver.1)
                if (line.startsWith("LICENSE: ")) {
                    existLicense = true;
                }
                // CH$NAME
                else if (line.startsWith("CH$NAME: ")) {
                    workChName.add(line.trim().replaceAll("CH\\$NAME: ", ""));
                }
                // AC$INSTRUMENT_TYPE
                else if (line.startsWith("AC$INSTRUMENT_TYPE: ")) {
                    workAcInstrumentType = line.trim().replaceAll("AC\\$INSTRUMENT_TYPE: ", "");
                }
                // AC$MASS_SPECTROMETRY: MS_TYPE (Ver.2)
                else if (ver != 1 && line.startsWith("AC$MASS_SPECTROMETRY: MS_TYPE ")) {
                    workAcMsType = line.trim().replaceAll("AC\\$MASS_SPECTROMETRY: MS_TYPE ", "");
                }

                // double-byte character check
                if (!isDoubleByte) {
                    byte[] bytes = line.getBytes("MS932");
                    if (bytes.length != line.length()) {
                        isDoubleByte = true;
                    }
                }
            }
        } catch (IOException e) {
            Logger.getLogger("global").severe("file read failed." + NEW_LINE + " " + file.getPath());
            e.printStackTrace();
            op.println(msgErr("server error."));
            validationMap.clear();
            return validationMap;
        } finally {
            try {
                if (br != null) {
                    br.close();
                }
            } catch (IOException e) {
            }
        }
        if (isInvalidInfo) {
            // content found after the end tag
            if (status.equals(""))
                status = STATUS_WARN;
            detailsWarn.append("invalid after the end tag [//].");
        }
        if (isDoubleByte) {
            // double-byte characters found
            if (status.equals(""))
                status = STATUS_ERR;
            detailsErr.append("double-byte character included.");
        }
        if (ver == 1 && existLicense) {
            // LICENSE tag is not allowed in Ver.1
            if (status.equals(""))
                status = STATUS_ERR;
            detailsErr.append("[LICENSE: ] tag can not be used in record format [version 1].");
        }

        // ----------------------------------------------------
        // Check required items and their values
        // ----------------------------------------------------
        boolean isNameCheck = false;
        int peakNum = -1;
        for (int j = 0; j < requiredList.length; j++) {
            String requiredStr = requiredList[j];
            ArrayList<String> valStrs = new ArrayList<String>(); // values of the required item
            boolean findRequired = false; // required item found
            boolean findValue = false; // value of the required item found
            boolean isPeakMode = false; // inside the PK$PEAK block
            for (int k = 0; k < fileContents.size(); k++) {
                String lineStr = fileContents.get(k);

                // stop at the end tag; in Ver.1 also stop at RELATED_RECORD
                if (lineStr.startsWith("//")) {
                    break;
                } else if (ver == 1 && lineStr.startsWith("RELATED_RECORD:")) { // Ver.1
                    break;
                }
                // in the peak block, collect every non-empty line
                else if (isPeakMode) {
                    findRequired = true;
                    if (!lineStr.trim().equals("")) {
                        valStrs.add(lineStr);
                    }
                }
                // the line carries the required item
                else if (lineStr.indexOf(requiredStr) != -1) {
                    findRequired = true;
                    if (requiredStr.equals("PK$PEAK: ")) {
                        isPeakMode = true;
                        findValue = true;
                        valStrs.add(lineStr.replace(requiredStr, ""));
                    } else {
                        String tmpVal = lineStr.replace(requiredStr, "");
                        if (!tmpVal.trim().equals("")) {
                            findValue = true;
                            valStrs.add(tmpVal);
                        }
                        break;
                    }
                }
            }
            if (!findRequired) {
                // required item is missing
                status = STATUS_ERR;
                detailsErr.append("no required item [" + requiredStr + "].");
            } else {
                if (!findValue) {
                    // required item has no value
                    status = STATUS_ERR;
                    detailsErr.append("no value of required item [" + requiredStr + "].");
                } else {
                    // ----------------------------------------------------
                    // Item-specific value checks
                    // ----------------------------------------------------
                    String val = (valStrs.size() > 0) ? valStrs.get(0) : "";
                    // ACCESSION
                    if (requiredStr.equals("ACCESSION: ")) {
                        if (!val.equals(name.replace(REC_EXTENSION, ""))) {
                            status = STATUS_ERR;
                            detailsErr.append(
                                    "value of required item [" + requiredStr + "] not correspond to file name.");
                        }
                        if (val.length() != 8) {
                            status = STATUS_ERR;
                            detailsErr.append(
                                    "value of required item [" + requiredStr + "] is 8 digits necessary.");
                        }
                    }
                    // RECORD_TITLE
                    else if (requiredStr.equals("RECORD_TITLE: ")) {
                        if (!val.equals(DEFAULT_VALUE)) {
                            if (val.indexOf(";") != -1) {
                                String[] recTitle = val.split(";");
                                if (!workChName.contains(recTitle[0].trim())) {
                                    if (status.equals(""))
                                        status = STATUS_WARN;
                                    detailsWarn.append("value of required item [" + requiredStr
                                            + "], compound name is not included in the [CH$NAME].");
                                }
                                if (!workAcInstrumentType.equals(recTitle[1].trim())) {
                                    if (status.equals(""))
                                        status = STATUS_WARN;
                                    detailsWarn.append("value of required item [" + requiredStr
                                            + "], instrument type is different from [AC$INSTRUMENT_TYPE].");
                                }
                                if (ver != 1 && !workAcMsType.equals(recTitle[2].trim())) { // Ver.2
                                    if (status.equals(""))
                                        status = STATUS_WARN;
                                    detailsWarn.append("value of required item [" + requiredStr
                                            + "], ms type is different from [AC$MASS_SPECTROMETRY: MS_TYPE].");
                                }
                            } else {
                                if (status.equals(""))
                                    status = STATUS_WARN;
                                detailsWarn.append("value of required item [" + requiredStr
                                        + "] is not record title format.");
                                if (!workChName.contains(val)) {
                                    detailsWarn.append("value of required item [" + requiredStr
                                            + "], compound name is not included in the [CH$NAME].");
                                }
                                if (!workAcInstrumentType.equals(DEFAULT_VALUE)) {
                                    detailsWarn.append("value of required item [" + requiredStr
                                            + "], instrument type is different from [AC$INSTRUMENT_TYPE].");
                                }
                                if (ver != 1 && !workAcMsType.equals(DEFAULT_VALUE)) { // Ver.2
                                    detailsWarn.append("value of required item [" + requiredStr
                                            + "], ms type is different from [AC$MASS_SPECTROMETRY: MS_TYPE].");
                                }
                            }
                        } else {
                            if (!workAcInstrumentType.equals(DEFAULT_VALUE)) {
                                if (status.equals(""))
                                    status = STATUS_WARN;
                                detailsWarn.append("value of required item [" + requiredStr
                                        + "], instrument type is different from [AC$INSTRUMENT_TYPE].");
                            }
                            if (ver != 1 && !workAcMsType.equals(DEFAULT_VALUE)) { // Ver.2
                                if (status.equals(""))
                                    status = STATUS_WARN;
                                detailsWarn.append("value of required item [" + requiredStr
                                        + "], ms type is different from [AC$MASS_SPECTROMETRY: MS_TYPE].");
                            }
                        }
                    }
                    // DATE
                    else if (requiredStr.equals("DATE: ") && !val.equals(DEFAULT_VALUE)) {
                        val = val.replace(".", "/");
                        val = val.replace("-", "/");
                        try {
                            DateFormat.getDateInstance(DateFormat.SHORT, Locale.JAPAN).parse(val);
                        } catch (ParseException e) {
                            if (status.equals(""))
                                status = STATUS_WARN;
                            detailsWarn.append("value of required item [" + requiredStr + "] is not date format.");
                        }
                    }
                    // CH$COMPOUND_CLASS
                    else if (requiredStr.equals("CH$COMPOUND_CLASS: ") && !val.equals(DEFAULT_VALUE)) {
                        if (!val.startsWith("Natural Product") && !val.startsWith("Non-Natural Product")) {
                            if (status.equals(""))
                                status = STATUS_WARN;
                            detailsWarn.append("value of required item [" + requiredStr
                                    + "] is not compound class format.");
                        }
                    }
                    // CH$EXACT_MASS
                    else if (requiredStr.equals("CH$EXACT_MASS: ") && !val.equals(DEFAULT_VALUE)) {
                        try {
                            Double.parseDouble(val);
                        } catch (NumberFormatException e) {
                            if (status.equals(""))
                                status = STATUS_WARN;
                            detailsWarn.append("value of required item [" + requiredStr + "] is not numeric.");
                        }
                    }
                    // AC$INSTRUMENT_TYPE
                    else if (requiredStr.equals("AC$INSTRUMENT_TYPE: ") && !val.equals(DEFAULT_VALUE)) {
                        if (val.trim().indexOf(" ") != -1) {
                            if (status.equals(""))
                                status = STATUS_WARN;
                            detailsWarn.append("value of required item [" + requiredStr + "] is space included.");
                        }
                    }
                    // AC$MASS_SPECTROMETRY: MS_TYPE (Ver.2)
                    else if (ver != 1 && requiredStr.equals("AC$MASS_SPECTROMETRY: MS_TYPE ")
                            && !val.equals(DEFAULT_VALUE)) {
                        boolean isMsType = true;
                        if (val.startsWith("MS")) {
                            val = val.replace("MS", "");
                            if (!val.equals("")) {
                                try {
                                    Integer.parseInt(val);
                                } catch (NumberFormatException e) {
                                    isMsType = false;
                                }
                            }
                        } else {
                            isMsType = false;
                        }
                        if (!isMsType) {
                            if (status.equals(""))
                                status = STATUS_WARN;
                            detailsWarn.append("value of required item [" + requiredStr + "] is not \"MSn\".");
                        }
                    }
                    // AC$MASS_SPECTROMETRY: ION_MODE (Ver.2) / AC$ANALYTICAL_CONDITION: MODE (Ver.1)
                    else if ((ver != 1 && requiredStr.equals("AC$MASS_SPECTROMETRY: ION_MODE ")
                            && !val.equals(DEFAULT_VALUE))
                            || (ver == 1 && requiredStr.equals("AC$ANALYTICAL_CONDITION: MODE ")
                                    && !val.equals(DEFAULT_VALUE))) {
                        if (!val.equals("POSITIVE") && !val.equals("NEGATIVE")) {
                            if (status.equals(""))
                                status = STATUS_WARN;
                            detailsWarn.append("value of required item [" + requiredStr
                                    + "] is not \"POSITIVE\" or \"NEGATIVE\".");
                        }
                    }
                    // PK$NUM_PEAK
                    else if (requiredStr.equals("PK$NUM_PEAK: ") && !val.equals(DEFAULT_VALUE)) {
                        try {
                            peakNum = Integer.parseInt(val);
                        } catch (NumberFormatException e) {
                            status = STATUS_ERR;
                            detailsErr.append("value of required item [" + requiredStr + "] is not numeric.");
                        }
                    }
                    // PK$PEAK
                    else if (requiredStr.equals("PK$PEAK: ")) {
                        if (valStrs.size() == 0 || !valStrs.get(0).startsWith("m/z int. rel.int.")) {
                            status = STATUS_ERR;
                            detailsErr.append(
                                    "value of required item [PK$PEAK: ] , the first line is not \"PK$PEAK: m/z int. rel.int.\".");
                        } else {
                            boolean isNa = false;
                            String peak = "";
                            String mz = "";
                            String intensity = "";
                            boolean mzDuplication = false;
                            boolean mzNotNumeric = false;
                            boolean intensityNotNumeric = false;
                            boolean invalidFormat = false;
                            HashSet<String> mzSet = new HashSet<String>();
                            for (int l = 0; l < valStrs.size(); l++) {
                                peak = valStrs.get(l).trim();
                                // N/A
                                if (peak.indexOf(DEFAULT_VALUE) != -1) {
                                    isNa = true;
                                    break;
                                }
                                if (l == 0) {
                                    continue; // skip the "m/z int. rel.int." header line
                                }

                                // a peak line must be "m/z intensity rel.intensity", space separated
                                if (peak.indexOf(" ") != -1) {
                                    mz = peak.split(" ")[0];
                                    if (!mzSet.add(mz)) {
                                        mzDuplication = true;
                                    }
                                    try {
                                        Double.parseDouble(mz);
                                    } catch (NumberFormatException e) {
                                        mzNotNumeric = true;
                                    }
                                    intensity = peak.split(" ")[1];
                                    try {
                                        Double.parseDouble(intensity);
                                    } catch (NumberFormatException e) {
                                        intensityNotNumeric = true;
                                    }
                                } else {
                                    invalidFormat = true;
                                }
                                if (mzDuplication && mzNotNumeric && intensityNotNumeric && invalidFormat) {
                                    break;
                                }
                            }
                            if (isNa) { // PK$PEAK is N/A
                                if (peakNum != -1) { // PK$NUM_PEAK should then also be N/A
                                    if (status.equals(""))
                                        status = STATUS_WARN;
                                    detailsWarn.append("value of required item [PK$NUM_PEAK: ] is mismatch or \""
                                            + DEFAULT_VALUE + "\".");
                                }
                                if (valStrs.size() - 1 > 0) { // PK$PEAK should carry no other values
                                    if (status.equals(""))
                                        status = STATUS_WARN;
                                    detailsWarn.append(
                                            "value of required item [PK$NUM_PEAK: ] is invalid peak information exists.");
                                }
                            } else {
                                if (mzDuplication) {
                                    status = STATUS_ERR;
                                    detailsErr.append(
                                            "mz value of required item [" + requiredStr + "] is duplication.");
                                }
                                if (mzNotNumeric) {
                                    status = STATUS_ERR;
                                    detailsErr.append(
                                            "mz value of required item [" + requiredStr + "] is not numeric.");
                                }
                                if (intensityNotNumeric) {
                                    status = STATUS_ERR;
                                    detailsErr.append("intensity value of required item [" + requiredStr
                                            + "] is not numeric.");
                                }
                                if (invalidFormat) {
                                    status = STATUS_ERR;
                                    detailsErr.append(
                                            "value of required item [" + requiredStr + "] is not peak format.");
                                }
                                if (peakNum != 0 && valStrs.size() - 1 == 0) {
                                    // no peak values; they should be "N/A" unless PK$NUM_PEAK is 0
                                    if (status.equals(""))
                                        status = STATUS_WARN;
                                    detailsWarn.append(
                                            "value of required item [PK$PEAK: ] is no value. at that time, please add \""
                                                    + DEFAULT_VALUE + "\". ");
                                }
                                if (peakNum != valStrs.size() - 1) {
                                    if (status.equals(""))
                                        status = STATUS_WARN;
                                    detailsWarn.append("value of required item [PK$NUM_PEAK: ] is mismatch or \""
                                            + DEFAULT_VALUE + "\".");
                                }
                            }
                        }
                    }
                }
            }
        }

        String details = detailsErr.toString() + detailsWarn.toString();
        if (status.equals("")) {
            status = STATUS_OK;
            details = " ";
        }
        validationMap.put(name, status + "\t" + details);
    }

    // ----------------------------------------------------
    // Check for IDs that are already registered
    // ----------------------------------------------------
    // Collect registered IDs from the database
    HashSet<String> regIdList = new HashSet<String>();
    String[] sqls = { "SELECT ID FROM SPECTRUM ORDER BY ID", "SELECT ID FROM RECORD ORDER BY ID",
            "SELECT ID FROM PEAK GROUP BY ID ORDER BY ID", "SELECT ID FROM CH_NAME ID ORDER BY ID",
            "SELECT ID FROM CH_LINK ID ORDER BY ID",
            "SELECT ID FROM TREE WHERE ID IS NOT NULL AND ID<>'' ORDER BY ID" };
    for (int i = 0; i < sqls.length; i++) {
        String execSql = sqls[i];
        ResultSet rs = null;
        try {
            rs = db.executeQuery(execSql);
            while (rs.next()) {
                String idStr = rs.getString("ID");
                regIdList.add(idStr);
            }
        } catch (SQLException e) {
            Logger.getLogger("global").severe(" sql : " + execSql);
            e.printStackTrace();
            op.println(msgErr("database access error."));
            return new TreeMap<String, String>();
        } finally {
            try {
                if (rs != null) {
                    rs.close();
                }
            } catch (SQLException e) {
            }
        }
    }
    // Collect IDs of records pending registration
    final String[] recFileList = (new File(registPath)).list();
    for (int i = 0; i < recFileList.length; i++) {
        String name = recFileList[i];
        File file = new File(registPath + File.separator + name);
        if (!file.isFile() || file.isHidden() || name.lastIndexOf(REC_EXTENSION) == -1) {
            continue;
        }
        String idStr = name.replace(REC_EXTENSION, "");
        regIdList.add(idStr);
    }

    // Mark records whose ID is already registered
    for (Map.Entry<String, String> e : validationMap.entrySet()) {
        String statusStr = e.getValue().split("\t")[0];
        if (statusStr.equals(STATUS_ERR)) {
            continue;
        }
        String nameStr = e.getKey();
        String idStr = e.getKey().replace(REC_EXTENSION, "");
        String detailsStr = e.getValue().split("\t")[1];
        if (regIdList.contains(idStr)) {
            statusStr = STATUS_WARN;
            detailsStr += "id [" + idStr + "] of file name [" + nameStr + "] already registered.";
            validationMap.put(nameStr, statusStr + "\t" + detailsStr);
        }
    }

    return validationMap;
}