List of usage examples for java.util.SortedSet.size()
int size();
From source file: Arguments.java
/** * Returns a verbose String that documents this Arguments instance. It summarizes your options * into required, non-required, and positional fields. For long args it also tells you which * fields should have a value if it is present. */// w w w . j a va 2 s . co m public String getDocumentation() { StringBuilder buf = new StringBuilder("\n"); int maxFlagSize = 0; SortedSet<String> reqList = new TreeSet<String>(); SortedSet<String> nonReqList = new TreeSet<String>(); if (programName.length() > 0) { buf.append(programName + ": "); } if (shortProgramDoc.length() > 0) { buf.append(shortProgramDoc + "\n"); } else { buf.append("Complete documentation...\n"); } // add flags to required/non-required sets for (String docMe : docs.keySet()) { maxFlagSize = Math.max(maxFlagSize, docMe.length()); if (requireFlag.contains(docMe)) { reqList.add(docMe); } else { nonReqList.add(docMe); } } // add positional fields to required/non-required sets for (int i = 0; i < positionalDocs.size(); i++) { List<String> posDoc = positionalDocs.get(i); String pseudoFlag = getPseudoFlag(i, posDoc.get(0)); maxFlagSize = Math.max(maxFlagSize, pseudoFlag.length()); } if (reqList.size() > 0) { buf.append("codeTitle>Required flags:\n\n"); buf.append(getDocumentation(reqList, maxFlagSize)); } if (nonReqList.size() > 0) { buf.append("\n Non-required flags:\n\n"); buf.append(getDocumentation(nonReqList, maxFlagSize)); } if (positionalDocs.size() > 0) { buf.append("\n Positional fields:\n\n"); for (int i = 0; i < positionalDocs.size(); i++) { List<String> posDoc = positionalDocs.get(i); String pseudoFlag = getPseudoFlag(i, posDoc.get(0)); buf.append(formatDocumentation(pseudoFlag, space, maxFlagSize, posDoc.get(1), 70)); } } return buf.toString(); }
From source file: org.apache.hive.ptest.execution.JIRAService.java
/**
 * Builds the JIRA comment body summarizing a precommit test run: overall +1/-1
 * verdicts, the failed-test list, links to Jenkins results/console/logs, any
 * extra messages, and the attachment id line.
 *
 * @param error whether the build itself exited with an error
 * @param numTestsExecuted total number of tests that ran
 * @param failedTests sorted names of failed/errored tests
 * @param messages additional messages to append (trimmed via trimMessages)
 * @param addedTests tests added or modified by the patch
 * @return the full comment text, lines joined with "\n"
 */
@VisibleForTesting
String generateComments(boolean error, int numTestsExecuted, SortedSet<String> failedTests,
        List<String> messages, Set<String> addedTests) {
    BuildInfo buildInfo = formatBuildTag(mBuildTag);
    String buildTagForLogs = formatBuildTagForLogs(mBuildTag);
    List<String> comments = Lists.newArrayList();
    // Two leading blank lines before the body.
    comments.add("");
    comments.add("");
    if (!mPatch.isEmpty()) {
        comments.add("Here are the results of testing the latest attachment:");
        comments.add(mPatch);
    }
    comments.add("");
    if (error && numTestsExecuted == 0) {
        // Build died before running anything: single -1, no per-test detail.
        comments.add(formatError("-1 due to build exiting with an error"));
    } else {
        // Verdict on whether the patch adds/modifies tests.
        if (addedTests.size() > 0) {
            comments.add(formatSuccess("+1 due to " + addedTests.size() + " test(s) being added or modified."));
        } else {
            comments.add(formatError("-1 due to no test(s) being added or modified."));
        }
        comments.add("");
        // Verdict on the test results themselves.
        if (numTestsExecuted == 0) {
            comments.add(formatError("-1 due to no tests executed"));
        } else {
            if (failedTests.isEmpty()) {
                comments.add(formatSuccess("+1 due to " + numTestsExecuted + " tests passed"));
            } else {
                comments.add(formatError("-1 due to " + failedTests.size() + " failed/errored test(s), "
                        + numTestsExecuted + " tests executed"));
                comments.add("*Failed tests:*");
                // {noformat} is JIRA markup for a preformatted block.
                comments.add("{noformat}");
                comments.addAll(failedTests);
                comments.add("{noformat}");
            }
        }
    }
    comments.add("");
    comments.add("Test results: " + mJenkinsURL + "/" + buildInfo.getFormattedBuildTag() + "/testReport");
    comments.add("Console output: " + mJenkinsURL + "/" + buildInfo.getFormattedBuildTag() + "/console");
    comments.add("Test logs: " + mLogsURL + buildTagForLogs);
    comments.add("");
    if (!messages.isEmpty()) {
        comments.add("Messages:");
        comments.add("{noformat}");
        comments.addAll(trimMessages(messages));
        comments.add("{noformat}");
        comments.add("");
    }
    comments.add("This message is automatically generated.");
    String attachmentId = parseAttachementId(mPatch);
    comments.add("");
    comments.add("ATTACHMENT ID: " + attachmentId + " - " + buildInfo.getBuildName());
    mLogger.info("Comment: " + Joiner.on("\n").join(comments));
    return Joiner.on("\n").join(comments);
}
From source file: org.zanata.client.commands.push.RawPushCommand.java
/**
 * Executes the raw (project type 'file') push: validates options and the source
 * directory, resolves the file types accepted by the server, collects matching
 * source documents, optionally resumes from --from-doc, confirms with the user,
 * then pushes source and/or translation documents one by one.
 *
 * @throws IOException on I/O failure while pushing a document
 */
@Override
public void run() throws IOException {
    PushCommand.logOptions(log, getOpts());
    consoleInteractor.printfln(DisplayMode.Warning, "Using EXPERIMENTAL project type 'file'.");
    List<FileTypeInfo> serverAcceptedTypes = fileTypeInfoList(client);
    // --list-file-types short-circuits the push entirely.
    if (getOpts().getListFileTypes()) {
        printFileTypes(serverAcceptedTypes);
        return;
    }
    if (!pushSource() && !pushTrans()) {
        throw new RuntimeException("Invalid option for push type");
    }
    // only supporting single module for now
    File sourceDir = getOpts().getSrcDir();
    if (!sourceDir.exists()) {
        boolean enableModules = getOpts().getEnableModules();
        // TODO(files) remove warning when modules supported
        if (enableModules) {
            consoleInteractor.printfln(DisplayMode.Warning,
                    "enableModules=true but multi-modules not yet supported for this command. Using single module push.");
        }
        throw new RuntimeException("directory '" + sourceDir + "' does not exist - check "
                + getOpts().getSrcDirParameterName() + " option");
    }
    RawPushStrategy strat = new RawPushStrategy();
    strat.setPushOptions(getOpts());
    // Intersect user-requested types with what the server accepts.
    ImmutableList<FileTypeInfo> actualFileTypes = getActualFileTypes(serverAcceptedTypes,
            getOpts().getFileTypes());
    if (actualFileTypes.isEmpty()) {
        log.info("no valid types specified; nothing to do");
        return;
    }
    ImmutableList.Builder<String> sourceFileExtensionsBuilder = ImmutableList.builder();
    actualFileTypes
            .forEach(fileTypeInfo -> sourceFileExtensionsBuilder.addAll(fileTypeInfo.getSourceExtensions()));
    ImmutableList<String> sourceFileExtensions = sourceFileExtensionsBuilder.build();
    String[] srcFiles = strat.getSrcFiles(sourceDir, getOpts().getIncludes(), getOpts().getExcludes(),
            sourceFileExtensions, true, getOpts().getCaseSensitive());
    // TreeSet gives a stable, sorted push order.
    SortedSet<String> localDocNames = new TreeSet<String>(Arrays.asList(srcFiles));
    // TODO(files) handle obsolete document deletion
    consoleInteractor.printfln(DisplayMode.Warning,
            "Obsolete document removal is not yet implemented, no documents will be removed from the server.");
    SortedSet<String> docsToPush = localDocNames;
    // --from-doc resumes a previously-interrupted push from a given document.
    if (getOpts().getFromDoc() != null) {
        if (!localDocNames.contains(getOpts().getFromDoc())) {
            log.error("Document with id {} not found, unable to start push from unknown document. Aborting.",
                    getOpts().getFromDoc());
            // FIXME should this be throwing an exception to properly abort?
            // need to see behaviour with modules
            return;
        }
        // tailSet is inclusive: the from-doc itself is pushed again.
        docsToPush = localDocNames.tailSet(getOpts().getFromDoc());
        int numSkippedDocs = localDocNames.size() - docsToPush.size();
        log.info("Skipping {} document(s) before {}.", numSkippedDocs, getOpts().getFromDoc());
    }
    if (docsToPush.isEmpty()) {
        log.info("no documents in module: {}; nothing to do", getOpts().getCurrentModule());
        return;
    } else {
        // Echo the full document list, marking which ones will be pushed.
        consoleInteractor.printfln("Found source documents:");
        for (String docName : localDocNames) {
            if (docsToPush.contains(docName)) {
                FileTypeName fileType = getFileTypeNameBySourceExtension(actualFileTypes,
                        FilenameUtils.getExtension(docName));
                consoleInteractor.printfln(" "
                        + Messages.format("push.info.documentToPush", docName, fileType.getName()));
            } else {
                consoleInteractor.printfln(Messages.format("push.info.skipDocument", docName));
            }
        }
    }
    // Ask the user to confirm the (destructive) overwrite before doing anything.
    if (pushTrans()) {
        if (getOpts().getLocaleMapList() == null)
            throw new ConfigException("pushType set to '" + getOpts().getPushType()
                    + "', but project has no locales configured");
        consoleInteractor.printfln(DisplayMode.Warning,
                Messages.format("push.warn.overrideTranslations", getOpts().getPushType()));
        if (getOpts().getPushType() == PushPullType.Both) {
            confirmWithUser("This will overwrite existing documents AND TRANSLATIONS on the server.\n");
            // , and delete obsolete documents.\n");
        } else if (getOpts().getPushType() == PushPullType.Trans) {
            confirmWithUser("This will overwrite existing TRANSLATIONS on the server.\n");
        }
    } else {
        // confirmWithUser("This will overwrite existing documents on the server, and delete obsolete documents.\n");
        confirmWithUser("This will overwrite existing documents on the server.\n");
    }
    boolean hasErrors = false;
    for (final String localDocName : docsToPush) {
        try {
            final String srcExtension = FilenameUtils.getExtension(localDocName);
            final FileTypeInfo fileType = getFileType(actualFileTypes, srcExtension);
            final String qualifiedDocName = qualifiedDocName(localDocName);
            if (pushSource()) {
                if (!getOpts().isDryRun()) {
                    boolean sourcePushed = pushSourceDocumentToServer(sourceDir, localDocName,
                            qualifiedDocName, fileType.getType().getName());
                    // ClientUtility.checkResult(putResponse, uri);
                    // Failures are recorded but the loop continues, so one bad
                    // document does not abort the whole push.
                    if (!sourcePushed) {
                        hasErrors = true;
                    }
                } else {
                    log.info("pushing source doc [qualifiedname={}] to server (skipped due to dry run)",
                            qualifiedDocName);
                }
            }
            if (pushTrans()) {
                Optional<String> translationFileExtension = getTranslationFileExtension(fileType, srcExtension);
                strat.visitTranslationFiles(localDocName, new TranslationFilesVisitor() {
                    @Override
                    public void visit(LocaleMapping locale, File translatedDoc) {
                        log.info("pushing {} translation of {}", locale.getLocale(), qualifiedDocName);
                        pushDocumentToServer(qualifiedDocName, fileType.getType().getName(),
                                locale.getLocale(), translatedDoc);
                    }
                }, translationFileExtension);
            }
        } catch (IOException | RuntimeException e) {
            // Tell the user how to resume from this document, then abort.
            log.error("Operation failed: " + e.getMessage() + "\n\n"
                    + " To retry from the last document, please add the option: {}\n",
                    getOpts().buildFromDocArgument(localDocName));
            throw new RuntimeException(e.getMessage(), e);
        }
    }
    if (hasErrors) {
        throw new RuntimeException("Push completed with errors, see log for details.");
    }
}
From source file: no.abmu.abmstatistikk.annualstatistic.service.hibernate2.AnnualStatisticServiceHelper.java
protected SchemaList getReportDataBySchemaTypeAndYear(SortedSet<OrgUnitReport> orgSortedSet, String schemaTypeName, int year) { Assert.checkRequiredArgument("orgSortedSet", orgSortedSet); Assert.checkRequiredArgument("schemaTypeName", schemaTypeName); Assert.checkRequiredArgument("year", year); int numberOfReports = orgSortedSet.size(); SchemaList schemaList = new SchemaList(numberOfReports); ProgressStatistics ps = new ProgressStatistics(numberOfReports); for (OrgUnitReport orgUnitReport : orgSortedSet) { Map<String, Object> report = orgUnitReport.createReport(); fillInReportValues(report, schemaTypeName, year); schemaList.add(report);//from w ww .java 2s . com ps.increaseCountAndDumpStatus(); } ps.dumpStatus(true); return schemaList; }
From source file: no.abmu.abmstatistikk.annualstatistic.service.hibernate2.AnnualStatisticServiceHelper.java
public SchemaList getCommentReportAndLastYearForSchemaTypeAndYear( SortedSet<SimpleOrgUnitReport> simpleOrgSortedSet, String schemaTypeName, int year) { Assert.checkRequiredArgument("simpleOrgSortedSet", simpleOrgSortedSet); Assert.checkRequiredArgument("schemaTypeName", schemaTypeName); Assert.checkRequiredArgument("year", year); int numberOfReports = simpleOrgSortedSet.size(); SchemaList schemaList = new SchemaList(numberOfReports); ProgressStatistics ps = new ProgressStatistics(numberOfReports); for (SimpleOrgUnitReport simpleOrgUnitReport : simpleOrgSortedSet) { Map<String, Object> report = fillInCommentReportAndLastYear(simpleOrgUnitReport, schemaTypeName, year); schemaList.add(report);//from ww w . ja v a 2 s. c o m ps.increaseCountAndDumpStatus(); } ps.dumpStatus(true); return schemaList; }
From source file: org.torproject.ernie.web.RelayServlet.java
/**
 * Renders an HTML page for a Tor relay identified by (a prefix of) its
 * fingerprint: resolves a partial fingerprint to a unique one, lists the
 * consensuses that last referenced the relay, and prints its last server and
 * extra-info descriptors with download links.
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {
    /* Measure how long it takes to process this request. */
    long started = System.currentTimeMillis();
    /* Get print writer and start writing response. */
    PrintWriter out = response.getWriter();
    writeHeader(out);
    /* Check fingerprint parameter: must be 8-40 lowercase hex characters. */
    String fingerprintParameter = request.getParameter("fingerprint");
    boolean validParameter = true;
    if (fingerprintParameter == null || fingerprintParameter.length() < 8
            || fingerprintParameter.length() > 40) {
        validParameter = false;
    } else {
        Pattern fingerprintPattern = Pattern.compile("^[0-9a-f]{8,40}$");
        if (!fingerprintPattern.matcher(fingerprintParameter.toLowerCase()).matches()) {
            validParameter = false;
        }
    }
    if (!validParameter) {
        out.write(" <br/><p>Sorry, \"" + fingerprintParameter
                + "\" is not a valid relay fingerprint. Please provide at "
                + "least the first 8 hex characters of a relay " + "fingerprint.</p>\n");
        writeFooter(out);
        return;
    }
    /* If we were only given a partial fingerprint, look up all
     * fingerprints starting with that part to see if it's unique in the
     * last 30 days. */
    String fingerprint = fingerprintParameter.toLowerCase();
    if (fingerprint.length() < 40) {
        SortedSet<String> allFingerprints = new TreeSet<String>();
        try {
            // NOTE(review): queries throughout this method are built by string
            // concatenation. The fingerprint is regex-validated above, which
            // mitigates injection, but a PreparedStatement would be safer.
            // NOTE(review): conn/statement/rs are not closed if an exception is
            // thrown before the explicit close() calls — consider
            // try-with-resources / a finally block.
            Connection conn = this.ds.getConnection();
            Statement statement = conn.createStatement();
            String query = "SELECT DISTINCT fingerprint FROM statusentry " + "WHERE validafter >= '"
                    + this.dayFormat.format(started - 30L * 24L * 60L * 60L * 1000L)
                    + " 00:00:00' AND fingerprint LIKE '" + fingerprint + "%'";
            ResultSet rs = statement.executeQuery(query);
            while (rs.next()) {
                allFingerprints.add(rs.getString(1));
            }
            rs.close();
            statement.close();
            conn.close();
        } catch (SQLException e) {
            out.println("<p><font color=\"red\"><b>Warning: </b></font>We "
                    + "experienced an unknown database problem while looking up "
                    + "the relay with fingerprint starting with " + fingerprintParameter
                    + ". If this problem persists, please "
                    + "<a href=\"mailto:tor-assistants@freehaven.net\">let us " + "know</a>!</p>\n");
            writeFooter(out);
            return;
        }
        if (allFingerprints.size() == 0) {
            // Prefix matched nothing.
            out.write("<p>No relay found with fingerprint starting with " + fingerprintParameter
                    + " in the last 30 days.</p>");
            writeFooter(out);
            return;
        } else if (allFingerprints.size() > 1) {
            // Ambiguous prefix: offer the candidates as links and stop.
            out.println("<p>The fingerprint part " + fingerprintParameter
                    + " is not unique for relays running in the last 30 days. "
                    + "Please choose one of the following fingerprints:</p><ul>");
            for (String f : allFingerprints) {
                out.println("<li><a href=\"relay.html?fingerprint=" + f + "\">" + f + "</a></li>");
            }
            out.write("</ul><br/>");
            writeFooter(out);
            return;
        } else {
            // Unique match: continue with the full fingerprint.
            fingerprint = allFingerprints.first();
        }
    }
    /* Print out in which consensuses this relay was last contained. */
    boolean foundRelay = false;
    String lastDescriptor = null;
    try {
        Connection conn = this.ds.getConnection();
        Statement statement = conn.createStatement();
        String query = "SELECT validafter, rawdesc FROM statusentry WHERE " + "validafter >= '"
                + this.dayFormat.format(started - 30L * 24L * 60L * 60L * 1000L)
                + " 00:00:00' AND fingerprint = '" + fingerprint
                + "' ORDER BY validafter DESC LIMIT 3";
        ResultSet rs = statement.executeQuery(query);
        boolean printedDescription = false;
        while (rs.next()) {
            foundRelay = true;
            if (!printedDescription) {
                out.println("<p>The relay with fingerprint "
                        + (fingerprintParameter.length() < 40 ? "starting " : "") + "with "
                        + fingerprintParameter + " was last "
                        + "referenced in the following relay lists:</p>");
                printedDescription = true;
            }
            String validAfter = rs.getTimestamp(1).toString().substring(0, 19);
            out.println(" <br/><tt>valid-after " + "<a href=\"consensus?valid-after="
                    + validAfter.replaceAll(":", "-").replaceAll(" ", "-") + "\" target=\"_blank\">"
                    + validAfter + "</a></tt><br/>");
            byte[] rawStatusEntry = rs.getBytes(2);
            try {
                String statusEntryLines = new String(rawStatusEntry, "US-ASCII");
                String[] lines = statusEntryLines.split("\n");
                for (String line : lines) {
                    if (line.startsWith("r ")) {
                        // The "r" line carries the base64 descriptor digest in
                        // parts[3]; decode to the 40-char hex id used in links.
                        String[] parts = line.split(" ");
                        String descriptor = String.format("%040x",
                                new BigInteger(1, Base64.decodeBase64(parts[3] + "==")));
                        if (lastDescriptor == null) {
                            // Rows are ordered newest-first, so the first "r"
                            // line seen belongs to the most recent consensus.
                            lastDescriptor = descriptor;
                        }
                        out.println(" <tt>r " + parts[1] + " " + parts[2] + " "
                                + "<a href=\"descriptor.html?desc-id=" + descriptor
                                + "\" target=\"_blank\">" + parts[3] + "</a> " + parts[4] + " "
                                + parts[5] + " " + parts[6] + " " + parts[7] + " " + parts[8]
                                + "</tt><br/>");
                    } else {
                        out.println(" <tt>" + line + "</tt><br/>");
                    }
                }
            } catch (UnsupportedEncodingException e) {
                /* This shouldn't happen, because we know that ASCII is
                 * supported. */
            }
        }
        rs.close();
        statement.close();
        conn.close();
    } catch (SQLException e) {
        out.println("<p><font color=\"red\"><b>Warning: </b></font>We "
                + "experienced an unknown database problem while looking up "
                + "the relay with fingerprint "
                + (fingerprintParameter.length() < 40 ? "starting with " : "") + fingerprintParameter
                + ". If this problem persists, please "
                + "<a href=\"mailto:tor-assistants@freehaven.net\">let us " + "know</a>!</p>\n");
        writeFooter(out);
        return;
    }
    /* If we didn't find this relay, stop here. */
    if (!foundRelay) {
        out.write("<p>No relay found with fingerprint "
                + (fingerprintParameter.length() < 40 ? "starting with " : "") + fingerprintParameter
                + " in the last 30 days.</p>");
        writeFooter(out);
        return;
    }
    /* Look up last server and extra-info descriptor in the database. */
    String query = null, descriptor = null, nickname = null, published = null, extrainfo = null;
    byte[] rawDescriptor = null, rawExtrainfo = null;
    if (lastDescriptor != null) {
        try {
            Connection conn = this.ds.getConnection();
            Statement statement = conn.createStatement();
            query = "SELECT descriptor, nickname, published, extrainfo, "
                    + "rawdesc FROM descriptor WHERE descriptor = '" + lastDescriptor + "'";
            ResultSet rs = statement.executeQuery(query);
            if (rs.next()) {
                descriptor = rs.getString(1);
                nickname = rs.getString(2);
                published = rs.getTimestamp(3).toString().substring(0, 19);
                extrainfo = rs.getString(4);
                rawDescriptor = rs.getBytes(5);
                // Reuse the same statement for the extra-info lookup; this
                // implicitly closes the previous ResultSet.
                query = "SELECT rawdesc FROM extrainfo WHERE extrainfo = '" + extrainfo + "'";
                rs = statement.executeQuery(query);
                if (rs.next()) {
                    rawExtrainfo = rs.getBytes(1);
                }
            }
            rs.close();
            statement.close();
            conn.close();
        } catch (SQLException e) {
            out.write("<br/><p><font color=\"red\"><b>Warning: </b></font>"
                    + "Internal server error when looking up descriptor. The " + "query was '" + query
                    + "'. If this problem persists, " + "please "
                    + "<a href=\"mailto:tor-assistants@freehaven.net\">let us " + "know</a>!</p>\n");
            writeFooter(out);
            return;
        }
    }
    /* If no descriptor was found, stop here. */
    // NOTE(review): when this branch is taken, descriptor is null, so the
    // message reads "identifier null"; lastDescriptor was likely intended —
    // verify against upstream before changing.
    if (descriptor == null) {
        out.write("<p>No descriptor found with identifier " + descriptor
                + " which was referenced in the last relay list.</p>");
        writeFooter(out);
        return;
    }
    /* Print out both server and extra-info descriptor. */
    out.write("<br/><p>The last referenced server descriptor published " + "by this relay is:</p>");
    BufferedReader br = new BufferedReader(new StringReader(new String(rawDescriptor, "US-ASCII")));
    String line = null;
    while ((line = br.readLine()) != null) {
        out.println(" <tt>" + line + "</tt><br/>");
    }
    br.close();
    if (rawExtrainfo != null) {
        out.println("<br/><p>Together with this server descriptor, the "
                + "relay published the following extra-info descriptor:</p>");
        br = new BufferedReader(new StringReader(new String(rawExtrainfo, "US-ASCII")));
        line = null;
        while ((line = br.readLine()) != null) {
            out.println(" <tt>" + line + "</tt><br/>");
        }
    }
    /* Provide links to raw descriptors, too. */
    out.println("<br/><p>Note that the descriptor" + (rawExtrainfo != null ? "s have" : " has")
            + " been converted to ASCII and reformatted "
            + "for display purposes. You may also download the raw " + "<a href=\"serverdesc?desc-id="
            + descriptor + "\" target=\"_blank\">server " + "descriptor</a>"
            + (extrainfo != null ? " and <a href=\"extrainfodesc?desc-id=" + extrainfo
                    + "\" target=\"_blank\">extra-info descriptor</a>" : "")
            + " as " + (extrainfo != null ? "they were" : "it was")
            + " published to the directory authorities.</p>");
    /* Display total lookup time on the results page. */
    long searchTime = System.currentTimeMillis() - started;
    out.write(" <br/><p>Looking up this relay took us "
            + String.format("%d.%03d", searchTime / 1000, searchTime % 1000) + " seconds.</p>\n");
    /* Finish writing response. */
    writeFooter(out);
}
From source file: org.hyperic.hq.measurement.server.session.AvailabilityManagerImpl.java
private AvailabilityDataRLE findAvailBefore(DataPoint state, Map<Integer, TreeSet<AvailabilityDataRLE>> currAvails) { Integer mId = state.getMeasurementId(); TreeSet<AvailabilityDataRLE> rles = currAvails.get(mId); long start = state.getTimestamp(); AvailabilityDataRLE tmp = new AvailabilityDataRLE(); // headSet is inclusive so we need to subtract 1 from start tmp.setStartime(start - 1);// ww w .jav a2s .co m SortedSet<AvailabilityDataRLE> set = rles.headSet(tmp); if (set.size() == 0) { return null; } return set.last(); }
From source file: org.apache.geode.management.internal.beans.DistributedSystemBridge.java
/** * @return a list of region names hosted on the system *//*from w w w . j a v a2s .c o m*/ public String[] listAllRegionPaths() { if (distrRegionMap.values().size() == 0) { return ManagementConstants.NO_DATA_STRING; } // Sort region paths SortedSet<String> regionPathsSet = new TreeSet<>(); for (DistributedRegionBridge bridge : distrRegionMap.values()) { regionPathsSet.add(bridge.getFullPath()); } String[] regionPaths = new String[regionPathsSet.size()]; regionPaths = regionPathsSet.toArray(regionPaths); regionPathsSet.clear(); return regionPaths; }
From source file: org.apache.nifi.registry.service.RegistryService.java
/**
 * Creates a new version (snapshot) of a versioned flow: validates the input,
 * verifies the bucket/flow exist and belong together, enforces one-up version
 * numbering, persists the serialized flow contents, records the snapshot
 * metadata, and returns the snapshot populated with its bucket and flow.
 *
 * @param flowSnapshot the snapshot to create; must not be null
 * @return the same snapshot instance with bucket and flow populated
 */
public VersionedFlowSnapshot createFlowSnapshot(final VersionedFlowSnapshot flowSnapshot) {
    if (flowSnapshot == null) {
        throw new IllegalArgumentException("Versioned flow snapshot cannot be null");
    }
    // validation will ensure that the metadata and contents are not null
    if (flowSnapshot.getSnapshotMetadata() != null) {
        // Server-side timestamp: ignore whatever the client sent.
        flowSnapshot.getSnapshotMetadata().setTimestamp(System.currentTimeMillis());
    }
    // these fields aren't used for creation
    flowSnapshot.setFlow(null);
    flowSnapshot.setBucket(null);
    validate(flowSnapshot, "Cannot create versioned flow snapshot");
    // Single-writer section: version checks and persistence must be atomic.
    writeLock.lock();
    try {
        final VersionedFlowSnapshotMetadata snapshotMetadata = flowSnapshot.getSnapshotMetadata();
        // ensure the bucket exists
        final BucketEntity existingBucket = metadataService
                .getBucketById(snapshotMetadata.getBucketIdentifier());
        if (existingBucket == null) {
            LOGGER.warn("The specified bucket id [{}] does not exist.",
                    snapshotMetadata.getBucketIdentifier());
            throw new ResourceNotFoundException("The specified bucket ID does not exist in this registry.");
        }
        // ensure the flow exists
        final FlowEntity existingFlow = metadataService.getFlowById(snapshotMetadata.getFlowIdentifier());
        if (existingFlow == null) {
            LOGGER.warn("The specified flow id [{}] does not exist.", snapshotMetadata.getFlowIdentifier());
            throw new ResourceNotFoundException("The specified flow ID does not exist in this bucket.");
        }
        if (!existingBucket.getId().equals(existingFlow.getBucketId())) {
            throw new IllegalStateException("The requested flow is not located in the given bucket");
        }
        // convert the set of FlowSnapshotEntity to set of VersionedFlowSnapshotMetadata
        final SortedSet<VersionedFlowSnapshotMetadata> sortedSnapshots = new TreeSet<>();
        final List<FlowSnapshotEntity> existingFlowSnapshots = metadataService
                .getSnapshots(existingFlow.getId());
        if (existingFlowSnapshots != null) {
            existingFlowSnapshots.stream()
                    .forEach(s -> sortedSnapshots.add(DataModelMapper.map(existingBucket, s)));
        }
        // if we already have snapshots we need to verify the new one has the correct version
        // NOTE(review): sortedSnapshots is assigned above and can never be null
        // here; the null check is redundant (harmless).
        if (sortedSnapshots != null && sortedSnapshots.size() > 0) {
            final VersionedFlowSnapshotMetadata lastSnapshot = sortedSnapshots.last();
            if (snapshotMetadata.getVersion() <= lastSnapshot.getVersion()) {
                throw new IllegalStateException(
                        "A Versioned flow snapshot with the same version already exists: "
                                + snapshotMetadata.getVersion());
            }
            if (snapshotMetadata.getVersion() > (lastSnapshot.getVersion() + 1)) {
                throw new IllegalStateException(
                        "Version must be a one-up number, last version was " + lastSnapshot.getVersion()
                                + " and version for this snapshot was " + snapshotMetadata.getVersion());
            }
        } else if (snapshotMetadata.getVersion() != 1) {
            throw new IllegalStateException("Version of first snapshot must be 1");
        }
        // serialize the snapshot
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        processGroupSerializer.serialize(flowSnapshot.getFlowContents(), out);
        // save the serialized snapshot to the persistence provider
        final Bucket bucket = DataModelMapper.map(existingBucket);
        final VersionedFlow versionedFlow = DataModelMapper.map(existingBucket, existingFlow);
        final FlowSnapshotContext context = new StandardFlowSnapshotContext.Builder(bucket, versionedFlow,
                snapshotMetadata).build();
        flowPersistenceProvider.saveFlowContent(context, out.toByteArray());
        // create snapshot in the metadata provider
        metadataService.createFlowSnapshot(DataModelMapper.map(snapshotMetadata));
        // update the modified date on the flow
        metadataService.updateFlow(existingFlow);
        // get the updated flow, we need to use "with counts" here so we can return this is a part of the response
        final FlowEntity updatedFlow = metadataService
                .getFlowByIdWithSnapshotCounts(snapshotMetadata.getFlowIdentifier());
        if (updatedFlow == null) {
            throw new ResourceNotFoundException(
                    "Versioned flow does not exist for identifier " + snapshotMetadata.getFlowIdentifier());
        }
        final VersionedFlow updatedVersionedFlow = DataModelMapper.map(existingBucket, updatedFlow);
        flowSnapshot.setBucket(bucket);
        flowSnapshot.setFlow(updatedVersionedFlow);
        return flowSnapshot;
    } finally {
        writeLock.unlock();
    }
}
From source file: org.hyperic.hq.measurement.server.session.AvailabilityManagerImpl.java
private AvailabilityDataRLE findAvailAfter(DataPoint state, Map<Integer, TreeSet<AvailabilityDataRLE>> currAvails) { final Integer mId = state.getMeasurementId(); final TreeSet<AvailabilityDataRLE> rles = currAvails.get(mId); final long start = state.getTimestamp(); final AvailabilityDataRLE tmp = new AvailabilityDataRLE(); // tailSet is inclusive so we need to add 1 to start tmp.setStartime(start + 1);/*from w ww .j ava 2s . com*/ final SortedSet<AvailabilityDataRLE> set = rles.tailSet(tmp); if (set.size() == 0) { return null; } return (AvailabilityDataRLE) set.first(); }