Usage examples for java.util.stream.Collectors.joining
public static Collector<CharSequence, ?, String> joining(CharSequence delimiter)
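The collector concatenates the stream elements, in encounter order, with the delimiter between them; a three-argument overload additionally takes a prefix and suffix. Before the project examples below, here is a minimal, self-contained sketch (not taken from any of those projects) of both forms:

import java.util.List;
import java.util.stream.Collectors;

public class JoiningSketch {
    public static void main(String[] args) {
        List<String> words = List.of("alpha", "beta", "gamma");
        // delimiter only
        String csv = words.stream().collect(Collectors.joining(", "));               // "alpha, beta, gamma"
        // delimiter plus prefix and suffix
        String wrapped = words.stream().collect(Collectors.joining(", ", "[", "]")); // "[alpha, beta, gamma]"
        System.out.println(csv);
        System.out.println(wrapped);
    }
}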
From source file:org.ambraproject.wombat.service.remote.SolrSearchApiImpl.java
@Override
public Map<?, ?> lookupArticlesByDois(List<String> dois, Site site) throws IOException {
    String doiQueryString = dois.stream().map(doi -> "id:" + QueryParser.escape(doi))
            .collect(Collectors.joining(" OR "));
    ArticleSearchQuery.Builder query = ArticleSearchQuery.builder().setQuery(doiQueryString).setStart(0)
            .setRows(dois.size());
    return search(query.build(), site);
}
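The method above joins one "id:..." clause per DOI into a single Solr query string. A stripped-down sketch of the same pattern, with the escaping step omitted and the id field name assumed purely for illustration:

List<String> dois = List.of("10.1371/example.1", "10.1371/example.2");
String doiQuery = dois.stream()
        .map(doi -> "id:" + doi)              // escaping omitted in this sketch
        .collect(Collectors.joining(" OR ")); // "id:10.1371/example.1 OR id:10.1371/example.2"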
From source file:com.intuit.wasabi.tests.service.priority.BasicPriorityTest.java
@Test(groups = { "priorityListChange" }, dependsOnGroups = { "setup", "priorityChange" }, dataProvider = "Application", dataProviderClass = PriorityDataProvider.class) public void t_invliadUuids(String appName) { String exclusion = "{\"experimentIDs\": [" + validExperimentsLists.stream().map(s -> "\"" + s.id + "\"").collect(Collectors.joining(",")) + "]}"; exclusion = exclusion.replace(validExperimentsLists.get(0).id, "bbbac42e-50c5-4c9a-a398-8588bf6bbe33"); response = apiServerConnector//from w w w . jav a 2s .c o m .doPut("applications/" + validExperimentsLists.get(0).applicationName + "/priorities", exclusion); assertReturnCode(response, HttpStatus.SC_NO_CONTENT); response = apiServerConnector .doGet("applications/" + validExperimentsLists.get(0).applicationName + "/priorities"); LOGGER.debug("output: " + response.asString()); assertReturnCode(response, HttpStatus.SC_OK); Type listType = new TypeToken<Map<String, ArrayList<Map<String, Object>>>>() { }.getType(); Map<String, List<Map<String, Object>>> resultMap = new Gson().fromJson(response.asString(), listType); List<Map<String, Object>> prioritizedExperiments = resultMap.get("prioritizedExperiments"); Assert.assertEquals(prioritizedExperiments.size(), validExperimentsLists.size()); }
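The test above wraps the joined, quoted IDs in "[" and "]" by string concatenation; the three-argument joining overload can produce the same bracketed shape directly. A hypothetical fragment (identifiers invented for illustration, assuming the usual java.util and java.util.stream imports):

List<String> ids = List.of("id-1", "id-2", "id-3");
String experimentIds = ids.stream()
        .map(id -> "\"" + id + "\"")
        .collect(Collectors.joining(",", "[", "]")); // ["id-1","id-2","id-3"]
String payload = "{\"experimentIDs\": " + experimentIds + "}";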
From source file:org.jaqpot.core.service.client.jpdi.JPDIClientImpl.java
@Override
public Future<Model> train(Dataset dataset, Algorithm algorithm, Map<String, Object> parameters,
        String predictionFeature, MetaInfo modelMeta, String taskId) {

    CompletableFuture<Model> futureModel = new CompletableFuture<>();

    TrainingRequest trainingRequest = new TrainingRequest();
    trainingRequest.setDataset(dataset);
    trainingRequest.setParameters(parameters);
    trainingRequest.setPredictionFeature(predictionFeature);
    // String trainingRequestString = serializer.write(trainingRequest);

    final HttpPost request = new HttpPost(algorithm.getTrainingService());

    PipedOutputStream out = new PipedOutputStream();
    PipedInputStream in;
    try {
        in = new PipedInputStream(out);
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
        return futureModel;
    }
    InputStreamEntity entity = new InputStreamEntity(in, ContentType.APPLICATION_JSON);
    entity.setChunked(true);

    request.setEntity(entity);
    request.addHeader("Accept", "application/json");

    Future futureResponse = client.execute(request, new FutureCallback<HttpResponse>() {

        @Override
        public void completed(final HttpResponse response) {
            futureMap.remove(taskId);
            int status = response.getStatusLine().getStatusCode();
            try {
                InputStream responseStream = response.getEntity().getContent();

                switch (status) {
                case 200:
                case 201:
                    TrainingResponse trainingResponse = serializer.parse(responseStream, TrainingResponse.class);
                    Model model = new Model();
                    model.setId(randomStringGenerator.nextString(20));
                    model.setActualModel(trainingResponse.getRawModel());
                    model.setPmmlModel(trainingResponse.getPmmlModel());
                    model.setAdditionalInfo(trainingResponse.getAdditionalInfo());
                    model.setAlgorithm(algorithm);
                    model.setParameters(parameters);
                    model.setDatasetUri(dataset != null ? dataset.getDatasetURI() : null);

                    // Check if independedFeatures of model exist in dataset
                    List<String> filteredIndependedFeatures = new ArrayList<String>();
                    if (dataset != null && dataset.getFeatures() != null
                            && trainingResponse.getIndependentFeatures() != null)
                        for (String feature : trainingResponse.getIndependentFeatures()) {
                            for (FeatureInfo featureInfo : dataset.getFeatures()) {
                                if (feature.equals(featureInfo.getURI()))
                                    filteredIndependedFeatures.add(feature);
                            }
                        }
                    model.setIndependentFeatures(filteredIndependedFeatures);

                    model.setDependentFeatures(Arrays.asList(predictionFeature));
                    model.setMeta(modelMeta);

                    List<String> predictedFeatures = new ArrayList<>();
                    for (String featureTitle : trainingResponse.getPredictedFeatures()) {
                        Feature predictionFeatureResource = featureHandler.findByTitleAndSource(featureTitle,
                                "algorithm/" + algorithm.getId());
                        if (predictionFeatureResource == null) {
                            // Create the prediction features (POST /feature)
                            String predFeatID = randomStringGenerator.nextString(12);
                            predictionFeatureResource = new Feature();
                            predictionFeatureResource.setId(predFeatID);
                            predictionFeatureResource.setPredictorFor(predictionFeature);
                            predictionFeatureResource.setMeta(MetaInfoBuilder.builder()
                                    .addSources(/*messageBody.get("base_uri") + */"algorithm/" + algorithm.getId())
                                    .addComments("Feature created to hold predictions by algorithm with ID "
                                            + algorithm.getId())
                                    .addTitles(featureTitle).addSeeAlso(predictionFeature)
                                    .addCreators(algorithm.getMeta().getCreators()).build());
                            /* Create feature */
                            featureHandler.create(predictionFeatureResource);
                        }
                        predictedFeatures.add(baseURI + "feature/" + predictionFeatureResource.getId());
                    }
                    model.setPredictedFeatures(predictedFeatures);
                    futureModel.complete(model);
                    break;
                case 400:
                    String message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new BadRequestException(message));
                    break;
                case 500:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                    break;
                default:
                    message = new BufferedReader(new InputStreamReader(responseStream)).lines()
                            .collect(Collectors.joining("\n"));
                    futureModel.completeExceptionally(new InternalServerErrorException(message));
                }
            } catch (IOException | UnsupportedOperationException ex) {
                futureModel.completeExceptionally(ex);
            }
        }

        @Override
        public void failed(final Exception ex) {
            futureMap.remove(taskId);
            futureModel.completeExceptionally(ex);
        }

        @Override
        public void cancelled() {
            futureMap.remove(taskId);
            futureModel.cancel(true);
        }
    });

    serializer.write(trainingRequest, out);
    try {
        out.close();
    } catch (IOException ex) {
        futureModel.completeExceptionally(ex);
    }

    futureMap.put(taskId, futureResponse);
    return futureModel;
}
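The error branches above use a common idiom for draining a response body into a String: BufferedReader.lines() yields a Stream<String>, and joining("\n") reassembles it with newlines. A minimal fragment of the same idiom (an explicit UTF-8 charset is added here as an assumption; it belongs inside a method that may throw IOException):

try (InputStream responseStream = new ByteArrayInputStream("line 1\nline 2".getBytes(StandardCharsets.UTF_8));
     BufferedReader reader = new BufferedReader(new InputStreamReader(responseStream, StandardCharsets.UTF_8))) {
    String body = reader.lines().collect(Collectors.joining("\n")); // "line 1\nline 2"
    System.out.println(body);
}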
From source file:org.apache.nifi.minifi.c2.service.ConfigService.java
@GET
public Response getConfig(@Context HttpServletRequest request, @Context HttpHeaders httpHeaders,
        @Context UriInfo uriInfo) {
    try {
        authorizer.authorize(SecurityContextHolder.getContext().getAuthentication(), uriInfo);
    } catch (AuthorizationException e) {
        logger.warn(HttpRequestUtil.getClientString(request) + " not authorized to access " + uriInfo, e);
        return Response.status(403).build();
    }
    Map<String, List<String>> parameters = new HashMap<>();
    for (Map.Entry<String, List<String>> entry : uriInfo.getQueryParameters().entrySet()) {
        parameters.put(entry.getKey(), entry.getValue());
    }
    List<MediaType> acceptValues = httpHeaders.getAcceptableMediaTypes();
    boolean defaultAccept = false;
    if (acceptValues.size() == 0) {
        acceptValues = Arrays.asList(MediaType.WILDCARD_TYPE);
        defaultAccept = true;
    }
    if (logger.isDebugEnabled()) {
        StringBuilder builder = new StringBuilder("Handling request from ")
                .append(HttpRequestUtil.getClientString(request)).append(" with parameters ").append(parameters)
                .append(" and Accept");
        if (defaultAccept) {
            builder = builder.append(" default value");
        }
        builder = builder.append(": ")
                .append(acceptValues.stream().map(Object::toString).collect(Collectors.joining(", ")));
        logger.debug(builder.toString());
    }
    try {
        ConfigurationProviderValue configurationProviderValue = configurationCache
                .get(new ConfigurationProviderKey(acceptValues, parameters));
        Configuration configuration = configurationProviderValue.getConfiguration();
        Response.ResponseBuilder ok = Response.ok();
        ok = ok.header("X-Content-Version", configuration.getVersion());
        ok = ok.type(configurationProviderValue.getMediaType());
        byte[] buffer = new byte[1024];
        int read;
        try (InputStream inputStream = configuration.getInputStream();
                ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
            while ((read = inputStream.read(buffer)) >= 0) {
                outputStream.write(buffer, 0, read);
                md5.update(buffer, 0, read);
                sha256.update(buffer, 0, read);
            }
            ok = ok.header("Content-MD5", bytesToHex(md5.digest()));
            ok = ok.header("X-Content-SHA-256", bytesToHex(sha256.digest()));
            ok = ok.entity(outputStream.toByteArray());
        } catch (ConfigurationProviderException | IOException | NoSuchAlgorithmException e) {
            logger.error("Error reading or checksumming configuration file", e);
            throw new WebApplicationException(500);
        }
        return ok.build();
    } catch (AuthorizationException e) {
        logger.warn(HttpRequestUtil.getClientString(request) + " not authorized to access " + uriInfo, e);
        return Response.status(403).build();
    } catch (InvalidParameterException e) {
        logger.info(HttpRequestUtil.getClientString(request) + " made invalid request with "
                + HttpRequestUtil.getQueryString(request), e);
        return Response.status(400).entity("Invalid request.").build();
    } catch (ConfigurationProviderException e) {
        logger.warn("Unable to get configuration.", e);
        return Response.status(500).build();
    } catch (ExecutionException | UncheckedExecutionException e) {
        Throwable cause = e.getCause();
        if (cause instanceof WebApplicationException) {
            throw (WebApplicationException) cause;
        }
        logger.error(HttpRequestUtil.getClientString(request) + " made request with "
                + HttpRequestUtil.getQueryString(request) + " that caused error.", cause);
        return Response.status(500).entity("Internal error").build();
    }
}
From source file:de.codesourcery.spring.contextrewrite.XMLRewrite.java
private void mergeAttributes(Node source, Node target, Document targetDocument) {
    for (int i = 0, len = source.getAttributes().getLength(); i < len; i++) {
        final Node attrToMerge = source.getAttributes().item(i);
        final String attrName = attrToMerge.getNodeName();
        final String attrValue = attrToMerge.getNodeValue();
        final Optional<Node> existingAttr = findAttribute(target, attrToMerge);
        if (existingAttr.isPresent()) {
            final String[] existingValues = split(existingAttr.get().getNodeValue());
            if (attrName.endsWith(":schemaLocation") || attrName.equals("schemaLocation")) {
                final List<Pair> existingPairs = toPairs(existingValues);
                final List<Pair> newPairs = toPairs(split(attrValue));
                final String toAdd = newPairs.stream()
                        .filter(p -> existingPairs.stream().noneMatch(x -> x.sameFirst(p)))
                        .map(p -> p.first + " " + p.second).collect(Collectors.joining(" "));
                if (!toAdd.isEmpty()) {
                    existingAttr.get().setNodeValue(existingAttr.get().getNodeValue() + " " + toAdd);
                }
                continue;
            }
            // merge value
            if (Stream.of(existingValues).noneMatch(value -> value.equals(attrToMerge.getNodeValue()))) {
                if (existingValues.length == 0) {
                    debug("adding missing attribute value " + attrName + "=" + attrValue);
                    existingAttr.get().setNodeValue(attrValue);
                } else {
                    debug("Appending existing attribute " + existingAttr.get().getNodeName() + "=" + attrValue);
                    existingAttr.get().setNodeValue(existingAttr.get().getNodeValue() + " " + attrValue);
                }
            } else {
                debug("Already present: attribute " + existingAttr.get().getNodeName() + "=" + attrValue);
            }
        } else {
            debug("Adding new attribute " + attrName + "=" + attrValue);
            final Node cloned = targetDocument.adoptNode(attrToMerge.cloneNode(true));
            target.getAttributes().setNamedItem(cloned);
        }
    }
}
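The schemaLocation branch above appends only the (namespace, location) pairs that are not already present, joined by a single space. A simplified sketch of that merge idea using plain strings instead of DOM nodes (names and data invented for illustration; the real code compares parsed pairs rather than using contains):

// Existing attribute value: whitespace-separated (namespace, location) pairs
String existing = "http://ns-a a.xsd";
List<String> newPairs = List.of("http://ns-a a-other.xsd", "http://ns-b b.xsd");
String toAdd = newPairs.stream()
        .filter(pair -> !existing.contains(pair.split(" ")[0])) // skip namespaces already mapped
        .collect(Collectors.joining(" "));
String merged = toAdd.isEmpty() ? existing : existing + " " + toAdd;
// merged == "http://ns-a a.xsd http://ns-b b.xsd"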
From source file:com.esri.geoportal.commons.agp.client.AgpClient.java
/**
 * Updates an item.
 * @param owner user name
 * @param folderId folder id (optional)
 * @param itemId item id
 * @param title title
 * @param description description
 * @param text text
 * @param thumbnailUrl thumbnail URL
 * @param itemType item type (must be a URL type)
 * @param extent extent
 * @param typeKeywords type keywords
 * @param tags tags
 * @param token token
 * @return add item response
 * @throws URISyntaxException if invalid URL
 * @throws IOException if operation fails
 */
public ItemResponse updateItem(String owner, String folderId, String itemId, String title, String description,
        String text, URL thumbnailUrl, ItemType itemType, Double[] extent, String[] typeKeywords, String[] tags,
        String token) throws IOException, URISyntaxException {
    URIBuilder builder = new URIBuilder(updateItemUri(owner, StringUtils.trimToNull(folderId), itemId));

    HttpPost req = new HttpPost(builder.build());
    HashMap<String, String> params = new HashMap<>();
    params.put("f", "json");
    params.put("title", title);
    params.put("description", description);
    params.put("type", itemType.getTypeName());
    params.put("text", text);
    if (thumbnailUrl != null) {
        params.put("thumbnailurl", thumbnailUrl.toExternalForm());
    }
    if (extent != null && extent.length == 4) {
        params.put("extent",
                Arrays.asList(extent).stream().map(Object::toString).collect(Collectors.joining(",")));
    }
    if (typeKeywords != null) {
        params.put("typeKeywords", Arrays.asList(typeKeywords).stream().collect(Collectors.joining(",")));
    }
    if (tags != null) {
        params.put("tags", Arrays.asList(tags).stream().collect(Collectors.joining(",")));
    }
    params.put("token", token);

    req.setEntity(createEntity(params));

    return execute(req, ItemResponse.class);
}
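For the String[] parameters above, streaming and collecting with joining(",") is equivalent to String.join(",", ...); the Double[] extent needs a map(Object::toString) step first because joining only accepts CharSequence elements. A brief sketch of both cases (values invented, assuming the usual java.util and java.util.stream imports):

String[] tags = { "geoportal", "metadata" };
Double[] extent = { -180.0, -90.0, 180.0, 90.0 };
String tagCsv = String.join(",", tags);                 // "geoportal,metadata"
String extentCsv = Arrays.stream(extent)
        .map(Object::toString)
        .collect(Collectors.joining(","));              // "-180.0,-90.0,180.0,90.0"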
From source file:com.thinkbiganalytics.metadata.jpa.support.GenericQueryDslFilter.java
/**
 * Converts a passed-in filter string to a list of criteria objects.
 *
 * @param filterString a filter, <column><operator><value>. Example: jobinstance.name==jobName,jobExecutionId>=200
 *                     will search for all jobs named 'jobName' with jobExecutionId >= 200
 * @return a list of criteria objects
 */
public static List<SearchCriteria> parseFilterString(String filterString) {
    List<SearchCriteria> searchCriterias = new ArrayList<>();
    if (StringUtils.isNotBlank(filterString)) {
        //first match and split on , for various filters
        String[] filterConditions = filterString.split(",(?=(?:[^\\\"]*\\\"[^\\\"]*\\\")*[^\\\"]*$)");
        List<String> filterConditionsList = Arrays.asList(filterConditions);
        //Pattern used to match the <column><operator><value>
        String validOperatorsRegEx = operators.keySet().stream().map(key -> key)
                .collect(Collectors.joining("|"));
        Pattern columnOperatorValuePattern = Pattern.compile("(.*)(" + validOperatorsRegEx + ")(.*)");
        filterConditionsList.stream().forEach(filter -> {
            Matcher matcher = columnOperatorValuePattern.matcher(filter);
            while (matcher.find()) {
                String field = matcher.group(1);
                String operator = matcher.group(2);
                String value = matcher.group(3);
                searchCriterias.add(new SearchCriteria(field, operator, value));
            }
        });
    }
    return searchCriterias;
}
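Joining operator tokens with "|" to build a regex alternation, as above, assumes the tokens contain no unescaped regex metacharacters; Pattern.quote can guard against that. A small sketch with an invented operator set:

Set<String> operators = new LinkedHashSet<>(List.of("==", "!=", ">=", "<=", ">", "<"));
String alternation = operators.stream()
        .map(Pattern::quote) // escape any regex metacharacters in the tokens
        .collect(Collectors.joining("|"));
Pattern filterPattern = Pattern.compile("(.*)(" + alternation + ")(.*)");
System.out.println(filterPattern.matcher("jobExecutionId>=200").matches()); // true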
From source file:com.ikanow.aleph2.graph.titan.utils.TestMiscTitanProperties.java
@SuppressWarnings("unchecked") //@org.junit.Test public void test_elementProperties_vertex() throws IOException, InterruptedException { final String tmpdir = System.getProperty("java.io.tmpdir") + "/titan-test-" + UuidUtils.get().getRandomUuid(); try {/* w w w. j av a2 s.c o m*/ FileUtils.deleteDirectory(new File(tmpdir)); } catch (Exception e) { } TitanGraph titan = TitanFactory.build().set("storage.backend", "inmemory") .set("index.search.backend", "elasticsearch").set("index.search.elasticsearch.local-mode", true) .set("index.search.directory", tmpdir) .set("index.search.cluster-name", UuidUtils.get().getRandomUuid()) .set("index.search.ignore-cluster-name", false).set("index.search.elasticsearch.client-only", false) //.set("query.force-index", true) //(disabled for testing) .open(); { TitanManagement mgmt = titan.openManagement(); // Without ES: //mgmt.makePropertyKey("paths").dataType(String.class).cardinality(Cardinality.SET).make(); //with ES as a search back-end, can do SET/LIST mgmt.buildIndex("pathQuery", Vertex.class).addKey( mgmt.makePropertyKey("paths").dataType(String.class).cardinality(Cardinality.SET).make(), Mapping.STRING.asParameter()).buildMixedIndex("search"); //mgmt.buildIndex("pathQuery_nonAnalyzed", Vertex.class).addKey(mgmt.makePropertyKey("paths").dataType(String.class).cardinality(Cardinality.SET).make()).buildMixedIndex("search"); //.addKey("_b", Mapping.STRING.asParameter()).buildMixedIndex("search") mgmt.commit(); } // Just check I can do this multiple times: try { TitanManagement mgmt = titan.openManagement(); mgmt.makePropertyKey("paths").dataType(String.class).cardinality(Cardinality.SET).make(); mgmt.commit(); } catch (SchemaViolationException e) { } // (can but throws this exception, which is fine) Thread.sleep(1500L); // (mapping not generated in time?) 
buildSmallGraph(titan); final TitanTransaction tx = titan.buildTransaction().start(); final TitanVertex v = Optionals.<TitanVertex>streamOf(tx.query().has("type", "rabbit").vertices(), false) .findFirst().get(); // These will fail because the property has not been declared // v.property(org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality.set, "animal", "mouse"); // v.property(org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality.set, "animal", "cat"); // v.property(org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality.set, "animal", "mouse"); // This all works as expected v.property(org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality.set, "paths", "mouse"); v.property(org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality.set, "paths", "cat"); v.property(org.apache.tinkerpop.gremlin.structure.VertexProperty.Cardinality.set, "paths", "mouse"); // These will overwrite // v.property("_b", "mouse"); // v.property("_b", "cat"); // v.property("_b", "mouse"); // This does not work, so properties will be protected with a single bucket access v.property("type", "rabbit", "paths", Arrays.asList("a", "b")); //"[a, b]" (ie a string of a list) //v.property("type", "rabbit", "_b", Stream.of("a", "b").toArray()); // [L;Object] System.out.println(showElement(titan, v)); tx.commit(); // OK let's double check how we retrieve a list of properties final TitanVertex v2 = Optionals .<TitanVertex>streamOf(titan.query().has("type", "rabbit").vertices(), false).findFirst().get(); // Use "properties" with a single key to get the list of buckets System.out.println("paths = " + Optionals.streamOf(v2.properties("paths"), false) .map(vp -> vp.value().toString()).collect(Collectors.joining(";"))); // Double check a query on _b works assertEquals(0L, Optionals.streamOf(titan.query().has("paths", "cat").vertices(), false).count());//all these queries System.out.println("ES queries in 3s...:"); Thread.sleep(3000L); // (wait for ES to complete) assertEquals(1L, Optionals.streamOf(titan.indexQuery("pathQuery", "v.paths:cat").vertices(), false).count()); assertEquals(1L, Optionals.streamOf(titan.indexQuery("pathQuery", "v.paths:(rabbit cat)").vertices(), false) .count()); assertEquals(1L, Optionals.streamOf(titan.query().has("paths", Text.CONTAINS, "cat").vertices(), false).count()); final TitanTransaction tx2 = titan.buildTransaction().start(); //these all intermittently fail?! (well consistently either work or fail for some period, v unclear why) // but starting from a transaction instead of titan as below seems to fix this, so something in titan must not be getting updated assertEquals(1L, Optionals.streamOf(tx2.query().has("paths").vertices(), false).count()); // (Ah interestingly this doesn't seem to accept a scan?!) assertEquals(1L, Optionals.streamOf(tx2.query().has("paths", "cat").vertices(), false).count()); assertEquals(1L, Optionals.streamOf(tx2.query().has("paths", Cmp.EQUAL, "cat").vertices(), false).count()); // note this doesn't use the ES query annoyingly assertEquals(1L, Optionals .streamOf(tx2.query().has("paths", Contain.IN, Arrays.asList("cat")).vertices(), false).count()); assertEquals(1L, Optionals .streamOf(tx2.query().has("paths", Contain.IN, Arrays.asList("rabbit", "cat")).vertices(), false) .count()); tx2.commit(); // need to figure out how to handle analyzed vs non-analyzed, I think it's TEXT vs STRING? 
// https://groups.google.com/forum/#!topic/aureliusgraphs/VGv-RJwt8zI // actual docs: http://s3.thinkaurelius.com/docs/titan/1.0.0/index-parameters.html // graph.makeKey("name").dataType(String.class).indexed("search", Element.class, new Parameter[]{Parameter.of(Mapping.MAPPING_PREFIX,Mapping.STRING)}) .single().make(); // Can you do a simple query on a list type (ie single cardinality)? Nope only over the entire list { assertEquals(0L, Optionals.streamOf(titan.query().has("set", "val1").vertices(), false).count()); assertEquals(1L, Optionals .streamOf(titan.query().has("set", Arrays.asList("val1", "val2").toArray()).vertices(), false) .count()); } }
From source file:cn.edu.zjnu.acm.judge.core.Judger.java
private boolean runProcess(RunRecord runRecord) throws IOException {
    Path dataPath = runRecord.getDataPath();
    Objects.requireNonNull(dataPath, "dataPath");
    Path specialFile = dataPath.resolve(JudgeConfiguration.VALIDATE_FILE_NAME);
    boolean isspecial = Files.exists(specialFile);
    if (!Files.isDirectory(dataPath)) {
        log.error("{} not exists", runRecord.getDataPath());
        return false;
    }
    List<Path[]> files = new ArrayList<>(20);
    try (DirectoryStream<Path> listFiles = Files.newDirectoryStream(dataPath)) {
        log.debug("dataPath = {}", dataPath);
        for (Path inFile : listFiles) {
            String inFileName = inFile.getFileName().toString();
            if (!inFileName.toLowerCase().endsWith(".in")) {
                continue;
            }
            Path outFile = dataPath.resolve(inFileName.substring(0, inFileName.length() - 3) + ".out");
            if (!Files.exists(outFile)) {
                continue;
            }
            files.add(new Path[] { inFile, outFile });
        }
    }
    int casenum = files.size();
    log.debug("casenum = {}", casenum);
    if (casenum == 0) {
        return false;
    }
    int accept = 0;
    ArrayList<String> details = new ArrayList<>(casenum << 2);
    long time = 0;
    long memory = 0;
    String command = runRecord.getLanguage().getExecuteCommand();
    Path work = judgeConfiguration.getWorkDirectory(runRecord.getSubmissionId());
    command = !StringUtils.isEmptyOrWhitespace(command) ? command
            : work.resolve("Main." + runRecord.getLanguage().getExecutableExtension()).toString();
    long extTime = runRecord.getLanguage().getExtTime();
    long castTimeLimit = runRecord.getTimeLimit() * runRecord.getLanguage().getTimeFactor() + extTime;
    long extraMemory = runRecord.getLanguage().getExtMemory();
    long caseMemoryLimit = (runRecord.getMemoryLimit() + extraMemory) * 1024;
    Options[] optionses = new Options[casenum];
    for (int cas = 0; cas < casenum; cas++) {
        Path[] entry = files.get(cas);
        Path in = entry[0];
        Path standard = entry[1];
        Path progOutput = work.resolve(standard.getFileName());
        optionses[cas] = Options.builder().timeLimit(castTimeLimit) // time limit
                .memoryLimit(caseMemoryLimit) // memory in bytes
                .outputLimit(16 * 1024 * 1024) // 16M
                .command(command).workDirectory(work).inputFile(in).outputFile(progOutput)
                .standardOutput(standard).errFile(getNull(work)).build();
    }
    String detailMessageStr = null;
    String scorePerCase = new DecimalFormat("0.#").format(100.0 / casenum);
    final Validator validator = isspecial ? new SpecialValidator(specialFile.toString(), work)
            : new SimpleValidator();
    try {
        ExecuteResult[] ers = JudgeBridge.INSTANCE.judge(optionses, false, validator);
        for (ExecuteResult er : ers) {
            long tim1 = er.getTime() - extTime;
            tim1 = Math.max(0, tim1);
            long mem1 = er.getMemory() / 1024 - extraMemory;
            mem1 = Math.max(0, mem1);
            String message = er.getMessage();
            int caseResult = getResultFromExecuteResult(er);
            time = Math.max(time, tim1);
            memory = Math.max(memory, mem1);
            log.debug("message = {}, time = {}, memory = {}", message, time, memory);
            details.add(String.valueOf(caseResult));
            if (caseResult == 0) {
                details.add(scorePerCase);
            } else {
                details.add("0");
            }
            details.add(String.valueOf(tim1));
            details.add(String.valueOf(mem1));
            if (caseResult == 0) {
                ++accept;
            }
        }
    } catch (JudgeException | RuntimeException | Error ex) {
        log.error("", ex);
        accept = ResultType.SYSTEM_ERROR;
        detailMessageStr = ex.getMessage();
    }
    log.debug("{}", details);
    int score = accept >= 0 ? (int) Math.round(accept * 100.0 / casenum) : accept;
    if (score == 0 && accept != 0) {
        ++score;
    } else if (score == 100 && accept != casenum) {
        --score;
    }
    submissionMapper.updateResult(runRecord.getSubmissionId(), score, time, memory);
    submissionMapper.saveDetail(runRecord.getSubmissionId(), detailMessageStr != null ? detailMessageStr
            : details.stream().map(String::valueOf).collect(Collectors.joining(",")));
    updateSubmissionStatus(runRecord);
    return score == 100;
}
From source file:org.codice.ddf.confluence.source.ConfluenceSource.java
public void setExpandedSections(List<String> expandedSections) {
    if (expandedSections == null) {
        this.expandedSections = "";
        return;
    }
    this.expandedSections = expandedSections.stream().collect(Collectors.joining(","));
}
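A closing note on the pattern above: for a List<String> with no per-element mapping, collect(Collectors.joining(",")) and String.join(",", list) produce the same result; the collector form is mainly useful when a stream already exists or the elements need transforming first. A tiny sketch (section names invented for illustration):

List<String> expandedSections = List.of("body.storage", "metadata.labels", "space");
String viaCollector = expandedSections.stream().collect(Collectors.joining(","));
String viaJoin = String.join(",", expandedSections);
System.out.println(viaCollector.equals(viaJoin)); // true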