Usage examples for java.util.function.Consumer.accept(T t)
void accept(T t);
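Before the real-world examples, a minimal, self-contained sketch of the contract (hypothetical values, not taken from any example below): accept(t) performs a side effect on its argument and returns nothing.

import java.util.function.Consumer;

public class ConsumerAcceptDemo {
    public static void main(String[] args) {
        Consumer<String> printer = s -> System.out.println("got: " + s);
        printer.accept("hello"); // prints "got: hello"
        // andThen chains a second side effect after the first
        printer.andThen(s -> System.out.println("length: " + s.length()))
               .accept("world");
    }
}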
From source file:de.acosix.alfresco.utility.common.spring.BeanDefinitionFromPropertiesPostProcessor.java
protected void processListPropertyValueDefinition(final String beanName, final String propertyName,
        final String definitionKey, final String value, final MutablePropertyValues propertyValues,
        final Consumer<ManagedList<?>> paddedListRegistrator) {
    boolean isCsv = false;
    String definitionKeyRemainder = definitionKey;

    int nextDot = definitionKeyRemainder.indexOf('.');
    if (definitionKeyRemainder.startsWith(SUFFIX_CSV_PROPERTY)
            && (nextDot == -1 || nextDot == SUFFIX_CSV_PROPERTY.length())) {
        isCsv = true;
        definitionKeyRemainder = nextDot != -1 ? definitionKeyRemainder.substring(nextDot + 1) : "";
    }

    nextDot = definitionKeyRemainder.indexOf('.');
    final String potentialIndex;
    if (nextDot != -1) {
        potentialIndex = definitionKeyRemainder.substring(0, nextDot);
        definitionKeyRemainder = definitionKeyRemainder.substring(nextDot + 1);
    } else {
        potentialIndex = definitionKeyRemainder;
        definitionKeyRemainder = "";
    }

    // potentialIndex may just be used as a differentiator for multiple list additions / removals
    // for the same property
    final int index;
    if (potentialIndex.matches("^\\d+$")) {
        index = Integer.parseInt(potentialIndex);
    } else {
        if (definitionKeyRemainder.isEmpty()) {
            definitionKeyRemainder = potentialIndex;
        } else {
            definitionKeyRemainder = potentialIndex + DOT + definitionKeyRemainder;
        }
        index = -1;
    }

    final ManagedList<Object> valueList = this.initListPropertyValue(beanName, propertyName, propertyValues);

    if (definitionKeyRemainder.endsWith(SUFFIX_REMOVE) || definitionKeyRemainder.equals(SUFFIX_SIMPLE_REMOVE)) {
        if (index != -1) {
            valueList.remove(index);
        } else {
            if (definitionKeyRemainder.endsWith(SUFFIX_REMOVE)) {
                definitionKeyRemainder = definitionKeyRemainder.substring(0,
                        definitionKeyRemainder.indexOf(SUFFIX_REMOVE));
            } else {
                definitionKeyRemainder = "";
            }

            if (isCsv) {
                if (!value.isEmpty()) {
                    final String[] strValues = value.split("\\s*(?<!\\\\),\\s*");
                    for (final String singleValue : strValues) {
                        final Object valueToRemove = this.getAsValue(beanName, propertyName,
                                definitionKeyRemainder, singleValue);
                        valueList.remove(valueToRemove);
                    }
                }
            } else {
                final Object valueToRemove = this.getAsValue(beanName, propertyName, definitionKeyRemainder,
                        value);
                valueList.remove(valueToRemove);
            }
        }
    } else {
        if (valueList.size() < index) {
            paddedListRegistrator.accept(valueList);
            while (valueList.size() < index) {
                // pad with null values
                // may be replaced with actual value if another property defines value for index
                valueList.add(null);
            }
        }

        if (isCsv) {
            if (!value.isEmpty()) {
                final String[] strValues = value.split("\\s*(?<!\\\\),\\s*");
                for (final String singleValue : strValues) {
                    final Object valueToSet = this.getAsValue(beanName, propertyName, definitionKeyRemainder,
                            singleValue);
                    if (index == -1 || valueList.size() == index) {
                        valueList.add(valueToSet);
                    } else {
                        valueList.set(index, valueToSet);
                    }
                }
            }
        } else {
            final Object valueToSet = this.getAsValue(beanName, propertyName, definitionKeyRemainder, value);
            if (index == -1 || valueList.size() == index) {
                valueList.add(valueToSet);
            } else {
                valueList.set(index, valueToSet);
            }
        }
    }
}
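In this example the Consumer serves as a notification hook: paddedListRegistrator.accept(valueList) runs once, just before the list is padded with null placeholders, so the caller can track which lists were padded. A distilled sketch of that pattern (names are illustrative, not from the Acosix codebase):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

class PaddingDemo {
    static void padTo(List<Object> list, int index, Consumer<List<Object>> paddedListRegistrator) {
        if (list.size() < index) {
            paddedListRegistrator.accept(list); // notify before mutating
            while (list.size() < index) {
                list.add(null); // placeholder, may be overwritten later
            }
        }
    }

    public static void main(String[] args) {
        List<List<Object>> paddedLists = new ArrayList<>();
        List<Object> values = new ArrayList<>();
        padTo(values, 3, paddedLists::add);
        System.out.println(paddedLists.size() + " list(s) padded to size " + values.size());
        // prints: 1 list(s) padded to size 3
    }
}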
From source file:org.openecomp.sdc.be.model.operations.impl.PropertyOperation.java
protected TitanOperationStatus fillProperties(String uniqueId,
        Consumer<List<PropertyDefinition>> propertySetter) {
    Either<Map<String, PropertyDefinition>, TitanOperationStatus> findPropertiesOfNode = this
            .findPropertiesOfNode(NodeTypeEnum.GroupType, uniqueId);
    if (findPropertiesOfNode.isRight()) {
        TitanOperationStatus titanOperationStatus = findPropertiesOfNode.right().value();
        log.debug("After looking for properties of vertex " + uniqueId + ". status is " + titanOperationStatus);
        if (TitanOperationStatus.NOT_FOUND.equals(titanOperationStatus)) {
            return TitanOperationStatus.OK;
        } else {
            return titanOperationStatus;
        }
    } else {
        Map<String, PropertyDefinition> properties = findPropertiesOfNode.left().value();
        if (properties != null) {
            List<PropertyDefinition> propertiesAsList = properties.entrySet().stream().map(p -> p.getValue())
                    .collect(Collectors.toList());
            propertySetter.accept(propertiesAsList);
        }
        return TitanOperationStatus.OK;
    }
}
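Here the Consumer acts as an injected setter: fillProperties fetches the data and hands it off via propertySetter.accept(...), so the same lookup can populate different targets (typically a method reference like someBean::setProperties). A minimal sketch of the idea, with simplified types standing in for the graph lookup:

import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

class SetterInjectionDemo {
    static class GroupType {
        private List<String> properties;
        void setProperties(List<String> p) { this.properties = p; }
    }

    // The loader never knows which bean receives the result
    static void fillProperties(String id, Consumer<List<String>> propertySetter) {
        List<String> loaded = Arrays.asList("prop1", "prop2"); // stand-in for the real lookup
        propertySetter.accept(loaded);
    }

    public static void main(String[] args) {
        GroupType group = new GroupType();
        fillProperties("group-1", group::setProperties);
        System.out.println(group.properties); // [prop1, prop2]
    }
}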
From source file:org.matonto.ontology.rest.impl.OntologyRestImplTest.java
private void assertImportedOntologies(JSONArray responseArray, Consumer<JSONObject> assertConsumer) {
    for (Object o : responseArray) {
        JSONObject jsonO = (JSONObject) o;
        String ontologyId = jsonO.get("id").toString();
        assertNotEquals(importedOntologies.stream()
                .filter(ont -> ont.getOntologyId().getOntologyIdentifier().stringValue().equals(ontologyId))
                .collect(Collectors.toList()).size(), 0);
        assertConsumer.accept(jsonO);
    }
}
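This test helper runs shared checks in a loop and delegates the per-element assertions to assertConsumer.accept(jsonO), so several tests can reuse the loop with different assertion bodies. The pattern in isolation (hypothetical assertion, not from the MatOnto tests):

import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

class AssertionCallbackDemo {
    static void assertAll(List<String> items, Consumer<String> perItemAssertion) {
        for (String item : items) {
            // shared checks would go here
            perItemAssertion.accept(item); // caller-specific checks
        }
    }

    public static void main(String[] args) {
        assertAll(Arrays.asList("a", "bb"), s -> {
            if (s.isEmpty()) throw new AssertionError("empty item");
        });
        System.out.println("all assertions passed");
    }
}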
From source file:org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.java
private <PREQ, PRESP> CompletableFuture<Void> procedureCall(
        Consumer<MasterRequestCallerBuilder<?>> prioritySetter, PREQ preq, MasterRpcCall<PRESP, PREQ> rpcCall,
        Converter<Long, PRESP> respConverter, ProcedureBiConsumer consumer) {
    MasterRequestCallerBuilder<Long> builder = this.<Long> newMasterCaller().action(
            (controller, stub) -> this.<PREQ, PRESP, Long> call(controller, stub, preq, rpcCall,
                    respConverter));
    prioritySetter.accept(builder);
    CompletableFuture<Long> procFuture = builder.call();
    CompletableFuture<Void> future = waitProcedureResult(procFuture);
    addListener(future, consumer);
    return future;
}
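procedureCall lets the caller tweak the request builder (here: its priority) via prioritySetter.accept(builder) before the call is issued. The same shape works for any pre-call builder customization hook; this sketch uses illustrative names, not the HBase API:

import java.util.function.Consumer;

class BuilderHookDemo {
    static class RequestBuilder {
        int priority = 0;
        RequestBuilder priority(int p) { this.priority = p; return this; }
        String call() { return "request sent with priority " + priority; }
    }

    static String send(Consumer<RequestBuilder> customizer) {
        RequestBuilder builder = new RequestBuilder();
        customizer.accept(builder); // caller adjusts the builder before the call
        return builder.call();
    }

    public static void main(String[] args) {
        System.out.println(send(b -> b.priority(7))); // request sent with priority 7
        System.out.println(send(b -> { }));           // request sent with priority 0
    }
}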
From source file:com.ikanow.aleph2.analytics.services.TestDeduplicationService.java
@SuppressWarnings("unchecked") @Test//from ww w .j av a 2s . com public void test_handleDuplicateRecord() { final IEnrichmentModuleContext enrich_context = Mockito.mock(IEnrichmentModuleContext.class); Mockito.when(enrich_context.emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class))) .thenReturn(Validation.success(_mapper.createObjectNode())); TestDedupEnrichmentModule test_module = new TestDedupEnrichmentModule(); final String ts_field = "@timestamp"; final ObjectNode old_json = _mapper.createObjectNode(); old_json.put("_id", "old_record"); old_json.put("@timestamp", 0L); old_json.put("url", "test"); final ObjectNode new_json = _mapper.createObjectNode(); new_json.put("@timestamp", 1L); new_json.put("url", "test"); final ObjectNode new_json_but_same_time = _mapper.createObjectNode(); new_json_but_same_time.put("@timestamp", 0L); new_json_but_same_time.put("url", "test"); Tuple3<Long, IBatchRecord, ObjectNode> new_record = Tuples._3T(0L, new BatchRecordUtils.JsonBatchRecord(new_json), _mapper.createObjectNode()); Tuple3<Long, IBatchRecord, ObjectNode> new_record_but_same_time = Tuples._3T(0L, new BatchRecordUtils.JsonBatchRecord(new_json_but_same_time), _mapper.createObjectNode()); new_record._2().getContent(); //(code coverage!) final TextNode key = new TextNode("url"); LinkedHashMap<JsonNode, LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>>> mutable_obj_map = new LinkedHashMap<>(); final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_records = Stream.of(new_record) .collect(Collectors.toCollection(LinkedList::new)); final LinkedList<Tuple3<Long, IBatchRecord, ObjectNode>> new_records_but_same_time = Stream .of(new_record_but_same_time).collect(Collectors.toCollection(LinkedList::new)); // Simple case Leave policy { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.leave).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // No annotations/mutations assertEquals("{}", new_record._3().toString()); // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); } // Simple case update policy - time updates final Consumer<Boolean> test_time_updates = delete_unhandled -> { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); 
DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.update) .with(DocumentSchemaBean::delete_unhandled_duplicates, delete_unhandled).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); // (add the same object twice to test the "return ids to delete" functionality) final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json, old_json), key, mutable_obj_map); if (delete_unhandled) { assertEquals(Arrays.asList("old_record"), ret_val.sorted() .map(j -> DeduplicationService.jsonToObject(j)).collect(Collectors.toList())); } else { assertEquals(0L, ret_val.count()); } // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // _id assertEquals("{\"_id\":\"old_record\"}", new_record._3().toString()); // Object removed from mutable map assertEquals(2, mutable_obj_map.size()); }; test_time_updates.accept(true); test_time_updates.accept(false); // Simple case update policy - times the same { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.update) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // No annotations/mutations assertEquals("{}", new_record_but_same_time._3().toString()); // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); } // overwrite final Consumer<Boolean> test_overwrites = delete_unhandled -> { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.overwrite) .with(DocumentSchemaBean::delete_unhandled_duplicates, delete_unhandled).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = 
DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json, old_json), key, mutable_obj_map); if (delete_unhandled) { assertEquals(Arrays.asList("old_record"), ret_val.sorted() .map(j -> DeduplicationService.jsonToObject(j)).collect(Collectors.toList())); } else { assertEquals(0L, ret_val.count()); } // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // _id assertEquals("{\"_id\":\"old_record\"}", new_record._3().toString()); // Object removed from mutable map assertEquals(2, mutable_obj_map.size()); }; test_overwrites.accept(true); test_overwrites.accept(false); //(check ignores times) { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.overwrite) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // _id assertEquals("{\"_id\":\"old_record\"}", new_record_but_same_time._3().toString()); // Object removed from mutable map assertEquals(2, mutable_obj_map.size()); } // custom { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(2, _called_batch.get()); //(old + new) // _id 
assertEquals("{}", new_record._3().toString()); // up to the custom code to do this // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit) } //(check ignores times) { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(2, _called_batch.get()); //(old + new) // _id assertEquals("{}", new_record_but_same_time._3().toString()); // up to the custom code to do this // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit) } // Simple case *custom* update policy - time updates { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom_update) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(2, _called_batch.get()); //(old + new) // _id assertEquals("{}", new_record._3().toString()); // up to the custom code to do this // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); //(remove since it's the responsibility of the custom code to emit) } // Simple case *custom* update policy - times the same { //(reset) mutable_obj_map.clear(); mutable_obj_map.put(new TextNode("never_changed"), new_records); mutable_obj_map.put(new TextNode("url"), new_records); assertEquals(2, mutable_obj_map.size()); new_record._3().removeAll(); 
new_record_but_same_time._3().removeAll(); _called_batch.set(0); DocumentSchemaBean config = BeanTemplateUtils.build(DocumentSchemaBean.class) .with(DocumentSchemaBean::deduplication_policy, DeduplicationPolicy.custom_update) .with(DocumentSchemaBean::delete_unhandled_duplicates, false).done().get(); DeduplicationEnrichmentContext test_context = new DeduplicationEnrichmentContext(enrich_context, config, j -> Optional.empty()); final Stream<JsonNode> ret_val = DeduplicationService.handleDuplicateRecord(config, Optional.of(Tuples._2T(test_module, test_context)), ts_field, new_records_but_same_time, Arrays.asList(old_json), key, mutable_obj_map); assertEquals(0L, ret_val.count()); // Nothing emitted Mockito.verify(enrich_context, Mockito.times(0)).emitImmutableObject(Mockito.any(Long.class), Mockito.any(JsonNode.class), Mockito.any(Optional.class), Mockito.any(Optional.class), Mockito.any(Optional.class)); // No custom processing performed assertEquals(0, _called_batch.get()); // No annotations/mutations assertEquals("{}", new_record_but_same_time._3().toString()); // Object removed from mutable map assertEquals(1, mutable_obj_map.size()); } }
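Note how the test wraps a repeated block in a Consumer<Boolean> (test_time_updates, test_overwrites) and then runs it with accept(true) and accept(false), instead of duplicating roughly thirty lines per flag value. The trick in isolation (hypothetical check):

import java.util.function.Consumer;

class ParameterizedBlockDemo {
    public static void main(String[] args) {
        final Consumer<Boolean> testWithFlag = deleteUnhandled -> {
            // ... shared setup and invocation would go here ...
            String expected = deleteUnhandled ? "deleted" : "kept";
            System.out.println("flag=" + deleteUnhandled + " -> " + expected);
        };
        testWithFlag.accept(true);
        testWithFlag.accept(false);
    }
}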
From source file:org.nuxeo.ecm.core.io.download.DownloadServiceImpl.java
@Override
public void downloadBlob(HttpServletRequest request, HttpServletResponse response, DocumentModel doc,
        String xpath, Blob blob, String filename, String reason, Map<String, Serializable> extendedInfos,
        Boolean inline, Consumer<ByteRange> blobTransferer) throws IOException {
    Objects.requireNonNull(blob);
    // check blob permissions
    if (!checkPermission(doc, xpath, blob, reason, extendedInfos)) {
        response.sendError(HttpServletResponse.SC_FORBIDDEN, "Permission denied");
        return;
    }
    // check Blob Manager download link
    URI uri = redirectResolver.getURI(blob, UsageHint.DOWNLOAD, request);
    if (uri != null) {
        try {
            Map<String, Serializable> ei = new HashMap<>();
            if (extendedInfos != null) {
                ei.putAll(extendedInfos);
            }
            ei.put("redirect", uri.toString());
            logDownload(doc, xpath, filename, reason, ei);
            response.sendRedirect(uri.toString());
        } catch (IOException ioe) {
            DownloadHelper.handleClientDisconnect(ioe);
        }
        return;
    }
    try {
        String digest = blob.getDigest();
        if (digest == null) {
            digest = DigestUtils.md5Hex(blob.getStream());
        }
        String etag = '"' + digest + '"'; // with quotes per RFC7232 2.3
        response.setHeader("ETag", etag); // re-send even on SC_NOT_MODIFIED
        addCacheControlHeaders(request, response);
        String ifNoneMatch = request.getHeader("If-None-Match");
        if (ifNoneMatch != null) {
            boolean match = false;
            if (ifNoneMatch.equals("*")) {
                match = true;
            } else {
                for (String previousEtag : StringUtils.split(ifNoneMatch, ", ")) {
                    if (previousEtag.equals(etag)) {
                        match = true;
                        break;
                    }
                }
            }
            if (match) {
                String method = request.getMethod();
                if (method.equals("GET") || method.equals("HEAD")) {
                    response.sendError(HttpServletResponse.SC_NOT_MODIFIED);
                } else {
                    // per RFC7232 3.2
                    response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
                }
                return;
            }
        }
        // regular processing
        if (StringUtils.isBlank(filename)) {
            filename = StringUtils.defaultIfBlank(blob.getFilename(), "file");
        }
        String contentDisposition = DownloadHelper.getRFC2231ContentDisposition(request, filename, inline);
        response.setHeader("Content-Disposition", contentDisposition);
        response.setContentType(blob.getMimeType());
        if (blob.getEncoding() != null) {
            response.setCharacterEncoding(blob.getEncoding());
        }
        long length = blob.getLength();
        response.setHeader("Accept-Ranges", "bytes");
        String range = request.getHeader("Range");
        ByteRange byteRange;
        if (StringUtils.isBlank(range)) {
            byteRange = null;
        } else {
            byteRange = DownloadHelper.parseRange(range, length);
            if (byteRange == null) {
                log.error("Invalid byte range received: " + range);
            } else {
                response.setHeader("Content-Range",
                        "bytes " + byteRange.getStart() + "-" + byteRange.getEnd() + "/" + length);
                response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT);
            }
        }
        long contentLength = byteRange == null ? length : byteRange.getLength();
        if (contentLength < Integer.MAX_VALUE) {
            response.setContentLength((int) contentLength);
        }
        logDownload(doc, xpath, filename, reason, extendedInfos);
        // execute the final download
        blobTransferer.accept(byteRange);
    } catch (UncheckedIOException e) {
        DownloadHelper.handleClientDisconnect(e.getCause());
    } catch (IOException ioe) {
        DownloadHelper.handleClientDisconnect(ioe);
    }
}
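All header negotiation (permissions, ETag, range parsing, content length) happens in the method above; only the final byte transfer is delegated via blobTransferer.accept(byteRange), where a null range means "send everything". A stripped-down sketch of that inversion of control, with simplified types standing in for Nuxeo's ByteRange:

import java.util.function.Consumer;

class TransferHookDemo {
    static void download(String rangeHeader, Consumer<int[]> blobTransferer) {
        // parse the Range header; null means full content (fake parse for the sketch)
        int[] byteRange = (rangeHeader == null) ? null : new int[] { 0, 99 };
        // ... response headers would be set here ...
        blobTransferer.accept(byteRange); // caller decides how the bytes are actually written
    }

    public static void main(String[] args) {
        download(null, r -> System.out.println(r == null ? "full download" : "partial download"));
        download("bytes=0-99", r -> System.out.println(r == null ? "full download" : "partial download"));
    }
}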
From source file:org.kie.workbench.common.dmn.backend.DMNMarshallerTest.java
@SuppressWarnings("unchecked") private Diagram<Graph, Metadata> connectTwoNodes( final org.kie.workbench.common.stunner.core.graph.content.Bounds bounds, final String decisionNode1UUID, final String decisionNode2UUID, final String edgeUUID, final Consumer<MagnetConnection> sourceMagnetConsumer, final Consumer<MagnetConnection> targetMagnetConsumer) { final DiagramImpl diagram = new DiagramImpl("dmn graph", null); final Graph<DefinitionSet, Node> graph = mock(Graph.class); final Node<View, Edge> diagramNode = mock(Node.class); final View diagramView = mock(View.class); final DMNDiagram dmnDiagram = new DMNDiagram(); when(diagramNode.getContent()).thenReturn(diagramView); when(diagramView.getDefinition()).thenReturn(dmnDiagram); final Node<View, Edge> decisionNode1 = mock(Node.class); final View decisionView1 = mock(View.class); final Decision decision1 = new Decision(); decision1.getId().setValue(decisionNode1UUID); when(decisionNode1.getContent()).thenReturn(decisionView1); when(decisionView1.getDefinition()).thenReturn(decision1); when(decisionView1.getBounds()).thenReturn(bounds); final Node<View, Edge> decisionNode2 = mock(Node.class); final View decisionView2 = mock(View.class); final Decision decision2 = new Decision(); decision2.getId().setValue(decisionNode2UUID); when(decisionNode2.getContent()).thenReturn(decisionView2); when(decisionView2.getDefinition()).thenReturn(decision2); when(decisionView2.getBounds()).thenReturn(bounds); final Edge edge = mock(Edge.class); final ViewConnector edgeView = mock(ViewConnector.class); when(edge.getUUID()).thenReturn(edgeUUID); when(edge.getContent()).thenReturn(edgeView); final MagnetConnection edgeSourceConnection = mock(MagnetConnection.class); final MagnetConnection edgeTargetConnection = mock(MagnetConnection.class); when(edgeView.getSourceConnection()).thenReturn(Optional.of(edgeSourceConnection)); when(edgeView.getTargetConnection()).thenReturn(Optional.of(edgeTargetConnection)); when(edgeView.getControlPoints()).thenReturn(new ControlPoint[] {}); when(decisionNode1.getOutEdges()).thenReturn(Collections.singletonList(edge)); when(decisionNode2.getInEdges()).thenReturn(Collections.singletonList(edge)); when(edge.getSourceNode()).thenReturn(decisionNode1); when(edge.getTargetNode()).thenReturn(decisionNode2); sourceMagnetConsumer.accept(edgeSourceConnection); targetMagnetConsumer.accept(edgeTargetConnection); doReturn(asList(diagramNode, decisionNode1, decisionNode2)).when(graph).nodes(); diagram.setGraph(graph);// w w w . j a v a2 s . c o m return diagram; }
From source file:com.evolveum.midpoint.security.enforcer.impl.SecurityEnforcerImpl.java
private <O extends ObjectType, T extends ObjectType> AccessDecision isAuthorizedPhase(
        MidPointPrincipal midPointPrincipal, String operationUrl, AuthorizationPhaseType phase,
        AuthorizationParameters<O, T> params, OwnerResolver ownerResolver,
        Consumer<Authorization> applicableAutzConsumer, Task task, OperationResult result)
        throws SchemaException, ObjectNotFoundException, ExpressionEvaluationException, CommunicationException,
        ConfigurationException, SecurityViolationException {

    if (AuthorizationConstants.AUTZ_NO_ACCESS_URL.equals(operationUrl)) {
        return AccessDecision.DENY;
    }

    if (phase == null) {
        throw new IllegalArgumentException("No phase");
    }
    AccessDecision decision = AccessDecision.DEFAULT;
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("AUTZ: evaluating authorization principal={}, op={}, phase={}, {}",
                getUsername(midPointPrincipal), operationUrl, phase, params.shortDump());
    }
    final AutzItemPaths allowedItems = new AutzItemPaths();
    Collection<Authorization> authorities = getAuthorities(midPointPrincipal);
    if (authorities != null) {
        for (GrantedAuthority authority : authorities) {
            if (authority instanceof Authorization) {
                Authorization autz = (Authorization) authority;
                String autzHumanReadableDesc = autz.getHumanReadableDesc();
                LOGGER.trace("  Evaluating {}", autzHumanReadableDesc);

                // First check if the authorization is applicable.

                // action
                if (!autz.getAction().contains(operationUrl)
                        && !autz.getAction().contains(AuthorizationConstants.AUTZ_ALL_URL)) {
                    LOGGER.trace("    {} not applicable for operation {}", autzHumanReadableDesc, operationUrl);
                    continue;
                }

                // phase
                if (autz.getPhase() == null) {
                    LOGGER.trace("    {} is applicable for all phases (continuing evaluation)",
                            autzHumanReadableDesc);
                } else {
                    if (autz.getPhase() != phase) {
                        LOGGER.trace("    {} is not applicable for phases {} (breaking evaluation)",
                                autzHumanReadableDesc, phase);
                        continue;
                    } else {
                        LOGGER.trace("    {} is applicable for phases {} (continuing evaluation)",
                                autzHumanReadableDesc, phase);
                    }
                }

                // relation
                if (!isApplicableRelation(autz, params.getRelation())) {
                    LOGGER.trace("    {} not applicable for relation {}", autzHumanReadableDesc,
                            params.getRelation());
                    continue;
                }

                // object
                if (isApplicable(autz.getObject(), params.getObject(), midPointPrincipal, ownerResolver,
                        "object", autzHumanReadableDesc, task, result)) {
                    LOGGER.trace("    {} applicable for object {} (continuing evaluation)",
                            autzHumanReadableDesc, params.getObject());
                } else {
                    LOGGER.trace(
                            "    {} not applicable for object {}, none of the object specifications match (breaking evaluation)",
                            autzHumanReadableDesc, params.getObject());
                    continue;
                }

                // target
                if (isApplicable(autz.getTarget(), params.getTarget(), midPointPrincipal, ownerResolver,
                        "target", autzHumanReadableDesc, task, result)) {
                    LOGGER.trace("    {} applicable for target {} (continuing evaluation)",
                            autzHumanReadableDesc, params.getObject());
                } else {
                    LOGGER.trace(
                            "    {} not applicable for target {}, none of the target specifications match (breaking evaluation)",
                            autzHumanReadableDesc, params.getObject());
                    continue;
                }

                if (applicableAutzConsumer != null) {
                    applicableAutzConsumer.accept(autz);
                }

                // authority is applicable to this situation. now we can process the decision.
                AuthorizationDecisionType autzDecision = autz.getDecision();
                if (autzDecision == null || autzDecision.equals(AuthorizationDecisionType.ALLOW)) {
                    allowedItems.collectItems(autz);
                    LOGGER.trace("    {}: ALLOW operation {} (but continue evaluation)", autzHumanReadableDesc,
                            operationUrl);
                    decision = AccessDecision.ALLOW;
                    // Do NOT break here. Other authorization statements may still deny the operation
                } else {
                    // item
                    if (isApplicableItem(autz, params.getObject(), params.getDelta())) {
                        LOGGER.trace("    {}: Deny authorization applicable for items (continuing evaluation)",
                                autzHumanReadableDesc);
                    } else {
                        LOGGER.trace("    {} not applicable for items (breaking evaluation)",
                                autzHumanReadableDesc);
                        continue;
                    }
                    LOGGER.trace("    {}: DENY operation {}", autzHumanReadableDesc, operationUrl);
                    decision = AccessDecision.DENY;
                    // Break right here. Deny cannot be overridden by allow. This decision cannot be changed.
                    break;
                }
            } else {
                LOGGER.warn("Unknown authority type {} in user {}", authority.getClass(),
                        getUsername(midPointPrincipal));
            }
        }
    }

    if (decision.equals(AccessDecision.ALLOW)) {
        // Still check allowedItems. We may still deny the operation.
        if (allowedItems.isAllItems()) {
            // This means all items are allowed. No need to check anything
            LOGGER.trace("    Empty list of allowed items, operation allowed");
        } else {
            // all items in the object and delta must be allowed
            LOGGER.trace("    Checking for allowed items: {}", allowedItems);

            ItemDecisionFunction itemDecisionFunction = (itemPath, removingContainer) -> decideAllowedItems(
                    itemPath, allowedItems, phase, removingContainer);
            AccessDecision itemsDecision = null;
            if (params.hasDelta()) {
                itemsDecision = determineDeltaDecision(params.getDelta(), params.getObject(),
                        itemDecisionFunction);
            } else if (params.hasObject()) {
                itemsDecision = determineObjectDecision(params.getObject(), itemDecisionFunction);
            }
            if (itemsDecision != AccessDecision.ALLOW) {
                LOGGER.trace("    NOT ALLOWED operation because the item decision is {}", itemsDecision);
                decision = AccessDecision.DEFAULT;
            }
        }
    }

    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("AUTZ result: principal={}, operation={}: {}", getUsername(midPointPrincipal),
                prettyActionUrl(operationUrl), decision);
    }
    return decision;
}
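Here the Consumer is an optional observer: if the caller passed one, every authorization that survives the applicability checks is reported via applicableAutzConsumer.accept(autz). Note the null check before the call, since the hook is optional. Distilled with simplified types (not the midPoint API):

import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

class OptionalObserverDemo {
    static boolean evaluate(List<String> rules, Consumer<String> applicableRuleConsumer) {
        boolean allowed = false;
        for (String rule : rules) {
            if (rule.startsWith("allow")) {
                if (applicableRuleConsumer != null) {
                    applicableRuleConsumer.accept(rule); // report, but don't decide
                }
                allowed = true;
            }
        }
        return allowed;
    }

    public static void main(String[] args) {
        evaluate(Arrays.asList("deny:x", "allow:read"), r -> System.out.println("applicable: " + r));
        evaluate(Arrays.asList("allow:read"), null); // observer is optional
    }
}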
From source file:net.mozq.picto.core.ProcessCore.java
public static void findFiles(ProcessCondition processCondition, Consumer<ProcessData> processDataSetter,
        BooleanSupplier processStopper) throws IOException {

    Set<FileVisitOption> fileVisitOptionSet;
    if (processCondition.isFollowLinks()) {
        fileVisitOptionSet = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
    } else {
        fileVisitOptionSet = Collections.emptySet();
    }

    Files.walkFileTree(processCondition.getSrcRootPath(), fileVisitOptionSet, processCondition.getDept(),
            new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                        throws IOException {
                    if (processStopper.getAsBoolean()) {
                        return FileVisitResult.TERMINATE;
                    }
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (attrs.isDirectory()) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }
                    if (processStopper.getAsBoolean()) {
                        return FileVisitResult.TERMINATE;
                    }
                    if (!processCondition.getPathFilter().accept(file, attrs)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }

                    Path rootRelativeSubPath = processCondition.getSrcRootPath().relativize(file.getParent());

                    ImageMetadata imageMetadata = getImageMetadata(file);

                    Date baseDate;
                    if (processCondition.isChangeFileCreationDate()
                            || processCondition.isChangeFileModifiedDate()
                            || processCondition.isChangeFileAccessDate()
                            || processCondition.isChangeExifDate()) {
                        baseDate = getBaseDate(processCondition, file, attrs, imageMetadata);
                    } else {
                        baseDate = null;
                    }

                    String destSubPathname = processCondition.getDestSubPathFormat().format(varName -> {
                        try {
                            switch (varName) {
                            case "Now":
                                return new Date();
                            case "ParentSubPath":
                                return rootRelativeSubPath.toString();
                            case "FileName":
                                return file.getFileName().toString();
                            case "BaseName":
                                return FileUtilz.getBaseName(file.getFileName().toString());
                            case "Extension":
                                return FileUtilz.getExt(file.getFileName().toString());
                            case "Size":
                                return Long.valueOf(Files.size(file));
                            case "CreationDate":
                                return (processCondition.isChangeFileCreationDate()) ? baseDate
                                        : new Date(attrs.creationTime().toMillis());
                            case "ModifiedDate":
                                return (processCondition.isChangeFileModifiedDate()) ? baseDate
                                        : new Date(attrs.lastModifiedTime().toMillis());
                            case "AccessDate":
                                return (processCondition.isChangeFileAccessDate()) ? baseDate
                                        : new Date(attrs.lastAccessTime().toMillis());
                            case "PhotoTakenDate":
                                return (processCondition.isChangeExifDate()) ? baseDate
                                        : getPhotoTakenDate(file, imageMetadata);
                            case "Width":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXIF_IMAGE_WIDTH);
                            case "Height":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXIF_IMAGE_LENGTH);
                            case "FNumber":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_FNUMBER);
                            case "Aperture":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_APERTURE_VALUE);
                            case "MaxAperture":
                                return getEXIFDoubleValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_MAX_APERTURE_VALUE);
                            case "ISO":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_ISO);
                            case "FocalLength":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_FOCAL_LENGTH); // ?
                            case "FocalLength35mm":
                                return getEXIFDoubleValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_FOCAL_LENGTH_IN_35MM_FORMAT);
                            case "ShutterSpeed":
                                return getEXIFDoubleValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_SHUTTER_SPEED_VALUE);
                            case "Exposure":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE);
                            // case "ExposureTime": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_TIME);
                            // case "ExposureMode": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_MODE);
                            case "ExposureProgram":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_PROGRAM);
                            case "Brightness":
                                return getEXIFDoubleValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_BRIGHTNESS_VALUE);
                            case "WhiteBalance":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_WHITE_BALANCE_1);
                            case "LightSource":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_LIGHT_SOURCE);
                            case "Lens":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS);
                            case "LensMake":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_MAKE);
                            case "LensModel":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_MODEL);
                            case "LensSerialNumber":
                                return getEXIFStringValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_LENS_SERIAL_NUMBER);
                            case "Make":
                                return getEXIFStringValue(imageMetadata, TiffTagConstants.TIFF_TAG_MAKE);
                            case "Model":
                                return getEXIFStringValue(imageMetadata, TiffTagConstants.TIFF_TAG_MODEL);
                            case "SerialNumber":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_SERIAL_NUMBER);
                            case "Software":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_SOFTWARE);
                            case "ProcessingSoftware":
                                return getEXIFStringValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_PROCESSING_SOFTWARE);
                            case "OwnerName":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_OWNER_NAME);
                            case "CameraOwnerName":
                                return getEXIFStringValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_CAMERA_OWNER_NAME);
                            case "GPSLat":
                                return getEXIFGpsLat(imageMetadata);
                            case "GPSLatDeg":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 0);
                            case "GPSLatMin":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 1);
                            case "GPSLatSec":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 2);
                            case "GPSLatRef":
                                return getEXIFStringValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE_REF);
                            case "GPSLon":
                                return getEXIFGpsLon(imageMetadata);
                            case "GPSLonDeg":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 0);
                            case "GPSLonMin":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 1);
                            case "GPSLonSec":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 2);
                            case "GPSLonRef":
                                return getEXIFStringValue(imageMetadata,
                                        GpsTagConstants.GPS_TAG_GPS_LONGITUDE_REF);
                            case "GPSAlt":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_ALTITUDE);
                            case "GPSAltRef":
                                return getEXIFIntValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_ALTITUDE_REF);
                            default:
                                throw new PictoInvalidDestinationPathException(Messages
                                        .getString("message.warn.invalid.destSubPath.varName", varName));
                            }
                        } catch (PictoException e) {
                            throw e;
                        } catch (Exception e) {
                            throw new PictoInvalidDestinationPathException(
                                    Messages.getString("message.warn.invalid.destSubPath.pattern"), e);
                        }
                    });

                    Path destSubPath = processCondition.getDestRootPath().resolve(destSubPathname).normalize();
                    if (!destSubPath.startsWith(processCondition.getDestRootPath())) {
                        throw new PictoInvalidDestinationPathException(
                                Messages.getString("message.warn.invalid.destination.path", destSubPath));
                    }

                    ProcessData processData = new ProcessData();
                    processData.setSrcPath(file);
                    processData.setSrcFileAttributes(attrs);
                    processData.setDestPath(destSubPath);
                    processData.setBaseDate(baseDate);

                    processDataSetter.accept(processData);

                    return FileVisitResult.CONTINUE;
                }
            });
}
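findFiles streams its results instead of collecting them: each ProcessData is pushed to processDataSetter.accept(...) as soon as it is built, and a BooleanSupplier lets the caller cancel the walk cooperatively. The shape in isolation, with Path standing in for the ProcessData record:

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;

class FileSinkDemo {
    static void findFiles(Path root, Consumer<Path> sink, BooleanSupplier stopper) throws IOException {
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                if (stopper.getAsBoolean()) {
                    return FileVisitResult.TERMINATE; // cooperative cancellation
                }
                sink.accept(file); // push each result immediately; nothing is accumulated here
                return FileVisitResult.CONTINUE;
            }
        });
    }

    public static void main(String[] args) throws IOException {
        findFiles(Paths.get("."), p -> System.out.println(p), () -> false);
    }
}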
From source file:org.polymap.p4.map.ProjectMapPanel.java
/**
 * Simple/experimental way to add a bottom view to this panel.
 *
 * @param creator consumer that populates the bottom view container
 */
protected void updateButtomView(Consumer<Composite> creator) {
    on(tableParent).height(250);
    UIUtils.disposeChildren(tableParent);
    creator.accept(tableParent);
    tableParent.getParent().layout(true);
}
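updateButtomView clears the container and lets the caller rebuild it via creator.accept(tableParent), so the panel owns sizing, disposal, and layout while callers own the content. The idea without the SWT dependency (illustrative types, not the Polymap API):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

class ViewSlotDemo {
    static class Container {
        final List<String> children = new ArrayList<>();
        void clear() { children.clear(); }
    }

    static void updateBottomView(Container slot, Consumer<Container> creator) {
        slot.clear();         // dispose previous content
        creator.accept(slot); // caller populates the slot
    }

    public static void main(String[] args) {
        Container slot = new Container();
        updateBottomView(slot, c -> c.children.add("results table"));
        System.out.println(slot.children); // [results table]
    }
}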