List of usage examples for java.util.stream.Collectors.toMap
public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper, Function<? super T, ? extends U> valueMapper)
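Before the project examples below, a minimal self-contained sketch (made-up data, not taken from any of the projects) shows what the two-argument form does: the key mapper and value mapper are applied to each stream element, duplicate keys cause an IllegalStateException, and the type of the returned Map is unspecified.

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapBasics {
    public static void main(String[] args) {
        // Hypothetical input data, just for illustration.
        List<String> words = Arrays.asList("alpha", "beta", "gamma");

        // keyMapper keeps the word itself, valueMapper derives its length.
        // This two-argument form throws IllegalStateException on duplicate keys
        // and makes no guarantee about the Map implementation it returns.
        Map<String, Integer> lengthByWord = words.stream()
                .collect(Collectors.toMap(w -> w, String::length));

        System.out.println(lengthByWord); // e.g. {alpha=5, beta=4, gamma=5}
    }
}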
From source file:com.thinkbiganalytics.feedmgr.service.template.DefaultFeedManagerTemplateService.java
/**
 * For a given Template and its related connection info to the reusable templates, walk the graph to return the Processors.
 * The system first walks the incoming template id. If the {@code connectionInfo} parameter is set, it makes the connections
 * to the incoming template and continues walking those processors.
 *
 * @param nifiTemplateId the NiFi templateId required to start walking the flow
 * @param connectionInfo the connections required to connect
 * @return a list of all the processors for a template and possible connections
 */
public List<RegisteredTemplate.FlowProcessor> getNiFiTemplateFlowProcessors(String nifiTemplateId,
        List<ReusableTemplateConnectionInfo> connectionInfo) {
    TemplateDTO templateDTO = nifiRestClient.getTemplateById(nifiTemplateId);

    // make the connection
    if (connectionInfo != null && !connectionInfo.isEmpty()) {
        Set<PortDTO> templatePorts = templateDTO.getSnippet().getOutputPorts();
        Map<String, PortDTO> outputPorts = templateDTO.getSnippet().getOutputPorts().stream()
                .collect(Collectors.toMap(portDTO -> portDTO.getName(), Function.identity()));
        Map<String, PortDTO> inputPorts = getReusableFeedInputPorts().stream()
                .collect(Collectors.toMap(portDTO -> portDTO.getName(), Function.identity()));
        connectionInfo.stream().forEach(reusableTemplateConnectionInfo -> {
            PortDTO outputPort = outputPorts.get(reusableTemplateConnectionInfo.getFeedOutputPortName());
            PortDTO inputPort = inputPorts
                    .get(reusableTemplateConnectionInfo.getReusableTemplateInputPortName());
            ConnectionDTO connectionDTO = new ConnectionDTO();
            ConnectableDTO source = new ConnectableDTO();
            source.setName(reusableTemplateConnectionInfo.getFeedOutputPortName());
            source.setType(outputPort.getType());
            source.setId(outputPort.getId());
            source.setGroupId(outputPort.getParentGroupId());
            ConnectableDTO dest = new ConnectableDTO();
            dest.setName(inputPort.getName());
            dest.setType(inputPort.getType());
            dest.setId(inputPort.getId());
            dest.setGroupId(inputPort.getParentGroupId());
            connectionDTO.setSource(source);
            connectionDTO.setDestination(dest);
            connectionDTO.setId(UUID.randomUUID().toString());
            templateDTO.getSnippet().getConnections().add(connectionDTO);
        });
    }

    NifiFlowProcessGroup template = nifiRestClient.getTemplateFeedFlow(templateDTO);
    return template.getProcessorMap().values().stream().map(flowProcessor -> {
        RegisteredTemplate.FlowProcessor p = new RegisteredTemplate.FlowProcessor(flowProcessor.getId());
        p.setGroupId(flowProcessor.getParentGroupId());
        p.setType(flowProcessor.getType());
        p.setName(flowProcessor.getName());
        p.setFlowId(flowProcessor.getFlowId());
        p.setIsLeaf(flowProcessor.isLeaf());
        return p;
    }).collect(Collectors.toList());
}
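Both toMap calls above index the ports by name with Function.identity() as the value mapper, which only works while port names are unique. A simplified sketch (plain strings rather than the Kylo DTOs, purely illustrative) shows the three-argument overload, whose merge function resolves duplicate keys instead of throwing:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ToMapDuplicateKeys {
    public static void main(String[] args) {
        // Hypothetical port names; "input" appears twice to force a key collision.
        List<String> portNames = Arrays.asList("input", "output", "input");

        // The two-argument toMap would throw IllegalStateException here.
        // The three-argument overload resolves collisions with a merge function,
        // in this case simply keeping the first value encountered.
        Map<String, String> byName = portNames.stream()
                .collect(Collectors.toMap(Function.identity(), Function.identity(),
                        (first, second) -> first));

        System.out.println(byName); // {output=output, input=input}
    }
}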
From source file:com.bouncestorage.swiftproxy.v1.ObjectResource.java
private Map<String, String> getUserMetadata(Request request) {
    return StreamSupport.stream(request.getHeaderNames().spliterator(), false)
            .filter(name -> name.toLowerCase().startsWith(META_HEADER_PREFIX.toLowerCase()))
            .filter(name -> {
                if (name.equalsIgnoreCase(META_HEADER_PREFIX) || RESERVED_METADATA.contains(name)) {
                    throw new BadRequestException();
                }
                if (name.length() - META_HEADER_PREFIX.length() > InfoResource.CONFIG.swift.max_meta_name_length
                        || request.getHeader(name).length() > InfoResource.CONFIG.swift.max_meta_value_length) {
                    throw new BadRequestException();
                }
                return true;
            })
            .collect(Collectors.toMap(name -> name.substring(META_HEADER_PREFIX.length()),
                    name -> request.getHeader(name)));
}
From source file:com.epam.catgenome.manager.vcf.VcfManager.java
/**
 * Loads VCF FILTER and INFO data for a {@code Collection} of VCF files.
 *
 * @param vcfFileIds {@code Collection} specifying the VCF files of interest
 * @return VCF FILTER and INFO data
 * @throws IOException if a file system error occurs
 */
public VcfFilterInfo getFiltersInfo(Collection<Long> vcfFileIds) throws IOException {
    VcfFilterInfo filterInfo = new VcfFilterInfo();
    Map<String, InfoItem> infoItems = new HashMap<>();
    Set<String> availableFilters = new HashSet<>();

    for (Long fileId : vcfFileIds) {
        VcfFile vcfFile = vcfFileManager.loadVcfFile(fileId);
        Assert.notNull(vcfFile, getMessage(ERROR_VCF_ID_INVALID, fileId));

        try (FeatureReader<VariantContext> reader = AbstractFeatureReader.getFeatureReader(vcfFile.getPath(),
                new VCFCodec(), false)) {
            VCFHeader header = (VCFHeader) reader.getHeader();
            Collection<VCFInfoHeaderLine> headerLines = header.getInfoHeaderLines();
            infoItems.putAll(headerLines.stream()
                    .filter(l -> !isExtendedInfoLine(l.getDescription())) // Exclude ANN from fields,
                    .map(InfoItem::new)                                   // we don't need it in the index
                    .collect(Collectors.toMap(InfoItem::getName, i -> i)));
            availableFilters.addAll(header.getFilterLines().stream().map(VCFSimpleHeaderLine::getID)
                    .collect(Collectors.toList()));
        }
    }

    List<String> filtersWhiteList = getFilterWhiteList();
    if (!filtersWhiteList.isEmpty()) {
        infoItems = scourFilterList(infoItems, filtersWhiteList);
    }

    infoItems.put(FeatureIndexDao.FeatureIndexFields.IS_EXON.getFieldName(),
            new InfoItem(FeatureIndexDao.FeatureIndexFields.IS_EXON.getFieldName(), VCFHeaderLineType.Flag,
                    "Defines if a variation is located in exon region"));
    filterInfo.setInfoItemMap(infoItems);
    filterInfo.setAvailableFilters(availableFilters);

    return filterInfo;
}
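The collector here indexes INFO header lines by name, and the value mapper i -> i is just Function.identity() spelled out. A small self-contained sketch (a hypothetical InfoField bean, not the CATGenome classes) illustrates the same index-by-property pattern:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class IndexByProperty {

    // Hypothetical stand-in for a header-line style bean.
    static class InfoField {
        private final String name;
        private final String description;

        InfoField(String name, String description) {
            this.name = name;
            this.description = description;
        }

        String getName() {
            return name;
        }

        String getDescription() {
            return description;
        }
    }

    public static void main(String[] args) {
        List<InfoField> fields = Arrays.asList(
                new InfoField("DP", "Read depth"),
                new InfoField("AF", "Allele frequency"));

        // Index the beans by name; the value mapper i -> i used above is
        // equivalent to Function.identity().
        Map<String, InfoField> byName = fields.stream()
                .collect(Collectors.toMap(InfoField::getName, Function.identity()));

        System.out.println(byName.get("DP").getDescription()); // Read depth
    }
}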
From source file:fr.paris.lutece.portal.web.xsl.XslExportJspBeanTest.java
public void testDoRemoveXslExportNoToken() throws AccessDeniedException {
    MockHttpServletRequest request = new MockHttpServletRequest();
    AdminUser user = new AdminUser();
    user.setRoles(
            AdminRoleHome.findAll().stream().collect(Collectors.toMap(AdminRole::getKey, Function.identity())));
    Utils.registerAdminUserWithRigth(request, user, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);
    request.setParameter("id_xsl_export", Integer.toString(_xslExport.getIdXslExport()));
    _instance.init(request, XslExportJspBean.RIGHT_MANAGE_XSL_EXPORT);

    try {
        _instance.doRemoveXslExport(request);
        fail("Should have thrown");
    } catch (AccessDeniedException e) {
        XslExport stored = XslExportHome.findByPrimaryKey(_xslExport.getIdXslExport());
        assertNotNull(stored);
    }
}
From source file:com.bmc.gibraltar.automation.dataprovider.RestDataProvider.java
/**
 * Collects all selection fields and their options from the specified record definition.
 *
 * @param record record definition name
 * @return a Map where the key is the name of a selection field and the value is the list of its options
 */
public Map<String, List<String>> getSelectionFieldsAndOptions(String record) {
    JsonPath response = JsonPath.from(getRecFields(record));
    String optionsPath = "fieldDefinitions.find { it.name == '%s'}.options";
    List<String> selectionFields = getRecordFieldsNamesByProperty(record, "resourceType",
            DataType.SELECTION.getResourceType());
    return selectionFields.stream().collect(Collectors.toMap(selectionField -> selectionField,
            selectionField -> response.getList(String.format(optionsPath, selectionField))));
}
From source file:com.haulmont.cuba.web.gui.components.WebCalendar.java
@Override
public Map<DayOfWeek, String> getDayNames() {
    List<String> days = Arrays.asList(component.getDayNamesShort().clone());

    int shift = Math.abs(component.getFirstDayOfWeek() - java.util.Calendar.MONDAY) + 1;
    Collections.rotate(days, -shift);

    return days.stream().collect(Collectors.toMap((String d) -> DayOfWeek.of(days.indexOf(d) + 1), d -> d));
}
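The result above is an unspecified Map even though its keys are DayOfWeek constants. A hedged sketch (hard-coded day names instead of the Vaadin component, purely illustrative) shows the four-argument overload, whose map factory lets the caller choose the map implementation, for example an EnumMap:

import java.time.DayOfWeek;
import java.util.Arrays;
import java.util.EnumMap;
import java.util.List;
import java.util.stream.Collectors;

public class ToMapWithSupplier {
    public static void main(String[] args) {
        // Hypothetical short day names, already rotated so index 0 is Monday.
        List<String> days = Arrays.asList("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun");

        // The four-argument overload adds a merge function and a map factory,
        // so the result can be an EnumMap instead of the unspecified default.
        EnumMap<DayOfWeek, String> names = days.stream()
                .collect(Collectors.toMap(d -> DayOfWeek.of(days.indexOf(d) + 1), d -> d,
                        (a, b) -> a, () -> new EnumMap<>(DayOfWeek.class)));

        System.out.println(names.get(DayOfWeek.MONDAY)); // Mon
    }
}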
From source file:alfio.manager.system.ConfigurationManager.java
private Map<ConfigurationKeys.SettingCategory, List<Configuration>> groupByCategory(
        Map<ConfigurationKeys.SettingCategory, List<Configuration>> all,
        Map<ConfigurationKeys.SettingCategory, List<Configuration>> existing) {
    return all.entrySet().stream().map(e -> {
        Set<Configuration> entries = new TreeSet<>();
        ConfigurationKeys.SettingCategory key = e.getKey();
        entries.addAll(e.getValue());
        if (existing.containsKey(key)) {
            List<Configuration> configurations = existing.get(key);
            entries.removeAll(configurations);
            entries.addAll(configurations);
        }
        return Pair.of(key, new ArrayList<>(entries));
    }).collect(Collectors.toMap(Pair::getKey, Pair::getValue));
}
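The categories here are collected from a sequential stream, so plain toMap is fine. As a side note (not something the alfio code does), the concurrent counterpart toConcurrentMap is the variant intended for parallel streams, where it accumulates into a single ConcurrentMap instead of merging per-thread maps:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import java.util.stream.Collectors;

public class ToConcurrentMapExample {
    public static void main(String[] args) {
        // Hypothetical configuration keys, just for illustration.
        List<String> keys = Arrays.asList("mail.host", "mail.port", "payment.provider");

        // toConcurrentMap is the concurrent counterpart of toMap; on a parallel
        // stream it writes into one shared ConcurrentMap, which can be cheaper
        // than combining intermediate maps for large inputs.
        ConcurrentMap<String, Integer> lengths = keys.parallelStream()
                .collect(Collectors.toConcurrentMap(Function.identity(), String::length));

        System.out.println(lengths);
    }
}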
From source file:com.thinkbiganalytics.feedmgr.nifi.NifiFlowCache.java
/**
 * Ensure that there is a configured reporting task.
 */
private void ensureNiFiKyloReportingTask() {
    String reportingTaskName = StringUtils.substringAfterLast(NiFiKyloProvenanceEventReportingTaskType, ".");
    if (!nifiRestClient.getNiFiRestClient().reportingTasks()
            .findFirstByType(NiFiKyloProvenanceEventReportingTaskType).isPresent()) {
        log.info("Attempting to create the {} in NiFi ", reportingTaskName);
        // create it
        // 1) ensure the controller service exists and is wired correctly
        Optional<ControllerServiceDTO> controllerService = nifiRestClient.getNiFiRestClient().reportingTasks()
                .findFirstControllerServiceByType(NiFiMetadataControllerServiceType);
        ControllerServiceDTO metadataService = null;
        if (controllerService.isPresent()) {
            metadataService = controllerService.get();
        } else {
            log.info("Attempting to create the Controller Service: {} with the name {} in NiFi ",
                    NiFiMetadataControllerServiceType, NiFiMetadataServiceName);
            // create it and enable it
            // first create it
            ControllerServiceDTO controllerServiceDTO = new ControllerServiceDTO();
            controllerServiceDTO.setType(NiFiMetadataControllerServiceType);
            controllerServiceDTO.setName(NiFiMetadataServiceName);
            metadataService = nifiRestClient.getNiFiRestClient().reportingTasks()
                    .createReportingTaskControllerService(controllerServiceDTO);
            // find the properties to inject
            Map<String, Object> configProperties = propertyExpressionResolver.getStaticConfigProperties();
            Map<String, String> stringConfigProperties = new HashMap<>();
            if (configProperties != null) {
                // transform the object map to the String map
                stringConfigProperties = configProperties.entrySet().stream().collect(Collectors
                        .toMap(Map.Entry::getKey, e -> e.getValue() != null ? e.getValue().toString() : null));
            }
            metadataService = nifiRestClient.enableControllerServiceAndSetProperties(metadataService.getId(),
                    stringConfigProperties);
        }
        if (metadataService != null) {
            try {
                if (NifiProcessUtil.SERVICE_STATE.DISABLED.name().equalsIgnoreCase(metadataService.getState())) {
                    log.info("Reporting Task Controller Service {} exists, ensuring it is enabled.",
                            NiFiMetadataServiceName);
                    // enable it
                    metadataService = nifiRestClient
                            .enableControllerServiceAndSetProperties(metadataService.getId(), null);
                }
            } catch (NifiClientRuntimeException e) {
                // swallow the exception and attempt to move on to create the task
            }
            log.info("Creating the Reporting Task {} ", reportingTaskName);
            ReportingTaskDTO reportingTaskDTO = new ReportingTaskDTO();
            reportingTaskDTO.setType(NiFiKyloProvenanceEventReportingTaskType);
            reportingTaskDTO = nifiRestClient.getNiFiRestClient().reportingTasks()
                    .createReportingTask(reportingTaskDTO);
            // now set the properties
            ReportingTaskDTO updatedReportingTask = new ReportingTaskDTO();
            updatedReportingTask.setType(NiFiKyloProvenanceEventReportingTaskType);
            updatedReportingTask.setId(reportingTaskDTO.getId());
            updatedReportingTask.setName(reportingTaskName);
            updatedReportingTask.setProperties(new HashMap<>(1));
            updatedReportingTask.getProperties().put("Metadata Service", metadataService.getId());
            updatedReportingTask.setSchedulingStrategy("TIMER_DRIVEN");
            updatedReportingTask.setSchedulingPeriod("5 secs");
            updatedReportingTask.setComments(
                    "Reporting task that will query the provenance repository and send the events and summary statistics over to Kylo via a JMS queue");
            updatedReportingTask.setState(NifiProcessUtil.PROCESS_STATE.RUNNING.name());
            // update it
            reportingTaskDTO = nifiRestClient.getNiFiRestClient().reportingTasks().update(updatedReportingTask);
            if (reportingTaskDTO != null) {
                log.info("Successfully created the Reporting Task {} ", reportingTaskName);
            } else {
                log.info("Error creating the Reporting Task {}. You will need to go into NiFi to resolve. ",
                        reportingTaskName);
            }
        }
    }
}
From source file:com.ikanow.aleph2.search_service.elasticsearch.services.TestElasticsearchIndexService.java
@Test
public void test_validationSuccess() throws IOException {
    final String bucket_str = Resources.toString(Resources.getResource(
            "com/ikanow/aleph2/search_service/elasticsearch/services/test_bucket_validate_success.json"),
            Charsets.UTF_8);
    final DataBucketBean bucket = BeanTemplateUtils.build(bucket_str, DataBucketBean.class).done().get();

    // 1) Verbose mode off
    {
        final Collection<BasicMessageBean> res_col = _index_service
                .validateSchema(bucket.data_schema().columnar_schema(), bucket)._2();
        final Collection<BasicMessageBean> res_search = _index_service
                .validateSchema(bucket.data_schema().search_index_schema(), bucket)._2();
        final Collection<BasicMessageBean> res_time = _index_service
                .validateSchema(bucket.data_schema().temporal_schema(), bucket)._2();

        assertEquals(0, res_col.size());
        assertEquals(0, res_search.size());
        assertEquals(0, res_time.size());
    }

    // 2) Verbose mode on
    {
        final DataBucketBean bucket_verbose = BeanTemplateUtils.clone(bucket).with(DataBucketBean::data_schema,
                BeanTemplateUtils.clone(bucket.data_schema()).with(DataSchemaBean::search_index_schema,
                        BeanTemplateUtils.clone(bucket.data_schema().search_index_schema()).with(
                                DataSchemaBean.SearchIndexSchemaBean::technology_override_schema,
                                ImmutableMap.builder()
                                        .putAll(bucket.data_schema().search_index_schema()
                                                .technology_override_schema())
                                        .put("verbose", true).build())
                                .done())
                        .done())
                .done();

        final Collection<BasicMessageBean> res_col = _index_service
                .validateSchema(bucket_verbose.data_schema().columnar_schema(), bucket)._2();
        final Collection<BasicMessageBean> res_search = _index_service
                .validateSchema(bucket_verbose.data_schema().search_index_schema(), bucket)._2();
        final Collection<BasicMessageBean> res_time = _index_service
                .validateSchema(bucket_verbose.data_schema().temporal_schema(), bucket)._2();

        assertEquals(0, res_col.size());
        assertEquals(0, res_time.size());
        assertEquals(2, res_search.size());
        assertEquals(true, res_search.stream().allMatch(BasicMessageBean::success));

        Iterator<BasicMessageBean> res_search_message = res_search.iterator();

        final String mapping_str = Resources.toString(Resources.getResource(
                "com/ikanow/aleph2/search_service/elasticsearch/services/test_verbose_mapping_validate_results.json"),
                Charsets.UTF_8);
        final JsonNode mapping_json = _mapper.readTree(mapping_str.getBytes());

        assertEquals(mapping_json.toString(), _mapper.readTree(res_search_message.next().message()).toString());
        assertTrue(
                "Sets the max index override: "
                        + res_search.stream().skip(1).map(m -> m.message()).collect(Collectors.joining()),
                res_search_message.next().message().contains("1,000 MB"));

        // 2b) Same but with valid overrides
        final DataSchemaBean.ColumnarSchemaBean empty_field_spec = BeanTemplateUtils
                .build(DataSchemaBean.ColumnarSchemaBean.class).done().get();
        final Collection<BasicMessageBean> res_search_2 = _index_service.validateSchema(
                BeanTemplateUtils.clone(bucket_verbose.data_schema().search_index_schema()).with(
                        DataSchemaBean.SearchIndexSchemaBean::type_override,
                        ElasticsearchIndexService._supported_types.stream()
                                .<Tuple2<String, DataSchemaBean.ColumnarSchemaBean>>map(
                                        s -> Tuples._2T(s, empty_field_spec))
                                .collect(Collectors.toMap(
                                        (Tuple2<String, DataSchemaBean.ColumnarSchemaBean> t2) -> t2._1(),
                                        t2 -> t2._2())))
                        .with(DataSchemaBean.SearchIndexSchemaBean::tokenization_override,
                                ImmutableMap.of("_default_", empty_field_spec, "_none_", empty_field_spec))
                        .done(),
                bucket)._2();

        assertEquals(2, res_search_2.size());
    }

    // 3) Temporal
    {
        final DataBucketBean bucket_temporal_no_grouping = BeanTemplateUtils.clone(bucket)
                .with(DataBucketBean::data_schema, BeanTemplateUtils.clone(bucket.data_schema())
                        .with(DataSchemaBean::temporal_schema,
                                BeanTemplateUtils.build(DataSchemaBean.TemporalSchemaBean.class).done().get())
                        .done())
                .done();

        assertEquals("", _index_service
                .validateSchema(bucket_temporal_no_grouping.data_schema().temporal_schema(), bucket)._1());

        final DataBucketBean bucket_temporal_grouping = BeanTemplateUtils.clone(bucket)
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.clone(bucket.data_schema()).with(DataSchemaBean::temporal_schema,
                                BeanTemplateUtils.build(DataSchemaBean.TemporalSchemaBean.class)
                                        .with(DataSchemaBean.TemporalSchemaBean::grouping_time_period, "1d")
                                        .done().get())
                                .done())
                .done();

        assertEquals("_{yyyy.MM.dd}", _index_service
                .validateSchema(bucket_temporal_grouping.data_schema().temporal_schema(), bucket)._1());
    }
}
From source file:nu.yona.server.analysis.service.ActivityService.java
private Map<ZonedDateTime, Set<DayActivityDto>> mapDayActivitiesToDtos(
        Map<ZonedDateTime, Set<DayActivity>> dayActivityEntitiesByZonedDate) {
    return dayActivityEntitiesByZonedDate.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> mapDayActivitiesToDtos(e.getValue())));
}
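This is the common "transform the values, keep the keys" pattern: stream the entry set, reuse Map.Entry::getKey and map each value. A minimal sketch with made-up data (not the Yona entities) shows the same shape, plus the caveat that toMap rejects null mapped values with a NullPointerException:

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class TransformMapValues {
    public static void main(String[] args) {
        // Hypothetical source map, just for illustration.
        Map<String, Integer> minutesByActivity = new HashMap<>();
        minutesByActivity.put("walking", 30);
        minutesByActivity.put("cycling", 45);

        // Rebuild the map with the same keys but transformed values.
        // Note: toMap throws NullPointerException if either mapper returns null,
        // so nullable values need to be filtered out or defaulted first.
        Map<String, String> labels = minutesByActivity.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue() + " min"));

        System.out.println(labels); // e.g. {cycling=45 min, walking=30 min}
    }
}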