List of usage examples for java.util.stream IntStream range
public static IntStream range(int startInclusive, int endExclusive)
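For orientation, a minimal, self-contained sketch of the contract (not taken from any of the source files below; the class and variable names are illustrative): range(startInclusive, endExclusive) yields the ints from startInclusive up to, but not including, endExclusive, in ascending order.

import java.util.stream.IntStream;

public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Prints "0 1 2 3 4": 0 is included, 5 is excluded.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // Common idiom: iterate over array indices.
        String[] names = { "alpha", "beta", "gamma" };
        IntStream.range(0, names.length)
                .mapToObj(i -> i + ": " + names[i])
                .forEach(System.out::println);
    }
}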
From source file:it.greenvulcano.gvesb.gviamx.service.internal.SignUpManager.java
public void createSignUpRequest(String email, byte[] request) throws UserExistException {
    if (email == null || !email.matches(UserActionRequest.EMAIL_PATTERN)) {
        throw new IllegalArgumentException("Invalid email: " + email);
    }
    try {
        usersManager.getUser(email.toLowerCase());
        throw new UserExistException(email);
    } catch (UserNotFoundException e) {
        if (usersManager.searchUsers(SearchCriteria.builder().byEmail(email.toLowerCase()).limitedTo(1).build())
                .getTotalCount() > 0) {
            throw new UserExistException(email);
        }
    }
    SignUpRequest signUpRequest = signupRepository.get(email.toLowerCase(), SignUpRequest.class)
            .orElseGet(SignUpRequest::new);
    signUpRequest.setEmail(email.toLowerCase());
    signUpRequest.setIssueTime(new Date());
    signUpRequest.setExpireTime(expireTime);
    signUpRequest.setRequest(request);
    signUpRequest.setNotificationStatus(NotificationStatus.PENDING);
    byte[] token = new byte[4];
    secureRandom.nextBytes(token);
    // IntStream.range walks the token's indices, boxing each byte into the
    // Object[] varargs that String.format expects.
    String clearTextToken = String.format(Locale.US, "%02x%02x%02x%02x",
            IntStream.range(0, token.length).mapToObj(i -> Byte.valueOf(token[i])).toArray());
    signUpRequest.setToken(DigestUtils.sha256Hex(clearTextToken));
    signupRepository.add(signUpRequest);
    signUpRequest.setClearToken(clearTextToken);
    notificationServices.stream()
            .map(n -> new NotificationManager.NotificationTask(n, signUpRequest, signupRepository, "signup"))
            .forEach(executor::submit);
}
From source file:delfos.rs.collaborativefiltering.als.ALSRecommender.java
@Override
public MatrixFactorizationModel buildRecommendationModel(DatasetLoader<? extends Rating> datasetLoader)
        throws CannotLoadRatingsDataset, CannotLoadContentDataset, CannotLoadUsersDataset {
    int numIter = 1;
    int dimension = 5;
    long seed = getSeedValue();
    final double lambda = 0.1;
    Bias bias = new Bias(datasetLoader);

    // Initialize the user and item latent vectors with seeded random values.
    Map<User, List<Double>> randomUserVectors = datasetLoader.getUsersDataset().parallelStream()
            .collect(Collectors.toMap(user -> user, user -> {
                Random random = new Random(seed + user.getId());
                return random.doubles(-10, 10).limit(dimension).boxed().collect(Collectors.toList());
            }));
    Map<Item, List<Double>> randomItemVectors = datasetLoader.getContentDataset().parallelStream()
            .collect(Collectors.toMap(item -> item, item -> {
                Random random = new Random(seed + item.getId());
                return random.doubles(-10, 10).limit(dimension).boxed().collect(Collectors.toList());
            }));

    MatrixFactorizationModel model = new MatrixFactorizationModel(randomUserVectors, randomItemVectors, bias);

    for (int iterationIndex = 0; iterationIndex < numIter; iterationIndex++) {
        final int iteration = iterationIndex;
        final MatrixFactorizationModel initialModel = model;
        double error = getModelError(bias, datasetLoader, initialModel);
        System.out.println("Error in iteration " + iterationIndex + " is " + error);

        // Optimize the user vectors while the item vectors stay fixed.
        ProgressChangedController userProgress = new ProgressChangedController(
                getAlias() + " for dataset " + datasetLoader.getAlias() + " user optimization iteration "
                        + iteration,
                datasetLoader.getUsersDataset().size(), this::fireBuildingProgressChangedEvent);
        Map<User, List<Double>> trainedUserVectors = datasetLoader.getUsersDataset().parallelStream()
                .collect(Collectors.toMap(user -> user, (User user) -> {
                    Map<Integer, ? extends Rating> userRatings = datasetLoader.getRatingsDataset()
                            .getUserRatingsRated(user.getId());
                    ObjectiveFunction objectiveFunction = new ObjectiveFunction((double[] pu) -> {
                        List<Double> userVector = Arrays.stream(pu).boxed().collect(Collectors.toList());
                        double predictionError = userRatings.values().parallelStream()
                                .map(bias.getBiasApplier()).map(rating -> {
                                    List<Double> itemVector = initialModel.getItemFeatures(rating.getItem());
                                    // IntStream.range pairs the components by index to
                                    // compute the dot product of the latent vectors.
                                    double prediction = IntStream.range(0, userVector.size())
                                            .mapToDouble(index -> userVector.get(index) * itemVector.get(index))
                                            .sum();
                                    double value = rating.getRatingValue().doubleValue();
                                    return prediction - value;
                                }).mapToDouble(value -> Math.pow(value, 2)).sum();
                        double penalty = Arrays.stream(pu).map(value -> Math.pow(value, 2)).sum();
                        return predictionError + lambda * penalty;
                    });
                    SimplexOptimizer simplexOptimizer = new SimplexOptimizer(0, 0);
                    double[] initialGuess = new Random(seed + user.getId()).doubles(-10, 10).limit(dimension)
                            .toArray();
                    try {
                        PointValuePair optimize = simplexOptimizer.optimize(new MultiDirectionalSimplex(dimension),
                                new InitialGuess(initialGuess), objectiveFunction, GoalType.MINIMIZE, MAX_EVAL,
                                MAX_ITER);
                        userProgress.setTaskFinished();
                        return Arrays.stream(optimize.getPoint()).boxed().collect(Collectors.toList());
                    } catch (Exception ex) {
                        System.out.println("Vector cannot be optimized for user " + user + " (numRatings="
                                + userRatings.size() + ")");
                        return initialModel.getUserFeatures(user);
                    }
                }));

        // Optimize the item vectors while the user vectors stay fixed.
        ProgressChangedController itemProgress = new ProgressChangedController(
                getAlias() + " for dataset " + datasetLoader.getAlias() + " item optimization iteration "
                        + iteration,
                datasetLoader.getContentDataset().size(), this::fireBuildingProgressChangedEvent);
        Map<Item, List<Double>> trainedItemVectors = datasetLoader.getContentDataset().parallelStream()
                .collect(Collectors.toMap(item -> item, item -> {
                    Map<Integer, ? extends Rating> itemRatings = datasetLoader.getRatingsDataset()
                            .getItemRatingsRated(item.getId());
                    ObjectiveFunction objectiveFunction = new ObjectiveFunction((double[] pu) -> {
                        List<Double> itemVector = Arrays.stream(pu).boxed().collect(Collectors.toList());
                        double predictionError = itemRatings.values().parallelStream()
                                .map(bias.getBiasApplier()).map(rating -> {
                                    List<Double> userVector = initialModel.getUserFeatures(rating.getUser());
                                    double prediction = IntStream.range(0, userVector.size())
                                            .mapToDouble(index -> userVector.get(index) * itemVector.get(index))
                                            .sum();
                                    double value = rating.getRatingValue().doubleValue();
                                    return prediction - value;
                                }).mapToDouble(value -> Math.pow(value, 2)).sum();
                        double penalty = Arrays.stream(pu).map(value -> Math.pow(value, 2)).sum();
                        return predictionError + lambda * penalty;
                    });
                    SimplexOptimizer simplexOptimizer = new SimplexOptimizer(0, 0);
                    double[] initialGuess = new Random(seed + item.getId()).doubles(-10, 10).limit(dimension)
                            .toArray();
                    try {
                        PointValuePair optimize = simplexOptimizer.optimize(new MultiDirectionalSimplex(dimension),
                                new InitialGuess(initialGuess), objectiveFunction, GoalType.MINIMIZE, MAX_EVAL,
                                MAX_ITER);
                        itemProgress.setTaskFinished();
                        return Arrays.stream(optimize.getPoint()).boxed().collect(Collectors.toList());
                    } catch (Exception ex) {
                        System.out.println("Vector cannot be optimized for item " + item + " (numRatings="
                                + itemRatings.size() + ")");
                        return initialModel.getItemFeatures(item);
                    }
                }));

        model = new MatrixFactorizationModel(trainedUserVectors, trainedItemVectors, bias);
    }
    return model;
}
From source file:com.simiacryptus.mindseye.applications.ArtistryUtil.java
/** * Paint lines.//from w w w .j a va 2 s. co m * * @param canvas the canvas */ public static void paint_Lines(final Tensor canvas) { BufferedImage originalImage = canvas.toImage(); BufferedImage newImage = new BufferedImage(originalImage.getWidth(), originalImage.getHeight(), BufferedImage.TYPE_INT_ARGB); Graphics2D graphics = (Graphics2D) newImage.getGraphics(); IntStream.range(0, 100).forEach(i -> { Random random = new Random(); graphics.setColor(new Color(random.nextInt(255), random.nextInt(255), random.nextInt(255))); graphics.drawLine(random.nextInt(originalImage.getWidth()), random.nextInt(originalImage.getHeight()), random.nextInt(originalImage.getWidth()), random.nextInt(originalImage.getHeight())); }); canvas.set(Tensor.fromRGB(newImage)); }
From source file:com.vmware.photon.controller.cloudstore.xenon.entity.SchedulingConstantGeneratorTest.java
/**
 * Test distribution of scheduling constants, creating hosts concurrently on a
 * single Xenon host.
 */
@Test(dataProvider = "HostCounts")
public void testSchedulingConstantVariationConcurrent(int hostCount) throws Throwable {
    List<Long> schedulingConstants = Collections.synchronizedList(new ArrayList<>());
    TestEnvironment env = TestEnvironment.create(1);
    List<Thread> threads = new ArrayList<>();
    ServiceHost xenonHost = env.getHosts()[0];
    IntStream.range(0, THREADS).forEach((threadId) -> {
        Thread t = new Thread(() -> {
            List<Long> thisThreadSchedulingConstants = createHosts(xenonHost, hostCount);
            schedulingConstants.addAll(thisThreadSchedulingConstants);
        });
        t.start();
        threads.add(t);
    });
    for (Thread t : threads) {
        t.join();
    }
    env.stop();
    assertThat(schedulingConstants.size(), equalTo(hostCount * THREADS));
    Collections.sort(schedulingConstants);
    double cv = schedulingConstantGapCV(schedulingConstants);
    logger.info("Scheduling constant gap coefficient of variation: {}", cv);
    assertThat(cv, lessThan(MAX_VARIATION));
}
From source file:nl.salp.warcraft4j.fileformat.dbc.DbcFile.java
/**
 * Get all entries from the DBC file.
 *
 * @return The parsed entries.
 *
 * @throws DbcParsingException When the entries could not be parsed.
 */
public List<DbcEntry> getEntries() throws DbcParsingException {
    DbcHeader header = getHeader();
    try (DataReader reader = getDataReader()) {
        return IntStream.range(0, header.getEntryCount()).mapToObj(i -> getEntry(i, reader))
                .collect(Collectors.toList());
    } catch (IOException e) {
        throw new DbcParsingException(
                format("Error parsing entries for DBC file %d (%s)", filenameHash, filename), e);
    }
}
From source file:org.apache.sysml.runtime.controlprogram.paramserv.ParamservUtils.java
public static List<ExecutionContext> createExecutionContexts(ExecutionContext ec, LocalVariableMap varsMap,
        String updFunc, String aggFunc, int workerNum, int k) {
    FunctionProgramBlock updPB = getFunctionBlock(ec, updFunc);
    FunctionProgramBlock aggPB = getFunctionBlock(ec, aggFunc);
    Program prog = ec.getProgram();

    // 1. Recompile the internal program blocks
    recompileProgramBlocks(k, prog.getProgramBlocks());

    // 2. Recompile the imported function blocks
    prog.getFunctionProgramBlocks()
            .forEach((fname, fvalue) -> recompileProgramBlocks(k, fvalue.getChildBlocks()));

    // 3. Copy function for workers
    List<ExecutionContext> workerECs = IntStream.range(0, workerNum).mapToObj(i -> {
        FunctionProgramBlock newUpdFunc = copyFunction(updFunc, updPB);
        FunctionProgramBlock newAggFunc = copyFunction(aggFunc, aggPB);
        Program newProg = new Program();
        putFunction(newProg, newUpdFunc);
        putFunction(newProg, newAggFunc);
        return ExecutionContextFactory.createContext(new LocalVariableMap(varsMap), newProg);
    }).collect(Collectors.toList());

    // 4. Copy function for agg service
    FunctionProgramBlock newAggFunc = copyFunction(aggFunc, aggPB);
    Program newProg = new Program();
    putFunction(newProg, newAggFunc);
    ExecutionContext aggEC = ExecutionContextFactory.createContext(new LocalVariableMap(varsMap), newProg);

    List<ExecutionContext> result = new ArrayList<>(workerECs);
    result.add(aggEC);
    return result;
}
From source file:no.imr.stox.functions.acoustic.PgNapesIO.java
public static void export2(String cruise, String country, String callSignal, String path, String fileName,
        List<DistanceBO> distances, Double groupThickness, Integer freqFilter, String specFilter,
        boolean withZeros) {
    Set<Integer> freqs = distances.stream().flatMap(dist -> dist.getFrequencies().stream())
            .map(FrequencyBO::getFreq).collect(Collectors.toSet());
    if (freqFilter == null && freqs.size() == 1) {
        freqFilter = freqs.iterator().next();
    }
    if (freqFilter == null) {
        System.out.println("Multiple frequencies, specify frequency filter as parameter");
        return;
    }
    Integer freqFilterF = freqFilter; // effectively final copy, for use in the lambdas below
    final List<String> acList = distances.parallelStream().flatMap(dist -> dist.getFrequencies().stream())
            .filter(fr -> freqFilterF.equals(fr.getFreq())).map(f -> {
                DistanceBO d = f.getDistanceBO();
                LocalDateTime sdt = LocalDateTime.ofInstant(d.getStart_time().toInstant(), ZoneOffset.UTC);
                Double intDist = d.getIntegrator_dist();
                String month = StringUtils.leftPad(sdt.getMonthValue() + "", 2, "0");
                String day = StringUtils.leftPad(sdt.getDayOfMonth() + "", 2, "0");
                String hour = StringUtils.leftPad(sdt.getHour() + "", 2, "0");
                String minute = StringUtils.leftPad(sdt.getMinute() + "", 2, "0");
                String log = Conversion.formatDoubletoDecimalString(d.getLog_start(), "0.0");
                String acLat = Conversion.formatDoubletoDecimalString(d.getLat_start(), "0.000");
                String acLon = Conversion.formatDoubletoDecimalString(d.getLon_start(), "0.000");
                return Stream
                        .of(d.getNation(), d.getPlatform(), d.getCruise(), log, sdt.getYear(), month, day,
                                hour, minute, acLat, acLon, intDist, f.getFreq(), f.getThreshold())
                        .map(o -> o == null ? "" : o.toString()).collect(Collectors.joining("\t")) + "\t";
            }).collect(Collectors.toList());
    String fil1 = path + "/" + fileName + ".txt";
    acList.add(0, Stream.of("Country", "Vessel", "Cruise", "Log", "Year", "Month", "Day", "Hour", "Min",
            "AcLat", "AcLon", "Logint", "Frequency", "Sv_threshold").collect(Collectors.joining("\t")));
    try {
        Files.write(Paths.get(fil1), acList, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
    } catch (IOException ex) {
        Logger.getLogger(PgNapesIO.class.getName()).log(Level.SEVERE, null, ex);
    }
    acList.clear();
    // Acoustic values
    distances.stream().filter(d -> d.getPel_ch_thickness() != null)
            .flatMap(dist -> dist.getFrequencies().stream()).filter(fr -> freqFilterF.equals(fr.getFreq()))
            .forEachOrdered(f -> {
                try {
                    Double groupThicknessF = Math.max(f.getDistanceBO().getPel_ch_thickness(), groupThickness);
                    Map<String, Map<Integer, Double>> pivot = f.getSa().stream()
                            .filter(s -> s.getCh_type().equals("P")).map(s -> new SAGroup(s, groupThicknessF))
                            .filter(s -> s.getSpecies() != null
                                    && (specFilter == null || specFilter.equals(s.getSpecies())))
                            // create pivot table: species (dim1) -> depth interval index (dim2) -> sum sa (group aggregator)
                            .collect(Collectors.groupingBy(SAGroup::getSpecies, Collectors.groupingBy(
                                    SAGroup::getDepthGroupIdx, Collectors.summingDouble(SAGroup::sa))));
                    if (pivot.isEmpty() && specFilter != null && withZeros) {
                        pivot.put(specFilter, new HashMap<>());
                    }
                    Integer maxGroupIdx = pivot.entrySet().stream()
                            .flatMap(e -> e.getValue().keySet().stream()).max(Integer::compare).orElse(null);
                    if (maxGroupIdx == null) {
                        return;
                    }
                    acList.addAll(pivot.entrySet().stream().sorted(Comparator.comparing(Map.Entry::getKey))
                            .flatMap(e -> {
                                // IntStream.range enumerates every depth group index so
                                // that missing groups can be emitted as zeros.
                                return IntStream.range(0, maxGroupIdx + 1).boxed().map(groupIdx -> {
                                    Double chUpDepth = groupIdx * groupThicknessF;
                                    Double chLowDepth = (groupIdx + 1) * groupThicknessF;
                                    Double sa = e.getValue().get(groupIdx);
                                    if (sa == null) {
                                        sa = 0d;
                                    }
                                    String res = null;
                                    if (withZeros || sa > 0d) {
                                        DistanceBO d = f.getDistanceBO();
                                        String log = Conversion.formatDoubletoDecimalString(d.getLog_start(),
                                                "0.0");
                                        LocalDateTime sdt = LocalDateTime
                                                .ofInstant(d.getStart_time().toInstant(), ZoneOffset.UTC);
                                        String month = StringUtils.leftPad(sdt.getMonthValue() + "", 2, "0");
                                        String day = StringUtils.leftPad(sdt.getDayOfMonth() + "", 2, "0");
                                        //String sas = String.format(Locale.UK, "%11.5f", sa);
                                        res = Stream
                                                .of(d.getNation(), d.getPlatform(), d.getCruise(), log,
                                                        sdt.getYear(), month, day, e.getKey(), chUpDepth,
                                                        chLowDepth, sa)
                                                .map(o -> o == null ? "" : o.toString())
                                                .collect(Collectors.joining("\t"));
                                    }
                                    return res;
                                }).filter(s -> s != null);
                            }).collect(Collectors.toList()));
                } catch (Exception e) {
                    e.printStackTrace();
                }
            });
    String fil2 = path + "/" + fileName + "Values.txt";
    acList.add(0, Stream.of("Country", "Vessel", "Cruise", "Log", "Year", "Month", "Day", "Species",
            "ChUppDepth", "ChLowDepth", "SA").collect(Collectors.joining("\t")));
    try {
        Files.write(Paths.get(fil2), acList, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
    } catch (IOException ex) {
        Logger.getLogger(PgNapesIO.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.abixen.platform.service.businessintelligence.multivisualization.service.impl.JsonFilterServiceImpl.java
private Map<String, String> getColumnTypeMapping(ResultSetMetaData rsmd) throws SQLException {
    int columnCount = rsmd.getColumnCount();
    Map<String, String> columnTypeMapping = new HashMap<>();
    // JDBC column indexes are 1-based, hence range(1, columnCount + 1).
    IntStream.range(1, columnCount + 1).forEach(i -> {
        try {
            String columnTypeName = rsmd.getColumnTypeName(i);
            if ("BIGINT".equals(columnTypeName)) {
                columnTypeName = "INTEGER";
            }
            if ("VARCHAR".equals(columnTypeName)) {
                columnTypeName = "STRING";
            }
            if ("FLOAT8".equals(columnTypeName)) {
                columnTypeName = "DOUBLE";
            }
            columnTypeMapping.put(rsmd.getColumnName(i).toUpperCase(), columnTypeName.toUpperCase());
        } catch (SQLException e) {
            e.printStackTrace();
        }
    });
    return columnTypeMapping;
}
From source file:org.apache.hadoop.hbase.tool.TestLoadIncrementalHFilesSplitRecovery.java
private TableDescriptor createTableDesc(TableName name, int cfs) {
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(name);
    IntStream.range(0, cfs).mapToObj(i -> ColumnFamilyDescriptorBuilder.of(family(i)))
            .forEachOrdered(builder::addColumnFamily);
    return builder.build();
}
From source file:jp.co.opentone.bsol.linkbinder.view.admin.module.dataimport.MasterDataImportModule.java
/**
 * Validates the rows of an imported CSV file.
 *
 * @param file the CSV file to validate
 * @param list the records parsed from the file
 * @param processType the import process type (create/update or delete)
 * @return the validation errors found
 * @throws IOException if the file cannot be read
 */
protected List<ValidationErrorInfo> validate(String file, List<T> list,
        MasterDataImportProcessType processType) throws IOException {
    List<ValidationErrorInfo> result = new ArrayList<>();

    // Check the column count of every CSV line.
    CsvConfig config = createCsvConfig();
    List<String[]> lines = Csv.load(newInputStream(file), SystemConfig.getValue(Constants.KEY_CSV_ENCODING),
            config, new StringArrayListHandler());
    int expectedCount = getCsvColumnCount();
    String msgInvalidColumnCount = Messages.getMessage(ApplicationMessageCode.ERROR_INVALID_COLUMN_COUNT)
            .getMessage();
    IntStream.range(0, lines.size()).forEach(i -> {
        int rowNum = i + 1;
        if (lines.get(i).length != expectedCount) {
            result.add(new ValidationErrorInfo(rowNum, null, null, msgInvalidColumnCount));
        }
    });
    if (!result.isEmpty()) {
        return result;
    }

    // Select the bean-validation groups for the process type.
    Class<?>[] validationGroups;
    switch (processType) {
    case CREATE_OR_UPDATE:
        validationGroups = new Class<?>[] { Default.class, CreateGroup.class };
        break;
    case DELETE:
        validationGroups = new Class<?>[] { Default.class };
        break;
    default:
        validationGroups = new Class<?>[] { Default.class };
    }
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    IntStream.range(0, list.size()).forEach(i -> {
        int rowNum = i + 1;
        Set<ConstraintViolation<T>> violations = validator.validate(list.get(i), validationGroups);
        List<ValidationErrorInfo> infoList = violations.stream()
                .map(v -> new ValidationErrorInfo(rowNum, toViewName(v.getPropertyPath().toString()),
                        v.getInvalidValue(), v.getMessage()))
                .collect(Collectors.toList());
        result.addAll(infoList);
    });
    return result;
}