List of usage examples for java.util.concurrent.ThreadLocalRandom.current()
public static ThreadLocalRandom current()
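ThreadLocalRandom.current() returns the random number generator bound to the calling thread, avoiding the contention that a single shared java.util.Random instance incurs under concurrency; it should not be seeded manually. A minimal sketch of typical use follows (the class name ThreadLocalRandomDemo and the chosen bounds are illustrative only, not taken from the examples below):

import java.util.concurrent.ThreadLocalRandom;

public class ThreadLocalRandomDemo {
    public static void main(String[] args) {
        // nextInt(origin, bound): origin is inclusive, bound is exclusive.
        int dieRoll = ThreadLocalRandom.current().nextInt(1, 7);

        // The long and double variants follow the same origin/bound convention.
        long delayMillis = ThreadLocalRandom.current().nextLong(100, 1000);
        double weight = ThreadLocalRandom.current().nextDouble(0.0, 1.0);

        System.out.println(dieRoll + " " + delayMillis + " " + weight);
    }
}

The examples below show the same pattern in real projects: bounded nextInt/nextLong calls for picking array indices and generating suffixes, and the ints/longs/doubles stream methods for producing bulk test data.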
From source file:org.neo4j.kernel.api.impl.index.LuceneSchemaIndexUniquenessVerificationIT.java
private Set<PropertyValue> randomStrings() {
    return ThreadLocalRandom.current().ints(nodesToCreate, 1, 200)
            .mapToObj(this::randomString)
            .map(PropertyValue::new)
            .collect(toSet());
}
From source file:org.xwiki.refactoring.script.RefactoringScriptService.java
/**
 * @param type the type of refactoring
 * @return an id for a job to perform the specified type of refactoring
 */
private List<String> generateJobId(String type) {
    String suffix = new Date().getTime() + "-" + ThreadLocalRandom.current().nextInt(100, 1000);
    return getJobId(type, suffix);
}
From source file:org.apache.sysml.utils.lite.BuildLiteExecution.java
public static void jmlcL2SVM() throws Exception {
    Connection conn = getConfiguredConnection();

    // Train an L2-regularized SVM on randomly generated data.
    String dml = conn.readScript("scripts/algorithms/l2-svm.dml");
    PreparedScript l2svm = conn.prepareScript(dml, new String[] { "X", "Y", "fmt", "Log" },
            new String[] { "w", "debug_str" }, false);

    double[][] trainData = new double[150][3];
    for (int i = 0; i < 150; i++) {
        int one = ThreadLocalRandom.current().nextInt(0, 101);
        int two = ThreadLocalRandom.current().nextInt(0, 101);
        int three = ThreadLocalRandom.current().nextInt(0, 101);
        double[] row = new double[] { one, two, three };
        trainData[i] = row;
    }
    l2svm.setMatrix("X", trainData);
    log.debug(displayMatrix(trainData));

    double[][] trainLabels = new double[150][1];
    for (int i = 0; i < 150; i++) {
        int one = ThreadLocalRandom.current().nextInt(1, 3);
        double[] row = new double[] { one };
        trainLabels[i] = row;
    }
    l2svm.setMatrix("Y", trainLabels);
    log.debug(displayMatrix(trainLabels));

    l2svm.setScalar("fmt", "csv");
    l2svm.setScalar("Log", "temp/l2-svm-log.csv");

    ResultVariables l2svmResults = l2svm.executeScript();
    double[][] model = l2svmResults.getMatrix("w");
    log.debug("MODEL:");
    log.debug(displayMatrix(model));
    String debugString = l2svmResults.getString("debug_str");
    log.debug("DEBUG STRING:");
    log.debug(debugString);

    // Score random test data against the trained model.
    String s = conn.readScript("scripts/algorithms/l2-svm-predict.dml");
    Map<String, String> m = new HashMap<>();
    m.put("$Y", "temp/1.csv");
    m.put("$confusion", "temp/2.csv");
    m.put("$scores", "temp/3.csv");
    PreparedScript l2svmPredict = conn.prepareScript(s, m, new String[] { "X", "Y", "w", "fmt" },
            new String[] { "scores", "confusion_mat" }, false);

    double[][] testData = new double[150][3];
    for (int i = 0; i < 150; i++) {
        int one = ThreadLocalRandom.current().nextInt(0, 101);
        int two = ThreadLocalRandom.current().nextInt(0, 101);
        int three = ThreadLocalRandom.current().nextInt(0, 101);
        double[] row = new double[] { one, two, three };
        testData[i] = row;
    }
    l2svmPredict.setMatrix("X", testData);

    double[][] testLabels = new double[150][1];
    for (int i = 0; i < 150; i++) {
        int one = ThreadLocalRandom.current().nextInt(1, 3);
        double[] row = new double[] { one };
        testLabels[i] = row;
    }
    l2svmPredict.setMatrix("Y", testLabels);
    l2svmPredict.setMatrix("w", model);
    l2svmPredict.setScalar("fmt", "csv");

    ResultVariables l2svmPredictResults = l2svmPredict.executeScript();
    double[][] scores = l2svmPredictResults.getMatrix("scores");
    log.debug("SCORES:");
    log.debug(displayMatrix(scores));
    double[][] confusionMatrix = l2svmPredictResults.getMatrix("confusion_mat");
    log.debug("CONFUSION MATRIX:");
    log.debug(displayMatrix(confusionMatrix));

    conn.close();
}
From source file:org.neo4j.kernel.api.impl.index.LuceneSchemaIndexUniquenessVerificationIT.java
private String randomString(int size) {
    return ThreadLocalRandom.current().nextBoolean()
            ? RandomStringUtils.random(size)
            : RandomStringUtils.randomAlphabetic(size);
}
From source file:org.neo4j.kernel.api.impl.index.LuceneSchemaIndexUniquenessVerificationIT.java
private Set<PropertyValue> randomLongs(long min, long max) {
    return ThreadLocalRandom.current().longs(nodesToCreate, min, max).boxed()
            .map(PropertyValue::new)
            .collect(toSet());
}
From source file:com.jaeksoft.searchlib.ClientCatalog.java
public static ClusterInstance getAnyClusterInstance(String indexName) throws SearchLibException {
    File clientDir = getClientDir(indexName);
    ClusterManager clusterManager = getClusterManager();
    String[] instanceIds = clusterManager.getClientInstances(clientDir);
    if (instanceIds == null || instanceIds.length == 0)
        return null;
    return clusterManager.getInstance(instanceIds[ThreadLocalRandom.current().nextInt(instanceIds.length)]);
}
From source file:org.apache.storm.metricstore.rocksdb.RocksDbMetricsWriter.java
private void generateUniqueStringIds() throws MetricException {
    int attempts = 0;
    while (unusedIds.isEmpty()) {
        attempts++;
        if (attempts > 100) {
            String message = "Failed to generate unique ids";
            LOG.error(message);
            throw new MetricException(message);
        }
        for (int i = 0; i < 600; i++) {
            int n = ThreadLocalRandom.current().nextInt();
            if (n == RocksDbStore.INVALID_METADATA_STRING_ID) {
                continue;
            }
            // remove any entries in the cache
            if (stringMetadataCache.contains(n)) {
                continue;
            }
            unusedIds.add(n);
        }
        // now scan all metadata and remove any matching string Ids from this list
        RocksDbKey firstPrefix = RocksDbKey.getPrefix(KeyType.METADATA_STRING_START);
        RocksDbKey lastPrefix = RocksDbKey.getPrefix(KeyType.METADATA_STRING_END);
        store.scanRange(firstPrefix, lastPrefix, (key, value) -> {
            unusedIds.remove(key.getMetadataStringId());
            return true; // process all metadata
        });
    }
}
From source file:org.neo4j.kernel.api.impl.index.LuceneSchemaIndexUniquenessVerificationIT.java
private Set<PropertyValue> randomDoubles(double min, double max) {
    return ThreadLocalRandom.current().doubles(nodesToCreate, min, max).boxed()
            .map(PropertyValue::new)
            .collect(toSet());
}
From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.sharedcache.SharedCacheUploader.java
private String getTemporaryFileName(Path path) {
    return path.getName() + "-" + ThreadLocalRandom.current().nextLong();
}
From source file:org.apache.hadoop.hbase.client.AsyncNonMetaRegionLocator.java
private void complete(TableName tableName, LocateRequest req, HRegionLocation loc, Throwable error) {
    if (error != null) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Failed to locate region in '" + tableName + "', row='"
                    + Bytes.toStringBinary(req.row) + "', locateType=" + req.locateType, error);
        }
    }
    LocateRequest toSend = null;
    TableCache tableCache = getTableCache(tableName);
    if (loc != null) {
        if (!addToCache(tableCache, loc)) {
            // someone is ahead of us.
            synchronized (tableCache) {
                tableCache.pendingRequests.remove(req);
            }
            return;
        }
    }
    synchronized (tableCache) {
        tableCache.pendingRequests.remove(req);
        if (error instanceof DoNotRetryIOException) {
            CompletableFuture<?> future = tableCache.allRequests.remove(req);
            if (future != null) {
                future.completeExceptionally(error);
            }
        }
        if (loc != null) {
            for (Iterator<Map.Entry<LocateRequest, CompletableFuture<HRegionLocation>>> iter = tableCache.allRequests
                    .entrySet().iterator(); iter.hasNext();) {
                Map.Entry<LocateRequest, CompletableFuture<HRegionLocation>> entry = iter.next();
                if (tryComplete(entry.getKey(), entry.getValue(), loc)) {
                    iter.remove();
                }
            }
        }
        if (!tableCache.allRequests.isEmpty() && tableCache.hasQuota(maxConcurrentLocateRequestPerTable)) {
            LocateRequest[] candidates = tableCache.allRequests.keySet().stream()
                    .filter(r -> !tableCache.isPending(r)).toArray(LocateRequest[]::new);
            if (candidates.length > 0) {
                // TODO: use a better algorithm to send a request which is more likely to fetch a new
                // location.
                toSend = candidates[ThreadLocalRandom.current().nextInt(candidates.length)];
                tableCache.send(toSend);
            }
        }
    }
    if (toSend != null) {
        locateInMeta(tableName, toSend);
    }
}