Usage examples for java.util.Arrays.fill

public static void fill(Object[] a, Object val)

The signature above is the Object[] overload; the excerpts below also exercise the primitive overloads (fill(char[], char), fill(byte[], byte), fill(int[], int), fill(long[], long), fill(float[], float), fill(double[], double)), all of which assign the given value to every element of the array.
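Before the project excerpts, a minimal self-contained sketch of the call; the class name ArraysFillDemo and the variable names are illustrative only and do not come from any of the projects below:

import java.util.Arrays;

public class ArraysFillDemo {
    public static void main(String[] args) {
        // Object[] overload: every slot ends up referencing the same value.
        String[] labels = new String[4];
        Arrays.fill(labels, "n/a");
        System.out.println(Arrays.toString(labels));   // [n/a, n/a, n/a, n/a]

        // Primitive overload, as used by most of the excerpts below.
        byte[] payload = new byte[8];
        Arrays.fill(payload, (byte) 1);
        System.out.println(Arrays.toString(payload));  // [1, 1, 1, 1, 1, 1, 1, 1]

        // Range overload: fill indices 2 (inclusive) through 5 (exclusive).
        int[] counts = new int[6];
        Arrays.fill(counts, 2, 5, -1);
        System.out.println(Arrays.toString(counts));   // [0, 0, -1, -1, -1, 0]
    }
}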
From source file:com.fatwire.dta.sscrawler.reporting.reporters.PageletTimingsStatisticsReporter.java
@Override
public synchronized void endCollecting() {
    if (pagesDone.get() == 0)
        return;
    report.startReport();
    final DecimalFormat df = new DecimalFormat("###0.00");
    final DecimalFormat lf = new DecimalFormat("##0");
    int l = 0;
    for (final String s : stats.keySet()) {
        l = Math.max(s.length(), l);
    }
    l += 3;
    final char[] blank = new char[l];
    Arrays.fill(blank, ' ');
    report.addHeader("pagename" + new String(blank, 0, l - 8), "invocations", "average", "min", "max",
            "standard-deviation");
    for (final Map.Entry<String, SynchronizedSummaryStatistics> e : stats.entrySet()) {
        final SynchronizedSummaryStatistics s = e.getValue();
        final String n = e.getKey() + new String(blank, 0, l - e.getKey().length())
                + (Boolean.FALSE.equals(cached.get(e.getKey())) ? " * " : " ");
        report.addRow(n, Long.toString(s.getN()), df.format(s.getMean()), lf.format(s.getMin()),
                lf.format(s.getMax()), df.format(s.getStandardDeviation()));
    }
    final String n = "total" + new String(blank, 0, l - "total".length());
    report.addRow(n, Long.toString(total.getN()), df.format(total.getMean()), lf.format(total.getMin()),
            lf.format(total.getMax()), df.format(total.getStandardDeviation()));
    report.finishReport();
}
From source file:com.linkedin.r2.filter.compression.stream.TestStreamingCompression.java
@Test
public void testSnappyCompressor()
        throws IOException, InterruptedException, CompressionException, ExecutionException {
    StreamingCompressor compressor = new SnappyCompressor(_executor);
    final byte[] origin = new byte[BUF_SIZE];
    Arrays.fill(origin, (byte) 'a');

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    SnappyFramedOutputStream snappy = new SnappyFramedOutputStream(out);
    IOUtils.write(origin, snappy);
    snappy.close();
    byte[] compressed = out.toByteArray();

    testCompress(compressor, origin, compressed);
    testDecompress(compressor, origin, compressed);
    testCompressThenDecompress(compressor, origin);
}
From source file:com.hadoop.compression.lzo.LzopInputStream.java
/**
 * Read bytes, update checksums, return first four bytes as an int, first
 * byte read in the MSB.
 */
private static int readHeaderItem(InputStream in, byte[] buf, int len, Adler32 adler, CRC32 crc32)
        throws IOException {
    int ret = readInt(in, buf, len);
    adler.update(buf, 0, len);
    crc32.update(buf, 0, len);
    // clear the shared scratch buffer after it has been consumed
    Arrays.fill(buf, (byte) 0);
    return ret;
}
From source file:io.confluent.kafkarest.tools.ProducerPerformance.java
public ProducerPerformance(String baseUrl, String topic, long iterations, int recordsPerIteration,
        long iterationsPerSec, int recordSize) throws Exception {
    super(iterations * recordsPerIteration);
    this.iterations = iterations;
    this.iterationsPerSec = iterationsPerSec;
    this.recordsPerIteration = recordsPerIteration;
    this.bytesPerIteration = recordsPerIteration * recordSize;

    /* setup perf test */
    targetUrl = baseUrl + "/topics/" + topic;
    byte[] payload = new byte[recordSize];
    Arrays.fill(payload, (byte) 1);
    TopicProduceRecord record = new BinaryTopicProduceRecord(payload);
    TopicProduceRecord[] records = new TopicProduceRecord[recordsPerIteration];
    Arrays.fill(records, record);
    TopicProduceRequest request = new TopicProduceRequest();
    request.setRecords(Arrays.asList(records));
    requestEntity = new ObjectMapper().writeValueAsBytes(request);
    requestEntityLength = Integer.toString(requestEntity.length);
    buffer = new byte[1024 * 1024];
}
From source file:bide.core.par.TunePar.java
public TunePar(String forLorG, int ite, int tuneSize, int tuneGroup) {
    tuneInitSize = 2.38;
    tuneStepSize = 0.001;
    if (forLorG == Setting.LOCAL) {
        this.noTunePar = SavePar.NO_LOCAL_TUNE;
    } else {
        this.noTunePar = SavePar.NO_GLOBAL_TUNE;
    }
    tunePar = new double[noTunePar];
    for (int i = 0; i < noTunePar; i++) {
        Arrays.fill(tunePar, tuneInitSize);
    }
    this.tuneSize = tuneSize;
    this.tuneGroup = tuneGroup;
    // minAccRate = 1.0 / tuneSize;
    tuneGroup1 = tuneGroup - 1;
    accept = new double[noTunePar][tuneGroup];
}
From source file:com.opengamma.analytics.financial.model.volatility.smile.fitting.SmileModelFitterTest.java
public SmileModelFitterTest() {
    final VolatilityFunctionProvider<T> model = getModel();
    final T data = getModelData();
    final double[] strikes = new double[] { 0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.07, 0.1 };
    final int n = strikes.length;
    _noisyVols = new double[n];
    _errors = new double[n];
    _cleanVols = model.getVolatilityFunction(F, strikes, TIME_TO_EXPIRY).evaluate(data);
    Arrays.fill(_errors, 1e-4);
    for (int i = 0; i < n; i++) {
        _noisyVols[i] = _cleanVols[i] + UNIFORM.nextDouble() * _errors[i];
    }
    _fitter = getFitter(F, strikes, TIME_TO_EXPIRY, _cleanVols, _errors, model);
    _nosiyFitter = getFitter(F, strikes, TIME_TO_EXPIRY, _noisyVols, _errors, model);
}
From source file:com.acciente.oacc.encryptor.jasypt.LegacyJasyptPasswordEncryptor.java
private byte[] getCleanedBytes(char[] password) {
    final char[] normalizedChars = TextNormalizer.getInstance().normalizeToNfc(password);
    final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(CharBuffer.wrap(normalizedChars));
    final byte[] byteArray = new byte[byteBuffer.remaining()];
    byteBuffer.get(byteArray);
    // zero the buffer's backing array so the encoded password does not linger there
    Arrays.fill(byteBuffer.array(), (byte) 0);
    return byteArray;
}
From source file:gov.nih.nci.caarray.plugins.illumina.ProbeHandler.java
/**
 * {@inheritDoc}
 */
@Override
@SuppressWarnings("PMD.EmptyCatchBlock")
public void parseFirstRow(String[] values, int lineNumber) {
    colIndex = new int[BgxDesignHandler.Header.values().length];
    Arrays.fill(colIndex, -1);
    for (int i = 0; i < values.length; i++) {
        String col = values[i].toUpperCase(Locale.getDefault());
        try {
            BgxDesignHandler.Header h = BgxDesignHandler.Header.valueOf(col);
            colIndex[h.ordinal()] = i;
        } catch (IllegalArgumentException e) {
            // unknown column
        }
    }
}
From source file:net.myrrix.common.collection.FastByIDFloatMap.java
public FastByIDFloatMap(int size) {
    Preconditions.checkArgument(size >= 0, "size must be at least 0");
    Preconditions.checkArgument(size < MAX_SIZE, "size must be less than " + MAX_SIZE);
    int hashSize = RandomUtils.nextTwinPrime((int) (DEFAULT_LOAD_FACTOR * size) + 1);
    keys = new long[hashSize];
    Arrays.fill(keys, KEY_NULL);
    values = new float[hashSize];
    Arrays.fill(values, VALUE_NULL);
}
From source file:gda.util.exafs.Element.java
static Double[] setArrayFromFileParser(int maxAtomicNumber, Double defaultValue, TokenFileParser p,
        int column) {
    Double[] arrayToSet = new Double[maxAtomicNumber];
    Arrays.fill(arrayToSet, defaultValue);
    List<String> atomicNumberFromFile = p.getColumn(0);
    List<String> valuesFromFile = p.getColumn(column);
    for (int i = 0; i < atomicNumberFromFile.size(); i++) {
        // ignore first line
        if (i == 0)
            continue;
        int atomicNumber = Integer.parseInt(atomicNumberFromFile.get(i));
        if (atomicNumber <= maxAtomicNumber)
            arrayToSet[atomicNumber - 1] = Double.parseDouble(valuesFromFile.get(i));
    }
    return arrayToSet;
}