Example usage for java.util Arrays fill

Introduction

This page lists usage examples for java.util.Arrays.fill.

Prototype

public static void fill(Object[] a, Object val) 

Document

Assigns the specified Object reference to each element of the specified array of Objects.
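
A minimal, self-contained sketch of this overload (the class name DemoArraysFill and the "pending" placeholder value are illustrative only, not taken from the examples below):

import java.util.Arrays;

public class DemoArraysFill {
    public static void main(String[] args) {
        // Allocate four String slots, all initially null.
        String[] labels = new String[4];

        // Assign the same reference to every element.
        Arrays.fill(labels, "pending");

        // Prints: [pending, pending, pending, pending]
        System.out.println(Arrays.toString(labels));
    }
}

The primitive overloads used in several of the examples below (for example fill(char[], char) and fill(double[], double)) work the same way: every element of the array is set to the given value.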

Usage

From source file:com.amazonaws.mturk.cmd.GrantQualificationRequests.java

public void grantQualRequestsInFile(String fileName, Integer defaultValue) throws IOException {
    if (fileName == null) {
        throw new IllegalArgumentException("fileName must not be null");
    }

    String[] qualReqs = super.getFieldValuesFromFile(fileName, QUAL_REQUEST_TO_APPROVE_COLUMN);

    String[] values = super.getFieldValuesFromFile(fileName, QUAL_REQUEST_TO_APPROVE_VALUE_COLUMN);

    Integer[] int_values = null;
    if (values.length > 0) {
        int_values = new Integer[values.length];
        for (int i = 0; i < values.length; i++) {

            try {
                int_values[i] = Integer.valueOf(values[i]);
            } catch (NumberFormatException e) {
                int_values[i] = defaultValue;
            }
        }
    } else {
        int_values = new Integer[qualReqs.length];
        Arrays.fill(int_values, defaultValue);
    }
    grantQualRequests(qualReqs, int_values);
}

From source file:com.gargoylesoftware.htmlunit.html.HtmlSerializerTest.java

/**
 * Test {@link HtmlSerializer#cleanUp(String)}.
 */
@Test
public void cleanUpPerformanceWhitespace() {
    final HtmlSerializer serializer = new HtmlSerializer();

    final int length = 80_000;
    final char[] charArray = new char[length];
    Arrays.fill(charArray, ' ');
    charArray[0] = 'a';
    charArray[length - 1] = 'a';
    final String text = new String(charArray);

    final long time = System.currentTimeMillis();
    serializer.cleanUp(text);

    final long runTime = System.currentTimeMillis() - time;
    assertTrue("cleanUp() took too much time", runTime < 1_000);
}

From source file:com.cloudera.oryx.common.collection.LongObjectMap.java

/**
 * Creates a new map whose capacity can accommodate the given number of entries without
 * rehashing.
 * 
 * @param size desired capacity
 * @throws IllegalArgumentException if size is less than 0 or is at least {@code MAX_SIZE}
 */
public LongObjectMap(int size) {
    Preconditions.checkArgument(size >= 0, "size must be at least 0");
    Preconditions.checkArgument(size < MAX_SIZE, "size must be less than " + MAX_SIZE);
    int hashSize = RandomUtils.nextTwinPrime((int) (LOAD_FACTOR * size) + 1);
    keys = new long[hashSize];
    Arrays.fill(keys, NULL);

    @SuppressWarnings("unchecked")
    V[] theValues = (V[]) new Object[hashSize];
    values = theValues;
}

From source file:com.almende.eve.algorithms.DAAValueBean.java

/**
 * Sets the TTL.
 *
 * @param initialTTL
 *            the initial TTL
 * @return the DAA value bean
 */
public DAAValueBean setTTL(final long initialTTL) {
    Arrays.fill(ttlArray, initialTTL);
    return this;
}

From source file:com.webcohesion.ofx4j.io.tagsoup.TestTagSoupOFXReader.java

/**
 * Tests parsing an OFX document using SAX.
 */
public void testSimpleVersion1() throws Exception {
    TagSoupOFXReader reader = new TagSoupOFXReader();
    final Map<String, String> headers = new HashMap<String, String>();
    final Stack<Map<String, Object>> aggregateStack = new Stack<Map<String, Object>>();
    TreeMap<String, Object> root = new TreeMap<String, Object>();
    aggregateStack.push(root);

    reader.setContentHandler(new DefaultHandler() {

        @Override
        public void onHeader(String name, String value) {
            LOG.debug(name + ":" + value);
            headers.put(name, value);
        }

        @Override
        public void onElement(String name, String value) {
            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + name + "=" + value);

            aggregateStack.peek().put(name, value);
        }

        @Override
        public void startAggregate(String aggregateName) {
            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + aggregateName + " {");

            TreeMap<String, Object> aggregate = new TreeMap<String, Object>();
            aggregateStack.peek().put(aggregateName, aggregate);
            aggregateStack.push(aggregate);
        }

        @Override
        public void endAggregate(String aggregateName) {
            aggregateStack.pop();

            char[] tabs = new char[aggregateStack.size() * 2];
            Arrays.fill(tabs, ' ');
            LOG.debug(new String(tabs) + "}");
        }
    });
    reader.parse(TestNanoXMLOFXReader.class.getResourceAsStream("simple.ofx"));
    assertEquals(9, headers.size());
    assertEquals(1, aggregateStack.size());
    assertSame(root, aggregateStack.pop());
}

From source file:au.org.ala.delta.translation.dist.DistItemsFileWriter.java

protected Pair<List<Integer>, List<Integer>> writeItems(int[] wordOffsets, int[] bitOffsets) {
    final int BYTES_IN_WORD = 4;
    List<Integer> itemRecords = new ArrayList<Integer>();
    List<Integer> nameLengths = new ArrayList<Integer>();
    int size = BinaryKeyFile.RECORD_LENGTH_BYTES;
    for (int offset : wordOffsets) {
        size = Math.max(size, offset);
    }
    Iterator<Item> items = _dataSet.unfilteredItems();
    while (items.hasNext()) {
        Item item = items.next();
        String description = _itemFormatter.formatItemDescription(item);
        nameLengths.add(description.length());
        byte[] bytes = new byte[(size + 1) * BYTES_IN_WORD];
        Arrays.fill(bytes, (byte) 0);

        ByteBuffer work = ByteBuffer.wrap(bytes);
        work.order(ByteOrder.LITTLE_ENDIAN);

        Iterator<IdentificationKeyCharacter> chars = _dataSet.unfilteredIdentificationKeyCharacterIterator();
        while (chars.hasNext()) {
            IdentificationKeyCharacter keyChar = chars.next();
            int charNum = keyChar.getCharacterNumber();
            if (!keyChar.getCharacterType().isText()) {
                int offset = wordOffsets[keyChar.getCharacterNumber() - 1] - 1;
                if (!(keyChar.getCharacterType() == CharacterType.UnorderedMultiState)) {
                    work.putFloat(offset * BYTES_IN_WORD, -9999.0f);
                }
                Attribute attribute = item.getAttribute(keyChar.getCharacter());
                if (attribute == null || attribute.isUnknown()) {
                    continue;
                }
                switch (keyChar.getCharacterType()) {
                case UnorderedMultiState:
                    encodeUnorderedMultistateAttribute(work, wordOffsets[charNum - 1] - 1,
                            bitOffsets[charNum - 1], keyChar, (MultiStateAttribute) attribute);

                    break;
                case OrderedMultiState:
                    encodeOrderedMultistateAttribute(work, wordOffsets[charNum - 1] - 1, keyChar,
                            (MultiStateAttribute) attribute);
                    break;
                case IntegerNumeric:
                case RealNumeric:
                    encodeNumericAttribute(work, wordOffsets[charNum - 1] - 1, keyChar,
                            (NumericAttribute) attribute);

                    break;
                }
            }

        }
        itemRecords.add(_itemsFile.writeItem(description, work));
    }
    return new Pair<List<Integer>, List<Integer>>(itemRecords, nameLengths);
}

From source file:gedi.atac.Atac.java

public static void analyzeNfkb(DiskGenomicNumericProvider rmq, GenomicRegionStorage<?> tfbs) throws Exception {

    int size = tfbs.getRandomEntry().getRegion().getTotalLength();
    double[] buff = new double[size + 200];

    tfbs.iterateReferenceGenomicRegions().forEachRemaining(rgr -> {
        if (rgr.getRegion().getTotalLength() != size || rgr.getRegion().getNumParts() != 1)
            throw new RuntimeException();

        ArrayGenomicRegion reg = rgr.getRegion().extendAll(100, 100);
        PositionNumericIterator it = rmq.iterateValues(rgr.getReference().toPlusStrand(), reg);
        while (it.hasNext())
            buff[it.nextInt() - reg.getStart()] += it.getValue(0);
        it = rmq.iterateValues(rgr.getReference().toMinusStrand(), reg);
        while (it.hasNext())
            buff[it.nextInt() - reg.getStart()] += it.getValue(0);

        System.out.printf("%s:%s", rgr.getReference().toStrandIndependent(), rgr.getRegion().toRegionString());
        for (int i = 0; i < buff.length; i++)
            System.out.printf("\t%.0f", buff[i]);
        System.out.println();
        Arrays.fill(buff, 0);
    });

}

From source file:ch.unine.vauchers.fuseerasure.codes.ReedSolomonCode.java

/**
 * This function (actually, the GF.remainder() function) will modify
 * the "inputs" parameter.
 */
@Override
public void encodeBulk(byte[][] inputs, byte[][] outputs) {
    final int stripeSize = stripeSize();
    final int paritySize = paritySize();
    assert (stripeSize == inputs.length);
    assert (paritySize == outputs.length);

    for (int i = 0; i < outputs.length; i++) {
        Arrays.fill(outputs[i], (byte) 0);
    }

    byte[][] data = new byte[stripeSize + paritySize][];

    for (int i = 0; i < paritySize; i++) {
        data[i] = outputs[i];
    }
    for (int i = 0; i < stripeSize; i++) {
        data[i + paritySize] = inputs[i];
    }

    // Compute the remainder
    GF.remainder(data, generatingPolynomial);
}

From source file:com.polytech4A.cuttingstock.core.method.LinearResolutionMethod.java

/**
 * Update current context's objective function using the solution.
 *
 * @param solution Solution used to update the function.
 */
public void updateFunction(Solution solution) {
    double[] coefficients = new double[solution.getPatterns().size()];
    Arrays.fill(coefficients, context.getSheetCost());
    function = new LinearObjectiveFunction(coefficients,
            solution.getPatterns().size() * context.getPatternCost());
}

From source file:bachelorthesis.methods.detection.bayesian.BayesianDetection.java

private double[][] offlineCpd(Value[] data) {
    int n = data.length;
    double[] Q = new double[n];
    double[] g = new double[n];
    double[] G = new double[n];
    double[][] P = new double[n][n];

    Arrays.fill(g, Math.log(1.d / (data.length + 1)));
    G[0] = g[0];
    for (int i = 1; i < G.length; i++) {
        G[i] = Math.log((Math.exp(G[i - 1]) + Math.exp(g[i])));
    }
    for (double[] array : P) {
        Arrays.fill(array, Double.NEGATIVE_INFINITY);
    }

    P[n - 1][n - 1] = gaussianObsLogLikelihood(data, n - 1, n);
    Q[n - 1] = P[n - 1][n - 1];

    for (int t = n - 2; t >= 0; t--) {
        double p_next_cp = Double.NEGATIVE_INFINITY;
        for (int s = t; s < n - 1; s++) {
            P[t][s] = gaussianObsLogLikelihood(data, t, s + 1);
            double summand = P[t][s] + Q[s + 1] + g[s + 1 - t];
            p_next_cp = Math.log((Math.exp(p_next_cp) + Math.exp(summand)));
            if (summand - p_next_cp < BAYESIAN_TRUNCATE) {
                break;
            }
        }
        P[t][n - 1] = gaussianObsLogLikelihood(data, t, n);
        double antiG;
        if (G[n - 1 - t] < -1e-15) {
            antiG = Math.log(1.d - Math.exp(G[n - 1 - t]));
        } else {
            antiG = Math.log(-G[n - 1 - t]);
        }
        Q[t] = Math.log((Math.exp(p_next_cp) + Math.exp(P[t][n - 1] + antiG)));
    }

    double[][] Pcp = new double[n - 1][n - 1];
    for (double[] array : Pcp) {
        Arrays.fill(array, Double.NEGATIVE_INFINITY);
    }
    for (int t = 0; t < n - 1; t++) {
        Pcp[0][t] = P[0][t] + Q[t + 1] + g[t] - Q[0];
        if (Double.isNaN(Pcp[0][t])) {
            Pcp[0][t] = Double.NEGATIVE_INFINITY;
        }
    }
    for (int j = 1; j < n - 1; j++) {
        for (int t = j; t < n - 1; t++) {
            double[] tmp_cond = copyOfRange(Pcp[j - 1], j - 1, t);

            tmp_cond = add(tmp_cond, getSameEntryOfAllArrays(copyOfRange(P, j, t + 1), t));
            double summand = Q[t + 1];
            tmp_cond = forEach(tmp_cond, value -> value + summand);
            tmp_cond = add(tmp_cond, copyOfRange(g, 0, t - j + 1));
            double[] negativePart = forEach(copyOfRange(Q, j, t + 1), value -> -value);
            tmp_cond = add(tmp_cond, negativePart);

            double[] tempArray = forEach(tmp_cond, value -> Math.exp(value));
            Pcp[j][t] = Math.log(sum(tempArray));
            if (Double.isNaN(Pcp[j][t])) {
                Pcp[j][t] = Double.NEGATIVE_INFINITY;
            }
        }
    }
    return Pcp;
}