Example usage for java.util Arrays fill

List of usage examples for java.util Arrays fill

Introduction

On this page you can find usage examples for java.util Arrays.fill.

Prototype

public static void fill(Object[] a, Object val) 

Source Link

Document

Assigns the specified Object reference to each element of the specified array of Objects.

Usage

From source file:hudson.remoting.PipeTest.java

/**
 * Writes 256 rounds of data to the pipe's output side — each round fills the
 * buffer with the round index (truncated to a byte) and sends the first 256
 * bytes — then closes the stream.
 *
 * @param pipe the pipe whose output stream is written to and closed
 * @throws IOException if writing to or closing the stream fails
 */
private static void write(Pipe pipe) throws IOException {
    // try-with-resources guarantees the stream is closed even if a write fails
    // (the original only closed on the success path).
    try (OutputStream os = pipe.getOut()) {
        byte[] buf = new byte[384];
        for (int i = 0; i < 256; i++) {
            Arrays.fill(buf, (byte) i);
            // Only the first 256 bytes of the 384-byte buffer are sent each round.
            os.write(buf, 0, 256);
        }
    }
}

From source file:mitm.common.security.crypto.PBEncryptionOutputStream.java

/**
 * Initializes the cipher for password-based encryption: creates a random
 * salt, derives a secret key from the stored password via the configured
 * PBE algorithm, and prepares the cipher in ENCRYPT_MODE. The password
 * array is overwritten once the key spec has been built so the plaintext
 * characters do not linger in memory.
 *
 * @throws CryptoException if the algorithm, key spec, padding, or key is invalid
 * @throws NoSuchProviderRuntimeException if the security provider is missing
 */
private void init() throws CryptoException {
    try {
        SecurityFactory securityFactory = SecurityFactoryFactory.getSecurityFactory();

        SecretKeyFactory keyFactory = securityFactory.createSecretKeyFactory(algorithm);

        RandomGenerator randomGenerator = securityFactory.createRandomGenerator();

        salt = randomGenerator.generateRandom(saltLength);

        PBEKeySpec keySpec = new PBEKeySpec(password, salt, iterationCount);

        /*
         * Clear out the password; the key spec holds its own copy.
         * NOTE(review): keySpec.clearPassword() could also be called after
         * generateSecret to wipe that copy too — confirm desired policy.
         */
        Arrays.fill(password, '#');

        Key secretKey = keyFactory.generateSecret(keySpec);

        cipher = securityFactory.createCipher(algorithm);

        cipher.init(Cipher.ENCRYPT_MODE, secretKey);
    } catch (NoSuchProviderException e) {
        throw new NoSuchProviderRuntimeException(e);
    } catch (NoSuchAlgorithmException | InvalidKeySpecException | NoSuchPaddingException
            | InvalidKeyException e) {
        // All remaining checked causes map to the same domain exception;
        // multi-catch replaces four identical catch arms.
        throw new CryptoException(e);
    }
}

From source file:fc.extensions.itext.smart.SmartTable.java

/**
 * Lazily creates the underlying iText table on first use: defaults the
 * column-width scale to equal widths when none was configured, builds the
 * table, and applies the default cell settings.
 *
 * Fix: if {@code createTable} throws, the original fell through and
 * dereferenced the still-null {@code table}, turning a logged
 * {@code DocumentException} into a {@code NullPointerException}. We now
 * return early, leaving {@code created == false} so a later call may retry.
 */
private void create() {
    if (!created) {
        if (columnWidthsScale == null || columnWidthsScale.length == 0) {
            // No scale configured: give every column equal weight.
            columnWidthsScale = new int[columns];
            Arrays.fill(columnWidthsScale, 1);
        }
        try {
            table = this.writer.createTable(columns, position.getWidth(), columnWidthsScale);
        } catch (DocumentException ex) {
            Logger.getLogger(SmartTable.class.getName()).log(Level.SEVERE, null, ex);
            // Table was not created; bail out instead of NPE-ing below.
            return;
        }
        table.getDefaultCell().setBorderWidth(borderWidth);
        table.getDefaultCell().setFixedHeight(rowFixedHeight);
        table.getDefaultCell().setPadding(0.2F);
        created = true;
    }
}

From source file:com.weblyzard.lib.string.nilsimsa.Nilsimsa.java

/**
 * Computes the 32-byte (256-bit) Nilsimsa digest from the accumulator.
 *
 * The expected total number of trigram hits is derived from the processed
 * character count; bit {@code i} of the digest is set when accumulator
 * bucket {@code i} exceeded the per-bucket mean.
 *
 * @return the digest for the current Nilsimsa object.
 */
public byte[] digest() {
    int total = 0;
    // A freshly allocated byte[] is already zero-filled in Java, so the
    // original's explicit Arrays.fill(digest, (byte) 0) was redundant.
    byte[] digest = new byte[32];

    // Total trigram count as a function of input length (0 for count < 3).
    if (count == 3) {
        total = 1;
    } else if (count == 4) {
        total = 4;
    } else if (count > 4) {
        total = 8 * count - 28;
    }
    // Mean number of hits per accumulator bucket (integer division).
    int threshold = total / 256;

    // Set bit (i & 7) of byte (i >> 3) for every above-average bucket.
    for (int i = 0; i < 256; i++) {
        if (acc[i] > threshold) {
            digest[i >> 3] += 1 << (i & 7);
        }
    }
    ArrayUtils.reverse(digest);
    return digest;
}

From source file:gedi.atac.Atac.java

/**
 * Builds, for every promotor region and every condition, a per-position
 * profile of read-end (insertion) counts and writes one CSV file per
 * condition named "promotor.&lt;condition name&gt;.csv" — one row per
 * promotor: the region's data string followed by tab-separated counts.
 *
 * NOTE(review): the +/-4 bp offset applied to read start/stop positions
 * looks like the standard ATAC-seq Tn5 insertion-site correction — confirm.
 *
 * @param storage the aligned reads (per-condition counts)
 * @param promotors the promotor regions to profile
 * @throws Exception if processing or file output fails
 */
public static void analyzePromotors(GenomicRegionStorage<AlignedReadsData> storage,
        GenomicRegionStorage<String> promotors) throws Exception {

    int offset = 4;
    int numCond = storage.getRandomRecord().getNumConditions();

    ProcessorSource<String> p = new ProcessorSource<String>();
    p.setProgress(new ConsoleProgress());
    p.process(storage, ReferenceSequenceConversion.none, promotors, new GenomicRegionProcessor() {

        // Per-condition, per-position counts for the region being processed.
        int[][] tn5;
        // One output file per condition.
        LineOrientedFile[] out = new LineOrientedFile[numCond];

        @Override
        public void begin(ProcessorContext context) throws Exception {
            // Open one CSV per condition, named from the storage metadata.
            for (int i = 0; i < out.length; i++) {
                out[i] = new LineOrientedFile("promotor."
                        + storage.getMetaData().get("conditions").getEntry(i).getEntry("name").asString()
                        + ".csv");
                out[i].startWriting();
            }
        }

        @Override
        public void beginRegion(MutableReferenceGenomicRegion<?> region, ProcessorContext context)
                throws Exception {
            // Reuse the count buffer when the region length matches the last
            // one; otherwise reallocate. Counts start at zero either way.
            if (tn5 == null || tn5[0].length != region.getRegion().getTotalLength())
                tn5 = new int[numCond][region.getRegion().getTotalLength()];
            else
                for (int i = 0; i < tn5.length; i++)
                    Arrays.fill(tn5[i], 0);
        }

        @Override
        public void read(MutableReferenceGenomicRegion<?> region,
                MutableReferenceGenomicRegion<AlignedReadsData> read, ProcessorContext context)
                throws Exception {

            // Each read contributes its per-condition count at the
            // offset-corrected positions of both of its ends.
            for (int c = 0; c < numCond; c++) {
                int v = read.getData().getTotalCountForConditionInt(c, ReadCountMode.All);
                addValue(region,
                        GenomicRegionPosition.Start.position(read.getReference(), read.getRegion(), offset), c,
                        v);
                addValue(region,
                        GenomicRegionPosition.Stop.position(read.getReference(), read.getRegion(), -offset), c,
                        v);
            }
        }

        // Accumulates value at the given genomic position if it lies inside
        // the region, after translating to region-relative coordinates and
        // flipping for minus-strand regions.
        private void addValue(MutableReferenceGenomicRegion<?> region, int position, int condition, int value) {
            if (region.getRegion().contains(position)) {
                position = region.getRegion().induce(position);
                if (region.getReference().getStrand() == Strand.Minus)
                    position = region.getRegion().getTotalLength() - 1 - position;
                tn5[condition][position] += value;
            }
        }

        @Override
        public void endRegion(MutableReferenceGenomicRegion<?> region, ProcessorContext context)
                throws Exception {

            // One row per region: region id, then one count per position.
            for (int i = 0; i < out.length; i++) {
                out[i].writef(region.getData().toString());
                for (int p = 0; p < tn5[i].length; p++)
                    out[i].writef("\t%d", tn5[i][p]);
                out[i].writeLine();
            }

        }

        @Override
        public void end(ProcessorContext context) throws Exception {
            for (int i = 0; i < out.length; i++) {
                out[i].finishWriting();
            }
        }
    });

}

From source file:com.my.diff.MyDKCSVFileSource.java

/**
 * Maps each requested column name to its index within
 * {@code _headerColumnNames}.
 *
 * @param names_ the column names to look up; may be null
 * @return the corresponding header indices (compacted of any -1
 *         placeholders), or null when {@code names_} is null
 * @throws RuntimeException if a name is not present in the header
 */
private int[] getHeaderColumnNameIndices(String[] names_) {
    if (names_ == null)
        return null;
    int[] indices = new int[names_.length];
    Arrays.fill(indices, -1);
    // The original tracked a second write index j, but it always equaled i:
    // a failed lookup throws before any slot is skipped.
    for (int i = 0; i < names_.length; i++) {
        int foundAt = ArrayUtils.indexOf(_headerColumnNames, names_[i]);
        if (foundAt < 0)
            throw new RuntimeException(String.format("no value in _headerColumnNames for %s", names_[i]));
        indices[i] = foundAt;
    }
    // Defensive: strips any remaining -1 fill values (none can remain given
    // the loop above either fills every slot or throws).
    return DKArrayUtil.compactFill(indices, -1);
}

From source file:com.kegare.caveworld.world.ChunkProviderCaveworld.java

/**
 * Generates the Caveworld chunk at the given chunk coordinates: starts from
 * a completely solid column of the biome's terrain block, lets the cave /
 * ravine / structure generators carve air out of it, then caps the playable
 * height with bedrock and clears everything above it.
 *
 * @param chunkX chunk X coordinate
 * @param chunkZ chunk Z coordinate
 * @return the freshly generated chunk
 */
@Override
public Chunk provideChunk(int chunkX, int chunkZ) {
    // Deterministic per-chunk seed so regeneration reproduces the same chunk.
    random.setSeed(chunkX * 341873128712L + chunkZ * 132897987541L);

    int worldHeight = worldObj.provider.getActualHeight();
    BiomeGenBase biome = worldObj.getWorldChunkManager().getBiomeGenAt(chunkX * 16, chunkZ * 16);
    // 16 x 16 x 256 = 65536 block/metadata slots for the full chunk column.
    Block[] blocks = new Block[65536];
    byte[] metadata = new byte[65536];
    ICaveBiome entry = CaveworldAPI.getCaveBiome(biome);
    Block block = entry.getTerrainBlock().getBlock();
    int meta = entry.getTerrainBlock().getMetadata();

    // Solid fill first; the generators below carve air (null) out of it.
    Arrays.fill(blocks, block);
    Arrays.fill(metadata, (byte) meta);

    if (Config.generateCaves) {
        caveGenerator.func_151539_a(this, worldObj, chunkX, chunkZ, blocks);
    }

    if (Config.generateExtremeCaves && worldHeight > 150) {
        extremeCaveGenerator.func_151539_a(this, worldObj, chunkX, chunkZ, blocks);
    }

    if (Config.generateRavine) {
        ravineGenerator.func_151539_a(this, worldObj, chunkX, chunkZ, blocks);
    }

    if (Config.generateExtremeRavine) {
        extremeRavineGenerator.func_151539_a(this, worldObj, chunkX, chunkZ, blocks);
    }

    if (generateStructures) {
        if (Config.generateMineshaft) {
            mineshaftGenerator.func_151539_a(this, worldObj, chunkX, chunkZ, blocks);
        }

        if (Config.generateStronghold) {
            strongholdGenerator.func_151539_a(this, worldObj, chunkX, chunkZ, blocks);
        }
    }

    int i;

    for (int x = 0; x < 16; ++x) {
        for (int z = 0; z < 16; ++z) {
            // Index of the bottom (y = 0) block of this (x, z) column;
            // layout is presumably (x * 16 + z) * 256 + y — confirm against
            // the Chunk(World, Block[], byte[], int, int) constructor.
            i = (x * 16 + z) * 256;

            // Cap the playable height: bedrock on top, terrain just below.
            blocks[i + worldHeight - 1] = Blocks.bedrock;
            blocks[i + worldHeight - 2] = block;

            if (!entry.getTerrainBlock().equals(entry.getTopBlock())) {
                // Replace any block directly beneath an air pocket with the
                // biome's top block (cave-floor surface).
                for (int y = 1; y < worldHeight - 4; ++y) {
                    if (blocks[i + y] != null && blocks[i + y + 1] == null) {
                        blocks[i + y] = entry.getTopBlock().getBlock();
                        metadata[i + y] = (byte) entry.getTopBlock().getMetadata();
                    }
                }
            }

            // Everything above the playable height stays empty.
            for (int y = 255; y >= worldHeight; --y) {
                blocks[i + y] = null;
            }
        }
    }

    // Zero the metadata wherever carving replaced the terrain block; the
    // whole pass is skipped when the terrain metadata is already 0.
    for (i = 0; meta != 0 && i < blocks.length; ++i) {
        if (blocks[i] != block) {
            metadata[i] = 0;
        }
    }

    Chunk chunk = new Chunk(worldObj, blocks, metadata, chunkX, chunkZ);
    byte[] biomes = new byte[256];

    // The whole chunk belongs to the single biome sampled above.
    Arrays.fill(biomes, (byte) biome.biomeID);

    chunk.setBiomeArray(biomes);
    chunk.resetRelightChecks();

    try {
        provideChunkTFC(chunk);
    } catch (NoSuchMethodError e) {
        // Optional TerraFirmaCraft hook; absent when TFC is not installed.
    }

    return chunk;
}

From source file:brut.androlib.res.decoder.ARSCDecoder.java

/**
 * Reads one type chunk (Header.TYPE_TYPE) from the ARSC stream: registers a
 * new ResType with the current package, records the flags offset, consumes
 * the following config chunks, and finally back-fills resource specs that
 * never appeared in any config.
 *
 * @return the ResType that was read
 * @throws AndrolibException if the resource data is inconsistent
 * @throws IOException if reading from the underlying stream fails
 */
private ResType readType() throws AndrolibException, IOException {
    checkChunkType(Header.TYPE_TYPE);
    byte id = mIn.readByte();
    // Skip the 3 bytes following the type id — presumably reserved/padding
    // in the chunk header; verify against the ARSC format spec.
    mIn.skipBytes(3);
    int entryCount = mIn.readInt();

    // Assume every spec is missing until a config chunk provides it.
    mMissingResSpecs = new boolean[entryCount];
    Arrays.fill(mMissingResSpecs, true);

    if (mFlagsOffsets != null) {
        mFlagsOffsets.add(new FlagsOffset(mCountIn.getCount(), entryCount));
    }
    /* flags */mIn.skipBytes(entryCount * 4);

    // The type id occupies bits 16-23 of the resource id.
    mResId = (0xff000000 & mResId) | id << 16;
    mType = new ResType(mTypeNames.getString(id - 1), mResTable, mPkg);
    mPkg.addType(mType);

    while (nextChunk().type == Header.TYPE_CONFIG) {
        readConfig();
    }

    addMissingResSpecs();

    return mType;
}

From source file:demo.util.model.BoundedFifoBuffer.java

/**
 * Empties the buffer: resets the read/write cursors to the start and
 * releases all stored element references so they may be garbage collected.
 */
@Override
public void clear() {
    start = 0;
    end = 0;
    full = false;
    Arrays.fill(elements, null); // drop references held by the backing array
}

From source file:gnu.trove.map.custom_hash.TObjectByteCustomHashMap.java

/**
 * Creates a new <code>TObjectByteCustomHashMap</code> instance with a prime
 * capacity at or near the requested initial capacity, using the supplied
 * hashing strategy and load factor.
 *
 * @param strategy used to hash and compare the keys of this map.
 * @param initialCapacity used to find a prime capacity for the table.
 * @param loadFactor used to calculate the threshold over which
 * rehashing takes place.
 * @param noEntryValue the value used to represent null.
 */
public TObjectByteCustomHashMap(HashingStrategy<K> strategy, int initialCapacity, float loadFactor,
        byte noEntryValue) {

    super(strategy, initialCapacity, loadFactor);

    no_entry_value = noEntryValue;
    // A freshly allocated byte[] already holds zeros, so the values table
    // only needs re-filling when the "no entry" marker is non-zero.
    if (noEntryValue != 0) {
        Arrays.fill(_values, noEntryValue);
    }
}