List of usage examples for java.util.BitSet
public BitSet(int nbits)
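Note: the examples on this page all construct instances through the public BitSet(int nbits) constructor; the argument is only an initial size hint, since a BitSet grows on demand. A minimal sketch before the full examples (the variable names are illustrative, not taken from the source files below):

    // Minimal sketch: the int argument is a size hint; unset bits read as false.
    BitSet flags = new BitSet(4);
    flags.set(0);                    // turn bit 0 on
    flags.set(2, true);              // explicit boolean form
    boolean b = flags.get(1);        // false: bit 1 was never set
    int count = flags.cardinality(); // 2: number of bits currently set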
From source file:com.opengamma.analytics.financial.model.volatility.smile.fitting.SABRSurfaceFittingTest.java
@Test
public void doIt() {
    final double[] maturities = new double[] { 5, 1, 10, 15, 1, 5 };
    final double[] tenors = new double[] { 5, 5, 10, 15, 1, 10 };
    final double[] forwards = new double[] { 0.0424, 0.025513, 0.046213, 0.04405, 0.010482, 0.04443 };
    final double[] atmVols = new double[] { 0.23845, 0.36995, 0.18745, 0.162, 0.7332, 0.2177 };
    final int n = maturities.length;
    Validate.isTrue(n == tenors.length && n == forwards.length && n == atmVols.length);
    final double[] moneynessSigma = new double[] { -2, -1, -0.5, -0.25, 0, 0.25, 0.5, 1, 2 };
    final double[][] vols = new double[n][];
    vols[0] = new double[] { 0, 0.27, 0.253, 0.247, 0.23845, 0.238, 0.236, 0.233, 0.226 };
    vols[1] = new double[] { 0.653, 0.442, 0.396, 0.382, 0.36995, 0.367, 0.363, 0.363, 0.36 };
    vols[2] = new double[] { 0.25, 0.214, 0.2, 0.194, 0.18745, 0.186, 0.183, 0.179, 0.171 };
    vols[3] = new double[] { 0.224, 0.19, 0.175, 0.17, 0.162, 0.161, 0.158, 0.154, 0.15 };
    vols[4] = new double[] { 0, 0, 0.847, 0.776, 0.7332, 0.718, 0.707, 0.702, 0.701 };
    vols[5] = new double[] { 0.284, 0.247, 0.231, 0.225, 0.2177, 0.217, 0.213, 0.209, 0.207 };
    final double[] alpha = new double[n];
    final double[] beta = new double[n];
    final double[] nu = new double[n];
    final double[] rho = new double[n];
    final double[] start = new double[] { 0.3, 0.9, 0.3, 0.0 };
    for (int i = 0; i < n; i++) {
        int m = 0;
        for (int j = 0; j < vols[i].length; j++) {
            if (vols[i][j] > 0.0) {
                m++;
            }
        }
        final EuropeanVanillaOption[] options = new EuropeanVanillaOption[m];
        final BlackFunctionData[] data = new BlackFunctionData[m];
        final double[] errors = new double[m];
        int p = 0;
        for (int j = 0; j < vols[i].length; j++) {
            if (vols[i][j] > 0.0) {
                options[p] = new EuropeanVanillaOption(
                        forwards[i] * Math.exp(atmVols[i] * Math.sqrt(maturities[i]) * moneynessSigma[j]),
                        maturities[i], true);
                data[p] = new BlackFunctionData(forwards[i], 1, vols[i][j]);
                errors[p] = 0.001;
                p++;
            }
        }
        final LeastSquareResultsWithTransform result = FITTER.getFitResult(options, data, errors, start,
                new BitSet(4));
        final DoubleMatrix1D params = result.getModelParameters();
        alpha[i] = params.getEntry(0);
        beta[i] = params.getEntry(1);
        nu[i] = params.getEntry(2);
        rho[i] = params.getEntry(3);
        // System.out.print(alpha[i] + "\t" + beta[i] + "\t" + nu[i] + "\t" + rho[i] + "\t");
        // for (int j = 0; j < m; j++) {
        //     System.out.print("\t" + strikes[j]);
        // }
        // System.out.print("\n");
        // System.out.print("\t\t\t\t");
        // for (int j = 0; j < m; j++) {
        //     double sabrVol = sabr.impliedVolatility(forwards[i], alpha[i], beta[i], nu[i], rho[i],
        //             strikes[j], maturities[i]);
        //     System.out.print("\t" + sabrVol);
        // }
        // System.out.print("\n");
    }
}
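The empty new BitSet(4) passed to FITTER.getFitResult above appears to act as a mask over the four SABR parameters (alpha, beta, nu, rho), with a set bit presumably holding the corresponding parameter fixed at its start value. That reading is an assumption about the OpenGamma fitter, not something the snippet itself confirms. Under that assumption, holding beta fixed would look like:

    // Hypothetical variant of the call above: hold beta (index 1) fixed.
    // The mask semantics are an assumption about the fitter, not confirmed here.
    BitSet fixed = new BitSet(4);
    fixed.set(1); // beta
    final LeastSquareResultsWithTransform fixedBetaResult =
            FITTER.getFitResult(options, data, errors, start, fixed);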
From source file:com.fimagena.filepicker.FilePickerFragment.java
@Override
public void onInflate(Activity activity, AttributeSet attrs, Bundle savedInstanceState) {
    super.onInflate(activity, attrs, savedInstanceState);

    if ((savedInstanceState != null) && savedInstanceState.containsKey(KEY_PARAMS)) {
        mParams = (BitSet) savedInstanceState.getSerializable(KEY_PARAMS);
        mStartPath = new File(savedInstanceState.getString(KEY_CURRENT_PATH));
    } else {
        // read xml custom-attributes
        TypedArray attrArray = activity.obtainStyledAttributes(attrs, R.styleable.FilePickerFragment);
        mParams = new BitSet(4);
        mParams.set(SELECT_FILE, attrArray.getBoolean(R.styleable.FilePickerFragment_select_file, false));
        mParams.set(SELECT_DIR, attrArray.getBoolean(R.styleable.FilePickerFragment_select_dir, false));
        mParams.set(ALLOW_CREATE_DIR,
                attrArray.getBoolean(R.styleable.FilePickerFragment_allow_dir_create, false));
        mParams.set(ALLOW_MULTIPLE_SELECT,
                attrArray.getBoolean(R.styleable.FilePickerFragment_allow_multiple, false));
        if (!mParams.get(SELECT_FILE) && !mParams.get(SELECT_DIR))
            mParams.set(SELECT_FILE, true);

        if (attrArray.hasValue(R.styleable.FilePickerFragment_start_path))
            mStartPath = new File(attrArray.getText(R.styleable.FilePickerFragment_start_path).toString());
        else
            mStartPath = Environment.getExternalStorageDirectory();
        attrArray.recycle();
    }
}
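The fragment above uses a BitSet as a compact set of named boolean options. A minimal standalone sketch of that pattern (the flag indices here are illustrative, not the library's actual constants):

    // Sketch of the flag-set pattern; the constant values are illustrative.
    final int SELECT_FILE = 0;
    final int SELECT_DIR = 1;

    BitSet params = new BitSet(2);
    params.set(SELECT_FILE, true);
    params.set(SELECT_DIR, false);

    // Default rule from the fragment above: if neither mode is enabled,
    // fall back to file selection.
    if (!params.get(SELECT_FILE) && !params.get(SELECT_DIR)) {
        params.set(SELECT_FILE, true);
    }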
From source file:org.apache.hadoop.mapreduce.lib.input.TestKeyValueTextInputFormat.java
public void testFormat() throws Exception {
    Job job = new Job(defaultConf);
    Path file = new Path(workDir, "test.txt");

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        JobContext jobContext = new JobContext(job.getConfiguration(), new JobID());
        List<InputSplit> splits = format.getSplits(jobContext);
        LOG.debug("splitting: got = " + splits.size());
        TaskAttemptContext context = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());

        // check each split
        BitSet bits = new BitSet(length);
        for (InputSplit split : splits) {
            LOG.debug("split= " + split);
            RecordReader<Text, Text> reader = format.createRecordReader(split, context);
            Class readerClass = reader.getClass();
            assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class,
                    readerClass);
            reader.initialize(split, context);
            try {
                int count = 0;
                while (reader.nextKeyValue()) {
                    int v = Integer.parseInt(reader.getCurrentValue().toString());
                    LOG.debug("read " + v);
                    if (bits.get(v)) {
                        LOG.warn("conflict with " + v + " in split " + split + " at " + reader.getProgress());
                    }
                    assertFalse("Key in multiple partitions.", bits.get(v));
                    bits.set(v);
                    count++;
                }
                LOG.debug("split=" + split + " count=" + count);
            } finally {
                reader.close();
            }
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
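This test, and several of the Hadoop tests below, share the same BitSet idiom: one bit per expected key, get/set to assert each key appears in exactly one split, and cardinality() to assert no key was missed. A distilled, self-contained sketch of the idiom (the key array stands in for what a RecordReader would produce):

    import java.util.BitSet;
    import java.util.stream.IntStream;

    // Distilled sketch of the exactly-once check shared by these Hadoop tests.
    public class ExactlyOnceCheck {
        public static void main(String[] args) {
            int length = 100;
            int[] keys = IntStream.range(0, length).toArray(); // stand-in for reader output

            BitSet bits = new BitSet(length);
            for (int v : keys) {
                if (bits.get(v)) {
                    throw new AssertionError("Key in multiple partitions: " + v);
                }
                bits.set(v); // mark key v as seen
            }
            // every expected key was seen exactly once
            if (bits.cardinality() != length) {
                throw new AssertionError("Some keys in no partition.");
            }
            System.out.println("all " + length + " keys seen exactly once");
        }
    }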
From source file:org.apache.hadoop.mapred.TestCombineSequenceFileInputFormat.java
@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(conf);
    Reporter reporter = Reporter.NULL;

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create a file with various lengths
    createFiles(length, numFiles, random);

    // create a combine split for the files
    InputFormat<IntWritable, BytesWritable> format =
            new CombineSequenceFileInputFormat<IntWritable, BytesWritable>();
    IntWritable key = new IntWritable();
    BytesWritable value = new BytesWritable();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got = " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check each split
        BitSet bits = new BitSet(length);
        RecordReader<IntWritable, BytesWritable> reader = format.getRecordReader(split, job, reporter);
        try {
            while (reader.next(key, value)) {
                assertFalse("Key in multiple partitions.", bits.get(key.get()));
                bits.set(key.get());
            }
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
From source file:com.amazonaws.services.dynamodbv2.replication.impl.BitSetMultiRegionCheckpoint.java
/**
 * Constructs a BitSetMultiRegionCheckpoint configured to require acks for every region/table pair specified
 * in the supplied ReplicationConfiguration. It is associated with the update identified by its sequence
 * number.
 *
 * @param configuration
 *            ReplicationConfiguration containing the region/table pairs from which this
 *            BitSetMultiRegionCheckpoint will require acks
 * @param sequenceNumber
 *            The unique identifier of the update to which this {@link BitSetMultiRegionCheckpoint}
 *            corresponds
 * @param createdTime
 *            The time at which the user created the update and sent it to DynamoDB
 */
public BitSetMultiRegionCheckpoint(final ReplicationConfiguration configuration, final String sequenceNumber,
        final String createdTime) {
    this.sequenceNumber = sequenceNumber;

    // Map each region/table pair to a unique bit index
    int index = 0;
    bitMap = new HashMap<String, Integer>();
    latencyPerRegion = new HashMap<String, HashMap<String, Long>>();
    for (final String region : configuration.getRegions()) {
        for (final String table : configuration.getTables(region)) {
            bitMap.put(getBitMapKey(region, table), index++);
        }
        latencyPerRegion.put(region, new HashMap<String, Long>());
    }
    cardinality = index;
    bitSet = new BitSet(cardinality);
    if (createdTime != null) {
        this.createdTime = ISO8601Utils.parse(createdTime).getTime();
    } else {
        this.createdTime = new Date().getTime();
    }
    latency = Long.MAX_VALUE;
}
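The constructor above only builds the region/table-to-bit-index map; the ack-handling side of the class is not shown. A hypothetical sketch of how such a method might use the fields built above (ack and isComplete are assumptions, not methods from the snippet):

    // Hypothetical sketch; 'ack' and 'isComplete' are not shown in the source above.
    void ack(String region, String table) {
        Integer index = bitMap.get(getBitMapKey(region, table));
        if (index != null) {
            bitSet.set(index); // record that this region/table pair has acked
        }
    }

    boolean isComplete() {
        return bitSet.cardinality() == cardinality; // all region/table pairs acked
    }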
From source file:org.apache.hadoop.mapred.TestTextInputFormat.java
public void testFormat() throws Exception {
    JobConf job = new JobConf();
    Path file = new Path(workDir, "test.txt");

    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        TextInputFormat format = new TextInputFormat();
        format.configure(job);
        LongWritable key = new LongWritable();
        Text value = new Text();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            InputSplit[] splits = format.getSplits(job, numSplits);
            LOG.debug("splitting: got = " + splits.length);
            if (length == 0) {
                assertEquals("Files of length 0 are not returned from FileInputFormat.getSplits().", 1,
                        splits.length);
                assertEquals("Empty file length == 0", 0, splits[0].getLength());
            }

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                LOG.debug("split[" + j + "]= " + splits[j]);
                RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], job, reporter);
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        int v = Integer.parseInt(value.toString());
                        LOG.debug("read " + v);
                        if (bits.get(v)) {
                            LOG.warn("conflict with " + v + " in split " + j + " at position "
                                    + reader.getPos());
                        }
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits[j] + " count=" + count);
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }
    }
}
From source file:com.joliciel.jochre.graphics.ShapeFillerImpl.java
BitSet fillBitSet(Shape shape, BitSet bitset, int neighbourBirthCount) {
    // Note: the body uses the class constant NEIGHBOUR_COUNT_BIRTH as the threshold;
    // the neighbourBirthCount parameter is not referenced.
    BitSet newBitSet = new BitSet(bitset.size());
    int baseIndex = 0;
    for (int y = 0; y < shape.getHeight(); y++) {
        for (int x = 0; x < shape.getWidth(); x++) {
            int index = baseIndex + x;
            if (bitset.get(index)) {
                newBitSet.set(index);
            } else {
                // count the "on" pixels among the up-to-8 neighbours in the
                // row-major grid, guarding the edges of the shape
                int surroundingCount = 0;
                if (y > 0) {
                    if (x > 0)
                        surroundingCount += bitset.get(index - (shape.getWidth() + 1)) ? 1 : 0;
                    surroundingCount += bitset.get(index - (shape.getWidth())) ? 1 : 0;
                    if (x < shape.getWidth() - 1)
                        surroundingCount += bitset.get(index - (shape.getWidth() - 1)) ? 1 : 0;
                }
                if (x > 0)
                    surroundingCount += bitset.get(index - 1) ? 1 : 0;
                if (x < shape.getWidth() - 1)
                    surroundingCount += bitset.get(index + 1) ? 1 : 0;
                if (y < shape.getHeight() - 1) {
                    if (x > 0)
                        surroundingCount += bitset.get(index + (shape.getWidth() - 1)) ? 1 : 0;
                    surroundingCount += bitset.get(index + (shape.getWidth())) ? 1 : 0;
                    if (x < shape.getWidth() - 1)
                        surroundingCount += bitset.get(index + (shape.getWidth() + 1)) ? 1 : 0;
                }
                // if at least NEIGHBOUR_COUNT_BIRTH of the 8 surrounding pixels are on,
                // assume this one should be on
                if (surroundingCount >= NEIGHBOUR_COUNT_BIRTH)
                    newBitSet.set(index);
            }
        }
        baseIndex += shape.getWidth();
    }
    return newBitSet;
}
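fillBitSet applies a Game-of-Life-style "birth" rule over a row-major pixel grid: an off pixel turns on when enough of its eight neighbours are on. A compact standalone version of the same rule without the Shape wrapper (the method name and threshold parameter are illustrative):

    // Standalone sketch of the birth rule on a width*height grid stored
    // row-major in a BitSet; names and threshold are illustrative.
    static BitSet fill(BitSet src, int width, int height, int threshold) {
        BitSet out = new BitSet(width * height);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int index = y * width + x;
                if (src.get(index)) {
                    out.set(index); // on pixels stay on
                    continue;
                }
                int neighbours = 0;
                for (int dy = -1; dy <= 1; dy++) {
                    for (int dx = -1; dx <= 1; dx++) {
                        if (dx == 0 && dy == 0) continue;
                        int nx = x + dx, ny = y + dy;
                        if (nx >= 0 && nx < width && ny >= 0 && ny < height
                                && src.get(ny * width + nx)) {
                            neighbours++;
                        }
                    }
                }
                if (neighbours >= threshold) {
                    out.set(index); // birth: enough neighbours are on
                }
            }
        }
        return out;
    }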
From source file:net.solarnetwork.node.control.modbus.toggle.ModbusToggler.java
private synchronized Boolean setValue(Boolean desiredValue) throws IOException {
    final BitSet bits = new BitSet(1);
    bits.set(0, desiredValue);
    log.info("Setting {} value to {}", controlId, desiredValue);
    final Integer[] addresses = new Integer[] { address };
    return performAction(new ModbusConnectionAction<Boolean>() {
        @Override
        public Boolean doWithConnection(ModbusConnection conn) throws IOException {
            return conn.writeDiscreetValues(addresses, bits);
        }
    });
}
From source file:cascading.tap.hadoop.ZipInputFormatTest.java
public void testSplits() throws Exception {
    JobConf job = new JobConf();
    FileSystem currentFs = FileSystem.get(job);

    Path file = new Path(workDir, "test.zip");
    Reporter reporter = Reporter.NULL;

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);
    FileInputFormat.setInputPaths(job, file);

    for (int entries = 1; entries < MAX_ENTRIES; entries += random.nextInt(MAX_ENTRIES / 10) + 1) {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        ZipOutputStream zos = new ZipOutputStream(byteArrayOutputStream);
        long length = 0;

        LOG.debug("creating; zip file with entries = " + entries);

        // for each entry in the zip file
        for (int entryCounter = 0; entryCounter < entries; entryCounter++) {
            // construct zip entries splitting MAX_LENGTH between entries
            long entryLength = MAX_LENGTH / entries;
            ZipEntry zipEntry = new ZipEntry("/entry" + entryCounter + ".txt");
            zipEntry.setMethod(ZipEntry.DEFLATED);
            zos.putNextEntry(zipEntry);
            for (length = entryCounter * entryLength; length < (entryCounter + 1) * entryLength; length++) {
                zos.write(Long.toString(length).getBytes());
                zos.write("\n".getBytes());
            }
            zos.flush();
            zos.closeEntry();
        }
        zos.flush();
        zos.close();

        currentFs.delete(file, true);
        OutputStream outputStream = currentFs.create(file);
        byteArrayOutputStream.writeTo(outputStream);
        outputStream.close();

        ZipInputFormat format = new ZipInputFormat();
        format.configure(job);
        LongWritable key = new LongWritable();
        Text value = new Text();
        InputSplit[] splits = format.getSplits(job, 100);

        BitSet bits = new BitSet((int) length);
        for (int j = 0; j < splits.length; j++) {
            LOG.debug("split[" + j + "]= " + splits[j]);
            RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], job, reporter);
            try {
                int count = 0;
                while (reader.next(key, value)) {
                    int v = Integer.parseInt(value.toString());
                    LOG.debug("read " + v);
                    if (bits.get(v))
                        LOG.warn("conflict with " + v + " in split " + j + " at position " + reader.getPos());
                    assertFalse("key in multiple partitions.", bits.get(v));
                    bits.set(v);
                    count++;
                }
                LOG.debug("splits[" + j + "]=" + splits[j] + " count=" + count);
            } finally {
                reader.close();
            }
        }
        assertEquals("some keys in no partition.", length, bits.cardinality());
    }
}
From source file:org.apache.hadoop.mapred.TestCombineTextInputFormat.java
@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(defaultConf);

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;
    createFiles(length, numFiles, random);

    // create a combined split for the files
    CombineTextInputFormat format = new CombineTextInputFormat();
    LongWritable key = new LongWritable();
    Text value = new Text();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got = " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        RecordReader<LongWritable, Text> reader = format.getRecordReader(split, job, voidReporter);
        try {
            int count = 0;
            while (reader.next(key, value)) {
                int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                if (bits.get(v)) {
                    LOG.warn("conflict with " + v + " at position " + reader.getPos());
                }
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.info("splits=" + split + " count=" + count);
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}