List of usage examples for the java.util.BitSet constructor
private BitSet(long[] words)
From source file:edu.tsinghua.lumaqq.ui.jobs.DownloadCustomHeadJob.java
/**
 * Initializes this job before the download starts: resets all progress
 * state and registers this job as a QQ protocol listener.
 *
 * @param m the main shell this job runs under
 */
@Override
public void prepare(MainShell m) {
    super.prepare(m);
    // Reset progress tracking for a fresh download run.
    downloading = false;
    finishCount = -1;
    fragmentChecker = new BitSet(256);
    latestFragmentTime = System.currentTimeMillis();
    // Start receiving protocol events for this job.
    main.getClient().addQQListener(this);
}
From source file:de.blizzy.documentr.search.GetVisibleBranchDocIdsTask.java
@Override public BitSet call() throws IOException { List<String> branches = Lists .newArrayList(permissionEvaluator.getBranchesForPermission(authentication, Permission.VIEW)); if (!branches.isEmpty()) { Collections.sort(branches); BooleanQuery allBranchesQuery = new BooleanQuery(); for (String projectAndBranch : branches) { String projectName = StringUtils.substringBefore(projectAndBranch, "/"); //$NON-NLS-1$ String branchName = StringUtils.substringAfter(projectAndBranch, "/"); //$NON-NLS-1$ TermQuery projectQuery = new TermQuery(new Term(PageIndex.PROJECT, projectName)); TermQuery branchQuery = new TermQuery(new Term(PageIndex.BRANCH, branchName)); BooleanQuery projectAndBranchQuery = new BooleanQuery(); projectAndBranchQuery.add(projectQuery, BooleanClause.Occur.MUST); projectAndBranchQuery.add(branchQuery, BooleanClause.Occur.MUST); allBranchesQuery.add(projectAndBranchQuery, BooleanClause.Occur.SHOULD); }/*from www .j a v a 2 s . c o m*/ AbstractDocIdsCollector collector = new AllDocIdsCollector(); searcher.search(allBranchesQuery, collector); return collector.getDocIds(); } else { return new BitSet(1); } }
From source file:org.apache.hadoop.mapred.TestSequenceFileInputFormat.java
/**
 * Round-trip test for SequenceFileInputFormat: writes sequence files of many
 * randomly chosen lengths, splits each one several ways, and verifies that
 * every key is read exactly once across all splits.
 */
public void testFormat() throws Exception {
    JobConf job = new JobConf(conf);
    FileSystem fs = FileSystem.getLocal(conf);
    Path dir = new Path(System.getProperty("test.build.data", ".") + "/mapred");
    Path file = new Path(dir, "test.seq");
    Reporter reporter = Reporter.NULL;
    // Random seed chosen per run; all lengths/splits derive from it.
    int seed = new Random().nextInt();
    //LOG.info("seed = "+seed);
    Random random = new Random(seed);
    fs.delete(dir, true);
    FileInputFormat.setInputPaths(job, dir);
    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        //LOG.info("creating; entries = " + length);
        // create a file with length entries
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, file, IntWritable.class,
                BytesWritable.class);
        try {
            for (int i = 0; i < length; i++) {
                IntWritable key = new IntWritable(i);
                byte[] data = new byte[random.nextInt(10)];
                random.nextBytes(data);
                BytesWritable value = new BytesWritable(data);
                writer.append(key, value);
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        InputFormat<IntWritable, BytesWritable> format = new SequenceFileInputFormat<IntWritable, BytesWritable>();
        IntWritable key = new IntWritable();
        BytesWritable value = new BytesWritable();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
            //LOG.info("splitting: requesting = " + numSplits);
            InputSplit[] splits = format.getSplits(job, numSplits);
            //LOG.info("splitting: got = " + splits.length);

            // check each split; one bit per key detects duplicate reads
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                RecordReader<IntWritable, BytesWritable> reader = format.getRecordReader(splits[j], job, reporter);
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        // if (bits.get(key.get())) {
                        //     LOG.info("splits["+j+"]="+splits[j]+" : " + key.get());
                        //     LOG.info("@"+reader.getPos());
                        // }
                        assertFalse("Key in multiple partitions.", bits.get(key.get()));
                        bits.set(key.get());
                        count++;
                    }
                    //LOG.info("splits["+j+"]="+splits[j]+" count=" + count);
                } finally {
                    reader.close();
                }
            }
            // every key must have been seen in exactly one split
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }
    }
}
From source file:org.apache.hadoop.record.TestRecordWritable.java
/**
 * Round-trip test for record-typed sequence files: writes files of many
 * randomly chosen lengths using RecInt/RecBuffer records, splits each file
 * several ways, and verifies that every key is read exactly once.
 */
public void testFormat() throws Exception {
    JobConf job = new JobConf(conf);
    FileSystem fs = FileSystem.getLocal(conf);
    Path dir = new Path(System.getProperty("test.build.data", ".") + "/mapred");
    Path file = new Path(dir, "test.seq");
    // Random seed chosen per run; all lengths/splits derive from it.
    int seed = new Random().nextInt();
    //LOG.info("seed = "+seed);
    Random random = new Random(seed);
    fs.delete(dir, true);
    FileInputFormat.setInputPaths(job, dir);
    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        // create a file with length entries
        SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, file, RecInt.class, RecBuffer.class);
        try {
            for (int i = 0; i < length; i++) {
                RecInt key = new RecInt();
                key.setData(i);
                byte[] data = new byte[random.nextInt(10)];
                random.nextBytes(data);
                RecBuffer value = new RecBuffer();
                value.setData(new Buffer(data));
                writer.append(key, value);
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        InputFormat<RecInt, RecBuffer> format = new SequenceFileInputFormat<RecInt, RecBuffer>();
        RecInt key = new RecInt();
        RecBuffer value = new RecBuffer();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
            InputSplit[] splits = format.getSplits(job, numSplits);

            // check each split; one bit per key detects duplicate reads
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                RecordReader<RecInt, RecBuffer> reader = format.getRecordReader(splits[j], job, Reporter.NULL);
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        assertFalse("Key in multiple partitions.", bits.get(key.getData()));
                        bits.set(key.getData());
                        count++;
                    }
                } finally {
                    reader.close();
                }
            }
            // every key must have been seen in exactly one split
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }
    }
}
From source file:Exec.java
/** * Description of the Method//from w w w.ja v a 2 s .c o m * * @param command * Description of the Parameter * @param input * Description of the Parameter * @param successCode * Description of the Parameter * @param timeout * Description of the Parameter * @param lazy * Description of the Parameter * @return Description of the Return Value */ public static ExecResults execOptions(String[] command, String input, int successCode, int timeout, boolean lazy) { Process child = null; ByteArrayOutputStream output = new ByteArrayOutputStream(); ByteArrayOutputStream errors = new ByteArrayOutputStream(); ExecResults results = new ExecResults(command[0], input, successCode, timeout); BitSet interrupted = new BitSet(1); boolean lazyQuit = false; ThreadWatcher watcher; try { // start the command child = Runtime.getRuntime().exec(command); // get the streams in and out of the command InputStream processIn = child.getInputStream(); InputStream processError = child.getErrorStream(); OutputStream processOut = child.getOutputStream(); // start the clock running if (timeout > 0) { watcher = new ThreadWatcher(child, interrupted, timeout); new Thread(watcher).start(); } // Write to the child process' input stream if ((input != null) && !input.equals("")) { try { processOut.write(input.getBytes()); processOut.flush(); processOut.close(); } catch (IOException e1) { results.setThrowable(e1); } } // Read from the child process' output stream // The process may get killed by the watcher at any time int c = 0; try { while (true) { if (interrupted.get(0) || lazyQuit) { break; } // interrupted c = processIn.read(); if (c == -1) { break; } // end of stream output.write(c); if (lazy && (processIn.available() < 1)) { lazyQuit = true; } // if lazy and nothing then quit (after at least one read) } processIn.close(); } catch (IOException e2) { results.setThrowable(e2); } finally { if (interrupted.get(0)) { results.setInterrupted(); } results.setOutput(output.toString()); } // Read from the child 
process' error stream // The process may get killed by the watcher at any time try { while (true) { if (interrupted.get(0) || lazyQuit) { break; } // interrupted c = processError.read(); if (c == -1) { break; } // end of stream output.write(c); if (lazy && (processError.available() < 1)) { lazyQuit = true; } // if lazy and nothing then quit (after at least one read) } processError.close(); } catch (IOException e3) { results.setThrowable(e3); } finally { if (interrupted.get(0)) { results.setInterrupted(); } results.setErrors(errors.toString()); } // wait for the return value of the child process. if (!interrupted.get(0) && !lazyQuit) { int returnCode = child.waitFor(); results.setReturnCode(returnCode); if (returnCode != successCode) { results.setError(ExecResults.BADRETURNCODE); } } } catch (InterruptedException i) { results.setInterrupted(); } catch (Throwable t) { results.setThrowable(t); } finally { if (child != null) { child.destroy(); } } return (results); }
From source file:org.mycore.imagetiler.MCRImageTest.java
/**
 * Tests {@link MCRImage#tile()} with various images provided by {@link #setUp()}.
 * Verifies that both tile-event callbacks fire, that the tile store and the
 * IView file are created, and that the tile counts recorded in
 * imageinfo.xml, the metadata props, and the calculated count all agree.
 * @throws Exception if tiling process fails
 */
@Test
public void testTiling() throws Exception {
    for (final Map.Entry<String, String> entry : pics.entrySet()) {
        final File file = new File(entry.getValue());
        final String derivateID = "derivateID";
        final String imagePath = "imagePath/" + FilenameUtils.getName(entry.getValue());
        final MCRImage image = new MCRMemSaveImage(file.toPath(), derivateID, imagePath);
        image.setTileDir(tileDir);
        final BitSet events = new BitSet(2); //pre and post event
        image.tile(new MCRTileEventHandler() {

            @Override
            public void preImageReaderCreated() {
                events.flip(0);
            }

            @Override
            public void postImageReaderCreated() {
                events.flip(1);
            }

        });
        assertTrue("preImageReaderCreated() was not called", events.get(0));
        assertTrue("postImageReaderCreated() was not called", events.get(1));
        assertTrue("Tile directory is not created.", Files.exists(tileDir));
        final Path iviewFile = MCRImage.getTiledFile(tileDir, derivateID, imagePath);
        assertTrue("IView File is not created:" + iviewFile, Files.exists(iviewFile));
        final MCRTiledPictureProps props = MCRTiledPictureProps.getInstanceFromFile(iviewFile);
        final int tilesCount;
        try (final ZipFile iviewImage = new ZipFile(iviewFile.toFile())) {
            // one archive entry is imageinfo.xml itself, not a tile
            tilesCount = iviewImage.size() - 1;
            ZipEntry imageInfoXML = iviewImage.getEntry(MCRTiledPictureProps.IMAGEINFO_XML);
            DocumentBuilder documentBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
            Document imageInfo = documentBuilder.parse(iviewImage.getInputStream(imageInfoXML));
            String hAttr = Objects.requireNonNull(imageInfo.getDocumentElement().getAttribute("height"));
            String wAttr = Objects.requireNonNull(imageInfo.getDocumentElement().getAttribute("width"));
            String zAttr = Objects.requireNonNull(imageInfo.getDocumentElement().getAttribute("zoomLevel"));
            String tAttr = Objects.requireNonNull(imageInfo.getDocumentElement().getAttribute("tiles"));
            assertTrue("height must be positive: " + hAttr, Integer.parseInt(hAttr) > 0);
            assertTrue("width must be positive: " + wAttr, Integer.parseInt(wAttr) > 0);
            assertTrue("zoomLevel must be zero or positive: " + zAttr, Integer.parseInt(zAttr) >= 0);
            int iTiles = Integer.parseInt(tAttr);
            assertEquals(tilesCount, iTiles);
        }
        assertEquals(entry.getKey() + ": Metadata tile count does not match stored tile count.",
                props.getTilesCount(), tilesCount);
        final int x = props.width;
        final int y = props.height;
        assertEquals(entry.getKey() + ": Calculated tile count does not match stored tile count.",
                MCRImage.getTileCount(x, y), tilesCount);
    }
}
From source file:org.apache.openjpa.jdbc.sql.JoinSet.java
/**
 * Iterator over joins that prepares them for SQL translation.
 *
 * Performs a breadth-first traversal of the join graph, emitting each join
 * oriented in traversal direction, so that the resulting order is valid for
 * SQL generation. The {@code seen} BitSet is packed: the first
 * {@code _graph.size()} bits mark tables already joined to; the remaining
 * bits (via {@link #getSeenIndex}) mark individual joins already queued.
 * The sorted result is cached in {@code _sorted} for subsequent calls.
 */
public Iterator joinIterator() {
    // trivial cases: nothing to order, or already ordered
    if (_size < 2)
        return iterator();
    if (_sorted != null)
        return _sorted.iterator();

    List sorted = new ArrayList(_size);
    LinkedList queue = new LinkedList();
    BitSet seen = new BitSet(_graph.size() * _graph.size() + _graph.size());

    // traverse graph
    Node n;
    int idx, sidx;
    for (int i = 0; i < _graph.size(); i++) {
        // seed queue with next set of disconnected joins
        for (n = (Node) _graph.get(i); n != null; n = n.next) {
            sidx = getSeenIndex(n.join);
            if (!seen.get(sidx)) {
                seen.set(sidx);
                queue.add(n);
            }
        }
        if (queue.isEmpty())
            continue;

        // traverse from those joins to reachables (BFS over the queue)
        while (!queue.isEmpty()) {
            n = (Node) queue.removeFirst();

            // don't repeat a join to a table we've already joined, but
            // do traverse through it in the graph (the first indexes of
            // the seen bitset are reserved for joined-to tables)
            idx = (n.forward) ? n.join.getIndex2() : n.join.getIndex1();
            if (!seen.get(idx)) {
                // reverse the join when we reached it against its direction
                sorted.add((n.forward) ? n.join : n.join.reverse());
                seen.set(idx);
            }
            // enqueue every not-yet-seen join adjacent to the reached table
            for (n = (Node) _graph.get(idx); n != null; n = n.next) {
                sidx = getSeenIndex(n.join);
                if (!seen.get(sidx)) {
                    seen.set(sidx);
                    queue.add(n);
                }
            }
        }
    }
    // cache for later calls
    _sorted = sorted;
    return _sorted.iterator();
}
From source file:org.apache.hadoop.mapred.TestKeyValueTextInputFormat.java
/**
 * Round-trip test for KeyValueTextInputFormat: writes tab-separated
 * key/value text files of many randomly chosen lengths, splits each file
 * several ways, and verifies reader/key/value classes plus that every
 * value is read exactly once across all splits.
 */
public void testFormat() throws Exception {
    JobConf job = new JobConf();
    Path file = new Path(workDir, "test.txt");

    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;

    // Random seed chosen per run; all lengths/splits derive from it.
    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {
        LOG.debug("creating; entries = " + length);

        // create a file with length entries ("2i<TAB>i" per line)
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        format.configure(job);
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            InputSplit[] splits = format.getSplits(job, numSplits);
            LOG.debug("splitting: got = " + splits.length);

            // check each split; one bit per value detects duplicate reads
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                LOG.debug("split[" + j + "]= " + splits[j]);
                RecordReader<Text, Text> reader = format.getRecordReader(splits[j], job, reporter);
                Class readerClass = reader.getClass();
                assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class,
                        readerClass);
                Text key = reader.createKey();
                Class keyClass = key.getClass();
                Text value = reader.createValue();
                Class valueClass = value.getClass();
                assertEquals("Key class is Text.", Text.class, keyClass);
                assertEquals("Value class is Text.", Text.class, valueClass);
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        int v = Integer.parseInt(value.toString());
                        LOG.debug("read " + v);
                        if (bits.get(v)) {
                            LOG.warn("conflict with " + v + " in split " + j + " at position " + reader.getPos());
                        }
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits[j] + " count=" + count);
                } finally {
                    reader.close();
                }
            }
            // every value must have been seen in exactly one split
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }
    }
}
From source file:org.efaps.admin.datamodel.attributetype.BitEnumType.java
/**
 * Converts an integer bit mask into a {@link BitSet}: bit {@code i} of the
 * returned set is set exactly when bit {@code i} of {@code _int} is 1
 * (negative values yield their 32-bit two's-complement pattern).
 *
 * @param _int integer the BitSet is wanted for
 * @return BitSet representing the given integer
 */
public static BitSet getBitSet(final int _int) {
    final BitSet bitSet = new BitSet(Integer.SIZE);
    // Walk the set bits directly instead of round-tripping through a
    // binary String and reversing it; a fresh BitSet is already all-zero,
    // so only 1-bits need to be written.
    int remaining = _int;
    while (remaining != 0) {
        bitSet.set(Integer.numberOfTrailingZeros(remaining));
        remaining &= remaining - 1; // clear the lowest set bit
    }
    return bitSet;
}
From source file:org.pentaho.di.trans.steps.enhanced.jsoninput.JsonInput.java
@Override protected boolean init() { data.rownr = 1L;//from w w w . j a va 2 s. c om data.nrInputFields = meta.getInputFields().length; data.repeatedFields = new BitSet(data.nrInputFields); // Take care of variable substitution for (int i = 0; i < data.nrInputFields; i++) { JsonInputField field = meta.getInputFields()[i]; field.setPath(environmentSubstitute(field.getPath())); if (field.isRepeated()) { data.repeatedFields.set(i); } } try { // Init a new JSON reader createReader(); } catch (KettleException e) { logError(e.getMessage()); return false; } return true; }