List of usage examples for java.nio.IntBuffer.put(IntBuffer)
public IntBuffer put(IntBuffer src)
From source file:org.jcodec.codecs.mjpeg.JpegDecoder.java
/** Corner case of putBlock */ private static void putBlock(IntBuffer result, int width, MCU block, int bx, int by, int blockH, int blockV) { IntBuffer rgb = block.getRgb24(); int bY = by * block.v * width; int bX = bx * block.h; int blockPtr = bY + bX; for (int j = 0; j < blockV; j++) { result.position(blockPtr);//from w w w .j a v a 2 s . c o m for (int i = 0; i < blockH; i++) { result.put(rgb.get()); } rgb.position(rgb.position() + (block.h - blockH)); blockPtr += width; } }
From source file:org.jcodec.codecs.mjpeg.JpegDecoder.java
/**
 * Copies a full MCU's RGB pixels into {@code result} at block coordinates
 * ({@code bx}, {@code by}), row by row.
 */
private static void putBlock(IntBuffer result, int width, MCU block, int bx, int by) {
    int blockH = block.h;
    int blockV = block.v;
    IntBuffer rgb = block.getRgb24();
    // Linear index of the block's top-left pixel in the destination image.
    int destRow = by * blockV * width + bx * blockH;
    for (int row = 0; row < blockV; row++) {
        result.position(destRow);
        int col = 0;
        while (col < blockH) {
            result.put(rgb.get());
            col++;
        }
        destRow += width;
    }
}
From source file:org.mrgeo.data.raster.RasterWritable.java
/**
 * Serializes a raster's pixel data to a raw byte array, using the raster's transfer
 * type to decide the element width. Multi-byte types are written through a
 * {@link ByteBuffer} view (big-endian, ByteBuffer's default order).
 *
 * @param raster source raster
 * @return the pixel data as bytes
 * @throws RasterWritableException if the transfer type is not one handled below
 */
private static byte[] rasterToBytes(final Raster raster) {
    final int transferType = raster.getTransferType();
    final Object elements = raster.getDataElements(raster.getMinX(), raster.getMinY(), raster.getWidth(),
            raster.getHeight(), null);
    final byte[] pixels;
    switch (transferType) {
    case DataBuffer.TYPE_BYTE:
        // Already bytes; no conversion needed.
        pixels = (byte[]) elements;
        break;
    case DataBuffer.TYPE_FLOAT: {
        final float[] data = (float[]) elements;
        pixels = new byte[data.length * RasterUtils.FLOAT_BYTES];
        ByteBuffer.wrap(pixels).asFloatBuffer().put(data);
        break;
    }
    case DataBuffer.TYPE_DOUBLE: {
        final double[] data = (double[]) elements;
        pixels = new byte[data.length * RasterUtils.DOUBLE_BYTES];
        ByteBuffer.wrap(pixels).asDoubleBuffer().put(data);
        break;
    }
    case DataBuffer.TYPE_INT: {
        final int[] data = (int[]) elements;
        pixels = new byte[data.length * RasterUtils.INT_BYTES];
        ByteBuffer.wrap(pixels).asIntBuffer().put(data);
        break;
    }
    case DataBuffer.TYPE_SHORT:
    case DataBuffer.TYPE_USHORT: {
        final short[] data = (short[]) elements;
        pixels = new byte[data.length * RasterUtils.SHORT_BYTES];
        ByteBuffer.wrap(pixels).asShortBuffer().put(data);
        break;
    }
    default:
        throw new RasterWritableException("Error trying to append raster. Bad raster data type");
    }
    return pixels;
}
From source file:org.smurn.jply.lwjgldemo.LWJGLDemo.java
/** * Fill the index buffer with the data from the PLY file. *//* w w w . j av a2 s . co m*/ private static void fillIndexBuffer(ElementReader reader, IntBuffer indexBuffer) throws IOException { // Just go though the triangles and store the indices in the buffer. Element triangle = reader.readElement(); while (triangle != null) { int[] indices = triangle.getIntList("vertex_index"); for (int index : indices) { indexBuffer.put(index); } triangle = reader.readElement(); } }
From source file:haven.Utils.java
public static IntBuffer bufcp(IntBuffer a) { a.rewind();//from ww w .j ava 2 s .co m IntBuffer ret = mkibuf(a.remaining()); ret.put(a).rewind(); return (ret); }
From source file:com.atilika.kuromoji.trie.DoubleArrayTrie.java
private void extendBuffers(int nextIndex) { int newLength = nextIndex + (int) (baseBuffer.capacity() * BUFFER_GROWTH_PERCENTAGE); ProgressLog.println("Buffers extended to " + baseBuffer.capacity() + " entries"); IntBuffer newBaseBuffer = IntBuffer.allocate(newLength); baseBuffer.rewind();// www.j a v a 2 s. co m newBaseBuffer.put(baseBuffer); baseBuffer = newBaseBuffer; IntBuffer newCheckBuffer = IntBuffer.allocate(newLength);//ByteBuffer.allocate(newLength).asIntBuffer(); checkBuffer.rewind(); newCheckBuffer.put(checkBuffer); checkBuffer = newCheckBuffer; }
From source file:org.hobbit.core.rabbit.FileStreamingTest.java
private void generateFiles(String sendInputDir) { System.out.println("Generating files..."); if (!sendInputDir.endsWith(File.separator)) { sendInputDir += File.separator; }//from ww w .j av a2 s . c om OutputStream os = null; // create first file try { os = new BufferedOutputStream(new FileOutputStream(sendInputDir + "file1.dat")); ByteBuffer buffer = ByteBuffer.allocate(4000); IntBuffer intBuffer = buffer.asIntBuffer(); int number = 0; // for (int i = 0; i < 200; ++i) { for (int j = 0; j < 1000; ++j) { intBuffer.put(number); ++number; } os.write(buffer.array()); buffer.position(0); intBuffer.position(0); // } os.flush(); } catch (Exception e) { e.printStackTrace(); Assert.fail(); } finally { IOUtils.closeQuietly(os); } // create second file try { os = new BufferedOutputStream(new FileOutputStream(sendInputDir + "file2.txt")); byte data[] = "Lorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." .getBytes(Charsets.UTF_8); for (int i = 0; i < 200; ++i) { os.write(data); } os.flush(); } catch (Exception e) { e.printStackTrace(); Assert.fail(); } finally { IOUtils.closeQuietly(os); } // create third file try { os = new BufferedOutputStream(new FileOutputStream(sendInputDir + "file3.dat")); ByteBuffer buffer = ByteBuffer.allocate(400); IntBuffer intBuffer = buffer.asIntBuffer(); Random random = new Random(); // for (int i = 0; i < 200; ++i) { for (int j = 0; j < 100; ++j) { intBuffer.put(random.nextInt()); } os.write(buffer.array()); buffer.position(0); intBuffer.position(0); // } os.flush(); } catch (Exception e) { e.printStackTrace(); Assert.fail(); } finally { IOUtils.closeQuietly(os); } }
From source file:com.metamx.druid.index.v1.IndexMerger.java
/**
 * Merges several {@code IndexableAdapter}s into a single on-disk index under {@code outDir}.
 * Works in a "v8-tmp" subdirectory through these visible phases:
 * 1) writes index.drd (version byte, merged dimension/metric names, min/max time interval);
 * 2) builds a merged value dictionary per dimension and per-index dictionary-id conversion
 *    buffers (DimValueConverter);
 * 3) walks all rows via {@code rowMergerFn}, writing timestamps, metrics and dimension
 *    columns, and filling per-index row-number conversion buffers (gaps padded with
 *    INVALID_ROW, then rewound);
 * 4) writes inverted.drd (concise bitmap per dimension value, remapped through the row
 *    conversions) and spatial.drd (an RTree for dimensions described as "spatial");
 * 5) smooshes the expected files together, writes the final index.drd via
 *    {@code createIndexDrdFile}, converts v8 to v9 into {@code outDir}, and deletes the
 *    temporary directory.
 *
 * NOTE(review): dimension names from adapters are matched via {@code dim.toLowerCase()}
 * against {@code mergedDimensions} — presumably callers lower-case merged dimension
 * names; verify at call sites. Assumes every adapter reports metrics in an order
 * consistent with {@code metricTypes}' sorted iteration — TODO confirm.
 *
 * @param indexes          adapters to merge
 * @param outDir           destination directory for the final (v9) index
 * @param progress         progress callback, ticked throughout
 * @param mergedDimensions union of dimension names across indexes
 * @param mergedMetrics    union of metric names across indexes
 * @param rowMergerFn      combines the per-index row iterables into one merged stream
 * @return {@code outDir}
 * @throws IOException on any file error, or if the temporary smoosh dir cannot be deleted
 */
private static File makeIndexFiles(final List<IndexableAdapter> indexes, final File outDir, final ProgressIndicator progress, final List<String> mergedDimensions, final List<String> mergedMetrics, final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn) throws IOException { Map<String, String> metricTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst()); for (IndexableAdapter adapter : indexes) { for (String metric : adapter.getAvailableMetrics()) { metricTypes.put(metric, adapter.getMetricType(metric)); } } final Interval dataInterval; File v8OutDir = new File(outDir, "v8-tmp"); v8OutDir.mkdirs(); /************* Main index.drd file **************/ progress.progress(); long startTime = System.currentTimeMillis(); File indexFile = new File(v8OutDir, "index.drd"); FileOutputStream fileOutputStream = null; FileChannel channel = null; try { fileOutputStream = new FileOutputStream(indexFile); channel = fileOutputStream.getChannel(); channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION })); GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy).writeToChannel(channel); GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy).writeToChannel(channel); DateTime minTime = new DateTime(Long.MAX_VALUE); DateTime maxTime = new DateTime(0l); for (IndexableAdapter index : indexes) { minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart()); maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd()); } dataInterval = new Interval(minTime, maxTime); serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime)); } finally { Closeables.closeQuietly(channel); channel = null; Closeables.closeQuietly(fileOutputStream); fileOutputStream = null; } IndexIO.checkFileSize(indexFile); log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime); /************* Setup Dim Conversions 
**************/ progress.progress(); startTime = System.currentTimeMillis(); IOPeon ioPeon = new TmpFileIOPeon(); ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size()); Map<String, Integer> dimensionCardinalities = Maps.newHashMap(); ArrayList<Map<String, IntBuffer>> dimConversions = Lists.newArrayListWithCapacity(indexes.size()); for (IndexableAdapter index : indexes) { dimConversions.add(Maps.<String, IntBuffer>newHashMap()); } for (String dimension : mergedDimensions) { final GenericIndexedWriter<String> writer = new GenericIndexedWriter<String>(ioPeon, dimension, GenericIndexed.stringStrategy); writer.open(); List<Indexed<String>> dimValueLookups = Lists.newArrayListWithCapacity(indexes.size()); DimValueConverter[] converters = new DimValueConverter[indexes.size()]; for (int i = 0; i < indexes.size(); i++) { Indexed<String> dimValues = indexes.get(i).getDimValueLookup(dimension); if (dimValues != null) { dimValueLookups.add(dimValues); converters[i] = new DimValueConverter(dimValues); } } Iterable<String> dimensionValues = CombiningIterable.createSplatted( Iterables.transform(dimValueLookups, new Function<Indexed<String>, Iterable<String>>() { @Override public Iterable<String> apply(@Nullable Indexed<String> indexed) { return Iterables.transform(indexed, new Function<String, String>() { @Override public String apply(@Nullable String input) { return (input == null) ? "" : input; } }); } }), Ordering.<String>natural().nullsFirst()); int count = 0; for (String value : dimensionValues) { value = value == null ? 
"" : value; writer.write(value); for (int i = 0; i < indexes.size(); i++) { DimValueConverter converter = converters[i]; if (converter != null) { converter.convert(value, count); } } ++count; } dimensionCardinalities.put(dimension, count); FileOutputSupplier dimOut = new FileOutputSupplier(IndexIO.makeDimFile(v8OutDir, dimension), true); dimOuts.add(dimOut); writer.close(); serializerUtils.writeString(dimOut, dimension); ByteStreams.copy(writer.combineStreams(), dimOut); for (int i = 0; i < indexes.size(); ++i) { DimValueConverter converter = converters[i]; if (converter != null) { dimConversions.get(i).put(dimension, converters[i].getConversionBuffer()); } } ioPeon.cleanup(); } log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime); /************* Walk through data sets and merge them *************/ progress.progress(); startTime = System.currentTimeMillis(); ArrayList<Iterable<Rowboat>> boats = Lists.newArrayListWithCapacity(indexes.size()); for (int i = 0; i < indexes.size(); ++i) { final IndexableAdapter adapter = indexes.get(i); final int[] dimLookup = new int[mergedDimensions.size()]; int count = 0; for (String dim : adapter.getAvailableDimensions()) { dimLookup[count] = mergedDimensions.indexOf(dim.toLowerCase()); count++; } final int[] metricLookup = new int[mergedMetrics.size()]; count = 0; for (String metric : adapter.getAvailableMetrics()) { metricLookup[count] = mergedMetrics.indexOf(metric); count++; } boats.add(new MMappedIndexRowIterable( Iterables.transform(indexes.get(i).getRows(), new Function<Rowboat, Rowboat>() { @Override public Rowboat apply(@Nullable Rowboat input) { int[][] newDims = new int[mergedDimensions.size()][]; int j = 0; for (int[] dim : input.getDims()) { newDims[dimLookup[j]] = dim; j++; } Object[] newMetrics = new Object[mergedMetrics.size()]; j = 0; for (Object met : input.getMetrics()) { newMetrics[metricLookup[j]] = met; j++; } return new Rowboat(input.getTimestamp(), 
newDims, newMetrics, input.getRowNum(), input.getDescriptions()); } }), mergedDimensions, dimConversions.get(i), i)); } Iterable<Rowboat> theRows = rowMergerFn.apply(boats); CompressedLongsSupplierSerializer timeWriter = CompressedLongsSupplierSerializer.create(ioPeon, "little_end_time", IndexIO.BYTE_ORDER); timeWriter.open(); ArrayList<VSizeIndexedWriter> forwardDimWriters = Lists.newArrayListWithCapacity(mergedDimensions.size()); for (String dimension : mergedDimensions) { VSizeIndexedWriter writer = new VSizeIndexedWriter(ioPeon, dimension, dimensionCardinalities.get(dimension)); writer.open(); forwardDimWriters.add(writer); } ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size()); for (Map.Entry<String, String> entry : metricTypes.entrySet()) { String metric = entry.getKey(); String typeName = entry.getValue(); if ("float".equals(typeName)) { metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon)); } else { ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName); if (serde == null) { throw new ISE("Unknown type[%s]", typeName); } metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde)); } } for (MetricColumnSerializer metWriter : metWriters) { metWriter.open(); } int rowCount = 0; long time = System.currentTimeMillis(); List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size()); for (IndexableAdapter index : indexes) { int[] arr = new int[index.getNumRows()]; Arrays.fill(arr, INVALID_ROW); rowNumConversions.add(IntBuffer.wrap(arr)); } final Map<String, String> descriptions = Maps.newHashMap(); for (Rowboat theRow : theRows) { progress.progress(); timeWriter.add(theRow.getTimestamp()); final Object[] metrics = theRow.getMetrics(); for (int i = 0; i < metrics.length; ++i) { metWriters.get(i).serialize(metrics[i]); } int[][] dims = theRow.getDims(); for (int i = 0; i < dims.length; ++i) { List<Integer> listToWrite = (i >= dims.length || 
dims[i] == null) ? null : Ints.asList(dims[i]); forwardDimWriters.get(i).write(listToWrite); } for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) { final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey()); for (Integer rowNum : comprisedRow.getValue()) { while (conversionBuffer.position() < rowNum) { conversionBuffer.put(INVALID_ROW); } conversionBuffer.put(rowCount); } } if ((++rowCount % 500000) == 0) { log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount, System.currentTimeMillis() - time); time = System.currentTimeMillis(); } descriptions.putAll(theRow.getDescriptions()); } for (IntBuffer rowNumConversion : rowNumConversions) { rowNumConversion.rewind(); } final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER); timeFile.delete(); OutputSupplier<FileOutputStream> out = Files.newOutputStreamSupplier(timeFile, true); timeWriter.closeAndConsolidate(out); IndexIO.checkFileSize(timeFile); for (int i = 0; i < mergedDimensions.size(); ++i) { forwardDimWriters.get(i).close(); ByteStreams.copy(forwardDimWriters.get(i).combineStreams(), dimOuts.get(i)); } for (MetricColumnSerializer metWriter : metWriters) { metWriter.close(); } ioPeon.cleanup(); log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount, System.currentTimeMillis() - startTime); /************ Create Inverted Indexes *************/ startTime = System.currentTimeMillis(); final File invertedFile = new File(v8OutDir, "inverted.drd"); Files.touch(invertedFile); out = Files.newOutputStreamSupplier(invertedFile, true); final File geoFile = new File(v8OutDir, "spatial.drd"); Files.touch(geoFile); OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true); for (int i = 0; i < mergedDimensions.size(); ++i) { long dimStartTime = System.currentTimeMillis(); String dimension = mergedDimensions.get(i); File dimOutFile = 
dimOuts.get(i).getFile(); final MappedByteBuffer dimValsMapped = Files.map(dimOutFile); if (!dimension.equals(serializerUtils.readString(dimValsMapped))) { throw new ISE("dimensions[%s] didn't equate!? This is a major WTF moment.", dimension); } Indexed<String> dimVals = GenericIndexed.read(dimValsMapped, GenericIndexed.stringStrategy); log.info("Starting dimension[%s] with cardinality[%,d]", dimension, dimVals.size()); GenericIndexedWriter<ImmutableConciseSet> writer = new GenericIndexedWriter<ImmutableConciseSet>(ioPeon, dimension, ConciseCompressedIndexedInts.objectStrategy); writer.open(); boolean isSpatialDim = "spatial".equals(descriptions.get(dimension)); ByteBufferWriter<ImmutableRTree> spatialWriter = null; RTree tree = null; IOPeon spatialIoPeon = new TmpFileIOPeon(); if (isSpatialDim) { spatialWriter = new ByteBufferWriter<ImmutableRTree>(spatialIoPeon, dimension, IndexedRTree.objectStrategy); spatialWriter.open(); tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50)); } for (String dimVal : IndexedIterable.create(dimVals)) { progress.progress(); List<Iterable<Integer>> convertedInverteds = Lists.newArrayListWithCapacity(indexes.size()); for (int j = 0; j < indexes.size(); ++j) { convertedInverteds.add(new ConvertingIndexedInts(indexes.get(j).getInverteds(dimension, dimVal), rowNumConversions.get(j))); } ConciseSet bitset = new ConciseSet(); for (Integer row : CombiningIterable.createSplatted(convertedInverteds, Ordering.<Integer>natural().nullsFirst())) { if (row != INVALID_ROW) { bitset.add(row); } } writer.write(ImmutableConciseSet.newImmutableFromMutable(bitset)); if (isSpatialDim && dimVal != null) { List<String> stringCoords = Lists.newArrayList(SPLITTER.split(dimVal)); float[] coords = new float[stringCoords.size()]; for (int j = 0; j < coords.length; j++) { coords[j] = Float.valueOf(stringCoords.get(j)); } tree.insert(coords, bitset); } } writer.close(); serializerUtils.writeString(out, dimension); ByteStreams.copy(writer.combineStreams(), 
out); ioPeon.cleanup(); log.info("Completed dimension[%s] in %,d millis.", dimension, System.currentTimeMillis() - dimStartTime); if (isSpatialDim) { spatialWriter.write(ImmutableRTree.newImmutableFromMutable(tree)); spatialWriter.close(); serializerUtils.writeString(spatialOut, dimension); ByteStreams.copy(spatialWriter.combineStreams(), spatialOut); spatialIoPeon.cleanup(); } } log.info("outDir[%s] completed inverted.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime); final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat( Arrays.asList("index.drd", "inverted.drd", "spatial.drd", String.format("time_%s.drd", IndexIO.BYTE_ORDER)), Iterables.transform(mergedDimensions, GuavaUtils.formatFunction("dim_%s.drd")), Iterables.transform(mergedMetrics, GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER))))); Map<String, File> files = Maps.newLinkedHashMap(); for (String fileName : expectedFiles) { files.put(fileName, new File(v8OutDir, fileName)); } File smooshDir = new File(v8OutDir, "smoosher"); smooshDir.mkdir(); for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) { entry.getValue().delete(); } for (File file : smooshDir.listFiles()) { Files.move(file, new File(v8OutDir, file.getName())); } if (!smooshDir.delete()) { log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir, Arrays.asList(smooshDir.listFiles())); throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir)); } createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir, GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy), GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy), dataInterval); IndexIO.DefaultIndexIOHandler.convertV8toV9(v8OutDir, outDir); FileUtils.deleteDirectory(v8OutDir); return outDir; }
From source file:org.goko.viewer.jogl.utils.render.internal.AbstractVboJoglRenderer.java
/** (inheritDoc) * @see org.goko.viewer.jogl.service.ICoreJoglRenderer#performDestroy(javax.media.opengl.GL3) *///from w w w .j a v a 2 s . c o m @Override public void performDestroy(GL3 gl) throws GkException { if (!isInitialized()) { return; } List<Integer> lstBuffers = new ArrayList<Integer>(); if (useVerticesBuffer) { lstBuffers.add(verticesBufferObject); } if (useColorsBuffer) { lstBuffers.add(colorsBufferObject); } if (useUvsBuffer) { lstBuffers.add(uvsBufferObject); } IntBuffer buffers = IntBuffer.allocate(lstBuffers.size()); for (Integer integer : lstBuffers) { buffers.put(integer); } buffers.rewind(); gl.glDeleteBuffers(lstBuffers.size(), buffers); IntBuffer intBuffer = IntBuffer.wrap(new int[] { vertexArrayObject }); gl.glDeleteVertexArrays(1, intBuffer); }
From source file:io.druid.segment.IndexMaker.java
private static int convertDims(final List<IndexableAdapter> adapters, final ProgressIndicator progress, final Iterable<Rowboat> theRows, final List<IntBuffer> rowNumConversions) throws IOException { final String section = "convert dims"; progress.startSection(section);// www. j a v a 2 s . c om for (IndexableAdapter index : adapters) { int[] arr = new int[index.getNumRows()]; Arrays.fill(arr, INVALID_ROW); rowNumConversions.add(IntBuffer.wrap(arr)); } int rowCount = 0; for (Rowboat theRow : theRows) { for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) { final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey()); for (Integer rowNum : comprisedRow.getValue()) { while (conversionBuffer.position() < rowNum) { conversionBuffer.put(INVALID_ROW); } conversionBuffer.put(rowCount); } } if ((++rowCount % 500000) == 0) { progress.progressSection(section, String.format("Walked 500,000/%,d rows", rowCount)); } } for (IntBuffer rowNumConversion : rowNumConversions) { rowNumConversion.rewind(); } progress.stopSection(section); return rowCount; }