Usage examples for java.nio.IntBuffer.position()

public final int position()

Returns this buffer's position: the index of the next element to be read or written. Relative get() and put() calls advance it; rewind(), clear(), and flip() reset it to zero.
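Before the collected examples, a minimal standalone sketch (not drawn from any of the source files below) showing how the position moves:

import java.nio.IntBuffer;

public class PositionDemo {
    public static void main(String[] args) {
        IntBuffer buf = IntBuffer.wrap(new int[] { 10, 20, 30 });
        System.out.println(buf.position()); // 0 -- nothing consumed yet
        buf.get();                          // a relative read advances the position
        System.out.println(buf.position()); // 1
        buf.position(2);                    // the position(int) overload jumps directly
        System.out.println(buf.get());      // 30
        buf.rewind();                       // rewind() resets the position to 0
        System.out.println(buf.position()); // 0
    }
}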
From source file: MainClass.java

public static void main(String[] args) {
    ByteBuffer bb = ByteBuffer.wrap(new byte[] { 0, 0, 0, 0, 0, 0, 0, 'a' });
    bb.rewind(); // redundant here: wrap() already leaves the position at 0
    // The cast is required on Java 8 and earlier, where rewind() is declared on
    // Buffer; Java 9 added covariant overrides that make it unnecessary.
    IntBuffer ib = ((ByteBuffer) bb.rewind()).asIntBuffer();
    System.out.println("Int Buffer");
    while (ib.hasRemaining())
        System.out.println(ib.position() + " -> " + ib.get());
}
From source file: Main.java

public static void main(String[] args) {
    ByteBuffer bb = ByteBuffer.wrap(new byte[] { 0, 0, 0, 0, 0, 0, 0, 'a' });
    bb.rewind();
    System.out.println("Byte Buffer");
    while (bb.hasRemaining())
        System.out.println(bb.position() + " -> " + bb.get());
    // Each view below shares the same eight bytes but keeps its own position,
    // counted in its own element size (chars, floats, ints, ...).
    CharBuffer cb = ((ByteBuffer) bb.rewind()).asCharBuffer();
    System.out.println("Char Buffer");
    while (cb.hasRemaining())
        System.out.println(cb.position() + " -> " + cb.get());
    FloatBuffer fb = ((ByteBuffer) bb.rewind()).asFloatBuffer();
    System.out.println("Float Buffer");
    while (fb.hasRemaining())
        System.out.println(fb.position() + " -> " + fb.get());
    IntBuffer ib = ((ByteBuffer) bb.rewind()).asIntBuffer();
    System.out.println("Int Buffer");
    while (ib.hasRemaining())
        System.out.println(ib.position() + " -> " + ib.get());
    LongBuffer lb = ((ByteBuffer) bb.rewind()).asLongBuffer();
    System.out.println("Long Buffer");
    while (lb.hasRemaining())
        System.out.println(lb.position() + " -> " + lb.get());
    ShortBuffer sb = ((ByteBuffer) bb.rewind()).asShortBuffer();
    System.out.println("Short Buffer");
    while (sb.hasRemaining())
        System.out.println(sb.position() + " -> " + sb.get());
    DoubleBuffer db = ((ByteBuffer) bb.rewind()).asDoubleBuffer();
    System.out.println("Double Buffer");
    while (db.hasRemaining())
        System.out.println(db.position() + " -> " + db.get());
}
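A point the example above demonstrates implicitly: a view buffer's position is measured in its own element units, not in bytes, and is independent of the backing ByteBuffer's position. A small sketch of that relationship, using only java.nio:

import java.nio.ByteBuffer;
import java.nio.IntBuffer;

public class ViewPositions {
    public static void main(String[] args) {
        ByteBuffer bb = ByteBuffer.wrap(new byte[8]);
        IntBuffer ib = bb.asIntBuffer();   // a view: 8 bytes == 2 ints
        ib.get();                          // consume one int from the view
        System.out.println(ib.position()); // 1 (counted in ints, not bytes)
        System.out.println(bb.position()); // 0 -- the byte buffer is untouched
    }
}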
From source file: com.metamx.druid.index.v1.IndexMerger.java

private static File makeIndexFiles(final List<IndexableAdapter> indexes, final File outDir,
    final ProgressIndicator progress, final List<String> mergedDimensions, final List<String> mergedMetrics,
    final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn) throws IOException
{
  Map<String, String> metricTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
  for (IndexableAdapter adapter : indexes) {
    for (String metric : adapter.getAvailableMetrics()) {
      metricTypes.put(metric, adapter.getMetricType(metric));
    }
  }

  final Interval dataInterval;
  File v8OutDir = new File(outDir, "v8-tmp");
  v8OutDir.mkdirs();

  /************* Main index.drd file **************/
  progress.progress();
  long startTime = System.currentTimeMillis();
  File indexFile = new File(v8OutDir, "index.drd");

  FileOutputStream fileOutputStream = null;
  FileChannel channel = null;
  try {
    fileOutputStream = new FileOutputStream(indexFile);
    channel = fileOutputStream.getChannel();
    channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION }));

    GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy).writeToChannel(channel);
    GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy).writeToChannel(channel);

    DateTime minTime = new DateTime(Long.MAX_VALUE);
    DateTime maxTime = new DateTime(0L);

    for (IndexableAdapter index : indexes) {
      minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
      maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
    }

    dataInterval = new Interval(minTime, maxTime);
    serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
  } finally {
    Closeables.closeQuietly(channel);
    channel = null;
    Closeables.closeQuietly(fileOutputStream);
    fileOutputStream = null;
  }
  IndexIO.checkFileSize(indexFile);
  log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

  /************* Setup Dim Conversions **************/
  progress.progress();
  startTime = System.currentTimeMillis();

  IOPeon ioPeon = new TmpFileIOPeon();
  ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size());
  Map<String, Integer> dimensionCardinalities = Maps.newHashMap();
  ArrayList<Map<String, IntBuffer>> dimConversions = Lists.newArrayListWithCapacity(indexes.size());

  for (IndexableAdapter index : indexes) {
    dimConversions.add(Maps.<String, IntBuffer>newHashMap());
  }

  for (String dimension : mergedDimensions) {
    final GenericIndexedWriter<String> writer =
        new GenericIndexedWriter<String>(ioPeon, dimension, GenericIndexed.stringStrategy);
    writer.open();

    List<Indexed<String>> dimValueLookups = Lists.newArrayListWithCapacity(indexes.size());
    DimValueConverter[] converters = new DimValueConverter[indexes.size()];
    for (int i = 0; i < indexes.size(); i++) {
      Indexed<String> dimValues = indexes.get(i).getDimValueLookup(dimension);
      if (dimValues != null) {
        dimValueLookups.add(dimValues);
        converters[i] = new DimValueConverter(dimValues);
      }
    }

    Iterable<String> dimensionValues = CombiningIterable.createSplatted(
        Iterables.transform(dimValueLookups, new Function<Indexed<String>, Iterable<String>>() {
          @Override
          public Iterable<String> apply(@Nullable Indexed<String> indexed) {
            return Iterables.transform(indexed, new Function<String, String>() {
              @Override
              public String apply(@Nullable String input) {
                return (input == null) ? "" : input;
              }
            });
          }
        }),
        Ordering.<String>natural().nullsFirst());

    int count = 0;
    for (String value : dimensionValues) {
      value = value == null ? "" : value;
      writer.write(value);

      for (int i = 0; i < indexes.size(); i++) {
        DimValueConverter converter = converters[i];
        if (converter != null) {
          converter.convert(value, count);
        }
      }
      ++count;
    }
    dimensionCardinalities.put(dimension, count);

    FileOutputSupplier dimOut = new FileOutputSupplier(IndexIO.makeDimFile(v8OutDir, dimension), true);
    dimOuts.add(dimOut);

    writer.close();
    serializerUtils.writeString(dimOut, dimension);
    ByteStreams.copy(writer.combineStreams(), dimOut);
    for (int i = 0; i < indexes.size(); ++i) {
      DimValueConverter converter = converters[i];
      if (converter != null) {
        dimConversions.get(i).put(dimension, converters[i].getConversionBuffer());
      }
    }

    ioPeon.cleanup();
  }
  log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

  /************* Walk through data sets and merge them *************/
  progress.progress();
  startTime = System.currentTimeMillis();

  ArrayList<Iterable<Rowboat>> boats = Lists.newArrayListWithCapacity(indexes.size());

  for (int i = 0; i < indexes.size(); ++i) {
    final IndexableAdapter adapter = indexes.get(i);

    final int[] dimLookup = new int[mergedDimensions.size()];
    int count = 0;
    for (String dim : adapter.getAvailableDimensions()) {
      dimLookup[count] = mergedDimensions.indexOf(dim.toLowerCase());
      count++;
    }

    final int[] metricLookup = new int[mergedMetrics.size()];
    count = 0;
    for (String metric : adapter.getAvailableMetrics()) {
      metricLookup[count] = mergedMetrics.indexOf(metric);
      count++;
    }

    boats.add(new MMappedIndexRowIterable(
        Iterables.transform(indexes.get(i).getRows(), new Function<Rowboat, Rowboat>() {
          @Override
          public Rowboat apply(@Nullable Rowboat input) {
            int[][] newDims = new int[mergedDimensions.size()][];
            int j = 0;
            for (int[] dim : input.getDims()) {
              newDims[dimLookup[j]] = dim;
              j++;
            }

            Object[] newMetrics = new Object[mergedMetrics.size()];
            j = 0;
            for (Object met : input.getMetrics()) {
              newMetrics[metricLookup[j]] = met;
              j++;
            }

            return new Rowboat(input.getTimestamp(), newDims, newMetrics, input.getRowNum(),
                input.getDescriptions());
          }
        }),
        mergedDimensions, dimConversions.get(i), i));
  }

  Iterable<Rowboat> theRows = rowMergerFn.apply(boats);

  CompressedLongsSupplierSerializer timeWriter =
      CompressedLongsSupplierSerializer.create(ioPeon, "little_end_time", IndexIO.BYTE_ORDER);
  timeWriter.open();

  ArrayList<VSizeIndexedWriter> forwardDimWriters = Lists.newArrayListWithCapacity(mergedDimensions.size());
  for (String dimension : mergedDimensions) {
    VSizeIndexedWriter writer = new VSizeIndexedWriter(ioPeon, dimension, dimensionCardinalities.get(dimension));
    writer.open();
    forwardDimWriters.add(writer);
  }

  ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size());
  for (Map.Entry<String, String> entry : metricTypes.entrySet()) {
    String metric = entry.getKey();
    String typeName = entry.getValue();
    if ("float".equals(typeName)) {
      metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon));
    } else {
      ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
      if (serde == null) {
        throw new ISE("Unknown type[%s]", typeName);
      }
      metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde));
    }
  }
  for (MetricColumnSerializer metWriter : metWriters) {
    metWriter.open();
  }

  int rowCount = 0;
  long time = System.currentTimeMillis();
  List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size());
  for (IndexableAdapter index : indexes) {
    int[] arr = new int[index.getNumRows()];
    Arrays.fill(arr, INVALID_ROW);
    rowNumConversions.add(IntBuffer.wrap(arr));
  }

  final Map<String, String> descriptions = Maps.newHashMap();
  for (Rowboat theRow : theRows) {
    progress.progress();
    timeWriter.add(theRow.getTimestamp());

    final Object[] metrics = theRow.getMetrics();
    for (int i = 0; i < metrics.length; ++i) {
      metWriters.get(i).serialize(metrics[i]);
    }

    int[][] dims = theRow.getDims();
    for (int i = 0; i < dims.length; ++i) {
      List<Integer> listToWrite = (i >= dims.length || dims[i] == null) ? null : Ints.asList(dims[i]);
      forwardDimWriters.get(i).write(listToWrite);
    }

    for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
      final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

      for (Integer rowNum : comprisedRow.getValue()) {
        // position() is the next old-row slot to fill; pad skipped rows with INVALID_ROW.
        while (conversionBuffer.position() < rowNum) {
          conversionBuffer.put(INVALID_ROW);
        }
        conversionBuffer.put(rowCount);
      }
    }

    if ((++rowCount % 500000) == 0) {
      log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount,
          System.currentTimeMillis() - time);
      time = System.currentTimeMillis();
    }

    descriptions.putAll(theRow.getDescriptions());
  }

  // Reset every conversion buffer's position to 0 so it can be read back below.
  for (IntBuffer rowNumConversion : rowNumConversions) {
    rowNumConversion.rewind();
  }

  final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER);
  timeFile.delete();
  OutputSupplier<FileOutputStream> out = Files.newOutputStreamSupplier(timeFile, true);
  timeWriter.closeAndConsolidate(out);
  IndexIO.checkFileSize(timeFile);

  for (int i = 0; i < mergedDimensions.size(); ++i) {
    forwardDimWriters.get(i).close();
    ByteStreams.copy(forwardDimWriters.get(i).combineStreams(), dimOuts.get(i));
  }

  for (MetricColumnSerializer metWriter : metWriters) {
    metWriter.close();
  }

  ioPeon.cleanup();
  log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount,
      System.currentTimeMillis() - startTime);

  /************ Create Inverted Indexes *************/
  startTime = System.currentTimeMillis();

  final File invertedFile = new File(v8OutDir, "inverted.drd");
  Files.touch(invertedFile);
  out = Files.newOutputStreamSupplier(invertedFile, true);

  final File geoFile = new File(v8OutDir, "spatial.drd");
  Files.touch(geoFile);
  OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true);

  for (int i = 0; i < mergedDimensions.size(); ++i) {
    long dimStartTime = System.currentTimeMillis();
    String dimension = mergedDimensions.get(i);

    File dimOutFile = dimOuts.get(i).getFile();
    final MappedByteBuffer dimValsMapped = Files.map(dimOutFile);

    if (!dimension.equals(serializerUtils.readString(dimValsMapped))) {
      throw new ISE("dimensions[%s] didn't equate!? This is a major WTF moment.", dimension);
    }
    Indexed<String> dimVals = GenericIndexed.read(dimValsMapped, GenericIndexed.stringStrategy);
    log.info("Starting dimension[%s] with cardinality[%,d]", dimension, dimVals.size());

    GenericIndexedWriter<ImmutableConciseSet> writer = new GenericIndexedWriter<ImmutableConciseSet>(
        ioPeon, dimension, ConciseCompressedIndexedInts.objectStrategy);
    writer.open();

    boolean isSpatialDim = "spatial".equals(descriptions.get(dimension));
    ByteBufferWriter<ImmutableRTree> spatialWriter = null;
    RTree tree = null;
    IOPeon spatialIoPeon = new TmpFileIOPeon();
    if (isSpatialDim) {
      spatialWriter = new ByteBufferWriter<ImmutableRTree>(spatialIoPeon, dimension, IndexedRTree.objectStrategy);
      spatialWriter.open();
      tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50));
    }

    for (String dimVal : IndexedIterable.create(dimVals)) {
      progress.progress();
      List<Iterable<Integer>> convertedInverteds = Lists.newArrayListWithCapacity(indexes.size());
      for (int j = 0; j < indexes.size(); ++j) {
        convertedInverteds.add(
            new ConvertingIndexedInts(indexes.get(j).getInverteds(dimension, dimVal), rowNumConversions.get(j)));
      }

      ConciseSet bitset = new ConciseSet();
      for (Integer row : CombiningIterable.createSplatted(convertedInverteds,
          Ordering.<Integer>natural().nullsFirst())) {
        if (row != INVALID_ROW) {
          bitset.add(row);
        }
      }

      writer.write(ImmutableConciseSet.newImmutableFromMutable(bitset));

      if (isSpatialDim && dimVal != null) {
        List<String> stringCoords = Lists.newArrayList(SPLITTER.split(dimVal));
        float[] coords = new float[stringCoords.size()];
        for (int j = 0; j < coords.length; j++) {
          coords[j] = Float.valueOf(stringCoords.get(j));
        }
        tree.insert(coords, bitset);
      }
    }
    writer.close();

    serializerUtils.writeString(out, dimension);
    ByteStreams.copy(writer.combineStreams(), out);
    ioPeon.cleanup();

    log.info("Completed dimension[%s] in %,d millis.", dimension, System.currentTimeMillis() - dimStartTime);

    if (isSpatialDim) {
      spatialWriter.write(ImmutableRTree.newImmutableFromMutable(tree));
      spatialWriter.close();

      serializerUtils.writeString(spatialOut, dimension);
      ByteStreams.copy(spatialWriter.combineStreams(), spatialOut);
      spatialIoPeon.cleanup();
    }
  }

  log.info("outDir[%s] completed inverted.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

  final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat(
      Arrays.asList("index.drd", "inverted.drd", "spatial.drd", String.format("time_%s.drd", IndexIO.BYTE_ORDER)),
      Iterables.transform(mergedDimensions, GuavaUtils.formatFunction("dim_%s.drd")),
      Iterables.transform(mergedMetrics,
          GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER)))));

  Map<String, File> files = Maps.newLinkedHashMap();
  for (String fileName : expectedFiles) {
    files.put(fileName, new File(v8OutDir, fileName));
  }

  File smooshDir = new File(v8OutDir, "smoosher");
  smooshDir.mkdir();

  for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) {
    entry.getValue().delete();
  }

  for (File file : smooshDir.listFiles()) {
    Files.move(file, new File(v8OutDir, file.getName()));
  }

  if (!smooshDir.delete()) {
    log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir, Arrays.asList(smooshDir.listFiles()));
    throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir));
  }

  createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir,
      GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy),
      GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy), dataInterval);

  IndexIO.DefaultIndexIOHandler.convertV8toV9(v8OutDir, outDir);
  FileUtils.deleteDirectory(v8OutDir);

  return outDir;
}
From source file: io.druid.segment.IndexMaker.java

private static int convertDims(final List<IndexableAdapter> adapters, final ProgressIndicator progress,
    final Iterable<Rowboat> theRows, final List<IntBuffer> rowNumConversions) throws IOException
{
  final String section = "convert dims";
  progress.startSection(section);

  for (IndexableAdapter index : adapters) {
    int[] arr = new int[index.getNumRows()];
    Arrays.fill(arr, INVALID_ROW);
    rowNumConversions.add(IntBuffer.wrap(arr));
  }

  int rowCount = 0;
  for (Rowboat theRow : theRows) {
    for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
      final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());
      for (Integer rowNum : comprisedRow.getValue()) {
        // position() marks the next old-row slot; pad any gap with INVALID_ROW.
        while (conversionBuffer.position() < rowNum) {
          conversionBuffer.put(INVALID_ROW);
        }
        conversionBuffer.put(rowCount);
      }
    }
    if ((++rowCount % 500000) == 0) {
      progress.progressSection(section, String.format("Walked 500,000/%,d rows", rowCount));
    }
  }

  for (IntBuffer rowNumConversion : rowNumConversions) {
    rowNumConversion.rewind();
  }

  progress.stopSection(section);
  return rowCount;
}
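All of the Druid excerpts in this list use the same idiom: each conversion buffer wraps an int[] with one slot per old row, position() serves as the write cursor, `while (conversionBuffer.position() < rowNum)` pads the slots of rows that did not survive the merge, and rewind() then resets the cursor for readers. A distilled sketch of just that idiom (names such as putMapping are illustrative, not Druid's):

import java.nio.IntBuffer;

public class RowNumConversionSketch {
    static final int INVALID_ROW = -1;

    public static void main(String[] args) {
        // One slot per old row; position() is the next old-row slot to fill.
        IntBuffer conversion = IntBuffer.wrap(new int[6]);
        putMapping(conversion, 1, 0); // old row 1 became new row 0
        putMapping(conversion, 4, 1); // old row 4 became new row 1
        while (conversion.position() < conversion.limit()) {
            conversion.put(INVALID_ROW); // pad the tail
        }
        conversion.rewind(); // reset the position so reading starts at old row 0
        while (conversion.hasRemaining()) {
            System.out.println(conversion.position() + " -> " + conversion.get());
        }
    }

    static void putMapping(IntBuffer conversion, int oldRowNum, int newRowNum) {
        while (conversion.position() < oldRowNum) {
            conversion.put(INVALID_ROW); // rows that vanished in the merge
        }
        conversion.put(newRowNum);
    }
}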
From source file: io.druid.segment.IndexMerger.java

private static File makeIndexFiles(final List<IndexableAdapter> indexes, final File outDir,
    final ProgressIndicator progress, final List<String> mergedDimensions, final List<String> mergedMetrics,
    final Map<String, Object> segmentMetadata,
    final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn, final IndexSpec indexSpec)
    throws IOException
{
  final Map<String, ValueType> valueTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
  final Map<String, String> metricTypeNames = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
  final Map<String, ColumnCapabilitiesImpl> columnCapabilities = Maps.newHashMap();

  for (IndexableAdapter adapter : indexes) {
    for (String dimension : adapter.getDimensionNames()) {
      ColumnCapabilitiesImpl mergedCapabilities = columnCapabilities.get(dimension);
      ColumnCapabilities capabilities = adapter.getCapabilities(dimension);
      if (mergedCapabilities == null) {
        mergedCapabilities = new ColumnCapabilitiesImpl();
        mergedCapabilities.setType(ValueType.STRING);
      }
      columnCapabilities.put(dimension, mergedCapabilities.merge(capabilities));
    }
    for (String metric : adapter.getMetricNames()) {
      ColumnCapabilitiesImpl mergedCapabilities = columnCapabilities.get(metric);
      ColumnCapabilities capabilities = adapter.getCapabilities(metric);
      if (mergedCapabilities == null) {
        mergedCapabilities = new ColumnCapabilitiesImpl();
      }
      columnCapabilities.put(metric, mergedCapabilities.merge(capabilities));

      valueTypes.put(metric, capabilities.getType());
      metricTypeNames.put(metric, adapter.getMetricType(metric));
    }
  }

  final Interval dataInterval;
  File v8OutDir = new File(outDir, "v8-tmp");
  v8OutDir.mkdirs();

  /************* Main index.drd file **************/
  progress.progress();
  long startTime = System.currentTimeMillis();
  File indexFile = new File(v8OutDir, "index.drd");

  try (FileOutputStream fileOutputStream = new FileOutputStream(indexFile);
      FileChannel channel = fileOutputStream.getChannel()) {
    channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION }));

    GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.STRING_STRATEGY).writeToChannel(channel);
    GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.STRING_STRATEGY).writeToChannel(channel);

    DateTime minTime = new DateTime(JodaUtils.MAX_INSTANT);
    DateTime maxTime = new DateTime(JodaUtils.MIN_INSTANT);

    for (IndexableAdapter index : indexes) {
      minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
      maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
    }

    dataInterval = new Interval(minTime, maxTime);
    serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
    serializerUtils.writeString(channel, mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory()));
  }
  IndexIO.checkFileSize(indexFile);
  log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

  /************* Setup Dim Conversions **************/
  progress.progress();
  startTime = System.currentTimeMillis();

  IOPeon ioPeon = new TmpFileIOPeon();
  ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size());
  Map<String, Integer> dimensionCardinalities = Maps.newHashMap();
  ArrayList<Map<String, IntBuffer>> dimConversions = Lists.newArrayListWithCapacity(indexes.size());

  for (IndexableAdapter index : indexes) {
    dimConversions.add(Maps.<String, IntBuffer>newHashMap());
  }

  for (String dimension : mergedDimensions) {
    final GenericIndexedWriter<String> writer =
        new GenericIndexedWriter<String>(ioPeon, dimension, GenericIndexed.STRING_STRATEGY);
    writer.open();

    List<Indexed<String>> dimValueLookups = Lists.newArrayListWithCapacity(indexes.size());
    DimValueConverter[] converters = new DimValueConverter[indexes.size()];
    for (int i = 0; i < indexes.size(); i++) {
      Indexed<String> dimValues = indexes.get(i).getDimValueLookup(dimension);
      if (!isNullColumn(dimValues)) {
        dimValueLookups.add(dimValues);
        converters[i] = new DimValueConverter(dimValues);
      }
    }

    Iterable<String> dimensionValues = CombiningIterable.createSplatted(
        Iterables.transform(dimValueLookups, new Function<Indexed<String>, Iterable<String>>() {
          @Override
          public Iterable<String> apply(@Nullable Indexed<String> indexed) {
            return Iterables.transform(indexed, new Function<String, String>() {
              @Override
              public String apply(@Nullable String input) {
                return (input == null) ? "" : input;
              }
            });
          }
        }),
        Ordering.<String>natural().nullsFirst());

    int count = 0;
    for (String value : dimensionValues) {
      value = value == null ? "" : value;
      writer.write(value);

      for (int i = 0; i < indexes.size(); i++) {
        DimValueConverter converter = converters[i];
        if (converter != null) {
          converter.convert(value, count);
        }
      }
      ++count;
    }
    dimensionCardinalities.put(dimension, count);

    FileOutputSupplier dimOut = new FileOutputSupplier(IndexIO.makeDimFile(v8OutDir, dimension), true);
    dimOuts.add(dimOut);

    writer.close();
    serializerUtils.writeString(dimOut, dimension);
    ByteStreams.copy(writer.combineStreams(), dimOut);
    for (int i = 0; i < indexes.size(); ++i) {
      DimValueConverter converter = converters[i];
      if (converter != null) {
        dimConversions.get(i).put(dimension, converters[i].getConversionBuffer());
      }
    }

    ioPeon.cleanup();
  }
  log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

  /************* Walk through data sets and merge them *************/
  progress.progress();
  startTime = System.currentTimeMillis();

  ArrayList<Iterable<Rowboat>> boats = Lists.newArrayListWithCapacity(indexes.size());

  for (int i = 0; i < indexes.size(); ++i) {
    final IndexableAdapter adapter = indexes.get(i);

    final int[] dimLookup = new int[mergedDimensions.size()];
    int count = 0;
    for (String dim : adapter.getDimensionNames()) {
      dimLookup[count] = mergedDimensions.indexOf(dim);
      count++;
    }

    final int[] metricLookup = new int[mergedMetrics.size()];
    count = 0;
    for (String metric : adapter.getMetricNames()) {
      metricLookup[count] = mergedMetrics.indexOf(metric);
      count++;
    }

    boats.add(new MMappedIndexRowIterable(
        Iterables.transform(indexes.get(i).getRows(), new Function<Rowboat, Rowboat>() {
          @Override
          public Rowboat apply(@Nullable Rowboat input) {
            int[][] newDims = new int[mergedDimensions.size()][];
            int j = 0;
            for (int[] dim : input.getDims()) {
              newDims[dimLookup[j]] = dim;
              j++;
            }

            Object[] newMetrics = new Object[mergedMetrics.size()];
            j = 0;
            for (Object met : input.getMetrics()) {
              newMetrics[metricLookup[j]] = met;
              j++;
            }

            return new Rowboat(input.getTimestamp(), newDims, newMetrics, input.getRowNum());
          }
        }),
        mergedDimensions, dimConversions.get(i), i));
  }

  Iterable<Rowboat> theRows = rowMergerFn.apply(boats);

  CompressedLongsSupplierSerializer timeWriter = CompressedLongsSupplierSerializer.create(ioPeon,
      "little_end_time", IndexIO.BYTE_ORDER, CompressedObjectStrategy.DEFAULT_COMPRESSION_STRATEGY);
  timeWriter.open();

  ArrayList<VSizeIndexedWriter> forwardDimWriters = Lists.newArrayListWithCapacity(mergedDimensions.size());
  for (String dimension : mergedDimensions) {
    VSizeIndexedWriter writer = new VSizeIndexedWriter(ioPeon, dimension, dimensionCardinalities.get(dimension));
    writer.open();
    forwardDimWriters.add(writer);
  }

  ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size());
  for (String metric : mergedMetrics) {
    ValueType type = valueTypes.get(metric);
    switch (type) {
    case LONG:
      metWriters.add(new LongMetricColumnSerializer(metric, v8OutDir, ioPeon));
      break;
    case FLOAT:
      metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon));
      break;
    case COMPLEX:
      final String typeName = metricTypeNames.get(metric);
      ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);
      if (serde == null) {
        throw new ISE("Unknown type[%s]", typeName);
      }
      metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde));
      break;
    default:
      throw new ISE("Unknown type[%s]", type);
    }
  }
  for (MetricColumnSerializer metWriter : metWriters) {
    metWriter.open();
  }

  int rowCount = 0;
  long time = System.currentTimeMillis();
  List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size());
  for (IndexableAdapter index : indexes) {
    int[] arr = new int[index.getNumRows()];
    Arrays.fill(arr, INVALID_ROW);
    rowNumConversions.add(IntBuffer.wrap(arr));
  }

  for (Rowboat theRow : theRows) {
    progress.progress();
    timeWriter.add(theRow.getTimestamp());

    final Object[] metrics = theRow.getMetrics();
    for (int i = 0; i < metrics.length; ++i) {
      metWriters.get(i).serialize(metrics[i]);
    }

    int[][] dims = theRow.getDims();
    for (int i = 0; i < dims.length; ++i) {
      List<Integer> listToWrite = (i >= dims.length || dims[i] == null) ? null : Ints.asList(dims[i]);
      forwardDimWriters.get(i).write(listToWrite);
    }

    for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
      final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

      for (Integer rowNum : comprisedRow.getValue()) {
        while (conversionBuffer.position() < rowNum) {
          conversionBuffer.put(INVALID_ROW);
        }
        conversionBuffer.put(rowCount);
      }
    }

    if ((++rowCount % 500000) == 0) {
      log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount,
          System.currentTimeMillis() - time);
      time = System.currentTimeMillis();
    }
  }

  for (IntBuffer rowNumConversion : rowNumConversions) {
    rowNumConversion.rewind();
  }

  final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER);
  timeFile.delete();
  OutputSupplier<FileOutputStream> out = Files.newOutputStreamSupplier(timeFile, true);
  timeWriter.closeAndConsolidate(out);
  IndexIO.checkFileSize(timeFile);

  for (int i = 0; i < mergedDimensions.size(); ++i) {
    forwardDimWriters.get(i).close();
    ByteStreams.copy(forwardDimWriters.get(i).combineStreams(), dimOuts.get(i));
  }

  for (MetricColumnSerializer metWriter : metWriters) {
    metWriter.close();
  }

  ioPeon.cleanup();
  log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount,
      System.currentTimeMillis() - startTime);

  /************ Create Inverted Indexes *************/
  startTime = System.currentTimeMillis();

  final File invertedFile = new File(v8OutDir, "inverted.drd");
  Files.touch(invertedFile);
  out = Files.newOutputStreamSupplier(invertedFile, true);

  final File geoFile = new File(v8OutDir, "spatial.drd");
  Files.touch(geoFile);
  OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true);

  for (int i = 0; i < mergedDimensions.size(); ++i) {
    long dimStartTime = System.currentTimeMillis();
    String dimension = mergedDimensions.get(i);

    File dimOutFile = dimOuts.get(i).getFile();
    final MappedByteBuffer dimValsMapped = Files.map(dimOutFile);

    if (!dimension.equals(serializerUtils.readString(dimValsMapped))) {
      throw new ISE("dimensions[%s] didn't equate!? This is a major WTF moment.", dimension);
    }
    Indexed<String> dimVals = GenericIndexed.read(dimValsMapped, GenericIndexed.STRING_STRATEGY);
    log.info("Starting dimension[%s] with cardinality[%,d]", dimension, dimVals.size());

    final BitmapSerdeFactory bitmapSerdeFactory = indexSpec.getBitmapSerdeFactory();
    GenericIndexedWriter<ImmutableBitmap> writer =
        new GenericIndexedWriter<>(ioPeon, dimension, bitmapSerdeFactory.getObjectStrategy());
    writer.open();

    boolean isSpatialDim = columnCapabilities.get(dimension).hasSpatialIndexes();
    ByteBufferWriter<ImmutableRTree> spatialWriter = null;
    RTree tree = null;
    IOPeon spatialIoPeon = new TmpFileIOPeon();
    if (isSpatialDim) {
      BitmapFactory bitmapFactory = bitmapSerdeFactory.getBitmapFactory();
      spatialWriter = new ByteBufferWriter<ImmutableRTree>(spatialIoPeon, dimension,
          new IndexedRTree.ImmutableRTreeObjectStrategy(bitmapFactory));
      spatialWriter.open();
      tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50, bitmapFactory), bitmapFactory);
    }

    for (String dimVal : IndexedIterable.create(dimVals)) {
      progress.progress();
      List<Iterable<Integer>> convertedInverteds = Lists.newArrayListWithCapacity(indexes.size());
      for (int j = 0; j < indexes.size(); ++j) {
        convertedInverteds.add(new ConvertingIndexedInts(
            indexes.get(j).getBitmapIndex(dimension, dimVal), rowNumConversions.get(j)));
      }

      MutableBitmap bitset = bitmapSerdeFactory.getBitmapFactory().makeEmptyMutableBitmap();
      for (Integer row : CombiningIterable.createSplatted(convertedInverteds,
          Ordering.<Integer>natural().nullsFirst())) {
        if (row != INVALID_ROW) {
          bitset.add(row);
        }
      }

      writer.write(bitmapSerdeFactory.getBitmapFactory().makeImmutableBitmap(bitset));

      if (isSpatialDim && dimVal != null) {
        List<String> stringCoords = Lists.newArrayList(SPLITTER.split(dimVal));
        float[] coords = new float[stringCoords.size()];
        for (int j = 0; j < coords.length; j++) {
          coords[j] = Float.valueOf(stringCoords.get(j));
        }
        tree.insert(coords, bitset);
      }
    }
    writer.close();

    serializerUtils.writeString(out, dimension);
    ByteStreams.copy(writer.combineStreams(), out);
    ioPeon.cleanup();

    log.info("Completed dimension[%s] in %,d millis.", dimension, System.currentTimeMillis() - dimStartTime);

    if (isSpatialDim) {
      spatialWriter.write(ImmutableRTree.newImmutableFromMutable(tree));
      spatialWriter.close();

      serializerUtils.writeString(spatialOut, dimension);
      ByteStreams.copy(spatialWriter.combineStreams(), spatialOut);
      spatialIoPeon.cleanup();
    }
  }

  log.info("outDir[%s] completed inverted.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

  final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat(
      Arrays.asList("index.drd", "inverted.drd", "spatial.drd", String.format("time_%s.drd", IndexIO.BYTE_ORDER)),
      Iterables.transform(mergedDimensions, GuavaUtils.formatFunction("dim_%s.drd")),
      Iterables.transform(mergedMetrics,
          GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER)))));

  if (segmentMetadata != null && !segmentMetadata.isEmpty()) {
    writeMetadataToFile(new File(v8OutDir, "metadata.drd"), segmentMetadata);
    log.info("wrote metadata.drd in outDir[%s].", v8OutDir);
    expectedFiles.add("metadata.drd");
  }

  Map<String, File> files = Maps.newLinkedHashMap();
  for (String fileName : expectedFiles) {
    files.put(fileName, new File(v8OutDir, fileName));
  }

  File smooshDir = new File(v8OutDir, "smoosher");
  smooshDir.mkdir();

  for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) {
    entry.getValue().delete();
  }

  for (File file : smooshDir.listFiles()) {
    Files.move(file, new File(v8OutDir, file.getName()));
  }

  if (!smooshDir.delete()) {
    log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir, Arrays.asList(smooshDir.listFiles()));
    throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir));
  }

  createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir,
      GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.STRING_STRATEGY),
      GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.STRING_STRATEGY), dataInterval,
      indexSpec.getBitmapSerdeFactory());

  IndexIO.DefaultIndexIOHandler.convertV8toV9(v8OutDir, outDir, indexSpec);
  FileUtils.deleteDirectory(v8OutDir);

  return outDir;
}
From source file: io.druid.segment.IndexMergerV9.java

private void mergeIndexesAndWriteColumns(final List<IndexableAdapter> adapters, final ProgressIndicator progress,
    final Iterable<Rowboat> theRows, final LongColumnSerializer timeWriter,
    final ArrayList<GenericColumnSerializer> metWriters, final List<IntBuffer> rowNumConversions,
    final List<DimensionMerger> mergers) throws IOException
{
  final String section = "walk through and merge rows";
  progress.startSection(section);
  long startTime = System.currentTimeMillis();

  int rowCount = 0;
  for (IndexableAdapter adapter : adapters) {
    int[] arr = new int[adapter.getNumRows()];
    Arrays.fill(arr, INVALID_ROW);
    rowNumConversions.add(IntBuffer.wrap(arr));
  }

  long time = System.currentTimeMillis();
  for (Rowboat theRow : theRows) {
    progress.progress();
    timeWriter.serialize(theRow.getTimestamp());

    final Object[] metrics = theRow.getMetrics();
    for (int i = 0; i < metrics.length; ++i) {
      metWriters.get(i).serialize(metrics[i]);
    }

    Object[] dims = theRow.getDims();
    for (int i = 0; i < dims.length; ++i) {
      DimensionMerger merger = mergers.get(i);
      if (merger.canSkip()) {
        continue;
      }
      merger.processMergedRow(dims[i]);
    }

    for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
      final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

      for (Integer rowNum : comprisedRow.getValue()) {
        while (conversionBuffer.position() < rowNum) {
          conversionBuffer.put(INVALID_ROW);
        }
        conversionBuffer.put(rowCount);
      }
    }

    if ((++rowCount % 500000) == 0) {
      log.info("walked 500,000/%d rows in %,d millis.", rowCount, System.currentTimeMillis() - time);
      time = System.currentTimeMillis();
    }
  }

  for (IntBuffer rowNumConversion : rowNumConversions) {
    rowNumConversion.rewind();
  }

  log.info("completed walk through of %,d rows in %,d millis.", rowCount, System.currentTimeMillis() - startTime);
  progress.stopSection(section);
}
From source file: com.creativeongreen.imageeffects.MainActivity.java

public static Bitmap colorDodgeBlend(Bitmap source, Bitmap layer) {
    Bitmap base = source.copy(Config.ARGB_8888, true);
    Bitmap blend = layer.copy(Config.ARGB_8888, false);

    IntBuffer buffBase = IntBuffer.allocate(base.getWidth() * base.getHeight());
    base.copyPixelsToBuffer(buffBase);
    buffBase.rewind();

    IntBuffer buffBlend = IntBuffer.allocate(blend.getWidth() * blend.getHeight());
    blend.copyPixelsToBuffer(buffBlend);
    buffBlend.rewind();

    IntBuffer buffOut = IntBuffer.allocate(base.getWidth() * base.getHeight());
    buffOut.rewind();

    while (buffOut.position() < buffOut.limit()) {
        int filterInt = buffBlend.get();
        int srcInt = buffBase.get();

        int redValueFilter = Color.red(filterInt);
        int greenValueFilter = Color.green(filterInt);
        int blueValueFilter = Color.blue(filterInt);

        int redValueSrc = Color.red(srcInt);
        int greenValueSrc = Color.green(srcInt);
        int blueValueSrc = Color.blue(srcInt);

        int redValueFinal = colordodge(redValueFilter, redValueSrc);
        int greenValueFinal = colordodge(greenValueFilter, greenValueSrc);
        int blueValueFinal = colordodge(blueValueFilter, blueValueSrc);

        int pixel = Color.argb(255, redValueFinal, greenValueFinal, blueValueFinal);

        /*
         * float[] hsv = new float[3]; Color.colorToHSV(pixel, hsv); hsv[1] = 0.0f;
         * float top = VALUE_TOP; // Setting this as 0.95f gave the best result so far
         * if (hsv[2] <= top) { hsv[2] = 0.0f; } else { hsv[2] = 1.0f; }
         * pixel = Color.HSVToColor(hsv);
         */

        buffOut.put(pixel);
    }

    buffOut.rewind();
    base.copyPixelsFromBuffer(buffOut);
    blend.recycle();

    return base;
}
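The loop guard `buffOut.position() < buffOut.limit()` above is exactly the test hasRemaining() performs, and each rewind() resets a buffer's position to zero between the bulk pixel copy and the per-element pass. A minimal, Android-free sketch of the same allocate/fill/rewind/drain cycle:

import java.nio.IntBuffer;

public class AllocateRewindDrain {
    public static void main(String[] args) {
        IntBuffer out = IntBuffer.allocate(4);
        while (out.position() < out.limit()) { // same test as hasRemaining()
            out.put(out.position() * 11);      // relative put advances the position
        }
        out.rewind(); // back to position 0 so the writes can be read back
        while (out.hasRemaining()) {
            System.out.println(out.position() + " -> " + out.get());
        }
    }
}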
From source file: com.creativeongreen.imageeffects.MainActivity.java

public static Bitmap getCartoonizedBitmap(Bitmap realBitmap, Bitmap dodgeBlendBitmap, int hueIntervalSize,
        int saturationIntervalSize, int valueIntervalSize, int saturationPercent, int valuePercent) {
    // Bitmap bitmap = Bitmap.createBitmap(scaledBitmap); // fastblur(scaledBitmap, 4);
    Bitmap base = fastblur(realBitmap, 3).copy(Config.ARGB_8888, true);
    Bitmap dodge = dodgeBlendBitmap.copy(Config.ARGB_8888, false);
    try {
        int realColor;
        int color;
        float top = 0.87f; // VALUE_TOP; between 0.0f .. 1.0f, 0.87f is used here

        IntBuffer templatePixels = IntBuffer.allocate(dodge.getWidth() * dodge.getHeight());
        IntBuffer scaledPixels = IntBuffer.allocate(base.getWidth() * base.getHeight());
        IntBuffer buffOut = IntBuffer.allocate(base.getWidth() * base.getHeight());
        base.copyPixelsToBuffer(scaledPixels);
        dodge.copyPixelsToBuffer(templatePixels);
        templatePixels.rewind();
        scaledPixels.rewind();
        buffOut.rewind();

        while (buffOut.position() < buffOut.limit()) {
            color = (templatePixels.get());
            realColor = scaledPixels.get();

            float[] realHSV = new float[3];
            Color.colorToHSV(realColor, realHSV);

            realHSV[0] = getRoundedValue(realHSV[0], hueIntervalSize);

            // Note: valuePercent and saturationPercent are ints, so "/ 100" below is
            // integer division -- percentages under 100 floor to 0.
            realHSV[2] = (getRoundedValue(realHSV[2] * 100, valueIntervalSize) / 100) * (valuePercent / 100);
            realHSV[2] = realHSV[2] < 1.0 ? realHSV[2] : 1.0f;

            realHSV[1] = realHSV[1] * (saturationPercent / 100);
            realHSV[1] = realHSV[1] < 1.0 ? realHSV[1] : 1.0f;

            float[] HSV = new float[3];
            Color.colorToHSV(color, HSV);
            boolean putBlackPixel = HSV[2] <= top;

            realColor = Color.HSVToColor(realHSV);

            if (putBlackPixel) {
                buffOut.put(color);
            } else {
                buffOut.put(realColor);
            }
        } // end while

        dodge.recycle();
        buffOut.rewind();
        base.copyPixelsFromBuffer(buffOut);
    } catch (Exception e) {
        // TODO: handle exception
    }
    return base;
}
From source file: org.apache.druid.segment.IndexMergerV9.java

/**
 * Returns rowNumConversions, if fillRowNumConversions argument is true
 */
@Nullable
private List<IntBuffer> mergeIndexesAndWriteColumns(final List<IndexableAdapter> adapters,
    final ProgressIndicator progress, final TimeAndDimsIterator timeAndDimsIterator,
    final GenericColumnSerializer timeWriter, final ArrayList<GenericColumnSerializer> metricWriters,
    final List<DimensionMergerV9> mergers, final boolean fillRowNumConversions) throws IOException
{
  final String section = "walk through and merge rows";
  progress.startSection(section);
  long startTime = System.currentTimeMillis();

  List<IntBuffer> rowNumConversions = null;
  int rowCount = 0;
  if (fillRowNumConversions) {
    rowNumConversions = new ArrayList<>(adapters.size());
    for (IndexableAdapter adapter : adapters) {
      int[] arr = new int[adapter.getNumRows()];
      Arrays.fill(arr, INVALID_ROW);
      rowNumConversions.add(IntBuffer.wrap(arr));
    }
  }

  long time = System.currentTimeMillis();
  while (timeAndDimsIterator.moveToNext()) {
    progress.progress();
    TimeAndDimsPointer timeAndDims = timeAndDimsIterator.getPointer();
    timeWriter.serialize(timeAndDims.timestampSelector);

    for (int metricIndex = 0; metricIndex < timeAndDims.getNumMetrics(); metricIndex++) {
      metricWriters.get(metricIndex).serialize(timeAndDims.getMetricSelector(metricIndex));
    }

    for (int dimIndex = 0; dimIndex < timeAndDims.getNumDimensions(); dimIndex++) {
      DimensionMerger merger = mergers.get(dimIndex);
      if (merger.canSkip()) {
        continue;
      }
      merger.processMergedRow(timeAndDims.getDimensionSelector(dimIndex));
    }

    if (timeAndDimsIterator instanceof RowCombiningTimeAndDimsIterator) {
      RowCombiningTimeAndDimsIterator comprisedRows = (RowCombiningTimeAndDimsIterator) timeAndDimsIterator;

      for (int originalIteratorIndex = comprisedRows.nextCurrentlyCombinedOriginalIteratorIndex(0);
          originalIteratorIndex >= 0;
          originalIteratorIndex =
              comprisedRows.nextCurrentlyCombinedOriginalIteratorIndex(originalIteratorIndex + 1)) {

        IntBuffer conversionBuffer = rowNumConversions.get(originalIteratorIndex);
        int minRowNum = comprisedRows.getMinCurrentlyCombinedRowNumByOriginalIteratorIndex(originalIteratorIndex);
        int maxRowNum = comprisedRows.getMaxCurrentlyCombinedRowNumByOriginalIteratorIndex(originalIteratorIndex);

        for (int rowNum = minRowNum; rowNum <= maxRowNum; rowNum++) {
          while (conversionBuffer.position() < rowNum) {
            conversionBuffer.put(INVALID_ROW);
          }
          conversionBuffer.put(rowCount);
        }
      }
    } else if (timeAndDimsIterator instanceof MergingRowIterator) {
      RowPointer rowPointer = (RowPointer) timeAndDims;
      IntBuffer conversionBuffer = rowNumConversions.get(rowPointer.getIndexNum());
      int rowNum = rowPointer.getRowNum();
      while (conversionBuffer.position() < rowNum) {
        conversionBuffer.put(INVALID_ROW);
      }
      conversionBuffer.put(rowCount);
    } else {
      if (fillRowNumConversions) {
        throw new IllegalStateException(
            "Filling row num conversions is supported only with RowCombining and Merging iterators");
      }
    }

    if ((++rowCount % 500000) == 0) {
      log.info("walked 500,000/%d rows in %,d millis.", rowCount, System.currentTimeMillis() - time);
      time = System.currentTimeMillis();
    }
  }

  if (rowNumConversions != null) {
    for (IntBuffer rowNumConversion : rowNumConversions) {
      rowNumConversion.rewind();
    }
  }

  log.info("completed walk through of %,d rows in %,d millis.", rowCount, System.currentTimeMillis() - startTime);
  progress.stopSection(section);
  return rowNumConversions;
}