List of usage examples for java.nio.ByteBuffer.get
public abstract byte get(int index);
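The examples below mostly combine this absolute overload with the relative get() and the bulk get(byte[]) variants. A minimal, self-contained sketch of the three flavors (the class name and values are invented for illustration):

import java.nio.ByteBuffer;

public class ByteBufferGetDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap(new byte[] { 10, 20, 30, 40 });

        // Absolute get: reads at the given index without moving the position.
        byte third = buf.get(2);              // 30

        // Relative get: reads at the current position and advances it.
        byte first = buf.get();               // 10, position is now 1

        // Relative bulk get: fills the array and advances the position.
        byte[] rest = new byte[buf.remaining()];
        buf.get(rest);                        // {20, 30, 40}, position now at limit

        System.out.println(third + " " + first + " " + rest.length); // 30 10 3
    }
}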
From source file:com.linkedin.pinot.common.utils.DataTable.java
private void deserializeDataTable(ByteBuffer input) {
    numRows = input.getInt();
    numCols = input.getInt();

    // Read the (start, length) pairs for each section from the header.
    final int dictionaryStart = input.getInt();
    final int dictionaryLength = input.getInt();
    final int metadataStart = input.getInt();
    final int metadataLength = input.getInt();
    final int schemaStart = input.getInt();
    final int schemaLength = input.getInt();
    final int fixedDataStart = input.getInt();
    final int fixedDataLength = input.getInt();
    final int variableDataStart = input.getInt();
    final int variableDataLength = input.getInt();

    // READ DICTIONARY
    byte[] dictionaryBytes = null;
    if (dictionaryLength != 0) {
        dictionaryBytes = new byte[dictionaryLength];
        input.position(dictionaryStart);
        input.get(dictionaryBytes);
        dictionary = (Map<String, Map<Integer, String>>) deserializeDictionary(dictionaryBytes);
    } else {
        dictionary = new HashMap<String, Map<Integer, String>>(1);
    }

    // READ METADATA
    byte[] metadataBytes;
    if (metadataLength != 0) {
        metadataBytes = new byte[metadataLength];
        input.position(metadataStart);
        input.get(metadataBytes);
        metadata = (Map<String, String>) deserializeMetadata(metadataBytes);
    } else {
        metadata = new HashMap<String, String>();
    }

    // READ SCHEMA
    byte[] schemaBytes;
    if (schemaLength != 0) {
        schemaBytes = new byte[schemaLength];
        input.position(schemaStart);
        input.get(schemaBytes);
        schema = DataSchema.fromBytes(schemaBytes);
        columnOffsets = computeColumnOffsets(schema);
    }

    // READ FIXED SIZE DATA BYTES
    if (fixedDataLength != 0) {
        fixedSizeDataBytes = new byte[fixedDataLength];
        input.position(fixedDataStart);
        input.get(fixedSizeDataBytes);
        fixedSizeData = ByteBuffer.wrap(fixedSizeDataBytes);
    }

    // READ VARIABLE SIZE DATA BYTES
    if (variableDataLength != 0) {
        variableSizeDataBytes = new byte[variableDataLength];
        input.position(variableDataStart);
        input.get(variableSizeDataBytes);
        variableSizeData = ByteBuffer.wrap(variableSizeDataBytes);
    }
}
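The method above follows a common sectioned-layout pattern: read (start, length) pairs from a header, then position(start) followed by a bulk get for each section. A self-contained sketch of the same pattern with a made-up two-section layout (the class name and layout are hypothetical, not Pinot's actual format):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class SectionedBufferDemo {
    public static void main(String[] args) {
        // Build a toy buffer: a 4-int header (start/length per section) plus two payloads.
        byte[] a = "alpha".getBytes(StandardCharsets.UTF_8);
        byte[] b = "beta".getBytes(StandardCharsets.UTF_8);
        int headerSize = 4 * Integer.BYTES;
        ByteBuffer buf = ByteBuffer.allocate(headerSize + a.length + b.length);
        buf.putInt(headerSize).putInt(a.length)
           .putInt(headerSize + a.length).putInt(b.length)
           .put(a).put(b);
        buf.flip();

        // Read the header, then jump to each section and bulk-get it.
        int aStart = buf.getInt(), aLen = buf.getInt();
        int bStart = buf.getInt(), bLen = buf.getInt();
        byte[] aBytes = new byte[aLen];
        buf.position(aStart);
        buf.get(aBytes);
        byte[] bBytes = new byte[bLen];
        buf.position(bStart);
        buf.get(bBytes);
        System.out.println(new String(aBytes, StandardCharsets.UTF_8) + " "
                + new String(bBytes, StandardCharsets.UTF_8)); // alpha beta
    }
}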
From source file:au.org.ala.delta.intkey.model.IntkeyDatasetFileReader.java
/**
 * Read attributes from the items file.
 *
 * @param itemFileHeader item file header
 * @param itemBinFile    item file data
 * @param c              character that we want attributes for
 * @param taxa           taxa that we want attributes for
 * @return a list of attributes for the supplied character and taxa.
 */
private static List<Attribute> readAttributes(ItemsFileHeader itemFileHeader, BinFile itemBinFile,
        Character c, List<Item> taxa) {
    List<Attribute> retList = new ArrayList<Attribute>();

    int totalNumChars = itemFileHeader.getNChar();
    int totalNumTaxa = itemFileHeader.getNItem();

    seekToRecord(itemBinFile, itemFileHeader.getRpCdat());
    List<Integer> charAttributeDataRecordIndicies = readIntegerList(itemBinFile, totalNumChars);

    // Subtract 1 from the charNo because characters are zero indexed in the intkey API
    int charNo = c.getCharacterId();
    int charTaxonDataRecordIndex = charAttributeDataRecordIndicies.get(charNo - 1);

    seekToRecord(itemBinFile, charTaxonDataRecordIndex);

    if (c instanceof MultiStateCharacter) {
        MultiStateCharacter multiStateChar = (MultiStateCharacter) c;

        int bitsPerTaxon = multiStateChar.getStates().length + 1;
        int totalBitsNeeded = bitsPerTaxon * totalNumTaxa;
        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalBitsNeeded) / Double.valueOf(Byte.SIZE)))
                .intValue();

        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaData = Utils.byteArrayToBooleanArray(bytes);

        for (Item t : taxa) {
            // Taxa numbers are 1 indexed instead of 0 indexed
            int startIndex = (t.getItemNumber() - 1) * bitsPerTaxon;
            int endIndex = startIndex + bitsPerTaxon;

            boolean[] taxonData = Arrays.copyOfRange(taxaData, startIndex, endIndex);

            // Taxon data consists of a bit for each state, indicating the state's
            // presence, followed by a final bit signifying whether or not the
            // character is inapplicable for the taxon.
            boolean inapplicable = taxonData[taxonData.length - 1];

            HashSet<Integer> presentStates = new HashSet<Integer>();
            for (int k = 0; k < taxonData.length - 1; k++) {
                boolean statePresent = taxonData[k];
                if (statePresent) {
                    presentStates.add(k + 1);
                }
            }

            SimpleAttributeData attrData = new SimpleAttributeData(presentStates.isEmpty(), inapplicable);
            MultiStateAttribute msAttr = new MultiStateAttribute(multiStateChar, attrData);
            msAttr.setItem(t);
            msAttr.setPresentStates(presentStates);

            retList.add(msAttr);
        }
    } else if (c instanceof IntegerCharacter) {
        IntegerCharacter intChar = (IntegerCharacter) c;
        int charMinValue = intChar.getMinimumValue();
        int charMaxValue = intChar.getMaximumValue();

        // 1 bit for all values below minimum, 1 bit for each value between
        // minimum and maximum (inclusive), 1 bit for all values above maximum,
        // 1 inapplicability bit.
        int bitsPerTaxon = charMaxValue - charMinValue + 4;
        int totalBitsNeeded = bitsPerTaxon * totalNumTaxa;
        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalBitsNeeded) / Double.valueOf(Byte.SIZE)))
                .intValue();

        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaData = Utils.byteArrayToBooleanArray(bytes);

        for (Item t : taxa) {
            // Taxa numbers are 1 indexed instead of 0 indexed
            int startIndex = (t.getItemNumber() - 1) * bitsPerTaxon;
            int endIndex = startIndex + bitsPerTaxon;

            boolean[] taxonData = Arrays.copyOfRange(taxaData, startIndex, endIndex);
            boolean inapplicable = taxonData[taxonData.length - 1];

            Set<Integer> presentValues = new HashSet<Integer>();
            for (int k = 0; k < taxonData.length - 1; k++) {
                boolean present = taxonData[k];
                if (present) {
                    presentValues.add(k + charMinValue - 1);
                }
            }

            IntegerAttribute intAttr = new IntegerAttribute(intChar,
                    new SimpleAttributeData(presentValues.isEmpty(), inapplicable));
            intAttr.setItem(t);
            intAttr.setPresentValues(presentValues);

            retList.add(intAttr);
        }
    } else if (c instanceof RealCharacter) {
        // Read NI inapplicability bits
        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalNumTaxa) / Double.valueOf(Byte.SIZE)))
                .intValue();
        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaInapplicabilityData = Utils.byteArrayToBooleanArray(bytes);

        int recordsSpannedByInapplicabilityData = recordsSpannedByBytes(bytesToRead);
        seekToRecord(itemBinFile, charTaxonDataRecordIndex + recordsSpannedByInapplicabilityData);

        // Read two float values per taxon
        List<Float> taxonData = readFloatList(itemBinFile, totalNumTaxa * 2);

        for (Item t : taxa) {
            int taxonNumber = t.getItemNumber();

            float lowerFloat = taxonData.get((taxonNumber - 1) * 2);
            float upperFloat = taxonData.get(((taxonNumber - 1) * 2) + 1);

            boolean inapplicable = taxaInapplicabilityData[taxonNumber - 1];

            // Character is unknown for the corresponding taxon if lowerFloat > upperFloat
            boolean unknown = lowerFloat > upperFloat;

            RealAttribute realAttr = new RealAttribute((RealCharacter) c,
                    new SimpleAttributeData(unknown, inapplicable));

            if (!unknown) {
                FloatRange range = new FloatRange(lowerFloat, upperFloat);
                realAttr.setPresentRange(range);
            }
            realAttr.setItem(t);

            retList.add(realAttr);
        }
    } else if (c instanceof TextCharacter) {
        TextCharacter textChar = (TextCharacter) c;

        // Read NI inapplicability bits
        int bytesToRead = Double.valueOf(Math.ceil(Double.valueOf(totalNumTaxa) / Double.valueOf(Byte.SIZE)))
                .intValue();
        byte[] bytes = new byte[bytesToRead];
        itemBinFile.readBytes(bytes);
        boolean[] taxaInapplicabilityData = Utils.byteArrayToBooleanArray(bytes);

        int recordsSpannedByInapplicabilityData = recordsSpannedByBytes(bytesToRead);
        seekToRecord(itemBinFile, charTaxonDataRecordIndex + recordsSpannedByInapplicabilityData);

        List<Integer> taxonTextDataOffsets = readIntegerList(itemBinFile, totalNumTaxa + 1);

        int recordsSpannedByOffsets = recordsSpannedByBytes((totalNumTaxa + 1) * Constants.SIZE_INT_IN_BYTES);
        seekToRecord(itemBinFile,
                charTaxonDataRecordIndex + recordsSpannedByInapplicabilityData + recordsSpannedByOffsets);

        // Total text length is the last offset minus the first offset.
        ByteBuffer taxonTextData = itemBinFile.readByteBuffer(
                taxonTextDataOffsets.get(taxonTextDataOffsets.size() - 1) - taxonTextDataOffsets.get(0));

        for (Item t : taxa) {
            int taxonNumber = t.getItemNumber();

            int lowerOffset = taxonTextDataOffsets.get(taxonNumber - 1);
            int upperOffset = taxonTextDataOffsets.get((taxonNumber - 1) + 1);
            int textLength = upperOffset - lowerOffset;

            String txt = "";
            if (textLength > 0) {
                byte[] textBytes = new byte[textLength];
                taxonTextData.position(lowerOffset - 1);
                taxonTextData.get(textBytes);
                txt = BinFileEncoding.decode(textBytes);
            }

            boolean inapplicable = taxaInapplicabilityData[taxonNumber - 1];
            boolean unknown = StringUtils.isEmpty(txt);

            TextAttribute txtAttr = new TextAttribute(textChar, new SimpleAttributeData(unknown, inapplicable));
            try {
                txtAttr.setText(txt);
            } catch (DirectiveException e) {
                // The SimpleAttributeData implementation won't throw this Exception.
            }
            txtAttr.setItem(t);

            retList.add(txtAttr);
        }
    }

    return retList;
}
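This reader expands packed bits into a boolean[] (via Utils.byteArrayToBooleanArray) before slicing out per-taxon ranges. A sketch of such an expansion, assuming least-significant-bit-first order; the real helper's bit order is not shown in this excerpt:

public class BitUnpackDemo {
    // Expand each byte into 8 booleans, least-significant bit first.
    // NOTE: LSB-first ordering is an assumption for illustration; the real
    // Utils.byteArrayToBooleanArray may use a different convention.
    static boolean[] byteArrayToBooleanArray(byte[] bytes) {
        boolean[] bits = new boolean[bytes.length * 8];
        for (int i = 0; i < bits.length; i++) {
            bits[i] = ((bytes[i / 8] >> (i % 8)) & 1) == 1;
        }
        return bits;
    }

    public static void main(String[] args) {
        boolean[] bits = byteArrayToBooleanArray(new byte[] { 0b0000_0101 });
        System.out.println(bits[0] + " " + bits[1] + " " + bits[2]); // true false true
    }
}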
From source file:com.musicplayer.AudioDecoderThread.java
/**
 * After decoding AAC, play using AudioTrack.
 */
public void processTrack(Uri syncContentUri, final Genre classLabel, Context context,
        ProcessTrackRunnable lock) {

    // INITIALISE EXTRACTOR AND DECODER
    Log.v("", "Break Point 1");
    MediaExtractor extractor = new MediaExtractor();
    int sampleRate = 0;
    Uri contentUri = null;
    synchronized (lock) {
        contentUri = syncContentUri;
    }
    try {
        extractor.setDataSource(context, contentUri, null);
    } catch (IOException e) {
        e.printStackTrace();
    }
    int channel = 0;
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("audio/")) {
            extractor.selectTrack(i);
            Log.d("", "format : " + format);
            // ByteBuffer csd = format.getByteBuffer("csd-0");
            // if (csd == null) {
            //     Log.v("", "csd is null");
            // } else {
            //     Log.v("", "csd is not null");
            // }
            // for (int k = 0; k < csd.capacity(); ++k) {
            //     Log.v("", "inside for loop 1");
            //     Log.e("TAG", "csd : " + csd.array()[k]);
            // }
            sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            channel = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            break;
        }
    }

    // MediaFormat format = makeAACCodecSpecificData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, mSampleRate, channel);
    // if (format == null)
    //     return;

    int countt = 0;
    boolean found = false;
    MediaFormat format = null;
    String mime = null;
    while (countt < extractor.getTrackCount() && !found) {
        format = extractor.getTrackFormat(countt);
        mime = format.getString(MediaFormat.KEY_MIME);
        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        if (mime.startsWith("audio/")) {
            found = true;
        }
        countt++;
    }

    // format = mExtractor.getTrackFormat(count);
    // MediaCodecInfo codec = selectCodec(mime);
    // String name = codec.getName();

    MediaCodec decoder = MediaCodec.createDecoderByType(mime);
    // mDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
    decoder.configure(format, null, null, 0);
    if (decoder == null) {
        Log.e("DecodeActivity", "Can't find video info!");
        return;
    }
    decoder.start();
    Log.v("", "Break Point 2");

    // Get decoded bytes
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    BufferInfo info = new BufferInfo();

    // int buffsize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    // // create an audiotrack object
    // AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
    //         AudioFormat.CHANNEL_OUT_STEREO,
    //         AudioFormat.ENCODING_PCM_16BIT,
    //         buffsize,
    //         AudioTrack.MODE_STREAM);
    // audioTrack.play();

    extractor.seekTo(WINDOW_START, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    long start = SystemClock.elapsedRealtimeNanos();
    Log.v("", "Break Point 3");

    // MUSICAL SURFACE FEATURES
    double[] flux = new double[NUM_CHUNKS];
    double[] zeroCrossings = new double[NUM_CHUNKS];
    double[] centroid = new double[NUM_CHUNKS];
    int[] rolloff = new int[NUM_CHUNKS];
    double[] rolloffFreq = new double[NUM_CHUNKS];
    double lowEnergy = 0.0;

    // Means across all chunks
    double fluxMean = 0.0;
    double zeroCrossingsMean = 0;
    double centroidMean = 0.0;
    double rolloffMean = 0;

    // Standard deviations across all chunks
    double fluxStdDeviation = 0.0;
    double zeroCrossingsStdDeviation = 0;
    double centroidStdDeviation = 0.0;
    double rolloffStdDeviation = 0;

    // Initialise some variables to use while iterating
    double[] fftSums = new double[NUM_CHUNKS];
    int iter = 0;
    int count = 0;
    FastFourierTransformer transformer = new FastFourierTransformer(DftNormalization.STANDARD);
    double po2 = 0.0;
    Complex[] input = null;
    Complex[] output = null;
    Complex[] previousOutput = null;
    Complex[] temp = null;
    double frequency = 0.0;
    double centroidNum = 0.0;
    double centroidDen = 0.0;
    double fftValue = 0.0;
    double fftPrevious = 0.0;
    double fluxSquared = 0.0;
    int r = 0;
    boolean foundRolloff = false;
    double sum = 0;
    ArrayList<Double> data = new ArrayList<Double>();
    ArrayList<Double> currentChunk = new ArrayList<Double>();
    int gap = 0;
    int tempCount = 0;
    byte[] chunk = null;
    ArrayList<Double> outputExample = new ArrayList<Double>();
    double normConst = 0.0;

    // Iterate through the chunks
    Log.v("", "count: " + String.valueOf(count));
    while (!eosReceived && count < NUM_CHUNKS) {
        Log.v("", "Break Point " + String.valueOf(count + 4));
        Log.v("", "Inside While Loop Break Point 1");
        if (count == 0) {
            // Log.v("", "Timestamp of chunk 0: " + String.valueOf(extractor.getSampleTime()));
        }
        int inIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            ByteBuffer buffer = inputBuffers[inIndex];
            int sampleSize = extractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                // We shouldn't stop the playback at this point, just pass the EOS
                // flag to mDecoder, we will get it again from the dequeueOutputBuffer
                // Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                extractor.advance();
            }

            int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
            Log.v("", "Inside While Loop Break Point 2");
            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;

            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                MediaFormat mediaFormat = decoder.getOutputFormat();
                Log.d("DecodeActivity", "New format " + mediaFormat);
                // audioTrack.setPlaybackRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
                break;

            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;

            default:
                Log.v("", "Inside While Loop Break Point 3");
                ByteBuffer outBuffer = outputBuffers[outIndex];
                // Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outBuffer);
                chunk = new byte[info.size];
                if (chunk.length == 0) {
                    continue;
                }
                outBuffer.get(chunk); // Read the buffer all at once
                outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN

                gap = chunk.length / DOWN_FACTOR;
                currentChunk.clear();
                Log.v("", "Inside While Loop Break Point 4a");

                // ZERO CROSSINGS
                int increment = 1;
                if (chunk.length > 1000) {
                    increment = (int) ((double) chunk.length / ((double) 1000));
                }

                // Downsampling
                for (int i = 0; i < chunk.length; i = i + increment) {
                    data.add((double) chunk[i]);
                    currentChunk.add((double) chunk[i]);
                    tempCount++;
                    if (currentChunk.size() > 1) {
                        iter += FastMath.abs(sign(currentChunk.get(currentChunk.size() - 1))
                                - sign(currentChunk.get(currentChunk.size() - 2)));
                    }
                }
                increment = 0;
                tempCount = 0;
                zeroCrossings[count] = 0.5 * iter;

                po2 = FastMath.ceil(FastMath.log(currentChunk.size()) / FastMath.log(2));
                input = new Complex[(int) (FastMath.pow(2.0, po2))];
                Log.v("", "chunk length: " + chunk.length);
                Log.v("", "input length: " + input.length);
                for (int i = 0; i < input.length; i++) {
                    if (i < currentChunk.size()) {
                        input[i] = new Complex((double) currentChunk.get(i));
                    } else {
                        input[i] = new Complex(0.0);
                    }
                }

                // FFT
                output = transformer.transform(input, TransformType.FORWARD);
                outputExample.add(centroidDen);

                // CENTROID AND FLUX
                for (int i = 0; i < output.length; i++) {
                    if (count > 0) {
                        fftPrevious = fftValue;
                    }
                    fftValue = FastMath.hypot(output[i].getReal(), output[i].getImaginary());
                    fluxSquared += (fftValue - fftPrevious) * (fftValue - fftPrevious);
                    centroidNum += i * fftValue;
                    centroidDen += fftValue;
                }
                // for (int i = 0; i < output.length; i++) {
                //     normConst += FastMath.hypot(output[i].getReal(), output[i].getImaginary())
                //             * FastMath.hypot(output[i].getReal(), output[i].getImaginary());
                // }
                // fluxSquared = fluxSquared / normConst;
                flux[count] = FastMath.sqrt(fluxSquared) / 1000.0;

                // ROLLOFF
                while (!foundRolloff && r < output.length - 1) {
                    r++;
                    sum += FastMath.hypot(output[r].getReal(), output[r].getImaginary());
                    foundRolloff = checkRolloff(ROLLOFF_PROPORTIONAL_ERROR, sum, centroidDen);
                }
                fftSums[count] = centroidDen;
                if (centroidDen != 0.0) {
                    centroid[count] = centroidNum / centroidDen;
                } else {
                    centroid[count] = 0.0;
                }
                rolloff[count] = r;

                iter = 0;
                fluxSquared = 0.0;
                centroidNum = 0.0;
                centroidDen = 0.0;
                r = 0;
                sum = 0.0;
                foundRolloff = false;
                count++;
                // audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
                decoder.releaseOutputBuffer(outIndex, false);
                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
            if (count > 0) {
                previousOutput = output;
                output = null;
            }
        }
        if (count == NUM_CHUNKS) {
            // Log.v("", "Timestamp of last chunk: " + String.valueOf(extractor.getSampleTime()));
            decoder.stop();
            decoder.release();
            extractor.release();
        }
    } // while loop

    currentChunk.clear();
    currentChunk = null;

    // for (int i = 0; i < centroid.length; i++) {
    //     Log.v("", "centroid: " + String.valueOf(centroid[i]));
    // }

    double energySum = 0.0;
    double energyAverage = 0.0;
    int lowEnergyCount = 0;
    for (int i = 0; i < NUM_CHUNKS; i++) {
        energySum += fftSums[i];
    }
    energyAverage = energySum / NUM_CHUNKS;
    for (int i = 0; i < NUM_CHUNKS; i++) {
        if (fftSums[i] < energyAverage) {
            lowEnergyCount++;
        }
    }
    lowEnergy = 100.0 * (((double) lowEnergyCount) / ((double) NUM_CHUNKS));

    // Work out the means and standard deviations
    for (int i = 0; i < NUM_CHUNKS; i++) {
        fluxMean += flux[i];
        zeroCrossingsMean += zeroCrossings[i];
        centroidMean += centroid[i];
        rolloffMean += rolloff[i];
    }
    fluxMean = fluxMean / flux.length;
    zeroCrossingsMean = zeroCrossingsMean / zeroCrossings.length;
    centroidMean = centroidMean / centroid.length;
    rolloffMean = rolloffMean / rolloff.length;

    for (int i = 0; i < NUM_CHUNKS; i++) {
        fluxStdDeviation += (flux[i] - fluxMean) * (flux[i] - fluxMean);
        zeroCrossingsStdDeviation += (zeroCrossings[i] - zeroCrossingsMean)
                * (zeroCrossings[i] - zeroCrossingsMean);
        centroidStdDeviation += (centroid[i] - centroidMean) * (centroid[i] - centroidMean);
        rolloffStdDeviation += (rolloff[i] - rolloffMean) * (rolloff[i] - rolloffMean);
    }
    fluxStdDeviation = Math.sqrt(fluxStdDeviation / flux.length);
    zeroCrossingsStdDeviation = Math.sqrt(zeroCrossingsStdDeviation / zeroCrossings.length);
    centroidStdDeviation = Math.sqrt(centroidStdDeviation / centroid.length);
    rolloffStdDeviation = Math.sqrt(rolloffStdDeviation / rolloff.length);

    Log.v("", "fluxMean: " + String.valueOf(fluxMean));
    Log.v("", "zeroCrossingsMean: " + String.valueOf(zeroCrossingsMean));
    Log.v("", "centroidMean: " + String.valueOf(centroidMean));
    Log.v("", "rolloffMean: " + String.valueOf(rolloffMean));
    Log.v("", "fluxStdDeviation: " + String.valueOf(fluxStdDeviation));
    Log.v("", "zeroCrossingsStdDeviation: " + String.valueOf(zeroCrossingsStdDeviation));
    Log.v("", "centroidStdDeviation: " + String.valueOf(centroidStdDeviation));
    Log.v("", "rolloffStdDeviation: " + String.valueOf(rolloffStdDeviation));
    Log.v("", "lowEnergy: " + String.valueOf(lowEnergy));
    Log.v("", "data size: " + String.valueOf(data.size()));

    // BEAT ANALYSIS
    Transform t = new Transform(new FastWaveletTransform(new Daubechies4()));
    double[] dataArray = new double[data.size()];
    for (int i = 0; i < data.size(); i++) {
        dataArray[i] = data.get(i);
    }
    data.clear();
    data = null;

    double powerOf2 = FastMath.ceil(FastMath.log(chunk.length) / FastMath.log(2));
    double[] dataArrayPo2 = Arrays.copyOf(dataArray, (int) (FastMath.pow(2.0, powerOf2)));
    dataArray = null;

    double[] dataCurrentInputArray = null;
    double[] dataCurrentOutputArray = null;
    double[] dataCumulativeArray = new double[dataArrayPo2.length];
    for (int i = 0; i < dataCumulativeArray.length; i++) {
        dataCumulativeArray[i] = 0.0;
    }
    double temp1 = 0.0;
    double temp2 = 0.0;
    ArrayList<Double> tempList = new ArrayList<Double>();
    int k = 16; // Downsampling factor
    int tempCount1 = 0;
    double mean = 0.0;

    for (int level = 0; level < (int) FastMath.log(2.0, dataArrayPo2.length); level++) {
        dataCurrentInputArray = t.forward(dataArrayPo2, level);
        dataCurrentOutputArray = dataCurrentInputArray;
        dataCurrentOutputArray[0] = 0.0;
        for (int i = 1; i < dataCurrentOutputArray.length; i++) {
            temp1 = FastMath.abs(dataCurrentInputArray[i]); // Full-wave rectification
            dataCurrentOutputArray[i] = (1.0 - ALPHA) * temp1 - ALPHA * dataCurrentOutputArray[i - 1]; // Low-pass filtering
        }
        tempCount1 = 0;
        mean = 0.0;
        while (k * tempCount1 < dataCurrentOutputArray.length) {
            tempList.add(dataCurrentOutputArray[k * tempCount1]); // Downsampling by k
            mean += dataCurrentOutputArray[k * tempCount1];
            tempCount1++;
        }
        mean = mean / dataCurrentOutputArray.length;
        tempCount1 = 0;
        while (k * tempCount1 < dataCurrentOutputArray.length) {
            dataCumulativeArray[k * tempCount1] += tempList.get(tempCount1) - mean; // Mean removal
            tempCount1++;
        }
    }

    int N = dataCumulativeArray.length;
    ArrayList<Double> dataList = new ArrayList<Double>();
    double dataElement = 0.0;
    for (int i = 0; i < N; i++) {
        if (dataCumulativeArray[i] != 0.0) {
            dataElement = autocorrelate(i, N, dataCumulativeArray);
            dataList.add(dataElement);
            Log.v("", "dataList: " + String.valueOf(dataElement));
        }
    }

    PeakDetector peakDetector = new PeakDetector(dataList);
    int[] peakIndices = peakDetector.process(5, 2);
    HashSet<Integer> hs = new HashSet<Integer>();
    for (int i = 0; i < peakIndices.length; i++) {
        hs.add(peakIndices[i]);
    }
    ArrayList<Integer> indicesList = new ArrayList<Integer>();
    ArrayList<Double> valuesList = new ArrayList<Double>();
    indicesList.addAll(hs);
    Double tempDoub = 0.0;
    HashMap<Double, Integer> hm = new HashMap<Double, Integer>();
    for (int i = 0; i < indicesList.size(); i++) {
        tempDoub = dataList.get(indicesList.get(i));
        hm.put(tempDoub, indicesList.get(i));
    }
    indicesList.clear();
    valuesList.clear();
    Entry<Double, Integer> tempEntry = null;
    Iterator<Entry<Double, Integer>> it = hm.entrySet().iterator();
    while (it.hasNext()) {
        tempEntry = (Entry<Double, Integer>) it.next();
        if (tempEntry.getValue() < 75) {
            it.remove();
        } else {
            // indicesList.add(tempEntry.getValue());
            valuesList.add(tempEntry.getKey());
        }
    }
    Collections.sort(valuesList);
    for (int i = 0; i < valuesList.size(); i++) {
        indicesList.add(hm.get(valuesList.get(i)));
    }

    double valuesSum = 0.0;
    double histogramSum = 0.0;
    double beatStrength = 0.0;
    double P1 = 0.0;
    double P2 = 0.0;
    double A1 = 0.0;
    double A2 = 0.0;
    double RA = 0.0;
    for (int i = 0; i < dataList.size(); i++) {
        histogramSum += dataList.get(i);
    }
    for (int i = 0; i < valuesList.size(); i++) {
        valuesSum += valuesList.get(i);
    }
    // if (histogramSum != 0.0 && valuesList.size() != 0) {
    //     SUM = (1000.0 * valuesSum) / (histogramSum * valuesList.size());
    // }
    if (valuesList.size() != 0) {
        beatStrength = valuesSum / valuesList.size();
    }
    if (indicesList.size() > 0) {
        // Set P1 as the largest peak
        P1 = (double) indicesList.get(indicesList.size() - 1);
    }
    if (indicesList.size() > 1) {
        int beatCount = indicesList.size() - 2;
        boolean beatFound = false;
        // Start with P2 as the second largest peak
        P2 = (double) indicesList.get(indicesList.size() - 2);
        double diff = 0;
        // Iterate backwards through the peaks, largest to smallest
        while (!beatFound && beatCount > -1) {
            diff = ((double) indicesList.get(beatCount)) - P1;
            if (FastMath.abs(diff) / P1 > 0.3) {
                // Set P2 as the period of the first peak that is reasonably different from P1
                P2 = (double) indicesList.get(beatCount);
                beatFound = true;
            }
            beatCount--;
        }
    }
    if (indicesList.size() > 0) {
        A1 = FastMath.abs(dataList.get((int) P1)) / histogramSum;
        if (P2 != 0.0) {
            A2 = FastMath.abs(dataList.get((int) P2)) / histogramSum;
        }
        if (A1 != 0.0) {
            RA = A2 / A1;
        }
    }

    for (int i = 0; i < valuesList.size(); i++) {
        Log.v("", String.valueOf(i) + ") valuesList: " + String.valueOf(valuesList.get(i)));
    }
    Log.v("", "P1: " + String.valueOf(P1));
    Log.v("", "P2: " + String.valueOf(P2));
    Log.v("", "A1: " + String.valueOf(A1));
    Log.v("", "A2: " + String.valueOf(A2));
    Log.v("", "RA: " + String.valueOf(RA));
    Log.v("", "SUM: " + String.valueOf(histogramSum));
    Log.v("", "Number of Peaks: " + String.valueOf(valuesList.size()));

    double[] result = { fluxMean, zeroCrossingsMean, centroidMean, rolloffMean, fluxStdDeviation,
            zeroCrossingsStdDeviation, centroidStdDeviation, rolloffStdDeviation, lowEnergy, P1, P2, A1, A2,
            RA, histogramSum, valuesList.size() };
    final DenseInstance denseInstance = new DenseInstance(result);

    if (P1 + P2 + A1 + A2 + RA != 0.0) {
        Handler handler = new Handler(Looper.getMainLooper());
        handler.post(new ReturnResultsRunnable(lock, mAudioCallback, denseInstance, classLabel));
    } else {
        Log.v("", "Track could not be classified!");
    }

    // for (int i = 0; i < dataList.size(); i++) {
    //     Log.v("", String.valueOf(i) + ") autocorrelation: " + String.valueOf(dataList.get(i)));
    //     histogramSum += dataList.get(i);
    // }
    // Log.v("", "indicesList size: " + String.valueOf(indicesList.size()));
    // for (int i = 0; i < valuesList.size(); i++) {
    //     Log.v("", "indicesList: " + String.valueOf(indicesList.get(i)) + ", value: " + String.valueOf(valuesList.get(i)));
    //     valuesSum += valuesList.get(i);
    // }
    // Classifier c = new KNearestNeighbors(5);
    // double A0 = valuesList.get(valuesList.size() - 1) / valuesSum;
    // double A1 = valuesList.get(valuesList.size() - 2) / valuesSum;
    // double RA = A1 / A0;
    // double P0 = 1 / ((double) indicesList.get(indicesList.size() - 1));
    // double P1 = 1 / ((double) indicesList.get(indicesList.size() - 2));
    //
    // Log.v("", "A0: " + String.valueOf(A0));
    // Log.v("", "A1: " + String.valueOf(A1));
    // Log.v("", "RA: " + String.valueOf(RA));
    // Log.v("", "P0: " + String.valueOf(P0));
    // Log.v("", "P1: " + String.valueOf(P1));
    // Log.v("", "SUM: " + String.valueOf(histogramSum));

    long durationUs = SystemClock.elapsedRealtimeNanos() - start;
    double durationSecs = ((double) durationUs) / 1000000000.0;
    Log.v("", "count = " + String.valueOf(count) + ", Sample rate: " + String.valueOf(sampleRate)
            + ", Duration: " + String.valueOf(durationSecs));
    // audioTrack.stop();
    // audioTrack.release();
    // audioTrack = null;
}
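The ByteBuffer step that matters in this example is draining the codec's output buffer: size a byte[] from BufferInfo.size, bulk-get, then clear() so the reused buffer starts fresh. A plain-Java sketch of that drain-and-clear cycle without any Android classes (buffer sizes invented for illustration):

import java.nio.ByteBuffer;

public class DrainAndClearDemo {
    public static void main(String[] args) {
        // Stand-in for a codec-owned output buffer that gets reused across calls.
        ByteBuffer outBuffer = ByteBuffer.allocate(64);

        for (int pass = 0; pass < 2; pass++) {
            // Producer side: pretend the codec wrote `size` bytes from position 0.
            int size = 16 * (pass + 1);
            for (int i = 0; i < size; i++) {
                outBuffer.put((byte) i);
            }
            outBuffer.flip();

            // Consumer side: read the buffer all at once, then reset it for reuse.
            byte[] chunk = new byte[outBuffer.remaining()];
            outBuffer.get(chunk);
            outBuffer.clear(); // without this, the next pass would start from stale state
            System.out.println("pass " + pass + " drained " + chunk.length + " bytes");
        }
    }
}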
From source file:edu.hawaii.soest.kilonalu.adcp.EnsembleFixedLeader.java
/**
 * Constructor. This method populates the Fixed Leader fields from
 * the given ByteBuffer of data passed in as an argument, based on metadata
 * found in the EnsembleHeader.
 *
 * @param ensembleBuffer the ByteBuffer that contains the binary ensemble data
 * @param ensemble       the parent ensemble for this fixed leader
 */
public EnsembleFixedLeader(ByteBuffer ensembleBuffer, Ensemble ensemble) {

    // prepare the ensemble buffer for reading
    ensembleBuffer.flip();
    ensembleBuffer.limit(ensembleBuffer.capacity());

    // position the cursor at the correct offset given the sequential location
    // of the fixed leader in the data stream.
    int typeNumber = ensemble.getDataTypeNumber(EnsembleDataType.FIXED_LEADER);
    int offset = ensemble.getDataTypeOffset(typeNumber);
    ensembleBuffer.position(offset);

    // define the temporary arrays for passing bytes
    byte[] oneByte = new byte[1];
    byte[] twoBytes = new byte[2];

    // set all of the FixedLeader fields in the order that they are read from
    // the byte stream
    ensembleBuffer.get(twoBytes);
    setFixedLeaderID(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setCpuFirmwareVersion(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setCpuFirmwareRevision(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setSystemConfiguration(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setPdRealOrSimulatedFlag(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setLagLength(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setNumberOfBeams(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setNumberOfCells(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setPingsPerEnsemble(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setDepthCellLength(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setBlankAfterTransmit(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setProfilingMode(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setLowCorrelationThreshold(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setNumberOfCodeRepetitions(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setPercentGoodMinimum(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setErrorVelocityThreshold(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setPingMinutes(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setPingSeconds(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setPingHundredths(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setCoordinateTransformParams(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setHeadingAlignment(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setHeadingBias(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setSensorSource(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setSensorAvailability(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setBinOneDistance(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(twoBytes);
    setTransmitPulseLength(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setReferenceLayerStart(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setReferenceLayerEnd(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setFalseTargetThreshold(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(oneByte);
    setFixedLeaderSpare(oneByte);
    ensemble.addToByteSum(oneByte);
    ensembleBuffer.get(twoBytes);
    setTransmitLagDistance(twoBytes);
    ensemble.addToByteSum(twoBytes);
    byte[] boardSerialNumber = new byte[8];
    ensembleBuffer.get(boardSerialNumber); // read 8 bytes
    setCpuBoardSerialNumber(boardSerialNumber);
    ensemble.addToByteSum(boardSerialNumber);
    ensembleBuffer.get(twoBytes);
    setSystemBandwidth(twoBytes);
    ensemble.addToByteSum(twoBytes);
    ensembleBuffer.get(oneByte);
    setSystemPower(oneByte);
    ensemble.addToByteSum(oneByte);

    // the following don't get called for Workhorse ADCPs
    // TODO: test for model and add fields if necessary
    // ensembleBuffer.get(oneByte);
    // setBaseFrequencyIndex(oneByte);
    // ensemble.addToByteSum(oneByte);
    // byte[] instrumentSerialNumber = new byte[4];
    // ensembleBuffer.get(instrumentSerialNumber); // read 4 bytes
    // setSerialNumber(instrumentSerialNumber);
    // ensemble.addToByteSum(instrumentSerialNumber);
    // ensembleBuffer.get(oneByte);
    // setBeamAngle(oneByte);
    // ensemble.addToByteSum(oneByte);
}
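Each field above arrives as a raw one- or two-byte array that a setter then decodes. For contrast, a sketch of the alternative: set an explicit byte order on the buffer and read typed values directly, with the usual masking to recover unsigned ranges. The little-endian order and field layout here are assumptions for illustration, not the ADCP wire format:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class TypedFieldReadDemo {
    public static void main(String[] args) {
        // Two (assumed) little-endian 16-bit fields followed by one 8-bit field.
        byte[] record = { 0x7F, 0x00, 0x34, 0x12, 0x05 };
        ByteBuffer buf = ByteBuffer.wrap(record).order(ByteOrder.LITTLE_ENDIAN);

        int fieldA = buf.getShort() & 0xFFFF; // 0x007F = 127, masked to unsigned
        int fieldB = buf.getShort() & 0xFFFF; // 0x1234 = 4660
        int fieldC = buf.get() & 0xFF;        // 0x05 = 5

        System.out.println(fieldA + " " + fieldB + " " + fieldC); // 127 4660 5
    }
}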
From source file:com.healthmarketscience.jackcess.impl.DatabaseImpl.java
/**
 * Reads a table with the given name from the given pageNumber.
 */
private TableImpl readTable(String name, int pageNumber, int flags) throws IOException {
    // first, check for existing table
    TableImpl table = _tableCache.get(pageNumber);
    if (table != null) {
        return table;
    }

    ByteBuffer buffer = takeSharedBuffer();
    try {
        // need to load table from db
        _pageChannel.readPage(buffer, pageNumber);
        byte pageType = buffer.get(0);
        if (pageType != PageTypes.TABLE_DEF) {
            throw new IOException(
                    "Looking for " + name + " at page " + pageNumber + ", but page type is " + pageType);
        }
        return _tableCache.put(new TableImpl(this, buffer, pageNumber, name, flags));
    } finally {
        releaseSharedBuffer(buffer);
    }
}
From source file:com.healthmarketscience.jackcess.impl.DatabaseImpl.java
public String getDatabasePassword() throws IOException {
    ByteBuffer buffer = takeSharedBuffer();
    try {
        _pageChannel.readPage(buffer, 0);

        byte[] pwdBytes = new byte[_format.SIZE_PASSWORD];
        buffer.position(_format.OFFSET_PASSWORD);
        buffer.get(pwdBytes);

        // de-mask password using extra password mask if necessary (the extra
        // password mask is generated from the database creation date stored in
        // the header)
        byte[] pwdMask = getPasswordMask(buffer, _format);
        if (pwdMask != null) {
            for (int i = 0; i < pwdBytes.length; ++i) {
                pwdBytes[i] ^= pwdMask[i % pwdMask.length];
            }
        }

        boolean hasPassword = false;
        for (int i = 0; i < pwdBytes.length; ++i) {
            if (pwdBytes[i] != 0) {
                hasPassword = true;
                break;
            }
        }
        if (!hasPassword) {
            return null;
        }

        String pwd = ColumnImpl.decodeUncompressedText(pwdBytes, getCharset());

        // remove any trailing null chars
        int idx = pwd.indexOf('\0');
        if (idx >= 0) {
            pwd = pwd.substring(0, idx);
        }

        return pwd;
    } finally {
        releaseSharedBuffer(buffer);
    }
}
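The de-masking loop above is a repeating-key XOR, which is its own inverse: applying the same mask twice restores the original bytes. A tiny sketch of that round trip (the mask value is invented, not the real Jet header mask):

import java.nio.charset.StandardCharsets;

public class XorMaskDemo {
    // Repeating-key XOR: the same call masks and unmasks.
    static void applyMask(byte[] data, byte[] mask) {
        for (int i = 0; i < data.length; i++) {
            data[i] ^= mask[i % mask.length];
        }
    }

    public static void main(String[] args) {
        byte[] pwdBytes = "secret".getBytes(StandardCharsets.US_ASCII);
        byte[] mask = { 0x1B, 0x5C }; // hypothetical mask for illustration
        applyMask(pwdBytes, mask);    // masked
        applyMask(pwdBytes, mask);    // unmasked: XOR is its own inverse
        System.out.println(new String(pwdBytes, StandardCharsets.US_ASCII)); // secret
    }
}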
From source file:fuse.okuyamafs.OkuyamaFilesystem.java
public int write(String path, Object fh, boolean isWritepage, ByteBuffer buf, long offset) throws FuseException {
    log.info("write path:" + path + " offset:" + offset + " isWritepage:" + isWritepage + " buf.limit:"
            + buf.limit());
    // long startAA = System.nanoTime();
    try {
        if (startTimeAAA == 0L)
            startTimeAAA = System.nanoTime();

        // Storage type 1 writes straight through, bypassing the append buffer.
        if (OkuyamaFilesystem.storageType == 1)
            return realWrite(path, fh, isWritepage, buf, offset);

        if (fh == null)
            return Errno.EBADE;

        synchronized (this.parallelDataAccessSync[((path.hashCode() << 1) >>> 1) % 100]) {
            if (appendWriteDataBuf.containsKey(fh)) {
                Map appendData = (Map) appendWriteDataBuf.get(fh);
                ByteArrayOutputStream bBuf = (ByteArrayOutputStream) appendData.get("buf");
                long bOffset = ((Long) appendData.get("offset")).longValue();

                if ((bOffset + bBuf.size()) == offset) {
                    // This write is contiguous with the buffered data, so append it.
                    byte[] tmpBuf = new byte[buf.limit()];
                    buf.get(tmpBuf);
                    bBuf.write(tmpBuf);

                    // Flush once the buffered data reaches the write buffer size.
                    if (bBuf.size() >= writeBufferSize) {
                        appendWriteDataBuf.remove(fh);
                        String bPath = (String) appendData.get("path");
                        Object bFh = (Object) appendData.get("fh");
                        boolean bIsWritepage = ((Boolean) appendData.get("isWritepage")).booleanValue();
                        int ret = this.realWrite(bPath, bFh, bIsWritepage, bBuf, bOffset);
                        return ret;
                    } else {
                        return 0;
                    }
                } else {
                    // The offset is not contiguous with the buffered data:
                    // flush the buffer, then write the new data separately.
                    appendWriteDataBuf.remove(fh);
                    String bPath = (String) appendData.get("path");
                    Object bFh = (Object) appendData.get("fh");
                    boolean bIsWritepage = ((Boolean) appendData.get("isWritepage")).booleanValue();
                    int realWriteRet = this.realWrite(bPath, bFh, bIsWritepage, bBuf, bOffset);
                    if (realWriteRet == 0) {
                        int retI = this.realWrite(path, fh, isWritepage, buf, offset);
                        return retI;
                    } else {
                        return realWriteRet;
                    }
                }
            } else {
                // No buffered data yet for this file handle: start a new append buffer.
                Map appendData = new HashMap();
                appendData.put("path", path);
                appendData.put("fh", fh);
                appendData.put("isWritepage", isWritepage);
                ByteArrayOutputStream baos = new ByteArrayOutputStream(1024 * 1024 * 10 + 8192);
                byte[] tmpByte = new byte[buf.limit()];
                buf.get(tmpByte);
                baos.write(tmpByte);
                appendData.put("buf", baos);
                appendData.put("offset", offset);
                this.appendWriteDataBuf.put(fh, appendData);
                this.writeBufFpMap.addGroupingData(path, fh);
                return 0;
            }
        }
    } catch (Exception e) {
        throw new FuseException(e);
    } finally {
    }
}
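Note that write() sizes its temporary arrays with buf.limit(), which equals the byte count to copy only while the buffer's position is 0, as appears to be the case for buffers FUSE hands in here. In general, remaining() is the safer measure; a short sketch of the difference:

import java.nio.ByteBuffer;

public class CopyRemainingDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 });
        buf.get(); // simulate an earlier read: position is now 1

        // Sizing by limit() only matches when position == 0; remaining()
        // (limit minus position) is correct regardless of prior reads.
        byte[] copy = new byte[buf.remaining()];
        buf.get(copy); // copies {2, 3, 4, 5}
        System.out.println(copy.length); // 4, where limit() would have said 5
    }
}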
From source file:com.healthmarketscience.jackcess.Table.java
/**
 * Returns a name read from the buffer at the current position. The
 * expected name format is the name length followed by the name
 * encoded using the {@link JetFormat#CHARSET}
 */
private String readName(ByteBuffer buffer) {
    int nameLength = readNameLength(buffer);
    byte[] nameBytes = new byte[nameLength];
    buffer.get(nameBytes);
    return Column.decodeUncompressedText(nameBytes, getDatabase().getCharset());
}
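readNameLength and the charset handling are Jackcess internals; the underlying shape is a length-prefixed string read. A self-contained sketch of that shape (the two-byte length prefix is an assumption for illustration, not Jackcess's actual on-disk layout):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class LengthPrefixedNameDemo {
    public static void main(String[] args) {
        // Encode: an (assumed) two-byte length prefix followed by the name bytes.
        byte[] name = "Customers".getBytes(StandardCharsets.UTF_8);
        ByteBuffer buf = ByteBuffer.allocate(2 + name.length);
        buf.putShort((short) name.length).put(name).flip();

        // Decode: read the length, then bulk-get exactly that many bytes.
        int nameLength = buf.getShort() & 0xFFFF;
        byte[] nameBytes = new byte[nameLength];
        buf.get(nameBytes);
        System.out.println(new String(nameBytes, StandardCharsets.UTF_8)); // Customers
    }
}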
From source file:com.healthmarketscience.jackcess.Database.java
/**
 * Reads a table with the given name from the given pageNumber.
 */
private Table readTable(String name, int pageNumber, int flags, boolean useBigIndex) throws IOException {
    // first, check for existing table
    Table table = _tableCache.get(pageNumber);
    if (table != null) {
        return table;
    }

    ByteBuffer buffer = takeSharedBuffer();
    try {
        // need to load table from db
        _pageChannel.readPage(buffer, pageNumber);
        byte pageType = buffer.get(0);
        if (pageType != PageTypes.TABLE_DEF) {
            throw new IOException(
                    "Looking for " + name + " at page " + pageNumber + ", but page type is " + pageType);
        }
        return _tableCache.put(new Table(this, buffer, pageNumber, name, flags, useBigIndex));
    } finally {
        releaseSharedBuffer(buffer);
    }
}
From source file:com.healthmarketscience.jackcess.Column.java
/**
 * Read a column definition in from a buffer
 *
 * @param table  owning table
 * @param buffer Buffer containing column definition
 * @param offset Offset in the buffer at which the column definition starts
 * @usage _advanced_method_
 */
public Column(Table table, ByteBuffer buffer, int offset, int displayIndex) throws IOException {
    _table = table;
    _displayIndex = displayIndex;

    if (LOG.isDebugEnabled()) {
        LOG.debug("Column def block:\n" + ByteUtil.toHexString(buffer, offset, 25));
    }

    byte colType = buffer.get(offset + getFormat().OFFSET_COLUMN_TYPE);
    _columnNumber = buffer.getShort(offset + getFormat().OFFSET_COLUMN_NUMBER);
    _columnLength = buffer.getShort(offset + getFormat().OFFSET_COLUMN_LENGTH);

    byte flags = buffer.get(offset + getFormat().OFFSET_COLUMN_FLAGS);
    _variableLength = ((flags & FIXED_LEN_FLAG_MASK) == 0);
    _autoNumber = ((flags & (AUTO_NUMBER_FLAG_MASK | AUTO_NUMBER_GUID_FLAG_MASK)) != 0);

    try {
        _type = DataType.fromByte(colType);
    } catch (IOException e) {
        LOG.warn("Unsupported column type " + colType);
        _type = (_variableLength ? DataType.UNSUPPORTED_VARLEN : DataType.UNSUPPORTED_FIXEDLEN);
        setUnknownDataType(colType);
    }

    if (_type.getHasScalePrecision()) {
        modifyNumericInfo();
        _numericInfo._precision = buffer.get(offset + getFormat().OFFSET_COLUMN_PRECISION);
        _numericInfo._scale = buffer.get(offset + getFormat().OFFSET_COLUMN_SCALE);
    } else if (_type.isTextual()) {
        modifyTextInfo();
        // co-located w/ precision/scale
        _textInfo._sortOrder = readSortOrder(buffer, offset + getFormat().OFFSET_COLUMN_SORT_ORDER,
                getFormat());
        int cpOffset = getFormat().OFFSET_COLUMN_CODE_PAGE;
        if (cpOffset >= 0) {
            _textInfo._codePage = buffer.getShort(offset + cpOffset);
        }
        _textInfo._compressedUnicode = ((buffer.get(offset + getFormat().OFFSET_COLUMN_COMPRESSED_UNICODE)
                & 1) == 1);
    }

    setAutoNumberGenerator();

    if (_variableLength) {
        _varLenTableIndex = buffer.getShort(offset + getFormat().OFFSET_COLUMN_VARIABLE_TABLE_INDEX);
    } else {
        _fixedDataOffset = buffer.getShort(offset + getFormat().OFFSET_COLUMN_FIXED_DATA_OFFSET);
    }

    // load complex info
    if (_type == DataType.COMPLEX_TYPE) {
        _complexInfo = ComplexColumnInfo.create(this, buffer, offset);
    }
}
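Unlike the earlier examples, every read here is absolute (get/getShort with an explicit index), so parsing one column definition never moves the buffer's position, and the caller can parse several definitions from the same buffer. A sketch of absolute field reads at a record offset (the layout and offsets are invented for illustration):

import java.nio.ByteBuffer;

public class AbsoluteFieldReadDemo {
    // Hypothetical fixed layout: type at +0, number at +1 (short), length at +3 (short).
    static void dumpRecord(ByteBuffer buffer, int offset) {
        byte type = buffer.get(offset);
        short number = buffer.getShort(offset + 1);
        short length = buffer.getShort(offset + 3);
        System.out.println("type=" + type + " number=" + number + " length=" + length);
    }

    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(10);
        buffer.put(0, (byte) 4).putShort(1, (short) 7).putShort(3, (short) 255);
        dumpRecord(buffer, 0); // type=4 number=7 length=255
        // Absolute reads never move the position:
        System.out.println("position still " + buffer.position()); // 0
    }
}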