List of usage examples for java.nio.ByteBuffer.clear()
public final Buffer clear()
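A note on semantics before the examples: clear() resets the position to 0 and the limit to the capacity, and discards the mark, but it does not erase the buffer's contents. A minimal standalone illustration:

import java.nio.ByteBuffer;

public class ClearDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.put((byte) 1).put((byte) 2);  // position = 2, limit = 8
        buf.clear();                      // position = 0, limit = 8, mark discarded
        System.out.println(buf.get(0));   // prints 1 -- the bytes are still there
    }
}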
From source file:com.clustercontrol.agent.job.PublicKeyThread.java
/**
 * Deletes the given public key from the authorized_keys file.
 *
 * @param publicKey the public key to delete
 * @return true if the key was removed (or the update was skipped), false otherwise
 */
private synchronized boolean deleteKey(String publicKey) {
    m_log.debug("delete key start");

    if (SKIP_KEYFILE_UPDATE) {
        m_log.info("skipped deleting publicKey");
        return true;
    }

    Charset charset = Charset.forName("UTF-8");
    CharsetEncoder encoder = charset.newEncoder();
    CharsetDecoder decoder = charset.newDecoder();

    // Resolve the authorized_keys file path for the execution user
    String fileName = AgentProperties.getProperty(execUser.toLowerCase() + AUTHORIZED_KEY_PATH);
    if (fileName == null || fileName.length() == 0)
        return false;

    // Open the file
    File fi = new File(fileName);

    RandomAccessFile randomAccessFile = null;
    FileChannel channel = null;
    FileLock lock = null;
    boolean delete = false;
    try {
        // Open a RandomAccessFile and get its channel
        randomAccessFile = new RandomAccessFile(fi, "rw");
        channel = randomAccessFile.getChannel();

        // Try to acquire an exclusive lock, retrying until the timeout expires
        for (int i = 0; i < (FILELOCK_TIMEOUT / FILELOCK_WAIT); i++) {
            if (null != (lock = channel.tryLock())) {
                break;
            }
            m_log.info("waiting for locked file... [" + (i + 1) + "/"
                    + (FILELOCK_TIMEOUT / FILELOCK_WAIT) + " : " + fileName + "]");
            Thread.sleep(FILELOCK_WAIT);
        }
        if (null == lock) {
            m_log.warn("file locking timeout.");
            return false;
        }

        // Rewrite the key file under the shared lock object
        synchronized (authKeyLock) {
            // Read the whole file into a buffer
            ByteBuffer buffer = ByteBuffer.allocate((int) channel.size());
            channel.read(buffer);

            // Reset the position to zero so the contents can be decoded from the start
            buffer.flip();
            String contents = decoder.decode(buffer).toString();
            m_log.debug("contents " + contents.length() + " : " + contents);

            // Split the contents into individual key lines
            List<String> keyCheck = new ArrayList<String>();
            StringTokenizer tokenizer = new StringTokenizer(contents, "\n");
            while (tokenizer.hasMoreTokens()) {
                keyCheck.add(tokenizer.nextToken());
            }

            // Remove the last occurrence of the given key, if present
            int s = keyCheck.lastIndexOf(publicKey);
            if (s != -1) {
                m_log.debug("remove key : " + keyCheck.get(s));
                keyCheck.remove(s);
            }

            // Re-encode the remaining keys into the same buffer
            encoder.reset();
            buffer.clear();
            int i;
            if (keyCheck.size() > 0) {
                for (i = 0; i < keyCheck.size() - 1; i++) {
                    encoder.encode(CharBuffer.wrap(keyCheck.get(i) + "\n"), buffer, false);
                }
                encoder.encode(CharBuffer.wrap(keyCheck.get(i)), buffer, true);
            }

            // Write the buffer back, truncating the old contents first
            buffer.flip();
            channel.truncate(0);
            channel.position(0);
            channel.write(buffer);
        }

        delete = true;

    } catch (IOException e) {
        m_log.error(e.getMessage(), e);
    } catch (RuntimeException e) {
        m_log.error(e.getMessage(), e);
    } catch (InterruptedException e) {
        m_log.error(e.getMessage(), e);
    } finally {
        try {
            if (channel != null) {
                channel.close();
            }
            if (randomAccessFile != null) {
                randomAccessFile.close();
            }
            // Release the file lock
            if (lock != null) {
                lock.release();
            }
        } catch (Exception e) {
        }
    }

    return delete;
}
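The ByteBuffer pattern above is a read-modify-write cycle on a single reusable buffer: read() fills it, flip() prepares it for decoding, and clear() resets position and limit so the re-encoded text can be written into the same storage. A minimal sketch of that cycle, assuming a UTF-8 text file; the path, line argument, and helper name are placeholders, and error handling is elided:

import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

// Sketch only: removes one line from a small UTF-8 file in place.
static void removeLine(String path, String line) throws Exception {
    try (FileChannel ch = FileChannel.open(Paths.get(path),
            StandardOpenOption.READ, StandardOpenOption.WRITE)) {
        ByteBuffer buf = ByteBuffer.allocate((int) ch.size());
        ch.read(buf);      // fill: position advances by the bytes read
        buf.flip();        // drain: limit = end of data, position = 0
        String contents = StandardCharsets.UTF_8.decode(buf).toString();
        buf.clear();       // reuse: position = 0, limit = capacity
        buf.put(StandardCharsets.UTF_8.encode(contents.replace(line + "\n", "")));
        buf.flip();
        ch.truncate(0).position(0);
        ch.write(buf);
    }
}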
From source file:com.koda.integ.hbase.storage.FileExtStorage.java
@Override
public void flush() throws IOException {
    // TODO: this method flushes only the internal buffer
    // and does not touch the internal flusher queue
    LOG.info("Flushing internal buffer to the storage");
    long start = System.currentTimeMillis();
    writeLock.writeLock().lock();
    try {
        ByteBuffer buf = activeBuffer.get();
        if (bufferOffset.get() == 0) {
            // skip flush
            LOG.info("Skipping flush");
            return;
        }
        if (buf != null) {
            if (buf.position() != 0)
                buf.flip();
            while (buf.hasRemaining()) {
                currentForWrite.getChannel().write(buf);
            }
            buf.clear();
            bufferOffset.set(0);
            // we advance to next file
        } else {
            LOG.warn("Active buffer is NULL");
        }
    } catch (Exception e) {
        LOG.error(e);
    } finally {
        writeLock.writeLock().unlock();
        // Close file
        currentForWrite.close();
    }
    LOG.info("Flushing completed in " + (System.currentTimeMillis() - start) + "ms");
}
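The core sequence here is flip-drain-clear: flip() (guarded, in case the buffer is already in drain mode) switches from filling to draining, the hasRemaining() loop covers partial channel writes, and clear() makes the buffer fillable again. Condensed into a standalone helper (the parameter types are assumptions, not taken from the source above):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;

static void drainAndReset(ByteBuffer buf, WritableByteChannel out) throws IOException {
    if (buf.position() != 0) {
        buf.flip();          // switch from filling to draining
    }
    while (buf.hasRemaining()) {
        out.write(buf);      // a single write() may consume only part of the buffer
    }
    buf.clear();             // fillable again; the bytes themselves are not erased
}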
From source file:org.opendaylight.lispflowmapping.lisp.serializer.MapRegisterSerializer.java
public ByteBuffer serialize(MapRegister mapRegister) {
    int size = Length.HEADER_SIZE;
    if (mapRegister.getAuthenticationData() != null) {
        size += mapRegister.getAuthenticationData().length;
    }
    if (mapRegister.isXtrSiteIdPresent() != null && mapRegister.isXtrSiteIdPresent()) {
        size += Length.XTRID_SIZE + Length.SITEID_SIZE;
    }
    for (MappingRecordItem eidToLocatorRecord : mapRegister.getMappingRecordItem()) {
        size += MappingRecordSerializer.getInstance()
                .getSerializationSize(eidToLocatorRecord.getMappingRecord());
    }

    ByteBuffer registerBuffer = ByteBuffer.allocate(size);
    registerBuffer.put((byte) ((byte) (MessageType.MapRegister.getIntValue() << 4)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRegister.isProxyMapReply()), Flags.PROXY)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRegister.isXtrSiteIdPresent()), Flags.XTRSITEID)));
    registerBuffer.position(registerBuffer.position() + Length.RES);
    registerBuffer.put((byte) (ByteUtil.boolToBit(BooleanUtils.isTrue(mapRegister.isMergeEnabled()), Flags.MERGE_ENABLED)
            | ByteUtil.boolToBit(BooleanUtils.isTrue(mapRegister.isWantMapNotify()), Flags.WANT_MAP_NOTIFY)));
    registerBuffer.put((byte) mapRegister.getMappingRecordItem().size());
    registerBuffer.putLong(NumberUtil.asLong(mapRegister.getNonce()));
    registerBuffer.putShort(NumberUtil.asShort(mapRegister.getKeyId()));

    if (mapRegister.getAuthenticationData() != null) {
        registerBuffer.putShort((short) mapRegister.getAuthenticationData().length);
        registerBuffer.put(mapRegister.getAuthenticationData());
    } else {
        registerBuffer.putShort((short) 0);
    }

    for (MappingRecordItem eidToLocatorRecord : mapRegister.getMappingRecordItem()) {
        MappingRecordSerializer.getInstance().serialize(registerBuffer, eidToLocatorRecord.getMappingRecord());
    }

    if (mapRegister.isXtrSiteIdPresent() != null && mapRegister.isXtrSiteIdPresent()) {
        registerBuffer.put(mapRegister.getXtrId().getValue());
        registerBuffer.put(mapRegister.getSiteId().getValue());
    }

    // Reset position to 0 (and limit to capacity) so the caller reads from the start
    registerBuffer.clear();
    return registerBuffer;
}
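Here clear() appears at the end of serialization rather than before a read: because the buffer was allocated to exactly size bytes and filled completely, clear() just moves position back to 0 with limit already equal to the data length. With a partially filled buffer, flip() would be the correct call. A sketch of the distinction (hypothetical helper names):

import java.nio.ByteBuffer;

static ByteBuffer buildExact(byte[] payload) {
    ByteBuffer buf = ByteBuffer.allocate(payload.length); // sized exactly
    buf.put(payload);
    buf.clear();   // position = 0, limit = capacity = data length: safe only when full
    return buf;
}

static ByteBuffer buildPartial(byte[] payload) {
    ByteBuffer buf = ByteBuffer.allocate(payload.length + 64); // oversized
    buf.put(payload);
    buf.flip();    // limit = bytes written; clear() here would expose unwritten bytes
    return buf;
}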
From source file:com.musicplayer.AudioDecoderThread.java
/**
 * After decoding AAC, play using AudioTrack.
 */
public void processTrack(Uri syncContentUri, final Genre classLabel, Context context,
        ProcessTrackRunnable lock) {

    // INITIALISE EXTRACTOR AND DECODER
    Log.v("", "Break Point 1");
    MediaExtractor extractor = new MediaExtractor();
    int sampleRate = 0;
    Uri contentUri = null;
    synchronized (lock) {
        contentUri = syncContentUri;
    }
    try {
        extractor.setDataSource(context, contentUri, null);
    } catch (IOException e) {
        e.printStackTrace();
    }
    int channel = 0;

    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("audio/")) {
            extractor.selectTrack(i);
            Log.d("", "format : " + format);
            // ByteBuffer csd = format.getByteBuffer("csd-0");
            // if(csd == null){
            //     Log.v("", "csd is null");
            // } else{
            //     Log.v("", "csd is not null");
            // }
            // for (int k = 0; k < csd.capacity(); ++k) {
            //     Log.v("", "inside for loop 1");
            //     Log.e("TAG", "csd : " + csd.array()[k]);
            // }
            sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            channel = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            break;
        }
    }

    // MediaFormat format = makeAACCodecSpecificData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, mSampleRate, channel);
    // if (format == null)
    //     return;

    int countt = 0;
    boolean found = false;
    MediaFormat format = null;
    String mime = null;
    while (countt < extractor.getTrackCount() && !found) {
        format = extractor.getTrackFormat(countt);
        mime = format.getString(MediaFormat.KEY_MIME);
        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        if (mime.startsWith("audio/")) {
            found = true;
        }
        countt++;
    }

    //format = mExtractor.getTrackFormat(count);
    //MediaCodecInfo codec = selectCodec(mime);
    //String name = codec.getName();

    MediaCodec decoder = MediaCodec.createDecoderByType(mime);
    //mDecoder = MediaCodec.createDecoderByType("audio/mp4a-latm");
    decoder.configure(format, null, null, 0);

    if (decoder == null) {
        Log.e("DecodeActivity", "Can't find video info!");
        return;
    }
    decoder.start();
    Log.v("", "Break Point 2");

    // Get decoded bytes
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();

    BufferInfo info = new BufferInfo();

    // int buffsize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    // // create an audiotrack object
    // AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
    //         AudioFormat.CHANNEL_OUT_STEREO,
    //         AudioFormat.ENCODING_PCM_16BIT,
    //         buffsize,
    //         AudioTrack.MODE_STREAM);
    // audioTrack.play();

    extractor.seekTo(WINDOW_START, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
    long start = SystemClock.elapsedRealtimeNanos();
    Log.v("", "Break Point 3");

    // MUSICAL SURFACE FEATURES
    double[] flux = new double[NUM_CHUNKS];
    double[] zeroCrossings = new double[NUM_CHUNKS];
    double[] centroid = new double[NUM_CHUNKS];
    int[] rolloff = new int[NUM_CHUNKS];
    double[] rolloffFreq = new double[NUM_CHUNKS];
    double lowEnergy = 0.0;

    // Means across all chunks
    double fluxMean = 0.0;
    double zeroCrossingsMean = 0;
    double centroidMean = 0.0;
    double rolloffMean = 0;

    // Standard deviations across all chunks
    double fluxStdDeviation = 0.0;
    double zeroCrossingsStdDeviation = 0;
    double centroidStdDeviation = 0.0;
    double rolloffStdDeviation = 0;

    // Initialise some variables to use while iterating
    double[] fftSums = new double[NUM_CHUNKS];
    int iter = 0;
    int count = 0;
    FastFourierTransformer transformer = new FastFourierTransformer(DftNormalization.STANDARD);
    double po2 = 0.0;
    Complex[] input = null;
    Complex[] output = null;
    Complex[] previousOutput = null;
    Complex[] temp = null;
    double frequency = 0.0;
    double centroidNum = 0.0;
    double centroidDen = 0.0;
    double fftValue = 0.0;
    double fftPrevious = 0.0;
    double fluxSquared = 0.0;
    int r = 0;
    boolean foundRolloff = false;
    double sum = 0;
    ArrayList<Double> data = new ArrayList<Double>();
    ArrayList<Double> currentChunk = new ArrayList<Double>();
    int gap = 0;
    int tempCount = 0;
    byte[] chunk = null;
    ArrayList<Double> outputExample = new ArrayList<Double>();
    double normConst = 0.0;

    // Iterate through the chunks
    Log.v("", "count: " + String.valueOf(count));
    while (!eosReceived && count < NUM_CHUNKS) {
        Log.v("", "Break Point " + String.valueOf(count + 4));
        Log.v("", "Inside While Loop Break Point 1");
        if (count == 0) {
            // Log.v("", "Timestamp of chunk 0: " + String.valueOf(extractor.getSampleTime()));
        }
        int inIndex = decoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            ByteBuffer buffer = inputBuffers[inIndex];
            int sampleSize = extractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
                // We shouldn't stop the playback at this point, just pass the EOS
                // flag to mDecoder, we will get it again from the
                // dequeueOutputBuffer
                //Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                extractor.advance();
            }

            int outIndex = decoder.dequeueOutputBuffer(info, TIMEOUT_US);
            Log.v("", "Inside While Loop Break Point 2");
            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;

            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                MediaFormat mediaFormat = decoder.getOutputFormat();
                Log.d("DecodeActivity", "New format " + mediaFormat);
                // audioTrack.setPlaybackRate(mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
                break;

            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;

            default:
                Log.v("", "Inside While Loop Break Point 3");
                ByteBuffer outBuffer = outputBuffers[outIndex];
                //Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outBuffer);

                chunk = new byte[info.size];
                if (chunk.length == 0) {
                    continue;
                }
                outBuffer.get(chunk); // Read the buffer all at once
                outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN

                gap = chunk.length / DOWN_FACTOR;
                currentChunk.clear();
                Log.v("", "Inside While Loop Break Point 4a");

                // ZERO CROSSINGS
                int increment = 1;
                if (chunk.length > 1000) {
                    increment = (int) ((double) chunk.length / ((double) 1000));
                }

                // Downsampling
                for (int i = 0; i < chunk.length; i = i + increment) {
                    data.add((double) chunk[i]);
                    currentChunk.add((double) chunk[i]);
                    tempCount++;
                    if (currentChunk.size() > 1) {
                        iter += FastMath.abs(sign(currentChunk.get(currentChunk.size() - 1))
                                - sign(currentChunk.get(currentChunk.size() - 2)));
                    }
                }
                increment = 0;
                tempCount = 0;
                zeroCrossings[count] = 0.5 * iter;

                po2 = FastMath.ceil(FastMath.log(currentChunk.size()) / FastMath.log(2));
                input = new Complex[(int) (FastMath.pow(2.0, po2))];
                Log.v("", "chunk length: " + chunk.length);
                Log.v("", "input length: " + input.length);
                for (int i = 0; i < input.length; i++) {
                    if (i < currentChunk.size()) {
                        input[i] = new Complex((double) currentChunk.get(i));
                    } else {
                        input[i] = new Complex(0.0);
                    }
                }

                // FFT
                output = transformer.transform(input, TransformType.FORWARD);
                outputExample.add(centroidDen);

                // CENTROID AND FLUX
                for (int i = 0; i < output.length; i++) {
                    if (count > 0) {
                        fftPrevious = fftValue;
                    }
                    fftValue = FastMath.hypot(output[i].getReal(), output[i].getImaginary());
                    fluxSquared += (fftValue - fftPrevious) * (fftValue - fftPrevious);
                    centroidNum += i * fftValue;
                    centroidDen += fftValue;
                }

                // for(int i = 0; i < output.length; i++){
                //     normConst += FastMath.hypot(output[i].getReal(), output[i].getImaginary()) *
                //             FastMath.hypot(output[i].getReal(), output[i].getImaginary());
                // }
                // fluxSquared = fluxSquared / normConst;
                flux[count] = FastMath.sqrt(fluxSquared) / 1000.0;

                // ROLLOFF
                while (!foundRolloff && r < output.length - 1) {
                    r++;
                    sum += FastMath.hypot(output[r].getReal(), output[r].getImaginary());
                    foundRolloff = checkRolloff(ROLLOFF_PROPORTIONAL_ERROR, sum, centroidDen);
                }

                fftSums[count] = centroidDen;
                if (centroidDen != 0.0) {
                    centroid[count] = centroidNum / centroidDen;
                } else {
                    centroid[count] = 0.0;
                }
                rolloff[count] = r;

                iter = 0;
                fluxSquared = 0.0;
                centroidNum = 0.0;
                centroidDen = 0.0;
                r = 0;
                sum = 0.0;
                foundRolloff = false;
                count++;

                //audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data

                decoder.releaseOutputBuffer(outIndex, false);
                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }

            if (count > 0) {
                previousOutput = output;
                output = null;
            }
        }

        if (count == NUM_CHUNKS) {
            // Log.v("", "Timestamp of last chunk: " + String.valueOf(extractor.getSampleTime()));
            decoder.stop();
            decoder.release();
            extractor.release();
        }
    } // while loop

    currentChunk.clear();
    currentChunk = null;

    // for(int i = 0; i < centroid.length; i++){
    //     Log.v("", "centroid: " + String.valueOf(centroid[i]));
    // }

    double energySum = 0.0;
    double energyAverage = 0.0;
    int lowEnergyCount = 0;
    for (int i = 0; i < NUM_CHUNKS; i++) {
        energySum += fftSums[i];
    }
    energyAverage = energySum / NUM_CHUNKS;
    for (int i = 0; i < NUM_CHUNKS; i++) {
        if (fftSums[i] < energyAverage) {
            lowEnergyCount++;
        }
    }
    lowEnergy = 100.0 * (((double) lowEnergyCount) / ((double) NUM_CHUNKS));

    // Work out the means and standard deviations
    for (int i = 0; i < NUM_CHUNKS; i++) {
        fluxMean += flux[i];
        zeroCrossingsMean += zeroCrossings[i];
        centroidMean += centroid[i];
        rolloffMean += rolloff[i];
    }
    fluxMean = fluxMean / flux.length;
    zeroCrossingsMean = zeroCrossingsMean / zeroCrossings.length;
    centroidMean = centroidMean / centroid.length;
    rolloffMean = rolloffMean / rolloff.length;

    for (int i = 0; i < NUM_CHUNKS; i++) {
        fluxStdDeviation += (flux[i] - fluxMean) * (flux[i] - fluxMean);
        zeroCrossingsStdDeviation += (zeroCrossings[i] - zeroCrossingsMean)
                * (zeroCrossings[i] - zeroCrossingsMean);
        centroidStdDeviation += (centroid[i] - centroidMean) * (centroid[i] - centroidMean);
        rolloffStdDeviation += (rolloff[i] - rolloffMean) * (rolloff[i] - rolloffMean);
    }
    fluxStdDeviation = Math.sqrt(fluxStdDeviation / flux.length);
    zeroCrossingsStdDeviation = Math.sqrt(zeroCrossingsStdDeviation / zeroCrossings.length);
    centroidStdDeviation = Math.sqrt(centroidStdDeviation / centroid.length);
    rolloffStdDeviation = Math.sqrt(rolloffStdDeviation / rolloff.length);

    Log.v("", "fluxMean: " + String.valueOf(fluxMean));
    Log.v("", "zeroCrossingsMean: " + String.valueOf(zeroCrossingsMean));
    Log.v("", "centroidMean: " + String.valueOf(centroidMean));
    Log.v("", "rolloffMean: " + String.valueOf(rolloffMean));
    Log.v("", "fluxStdDeviation: " + String.valueOf(fluxStdDeviation));
    Log.v("", "zeroCrossingsStdDeviation: " + String.valueOf(zeroCrossingsStdDeviation));
    Log.v("", "centroidStdDeviation: " + String.valueOf(centroidStdDeviation));
    Log.v("", "rolloffStdDeviation: " + String.valueOf(rolloffStdDeviation));
    Log.v("", "lowEnergy: " + String.valueOf(lowEnergy));
    Log.v("", "data size: " + String.valueOf(data.size()));

    // BEAT ANALYSIS
    Transform t = new Transform(new FastWaveletTransform(new Daubechies4()));

    double[] dataArray = new double[data.size()];
    for (int i = 0; i < data.size(); i++) {
        dataArray[i] = data.get(i);
    }
    data.clear();
    data = null;

    double powerOf2 = FastMath.ceil(FastMath.log(chunk.length) / FastMath.log(2));
    double[] dataArrayPo2 = Arrays.copyOf(dataArray, (int) (FastMath.pow(2.0, powerOf2)));
    dataArray = null;

    double[] dataCurrentInputArray = null;
    double[] dataCurrentOutputArray = null;
    double[] dataCumulativeArray = new double[dataArrayPo2.length];
    for (int i = 0; i < dataCumulativeArray.length; i++) {
        dataCumulativeArray[i] = 0.0;
    }
    double temp1 = 0.0;
    double temp2 = 0.0;
    ArrayList<Double> tempList = new ArrayList<Double>();
    int k = 16; // Downsampling factor
    int tempCount1 = 0;
    double mean = 0.0;

    for (int level = 0; level < (int) FastMath.log(2.0, dataArrayPo2.length); level++) {
        dataCurrentInputArray = t.forward(dataArrayPo2, level);
        dataCurrentOutputArray = dataCurrentInputArray;
        dataCurrentOutputArray[0] = 0.0;
        for (int i = 1; i < dataCurrentOutputArray.length; i++) {
            temp1 = FastMath.abs(dataCurrentInputArray[i]); // Full-wave rectification
            dataCurrentOutputArray[i] = (1.0 - ALPHA) * temp1 - ALPHA * dataCurrentOutputArray[i - 1]; // Low-pass filtering
        }
        tempCount1 = 0;
        mean = 0.0;
        while (k * tempCount1 < dataCurrentOutputArray.length) {
            tempList.add(dataCurrentOutputArray[k * tempCount1]); // Downsampling by k
            mean += dataCurrentOutputArray[k * tempCount1];
            tempCount1++;
        }
        mean = mean / dataCurrentOutputArray.length;
        tempCount1 = 0;
        while (k * tempCount1 < dataCurrentOutputArray.length) {
            dataCumulativeArray[k * tempCount1] += tempList.get(tempCount1) - mean; // Mean removal
            tempCount1++;
        }
    }

    int N = dataCumulativeArray.length;
    ArrayList<Double> dataList = new ArrayList<Double>();
    double dataElement = 0.0;
    for (int i = 0; i < N; i++) {
        if (dataCumulativeArray[i] != 0.0) {
            dataElement = autocorrelate(i, N, dataCumulativeArray);
            dataList.add(dataElement);
            Log.v("", "dataList: " + String.valueOf(dataElement));
        }
    }

    PeakDetector peakDetector = new PeakDetector(dataList);
    int[] peakIndices = peakDetector.process(5, 2);
    HashSet<Integer> hs = new HashSet<Integer>();
    for (int i = 0; i < peakIndices.length; i++) {
        hs.add(peakIndices[i]);
    }

    ArrayList<Integer> indicesList = new ArrayList<Integer>();
    ArrayList<Double> valuesList = new ArrayList<Double>();
    indicesList.addAll(hs);
    Double tempDoub = 0.0;
    HashMap<Double, Integer> hm = new HashMap<Double, Integer>();
    for (int i = 0; i < indicesList.size(); i++) {
        tempDoub = dataList.get(indicesList.get(i));
        hm.put(tempDoub, indicesList.get(i));
    }

    indicesList.clear();
    valuesList.clear();
    Entry<Double, Integer> tempEntry = null;
    Iterator<Entry<Double, Integer>> it = hm.entrySet().iterator();
    while (it.hasNext()) {
        tempEntry = (Entry<Double, Integer>) it.next();
        if (tempEntry.getValue() < 75) {
            it.remove();
        } else {
            //indicesList.add(tempEntry.getValue());
            valuesList.add(tempEntry.getKey());
        }
    }

    Collections.sort(valuesList);
    for (int i = 0; i < valuesList.size(); i++) {
        indicesList.add(hm.get(valuesList.get(i)));
    }

    double valuesSum = 0.0;
    double histogramSum = 0.0;
    double beatStrength = 0.0;
    double P1 = 0.0;
    double P2 = 0.0;
    double A1 = 0.0;
    double A2 = 0.0;
    double RA = 0.0;

    for (int i = 0; i < dataList.size(); i++) {
        histogramSum += dataList.get(i);
    }
    for (int i = 0; i < valuesList.size(); i++) {
        valuesSum += valuesList.get(i);
    }

    // if(histogramSum != 0.0 && valuesList.size() != 0){
    //     SUM = (1000.0 * valuesSum) / (histogramSum * valuesList.size());
    // }

    if (valuesList.size() != 0) {
        beatStrength = valuesSum / valuesList.size();
    }

    if (indicesList.size() > 0) {
        // Set P1 as the largest peak
        P1 = (double) indicesList.get(indicesList.size() - 1);
    }

    if (indicesList.size() > 1) {
        int beatCount = indicesList.size() - 2;
        boolean beatFound = false;
        // Start with P2 as the second largest peak
        P2 = (double) indicesList.get(indicesList.size() - 2);
        double diff = 0;
        // Iterate backwards through the peaks, largest to smallest
        while (!beatFound && beatCount > -1) {
            diff = ((double) indicesList.get(beatCount)) - P1;
            if (FastMath.abs(diff) / P1 > 0.3) {
                // Set P2 as the period of the first peak that is reasonably different from P1
                P2 = (double) indicesList.get(beatCount);
                beatFound = true;
            }
            beatCount--;
        }
    }

    if (indicesList.size() > 0) {
        A1 = FastMath.abs(dataList.get((int) P1)) / histogramSum;
        if (P2 != 0.0) {
            A2 = FastMath.abs(dataList.get((int) P2)) / histogramSum;
        }
        if (A1 != 0.0) {
            RA = A2 / A1;
        }
    }

    for (int i = 0; i < valuesList.size(); i++) {
        Log.v("", String.valueOf(i) + ") valuesList: " + String.valueOf(valuesList.get(i)));
    }
    Log.v("", "P1: " + String.valueOf(P1));
    Log.v("", "P2: " + String.valueOf(P2));
    Log.v("", "A1: " + String.valueOf(A1));
    Log.v("", "A2: " + String.valueOf(A2));
    Log.v("", "RA: " + String.valueOf(RA));
    Log.v("", "SUM: " + String.valueOf(histogramSum));
    Log.v("", "Number of Peaks: " + String.valueOf(valuesList.size()));

    double[] result = { fluxMean, zeroCrossingsMean, centroidMean, rolloffMean, fluxStdDeviation,
            zeroCrossingsStdDeviation, centroidStdDeviation, rolloffStdDeviation, lowEnergy, P1, P2, A1, A2,
            RA, histogramSum, valuesList.size() };

    final DenseInstance denseInstance = new DenseInstance(result);

    if (P1 + P2 + A1 + A2 + RA != 0.0) {
        Handler handler = new Handler(Looper.getMainLooper());
        handler.post(new ReturnResultsRunnable(lock, mAudioCallback, denseInstance, classLabel));
    } else {
        Log.v("", "Track could not be classified!");
    }

    // for(int i = 0; i < dataList.size(); i++){
    //     Log.v("", String.valueOf(i) + ") autocorrelation: " + String.valueOf(dataList.get(i)));
    //     histogramSum += dataList.get(i);
    // }
    // Log.v("", "indicesList size: " + String.valueOf(indicesList.size()));
    // for(int i = 0; i < valuesList.size(); i++){
    //     Log.v("", "indicesList: " + String.valueOf(indicesList.get(i)) + ", value: " + String.valueOf(valuesList.get(i)));
    //     valuesSum += valuesList.get(i);
    // }

    //Classifier c = new KNearestNeighbors(5);

    // double A0 = valuesList.get(valuesList.size() - 1) / valuesSum;
    // double A1 = valuesList.get(valuesList.size() - 2) / valuesSum;
    // double RA = A1 / A0;
    // double P0 = 1 / ((double) indicesList.get(indicesList.size() - 1));
    // double P1 = 1 / ((double) indicesList.get(indicesList.size() - 2));
    //
    // Log.v("", "A0: " + String.valueOf(A0));
    // Log.v("", "A1: " + String.valueOf(A1));
    // Log.v("", "RA: " + String.valueOf(RA));
    // Log.v("", "P0: " + String.valueOf(P0));
    // Log.v("", "P1: " + String.valueOf(P1));
    // Log.v("", "SUM: " + String.valueOf(histogramSum));

    long durationUs = SystemClock.elapsedRealtimeNanos() - start;
    double durationSecs = ((double) durationUs) / 1000000000.0;
    Log.v("", "count = " + String.valueOf(count) + ", Sample rate: " + String.valueOf(sampleRate)
            + ", Duration: " + String.valueOf(durationSecs));

    // audioTrack.stop();
    // audioTrack.release();
    // audioTrack = null;
}
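In the decode loop above, the ByteBuffer discipline that matters is the pair outBuffer.get(chunk) followed by outBuffer.clear(): MediaCodec recycles the same output buffers across calls (hence the "MUST DO" comment), so the position must be reset before the buffer is released for reuse. The same reset-on-release discipline applies to any pooled buffer; a minimal, generic sketch with a hypothetical single-threaded pool type:

import java.nio.ByteBuffer;
import java.util.ArrayDeque;

// Hypothetical buffer pool illustrating the reuse discipline, not code from the source above.
final class BufferPool {
    private final ArrayDeque<ByteBuffer> free = new ArrayDeque<>();

    ByteBuffer acquire(int size) {
        ByteBuffer b = free.poll();
        return (b != null && b.capacity() >= size) ? b : ByteBuffer.allocate(size);
    }

    void release(ByteBuffer b) {
        b.clear();   // reset position/limit, or the next user starts mid-buffer
        free.push(b);
    }
}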
From source file:org.apache.nifi.processors.standard.TailFile.java
/**
 * Read new lines from the given FileChannel, copying them to the given
 * OutputStream. The Checksum is used in order to later determine whether or
 * not data has been consumed.
 *
 * @param reader The FileChannel to read data from
 * @param buffer the buffer to use for copying data
 * @param out the OutputStream to copy the data to
 * @param checksum the Checksum object to use in order to calculate checksum
 * for recovery purposes
 *
 * @return The new position after the lines have been read
 * @throws java.io.IOException if an I/O error occurs.
 */
private long readLines(final FileChannel reader, final ByteBuffer buffer, final OutputStream out,
        final Checksum checksum) throws IOException {
    getLogger().debug("Reading lines starting at position {}", new Object[] { reader.position() });

    try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
        long pos = reader.position();
        long rePos = pos; // position to re-read

        int num;
        int linesRead = 0;
        boolean seenCR = false;
        buffer.clear();

        while (((num = reader.read(buffer)) != -1)) {
            buffer.flip();

            for (int i = 0; i < num; i++) {
                byte ch = buffer.get(i);

                switch (ch) {
                case '\n': {
                    baos.write(ch);
                    seenCR = false;
                    baos.writeTo(out);
                    final byte[] baosBuffer = baos.toByteArray();
                    checksum.update(baosBuffer, 0, baos.size());
                    if (getLogger().isTraceEnabled()) {
                        getLogger().trace("Checksum updated to {}", new Object[] { checksum.getValue() });
                    }

                    baos.reset();
                    rePos = pos + i + 1;
                    linesRead++;
                    break;
                }
                case '\r': {
                    baos.write(ch);
                    seenCR = true;
                    break;
                }
                default: {
                    if (seenCR) {
                        seenCR = false;
                        baos.writeTo(out);
                        final byte[] baosBuffer = baos.toByteArray();
                        checksum.update(baosBuffer, 0, baos.size());
                        if (getLogger().isTraceEnabled()) {
                            getLogger().trace("Checksum updated to {}", new Object[] { checksum.getValue() });
                        }

                        linesRead++;
                        baos.reset();
                        baos.write(ch);
                        rePos = pos + i;
                    } else {
                        baos.write(ch);
                    }
                }
                }
            }

            pos = reader.position();
        }

        if (rePos < reader.position()) {
            getLogger().debug("Read {} lines; repositioning reader from {} to {}",
                    new Object[] { linesRead, pos, rePos });
            reader.position(rePos); // Ensure we can re-read if necessary
        }

        return rePos;
    }
}
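The buffer protocol here is the canonical channel read loop: clear() to start empty, read() to fill, flip() to expose only the bytes just read, and absolute get(i) to inspect them without disturbing the position. A canonical version of that cycle (a general sketch with a clear() per iteration, not a line-for-line reduction of the method above):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

static void readAll(ReadableByteChannel reader, ByteBuffer buffer) throws IOException {
    buffer.clear();                          // start with an empty (reusable) buffer
    int num;
    while ((num = reader.read(buffer)) != -1) {
        buffer.flip();                       // limit = bytes read, position = 0
        for (int i = 0; i < num; i++) {
            byte ch = buffer.get(i);         // absolute get: position is untouched
            // ... process ch ...
        }
        buffer.clear();                      // make room for the next read
    }
}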
From source file:com.healthmarketscience.jackcess.impl.TableImpl.java
/**
 * Writes a new table defined by the given TableCreator to the database.
 * @usage _advanced_method_
 */
protected static void writeTableDefinition(TableCreator creator) throws IOException {
    // first, create the usage map page
    createUsageMapDefinitionBuffer(creator);

    // next, determine how big the table def will be (in case it will be more
    // than one page)
    JetFormat format = creator.getFormat();
    int idxDataLen = (creator.getIndexCount()
            * (format.SIZE_INDEX_DEFINITION + format.SIZE_INDEX_COLUMN_BLOCK))
            + (creator.getLogicalIndexCount() * format.SIZE_INDEX_INFO_BLOCK);
    int colUmapLen = creator.getLongValueColumns().size() * 10;
    int totalTableDefSize = format.SIZE_TDEF_HEADER
            + (format.SIZE_COLUMN_DEF_BLOCK * creator.getColumns().size()) + idxDataLen + colUmapLen
            + format.SIZE_TDEF_TRAILER;

    // total up the amount of space used by the column and index names (2
    // bytes per char + 2 bytes for the length)
    for (ColumnBuilder col : creator.getColumns()) {
        int nameByteLen = (col.getName().length() * JetFormat.TEXT_FIELD_UNIT_SIZE);
        totalTableDefSize += nameByteLen + 2;
    }

    for (IndexBuilder idx : creator.getIndexes()) {
        int nameByteLen = (idx.getName().length() * JetFormat.TEXT_FIELD_UNIT_SIZE);
        totalTableDefSize += nameByteLen + 2;
    }

    // now, create the table definition
    PageChannel pageChannel = creator.getPageChannel();
    ByteBuffer buffer = PageChannel.createBuffer(Math.max(totalTableDefSize, format.PAGE_SIZE));
    writeTableDefinitionHeader(creator, buffer, totalTableDefSize);

    if (creator.hasIndexes()) {
        // index row counts
        IndexData.writeRowCountDefinitions(creator, buffer);
    }

    // column definitions
    ColumnImpl.writeDefinitions(creator, buffer);

    if (creator.hasIndexes()) {
        // index and index data definitions
        IndexData.writeDefinitions(creator, buffer);
        IndexImpl.writeDefinitions(creator, buffer);
    }

    // write long value column usage map references
    for (ColumnBuilder lvalCol : creator.getLongValueColumns()) {
        buffer.putShort(lvalCol.getColumnNumber());
        TableCreator.ColumnState colState = creator.getColumnState(lvalCol);

        // owned pages umap (both are on same page)
        buffer.put(colState.getUmapOwnedRowNumber());
        ByteUtil.put3ByteInt(buffer, colState.getUmapPageNumber());
        // free space pages umap
        buffer.put(colState.getUmapFreeRowNumber());
        ByteUtil.put3ByteInt(buffer, colState.getUmapPageNumber());
    }

    // End of tabledef
    buffer.put((byte) 0xff);
    buffer.put((byte) 0xff);

    // write table buffer to database
    if (totalTableDefSize <= format.PAGE_SIZE) {

        // easy case, fits on one page
        buffer.putShort(format.OFFSET_FREE_SPACE, (short) (buffer.remaining() - 8)); // overwrite page free space
        // Write the tdef page to disk.
        pageChannel.writePage(buffer, creator.getTdefPageNumber());

    } else {

        // need to split across multiple pages
        ByteBuffer partialTdef = pageChannel.createPageBuffer();
        buffer.rewind();
        int nextTdefPageNumber = PageChannel.INVALID_PAGE_NUMBER;
        while (buffer.hasRemaining()) {

            // reset for next write
            partialTdef.clear();

            if (nextTdefPageNumber == PageChannel.INVALID_PAGE_NUMBER) {
                // this is the first page.  note, the first page already has the
                // page header, so no need to write it here
                nextTdefPageNumber = creator.getTdefPageNumber();
            } else {
                // write page header
                writeTablePageHeader(partialTdef);
            }

            // copy the next page of tdef bytes
            int curTdefPageNumber = nextTdefPageNumber;
            int writeLen = Math.min(partialTdef.remaining(), buffer.remaining());
            partialTdef.put(buffer.array(), buffer.position(), writeLen);
            ByteUtil.forward(buffer, writeLen);

            if (buffer.hasRemaining()) {
                // need a next page
                nextTdefPageNumber = pageChannel.allocateNewPage();
                partialTdef.putInt(format.OFFSET_NEXT_TABLE_DEF_PAGE, nextTdefPageNumber);
            }

            // update page free space
            partialTdef.putShort(format.OFFSET_FREE_SPACE, (short) (partialTdef.remaining() - 8)); // overwrite page free space

            // write partial page to disk
            pageChannel.writePage(partialTdef, curTdefPageNumber);
        }
    }
}
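In the multi-page branch, partialTdef.clear() at the top of each iteration resets the page buffer so every trip through the loop starts with a fresh, full-capacity page. The same shape appears whenever a large buffer is copied out in fixed-size chunks; a compact sketch (a generic pagination helper, assuming a heap-backed source buffer as in the code above):

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

static List<byte[]> paginate(ByteBuffer src, int pageSize) {
    ByteBuffer page = ByteBuffer.allocate(pageSize);
    List<byte[]> pages = new ArrayList<>();
    src.rewind();
    while (src.hasRemaining()) {
        page.clear();                                 // fresh page each iteration
        int len = Math.min(page.remaining(), src.remaining());
        page.put(src.array(), src.position(), len);   // bulk-copy one page's worth
        src.position(src.position() + len);
        page.flip();
        byte[] out = new byte[len];
        page.get(out);
        pages.add(out);
    }
    return pages;
}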
From source file:automenta.knowtention.channel.LineFileChannel.java
@Override
public void run() {
    FileInputStream fileInputStream = null;
    FileChannel channel = null;
    ByteBuffer buffer = null;
    LinkedList<String> lines = new LinkedList();
    StringBuilder builder = new StringBuilder();
    long lastSize = -1, lastLastModified = -1;

    while (running) {
        try {
            Thread.sleep(delayPeriodMS);
        } catch (InterruptedException ex) {
        }

        lines.clear();

        try {
            fileInputStream = new FileInputStream(file);
            channel = fileInputStream.getChannel();

            long lastModified = file.lastModified();
            long csize = channel.size();

            if ((lastModified == lastLastModified) && (csize == lastSize)) { // also check file update time?
                fileInputStream.close();
                continue;
            }

            int currentPos = (int) csize;

            buffer = channel.map(FileChannel.MapMode.READ_ONLY, 0, csize);
            buffer.position(currentPos);

            lastSize = csize;
            lastLastModified = lastModified;

            int count = 0;
            for (long i = csize - 1; i >= 0; i--) {
                char c = (char) buffer.get((int) i);
                if (c == '\n') {
                    count++;
                    builder.reverse();
                    lines.addFirst(builder.toString());
                    if (count == numLines) {
                        break;
                    }
                    builder.setLength(0);
                } else
                    builder.append(c);
            }

            update(lines);

            lines.clear();
            buffer.clear();
            channel.close();
            fileInputStream.close();
            fileInputStream = null;
        } catch (Exception ex) {
            Logger.getLogger(LineFileChannel.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    try {
        channel.close();
    } catch (IOException ex) {
        Logger.getLogger(LineFileChannel.class.getName()).log(Level.SEVERE, null, ex);
    }
}
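A detail worth flagging in this example: buffer is a MappedByteBuffer, and clear() on it only resets position/limit/mark; it does not unmap the file or release the mapping (unmapping happens when the buffer is garbage collected). A small sketch of that behavior, assuming a non-empty file at a hypothetical path:

import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

// Hypothetical helper: reads the last byte of a file via a memory mapping.
static byte lastByte(String path) throws Exception {
    try (FileChannel ch = FileChannel.open(Paths.get(path), StandardOpenOption.READ)) {
        MappedByteBuffer map = ch.map(FileChannel.MapMode.READ_ONLY, 0, ch.size());
        map.clear();                          // resets markers only; the mapping stays valid
        return map.get((int) ch.size() - 1);  // absolute read still works after clear()
    }
}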
From source file:org.sglover.checksum.ChecksumServiceImpl.java
@Override
public NodeChecksums getChecksums(final Node node, final InputStream in) {
    final String nodeId = node.getNodeId();
    final Long nodeVersion = node.getNodeVersion();
    final Long nodeInternalId = node.getNodeInternalId();
    final String versionLabel = node.getVersionLabel();

    int x = 0;

    NodeChecksums documentChecksums = new NodeChecksums(nodeId, nodeInternalId, nodeVersion, versionLabel,
            blockSize);

    try (ReadableByteChannel fc = getChannel(in)) {
        ByteBuffer data = ByteBuffer.allocate(blockSize * 20);

        int bytesRead = -1;
        int blockNum = 1; // starts at 1

        do {
            bytesRead = fc.read(data);
            if (bytesRead > 0) {
                x += bytesRead;

                data.flip();

                long numBlocks = data.limit() / blockSize + (data.limit() % blockSize > 0 ? 1 : 0);

                // spin through the data and create checksums for each block
                for (int i = 0; i < numBlocks; i++) {
                    int start = i * blockSize;
                    int end = start + blockSize - 1;
                    if (end >= data.limit()) {
                        end = data.limit() - 1;
                    }

                    // calculate the adler32 checksum
                    Adler32 adlerInfo = new Adler32(data, start, end, hasher);

                    // calculate the full md5 checksum
                    String md5sum = hasher.md5(data, start, end);

                    Checksum checksum = new Checksum(blockNum, start, end, adlerInfo.getHash(),
                            adlerInfo.getAdler32(), md5sum);
                    if (blockNum < 2) {
                        System.out.println(checksum);
                    }
                    documentChecksums.addChecksum(checksum);

                    blockNum++;
                }

                data.clear();
            }
        } while (bytesRead > 0);
    } catch (NoSuchAlgorithmException | IOException e) {
        throw new RuntimeException(e);
    }

    return documentChecksums;
}
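This is the textbook fill-drain-reset cycle for streaming a channel through a fixed buffer: read() fills, flip() bounds the readable region, the blocks are processed by index, and clear() empties the buffer for the next pass. Reduced to its skeleton (the block-processing step is elided):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

static void processInBlocks(ReadableByteChannel fc, int blockSize) throws IOException {
    ByteBuffer data = ByteBuffer.allocate(blockSize * 20);
    int bytesRead;
    do {
        bytesRead = fc.read(data);
        if (bytesRead > 0) {
            data.flip();             // limit = bytes available, position = 0
            // ... checksum each blockSize-sized slice of [0, data.limit()) ...
            data.clear();            // reset for the next read
        }
    } while (bytesRead > 0);
}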
From source file:edu.hawaii.soest.kilonalu.ctd.SBE37Source.java
/**
 * A method that executes the streaming of data from the source to the RBNB
 * server after all configuration of settings, connections to hosts, and
 * thread initializing occurs. This method contains the detailed code for
 * streaming the data and interpreting the stream.
 */
protected boolean execute() {
    logger.debug("SBE37Source.execute() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean failed = false;

    // while data are being sent, read them into the buffer
    try {
        this.socketChannel = getSocketConnection();

        // create four byte placeholders used to evaluate up to a four-byte
        // window. The FIFO layout looks like:
        // -------------------------
        // in ---> | One | Two |Three|Four | ---> out
        // -------------------------
        byte byteOne = 0x00, // set initial placeholder values
                byteTwo = 0x00, byteThree = 0x00, byteFour = 0x00;

        // Create a buffer that will store the sample bytes as they are read
        ByteBuffer sampleBuffer = ByteBuffer.allocate(getBufferSize());

        // create a byte buffer to store bytes from the TCP stream
        ByteBuffer buffer = ByteBuffer.allocateDirect(getBufferSize());

        // create a character string to store characters from the TCP stream
        StringBuilder responseString = new StringBuilder();

        // add a channel of data that will be pushed to the server.
        // Each sample will be sent to the Data Turbine as an rbnb frame.
        ChannelMap rbnbChannelMap = new ChannelMap();
        int channelIndex = rbnbChannelMap.Add(getRBNBChannelName());

        // wake the instrument with an initial take sample command
        this.command = this.commandPrefix + getInstrumentID() + "TS" + this.commandSuffix;
        this.sentCommand = queryInstrument(this.command);

        // verify the instrument ID is correct
        while (getInstrumentID() == null) {
            // allow time for the instrument response
            streamingThread.sleep(2000);
            buffer.clear();

            // send the command and update the sentCommand status
            this.sentCommand = queryInstrument(this.command);

            // read the response into the buffer. Note that the streamed bytes
            // are 8-bit, not 16-bit Unicode characters. Use the US-ASCII
            // encoding instead.
            while (this.socketChannel.read(buffer) != -1 || buffer.position() > 0) {
                buffer.flip();
                while (buffer.hasRemaining()) {
                    String nextCharacter = new String(new byte[] { buffer.get() }, "US-ASCII");
                    responseString.append(nextCharacter);
                }
                // look for the command line ending
                if (responseString.toString().indexOf("S>") > 0) {
                    // parse the ID from the idCommand response
                    int idStartIndex = responseString.indexOf("=") + 2;
                    int idStopIndex = responseString.indexOf("=") + 4;
                    String idString = responseString.substring(idStartIndex, idStopIndex);
                    // test that the ID is a valid number and set the instrument ID
                    if ((new Integer(idString)).intValue() > 0) {
                        setInstrumentID(idString);
                        buffer.clear();
                        logger.debug("Instrument ID is " + getInstrumentID() + ".");
                        break;
                    } else {
                        logger.debug("Instrument ID \"" + idString + "\" was not set.");
                    }
                } else {
                    break;
                }
                buffer.compact();
                if (getInstrumentID() != null) {
                    break;
                }
            }
        }

        // instrumentID is set

        // allow time for the instrument response
        streamingThread.sleep(5000);
        this.command = this.commandPrefix + getInstrumentID() + this.takeSampleCommand + this.commandSuffix;
        this.sentCommand = queryInstrument(command);

        // while there are bytes to read from the socket ...
        while (this.socketChannel.read(buffer) != -1 || buffer.position() > 0) {
            // prepare the buffer for reading
            buffer.flip();

            // while there are unread bytes in the ByteBuffer
            while (buffer.hasRemaining()) {
                byteOne = buffer.get();

                //logger.debug("b1: " + new String(Hex.encodeHex((new byte[]{byteOne}))) + "\t" +
                //             "b2: " + new String(Hex.encodeHex((new byte[]{byteTwo}))) + "\t" +
                //             "b3: " + new String(Hex.encodeHex((new byte[]{byteThree}))) + "\t" +
                //             "b4: " + new String(Hex.encodeHex((new byte[]{byteFour}))) + "\t" +
                //             "sample pos: " + sampleBuffer.position() + "\t" +
                //             "sample rem: " + sampleBuffer.remaining() + "\t" +
                //             "sample cnt: " + sampleByteCount + "\t" +
                //             "buffer pos: " + buffer.position() + "\t" +
                //             "buffer rem: " + buffer.remaining() + "\t" +
                //             "state: " + state
                //);

                // Use a State Machine to process the byte stream.
                // Start building an rbnb frame for the entire sample, first by
                // inserting a timestamp into the channelMap. This time is merely
                // the time of insert into the data turbine, not the time of
                // observations of the measurements. That time should be parsed out
                // of the sample in the Sink client code
                switch (state) {

                case 0: // sample line is begun by S>
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x3E && byteTwo == 0x53) {
                        // we've found the beginning of a sample, move on
                        state = 1;
                        break;
                    } else {
                        break;
                    }

                case 1: // read the rest of the bytes to the next EOL characters
                    // sample line is terminated by S>
                    // note bytes are in reverse order in the FIFO window
                    if (byteOne == 0x3E && byteTwo == 0x53) {
                        sampleByteCount++; // add the last byte found to the count

                        // add the last byte found to the sample buffer
                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            sampleBuffer.put(byteOne);
                        }

                        // extract just the length of the sample bytes (less 2 bytes
                        // to exclude the 'S>' prompt characters) out of the
                        // sample buffer, and place it in the channel map as a
                        // byte array. Then, send it to the data turbine.
                        byte[] sampleArray = new byte[sampleByteCount - 2];
                        sampleBuffer.flip();
                        sampleBuffer.get(sampleArray);

                        // send the sample to the data turbine
                        rbnbChannelMap.PutTimeAuto("server");
                        String sampleString = new String(sampleArray, "US-ASCII");
                        rbnbChannelMap.PutMime(channelIndex, "text/plain");
                        rbnbChannelMap.PutDataAsString(channelIndex, sampleString);
                        getSource().Flush(rbnbChannelMap);
                        logger.info("Sample: " + sampleString);
                        logger.info("flushed data to the DataTurbine. ");

                        byteOne = 0x00;
                        byteTwo = 0x00;
                        byteThree = 0x00;
                        byteFour = 0x00;
                        sampleBuffer.clear();
                        sampleByteCount = 0;
                        //rbnbChannelMap.Clear();
                        //logger.debug("Cleared b1,b2,b3,b4. Cleared sampleBuffer. Cleared rbnbChannelMap.");
                        //state = 0;

                        // Once the sample is flushed, take a new sample
                        if (getInstrumentID() != null) {
                            // allow time for the instrument response
                            streamingThread.sleep(2000);
                            this.command = this.commandPrefix + getInstrumentID() + this.takeSampleCommand
                                    + this.commandSuffix;
                            this.sentCommand = queryInstrument(command);
                        }

                    } else { // not 0x0A0D

                        // still in the middle of the sample, keep adding bytes
                        sampleByteCount++; // add each byte found

                        if (sampleBuffer.remaining() > 0) {
                            sampleBuffer.put(byteOne);
                        } else {
                            sampleBuffer.compact();
                            logger.debug("Compacting sampleBuffer ...");
                            sampleBuffer.put(byteOne);
                        }

                        break;
                    } // end if for 0x0A0D EOL

                } // end switch statement

                // shift the bytes in the FIFO window
                byteFour = byteThree;
                byteThree = byteTwo;
                byteTwo = byteOne;

            } // end while (more unread bytes)

            // prepare the buffer to read in more bytes from the stream
            buffer.compact();

        } // end while (more socket bytes to read)

        this.socketChannel.close();

    } catch (IOException e) {
        // In the event of an i/o exception, log the exception, and allow execute()
        // to return false, which will prompt a retry.
        failed = true;
        e.printStackTrace();
        return !failed;
    } catch (SAPIException sapie) {
        // In the event of an RBNB communication exception, log the exception,
        // and allow execute() to return false, which will prompt a retry.
        failed = true;
        sapie.printStackTrace();
        return !failed;
    } catch (java.lang.InterruptedException ie) {
        ie.printStackTrace();
    }

    return !failed;
}
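This example mixes the two ways to reset a buffer: sampleBuffer.clear() discards a fully processed sample, while buffer.compact() preserves unread bytes between socket reads. The distinction matters whenever a message can straddle two reads; a minimal standalone contrast:

import java.nio.ByteBuffer;

public class ClearVsCompact {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.put(new byte[] { 'a', 'b', 'c' });
        buf.flip();
        buf.get();        // consume 'a'; 'b' and 'c' remain unread
        buf.compact();    // keep 'b','c': moved to the front, position = 2, ready to refill
        // buf.clear() here would instead forget 'b' and 'c' entirely (position = 0, limit = 8)
        System.out.println(buf.position());   // prints 2
    }
}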
From source file:com.koda.integ.hbase.storage.FileExtStorage.java
@Override
public StorageHandle getData(StorageHandle storeHandle, ByteBuffer buf) {
    FileStorageHandle fsh = (FileStorageHandle) storeHandle;

    // Check if current file and offset > currentFileOffset
    int id = maxId.get();
    if (fsh.getId() > id || (fsh.getId() == id && fsh.getOffset() >= currentFileOffset.get())) {
        // not found
        buf.putInt(0, 0);
        return fsh;
    }

    RandomAccessFile file = getFile(fsh.getId()); //openFile(fsh.getId(), "r");

    boolean needSecondChance = needSecondChance(fsh.getId());

    try {
        if (file == null) {
            // return null
            buf.putInt(0, 0);
        } else {
            buf.clear();
            int toRead = fsh.getSize();
            buf.putInt(fsh.getSize());
            buf.limit(4 + toRead);
            try {
                FileChannel fc = file.getChannel();
                int total = 0;
                int c = 0;
                // the offset starts after the overall object length: add +4
                int off = fsh.getOffset() + 4;
                while (total < toRead) {
                    c = fc.read(buf, off);
                    off += c;
                    if (c < 0) {
                        // return not found
                        buf.putInt(0, 0);
                        break;
                    }
                    total += c;
                }
            } catch (IOException e) {
                // return not found
                if (fsh.getId() > minId.get()) {
                    e.printStackTrace();
                }
                buf.putInt(0, 0);
            }
        }
        if (buf.getInt(0) != 0 && needSecondChance) {
            // store again
            fsh = (FileStorageHandle) storeData(buf);
        }
        return fsh;
    } finally {
        if (file != null) {
            // return the file back
            // PUT we need for old version
            putFile(fsh.getId(), file);
        }
    }
}
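Before filling the caller-supplied buffer, the method calls clear() and then limit(4 + toRead) so the positional read loop cannot overshoot the object's length. Pre-sizing a buffer this way is a common defensive pattern; a short sketch under assumed types (the channel, offsets, and record size are placeholders, and recordSize must not exceed buf.capacity()):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

static void readRecord(FileChannel fc, ByteBuffer buf, long offset, int recordSize) throws IOException {
    buf.clear();                    // full capacity available again
    buf.limit(recordSize);          // cap the fill at exactly one record
    long off = offset;
    while (buf.hasRemaining()) {
        int c = fc.read(buf, off);  // positional read: does not move the channel position
        if (c < 0) {
            throw new IOException("record truncated at offset " + off);
        }
        off += c;
    }
}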