Example usage for android.media AudioFormat CHANNEL_OUT_MONO

List of usage examples for android.media AudioFormat CHANNEL_OUT_MONO

Introduction

This page lists usage examples for android.media AudioFormat CHANNEL_OUT_MONO.

Prototype

int CHANNEL_OUT_MONO
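
Before the project examples below, here is a minimal, self-contained sketch of how CHANNEL_OUT_MONO is typically used: it sizes and constructs a streaming AudioTrack for mono 16-bit PCM output. The 44100 Hz sample rate, the STREAM_MUSIC stream type, and the class and method names are illustrative assumptions, not taken from the examples on this page.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class MonoPlaybackSketch {

    // Query the minimum buffer for mono 16-bit PCM output, then build a streaming AudioTrack.
    public static AudioTrack createMonoTrack() {
        int sampleRate = 44100; // assumed sample rate for illustration
        int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        return new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * minBufferSize, AudioTrack.MODE_STREAM);
    }
}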


Usage

From source file: com.brejza.matt.habmodem.Dsp_service.java

public void startAudio() {
    if (!_enableDecoder)
        return;

    boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);

    System.out.println("isRecording: " + isRecording);
    logEvent("Starting Audio. Mic avaliable: " + mic, false);
    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);

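        // Mono 16-bit output track at 8 kHz; playback buffer is twice the capture buffer size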
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {

            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);

            if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
                logEvent("Error - Could not initialise audio", true);
                return;
            }
            logEvent("Using default audio source", false);
        }

        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        logEvent("Starting Audio Thread.", false);
        setDecoderRunningNotification();
        ct.start();
    }
}

From source file: com.cypress.cysmart.RDKEmulatorView.RemoteControlEmulatorFragment.java

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    /**
     * Getting the current orientation of the screen
     * Loading a different view for landscape and portrait
     */
    int currentOrientation = getResources().getConfiguration().orientation;
    if (currentOrientation == Configuration.ORIENTATION_LANDSCAPE) {
        mParentView = inflater.inflate(R.layout.rdk_emulator_view_landscape, container, false);
    } else {
        mParentView = inflater.inflate(R.layout.rdk_emulator_view_portrait, container, false);
    }
    mProgressDialog = new ProgressDialog(getActivity());
    /**
     * Getting the IDs of all Emulator view UI elements
     */
    Button mTrackpadView = (Button) mParentView.findViewById(R.id.trackpad_btn);
    Button mMicrophoneView = (Button) mParentView.findViewById(R.id.microphone_btn);
    mVolumePlusbtn = (ImageButton) mParentView.findViewById(R.id.volume_plus_btn);
    mVolumeMinusBtn = (ImageButton) mParentView.findViewById(R.id.volume_minus_btn);
    mChannelPlusBtn = (ImageButton) mParentView.findViewById(R.id.channel_plus_btn);
    mChannelMinusBtn = (ImageButton) mParentView.findViewById(R.id.channel_minus_btn);
    mLeftBtn = (ImageButton) mParentView.findViewById(R.id.left_btn);
    mRightBtn = (ImageButton) mParentView.findViewById(R.id.right_btn);
    mBackBtn = (ImageButton) mParentView.findViewById(R.id.back_btn);
    mGesturebtn = (ImageButton) mParentView.findViewById(R.id.gesture_btn);
    mExitBtn = (ImageButton) mParentView.findViewById(R.id.exit_btn);
    mPowerBtn = (ImageButton) mParentView.findViewById(R.id.power_btn);
    mRecBtn = (ImageButton) mParentView.findViewById(R.id.record_btn);
    /**
     * AudioTrack class initialisation as follows
     *  streamType- AudioManager.STREAM_MUSIC,
     *  sampleRateInHz- 16000,
     *  channelConfig- AudioFormat.CHANNEL_OUT_MONO,
     *  audioFormat-AudioFormat.ENCODING_PCM_16BIT,
     *  bufferSizeInBytes-8000,
     *  mode- AudioTrack.MODE_STREAM
     *
     */
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, BUFFER_SIZE, AudioTrack.MODE_STREAM);
    /**
     * Trackpad button click listener
     */
    mTrackpadView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            TrackpadEmulatorFragment trackpadService = new TrackpadEmulatorFragment();
            try {
                displayView(trackpadService);
            } catch (Exception e) {
                e.printStackTrace();
            }

        }
    });
    /**
     * Microphone button click listener
     */
    mMicrophoneView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            MicrophoneEmulatorFragment microphoneService = new MicrophoneEmulatorFragment();
            microphoneService.create(mservice);
            displayView(microphoneService);
        }
    });
    return mParentView;
}

From source file: com.ferdi2005.secondgram.voip.VoIPService.java

@Override
public void onCreate() {
    super.onCreate();
    FileLog.d("=============== VoIPService STARTING ===============");
    AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE);
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1
            && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) {
        int outFramesPerBuffer = Integer
                .parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
        VoIPController.setNativeBufferSize(outFramesPerBuffer);
    } else {
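        // Fall back to half the minimum buffer size for mono 16-bit output at 48 kHz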
        VoIPController.setNativeBufferSize(
                AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT)
                        / 2);
    }
    final SharedPreferences preferences = getSharedPreferences("mainconfig", MODE_PRIVATE);
    VoIPServerConfig.setConfig(preferences.getString("voip_server_config", "{}"));
    if (System.currentTimeMillis() - preferences.getLong("voip_server_config_updated", 0) > 24 * 3600000) {
        ConnectionsManager.getInstance().sendRequest(new TLRPC.TL_phone_getCallConfig(), new RequestDelegate() {
            @Override
            public void run(TLObject response, TLRPC.TL_error error) {
                if (error == null) {
                    String data = ((TLRPC.TL_dataJSON) response).data;
                    VoIPServerConfig.setConfig(data);
                    preferences.edit().putString("voip_server_config", data)
                            .putLong("voip_server_config_updated",
                                    BuildConfig.DEBUG ? 0 : System.currentTimeMillis())
                            .apply();
                }
            }
        });
    }
    try {
        controller = new VoIPController();
        controller.setConnectionStateListener(this);
        controller.setConfig(MessagesController.getInstance().callPacketTimeout / 1000.0,
                MessagesController.getInstance().callConnectTimeout / 1000.0,
                preferences.getInt("VoipDataSaving", VoIPController.DATA_SAVING_NEVER));

        cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE))
                .newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip");
        cpuWakelock.acquire();

        btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null;

        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        filter.addAction(ACTION_HEADSET_PLUG);
        if (btAdapter != null) {
            filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
            filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
        }
        filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
        filter.addAction(getPackageName() + ".END_CALL");
        filter.addAction(getPackageName() + ".DECLINE_CALL");
        filter.addAction(getPackageName() + ".ANSWER_CALL");
        registerReceiver(receiver, filter);

        ConnectionsManager.getInstance().setAppPaused(false, false);

        soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0);
        spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1);
        spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1);
        spFailedID = soundPool.load(this, R.raw.voip_failed, 1);
        spEndId = soundPool.load(this, R.raw.voip_end, 1);
        spBusyId = soundPool.load(this, R.raw.voip_busy, 1);

        am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class));

        if (btAdapter != null && btAdapter.isEnabled()) {
            int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
            updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED);
            if (headsetState == BluetoothProfile.STATE_CONNECTED)
                am.setBluetoothScoOn(true);
            for (StateListener l : stateListeners)
                l.onAudioSettingsChanged();
        }

        NotificationCenter.getInstance().addObserver(this, NotificationCenter.appDidLogout);
    } catch (Exception x) {
        FileLog.e("error initializing voip controller", x);
        callFailed();
    }
}

From source file: org.noise_planet.noisecapture.CalibrationLinearityActivity.java

private void playNewTrack() {

    double rms = dbToRms(99 - (splLoop++) * DB_STEP);
    short[] data = makeWhiteNoiseSignal(44100, rms);
    double[] fftCenterFreq = FFTSignalProcessing
            .computeFFTCenterFrequency(AudioProcess.REALTIME_SAMPLE_RATE_LIMITATION);
    FFTSignalProcessing fftSignalProcessing = new FFTSignalProcessing(44100, fftCenterFreq, 44100);
    fftSignalProcessing.addSample(data);
    whiteNoisedB = fftSignalProcessing.computeGlobalLeq();
    freqLeqStats.add(new LinearCalibrationResult(fftSignalProcessing.processSample(true, false, false)));
    LOGGER.info("Emit white noise of " + whiteNoisedB + " dB");
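    // Create the mono 16-bit track once, in static mode, sized to hold the whole white-noise buffer (2 bytes per sample)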
    if (audioTrack == null) {
        audioTrack = new AudioTrack(getAudioOutput(), 44100, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, data.length * (Short.SIZE / 8), AudioTrack.MODE_STATIC);
    } else {
        try {
            audioTrack.pause();
            audioTrack.flush();
        } catch (IllegalStateException ex) {
            // Ignore
        }
    }
    audioTrack.setLoopPoints(0, audioTrack.write(data, 0, data.length), -1);
    audioTrack.play();
}