Example usage for android.app Activity getApplicationContext

List of usage examples for android.app Activity getApplicationContext

Introduction

On this page you can find example usage for android.app Activity getApplicationContext.

Prototype

@Override
    public Context getApplicationContext() 

Source Link

Usage

From source file:com.microsoft.windowsazure.mobileservices.MobileServiceClient.java

/**
 * Invokes Microsoft Azure Mobile Service authentication using the Google
 * account registered on the device.
 *
 * @param activity The activity that triggered the authentication
 * @param account  The account used for the login operation
 * @param scopes   The scopes used as authentication token type for login
 * @return a future that completes with the authenticated user, or fails with
 *         whatever exception interrupted the login flow
 */
public ListenableFuture<MobileServiceUser> loginWithGoogleAccount(Activity activity, Account account,
        String scopes) {
    final SettableFuture<MobileServiceUser> future = SettableFuture.create();

    try {
        if (account == null) {
            throw new IllegalArgumentException("account");
        }

        final MobileServiceClient client = this;

        // Invoked by AccountManager once the Google auth-token request resolves.
        AccountManagerCallback<Bundle> authCallback = new AccountManagerCallback<Bundle>() {

            @Override
            public void run(AccountManagerFuture<Bundle> futureBundle) {
                try {
                    if (futureBundle.isCancelled()) {
                        future.setException(new MobileServiceException("User cancelled"));
                        // callback.onCompleted(null, new
                        // MobileServiceException("User cancelled"), null);
                    } else {
                        // getResult() blocks until the token is available and may
                        // throw; any failure is funneled into `future` below.
                        Bundle bundle = futureBundle.getResult();

                        String token = (String) (bundle.get(AccountManager.KEY_AUTHTOKEN));

                        // The mobile service expects the Google token wrapped in a
                        // JSON payload under the "access_token" key.
                        JsonObject json = new JsonObject();
                        json.addProperty("access_token", token);

                        ListenableFuture<MobileServiceUser> loginFuture = client
                                .login(MobileServiceAuthenticationProvider.Google, json);

                        // Bridge the inner login future into the future returned
                        // to the caller of this method.
                        Futures.addCallback(loginFuture, new FutureCallback<MobileServiceUser>() {
                            @Override
                            public void onFailure(Throwable e) {
                                future.setException(e);
                            }

                            @Override
                            public void onSuccess(MobileServiceUser user) {
                                future.set(user);
                            }
                        });
                    }
                } catch (Exception e) {
                    future.setException(e);
                }
            }
        };

        AccountManager acMgr = AccountManager.get(activity.getApplicationContext());
        acMgr.getAuthToken(account, scopes, null, activity, authCallback, null);

    } catch (Exception e) {
        // Surface synchronous failures (e.g. the null-account check) through the
        // returned future rather than throwing to the caller.
        future.setException(e);
    }

    return future;
}

From source file:com.rnd.snapsplit.view.OcrCaptureFragment.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
//    @Override
//    public void onActivityResult(int requestCode, int resultCode, Intent data) {
//        super.onActivityResult(requestCode, resultCode, data);
//
//        if (requestCode == TAKE_PHOTO_CODE && resultCode == RESULT_OK) {
//            Toast.makeText(getContext(), "pic saved", Toast.LENGTH_LONG).show();
//            Log.d("CameraDemo", "Pic saved");
//        }
//    }

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {

    final View view = inflater.inflate(R.layout.view_ocr_capture, container, false);
    final Activity activity = getActivity();
    final Context context = getContext();

    // Make the shared toolbar transparent while the camera preview is showing.
    ((Toolbar) activity.findViewById(R.id.tool_bar_hamburger))
            .setBackgroundColor(ContextCompat.getColor(context, android.R.color.transparent));
    final String dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES)
            + "/picFolder/";
    File newdir = new File(dir);
    newdir.mkdirs();

    mPreview = (CameraSourcePreview) view.findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) view.findViewById(R.id.graphicOverlay);

    StrictMode.VmPolicy.Builder builder = new StrictMode.VmPolicy.Builder();
    StrictMode.setVmPolicy(builder.build());

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    //        createNewThread();
    //        t.start();

    // Toggles the recognition box: arrow up freezes the preview and shows the
    // recognized amount/description; arrow down hides the box and resumes.
    final ImageView upArrow = (ImageView) view.findViewById(R.id.arrow_up);
    upArrow.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (rotationAngle == 0) { // arrow up
                onPause();

                File pictureFile = getOutputMediaFile();
                if (pictureFile == null) {
                    return;
                }
                // FIX: the original leaked the stream when compress() threw and
                // swallowed FileNotFoundException/IOException silently. Use
                // try-with-resources and log the failure (FileNotFoundException
                // is a subclass of IOException, so one catch covers both).
                try (FileOutputStream fos = new FileOutputStream(pictureFile)) {
                    Bitmap receiptBitmap = byteStreamToBitmap(mCameraSource.mostRecentBitmap);
                    receiptBitmap.compress(Bitmap.CompressFormat.JPEG, 80, fos);
                    picPath = pictureFile.getAbsolutePath();
                } catch (IOException e) {
                    Log.e("OcrCaptureFragment", "Failed to save receipt picture", e);
                }

                upArrow.animate().rotation(180).setDuration(500).start();

                TextView amount = (TextView) view.findViewById(R.id.text_amount_value);
                if (mGraphicOverlay.amountItem == null) {
                    amount.setText("0.00");
                } else {
                    amount.setText(String.format("%.2f", mGraphicOverlay.amountItemAfterFormat));
                }
                TextView desc = (TextView) view.findViewById(R.id.text_name_value);
                desc.setText(mGraphicOverlay.description);

                RelativeLayout box = (RelativeLayout) view.findViewById(R.id.recognition_box);
                box.setVisibility(View.VISIBLE);
                Animation slide_up = AnimationUtils.loadAnimation(activity.getApplicationContext(),
                        R.anim.slide_up);

                box.startAnimation(slide_up);
                rotationAngle = 180;

            } else {
                RelativeLayout box = (RelativeLayout) view.findViewById(R.id.recognition_box);
                Animation slide_down = AnimationUtils.loadAnimation(activity.getApplicationContext(),
                        R.anim.slide_down);

                upArrow.animate().rotation(0).setDuration(500).start();

                box.startAnimation(slide_down);
                box.setVisibility(View.INVISIBLE);
                // Reset the recognition state before resuming the camera.
                mGraphicOverlay.amountItem = null;
                mGraphicOverlay.amountItemAfterFormat = 0f;
                mGraphicOverlay.description = "";
                onResume();
                rotationAngle = 0;
            }
        }
    });

    // Hands the recognized transaction (plus the saved receipt image path)
    // over to the friend-selection screen.
    ImageView addButton = (ImageView) view.findViewById(R.id.add_icon);
    addButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            EditText description = (EditText) view.findViewById(R.id.text_name_value);
            EditText amount = (EditText) view.findViewById(R.id.text_amount_value);
            float floatAmount = Float.parseFloat(amount.getText().toString());
            Summary t = new Summary(description.getText().toString(), floatAmount);

            Bundle bundle = new Bundle();
            bundle.putSerializable("splitTransaction", t);

            // Pass the picture by path instead of by Bitmap to keep the Bundle small.
            bundle.putString("receiptPicture", picPath);

            FriendsSelectionFragment fragment = new FriendsSelectionFragment();
            fragment.setArguments(bundle);

            ((Toolbar) activity.findViewById(R.id.tool_bar_hamburger)).setVisibility(View.INVISIBLE);
            getActivity().getSupportFragmentManager().beginTransaction()
                    .add(R.id.fragment_holder, fragment, "FriendsSelectionFragment").addToBackStack(null)
                    .commit();
        }
    });

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(context, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(context, new ScaleListener());

    // Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("OnInitListener", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("OnInitListener", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(activity.getApplicationContext(), listener);

    return view;
}

From source file:com.android.mms.ui.MessageUtils.java

/**
 * Persists the given slideshow (if any) in the background and then launches the
 * appropriate playback activity for the MMS attachment.
 *
 * @param activity    the activity used as context and launcher
 * @param msgUri      the URI of the message whose attachment is viewed
 * @param slideshow   the slideshow model to persist before viewing; may be null
 * @param requestCode when &gt; 0, the activity is started for a result with this code
 * @param asyncDialog helper that runs the persistence off the UI thread, showing a
 *                    progress dialog if it takes longer than ~500ms
 */
public static void viewMmsMessageAttachment(final Activity activity, final Uri msgUri,
        final SlideshowModel slideshow, final int requestCode, AsyncDialog asyncDialog) {
    /// M: Code analyze 002, For fix bug ALPS00112553, system-server JE
    // happens and MS reboot when tap play in MMS. @{
    final boolean isSimple = slideshow != null && slideshow.isSimple();

    if (isSimple) {
        SlideModel slideTemp = slideshow.get(0);
        // In attachment-editor mode, we only ever have one slide.
        /// M: fix bug ALPS00393187, play in gallery when a simple slide only
        /// has a picture or video.
        if (slideTemp != null && !slideTemp.hasAudio()
                && (!slideTemp.hasText() || slideTemp.getText().getText().length() == 0)) {
            MessageUtils.viewSimpleSlideshow(activity, slideshow);
            return;
        }
    }
    /// @}
    // M: change feature ALPS01751464
    if (isSimple) {
        SlideModel slideOne = slideshow.get(0);
        if (slideOne != null && slideOne.hasAudio()) {
            MediaModel model = slideOne.getAudio();
            if (model != null && model.hasDrmContent()) {
                DrmUtilsEx.showDrmAlertDialog(activity);
                return;
            }
        }
    }

    // The user wants to view the slideshow. We have to persist the slideshow parts
    // in a background task. If the task takes longer than a half second, a progress dialog
    // is displayed. Once the PDU persisting is done, another runnable on the UI thread get
    // executed to start the SlideshowActivity.
    asyncDialog.runAsync(new Runnable() {
        @Override
        public void run() {
            // If a slideshow was provided, save it to disk first.
            if (slideshow != null) {
                PduPersister persister = PduPersister.getPduPersister(activity);
                try {
                    PduBody pb = slideshow.toPduBody();
                    MessageUtils.updatePartsIfNeeded(slideshow, persister, msgUri, pb, null);
                    slideshow.sync(pb);
                } catch (MmsException e) {
                    // FIX: include the exception so the failure cause is not lost.
                    Log.e(TAG, "Unable to save message for preview", e);
                    return;
                }
                slide = slideshow.get(0);
            }
        }
    }, new Runnable() {
        @Override
        public void run() {
            // Launch the slideshow activity to play/view. Audio-only content
            // goes to SlideshowActivity, everything else to MmsPlayerActivity.
            Intent intent;
            if ((isSimple && slide.hasAudio()) || (requestCode == AttachmentEditor.MSG_PLAY_AUDIO)) {
                intent = new Intent(activity.getApplicationContext(), SlideshowActivity.class);
            } else {
                intent = new Intent(activity.getApplicationContext(), MmsPlayerActivity.class);
            }
            intent.setData(msgUri);
            if (requestCode > 0) {
                activity.startActivityForResult(intent, requestCode);
            } else {
                activity.startActivity(intent);
            }
        }
    }, R.string.building_slideshow_title);

}