Example usage for android.content ContentProviderOperation newUpdate

List of usage examples for android.content ContentProviderOperation newUpdate

Introduction

In this page you can find the example usage for android.content ContentProviderOperation newUpdate.

Prototype

public static Builder newUpdate(Uri uri) 

Source Link

Document

Create a Builder suitable for building an update ContentProviderOperation.

Usage

From source file:at.bitfire.ical4android.AndroidTask.java

public void update(Task task) throws CalendarStorageException {
    // Replace the stored task and push the change to the task provider
    // as a single batched update operation.
    this.task = task;

    BatchOperation batchOp = new BatchOperation(taskList.provider.client);
    Builder updateBuilder = ContentProviderOperation.newUpdate(taskList.syncAdapterURI(taskSyncURI()));
    buildTask(updateBuilder, true);
    batchOp.enqueue(updateBuilder.build());
    batchOp.commit();
}

From source file:com.granita.contacticloudsync.resource.LocalCalendar.java

@SuppressWarnings("Recycle")
public void processDirtyExceptions() throws CalendarStorageException {
    // Recurrence exceptions are stored as separate event rows referencing their
    // recurring original event via ORIGINAL_ID. When an exception is deleted or
    // modified locally, the original event must be marked DIRTY (and its SEQUENCE
    // handled) so the change is uploaded on the next sync.

    // process deleted exceptions
    Constants.log.info("Processing deleted exceptions");
    try {
        @Cleanup
        Cursor cursor = provider.query(syncAdapterURI(Events.CONTENT_URI),
                new String[] { Events._ID, Events.ORIGINAL_ID, LocalEvent.COLUMN_SEQUENCE },
                Events.DELETED + "!=0 AND " + Events.ORIGINAL_ID + " IS NOT NULL", null, null);
        while (cursor != null && cursor.moveToNext()) {
            Constants.log.debug("Found deleted exception, removing; then re-scheduling original event");
            long id = cursor.getLong(0), // can't be null (by definition)
                    originalID = cursor.getLong(1); // can't be null (by query)

            // get original event's SEQUENCE
            @Cleanup
            Cursor cursor2 = provider.query(
                    syncAdapterURI(ContentUris.withAppendedId(Events.CONTENT_URI, originalID)),
                    new String[] { LocalEvent.COLUMN_SEQUENCE }, null, null, null);
            // FIX: the original read SEQUENCE from the outer cursor (column 0 = _ID)
            // instead of cursor2, and never positioned cursor2 on a row.
            int originalSequence = (cursor2 != null && cursor2.moveToFirst() && !cursor2.isNull(0))
                    ? cursor2.getInt(0)
                    : 0;

            BatchOperation batch = new BatchOperation(provider);
            // re-schedule original event and set it to DIRTY
            batch.enqueue(ContentProviderOperation
                    .newUpdate(syncAdapterURI(ContentUris.withAppendedId(Events.CONTENT_URI, originalID)))
                    .withValue(LocalEvent.COLUMN_SEQUENCE, originalSequence)
                    .withValue(Events.DIRTY, DIRTY_INCREASE_SEQUENCE).build());
            // remove exception
            batch.enqueue(ContentProviderOperation
                    .newDelete(syncAdapterURI(ContentUris.withAppendedId(Events.CONTENT_URI, id))).build());
            batch.commit();
        }
    } catch (RemoteException e) {
        throw new CalendarStorageException("Couldn't process locally modified exception", e);
    }

    // process dirty exceptions
    Constants.log.info("Processing dirty exceptions");
    try {
        @Cleanup
        Cursor cursor = provider.query(syncAdapterURI(Events.CONTENT_URI),
                new String[] { Events._ID, Events.ORIGINAL_ID, LocalEvent.COLUMN_SEQUENCE },
                Events.DIRTY + "!=0 AND " + Events.ORIGINAL_ID + " IS NOT NULL", null, null);
        while (cursor != null && cursor.moveToNext()) {
            Constants.log.debug("Found dirty exception, increasing SEQUENCE to re-schedule");
            long id = cursor.getLong(0), // can't be null (by definition)
                    originalID = cursor.getLong(1); // can't be null (by query)
            int sequence = cursor.isNull(2) ? 0 : cursor.getInt(2);

            BatchOperation batch = new BatchOperation(provider);
            // original event to DIRTY
            batch.enqueue(ContentProviderOperation
                    .newUpdate(syncAdapterURI(ContentUris.withAppendedId(Events.CONTENT_URI, originalID)))
                    .withValue(Events.DIRTY, DIRTY_DONT_INCREASE_SEQUENCE).build());
            // increase SEQUENCE and set DIRTY to 0
            batch.enqueue(ContentProviderOperation
                    .newUpdate(syncAdapterURI(ContentUris.withAppendedId(Events.CONTENT_URI, id)))
                    .withValue(LocalEvent.COLUMN_SEQUENCE, sequence + 1).withValue(Events.DIRTY, 0).build());
            batch.commit();
        }
    } catch (RemoteException e) {
        throw new CalendarStorageException("Couldn't process locally modified exception", e);
    }
}

From source file:org.exfio.csyncdroid.resource.LocalCalendar.java

public void setCTag(String cTag) {
    // Queue an update that stores the collection's CTag in this calendar's row;
    // the pending operation is applied later with the rest of the batch.
    Uri calendarUri = ContentUris.withAppendedId(calendarsURI(), id);
    pendingOperations.add(
            ContentProviderOperation.newUpdate(calendarUri)
                    .withValue(COLLECTION_COLUMN_CTAG, cTag)
                    .build());
}

From source file:at.bitfire.davdroid.syncadapter.ContactsSyncManager.java

@Override
protected void prepareDirty() throws CalendarStorageException, ContactsStorageException {
    super.prepareDirty();

    LocalAddressBook addressBook = localAddressBook();

    if (groupMethod == GroupMethod.CATEGORIES) {
        // group memberships are represented as contact CATEGORIES

        // groups with DELETED=1: set all members to dirty, then remove group
        for (LocalGroup deletedGroup : addressBook.getDeletedGroups()) {
            App.log.fine("Finally removing group " + deletedGroup);
            // useless because Android deletes group memberships as soon as a group is set to DELETED:
            // deletedGroup.markMembersDirty();
            deletedGroup.delete();
        }

        // groups with DIRTY=1: mark all memberships as dirty, then clean DIRTY flag of group
        for (LocalGroup dirtyGroup : addressBook.getDirtyGroups()) {
            App.log.fine("Marking members of modified group " + dirtyGroup + " as dirty");
            dirtyGroup.markMembersDirty();
            dirtyGroup.clearDirty(null);
        }
    } else {
        // groups as separate VCards: there are group contacts and individual contacts

        // mark groups with changed members as dirty
        BatchOperation batch = new BatchOperation(addressBook.provider);
        for (LocalContact contact : addressBook.getDirtyContacts()) {
            try {
                App.log.fine("Looking for changed group memberships of contact " + contact.getFileName());
                Set<Long> cachedGroups = contact.getCachedGroupMemberships();
                Set<Long> currentGroups = contact.getGroupMemberships();
                // symmetric difference = groups the contact joined or left
                for (Long groupID : SetUtils.disjunction(cachedGroups, currentGroups)) {
                    App.log.fine("Marking group as dirty: " + groupID);
                    Uri groupUri = addressBook
                            .syncAdapterURI(ContentUris.withAppendedId(Groups.CONTENT_URI, groupID));
                    batch.enqueue(new BatchOperation.Operation(
                            ContentProviderOperation.newUpdate(groupUri)
                                    .withValue(Groups.DIRTY, 1)
                                    .withYieldAllowed(true)));
                }
            } catch (FileNotFoundException ignored) {
                // contact vanished between query and access — nothing to mark
            }
        }
        batch.commit();
    }
}

From source file:org.voidsink.anewjkuapp.update.ImportExamTask.java

@Override
protected Void doInBackground(Void... params) {
    // Background import of exams from KUSSS into the local content provider.
    // Steps: authenticate, fetch exams (optionally per course id), de-duplicate
    // into a map, merge against existing provider rows (update/delete), then
    // insert whatever remains as new entries — applied as one provider batch.
    Log.d(TAG, "Start importing exams");

    synchronized (sync_lock) {
        // NOTE(review): df is created but never used in this method — possibly leftover.
        final DateFormat df = DateFormat.getDateInstance();

        try {
            Log.d(TAG, "setup connection");

            updateNotify(mContext.getString(R.string.notification_sync_connect));

            // verify credentials / connectivity before doing any work
            if (KusssHandler.getInstance().isAvailable(mContext,
                    AppUtils.getAccountAuthToken(mContext, mAccount),
                    AppUtils.getAccountName(mContext, mAccount),
                    AppUtils.getAccountPassword(mContext, mAccount))) {

                updateNotify(mContext.getString(R.string.notification_sync_exam_loading));

                // load the exam list, either per course id or all at once
                List<Exam> exams;
                if (PreferenceWrapper.getNewExamsByCourseId(mContext)) {
                    CourseMap courseMap = new CourseMap(mContext);
                    List<Term> terms = KusssContentProvider.getTerms(mContext);

                    Log.d(TAG, "load exams by courseId");
                    exams = KusssHandler.getInstance().getNewExamsByCourseId(mContext, courseMap.getCourses(),
                            terms);
                } else {
                    Log.d(TAG, "load exams");
                    exams = KusssHandler.getInstance().getNewExams(mContext);
                }
                if (exams == null) {
                    mSyncResult.stats.numParseExceptions++;
                } else {
                    // de-duplicate: key is (courseId, term, dtStart); later entries win
                    Map<String, Exam> examMap = new HashMap<>();
                    for (Exam exam : exams) {
                        Exam old = examMap.put(
                                KusssHelper.getExamKey(exam.getCourseId(),
                                        AppUtils.termToString(exam.getTerm()), exam.getDtStart().getTime()),
                                exam);
                        if (old != null) {
                            Log.w(TAG, "exam alread loaded: " + KusssHelper.getExamKey(old.getCourseId(),
                                    AppUtils.termToString(old.getTerm()), old.getDtStart().getTime()));
                        }
                    }

                    Log.d(TAG, String.format("got %s exams", exams.size()));

                    updateNotify(mContext.getString(R.string.notification_sync_exam_updating));

                    ArrayList<ContentProviderOperation> batch = new ArrayList<>();

                    // query all locally stored exams to compute the merge
                    Uri examUri = KusssContentContract.Exam.CONTENT_URI;
                    Cursor c = mProvider.query(examUri, EXAM_PROJECTION, null, null, null);

                    if (c == null) {
                        Log.w(TAG, "selection failed");
                    } else {
                        Log.d(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");
                        int examId;
                        String examTerm;
                        String examCourseId;
                        long examDtStart;
                        long examDtEnd;
                        String examLocation;

                        while (c.moveToNext()) {
                            examId = c.getInt(COLUMN_EXAM_ID);
                            examTerm = c.getString(COLUMN_EXAM_TERM);
                            examCourseId = c.getString(COLUMN_EXAM_COURSEID);
                            examDtStart = c.getLong(COLUMN_EXAM_DTSTART);
                            examDtEnd = c.getLong(COLUMN_EXAM_DTEND);
                            examLocation = c.getString(COLUMN_EXAM_LOCATION);

                            // remove from the incoming map: whatever remains after the
                            // loop is new and gets inserted below
                            Exam exam = examMap
                                    .remove(KusssHelper.getExamKey(examCourseId, examTerm, examDtStart));
                            if (exam != null) {
                                // Check to see if the entry needs to be
                                // updated
                                Uri existingUri = examUri.buildUpon().appendPath(Integer.toString(examId))
                                        .build();
                                Log.d(TAG, "Scheduling update: " + existingUri);

                                // notify the user only when a visible property changed
                                if (!DateUtils.isSameDay(new Date(examDtStart), exam.getDtStart())
                                        || !new Date(examDtEnd).equals(exam.getDtEnd())
                                        || !examLocation.equals(exam.getLocation())) {
                                    mNewExamNotification.addUpdate(getEventString(exam));
                                }

                                batch.add(ContentProviderOperation
                                        .newUpdate(KusssContentContract.asEventSyncAdapter(existingUri,
                                                mAccount.name, mAccount.type))
                                        .withValue(KusssContentContract.Exam.COL_ID, Integer.toString(examId))
                                        .withValues(KusssHelper.getExamContentValues(exam)).build());
                                mSyncResult.stats.numUpdates++;
                            } else if (examDtStart > mSyncFromNow - DateUtils.MILLIS_PER_DAY) {
                                // Entry doesn't exist. Remove only newer
                                // events from the database.
                                Uri deleteUri = examUri.buildUpon().appendPath(Integer.toString(examId))
                                        .build();
                                Log.d(TAG, "Scheduling delete: " + deleteUri);

                                batch.add(ContentProviderOperation.newDelete(KusssContentContract
                                        .asEventSyncAdapter(deleteUri, mAccount.name, mAccount.type)).build());
                                mSyncResult.stats.numDeletes++;
                            }
                        }
                        c.close();

                        // whatever is left in examMap was not found locally: insert it
                        for (Exam exam : examMap.values()) {
                            batch.add(ContentProviderOperation
                                    .newInsert(KusssContentContract.asEventSyncAdapter(examUri, mAccount.name,
                                            mAccount.type))
                                    .withValues(KusssHelper.getExamContentValues(exam)).build());
                            Log.d(TAG, "Scheduling insert: " + exam.getTerm() + " " + exam.getCourseId());

                            mNewExamNotification.addInsert(getEventString(exam));

                            mSyncResult.stats.numInserts++;
                        }

                        if (batch.size() > 0) {
                            updateNotify(mContext.getString(R.string.notification_sync_exam_saving));

                            Log.d(TAG, "Applying batch update");
                            mProvider.applyBatch(batch);
                            Log.d(TAG, "Notify resolver");
                            mResolver.notifyChange(KusssContentContract.Exam.CONTENT_CHANGED_URI, null, // No
                                    // local
                                    // observer
                                    false); // IMPORTANT: Do not
                            // sync to
                            // network
                        } else {
                            Log.w(TAG, "No batch operations found! Do nothing");
                        }
                    }
                }
                KusssHandler.getInstance().logout(mContext);
            } else {
                mSyncResult.stats.numAuthExceptions++;
            }
        } catch (Exception e) {
            Analytics.sendException(mContext, e, true);
            Log.e(TAG, "import failed", e);
        }
    }

    setImportDone();

    return null;
}

From source file:com.granita.tasks.SettingsListFragment.java

/**
 * This function is called to save the any modifications made to the displayed list. It retrieves the {@link HashMap} from the adapter of the list and uses
 * it makes the changes persistent. For this it uses a batch operation provided by {@link ContentResolver}. The operations to be performed in the batch
 * operation are stored in an {@link ArrayList} of {@link ContentProviderOperation}.
 * /*from www  .j av  a2s  . c om*/
 * @return <code>true</code> if the save operation was successful, <code>false</code> otherwise.
 */
public boolean saveListState() {
    HashMap<Long, Boolean> savedPositions = ((VisibleListAdapter) getListAdapter()).getState();
    ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();

    for (Long posInt : savedPositions.keySet()) {
        boolean val = savedPositions.get(posInt);
        ContentProviderOperation op = ContentProviderOperation
                .newUpdate(TaskContract.TaskLists.getContentUri(mAutority))
                .withSelection(TaskContract.TaskLists._ID + "=?", new String[] { posInt.toString() })
                .withValue(mListCompareColumnName, val ? "1" : "0").build();
        ops.add(op);
    }

    try {
        mContext.getContentResolver().applyBatch(mAutority, ops);
    } catch (RemoteException e) {
        e.printStackTrace();
        return false;
    } catch (OperationApplicationException e) {
        e.printStackTrace();
        return false;
    }
    return true;
}

From source file:com.rukman.emde.smsgroups.syncadapter.SyncAdapter.java

private ContentProviderResult[] optimisticallyUpdateGroup(JSONObject group, ContentProviderClient provider,
        ContentProviderClient contactsProvider, String authToken, Account account, SyncResult syncResult)
        throws JSONException, RemoteException {

    String groupCloudId = null;//www . ja va2s .c  o  m
    String version = null;
    try {
        groupCloudId = group.getString(JSONKeys.KEY_ID);
        version = group.getString(JSONKeys.KEY_VERSION);

        ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
        // Operation 0 - we believe the record exists, but we'll check, inside a transaction, to make sure
        ContentProviderOperation op;
        op = ContentProviderOperation.newAssertQuery(GMSGroups.CONTENT_URI)
                .withSelection(GMSGroup.CLOUD_ID + "=?", new String[] { groupCloudId }).withExpectedCount(1)
                .build();
        ops.add(op);
        // Operation 1 - we know it exists. If its the right version, we don't need to do the update
        // So we assert that we'll find zero records with the current version and if that's right, we'll update our
        // record, including the version with the new record data
        op = ContentProviderOperation.newAssertQuery(GMSGroups.CONTENT_URI)
                .withSelection(GMSGroup.CLOUD_ID + "=? AND " + GMSGroup.VERSION + "=?",
                        new String[] { groupCloudId, version })
                .withExpectedCount(0).build();
        ops.add(op);
        // If we get this far, update the existing group from the information in the JSON object
        // Operation 2
        ContentValues groupValues = GMSApplication.getGroupValues(group);
        op = ContentProviderOperation.newUpdate(GMSGroups.CONTENT_URI)
                .withSelection(GMSGroup.CLOUD_ID + "=?", new String[] { groupCloudId }).withValues(groupValues)
                .withValue(GMSGroup.STATUS, GMSGroup.STATUS_SYNCED).withExpectedCount(1).build();
        ops.add(op);
        return provider.applyBatch(ops);
    } catch (OperationApplicationException e) {
        ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
        ContentProviderOperation op;
        // Operation 0 - we know it exists. If its the right version, we don't need to do the update
        // So we assert that we'll find zero records with the current version and if that's right, we'll update our
        // record, including the version with the new record data
        op = ContentProviderOperation.newAssertQuery(GMSGroups.CONTENT_URI)
                .withSelection(GMSGroup.CLOUD_ID + "=? AND " + GMSGroup.VERSION + "=?",
                        new String[] { groupCloudId, version })
                .withExpectedCount(1).build();
        ops.add(op);
        // If we get this far we only need to update the is_synced field in the database
        // Operation 1
        op = ContentProviderOperation.newUpdate(GMSGroups.CONTENT_URI)
                .withSelection(GMSGroup.CLOUD_ID + "=?", new String[] { groupCloudId })
                .withValue(GMSGroup.STATUS, GMSGroup.STATUS_SYNCED).withExpectedCount(1).build();
        ops.add(op);
        try {
            return provider.applyBatch(ops);
        } catch (OperationApplicationException e1) {
            e1.printStackTrace();
            syncResult.stats.numSkippedEntries++;
        }
    }
    return null;
}

From source file:com.ptts.sync.SyncAdapter.java

/**
 * Read JSON from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
        throws IOException, JSONException, RemoteException, OperationApplicationException, ParseException {
    final FeedParserJson feedParser = new FeedParserJson();
    final ContentResolver contentResolver = getContext().getContentResolver();

    Log.i(TAG, "Parsing stream as Json feed");
    final List<FeedParserJson.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, FeedParserJson.Entry> entryMap = new HashMap<String, FeedParserJson.Entry>();
    for (FeedParserJson.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String name;
    String start;
    String end;
    String stops;
    try { // FIX: close the cursor even if an exception is thrown mid-merge
        while (c.moveToNext()) {
            syncResult.stats.numEntries++;
            id = c.getInt(COLUMN_ID);
            entryId = c.getString(COLUMN_ENTRY_ID);
            name = c.getString(COLUMN_NAME);
            start = c.getString(COLUMN_START);
            end = c.getString(COLUMN_END);
            stops = c.getString(COLUMN_STOPS);

            Log.i("STOPS FROM PROJECTION", stops);

            FeedParserJson.Entry match = entryMap.get(entryId);
            if (match != null) {
                // Entry exists. Remove from entry map to prevent insert later.
                entryMap.remove(entryId);
                // Check to see if the entry needs to be updated
                Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id))
                        .build();

                // FIX: the original used "match.end != end", a String reference
                // comparison that is almost always true and forced a spurious
                // update; compare with equals() like the other fields.
                if ((match.name != null && !match.name.equals(name))
                        || (match.start != null && !match.start.equals(start))
                        || (match.stops != null && !match.stops.equals(stops))
                        || (match.end != null && !match.end.equals(end))) {

                    Log.i("STOPS FROM HASHMAP", match.stops);
                    if (!match.stops.equals(stops)) {
                        Log.i("COMPARING PROJECTION " + match.stops + " & HASHMAP " + stops,
                                "The two aren't equal");
                    } else {
                        Log.i("COMPARING PROJECTION & HASHMAP", "The two are equal");
                    }

                    // Update existing record

                    Log.i(TAG, "Scheduling update: " + existingUri);
                    batch.add(ContentProviderOperation.newUpdate(existingUri)
                            .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, entryId)
                            .withValue(FeedContract.Entry.COLUMN_NAME_NAME, name)
                            .withValue(FeedContract.Entry.COLUMN_NAME_START, start)
                            .withValue(FeedContract.Entry.COLUMN_NAME_END, end)
                            .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, stops).build());
                    syncResult.stats.numUpdates++;
                } else {
                    Log.i(TAG, "No action: " + existingUri);
                }
            } else {
                // Entry doesn't exist. Remove it from the database.
                Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
                Log.i(TAG, "Scheduling delete: " + deleteUri);
                batch.add(ContentProviderOperation.newDelete(deleteUri).build());
                syncResult.stats.numDeletes++;
            }
        }
    } finally {
        c.close();
    }

    // Add new items
    for (FeedParserJson.Entry e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_NAME, e.name)
                .withValue(FeedContract.Entry.COLUMN_NAME_START, e.start)
                .withValue(FeedContract.Entry.COLUMN_NAME_END, e.end)
                .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, e.stops).build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}

From source file:com.example.android.network.sync.basicsyncadapter.SyncAdapter.java

/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult) throws IOException,
        XmlPullParserException, RemoteException, OperationApplicationException, ParseException {

    final ContentResolver contentResolver = getContext().getContentResolver();

    Log.i(TAG, "Parsing stream as Atom feed");
    final List<Transformer> entries = null;
    /*=this.parseTransformersResponse(stream)*/;
    // FIX: the parser call above is commented out, so entries can be (and here
    // always is) null — the original crashed with an NPE at entries.size().
    if (entries == null) {
        Log.w(TAG, "No entries parsed, nothing to merge");
        return;
    }
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, Transformer> entryMap = new HashMap<String, Transformer>();
    for (Transformer e : entries) {
        entryMap.put(e.transformerID, e);
    }

    // Get list of all items
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = Transformer.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, null, null, null, null);
    // FIX: the original swallowed query failures in an empty catch and then
    // dereferenced a null cursor; treat a failed query as "nothing local".
    if (c == null) {
        Log.w(TAG, "Query for local entries failed");
        return;
    }
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    String id;
    String name;
    String location;

    try { // FIX: close the cursor even if an exception is thrown mid-merge
        while (c.moveToNext()) {
            syncResult.stats.numEntries++;

            // FIX: getColumnName() returns the column's *name*, not the row value;
            // read the row value with getString().
            id = c.getString(COLUMN_ID);
            name = c.getString(COLUMN_ENTRY_ID);
            location = c.getString(COLUMN_TITLE);

            Transformer match = entryMap.get(id);
            if (match != null) {
                // Entry exists. Remove from entry map to prevent insert later.
                entryMap.remove(id);
                // Check to see if the entry needs to be updated
                Uri existingUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
                // FIX: the original compared trsLocation against the *name*; compare
                // each field against its own local counterpart.
                if ((match.trsName != null && !match.trsName.equals(name))
                        || (match.trsLocation != null && !match.trsLocation.equals(location))) {
                    // Update existing record
                    Log.i(TAG, "Scheduling update: " + existingUri);
                    batch.add(ContentProviderOperation.newUpdate(existingUri).withValue(Transformer.KEY_NAME, name)
                            .withValue(Transformer.KEY_LOCATION, location)
                            .build());
                    syncResult.stats.numUpdates++;
                } else {
                    Log.i(TAG, "No action: " + existingUri);
                }
            } else {
                // Entry doesn't exist. Remove it from the database.
                Uri deleteUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
                Log.i(TAG, "Scheduling delete: " + deleteUri);
                batch.add(ContentProviderOperation.newDelete(deleteUri).build());
                syncResult.stats.numDeletes++;
            }
        }
    } finally {
        c.close();
    }

    // Add new items
    for (Transformer e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.transformerID);
        batch.add(ContentProviderOperation.newInsert(Transformer.CONTENT_URI)
                .withValue(Transformer.KEY_TRANSFORMER_ID, e.transformerID)
                .withValue(Transformer.KEY_NAME, e.trsName).withValue(Transformer.KEY_LOCATION, e.trsLocation)
                .withValue(Transformer.KEY_CURRENT_TEMP, e.trsCurrentTemp)
                .withValue(Transformer.KEY_LAST_SERVER_SYNC_DATE, e.lastServerSyncDate)
                .withValue(Transformer.KEY_LAST_UPDATED_TIME, e.lastServerSyncDate)
                .withValue(Transformer.KEY_SYNC_STATUS, 0).withValue(Transformer.KEY_MAKE, e.trsMake)
                .withValue(Transformer.KEY_WINDING_MAKE, e.trsWindingMake)
                .withValue(Transformer.KEY_WINDING_COUNT, e.trsWindingCount)
                .withValue(Transformer.KEY_OIL_LEVEL, e.trsOilLevel)
                .withValue(Transformer.KEY_OPERATING_POWER, e.trsOperatingPower)
                .withValue(Transformer.KEY_TYPE, e.trsType)
                .build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(Transformer.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(Transformer.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}

From source file:com.nineash.hutsync.client.NetworkUtilities.java

private static ContentProviderOperation updateEvent(long calendar_id, Account account, Event event,
        long raw_id) {
    // Both the update and the insert target the events table as a sync adapter
    // for the given account, so build that URI once.
    Uri eventsUri = Events.CONTENT_URI.buildUpon()
            .appendQueryParameter(CalendarContract.CALLER_IS_SYNCADAPTER, "true")
            .appendQueryParameter(Calendars.ACCOUNT_NAME, account.name)
            .appendQueryParameter(Calendars.ACCOUNT_TYPE, account.type).build();

    ContentProviderOperation.Builder builder;
    if (raw_id != -1) {
        // Existing row: update it, selected by its _ID.
        builder = ContentProviderOperation.newUpdate(eventsUri);
        builder.withSelection(Events._ID + " = '" + raw_id + "'", null);
    } else {
        // No local row yet: insert a new event.
        builder = ContentProviderOperation.newInsert(eventsUri);
    }

    // Default to a one-hour event when no end date is given.
    long dtstart = event.getStartDate().getTime();
    long dtend = (event.getEndDate() != null) ? event.getEndDate().getTime() : dtstart + (1000 * 60 * 60);

    builder.withValue(Events.CALENDAR_ID, calendar_id);
    builder.withValue(Events.DTSTART, dtstart);
    builder.withValue(Events.DTEND, dtend);
    builder.withValue(Events.TITLE, event.getTitle());
    builder.withValue(Events.EVENT_LOCATION, "Pizza Hut");
    builder.withValue(Events.DESCRIPTION, event.getDescription());
    builder.withValue(Events._SYNC_ID, Long.valueOf(event.getId()));
    return builder.build();
}