Example usage for android.content ContentProviderOperation newDelete

Introduction

This page collects usage examples for android.content ContentProviderOperation newDelete.

Prototype

public static Builder newDelete(Uri uri) 

Document

Create a Builder suitable for building a delete ContentProviderOperation.
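
Before the examples, here is a minimal self-contained sketch of the typical pattern, assuming a hypothetical provider authority "com.example.provider" and an "items" table (both invented for illustration): the delete is described by a Builder, collected into a batch, and applied with ContentResolver#applyBatch.

import android.content.ContentProviderOperation;
import android.content.ContentResolver;
import android.content.OperationApplicationException;
import android.net.Uri;
import android.os.RemoteException;
import java.util.ArrayList;

public final class NewDeleteSketch {
    // Hypothetical authority and table URI, used only for illustration.
    private static final String AUTHORITY = "com.example.provider";
    private static final Uri ITEMS_URI = Uri.parse("content://" + AUTHORITY + "/items");

    /** Deletes all rows matching a selection in a single batched operation. */
    public static void deleteArchivedItems(ContentResolver resolver)
            throws RemoteException, OperationApplicationException {
        ArrayList<ContentProviderOperation> ops = new ArrayList<>();
        ops.add(ContentProviderOperation.newDelete(ITEMS_URI)
                .withSelection("status = ?", new String[] { "archived" })
                .build());
        resolver.applyBatch(AUTHORITY, ops);
    }
}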

Usage

From source file:com.barak.pix.FeedsActivity.java

/**
 * Removes all IM entries from the default Contacts application.
 *
 * @param contentResolver
 *            content resolver
 */
public static void deleteAllIMContactField(ContentResolver contentResolver) {
    ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();

    ops.add(ContentProviderOperation.newDelete(ContactsContract.Data.CONTENT_URI)
            .withSelection(
                    ContactsContract.Data.MIMETYPE + "= ? and "
                            + ContactsContract.CommonDataKinds.Im.CUSTOM_PROTOCOL + "= ?",
                    new String[] { ContactsContract.CommonDataKinds.Im.CONTENT_ITEM_TYPE, IM_LABEL })
            .build());

    try {
        contentResolver.applyBatch(ContactsContract.AUTHORITY, ops);
    } catch (Exception e) {
        Log.d(LOG_TAG, "An exception occurred when deleting all IM fields of the contact.", e);
    }
}

From source file:com.ptts.sync.SyncAdapter.java

/**
 * Read JSON from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
        throws IOException, JSONException, RemoteException, OperationApplicationException, ParseException {
    final FeedParserJson feedParser = new FeedParserJson();
    final ContentResolver contentResolver = getContext().getContentResolver();

    Log.i(TAG, "Parsing stream as Json feed");
    final List<FeedParserJson.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, FeedParserJson.Entry> entryMap = new HashMap<String, FeedParserJson.Entry>();
    for (FeedParserJson.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String name;
    String start;
    String end;
    String stops;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;
        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        name = c.getString(COLUMN_NAME);
        start = c.getString(COLUMN_START);
        end = c.getString(COLUMN_END);
        stops = c.getString(COLUMN_STOPS);

        Log.i("STOPS FROM PROJECTION", stops);

        FeedParserJson.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id))
                    .build();

            if ((match.name != null && !match.name.equals(name))
                    || (match.start != null && !match.start.equals(start))
                    || (match.stops != null && !match.stops.equals(stops))
                    || (match.end != null && !match.end.equals(end))) {

                Log.i("STOPS FROM HASHMAP", match.stops);
                if (!match.stops.equals(stops)) {
                    Log.i("COMPARING PROJECTION " + match.stops + " & HASHMAP " + stops,
                            "The two aren't equal");
                } else {
                    Log.i("COMPARING PROJECTION & HASHMAP", "The two are equal");
                }

                // Update existing record

                Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, entryId)
                        .withValue(FeedContract.Entry.COLUMN_NAME_NAME, name)
                        .withValue(FeedContract.Entry.COLUMN_NAME_START, start)
                        .withValue(FeedContract.Entry.COLUMN_NAME_END, end)
                        .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, stops).build());
                syncResult.stats.numUpdates++;
            } else {
                Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
            Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (FeedParserJson.Entry e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_NAME, e.name)
                .withValue(FeedContract.Entry.COLUMN_NAME_START, e.start)
                .withValue(FeedContract.Entry.COLUMN_NAME_END, e.end)
                .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, e.stops).build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}

From source file:com.example.android.network.sync.basicsyncadapter.SyncAdapter.java

/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult) throws IOException,
        XmlPullParserException, RemoteException, OperationApplicationException, ParseException {

    final ContentResolver contentResolver = getContext().getContentResolver();

    Log.i(TAG, "Parsing stream as Atom feed");
    // Restore the parse call that was commented out; a null list would NPE below.
    final List<Transformer> entries = this.parseTransformersResponse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, Transformer> entryMap = new HashMap<String, Transformer>();
    for (Transformer e : entries) {
        entryMap.put(e.transformerID, e);
    }
    Cursor c = null;

    try {
        // Get list of all items
        Log.i(TAG, "Fetching local entries for merge");
        Uri uri = Transformer.CONTENT_URI; // Get all entries
        c = contentResolver.query(uri, null, null, null, null);
    } catch (Exception ex) {
        Log.e(TAG, "Failed to query local entries", ex);
    }
    if (c == null) {
        // Nothing to merge against; bail out rather than NPE below.
        return;
    }
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");
    // Find stale data
    String id;
    String name;
    String location;

    while (c.moveToNext()) {
        syncResult.stats.numEntries++;

        id = c.getString(COLUMN_ID); // getColumnName() returns the column's name, not the row value
        name = c.getString(COLUMN_ENTRY_ID);
        location = c.getString(COLUMN_TITLE);

        Transformer match = entryMap.get(id);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(id);
            // Check to see if the entry needs to be updated
            Uri existingUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
            if (match.trsName != null && !match.trsName.equals(name)) {
                // Update existing record
                Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri).withValue(Transformer.KEY_NAME, name)
                        .withValue(Transformer.KEY_LOCATION, location)

                        .build());
                syncResult.stats.numUpdates++;
            } else {
                Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
            Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (Transformer e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.transformerID);
        batch.add(ContentProviderOperation.newInsert(Transformer.CONTENT_URI)
                .withValue(Transformer.KEY_TRANSFORMER_ID, e.transformerID)
                .withValue(Transformer.KEY_NAME, e.trsName).withValue(Transformer.KEY_LOCATION, e.trsLocation)
                .withValue(Transformer.KEY_CURRENT_TEMP, e.trsCurrentTemp)
                .withValue(Transformer.KEY_LAST_SERVER_SYNC_DATE, e.lastServerSyncDate)
                .withValue(Transformer.KEY_LAST_UPDATED_TIME, e.lastServerSyncDate)
                .withValue(Transformer.KEY_SYNC_STATUS, 0).withValue(Transformer.KEY_MAKE, e.trsMake)
                .withValue(Transformer.KEY_WINDING_MAKE, e.trsWindingMake)
                .withValue(Transformer.KEY_WINDING_COUNT, e.trsWindingCount)
                .withValue(Transformer.KEY_OIL_LEVEL, e.trsOilLevel)
                .withValue(Transformer.KEY_OPERATING_POWER, e.trsOperatingPower)
                .withValue(Transformer.KEY_TYPE, e.trsType)

                .build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(Transformer.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(Transformer.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}

From source file:at.bitfire.davdroid.resource.LocalCalendar.java

@Override
public void delete(Resource resource) {
    super.delete(resource);

    // delete all exceptions of this event, too
    pendingOperations.add(ContentProviderOperation.newDelete(entriesURI())
            .withSelection(Events.ORIGINAL_ID + "=?", new String[] { String.valueOf(resource.getLocalID()) })
            .build());
}
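
A note on the selection above: in CalendarContract, Events.ORIGINAL_ID holds the _ID of the recurring event an exception instance was created from, so this single newDelete removes every exception row tied to the event being deleted, within the same pending batch.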

From source file:org.mythtv.service.dvr.v25.RecordingRuleHelperV25.java

private void deleteRecordingRules(final Context context, final LocationProfile locationProfile,
        ArrayList<ContentProviderOperation> ops, DateTime lastModified) {
    Log.d(TAG, "deleteRecordingRules : enter");

    //      Log.v( TAG, "load : remove deleted recordings" );
    String deletedSelection = RecordingRuleConstants.TABLE_NAME + "."
            + RecordingRuleConstants.FIELD_LAST_MODIFIED_DATE + " < ?";
    String[] deletedSelectionArgs = new String[] { String.valueOf(lastModified.getMillis()) };

    deletedSelection = appendLocationHostname(context, locationProfile, deletedSelection,
            RecordingRuleConstants.TABLE_NAME);

    //      Log.v( TAG, "load : deleting recRules" );
    ops.add(ContentProviderOperation.newDelete(RecordingRuleConstants.CONTENT_URI)
            .withSelection(deletedSelection, deletedSelectionArgs).build());

    Log.d(TAG, "deleteRecordingRules : exit");
}

From source file:com.google.samples.apps.iosched.io.SessionsHandler.java

private void buildSessionSpeakerMapping(Session session, ArrayList<ContentProviderOperation> list) {
    final Uri uri = ScheduleContractHelper
            .setUriAsCalledFromSyncAdapter(ScheduleContract.Sessions.buildSpeakersDirUri(session.id));

    // delete any existing relationship between this session and speakers
    list.add(ContentProviderOperation.newDelete(uri).build());

    // add relationship records to indicate the speakers for this session
    if (session.speakers != null) {
        for (String speakerId : session.speakers) {
            list.add(ContentProviderOperation.newInsert(uri)
                    .withValue(ScheduleDatabase.SessionsSpeakers.SESSION_ID, session.id)
                    .withValue(ScheduleDatabase.SessionsSpeakers.SPEAKER_ID, speakerId).build());
        }
    }
}

From source file:at.bitfire.davdroid.resource.LocalCollection.java

/** Enqueues deleting a resource from the local collection. Requires commit() to be effective! */
public void delete(Resource resource) {
    pendingOperations.add(
            ContentProviderOperation.newDelete(ContentUris.withAppendedId(entriesURI(), resource.getLocalID()))
                    .withYieldAllowed(true).build());
}
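
The withYieldAllowed(true) call above marks a yield point: in a long batch, the provider may briefly yield its transaction there so other clients are not blocked for the whole batch. A hedged sketch of building such a batch for many row IDs, assuming a hypothetical entries URI (in the example above it comes from entriesURI()):

import android.content.ContentProviderOperation;
import android.content.ContentUris;
import android.net.Uri;
import java.util.ArrayList;
import java.util.List;

class BatchDeleteSketch {
    /** Builds one delete operation per row ID, letting the provider yield between them. */
    static ArrayList<ContentProviderOperation> buildDeletes(Uri entriesUri, List<Long> localIds) {
        ArrayList<ContentProviderOperation> ops = new ArrayList<>();
        for (long id : localIds) {
            ops.add(ContentProviderOperation
                    .newDelete(ContentUris.withAppendedId(entriesUri, id))
                    .withYieldAllowed(true) // safe point to briefly release the provider transaction
                    .build());
        }
        return ops;
    }
}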

From source file:com.example.android.basicsyncadapter.SyncAdapter.java

/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult) throws IOException,
        XmlPullParserException, RemoteException, OperationApplicationException, ParseException {
    //final FeedParser feedParser = new FeedParser();
    final CAPFeedParser feedParser = new CAPFeedParser();
    final ContentResolver contentResolver = getContext().getContentResolver();

    //Log.i(TAG, "Parsing stream as Atom feed");
    final List<CAPFeedParser.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, CAPFeedParser.Entry> entryMap = new HashMap<String, CAPFeedParser.Entry>();
    for (CAPFeedParser.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items
    //Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    //Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String title;
    String description;
    String headline;
    String url;
    String areas;
    String issued;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;

        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        title = c.getString(COLUMN_TITLE);
        description = c.getString(COLUMN_DESCRIPTION);
        headline = c.getString(COLUMN_HEADLINE);
        areas = c.getString(COLUMN_AREAS);
        url = c.getString(COLUMN_LINK);
        issued = c.getString(COLUMN_ISSUED);

        CAPFeedParser.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id))
                    .build();
            if ((match.title != null && !match.title.equals(title))
                    || (match.link != null && !match.link.equals(url))
                    || (match.issued != null && !match.issued.equals(issued))) {
                // Update existing record
                //Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, title)
                        .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, description)
                        .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, headline)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, issued)
                        .withValue(FeedContract.Entry.COLUMN_NAME_LINK, url)
                        .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, areas).build());
                syncResult.stats.numUpdates++;
            } else {
                //Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
            //Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (CAPFeedParser.Entry e : entryMap.values()) {
        //Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, e.title)
                .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, e.description)
                .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, e.headline)
                .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, e.issued)
                .withValue(FeedContract.Entry.COLUMN_NAME_LINK, e.link)
                .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, e.areas).build());
        syncResult.stats.numInserts++;
    }
    //Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}

From source file:com.weimed.app.sync.SyncAdapter.java

/**
 * Read JSON from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalJSONData(final InputStream stream, final SyncResult syncResult)
        throws IOException, JSONException, RemoteException, OperationApplicationException, ParseException {
    final JSONParser JSONParser = new JSONParser();
    final ContentResolver contentResolver = getContext().getContentResolver();

    Log.i(TAG, "Parsing stream as JSON Array");
    final JSONObject json = JSONParser.parseJSONObject(stream);
    Log.i(TAG, "Parsing complete. Found " + json.getInt("total_rows") + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, JSONObject> entryMap = new HashMap<String, JSONObject>();
    final JSONArray entries = json.getJSONArray("rows");
    for (int i = 0; i < json.getInt("total_rows"); i++) {
        JSONObject e = entries.getJSONObject(i).getJSONObject("value");
        entryMap.put(e.getString("_id"), e);
    }

    // Get list of all items
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = NewsContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String title;
    String content;
    String publisher;
    String picurl;
    String originalurl;
    String createdat;
    String updatedat;
    String publishedat;

    while (c.moveToNext()) {
        syncResult.stats.numEntries++;
        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        title = c.getString(COLUMN_TITLE);
        content = c.getString(COLUMN_CONTENT);
        publisher = c.getString(COLUMN_PUBLISHER);
        picurl = c.getString(COLUMN_PICURL);
        originalurl = c.getString(COLUMN_ORIGINALURL);
        createdat = c.getString(COLUMN_CREATEDAT);
        updatedat = c.getString(COLUMN_UPDATEDAT);
        publishedat = c.getString(COLUMN_PUBLISHEDAT);
        JSONObject match = entryMap.get(entryId);
        //            if (match != null) {
        // Entry exists. Remove from entry map to prevent insert later.
        //                entryMap.remove(entryId);
        // Check to see if the entry needs to be updated
        // How to know update local or remote? updatedAt! which is newer, update another.
        //                Uri existingUri = NewsContract.Entry.CONTENT_URI.buildUpon()
        //                                              .appendPath(Integer.toString(id)).build();
        //                if ((match.getString("title") != null && !match.getString("title").equals(title)) ||
        //                    (match.getString("content") != null && !match.getString("content").equals(content)) ||
        //                    (match.getString("publisher") != null && !match.getString("publisher").equals(publisher)) ||
        //                    (match.getString("picurl") != null && !match.getString("picurl").equals(picurl)) ||
        //                    (match.getString("originalurl") != null && !match.getString("originalurl").equals(originalurl)) ||
        //                    (match.getString("createdat") != null && !match.getString("createdat").equals(createdat)) ||
        //                    (match.getString("updatedat") != null && !match.getString("updatedat").equals(updatedat)) ||
        //                    (match.getString("publishedat") != null && !match.getString("publishedat").equals(publishedat))
        //                   ) {
        //                    // Update existing record
        //                    Log.i(TAG, "Scheduling update: " + existingUri);
        //                    batch.add(ContentProviderOperation.newUpdate(existingUri)
        //                         .withValue(NewsContract.Entry.COLUMN_TITLE, title)
        //                         .withValue(NewsContract.Entry.COLUMN_CONTENT, content)
        //                         .withValue(NewsContract.Entry.COLUMN_PUBLISHER, publisher)
        //                         .withValue(NewsContract.Entry.COLUMN_PICURL, picurl)
        //                         .withValue(NewsContract.Entry.COLUMN_ORIGINALURL, originalurl)
        //                         .withValue(NewsContract.Entry.COLUMN_CREATEDAT, createdat)
        //                         .withValue(NewsContract.Entry.COLUMN_UPDATEDAT, updatedat)
        //                         .withValue(NewsContract.Entry.COLUMN_PUBLISHEDAT, publishedat)
        //                         .build());
        //                    syncResult.stats.numUpdates++;
        //                } else {
        //                    Log.i(TAG, "No action: " + existingUri);
        //                }
        //            } else {
        // Entry doesn't exist. Remove it from the database.
        Uri deleteUri = NewsContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
        Log.i(TAG, "Scheduling delete: " + deleteUri);
        batch.add(ContentProviderOperation.newDelete(deleteUri).build());
        syncResult.stats.numDeletes++;
        //            }
    }
    c.close();

    // Add new items
    for (JSONObject e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.getString("_id"));
        batch.add(ContentProviderOperation.newInsert(NewsContract.Entry.CONTENT_URI)
                .withValue(NewsContract.Entry.COLUMN_ENTRY_ID, e.getString("_id"))
                .withValue(NewsContract.Entry.COLUMN_TITLE, e.getString("title"))
                .withValue(NewsContract.Entry.COLUMN_CONTENT,
                        fetchTextFileToString(NEWS_URL_BASE + '/' + e.getString("_id") + "/content.md"))
                .withValue(NewsContract.Entry.COLUMN_PUBLISHER, e.getString("publisher"))
                .withValue(NewsContract.Entry.COLUMN_PICURL, e.has("pic_link") ? e.getString("pic_link") : null)
                .withValue(NewsContract.Entry.COLUMN_ORIGINALURL, e.getString("origin_link"))
                .withValue(NewsContract.Entry.COLUMN_CREATEDAT, e.getString("created_at"))
                .withValue(NewsContract.Entry.COLUMN_UPDATEDAT, e.getString("updated_at"))
                .withValue(NewsContract.Entry.COLUMN_PUBLISHEDAT, e.getString("publish_at")).build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(NewsContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(NewsContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}

From source file:com.granita.icloudcalsync.resource.LocalCollection.java

/**
 * Enqueues deleting all resources except the given ones from the local collection. Requires commit().
 * @param remoteResources resources with these remote file names will be kept
 */
public void deleteAllExceptRemoteNames(Resource[] remoteResources) {
    final String where;

    if (remoteResources.length != 0) {
        // delete all except certain entries
        final List<String> sqlFileNames = new LinkedList<>();
        for (final Resource res : remoteResources)
            sqlFileNames.add(DatabaseUtils.sqlEscapeString(res.getName()));
        where = entryColumnRemoteName() + " NOT IN (" + StringUtils.join(sqlFileNames, ",") + ')';
    } else
        // delete all entries
        where = entryColumnRemoteName() + " IS NOT NULL";

    ContentProviderOperation.Builder builder = ContentProviderOperation.newDelete(entriesURI()).withSelection( // restrict deletion to parent collection
            entryColumnParentID() + "=? AND (" + where + ')', new String[] { String.valueOf(getId()) });
    pendingOperations.add(builder.withYieldAllowed(true).build());
}
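
A note on the design: a NOT IN list cannot be expressed with bound ? arguments, so each remote file name is passed through DatabaseUtils.sqlEscapeString, which quotes and escapes the value before it is joined into the selection string; the parent-collection ID, by contrast, is bound normally as a selection argument.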