List of usage examples for `android.content.ContentProviderOperation.newInsert(Uri)`.
public static Builder newInsert(Uri uri)
From source file:com.ptts.sync.SyncAdapter.java
/**
 * Read JSON from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *       database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 *
 * @param stream     JSON feed to merge into the local content provider
 * @param syncResult accumulator updated with entry/insert/update/delete counts
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
        throws IOException, JSONException, RemoteException, OperationApplicationException, ParseException {
    final FeedParserJson feedParser = new FeedParserJson();
    final ContentResolver contentResolver = getContext().getContentResolver();
    Log.i(TAG, "Parsing stream as Json feed");
    final List<FeedParserJson.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");
    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries, keyed by server-side entry id.
    HashMap<String, FeedParserJson.Entry> entryMap = new HashMap<String, FeedParserJson.Entry>();
    for (FeedParserJson.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items currently in the local database.
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String name;
    String start;
    String end;
    String stops;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;
        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        name = c.getString(COLUMN_NAME);
        start = c.getString(COLUMN_START);
        end = c.getString(COLUMN_END);
        stops = c.getString(COLUMN_STOPS);
        // String.valueOf() guards against Log.i throwing on a null column value.
        Log.i("STOPS FROM PROJECTION", String.valueOf(stops));
        FeedParserJson.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                    .appendPath(Integer.toString(id)).build();
            // BUGFIX: the original compared (match.end != end), a reference comparison
            // on Strings that is almost always true; use equals() like the other
            // fields so unchanged rows are not needlessly rewritten.
            if ((match.name != null && !match.name.equals(name))
                    || (match.start != null && !match.start.equals(start))
                    || (match.stops != null && !match.stops.equals(stops))
                    || (match.end != null && !match.end.equals(end))) {
                Log.i("STOPS FROM HASHMAP", String.valueOf(match.stops));
                if (match.stops != null && !match.stops.equals(stops)) {
                    Log.i("COMPARING PROJECTION " + match.stops + " & HASHMAP " + stops,
                            "The two aren't equal");
                } else {
                    Log.i("COMPARING PROJECTION & HASHMAP", "The two are equal");
                }
                // Update existing record.
                // BUGFIX: write the *incoming* (match) values; the original wrote the
                // cursor values back, which made the update a no-op.
                Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, entryId)
                        .withValue(FeedContract.Entry.COLUMN_NAME_NAME, match.name)
                        .withValue(FeedContract.Entry.COLUMN_NAME_START, match.start)
                        .withValue(FeedContract.Entry.COLUMN_NAME_END, match.end)
                        .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, match.stops).build());
                syncResult.stats.numUpdates++;
            } else {
                Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist locally on the server anymore. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                    .appendPath(Integer.toString(id)).build();
            Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Anything still in the map is new on the server: schedule inserts.
    for (FeedParserJson.Entry e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_NAME, e.name)
                .withValue(FeedContract.Entry.COLUMN_NAME_START, e.start)
                .withValue(FeedContract.Entry.COLUMN_NAME_END, e.end)
                .withValue(FeedContract.Entry.COLUMN_NAME_STOPS, e.stops).build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}
From source file:com.example.android.network.sync.basicsyncadapter.SyncAdapter.java
/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *       database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 *
 * @param stream     XML feed of transformers to merge into the local provider
 * @param syncResult accumulator updated with entry/insert/update/delete counts
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
        throws IOException, XmlPullParserException, RemoteException, OperationApplicationException, ParseException {
    final ContentResolver contentResolver = getContext().getContentResolver();
    Log.i(TAG, "Parsing stream as Atom feed");
    // BUGFIX: the original left the parse call commented out and assigned null,
    // guaranteeing a NullPointerException at entries.size(). Restore the call and
    // guard against a null result.
    final List<Transformer> entries = this.parseTransformersResponse(stream);
    if (entries == null) {
        Log.w(TAG, "Parser returned no entries; nothing to merge");
        return;
    }
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");
    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries, keyed by transformer id.
    HashMap<String, Transformer> entryMap = new HashMap<String, Transformer>();
    for (Transformer e : entries) {
        entryMap.put(e.transformerID, e);
    }

    // Get list of all items currently in the local database.
    Log.i(TAG, "Fetching local entries for merge");
    Uri uri = Transformer.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, null, null, null, null);
    // BUGFIX: the original wrapped the query in a try/catch that swallowed every
    // exception and then dereferenced a possibly-null cursor. Fail fast instead.
    if (c == null) {
        Log.e(TAG, "Provider query returned null cursor; aborting merge");
        return;
    }
    Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    String id;
    String name;
    String location;
    try {
        while (c.moveToNext()) {
            syncResult.stats.numEntries++;
            // BUGFIX: getColumnName() returns the column's *name*, not the row's
            // value; use getString() to read the stored id.
            id = c.getString(COLUMN_ID);
            name = c.getString(COLUMN_ENTRY_ID);
            location = c.getString(COLUMN_TITLE);
            Transformer match = entryMap.get(id);
            if (match != null) {
                // Entry exists. Remove from entry map to prevent insert later.
                entryMap.remove(id);
                // Check to see if the entry needs to be updated.
                Uri existingUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
                // BUGFIX: the original compared trsLocation against the local *name*;
                // compare name-to-name and location-to-location.
                if ((match.trsName != null && !match.trsName.equals(name))
                        || (match.trsLocation != null && !match.trsLocation.equals(location))) {
                    // Update existing record with the *incoming* values (writing the
                    // cursor values back, as the original did, is a no-op).
                    Log.i(TAG, "Scheduling update: " + existingUri);
                    batch.add(ContentProviderOperation.newUpdate(existingUri)
                            .withValue(Transformer.KEY_NAME, match.trsName)
                            .withValue(Transformer.KEY_LOCATION, match.trsLocation)
                            .build());
                    syncResult.stats.numUpdates++;
                } else {
                    Log.i(TAG, "No action: " + existingUri);
                }
            } else {
                // Entry doesn't exist on the server anymore. Remove it from the database.
                Uri deleteUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
                Log.i(TAG, "Scheduling delete: " + deleteUri);
                batch.add(ContentProviderOperation.newDelete(deleteUri).build());
                syncResult.stats.numDeletes++;
            }
        }
    } finally {
        // Ensure the cursor is released even if a provider call throws.
        c.close();
    }

    // Anything still in the map is new on the server: schedule inserts.
    for (Transformer e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.transformerID);
        batch.add(ContentProviderOperation.newInsert(Transformer.CONTENT_URI)
                .withValue(Transformer.KEY_TRANSFORMER_ID, e.transformerID)
                .withValue(Transformer.KEY_NAME, e.trsName)
                .withValue(Transformer.KEY_LOCATION, e.trsLocation)
                .withValue(Transformer.KEY_CURRENT_TEMP, e.trsCurrentTemp)
                .withValue(Transformer.KEY_LAST_SERVER_SYNC_DATE, e.lastServerSyncDate)
                .withValue(Transformer.KEY_LAST_UPDATED_TIME, e.lastServerSyncDate)
                .withValue(Transformer.KEY_SYNC_STATUS, 0)
                .withValue(Transformer.KEY_MAKE, e.trsMake)
                .withValue(Transformer.KEY_WINDING_MAKE, e.trsWindingMake)
                .withValue(Transformer.KEY_WINDING_COUNT, e.trsWindingCount)
                .withValue(Transformer.KEY_OIL_LEVEL, e.trsOilLevel)
                .withValue(Transformer.KEY_OPERATING_POWER, e.trsOperatingPower)
                .withValue(Transformer.KEY_TYPE, e.trsType)
                .build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(Transformer.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(Transformer.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}
From source file:fr.mixit.android.io.JsonHandlerApplyTalks.java
/**
 * Records the link between one session and every interest id in the given JSON array.
 *
 * <p>Each id becomes one insert operation against the session's interests directory URI;
 * operations are funneled through {@code ProviderParsingUtils.addOpeAndApplyBatch}. The
 * collected id set is also cached in {@code mItemInterestsIds} for later diffing.
 *
 * @param itemId    session id the interests belong to
 * @param interests JSON array of integer interest ids
 * @param resolver  resolver used to apply the batched operations
 */
public void parseLinkedInterests(String itemId, JSONArray interests, ContentResolver resolver) throws JSONException {
    final Uri itemInterestsUri = MixItContract.Sessions.buildInterestsDirUri(itemId);
    final HashSet<String> interestsIds = Sets.newHashSet();
    final int count = interests.length();
    for (int index = 0; index < count; index++) {
        final String interestId = String.valueOf(interests.getInt(index));
        interestsIds.add(interestId);
        // One (session, interest) join row per id.
        final ContentProviderOperation insertOp = ContentProviderOperation.newInsert(itemInterestsUri)
                .withValue(MixItDatabase.SessionsInterests.INTEREST_ID, interestId)
                .withValue(MixItDatabase.SessionsInterests.SESSION_ID, itemId)
                .build();
        ProviderParsingUtils.addOpeAndApplyBatch(mAuthority, resolver, mBatch, false, insertOp);
    }
    mItemInterestsIds.put(itemId, interestsIds);
}
From source file:saschpe.birthdays.service.CalendarSyncService.java
/**
 * Builds an insert operation for a single all-day event occurrence in the given calendar.
 *
 * @param context     used to resolve the calendar URI and this app's package name
 * @param calendarId  id of the calendar the event is inserted into
 * @param eventDate   the source event date (e.g. the birthday)
 * @param year        the year this particular occurrence falls in
 * @param title       event title
 * @param description event description
 * @param lookupKey   contact lookup key used to link the event back to the contact; may be null
 * @return a ready-to-apply insert {@link ContentProviderOperation}
 */
private static ContentProviderOperation insertEvent(Context context, long calendarId, Date eventDate, int year,
        String title, String description, String lookupKey) {
    ContentProviderOperation.Builder builder = ContentProviderOperation
            .newInsert(getCalendarUri(context, CalendarContract.Events.CONTENT_URI));
    Calendar calendar = Calendar.getInstance();
    calendar.setTime(eventDate);
    calendar.set(Calendar.YEAR, year);
    // BUGFIX: Calendar.HOUR is the 12-hour-clock field; if eventDate fell in the
    // afternoon, setting HOUR to 0 left AM_PM at PM and the event at 12:00 noon.
    // HOUR_OF_DAY zeroes the time of day unconditionally.
    calendar.set(Calendar.HOUR_OF_DAY, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    calendar.setTimeZone(TimeZone.getTimeZone("UTC"));
    /* Define over entire day.
     *
     * Note: ALL_DAY is enough on original Android calendar, but some calendar apps (Business
     * Calendar) do not display the event if time between dtstart and dtend is 0
     */
    final long dtstart = calendar.getTimeInMillis();
    final long dtend = dtstart + DateUtils.DAY_IN_MILLIS;
    builder.withValue(CalendarContract.Events.CALENDAR_ID, calendarId);
    builder.withValue(CalendarContract.Events.DTSTART, dtstart);
    builder.withValue(CalendarContract.Events.DTEND, dtend);
    builder.withValue(CalendarContract.Events.EVENT_TIMEZONE, Time.TIMEZONE_UTC);
    builder.withValue(CalendarContract.Events.ALL_DAY, 1);
    builder.withValue(CalendarContract.Events.TITLE, title);
    builder.withValue(CalendarContract.Events.DESCRIPTION, description);
    builder.withValue(CalendarContract.Events.STATUS, CalendarContract.Events.STATUS_CONFIRMED);
    /* Enable reminders for this event
     * Note: Need to be explicitly set on Android < 4 to enable reminders
     */
    builder.withValue(CalendarContract.Events.HAS_ALARM, 1);
    // Set availability to free.
    if (Build.VERSION.SDK_INT >= 14) {
        builder.withValue(CalendarContract.Events.AVAILABILITY, CalendarContract.Events.AVAILABILITY_FREE);
    }
    // Add button to open contact
    if (Build.VERSION.SDK_INT >= 16 && lookupKey != null) {
        builder.withValue(CalendarContract.Events.CUSTOM_APP_PACKAGE, context.getPackageName());
        final Uri contactLookupUri = Uri.withAppendedPath(ContactsContract.Contacts.CONTENT_LOOKUP_URI, lookupKey);
        builder.withValue(CalendarContract.Events.CUSTOM_APP_URI, contactLookupUri.toString());
    }
    return builder.build();
}
From source file:org.linphone.compatibility.ApiFivePlus.java
/**
 * Queues an insert that attaches a SIP address to the given raw contact, stored as a
 * custom-typed IM data row with the app's address-book label.
 *
 * @param context      used to resolve the custom IM label string
 * @param ops          batch the insert operation is appended to
 * @param sipAddress   SIP address to store
 * @param rawContactID raw contact the data row is attached to
 */
public static void addSipAddressToContact(Context context, ArrayList<ContentProviderOperation> ops,
        String sipAddress, String rawContactID) {
    ContentProviderOperation.Builder insert = ContentProviderOperation.newInsert(ContactsContract.Data.CONTENT_URI);
    insert.withValue(ContactsContract.Data.RAW_CONTACT_ID, rawContactID);
    insert.withValue(ContactsContract.Data.MIMETYPE, ContactsContract.CommonDataKinds.Im.CONTENT_ITEM_TYPE);
    insert.withValue(ContactsContract.CommonDataKinds.Im.DATA, sipAddress);
    insert.withValue(ContactsContract.CommonDataKinds.Im.TYPE, ContactsContract.CommonDataKinds.Im.TYPE_CUSTOM);
    insert.withValue(ContactsContract.CommonDataKinds.Im.LABEL, context.getString(R.string.addressbook_label));
    ops.add(insert.build());
}
From source file:org.sufficientlysecure.keychain.provider.KeyWritableRepository.java
/**
 * Saves an UncachedKeyRing of the public variant into the db.
 * <p/>
 * This method will not delete all previous data for this masterKeyId from the database prior
 * to inserting. All public data is effectively re-inserted, secret keyrings are left deleted
 * and need to be saved externally to be preserved past the operation.
 *
 * @param keyRing             canonicalized public key ring to persist
 * @param selfCertsAreTrusted whether self-certifications should be stored as VERIFIED_SECRET
 *                            (i.e. as if certified by an owned secret key)
 * @return a SaveKeyringResult status constant; SAVED_PUBLIC (possibly |= UPDATED) on success,
 *         RESULT_ERROR on any failure
 */
@SuppressWarnings("unchecked")
private int saveCanonicalizedPublicKeyRing(CanonicalizedPublicKeyRing keyRing, boolean selfCertsAreTrusted) {
    // start with ok result
    int result = SaveKeyringResult.SAVED_PUBLIC;
    long masterKeyId = keyRing.getMasterKeyId();
    UncachedPublicKey masterKey = keyRing.getPublicKey();
    ArrayList<ContentProviderOperation> operations;
    try {
        log(LogType.MSG_IP_PREPARE);
        mIndent += 1;
        // save all keys and userIds included in keyRing object in database
        operations = new ArrayList<>();
        log(LogType.MSG_IP_INSERT_KEYRING);
        try {
            writePublicKeyRing(keyRing, masterKeyId, operations);
        } catch (IOException e) {
            // encoding the ring failed; abort before touching the database
            log(LogType.MSG_IP_ENCODE_FAIL);
            return SaveKeyringResult.RESULT_ERROR;
        }
        log(LogType.MSG_IP_INSERT_SUBKEYS);
        mIndent += 1;
        { // insert subkeys
            Uri uri = Keys.buildKeysUri(masterKeyId);
            int rank = 0;  // position of the key within the ring, master key first
            for (CanonicalizedPublicKey key : keyRing.publicKeyIterator()) {
                long keyId = key.getKeyId();
                log(keyId == masterKeyId ? LogType.MSG_IP_MASTER : LogType.MSG_IP_SUBKEY,
                        KeyFormattingUtils.convertKeyIdToHex(keyId));
                mIndent += 1;
                ContentValues values = new ContentValues();
                values.put(Keys.MASTER_KEY_ID, masterKeyId);
                values.put(Keys.RANK, rank);
                values.put(Keys.KEY_ID, key.getKeyId());
                values.put(Keys.KEY_SIZE, key.getBitStrength());
                values.put(Keys.KEY_CURVE_OID, key.getCurveOid());
                values.put(Keys.ALGORITHM, key.getAlgorithm());
                values.put(Keys.FINGERPRINT, key.getFingerprint());
                // capability flags of this (sub)key
                boolean c = key.canCertify(), e = key.canEncrypt(), s = key.canSign(), a = key.canAuthenticate();
                values.put(Keys.CAN_CERTIFY, c);
                values.put(Keys.CAN_ENCRYPT, e);
                values.put(Keys.CAN_SIGN, s);
                values.put(Keys.CAN_AUTHENTICATE, a);
                values.put(Keys.IS_REVOKED, key.isRevoked());
                values.put(Keys.IS_SECURE, key.isSecure());
                // see above
                if (masterKeyId == keyId) {
                    if (key.getKeyUsage() == null) {
                        log(LogType.MSG_IP_MASTER_FLAGS_UNSPECIFIED);
                    } else {
                        // bitmask index into the per-flag-combination log message table
                        log(LOG_TYPES_FLAG_MASTER[(c ? 1 : 0) + (e ? 2 : 0) + (s ? 4 : 0) + (a ? 8 : 0)]);
                    }
                } else {
                    if (key.getKeyUsage() == null) {
                        log(LogType.MSG_IP_SUBKEY_FLAGS_UNSPECIFIED);
                    } else {
                        log(LOG_TYPES_FLAG_SUBKEY[(c ? 1 : 0) + (e ? 2 : 0) + (s ? 4 : 0) + (a ? 8 : 0)]);
                    }
                }
                // timestamps are stored as unix seconds, not milliseconds
                Date creation = key.getCreationTime();
                values.put(Keys.CREATION, creation.getTime() / 1000);
                Date expiryDate = key.getExpiryTime();
                if (expiryDate != null) {
                    values.put(Keys.EXPIRY, expiryDate.getTime() / 1000);
                    if (key.isExpired()) {
                        log(keyId == masterKeyId ? LogType.MSG_IP_MASTER_EXPIRED : LogType.MSG_IP_SUBKEY_EXPIRED,
                                expiryDate.toString());
                    } else {
                        log(keyId == masterKeyId ? LogType.MSG_IP_MASTER_EXPIRES : LogType.MSG_IP_SUBKEY_EXPIRES,
                                expiryDate.toString());
                    }
                }
                operations.add(ContentProviderOperation.newInsert(uri).withValues(values).build());
                ++rank;
                mIndent -= 1;
            }
        }
        mIndent -= 1;

        // get a list of owned secret keys, for verification filtering
        LongSparseArray<CanonicalizedPublicKey> trustedKeys = getTrustedMasterKeys();

        // classify and order user ids. primary are moved to the front, revoked to the back,
        // otherwise the order in the keyfile is preserved.
        List<UserPacketItem> uids = new ArrayList<>();
        List<Long> signerKeyIds = new ArrayList<>();
        if (trustedKeys.size() == 0) {
            log(LogType.MSG_IP_UID_CLASSIFYING_ZERO);
        } else {
            log(LogType.MSG_IP_UID_CLASSIFYING, trustedKeys.size());
        }
        mIndent += 1;
        for (byte[] rawUserId : masterKey.getUnorderedRawUserIds()) {
            String userId = Utf8Util.fromUTF8ByteArrayReplaceBadEncoding(rawUserId);
            UserPacketItem item = new UserPacketItem();
            uids.add(item);
            OpenPgpUtils.UserId splitUserId = KeyRing.splitUserId(userId);
            item.userId = userId;
            item.name = splitUserId.name;
            item.email = splitUserId.email;
            item.comment = splitUserId.comment;
            int unknownCerts = 0;
            log(LogType.MSG_IP_UID_PROCESSING, userId);
            mIndent += 1;
            // look through signatures for this specific key
            for (WrappedSignature cert : new IterableIterator<>(masterKey.getSignaturesForRawId(rawUserId))) {
                long certId = cert.getKeyId();
                // self signature
                if (certId == masterKeyId) {
                    // NOTE self-certificates are already verified during canonicalization,
                    // AND we know there is at most one cert plus at most one revocation
                    if (!cert.isRevocation()) {
                        item.selfCert = cert;
                        item.isPrimary = cert.isPrimaryUserId();
                    } else {
                        item.selfRevocation = cert;
                        log(LogType.MSG_IP_UID_REVOKED);
                    }
                    continue;
                }
                // do we have a trusted key for this?
                if (trustedKeys.indexOfKey(certId) < 0) {
                    // unknown signer: record the (masterKeyId, signer) pair once
                    if (!signerKeyIds.contains(certId)) {
                        operations.add(ContentProviderOperation.newInsert(KeySignatures.CONTENT_URI)
                                .withValue(KeySignatures.MASTER_KEY_ID, masterKeyId)
                                .withValue(KeySignatures.SIGNER_KEY_ID, certId).build());
                        signerKeyIds.add(certId);
                    }
                    unknownCerts += 1;
                    continue;
                }
                // verify signatures from known private keys
                CanonicalizedPublicKey trustedKey = trustedKeys.get(certId);
                try {
                    cert.init(trustedKey);
                    // if it doesn't certify, leave a note and skip
                    if (!cert.verifySignature(masterKey, rawUserId)) {
                        log(LogType.MSG_IP_UID_CERT_BAD);
                        continue;
                    }
                    log(cert.isRevocation() ? LogType.MSG_IP_UID_CERT_GOOD_REVOKE : LogType.MSG_IP_UID_CERT_GOOD,
                            KeyFormattingUtils.convertKeyIdToHexShort(trustedKey.getKeyId()));
                    // check if there is a previous certificate
                    WrappedSignature prev = item.trustedCerts.get(cert.getKeyId());
                    if (prev != null) {
                        // if it's newer, skip this one
                        if (prev.getCreationTime().after(cert.getCreationTime())) {
                            log(LogType.MSG_IP_UID_CERT_OLD);
                            continue;
                        }
                        // if the previous one was a non-revokable certification, no need to look further
                        if (!prev.isRevocation() && !prev.isRevokable()) {
                            log(LogType.MSG_IP_UID_CERT_NONREVOKE);
                            continue;
                        }
                        log(LogType.MSG_IP_UID_CERT_NEW);
                    }
                    item.trustedCerts.put(cert.getKeyId(), cert);
                } catch (PgpGeneralException e) {
                    log(LogType.MSG_IP_UID_CERT_ERROR, KeyFormattingUtils.convertKeyIdToHex(cert.getKeyId()));
                }
            }
            if (unknownCerts > 0) {
                log(LogType.MSG_IP_UID_CERTS_UNKNOWN, unknownCerts);
            }
            mIndent -= 1;
        }
        mIndent -= 1;

        ArrayList<WrappedUserAttribute> userAttributes = masterKey.getUnorderedUserAttributes();
        // Don't spam the log if there aren't even any attributes
        if (!userAttributes.isEmpty()) {
            log(LogType.MSG_IP_UAT_CLASSIFYING);
        }
        mIndent += 1;
        for (WrappedUserAttribute userAttribute : userAttributes) {
            UserPacketItem item = new UserPacketItem();
            uids.add(item);
            item.type = userAttribute.getType();
            item.attributeData = userAttribute.getEncoded();
            int unknownCerts = 0;
            switch (item.type) {
            case WrappedUserAttribute.UAT_IMAGE:
                log(LogType.MSG_IP_UAT_PROCESSING_IMAGE);
                break;
            default:
                log(LogType.MSG_IP_UAT_PROCESSING_UNKNOWN);
                break;
            }
            mIndent += 1;
            // look through signatures for this specific key
            for (WrappedSignature cert : new IterableIterator<>(
                    masterKey.getSignaturesForUserAttribute(userAttribute))) {
                long certId = cert.getKeyId();
                // self signature
                if (certId == masterKeyId) {
                    // NOTE self-certificates are already verified during canonicalization,
                    // AND we know there is at most one cert plus at most one revocation
                    // AND the revocation only exists if there is no newer certification
                    if (!cert.isRevocation()) {
                        item.selfCert = cert;
                    } else {
                        item.selfRevocation = cert;
                        log(LogType.MSG_IP_UAT_REVOKED);
                    }
                    continue;
                }
                // do we have a trusted key for this?
                if (trustedKeys.indexOfKey(certId) < 0) {
                    unknownCerts += 1;
                    continue;
                }
                // verify signatures from known private keys
                CanonicalizedPublicKey trustedKey = trustedKeys.get(certId);
                try {
                    cert.init(trustedKey);
                    // if it doesn't certify, leave a note and skip
                    if (!cert.verifySignature(masterKey, userAttribute)) {
                        log(LogType.MSG_IP_UAT_CERT_BAD);
                        continue;
                    }
                    log(cert.isRevocation() ? LogType.MSG_IP_UAT_CERT_GOOD_REVOKE : LogType.MSG_IP_UAT_CERT_GOOD,
                            KeyFormattingUtils.convertKeyIdToHexShort(trustedKey.getKeyId()));
                    // check if there is a previous certificate
                    WrappedSignature prev = item.trustedCerts.get(cert.getKeyId());
                    if (prev != null) {
                        // if it's newer, skip this one
                        if (prev.getCreationTime().after(cert.getCreationTime())) {
                            log(LogType.MSG_IP_UAT_CERT_OLD);
                            continue;
                        }
                        // if the previous one was a non-revokable certification, no need to look further
                        if (!prev.isRevocation() && !prev.isRevokable()) {
                            log(LogType.MSG_IP_UAT_CERT_NONREVOKE);
                            continue;
                        }
                        log(LogType.MSG_IP_UAT_CERT_NEW);
                    }
                    item.trustedCerts.put(cert.getKeyId(), cert);
                } catch (PgpGeneralException e) {
                    log(LogType.MSG_IP_UAT_CERT_ERROR, KeyFormattingUtils.convertKeyIdToHex(cert.getKeyId()));
                }
            }
            if (unknownCerts > 0) {
                log(LogType.MSG_IP_UAT_CERTS_UNKNOWN, unknownCerts);
            }
            mIndent -= 1;
        }
        mIndent -= 1;

        log(LogType.MSG_IP_UID_REORDER);
        // primary before regular before revoked (see UserIdItem.compareTo)
        // this is a stable sort, so the order of keys is otherwise preserved.
        Collections.sort(uids);
        // iterate and put into db
        for (int userIdRank = 0; userIdRank < uids.size(); userIdRank++) {
            UserPacketItem item = uids.get(userIdRank);
            operations.add(buildUserIdOperations(masterKeyId, item, userIdRank));
            if (item.selfRevocation != null) {
                operations.add(
                        buildCertOperations(masterKeyId, userIdRank, item.selfRevocation, Certs.VERIFIED_SELF));
                // don't bother with trusted certs if the uid is revoked, anyways
                continue;
            }
            if (item.selfCert == null) {
                // canonicalization guarantees every retained uid carries a self-cert
                throw new AssertionError("User ids MUST be self-certified at this point!!");
            }
            operations.add(buildCertOperations(masterKeyId, userIdRank, item.selfCert,
                    selfCertsAreTrusted ? Certs.VERIFIED_SECRET : Certs.VERIFIED_SELF));
            // iterate over signatures
            for (int i = 0; i < item.trustedCerts.size(); i++) {
                WrappedSignature sig = item.trustedCerts.valueAt(i);
                // if it's a revocation
                if (sig.isRevocation()) {
                    // don't further process it
                    continue;
                }
                // otherwise, build database operation
                operations.add(buildCertOperations(masterKeyId, userIdRank, sig, Certs.VERIFIED_SECRET));
            }
        }
    } catch (IOException e) {
        log(LogType.MSG_IP_ERROR_IO_EXC);
        Log.e(Constants.TAG, "IOException during import", e);
        return SaveKeyringResult.RESULT_ERROR;
    } finally {
        mIndent -= 1;
    }

    // re-insert the cached "last updated" row so it survives the delete below
    ContentProviderOperation lastUpdateReinsertOp = getLastUpdatedReinsertOperationByMasterKeyId(masterKeyId);
    if (lastUpdateReinsertOp != null) {
        operations.add(lastUpdateReinsertOp);
    }

    try {
        // delete old version of this keyRing (from database only!), which also deletes all keys and userIds on cascade
        int deleted = mContentResolver.delete(KeyRingData.buildPublicKeyRingUri(masterKeyId), null, null);
        if (deleted > 0) {
            log(LogType.MSG_IP_DELETE_OLD_OK);
            result |= SaveKeyringResult.UPDATED;
        } else {
            log(LogType.MSG_IP_DELETE_OLD_FAIL);
        }
        log(LogType.MSG_IP_APPLY_BATCH);
        mContentResolver.applyBatch(KeychainContract.CONTENT_AUTHORITY, operations);
        log(LogType.MSG_IP_SUCCESS);
        return result;
    } catch (RemoteException e) {
        log(LogType.MSG_IP_ERROR_REMOTE_EX);
        Log.e(Constants.TAG, "RemoteException during import", e);
        return SaveKeyringResult.RESULT_ERROR;
    } catch (OperationApplicationException e) {
        log(LogType.MSG_IP_ERROR_OP_EXC);
        Log.e(Constants.TAG, "OperationApplicationException during import", e);
        return SaveKeyringResult.RESULT_ERROR;
    }
}
From source file:fr.mixit.android.io.JsonHandlerApplyTalks.java
/**
 * Records the link between one session and every speaker id in the given JSON array.
 *
 * <p>Each id becomes one insert operation against the session's speakers directory URI;
 * operations are funneled through {@code ProviderParsingUtils.addOpeAndApplyBatch}. The
 * collected id set is also cached in {@code mItemSpeakersIds} for later diffing.
 *
 * @param itemId   session id the speakers belong to
 * @param speakers JSON array of integer speaker ids
 * @param resolver resolver used to apply the batched operations
 */
public void parseLinkedSpeakers(String itemId, JSONArray speakers, ContentResolver resolver) throws JSONException {
    final Uri itemSpeakersUri = MixItContract.Sessions.buildSpeakersDirUri(itemId);
    final HashSet<String> speakersIds = Sets.newHashSet();
    final int count = speakers.length();
    for (int index = 0; index < count; index++) {
        final String speakerId = String.valueOf(speakers.getInt(index));
        speakersIds.add(speakerId);
        // One (session, speaker) join row per id.
        final ContentProviderOperation insertOp = ContentProviderOperation.newInsert(itemSpeakersUri)
                .withValue(MixItDatabase.SessionsSpeakers.SPEAKER_ID, speakerId)
                .withValue(MixItDatabase.SessionsSpeakers.SESSION_ID, itemId)
                .build();
        ProviderParsingUtils.addOpeAndApplyBatch(mAuthority, resolver, mBatch, false, insertOp);
    }
    mItemSpeakersIds.put(itemId, speakersIds);
}
From source file:com.google.samples.apps.iosched.io.SessionsHandler.java
/**
 * Rebuilds the session-to-speaker mapping rows for one session: first deletes any
 * existing relationships, then queues one insert per speaker id (if any).
 *
 * @param session the session whose speaker links are rebuilt
 * @param list    batch the delete/insert operations are appended to
 */
private void buildSessionSpeakerMapping(Session session, ArrayList<ContentProviderOperation> list) {
    final Uri uri = ScheduleContractHelper
            .setUriAsCalledFromSyncAdapter(ScheduleContract.Sessions.buildSpeakersDirUri(session.id));
    // delete any existing relationship between this session and speakers
    list.add(ContentProviderOperation.newDelete(uri).build());
    // add relationship records to indicate the speakers for this session
    if (session.speakers == null) {
        return;
    }
    for (String speakerId : session.speakers) {
        final ContentProviderOperation insert = ContentProviderOperation.newInsert(uri)
                .withValue(ScheduleDatabase.SessionsSpeakers.SESSION_ID, session.id)
                .withValue(ScheduleDatabase.SessionsSpeakers.SPEAKER_ID, speakerId)
                .build();
        list.add(insert);
    }
}
From source file:com.example.android.basicsyncadapter.SyncAdapter.java
/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *       database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 *
 * @param stream     CAP feed to merge into the local content provider
 * @param syncResult accumulator updated with entry/insert/update/delete counts
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
        throws IOException, XmlPullParserException, RemoteException, OperationApplicationException, ParseException {
    final CAPFeedParser feedParser = new CAPFeedParser();
    final ContentResolver contentResolver = getContext().getContentResolver();
    final List<CAPFeedParser.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");
    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries, keyed by server-side entry id.
    HashMap<String, CAPFeedParser.Entry> entryMap = new HashMap<String, CAPFeedParser.Entry>();
    for (CAPFeedParser.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items currently in the local database.
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;

    // Find stale data
    int id;
    String entryId;
    String title;
    String description;
    String headline;
    String url;
    String areas;
    String issued;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;
        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        title = c.getString(COLUMN_TITLE);
        description = c.getString(COLUMN_DESCRIPTION);
        headline = c.getString(COLUMN_HEADLINE);
        areas = c.getString(COLUMN_AREAS);
        url = c.getString(COLUMN_LINK);
        issued = c.getString(COLUMN_ISSUED);
        CAPFeedParser.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                    .appendPath(Integer.toString(id)).build();
            // BUGFIX: issued was compared with !=, a reference comparison on Strings
            // that is almost always true. Also compare the remaining mutable columns
            // (description/headline/areas) so changes to them are synced too.
            if ((match.title != null && !match.title.equals(title))
                    || (match.link != null && !match.link.equals(url))
                    || (match.description != null && !match.description.equals(description))
                    || (match.headline != null && !match.headline.equals(headline))
                    || (match.areas != null && !match.areas.equals(areas))
                    || (match.issued != null && !match.issued.equals(issued))) {
                // Update existing record.
                // BUGFIX: write the *incoming* (match) values; the original wrote the
                // cursor values back, which made the update a no-op.
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, match.title)
                        .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, match.description)
                        .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, match.headline)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, match.issued)
                        .withValue(FeedContract.Entry.COLUMN_NAME_LINK, match.link)
                        .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, match.areas).build());
                syncResult.stats.numUpdates++;
            }
        } else {
            // Entry doesn't exist on the server anymore. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                    .appendPath(Integer.toString(id)).build();
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Anything still in the map is new on the server: schedule inserts.
    for (CAPFeedParser.Entry e : entryMap.values()) {
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, e.title)
                .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, e.description)
                .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, e.headline)
                .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, e.issued)
                .withValue(FeedContract.Entry.COLUMN_NAME_LINK, e.link)
                .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, e.areas).build());
        syncResult.stats.numInserts++;
    }
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}
From source file:com.conferenceengineer.android.iosched.io.SessionsHandler.java
private ArrayList<ContentProviderOperation> buildContentProviderOperations(SessionsResponse sessions, SessionsResponse starredSessions, TracksResponse tracks) { // If there was no starred sessions response (e.g. there was an auth issue, // or this is a local sync), keep all the locally starred sessions. boolean retainLocallyStarredSessions = (starredSessions == null); final ArrayList<ContentProviderOperation> batch = Lists.newArrayList(); // Build lookup table for starredSessions mappings HashSet<String> starredSessionsMap = new HashSet<String>(); if (starredSessions != null) { List<SessionResponse> starredSessionList = starredSessions.getSessions(); if (starredSessionList != null) { for (SessionResponse session : starredSessionList) { String sessionId = session.getId(); starredSessionsMap.add(sessionId); }//from w ww. j a v a2 s. c om } } // Build lookup table for track mappings // Assumes that sessions can only have one track. Not guarenteed by the Conference API, // but is being enforced by conference organizer policy. 
HashMap<String, TrackResponse> trackMap = new HashMap<String, TrackResponse>(); if (tracks != null) { for (TrackResponse track : tracks.getTracks()) { List<String> sessionIds = track.getSessions(); if (sessionIds != null) { for (String sessionId : sessionIds) { trackMap.put(sessionId, track); } } } } if (sessions != null) { List<SessionResponse> sessionList = sessions.getSessions(); int numSessions = sessionList.size(); if (numSessions > 0) { LOGI(TAG, "Updating sessions data"); Set<String> starredSessionIds = new HashSet<String>(); if (retainLocallyStarredSessions) { Cursor starredSessionsCursor = mContext.getContentResolver().query(Sessions.CONTENT_STARRED_URI, new String[] { ScheduleContract.Sessions.SESSION_ID }, null, null, null); while (starredSessionsCursor.moveToNext()) { starredSessionIds.add(starredSessionsCursor.getString(0)); } starredSessionsCursor.close(); } // Clear out existing sessions batch.add(ContentProviderOperation .newDelete(ScheduleContract.addCallerIsSyncAdapterParameter(Sessions.CONTENT_URI)).build()); // Maintain a list of created session block IDs Set<String> blockIds = new HashSet<String>(); // Maintain a map of insert operations for sandbox-only blocks HashMap<String, ContentProviderOperation> sandboxBlocks = new HashMap<String, ContentProviderOperation>(); for (SessionResponse session : sessionList) { int flags = 0; String sessionId = session.getId(); if (retainLocallyStarredSessions) { flags = (starredSessionIds.contains(sessionId) ? PARSE_FLAG_FORCE_SCHEDULE_ADD : PARSE_FLAG_FORCE_SCHEDULE_REMOVE); } if (session.getFlags() != 0) { // Allow data set flags to override locally // set ones (e.g. single talk slot additions). 
flags = session.getFlags(); } if (TextUtils.isEmpty(sessionId)) { LOGW(TAG, "Found session with empty ID in API response."); continue; } // Session title String sessionTitle = session.getTitle(); String sessionSubtype = session.getSubtype(); if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) { sessionTitle = mContext.getString(R.string.codelab_title_template, sessionTitle); } // Whether or not it's in the schedule boolean inSchedule = starredSessionsMap.contains(sessionId); if ((flags & PARSE_FLAG_FORCE_SCHEDULE_ADD) != 0 || (flags & PARSE_FLAG_FORCE_SCHEDULE_REMOVE) != 0) { inSchedule = (flags & PARSE_FLAG_FORCE_SCHEDULE_ADD) != 0; } if (EVENT_TYPE_KEYNOTE.equals(sessionSubtype)) { // Keynotes are always in your schedule. inSchedule = true; } // Clean up session abstract String sessionAbstract = session.getDescription(); if (sessionAbstract != null) { sessionAbstract = sessionAbstract.replace('\r', '\n'); } // Hashtags TrackResponse track = trackMap.get(sessionId); String hashtag = null; if (track != null) { hashtag = ParserUtils.sanitizeId(track.getTitle()); } // Get block id long sessionStartTime = session.getStartTimestamp().longValue() * 1000; long sessionEndTime = session.getEndTimestamp().longValue() * 1000; String blockId = ScheduleContract.Blocks.generateBlockId(sessionStartTime, sessionEndTime); if (!blockIds.contains(blockId) && !EVENT_TYPE_SANDBOX.equals(sessionSubtype)) { // New non-sandbox block if (sandboxBlocks.containsKey(blockId)) { sandboxBlocks.remove(blockId); } String blockType; String blockTitle; if (EVENT_TYPE_KEYNOTE.equals(sessionSubtype)) { blockType = ScheduleContract.Blocks.BLOCK_TYPE_KEYNOTE; blockTitle = mContext.getString(R.string.schedule_block_title_keynote); } else if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) { blockType = ScheduleContract.Blocks.BLOCK_TYPE_CODELAB; blockTitle = mContext.getString(R.string.schedule_block_title_code_labs); } else if (EVENT_TYPE_OFFICE_HOURS.equals(sessionSubtype)) { blockType = 
ScheduleContract.Blocks.BLOCK_TYPE_OFFICE_HOURS; blockTitle = mContext.getString(R.string.schedule_block_title_office_hours); } else { blockType = ScheduleContract.Blocks.BLOCK_TYPE_SESSION; blockTitle = mContext.getString(R.string.schedule_block_title_sessions); } batch.add(ContentProviderOperation.newInsert(ScheduleContract.Blocks.CONTENT_URI) .withValue(ScheduleContract.Blocks.BLOCK_ID, blockId) .withValue(ScheduleContract.Blocks.BLOCK_TYPE, blockType) .withValue(ScheduleContract.Blocks.BLOCK_TITLE, blockTitle) .withValue(ScheduleContract.Blocks.BLOCK_START, sessionStartTime) .withValue(ScheduleContract.Blocks.BLOCK_END, sessionEndTime).build()); blockIds.add(blockId); } else if (!sandboxBlocks.containsKey(blockId) && !blockIds.contains(blockId) && EVENT_TYPE_SANDBOX.equals(sessionSubtype)) { // New sandbox-only block, add insert operation to map String blockType = ScheduleContract.Blocks.BLOCK_TYPE_SANDBOX; String blockTitle = mContext.getString(R.string.schedule_block_title_sandbox); sandboxBlocks.put(blockId, ContentProviderOperation.newInsert(ScheduleContract.Blocks.CONTENT_URI) .withValue(ScheduleContract.Blocks.BLOCK_ID, blockId) .withValue(ScheduleContract.Blocks.BLOCK_TYPE, blockType) .withValue(ScheduleContract.Blocks.BLOCK_TITLE, blockTitle) .withValue(ScheduleContract.Blocks.BLOCK_START, sessionStartTime) .withValue(ScheduleContract.Blocks.BLOCK_END, sessionEndTime).build()); } // Insert session info final ContentProviderOperation.Builder builder; if (EVENT_TYPE_SANDBOX.equals(sessionSubtype)) { // Sandbox companies go in the special sandbox table builder = ContentProviderOperation .newInsert(ScheduleContract .addCallerIsSyncAdapterParameter(ScheduleContract.Sandbox.CONTENT_URI)) .withValue(SyncColumns.UPDATED, System.currentTimeMillis()) .withValue(ScheduleContract.Sandbox.COMPANY_ID, sessionId) .withValue(ScheduleContract.Sandbox.COMPANY_NAME, sessionTitle) .withValue(ScheduleContract.Sandbox.COMPANY_DESC, sessionAbstract) 
.withValue(ScheduleContract.Sandbox.COMPANY_URL, session.getWebLink()) .withValue(ScheduleContract.Sandbox.COMPANY_LOGO_URL, session.getIconUrl()) .withValue(ScheduleContract.Sandbox.ROOM_ID, sanitizeId(session.getLocation())) .withValue(ScheduleContract.Sandbox.TRACK_ID, (track != null ? track.getId() : null)) .withValue(ScheduleContract.Sandbox.BLOCK_ID, blockId); batch.add(builder.build()); } else { // All other fields go in the normal sessions table builder = ContentProviderOperation .newInsert(ScheduleContract.addCallerIsSyncAdapterParameter(Sessions.CONTENT_URI)) .withValue(SyncColumns.UPDATED, System.currentTimeMillis()) .withValue(Sessions.SESSION_ID, sessionId) .withValue(Sessions.SESSION_TYPE, sessionSubtype) .withValue(Sessions.SESSION_LEVEL, null) // Not available .withValue(Sessions.SESSION_TITLE, sessionTitle) .withValue(Sessions.SESSION_ABSTRACT, sessionAbstract) .withValue(Sessions.SESSION_HASHTAGS, hashtag) .withValue(Sessions.SESSION_TAGS, null) // Not available .withValue(Sessions.SESSION_URL, session.getWebLink()) .withValue(Sessions.SESSION_MODERATOR_URL, null) // Not available .withValue(Sessions.SESSION_REQUIREMENTS, null) // Not available .withValue(Sessions.SESSION_STARRED, inSchedule) .withValue(Sessions.SESSION_YOUTUBE_URL, null) // Not available .withValue(Sessions.SESSION_PDF_URL, null) // Not available .withValue(Sessions.SESSION_NOTES_URL, null) // Not available .withValue(Sessions.ROOM_ID, sanitizeId(session.getLocation())) .withValue(Sessions.BLOCK_ID, blockId); batch.add(builder.build()); } // Replace all session speakers final Uri sessionSpeakersUri = Sessions.buildSpeakersDirUri(sessionId); batch.add(ContentProviderOperation .newDelete(ScheduleContract.addCallerIsSyncAdapterParameter(sessionSpeakersUri)) .build()); List<String> presenterIds = session.getPresenterIds(); if (presenterIds != null) { for (String presenterId : presenterIds) { batch.add(ContentProviderOperation.newInsert(sessionSpeakersUri) 
.withValue(SessionsSpeakers.SESSION_ID, sessionId) .withValue(SessionsSpeakers.SPEAKER_ID, presenterId).build()); } } // Add track mapping if (track != null) { String trackId = track.getId(); if (trackId != null) { final Uri sessionTracksUri = ScheduleContract.addCallerIsSyncAdapterParameter( ScheduleContract.Sessions.buildTracksDirUri(sessionId)); batch.add(ContentProviderOperation.newInsert(sessionTracksUri) .withValue(ScheduleDatabase.SessionsTracks.SESSION_ID, sessionId) .withValue(ScheduleDatabase.SessionsTracks.TRACK_ID, trackId).build()); } } // Codelabs: Add mapping to codelab table if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) { final Uri sessionTracksUri = ScheduleContract.addCallerIsSyncAdapterParameter( ScheduleContract.Sessions.buildTracksDirUri(sessionId)); batch.add(ContentProviderOperation.newInsert(sessionTracksUri) .withValue(ScheduleDatabase.SessionsTracks.SESSION_ID, sessionId) .withValue(ScheduleDatabase.SessionsTracks.TRACK_ID, "CODE_LABS").build()); } } // Insert sandbox-only blocks batch.addAll(sandboxBlocks.values()); } } return batch; }