List of usage examples for android.database Cursor moveToNext
boolean moveToNext();
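moveToNext() advances the cursor by one row and returns false once it has moved past the last row, so a freshly returned Cursor (which starts positioned before the first row) can be consumed with a simple while loop. Below is a minimal sketch of that idiom; the "people" table, the "name" column, and the SQLiteDatabase handle are placeholder names used only for illustration and do not come from the examples that follow.

import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;

import java.util.ArrayList;
import java.util.List;

public class CursorIterationExample {

    /** Reads every value of a hypothetical "name" column from a hypothetical "people" table. */
    public static List<String> readNames(SQLiteDatabase db) {
        List<String> names = new ArrayList<String>();
        // A freshly returned Cursor is positioned before the first row,
        // so the first moveToNext() lands on row 0 (if any rows exist).
        Cursor cursor = db.query("people", new String[] { "name" }, null, null, null, null, null);
        try {
            while (cursor.moveToNext()) {
                names.add(cursor.getString(0));
            }
        } finally {
            // Always release the cursor, even if reading a row throws.
            cursor.close();
        }
        return names;
    }
}

The same while (cursor.moveToNext()) loop appears throughout the examples below; the moveToFirst()/isAfterLast() pattern used in the first example is an equivalent way to walk the result set.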
From source file:gov.wa.wsdot.android.wsdot.service.MountainPassesSyncService.java
/**
 * Check the mountain pass table for any starred entries. If we find some, save them
 * to a list so we can re-star those passes after we flush the database.
 */
private List<Integer> getStarred() {
    ContentResolver resolver = getContentResolver();
    Cursor cursor = null;
    List<Integer> starred = new ArrayList<Integer>();

    try {
        cursor = resolver.query(MountainPasses.CONTENT_URI,
                new String[] { MountainPasses.MOUNTAIN_PASS_ID },
                MountainPasses.MOUNTAIN_PASS_IS_STARRED + "=?",
                new String[] { "1" },
                null);
        if (cursor != null && cursor.moveToFirst()) {
            while (!cursor.isAfterLast()) {
                starred.add(cursor.getInt(0));
                cursor.moveToNext();
            }
        }
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }

    return starred;
}
From source file:at.bitfire.davdroid.resource.LocalCollection.java
/**
 * Finds updated resources (resources which have already been uploaded, but have changed locally).
 * Updated resources are 1) dirty, and 2) already have an ETag. Only records matching sqlFilter
 * will be returned.
 *
 * @return IDs of updated resources
 * @throws LocalStorageException when the content provider couldn't be queried
 */
public long[] findUpdated() throws LocalStorageException {
    String where = entryColumnDirty() + "=1 AND " + entryColumnETag() + " IS NOT NULL";
    if (entryColumnParentID() != null)
        where += " AND " + entryColumnParentID() + "=" + String.valueOf(getId());
    if (sqlFilter != null)
        where += " AND (" + sqlFilter + ")";
    try {
        @Cleanup Cursor cursor = providerClient.query(entriesURI(),
                new String[] { entryColumnID(), entryColumnRemoteName(), entryColumnETag() },
                where, null, null);
        if (cursor == null)
            throw new LocalStorageException("Couldn't query updated records");

        long[] updated = new long[cursor.getCount()];
        for (int idx = 0; cursor.moveToNext(); idx++)
            updated[idx] = cursor.getLong(0);
        return updated;
    } catch (RemoteException ex) {
        throw new LocalStorageException(ex);
    }
}
From source file:org.noorganization.instalistsynch.controller.synch.impl.ProductSynch.java
@Override
public void indexLocal(int _groupId, Date _lastIndexTime) {
    String lastIndexTime = ISO8601Utils.format(_lastIndexTime, false, TimeZone.getTimeZone("GMT+0000")); //.concat("+0000");
    boolean isLocal = false;
    GroupAuth groupAuth = mGroupAuthDbController.getLocalGroup();
    if (groupAuth != null) {
        isLocal = groupAuth.getGroupId() == _groupId;
    }

    Cursor logCursor = mClientLogDbController.getLogsSince(lastIndexTime, mModelType);
    if (logCursor.getCount() == 0) {
        logCursor.close();
        return;
    }

    try {
        while (logCursor.moveToNext()) {
            // fetch the action type
            int actionId = logCursor.getInt(logCursor.getColumnIndex(LogInfo.COLUMN.ACTION));
            eActionType actionType = eActionType.getTypeById(actionId);

            List<ModelMapping> modelMappingList = mProductModelMapping.get(
                    ModelMapping.COLUMN.GROUP_ID + " = ? AND " + ModelMapping.COLUMN.CLIENT_SIDE_UUID + " LIKE ?",
                    new String[] { String.valueOf(_groupId),
                            logCursor.getString(logCursor.getColumnIndex(LogInfo.COLUMN.ITEM_UUID)) });
            ModelMapping modelMapping = modelMappingList.size() == 0 ? null : modelMappingList.get(0);

            switch (actionType) {
            case INSERT:
                // skip insertion because this should be decided by the user if the non local groups
                // should have access to the category, and also skip if a mapping for this case already exists!
                if (!isLocal || modelMapping != null) {
                    continue;
                }
                String clientUuid = logCursor.getString(logCursor.getColumnIndex(LogInfo.COLUMN.ITEM_UUID));
                Date clientDate = ISO8601Utils.parse(
                        logCursor.getString(logCursor.getColumnIndex(LogInfo.COLUMN.ACTION_DATE)),
                        new ParsePosition(0));
                modelMapping = new ModelMapping(null, groupAuth.getGroupId(), null, clientUuid,
                        new Date(Constants.INITIAL_DATE), clientDate, false);
                mProductModelMapping.insert(modelMapping);
                break;
            case UPDATE:
                if (modelMapping == null) {
                    Log.i(TAG, "indexLocal: the model is null but shouldn't be");
                    continue;
                }
                String timeString = logCursor.getString(logCursor.getColumnIndex(LogInfo.COLUMN.ACTION_DATE));
                clientDate = ISO8601Utils.parse(timeString, new ParsePosition(0));
                modelMapping.setLastClientChange(clientDate);
                mProductModelMapping.update(modelMapping);
                break;
            case DELETE:
                if (modelMapping == null) {
                    Log.i(TAG, "indexLocal: the model is null but shouldn't be");
                    continue;
                }
                modelMapping.setDeleted(true);
                timeString = logCursor.getString(logCursor.getColumnIndex(LogInfo.COLUMN.ACTION_DATE));
                clientDate = ISO8601Utils.parse(timeString, new ParsePosition(0));
                modelMapping.setLastClientChange(clientDate);
                mProductModelMapping.update(modelMapping);
                break;
            default:
            }
        }
    } catch (Exception e) {
        Log.e(TAG, "indexLocal: failed to index local changes", e);
    } finally {
        logCursor.close();
    }
}
From source file:at.bitfire.ical4android.AndroidEvent.java
@SuppressWarnings("Recycle")
protected void populateExceptions() throws FileNotFoundException, RemoteException {
    @Cleanup Cursor c = calendar.provider.query(calendar.syncAdapterURI(Events.CONTENT_URI),
            new String[] { Events._ID },
            Events.ORIGINAL_ID + "=?", new String[] { String.valueOf(id) }, null);
    while (c != null && c.moveToNext()) {
        long exceptionId = c.getLong(0);
        try {
            AndroidEvent exception = calendar.eventFactory.newInstance(calendar, exceptionId, null);
            event.getExceptions().add(exception.getEvent());
        } catch (CalendarStorageException e) {
            Log.e(TAG, "Couldn't find exception details, ignoring", e);
        }
    }
}
From source file:com.android.xbrowser.BookmarksPageCallbacks.java
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
    if (loader.getId() == LOADER_ACCOUNTS) {
        LoaderManager lm = getLoaderManager();
        int id = LOADER_BOOKMARKS;
        while (cursor.moveToNext()) {
            String accountName = cursor.getString(0);
            String accountType = cursor.getString(1);
            Bundle args = new Bundle();
            args.putString(ACCOUNT_NAME, accountName);
            args.putString(ACCOUNT_TYPE, accountType);
            BrowserBookmarksAdapter adapter = new BrowserBookmarksAdapter(getActivity(), VIEW_THUMBNAILS);
            mBookmarkAdapters.put(id, adapter);
            boolean expand = true;
            try {
                expand = mState.getBoolean(
                        accountName != null ? accountName : BookmarkExpandableView.LOCAL_ACCOUNT_NAME);
            } catch (JSONException e) {
                // no state for accountName
            }
            mGrid.addAccount(accountName, adapter, expand);
            lm.restartLoader(id, args, this);
            id++;
        }
        // TODO: Figure out what a reload of these means
        // Currently, a reload is triggered whenever bookmarks change
        // This is less than ideal
        // It also causes UI flickering as a new adapter is created
        // instead of re-using an existing one when the account_name is the same.
        // For now, this is a one-shot load
        getLoaderManager().destroyLoader(LOADER_ACCOUNTS);
    } else if (loader.getId() >= LOADER_BOOKMARKS) {
        BrowserBookmarksAdapter adapter = mBookmarkAdapters.get(loader.getId());
        adapter.changeCursor(cursor);
    }
}
From source file:org.mythtv.service.dvr.v26.RecordedHelperV26.java
private int load(final Context context, final LocationProfile locationProfile, final Program[] programs)
        throws RemoteException, OperationApplicationException {
    Log.d(TAG, "load : enter");

    if (null == context)
        throw new RuntimeException("ProgramGuideHelperV26 is not initialized");

    processProgramGroups(context, locationProfile, programs);

    String tag = UUID.randomUUID().toString();
    int processed = -1;
    int count = 0;

    ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
    boolean inError;

    List<Integer> channelsChecked = new ArrayList<Integer>();

    for (Program program : programs) {

        if (null != program.getRecording()
                && "livetv".equalsIgnoreCase(program.getRecording().getRecGroup())
                && !"deleted".equalsIgnoreCase(program.getRecording().getRecGroup())) {
            Log.w(TAG, "load : program has no recording or program is in livetv or deleted recording groups:"
                    + program.getTitle() + ":" + program.getSubTitle() + ":" + program.getChannel().getChanId()
                    + ":" + program.getStartTime() + ":" + program.getHostName() + " ("
                    + (null == program.getRecording() ? "No Recording"
                            : ("livetv".equalsIgnoreCase(program.getRecording().getRecGroup()) ? "LiveTv" : "Deleted"))
                    + ")");
            continue;
        }

        if (null == program.getStartTime() || null == program.getEndTime()) {
            Log.w(TAG, "load : null starttime and or endtime");
            inError = true;
        } else {
            inError = false;
        }

        ProgramHelperV26.getInstance().processProgram(context, locationProfile,
                ProgramConstants.CONTENT_URI_RECORDED, ProgramConstants.TABLE_NAME_RECORDED, ops, program, tag);
        count++;

        if (null != program.getChannel()) {
            if (!channelsChecked.contains(program.getChannel().getChanId())) {
                if (null == mChannelDaoHelper.findByChannelId(context, locationProfile,
                        Long.parseLong(String.valueOf(program.getChannel().getChanId())))) {
                    ChannelHelperV26.getInstance().processChannel(context, locationProfile, ops,
                            program.getChannel());
                    count++;
                }
                channelsChecked.add(program.getChannel().getChanId());
            }
        }

        if (!inError && null != program.getRecording()) {
            if (program.getRecording().getRecordId() > 0) {
                RecordingHelperV26.getInstance().processRecording(context, locationProfile, ops,
                        RecordingConstants.ContentDetails.RECORDED, program, tag);
                count++;
            }
        }

        if (count > BATCH_COUNT_LIMIT) {
            Log.i(TAG, "load : applying batch for '" + count + "' transactions");
            processBatch(context, ops, processed, count);
            count = 0;
        }
    }

    if (!ops.isEmpty()) {
        Log.i(TAG, "load : applying batch for '" + count + "' transactions");
        processBatch(context, ops, processed, count);
    }

    ProgramHelperV26.getInstance().findAllPrograms(context, locationProfile,
            ProgramConstants.CONTENT_URI_RECORDED, ProgramConstants.TABLE_NAME_RECORDED);

    Log.v(TAG, "load : remove deleted recording live streams");
    String[] deletedProjection = new String[] { ProgramConstants.FIELD_CHANNEL_ID,
            ProgramConstants.FIELD_START_TIME, ProgramConstants.FIELD_TITLE, ProgramConstants.FIELD_SUB_TITLE,
            ProgramConstants.FIELD_LAST_MODIFIED_DATE };
    String deletedSelection = "not " + ProgramConstants.TABLE_NAME_RECORDED + "."
            + ProgramConstants.FIELD_LAST_MODIFIED_TAG + " = ?";
    String[] deletedSelectionArgs = new String[] { tag };
    deletedSelection = appendLocationHostname(context, locationProfile, deletedSelection,
            ProgramConstants.TABLE_NAME_RECORDED);

    int deleteCount = 0;
    Cursor deletedCursor = context.getContentResolver().query(ProgramConstants.CONTENT_URI_RECORDED,
            deletedProjection, deletedSelection, deletedSelectionArgs, null);
    while (deletedCursor.moveToNext()) {
        long channelId = deletedCursor.getLong(deletedCursor.getColumnIndex(ProgramConstants.FIELD_CHANNEL_ID));
        long startTime = deletedCursor.getLong(deletedCursor.getColumnIndex(ProgramConstants.FIELD_START_TIME));

        // Delete any live stream details
        String liveStreamSelection = LiveStreamConstants.FIELD_CHAN_ID + " = ? AND "
                + LiveStreamConstants.FIELD_START_TIME + " = ?";
        String[] liveStreamSelectionArgs = new String[] { String.valueOf(channelId), String.valueOf(startTime) };
        liveStreamSelection = appendLocationHostname(context, locationProfile, liveStreamSelection,
                LiveStreamConstants.TABLE_NAME);

        Cursor liveStreamCursor = context.getContentResolver().query(LiveStreamConstants.CONTENT_URI, null,
                liveStreamSelection, liveStreamSelectionArgs, null);
        if (liveStreamCursor.moveToFirst()) {
            Log.v(TAG, "load : remove live stream");
            int liveStreamId = liveStreamCursor.getInt(liveStreamCursor
                    .getColumnIndex(LiveStreamConstants.TABLE_NAME + "." + LiveStreamConstants.FIELD_ID));

            RemoveStreamTask removeStreamTask = new RemoveStreamTask(context, locationProfile);
            removeStreamTask.execute(liveStreamId);
        }
        liveStreamCursor.close();

        deleteCount++;
    }
    deletedCursor.close();
    Log.v(TAG, "load : queued deleted programs - " + deleteCount);

    ProgramHelperV26.getInstance().deletePrograms(context, locationProfile,
            ProgramConstants.CONTENT_URI_RECORDED, ProgramConstants.TABLE_NAME_RECORDED, tag);
    // RecordingHelperV26.getInstance().deleteRecordings( context, locationProfile, ops, RecordingConstants.ContentDetails.RECORDED, lastModified );

    if (!ops.isEmpty()) {
        Log.i(TAG, "load : applying delete batch for transactions");
        processBatch(context, ops, processed, count);
    }

    // Log.v( TAG, "load : exit" );
    return processed;
}
From source file:org.mythtv.service.dvr.v25.RecordedHelperV25.java
private int load(final Context context, final LocationProfile locationProfile, final Program[] programs)
        throws RemoteException, OperationApplicationException {
    Log.d(TAG, "load : enter");

    if (null == context)
        throw new RuntimeException("RecordedHelperV25 is not initialized");

    processProgramGroups(context, locationProfile, programs);

    String tag = UUID.randomUUID().toString();
    int processed = -1;
    int count = 0;

    ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
    boolean inError;

    List<Integer> channelsChecked = new ArrayList<Integer>();

    for (Program program : programs) {

        if (null != program.getRecording()
                && "livetv".equalsIgnoreCase(program.getRecording().getRecGroup())
                && !"deleted".equalsIgnoreCase(program.getRecording().getRecGroup())) {
            Log.w(TAG, "load : program has no recording or program is in livetv or deleted recording groups:"
                    + program.getTitle() + ":" + program.getSubTitle() + ":" + program.getChannel().getChanId()
                    + ":" + program.getStartTime() + ":" + program.getHostName() + " ("
                    + (null == program.getRecording() ? "No Recording"
                            : ("livetv".equalsIgnoreCase(program.getRecording().getRecGroup()) ? "LiveTv" : "Deleted"))
                    + ")");
            continue;
        }

        if (null == program.getStartTime() || null == program.getEndTime()) {
            Log.w(TAG, "load : null starttime and or endtime");
            inError = true;
        } else {
            inError = false;
        }

        ProgramHelperV25.getInstance().processProgram(context, locationProfile,
                ProgramConstants.CONTENT_URI_RECORDED, ProgramConstants.TABLE_NAME_RECORDED, ops, program, tag);
        count++;

        if (null != program.getChannel()) {
            if (!channelsChecked.contains(program.getChannel().getChanId())) {
                if (null == mChannelDaoHelper.findByChannelId(context, locationProfile,
                        Long.parseLong(String.valueOf(program.getChannel().getChanId())))) {
                    ChannelHelperV25.getInstance().processChannel(context, locationProfile, ops,
                            program.getChannel());
                    count++;
                }
                channelsChecked.add(program.getChannel().getChanId());
            }
        }

        if (!inError && null != program.getRecording()) {
            if (program.getRecording().getRecordId() > 0) {
                RecordingHelperV25.getInstance().processRecording(context, locationProfile, ops,
                        RecordingConstants.ContentDetails.RECORDED, program, tag);
                count++;
            }
        }

        if (count > BATCH_COUNT_LIMIT) {
            Log.i(TAG, "load : applying batch for '" + count + "' transactions");
            processBatch(context, ops, processed, count);
            count = 0;
        }
    }

    if (!ops.isEmpty()) {
        Log.i(TAG, "load : applying final batch for '" + count + "' transactions");
        processBatch(context, ops, processed, count);
    }

    ProgramHelperV25.getInstance().findAllPrograms(context, locationProfile,
            ProgramConstants.CONTENT_URI_RECORDED, ProgramConstants.TABLE_NAME_RECORDED);

    Log.v(TAG, "load : remove deleted recording live streams");
    String[] deletedProjection = new String[] { ProgramConstants.FIELD_CHANNEL_ID,
            ProgramConstants.FIELD_START_TIME, ProgramConstants.FIELD_TITLE, ProgramConstants.FIELD_SUB_TITLE,
            ProgramConstants.FIELD_LAST_MODIFIED_DATE };
    String deletedSelection = "not " + ProgramConstants.TABLE_NAME_RECORDED + "."
            + ProgramConstants.FIELD_LAST_MODIFIED_TAG + " = ?";
    String[] deletedSelectionArgs = new String[] { tag };
    deletedSelection = appendLocationHostname(context, locationProfile, deletedSelection,
            ProgramConstants.TABLE_NAME_RECORDED);

    int deleteCount = 0;
    Cursor deletedCursor = context.getContentResolver().query(ProgramConstants.CONTENT_URI_RECORDED,
            deletedProjection, deletedSelection, deletedSelectionArgs, null);
    while (deletedCursor.moveToNext()) {
        long channelId = deletedCursor.getLong(deletedCursor.getColumnIndex(ProgramConstants.FIELD_CHANNEL_ID));
        long startTime = deletedCursor.getLong(deletedCursor.getColumnIndex(ProgramConstants.FIELD_START_TIME));

        // Delete any live stream details
        String liveStreamSelection = LiveStreamConstants.FIELD_CHAN_ID + " = ? AND "
                + LiveStreamConstants.FIELD_START_TIME + " = ?";
        String[] liveStreamSelectionArgs = new String[] { String.valueOf(channelId), String.valueOf(startTime) };
        liveStreamSelection = appendLocationHostname(context, locationProfile, liveStreamSelection,
                LiveStreamConstants.TABLE_NAME);

        Cursor liveStreamCursor = context.getContentResolver().query(LiveStreamConstants.CONTENT_URI, null,
                liveStreamSelection, liveStreamSelectionArgs, null);
        if (liveStreamCursor.moveToFirst()) {
            Log.v(TAG, "load : remove live stream");
            int liveStreamId = liveStreamCursor.getInt(liveStreamCursor
                    .getColumnIndex(LiveStreamConstants.TABLE_NAME + "." + LiveStreamConstants.FIELD_ID));

            RemoveStreamTask removeStreamTask = new RemoveStreamTask(context, locationProfile);
            removeStreamTask.execute(liveStreamId);
        }
        liveStreamCursor.close();

        deleteCount++;
    }
    deletedCursor.close();
    Log.v(TAG, "load : queued deleted programs - " + deleteCount);

    ProgramHelperV25.getInstance().deletePrograms(context, locationProfile,
            ProgramConstants.CONTENT_URI_RECORDED, ProgramConstants.TABLE_NAME_RECORDED, tag);
    // RecordingHelperV25.getInstance().deleteRecordings( context, locationProfile, ops, RecordingConstants.ContentDetails.RECORDED, lastModified );

    if (!ops.isEmpty()) {
        Log.i(TAG, "load : applying delete batch for transactions");
        processBatch(context, ops, processed, count);
    }

    // Log.v( TAG, "load : exit" );
    return processed;
}
From source file:com.conferenceengineer.android.iosched.io.SessionsHandler.java
private ArrayList<ContentProviderOperation> buildContentProviderOperations(SessionsResponse sessions,
        SessionsResponse starredSessions, TracksResponse tracks) {

    // If there was no starred sessions response (e.g. there was an auth issue,
    // or this is a local sync), keep all the locally starred sessions.
    boolean retainLocallyStarredSessions = (starredSessions == null);

    final ArrayList<ContentProviderOperation> batch = Lists.newArrayList();

    // Build lookup table for starredSessions mappings
    HashSet<String> starredSessionsMap = new HashSet<String>();
    if (starredSessions != null) {
        List<SessionResponse> starredSessionList = starredSessions.getSessions();
        if (starredSessionList != null) {
            for (SessionResponse session : starredSessionList) {
                String sessionId = session.getId();
                starredSessionsMap.add(sessionId);
            }
        }
    }

    // Build lookup table for track mappings
    // Assumes that sessions can only have one track. Not guaranteed by the Conference API,
    // but is being enforced by conference organizer policy.
    HashMap<String, TrackResponse> trackMap = new HashMap<String, TrackResponse>();
    if (tracks != null) {
        for (TrackResponse track : tracks.getTracks()) {
            List<String> sessionIds = track.getSessions();
            if (sessionIds != null) {
                for (String sessionId : sessionIds) {
                    trackMap.put(sessionId, track);
                }
            }
        }
    }

    if (sessions != null) {
        List<SessionResponse> sessionList = sessions.getSessions();
        int numSessions = sessionList.size();

        if (numSessions > 0) {
            LOGI(TAG, "Updating sessions data");

            Set<String> starredSessionIds = new HashSet<String>();
            if (retainLocallyStarredSessions) {
                Cursor starredSessionsCursor = mContext.getContentResolver().query(Sessions.CONTENT_STARRED_URI,
                        new String[] { ScheduleContract.Sessions.SESSION_ID }, null, null, null);
                while (starredSessionsCursor.moveToNext()) {
                    starredSessionIds.add(starredSessionsCursor.getString(0));
                }
                starredSessionsCursor.close();
            }

            // Clear out existing sessions
            batch.add(ContentProviderOperation
                    .newDelete(ScheduleContract.addCallerIsSyncAdapterParameter(Sessions.CONTENT_URI)).build());

            // Maintain a list of created session block IDs
            Set<String> blockIds = new HashSet<String>();

            // Maintain a map of insert operations for sandbox-only blocks
            HashMap<String, ContentProviderOperation> sandboxBlocks = new HashMap<String, ContentProviderOperation>();

            for (SessionResponse session : sessionList) {
                int flags = 0;
                String sessionId = session.getId();
                if (retainLocallyStarredSessions) {
                    flags = (starredSessionIds.contains(sessionId) ? PARSE_FLAG_FORCE_SCHEDULE_ADD
                            : PARSE_FLAG_FORCE_SCHEDULE_REMOVE);
                }
                if (session.getFlags() != 0) {
                    // Allow data set flags to override locally
                    // set ones (e.g. single talk slot additions).
                    flags = session.getFlags();
                }
                if (TextUtils.isEmpty(sessionId)) {
                    LOGW(TAG, "Found session with empty ID in API response.");
                    continue;
                }

                // Session title
                String sessionTitle = session.getTitle();
                String sessionSubtype = session.getSubtype();
                if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) {
                    sessionTitle = mContext.getString(R.string.codelab_title_template, sessionTitle);
                }

                // Whether or not it's in the schedule
                boolean inSchedule = starredSessionsMap.contains(sessionId);
                if ((flags & PARSE_FLAG_FORCE_SCHEDULE_ADD) != 0
                        || (flags & PARSE_FLAG_FORCE_SCHEDULE_REMOVE) != 0) {
                    inSchedule = (flags & PARSE_FLAG_FORCE_SCHEDULE_ADD) != 0;
                }

                if (EVENT_TYPE_KEYNOTE.equals(sessionSubtype)) {
                    // Keynotes are always in your schedule.
                    inSchedule = true;
                }

                // Clean up session abstract
                String sessionAbstract = session.getDescription();
                if (sessionAbstract != null) {
                    sessionAbstract = sessionAbstract.replace('\r', '\n');
                }

                // Hashtags
                TrackResponse track = trackMap.get(sessionId);
                String hashtag = null;
                if (track != null) {
                    hashtag = ParserUtils.sanitizeId(track.getTitle());
                }

                // Get block id
                long sessionStartTime = session.getStartTimestamp().longValue() * 1000;
                long sessionEndTime = session.getEndTimestamp().longValue() * 1000;
                String blockId = ScheduleContract.Blocks.generateBlockId(sessionStartTime, sessionEndTime);

                if (!blockIds.contains(blockId) && !EVENT_TYPE_SANDBOX.equals(sessionSubtype)) {
                    // New non-sandbox block
                    if (sandboxBlocks.containsKey(blockId)) {
                        sandboxBlocks.remove(blockId);
                    }
                    String blockType;
                    String blockTitle;
                    if (EVENT_TYPE_KEYNOTE.equals(sessionSubtype)) {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_KEYNOTE;
                        blockTitle = mContext.getString(R.string.schedule_block_title_keynote);
                    } else if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_CODELAB;
                        blockTitle = mContext.getString(R.string.schedule_block_title_code_labs);
                    } else if (EVENT_TYPE_OFFICE_HOURS.equals(sessionSubtype)) {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_OFFICE_HOURS;
                        blockTitle = mContext.getString(R.string.schedule_block_title_office_hours);
                    } else {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_SESSION;
                        blockTitle = mContext.getString(R.string.schedule_block_title_sessions);
                    }
                    batch.add(ContentProviderOperation.newInsert(ScheduleContract.Blocks.CONTENT_URI)
                            .withValue(ScheduleContract.Blocks.BLOCK_ID, blockId)
                            .withValue(ScheduleContract.Blocks.BLOCK_TYPE, blockType)
                            .withValue(ScheduleContract.Blocks.BLOCK_TITLE, blockTitle)
                            .withValue(ScheduleContract.Blocks.BLOCK_START, sessionStartTime)
                            .withValue(ScheduleContract.Blocks.BLOCK_END, sessionEndTime).build());
                    blockIds.add(blockId);

                } else if (!sandboxBlocks.containsKey(blockId) && !blockIds.contains(blockId)
                        && EVENT_TYPE_SANDBOX.equals(sessionSubtype)) {
                    // New sandbox-only block, add insert operation to map
                    String blockType = ScheduleContract.Blocks.BLOCK_TYPE_SANDBOX;
                    String blockTitle = mContext.getString(R.string.schedule_block_title_sandbox);
                    sandboxBlocks.put(blockId,
                            ContentProviderOperation.newInsert(ScheduleContract.Blocks.CONTENT_URI)
                                    .withValue(ScheduleContract.Blocks.BLOCK_ID, blockId)
                                    .withValue(ScheduleContract.Blocks.BLOCK_TYPE, blockType)
                                    .withValue(ScheduleContract.Blocks.BLOCK_TITLE, blockTitle)
                                    .withValue(ScheduleContract.Blocks.BLOCK_START, sessionStartTime)
                                    .withValue(ScheduleContract.Blocks.BLOCK_END, sessionEndTime).build());
                }

                // Insert session info
                final ContentProviderOperation.Builder builder;
                if (EVENT_TYPE_SANDBOX.equals(sessionSubtype)) {
                    // Sandbox companies go in the special sandbox table
                    builder = ContentProviderOperation
                            .newInsert(ScheduleContract
                                    .addCallerIsSyncAdapterParameter(ScheduleContract.Sandbox.CONTENT_URI))
                            .withValue(SyncColumns.UPDATED, System.currentTimeMillis())
                            .withValue(ScheduleContract.Sandbox.COMPANY_ID, sessionId)
                            .withValue(ScheduleContract.Sandbox.COMPANY_NAME, sessionTitle)
                            .withValue(ScheduleContract.Sandbox.COMPANY_DESC, sessionAbstract)
                            .withValue(ScheduleContract.Sandbox.COMPANY_URL, session.getWebLink())
                            .withValue(ScheduleContract.Sandbox.COMPANY_LOGO_URL, session.getIconUrl())
                            .withValue(ScheduleContract.Sandbox.ROOM_ID, sanitizeId(session.getLocation()))
                            .withValue(ScheduleContract.Sandbox.TRACK_ID, (track != null ? track.getId() : null))
                            .withValue(ScheduleContract.Sandbox.BLOCK_ID, blockId);
                    batch.add(builder.build());
                } else {
                    // All other fields go in the normal sessions table
                    builder = ContentProviderOperation
                            .newInsert(ScheduleContract.addCallerIsSyncAdapterParameter(Sessions.CONTENT_URI))
                            .withValue(SyncColumns.UPDATED, System.currentTimeMillis())
                            .withValue(Sessions.SESSION_ID, sessionId)
                            .withValue(Sessions.SESSION_TYPE, sessionSubtype)
                            .withValue(Sessions.SESSION_LEVEL, null) // Not available
                            .withValue(Sessions.SESSION_TITLE, sessionTitle)
                            .withValue(Sessions.SESSION_ABSTRACT, sessionAbstract)
                            .withValue(Sessions.SESSION_HASHTAGS, hashtag)
                            .withValue(Sessions.SESSION_TAGS, null) // Not available
                            .withValue(Sessions.SESSION_URL, session.getWebLink())
                            .withValue(Sessions.SESSION_MODERATOR_URL, null) // Not available
                            .withValue(Sessions.SESSION_REQUIREMENTS, null) // Not available
                            .withValue(Sessions.SESSION_STARRED, inSchedule)
                            .withValue(Sessions.SESSION_YOUTUBE_URL, null) // Not available
                            .withValue(Sessions.SESSION_PDF_URL, null) // Not available
                            .withValue(Sessions.SESSION_NOTES_URL, null) // Not available
                            .withValue(Sessions.ROOM_ID, sanitizeId(session.getLocation()))
                            .withValue(Sessions.BLOCK_ID, blockId);
                    batch.add(builder.build());
                }

                // Replace all session speakers
                final Uri sessionSpeakersUri = Sessions.buildSpeakersDirUri(sessionId);
                batch.add(ContentProviderOperation
                        .newDelete(ScheduleContract.addCallerIsSyncAdapterParameter(sessionSpeakersUri)).build());
                List<String> presenterIds = session.getPresenterIds();
                if (presenterIds != null) {
                    for (String presenterId : presenterIds) {
                        batch.add(ContentProviderOperation.newInsert(sessionSpeakersUri)
                                .withValue(SessionsSpeakers.SESSION_ID, sessionId)
                                .withValue(SessionsSpeakers.SPEAKER_ID, presenterId).build());
                    }
                }

                // Add track mapping
                if (track != null) {
                    String trackId = track.getId();
                    if (trackId != null) {
                        final Uri sessionTracksUri = ScheduleContract.addCallerIsSyncAdapterParameter(
                                ScheduleContract.Sessions.buildTracksDirUri(sessionId));
                        batch.add(ContentProviderOperation.newInsert(sessionTracksUri)
                                .withValue(ScheduleDatabase.SessionsTracks.SESSION_ID, sessionId)
                                .withValue(ScheduleDatabase.SessionsTracks.TRACK_ID, trackId).build());
                    }
                }

                // Codelabs: Add mapping to codelab table
                if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) {
                    final Uri sessionTracksUri = ScheduleContract.addCallerIsSyncAdapterParameter(
                            ScheduleContract.Sessions.buildTracksDirUri(sessionId));
                    batch.add(ContentProviderOperation.newInsert(sessionTracksUri)
                            .withValue(ScheduleDatabase.SessionsTracks.SESSION_ID, sessionId)
                            .withValue(ScheduleDatabase.SessionsTracks.TRACK_ID, "CODE_LABS").build());
                }
            }

            // Insert sandbox-only blocks
            batch.addAll(sandboxBlocks.values());
        }
    }

    return batch;
}
From source file:com.navjagpal.fileshare.WebServer.java
private String getFileListing(Uri uri) {
    int folderId = Integer.parseInt(uri.getPathSegments().get(1));
    Uri fileUri = FileSharingProvider.Files.CONTENT_URI;
    String where = FileSharingProvider.Files.Columns.FOLDER_ID + "=" + folderId;
    Cursor c = mContext.getContentResolver().query(fileUri, null, where, null, null);
    int nameIndex = c.getColumnIndexOrThrow(FileSharingProvider.Files.Columns.DISPLAY_NAME);
    int idIndex = c.getColumnIndexOrThrow(FileSharingProvider.Files.Columns._ID);
    String s = "";
    boolean hasMusic = false;
    while (c.moveToNext()) {
        String name = c.getString(nameIndex);
        int id = c.getInt(idIndex);
        s += fileToLink(name, id) + "<br/>";
        if (name.endsWith(".mp3")) {
            hasMusic = true;
        }
    }
    c.close();
    if (hasMusic) {
        s += getPlaylistLink(folderId) + "<br/>";
    }
    s += getZipLink(folderId) + "<br/>";
    return s;
}
From source file:com.example.android.basicsyncadapter.SyncAdapter.java
/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *       database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult)
        throws IOException, XmlPullParserException, RemoteException, OperationApplicationException, ParseException {
    //final FeedParser feedParser = new FeedParser();
    final CAPFeedParser feedParser = new CAPFeedParser();
    final ContentResolver contentResolver = getContext().getContentResolver();

    //Log.i(TAG, "Parsing stream as Atom feed");
    final List<CAPFeedParser.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, CAPFeedParser.Entry> entryMap = new HashMap<String, CAPFeedParser.Entry>();
    for (CAPFeedParser.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items
    //Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    //Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String title;
    String description;
    String headline;
    String url;
    String areas;
    String issued;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;
        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        title = c.getString(COLUMN_TITLE);
        description = c.getString(COLUMN_DESCRIPTION);
        headline = c.getString(COLUMN_HEADLINE);
        areas = c.getString(COLUMN_AREAS);
        url = c.getString(COLUMN_LINK);
        issued = c.getString(COLUMN_ISSUED);
        CAPFeedParser.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon()
                    .appendPath(Integer.toString(id)).build();
            if ((match.title != null && !match.title.equals(title))
                    || (match.link != null && !match.link.equals(url))
                    || (match.issued != null && !match.issued.equals(issued))) {
                // Update existing record
                //Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, title)
                        .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, description)
                        .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, headline)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, issued)
                        .withValue(FeedContract.Entry.COLUMN_NAME_LINK, url)
                        .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, areas).build());
                syncResult.stats.numUpdates++;
            } else {
                //Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
            //Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (CAPFeedParser.Entry e : entryMap.values()) {
        //Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, e.title)
                .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, e.description)
                .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, e.headline)
                .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, e.issued)
                .withValue(FeedContract.Entry.COLUMN_NAME_LINK, e.link)
                .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, e.areas).build());
        syncResult.stats.numInserts++;
    }

    //Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null,   // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}