List of usage examples for android.database.sqlite SQLiteDatabase update
public int update(String table, ContentValues values, String whereClause, String[] whereArgs)
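All of the examples below follow the same pattern: build a ContentValues holding the columns to change, then pass a selection (whereClause) and optional selection arguments (whereArgs). Here is a minimal sketch of that call pattern. The table name "notes", the columns "_id" and "title", the helper dbHelper, and the variable noteId are hypothetical placeholders for illustration only and do not come from the projects listed; update() returns the number of rows affected, and each ? in the where clause is bound to the corresponding entry of whereArgs.

SQLiteDatabase db = dbHelper.getWritableDatabase();   // dbHelper: an existing SQLiteOpenHelper (assumed)
ContentValues values = new ContentValues();
values.put("title", "New title");                     // column -> new value
int rows = db.update("notes",                         // table (hypothetical)
        values,                                       // columns to set
        "_id = ?",                                    // whereClause with a ? placeholder
        new String[] { String.valueOf(noteId) });     // whereArgs bound to the ?
db.close();

Binding values through whereArgs, rather than concatenating them into the where string as some of the examples below do (e.g. "_id=" + id), lets SQLite escape the values and avoids malformed queries when a value contains quotes.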
From source file:org.frc836.database.DBSyncService.java
private void processPits(JSONArray pits) {
    // TODO could be abstracted further
    try {
        for (int i = 0; i < pits.length(); i++) {
            JSONObject row = pits.getJSONObject(i);
            Action action = Action.UPDATE;
            if (row.getInt(PitStats.COLUMN_NAME_INVALID) != 0) {
                action = Action.DELETE;
            }
            ContentValues vals = PitStats.getNewPitStats().jsonToCV(row);
            // check if this entry exists already
            String[] projection = { PitStats.COLUMN_NAME_ID, PitStats.COLUMN_NAME_INVALID };
            String[] where = { vals.getAsString(PitStats.COLUMN_NAME_TEAM_ID) };
            synchronized (ScoutingDBHelper.lock) {
                SQLiteDatabase db = ScoutingDBHelper.getInstance().getWritableDatabase();
                Cursor c = db.query(PitStats.TABLE_NAME, projection, // select
                        PitStats.COLUMN_NAME_TEAM_ID + "=?", where,
                        null, // don't group
                        null, // don't filter
                        null, // don't order
                        "0,1"); // limit to 1
                try {
                    if (!c.moveToFirst()) {
                        if (action == Action.UPDATE)
                            action = Action.INSERT;
                        else if (action == Action.DELETE)
                            action = Action.NOTHING;
                    } else {
                        int invalid = c.getInt(c.getColumnIndexOrThrow(PitStats.COLUMN_NAME_INVALID));
                        if (invalid > 0) // Current entry has not been sent to server, don't overwrite
                            action = Action.NOTHING;
                    }
                    switch (action) {
                    case UPDATE:
                        db.update(PitStats.TABLE_NAME, vals, PitStats.COLUMN_NAME_TEAM_ID + " = ?", where);
                        break;
                    case INSERT:
                        db.insert(PitStats.TABLE_NAME, null, vals);
                        break;
                    case DELETE:
                        db.delete(PitStats.TABLE_NAME, PitStats.COLUMN_NAME_TEAM_ID + " = ?", where);
                        break;
                    default:
                    }
                } finally {
                    if (c != null)
                        c.close();
                    ScoutingDBHelper.getInstance().close();
                }
            }
        }
    } catch (JSONException e) {
        // TODO handle error
    }
}
From source file:com.openatk.planting.MainActivity.java
@Override
public void EditJobDelete() {
    // Find and delete job if exists
    if (currentJob != null) {
        SQLiteDatabase database = dbHelper.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(TableJobs.COL_DELETED, 1);
        values.put(TableJobs.COL_HAS_CHANGED, 1);
        values.put(TableJobs.COL_DATE_CHANGED, DatabaseHelper.dateToStringUTC(new Date()));
        String where = TableJobs.COL_ID + " = " + Integer.toString(currentJob.getId()) + " AND "
                + TableJobs.COL_DELETED + " = 0";
        database.update(TableJobs.TABLE_NAME, values, where, null);
        dbHelper.close();
        currentJob = null;
        drawMap();
        if (this.currentField == null) {
            // Close edit
            hideEdit(true);
        } else {
            if (this.fragmentEditField != null)
                this.fragmentEditField.refreshData();
        }
        if (this.fragmentListView != null)
            this.fragmentListView.getData();
        this.trelloController.syncDelayed();
    }
}
From source file:net.gaast.giggity.Db.java
private void updateSchedule(SQLiteDatabase db, Seed.Schedule sched, int last_version) {
    if (sched.start.equals(sched.end)) {
        /* If it's one day only, avoid having start == end. Pretend it's from 6:00 'til 18:00 or something. */
        sched.start.setHours(6);
        sched.end.setHours(18);
    } else {
        /* For different days, pretend the event runs from noon to noon. In both cases, we'll have exact times
         * once we load the schedule for the first time. */
        sched.start.setHours(12);
        sched.end.setHours(12);
    }
    Cursor q = db.rawQuery("Select sch_id From schedule Where sch_url = ?", new String[] { sched.url });
    Log.d("cursor", "" + q.getCount());
    if (sched.version > last_version && q.getCount() == 0) {
        ContentValues row = new ContentValues();
        if (sched.id != null)
            row.put("sch_id_s", sched.id);
        else
            row.put("sch_id_s", Schedule.hashify(sched.url));
        row.put("sch_url", sched.url);
        row.put("sch_title", sched.title);
        row.put("sch_atime", sched.start.getTime() / 1000);
        row.put("sch_start", sched.start.getTime() / 1000);
        row.put("sch_end", sched.end.getTime() / 1000);
        row.put("sch_metadata", sched.metadata);
        db.insert("schedule", null, row);
    } else if (q.getCount() == 1) {
        q.moveToNext();
        if (oldDbVer < 8) {
            /* We're upgrading from < 8 so we have to backfill the start/end columns. */
            ContentValues row = new ContentValues();
            row.put("sch_start", sched.start.getTime() / 1000);
            row.put("sch_end", sched.end.getTime() / 1000);
            db.update("schedule", row, "sch_id = ?", new String[] { q.getString(0) });
        }
        /* Always refresh the metadata, seedfile is authoritative. */
        if (sched.metadata != "") {
            ContentValues row = new ContentValues();
            row.put("sch_metadata", sched.metadata);
            db.update("schedule", row, "sch_id = ?", new String[] { q.getString(0) });
        }
    }
    q.close();
}
From source file:org.ttrssreader.controllers.DBHelper.java
/**
 * mark remote files with given IDs as non cached (cached=0)
 *
 * @param rfIds IDs of remote files to be marked as non-cached
 */
public void markRemoteFilesNonCached(Collection<Integer> rfIds) {
    if (!isDBAvailable())
        return;

    SQLiteDatabase db = getOpenHelper().getWritableDatabase();
    writeLock(true);
    db.beginTransaction();
    try {
        ContentValues cv = new ContentValues(1);
        cv.put("cached", 0);
        for (String ids : StringSupport.convertListToString(rfIds, 1000)) {
            db.update(TABLE_REMOTEFILES, cv, "id in (" + ids + ")", null);
        }
        db.setTransactionSuccessful();
    } finally {
        db.endTransaction();
        writeLock(false);
    }
}
From source file:org.ttrssreader.controllers.DBHelper.java
/**
 * mark given remote file as cached/uncached and optionally specify its file size
 *
 * @param url    remote file URL
 * @param cached the cached flag
 * @param size   file size; may be {@code null}, in which case it will not be updated in DB
 */
public void markRemoteFileCached(String url, boolean cached, Long size) {
    if (!isDBAvailable())
        return;

    SQLiteDatabase db = getOpenHelper().getWritableDatabase();
    writeLock(true);
    db.beginTransaction();
    try {
        ContentValues cv = new ContentValues(2);
        cv.put("cached", cached);
        if (size != null) {
            cv.put("length", size);
        }
        db.update(TABLE_REMOTEFILES, cv, "url=?", new String[] { url });
        db.setTransactionSuccessful();
    } finally {
        db.endTransaction();
        writeLock(false);
    }
}
From source file:org.ttrssreader.controllers.DBHelper.java
/**
 * remove the specified mark in the temporary mark table for the specified
 * articles and then clean up this table
 *
 * @param ids  article IDs whose mark should be reset
 * @param mark article mark to be reset
 */
void setMarked(Map<Integer, String> ids, String mark) {
    if (!isDBAvailable())
        return;

    SQLiteDatabase db = getOpenHelper().getWritableDatabase();
    writeLock(true);
    db.beginTransaction();
    try {
        ContentValues cv = new ContentValues(1);
        for (String idList : StringSupport.convertListToString(ids.keySet(), 1000)) {
            cv.putNull(mark);
            db.update(TABLE_MARK, cv, "id IN(" + idList + ")", null);
            db.delete(TABLE_MARK, "isUnread IS null AND isStarred IS null AND isPublished IS null", null);
        }

        // Insert notes afterwards and only if given note is not null
        cv = new ContentValues(1);
        for (Integer id : ids.keySet()) {
            String note = ids.get(id);
            if (note == null || note.equals(""))
                continue;

            cv.put(MARK_NOTE, note);
            db.update(TABLE_MARK, cv, "id=" + id, null);
        }

        db.setTransactionSuccessful();
    } finally {
        db.endTransaction();
        writeLock(false);
    }
}
From source file:org.ttrssreader.controllers.DBHelper.java
/**
 * Set unread counters for feeds and categories according to the real amount of unread articles.
 */
void calculateCounters() {
    if (!isDBAvailable())
        return;

    long time = System.currentTimeMillis();
    int total = 0;
    Cursor c = null;

    SQLiteDatabase db = getOpenHelper().getWritableDatabase();
    writeLock(true);
    db.beginTransaction();
    try {
        ContentValues cv = new ContentValues(1);

        // First of all, reset all feeds and all categories to unread=0
        cv.put("unread", 0);
        db.update(TABLE_FEEDS, cv, null, null);
        db.update(TABLE_CATEGORIES, cv, null, null);

        // Count all feeds where unread articles exist
        try {
            // select feedId, count(*) from articles where isUnread>0 group by feedId
            c = db.query(TABLE_ARTICLES, new String[] { "feedId", "count(*)" }, "isUnread>0", null, "feedId",
                    null, null, null);

            // update feeds
            while (c.moveToNext()) {
                int feedId = c.getInt(0);
                int unreadCount = c.getInt(1);

                total += unreadCount;

                cv.put("unread", unreadCount);
                db.update(TABLE_FEEDS, cv, "_id=" + feedId, null);
            }
        } finally {
            if (c != null && !c.isClosed())
                c.close();
        }

        // Count all categories where feeds with unread articles exist
        try {
            // select categoryId, sum(unread) from feeds where categoryId >= 0 group by categoryId
            c = db.query(TABLE_FEEDS, new String[] { "categoryId", "sum(unread)" }, "categoryId>=0", null,
                    "categoryId", null, null, null);

            // update real categories
            while (c.moveToNext()) {
                int categoryId = c.getInt(0);
                int unreadCount = c.getInt(1);

                cv.put("unread", unreadCount);
                db.update(TABLE_CATEGORIES, cv, "_id=" + categoryId, null);
            }
        } finally {
            if (c != null && !c.isClosed())
                c.close();
        }

        // Count special categories
        cv.put("unread", total);
        db.update(TABLE_CATEGORIES, cv, "_id=" + Data.VCAT_ALL, null);

        cv.put("unread", getUnreadCount(Data.VCAT_FRESH, true));
        db.update(TABLE_CATEGORIES, cv, "_id=" + Data.VCAT_FRESH, null);

        cv.put("unread", getUnreadCount(Data.VCAT_PUB, true));
        db.update(TABLE_CATEGORIES, cv, "_id=" + Data.VCAT_PUB, null);

        cv.put("unread", getUnreadCount(Data.VCAT_STAR, true));
        db.update(TABLE_CATEGORIES, cv, "_id=" + Data.VCAT_STAR, null);

        db.setTransactionSuccessful();
    } finally {
        db.endTransaction();
        writeLock(false);
    }

    Log.i(TAG, String.format("Fixed counters, total unread: %s (took %sms)", total,
            (System.currentTimeMillis() - time)));
}
From source file:com.openerp.orm.ORM.java
/**
 * Write.
 *
 * @param dbHelper
 *            the db helper
 * @param values
 *            the values
 * @param id
 *            the id
 * @param fromServer
 *            the from server
 * @return true, if successful
 */
public boolean write(BaseDBHelper dbHelper, ContentValues values, int id, boolean fromServer) {
    // Handling many2one records
    HashMap<String, Object> many2onecols = dbHelper.getMany2OneColumns();
    // Handling Many2Many Records
    HashMap<String, Object> many2manycols = dbHelper.getMany2ManyColumns();
    for (String key : many2manycols.keySet()) {
        try {
            JSONArray m2mArray = new JSONArray(values.getAsString(key));
            Many2Many m2m = (Many2Many) many2manycols.get(key);
            updateM2MRecords(values.getAsString("id"), m2mArray, key, dbHelper, m2m, values);
        } catch (Exception e) {
        }
        values.remove(key);
    }
    // Handling many2one records. [id, "name"] to id
    for (String key : many2onecols.keySet()) {
        try {
            String tempVals = values.getAsString(key);
            if (!tempVals.equals("false")) {
                JSONArray m2oArray = new JSONArray(values.getAsString(key));
                int m2oid = m2oArray.getInt(0);
                values.put(key, m2oid);
            } else {
                values.put(key, "false");
            }
        } catch (Exception e) {
        }
    }
    boolean flag = false;
    SQLiteDatabase db = getWritableDatabase();
    try {
        if (OpenERPServerConnection.isNetworkAvailable(context)) {
            String table = modelToTable(dbHelper.getModelName());
            try {
                JSONObject arguments = new JSONObject();
                for (String key : values.keySet()) {
                    try {
                        int keyid = Integer.parseInt(values.getAsString(key));
                        arguments.put(key, keyid);
                    } catch (Exception e) {
                        String temp = values.getAsString(key);
                        if (temp.equals("true") || temp.equals("false")) {
                            arguments.put(key, ((temp.equals("true")) ? true : false));
                        } else {
                            arguments.put(key, values.get(key).toString());
                        }
                    }
                }
                if (fromServer) {
                    int res = db.update(table, values, "id = " + id, null);
                    flag = true;
                } else {
                    if (oe_obj.updateValues(dbHelper.getModelName(), arguments, id)) {
                        int res = db.update(table, values, "id = " + id, null);
                        flag = true;
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                flag = false;
            }
        } else {
            Toast.makeText(context, "Unable to Connect server ! Please Try again Later. ", Toast.LENGTH_LONG)
                    .show();
            flag = false;
        }
    } catch (Exception e) {
    }
    db.close();
    return flag;
}
From source file:org.ttrssreader.controllers.DBHelper.java
void markUnsynchronizedNotes(Map<Integer, String> ids) {
    if (!isDBAvailable())
        return;

    SQLiteDatabase db = getOpenHelper().getWritableDatabase();
    writeLock(true);
    db.beginTransaction();
    try {
        for (Integer id : ids.keySet()) {
            String note = ids.get(id);
            if (note == null || note.equals(""))
                continue;

            ContentValues cv = new ContentValues(1);
            cv.put(MARK_NOTE, note);
            db.update(TABLE_MARK, cv, "id=" + id, null);
        }
        db.setTransactionSuccessful();
    } finally {
        db.endTransaction();
        writeLock(false);
    }
}
From source file:com.openatk.planting.MainActivity.java
@Override
public void AddFieldDone(String name, Integer acres) {
    // Check if field name is valid and doesn't exist already
    if (name.length() == 0) {
        // Tell them to input a name
        // TODO add this message to R.strings
        Toast.makeText(this, "Field name cannot be blank.", Toast.LENGTH_LONG).show();
    } else {
        // Check if field name already exists in db
        if (FindFieldByName(name) != null && currentField == null) {
            Toast.makeText(this, "A field with this name already exists. Field names must be unique.",
                    Toast.LENGTH_LONG).show();
        } else {
            this.currentPolygon.complete();
            this.currentPolygon.setLabel(name, true);
            if (currentJob == null) {
                this.currentPolygon.setFillColor(Field.FILL_COLOR_NOT_PLANNED);
            } else {
                if (currentJob.getStatus() == Job.STATUS_NOT_PLANNED) {
                    this.currentPolygon.setFillColor(Field.FILL_COLOR_NOT_PLANNED);
                } else if (currentJob.getStatus() == Job.STATUS_PLANNED) {
                    this.currentPolygon.setFillColor(Field.FILL_COLOR_PLANNED);
                } else if (currentJob.getStatus() == Job.STATUS_STARTED) {
                    this.currentPolygon.setFillColor(Field.FILL_COLOR_STARTED);
                } else if (currentJob.getStatus() == Job.STATUS_DONE) {
                    this.currentPolygon.setFillColor(Field.FILL_COLOR_DONE);
                }
            }
            List<LatLng> points = this.currentPolygon.getPoints();
            Boolean wasAnEdit = false;
            if (currentField == null) {
                currentField = new Field(points, map);
            } else {
                currentField.setBoundary(points);
                wasAnEdit = true;
            }
            currentField.setName(name);
            currentField.setAcres(acres);
            Log.d("MainActivity", "Acres:" + Integer.toString(acres));

            String strNewBoundary = "";
            if (points != null && points.isEmpty() == false) {
                // Generate boundary
                StringBuilder newBoundary = new StringBuilder(points.size() * 20);
                for (int i = 0; i < points.size(); i++) {
                    newBoundary.append(points.get(i).latitude);
                    newBoundary.append(",");
                    newBoundary.append(points.get(i).longitude);
                    newBoundary.append(",");
                }
                newBoundary.deleteCharAt(newBoundary.length() - 1);
                strNewBoundary = newBoundary.toString();
            }

            // Save this field to the db
            SQLiteDatabase database = dbHelper.getWritableDatabase();
            ContentValues values = new ContentValues();
            values.put(TableFields.COL_NAME, currentField.getName());
            values.put(TableFields.COL_ACRES, currentField.getAcres());
            values.put(TableFields.COL_BOUNDARY, strNewBoundary);
            // TODO only update if something changed
            values.put(TableFields.COL_HAS_CHANGED, 1);
            values.put(TableFields.COL_DATE_CHANGED, DatabaseHelper.dateToStringUTC(new Date()));

            if (wasAnEdit == false) {
                Integer insertId = (int) database.insert(TableFields.TABLE_NAME, null, values);
                currentField.setId(insertId);
            } else {
                database.update(TableFields.TABLE_NAME, values,
                        TableFields.COL_ID + " = " + Integer.toString(currentField.getId()), null);
            }
            dbHelper.close();

            // Add to list so we can catch click events
            currentField.setPolygon(this.currentPolygon);
            if (wasAnEdit == false) {
                FieldsOnMap.add(currentField);
            } else {
                for (int i = 0; i < FieldsOnMap.size(); i++) {
                    if (FieldsOnMap.get(i).getId() == currentField.getId()) {
                        FieldsOnMap.get(i).setName(name);
                        FieldsOnMap.get(i).setPolygon(this.currentPolygon);
                        FieldsOnMap.get(i).setAcres(acres);
                        FieldsOnMap.get(i).setBoundary(points);
                    }
                }
            }

            // add or update in list view
            if (this.fragmentListView != null)
                this.fragmentListView.getData();

            // Check to see if we have any operations
            if (operationsList.isEmpty() == false) {
                // Check if any operation selected
                if (currentOperationId != 0) {
                    showEdit(true);
                } else {
                    // Make them select an operation
                    // TODO popup list??
                }
            } else {
                // Add an operation
                createOperation(new Callable<Void>() {
                    public Void call() {
                        return showEdit(true);
                    }
                });
            }
            this.trelloController.syncDelayed();
        }
    }
}