Example usage for java.util HashMap remove

List of usage examples for java.util HashMap remove

Introduction

On this page you can find usage examples for java.util HashMap remove.

Prototype

public V remove(Object key) 

Document

Removes the mapping for the specified key from this map if present.
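
As a minimal, self-contained illustration of that contract (the class and keys below are invented for the demo): remove returns the value previously associated with the key, or null if there was no mapping.

import java.util.HashMap;

public class RemoveDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<String, Integer>();
        map.put("a", 1);
        map.put("b", 2);

        // remove detaches the mapping and hands back the old value
        Integer removed = map.remove("a");   // 1
        Integer missing = map.remove("zzz"); // null (no such key)

        System.out.println(removed);              // 1
        System.out.println(missing);              // null
        System.out.println(map.containsKey("a")); // false
    }
}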

Usage

From source file:com.tesora.dve.sql.schema.PETable.java

protected void updateExistingKeys(SchemaContext pc, UserTable ut) throws PEException {
    HashMap<String, Key> persKeys = new HashMap<String, Key>();
    HashMap<String, Key> persCons = new HashMap<String, Key>();
    HashMap<String, PEKey> transKeys = new HashMap<String, PEKey>();
    HashMap<String, PEForeignKey> transCons = new HashMap<String, PEForeignKey>();
    for (PEKey c : getKeys(pc)) {
        if (c.isForeign())
            transCons.put(c.getName().getCapitalized().get(), (PEForeignKey) c);
        else
            transKeys.put(c.getName().getCapitalized().get(), c);
    }
    for (Key uc : ut.getKeys()) {
        String name = uc.getName().toUpperCase().trim();
        if (uc.isForeignKey()) {
            PEForeignKey was = transCons.remove(name);
            boolean same = (was != null);
            if (same) {
                PEForeignKey apc = PEForeignKey.load(uc, pc, null);
                updateColumnPositions(pc, was, apc);
                String anydiffs = was.differs(pc, apc, true);
                if (anydiffs != null) {
                    same = false;
                    transCons.put(name, was);
                }
            }
            if (!same)
                persCons.put(name, uc);
        } else {
            PEKey was = transKeys.remove(name);
            boolean same = (was != null);
            if (same) {
                PEKey apc = PEKey.load(uc, pc, null);
                updateColumnPositions(pc, was, apc);
                String anydiffs = was.differs(pc, apc, true);
                if (anydiffs != null) {
                    same = false;
                    transKeys.put(name, was);
                }
            }
            if (!same)
                persKeys.put(name, uc);
        }
    }
    // now transKeys/transCons hold keys not present persistently, and persKeys/persCons hold keys no longer declared
    // the former are additions, the latter are removals
    for (Key uc : persCons.values()) {
        ut.removeKey(uc);
    }
    for (Key uc : persKeys.values()) {
        ut.removeKey(uc);
    }
    pc.beginSaveContext();
    try {
        for (PEKey c : transKeys.values()) {
            ut.addKey(c.persistTree(pc));
        }
        for (PEForeignKey c : transCons.values()) {
            ut.addKey(c.persistTree(pc));
        }
    } finally {
        pc.endSaveContext();
    }
}
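
A side note on the pattern above: remove doubles as a set-difference primitive, because its non-null return value identifies keys present on both sides, and whatever is left in either map afterwards is an addition or a removal. A minimal standalone sketch of that idiom (names and values are illustrative, not from the Tesora source):

import java.util.HashMap;

public class MapDiffDemo {
    public static void main(String[] args) {
        // keys present in the persistent store
        HashMap<String, String> persisted = new HashMap<String, String>();
        persisted.put("pk", "primary");
        persisted.put("old_fk", "foreign");

        // keys declared in the current schema
        HashMap<String, String> declared = new HashMap<String, String>();
        declared.put("pk", "primary");
        declared.put("new_idx", "index");

        HashMap<String, String> toDrop = new HashMap<String, String>();
        for (String name : persisted.keySet()) {
            // remove returns the mapped value when the key was present, null otherwise
            if (declared.remove(name) == null) {
                toDrop.put(name, persisted.get(name));
            }
        }

        System.out.println("additions: " + declared); // {new_idx=index}
        System.out.println("removals:  " + toDrop);   // {old_fk=foreign}
    }
}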

From source file:StorageEngineClient.CombineFileInputFormat_bak.java

private void getMoreSplits(JobConf job, Path[] paths1, long maxSize, long minSizeNode, long minSizeRack,
        List<CombineFileSplit> splits) throws IOException {
    if (paths1.length == 0) {
        return;
    }

    Path[] paths = paths1;
    ArrayList<Path> splitable = new ArrayList<Path>();
    ArrayList<Path> unsplitable = new ArrayList<Path>();
    for (int i = 0; i < paths1.length; i++) {
        if (isSplitable(paths1[i].getFileSystem(job), paths1[i])) {
            splitable.add(paths1[i]);
        } else {
            unsplitable.add(paths1[i]);
        }
    }
    if (unsplitable.size() != 0) {
        paths = new Path[splitable.size()];
        splitable.toArray(paths);
    }

    OneFileInfo[] files;

    HashMap<String, List<OneBlockInfo>> rackToBlocks = new HashMap<String, List<OneBlockInfo>>();

    HashMap<OneBlockInfo, String[]> blockToNodes = new HashMap<OneBlockInfo, String[]>();

    HashMap<String, List<OneBlockInfo>> nodeToBlocks = new HashMap<String, List<OneBlockInfo>>();

    files = new OneFileInfo[paths.length];

    long totLength = 0;
    for (int i = 0; i < paths.length; i++) {
        files[i] = new OneFileInfo(paths[i], job, rackToBlocks, blockToNodes, nodeToBlocks);
        totLength += files[i].getLength();
    }
    ArrayList<OneBlockInfo> validBlocks = new ArrayList<OneBlockInfo>();
    ArrayList<String> nodes = new ArrayList<String>();
    long curSplitSize = 0;

    for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = nodeToBlocks.entrySet().iterator(); iter
            .hasNext();) {

        Map.Entry<String, List<OneBlockInfo>> one = iter.next();
        nodes.add(one.getKey());
        List<OneBlockInfo> blocksInNode = one.getValue();

        for (OneBlockInfo oneblock : blocksInNode) {
            if (blockToNodes.containsKey(oneblock)) {
                validBlocks.add(oneblock);
                blockToNodes.remove(oneblock);
                curSplitSize += oneblock.length;

                if (maxSize != 0 && curSplitSize >= maxSize) {
                    addCreatedSplit(job, splits, nodes, validBlocks);
                    curSplitSize = 0;
                    validBlocks.clear();
                }
            }
        }
        if (minSizeNode != 0 && curSplitSize >= minSizeNode) {
            addCreatedSplit(job, splits, nodes, validBlocks);
        } else {
            for (OneBlockInfo oneblock : validBlocks) {
                blockToNodes.put(oneblock, oneblock.hosts);
            }
        }
        validBlocks.clear();
        nodes.clear();
        curSplitSize = 0;
    }

    ArrayList<OneBlockInfo> overflowBlocks = new ArrayList<OneBlockInfo>();
    ArrayList<String> racks = new ArrayList<String>();

    while (blockToNodes.size() > 0) {

        for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = rackToBlocks.entrySet().iterator(); iter
                .hasNext();) {

            Map.Entry<String, List<OneBlockInfo>> one = iter.next();
            List<OneBlockInfo> blocks = one.getValue();

            boolean createdSplit = false;
            for (OneBlockInfo oneblock : blocks) {
                if (blockToNodes.containsKey(oneblock)) {
                    validBlocks.add(oneblock);
                    blockToNodes.remove(oneblock);
                    curSplitSize += oneblock.length;
                    for (int i = 0; i < oneblock.hosts.length; i++) {
                        racks.add(oneblock.hosts[i]);
                    }

                    if (maxSize != 0 && curSplitSize >= maxSize) {
                        addCreatedSplit(job, splits, racks, validBlocks);
                        createdSplit = true;
                        break;
                    }
                }
            }

            if (createdSplit) {
                curSplitSize = 0;
                validBlocks.clear();
                racks.clear();
                continue;
            }

            if (!validBlocks.isEmpty()) {
                if (minSizeRack != 0 && curSplitSize >= minSizeRack) {
                    addCreatedSplit(job, splits, racks, validBlocks);
                } else {
                    overflowBlocks.addAll(validBlocks);
                }
            }
            curSplitSize = 0;
            validBlocks.clear();
            racks.clear();
        }
    }

    assert blockToNodes.isEmpty();
    assert curSplitSize == 0;
    assert validBlocks.isEmpty();
    assert racks.isEmpty();

    for (OneBlockInfo oneblock : overflowBlocks) {
        validBlocks.add(oneblock);
        curSplitSize += oneblock.length;

        for (int i = 0; i < oneblock.hosts.length; i++) {
            racks.add(oneblock.hosts[i]);
        }

        if (maxSize != 0 && curSplitSize >= maxSize) {
            addCreatedSplit(job, splits, racks, validBlocks);
            curSplitSize = 0;
            validBlocks.clear();
            racks.clear();
        }
    }

    if (!validBlocks.isEmpty()) {
        addCreatedSplit(job, splits, racks, validBlocks);
    }

    if (unsplitable.size() != 0) {
        curSplitSize = 0;
        nodes.clear();
        validBlocks.clear();
        for (Path path : unsplitable) {
            FileSystem fs = path.getFileSystem(job);
            FileStatus stat = fs.getFileStatus(path);
            LOG.info(path.toString());
            long len = stat.getLen();
            curSplitSize += len;
            BlockLocation[] locations = fs.getFileBlockLocations(stat, 0, len);
            OneBlockInfo oneblock = new OneBlockInfo(path, 0, len, locations[0].getHosts(),
                    locations[0].getTopologyPaths());
            validBlocks.add(oneblock);
            for (int i = 0; i < oneblock.hosts.length; i++) {
                nodes.add(oneblock.hosts[i]);
            }

            if (maxSize != 0 && curSplitSize >= maxSize) {
                addCreatedSplit(job, splits, nodes, validBlocks);
                curSplitSize = 0;
                validBlocks.clear();
                nodes.clear();
            }
        }
        if (!validBlocks.isEmpty()) {
            addCreatedSplit(job, splits, nodes, validBlocks);
        }
    }
}

From source file:pltag.parser.Lexicon.java

protected HashMap<String, Integer> getPosTags(Collection<String> treeStrings, String searchWord) {
    int maxfreq = 0;
    HashMap<String, Integer> postags = new HashMap<String, Integer>();
    for (String ts : treeStrings) {
        String candpostag = getPosFromTreeString(ts, "@LEXEME1@");
        if (!postags.containsKey(candpostag)) {
            Integer freq = posTagNo.get(candpostag.toLowerCase() + " " + searchWord);
            if (freq == null) {
                freq = 1;
            }
            postags.put(candpostag, freq);
            if (freq > maxfreq) {
                maxfreq = freq;
            }
        }
    }
    ArrayList<String> pt = new ArrayList<String>();
    pt.addAll(postags.keySet());
    for (String key : pt) {
        if (postags.get(key) * 50 < maxfreq) {
            postags.remove(key);
        }
    }
    return postags;
}
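
The method above snapshots keySet() into a list before removing, since removing from the map while iterating over its live key set would throw ConcurrentModificationException. On Java 8 and later the same pruning can be written with Collection.removeIf, as in this standalone sketch (values are made up for the demo):

import java.util.HashMap;

public class PruneDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> postags = new HashMap<String, Integer>();
        postags.put("NN", 100);
        postags.put("VB", 1);
        final int maxfreq = 100;

        // removeIf on the entry set prunes in place, without a separate key list
        postags.entrySet().removeIf(e -> e.getValue() * 50 < maxfreq);

        System.out.println(postags); // {NN=100}
    }
}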

From source file:com.compomics.util.experiment.identification.psm_scoring.psm_scores.HyperScore.java

/**
 * Returns the interpolation values for the given scores in the form {a, b}.
 *
 * @param scores the scores
 * @param useCache if true the interpolation values will be stored in the
 * histograms in cache
 *
 * @return the interpolation values for the given scores
 */
public double[] getInterpolationValues(int[] scores, boolean useCache) {
    HashMap<Integer, Integer> scoreHistogram = new HashMap<Integer, Integer>();
    int maxScore = 0;
    int minScore = Integer.MAX_VALUE;
    for (int score : scores) {
        if (score > 0) {
            Integer nScores = scoreHistogram.get(score);
            if (nScores == null) {
                nScores = 1;
            } else {
                nScores++;
            }
            scoreHistogram.put(score, nScores);
            if (score > maxScore) {
                maxScore = score;
            }
            if (score < minScore) {
                minScore = score;
            }
        }
    }
    Integer secondEmptybin = maxScore;
    Integer firstEmptybin = maxScore;
    boolean emptyBin = false;
    for (int bin = minScore; bin <= maxScore; bin++) {
        if (!scoreHistogram.containsKey(bin)) {
            if (!emptyBin) {
                emptyBin = true;
                firstEmptybin = bin;
            } else {
                secondEmptybin = bin;
                break;
            }
        }
    }
    ArrayList<Integer> bins = new ArrayList<Integer>(scoreHistogram.keySet());
    for (Integer bin : bins) {
        if (bin > secondEmptybin) {
            scoreHistogram.remove(bin);
        } else if (bin > firstEmptybin) {
            scoreHistogram.put(bin, 1);
        }
    }
    return getInterpolationValues(scoreHistogram, useCache);
}

From source file:com.nuance.expertassistant.ReadExcelFile.java

public boolean retrieveAnswersandWrite(String projectID, String filename) {

    int row = 1;

    HashMap<String, HashMap<String, String>> qaList = new HashMap<String, HashMap<String, String>>();
    qaList = QueryDB.fetchQAList2Write(projectID);

    try {

        WritableWorkbook workbook = Workbook
                .createWorkbook(new File("/usr/local/Nuance/SQLDB/" + filename + ".xls"));
        WritableSheet sheet = workbook.createSheet("First Sheet", 0);

        Label label = new Label(0, 0, "Question");
        sheet.addCell(label);
        label = new Label(1, 0, "Answer");
        sheet.addCell(label);
        label = new Label(2, 0, "Score");
        sheet.addCell(label);
        label = new Label(3, 0, "Confidence");
        sheet.addCell(label);
        label = new Label(4, 0, "User-Rating");
        sheet.addCell(label);
        label = new Label(5, 0, "Rank");
        sheet.addCell(label);
        label = new Label(6, 0, "Found");
        sheet.addCell(label);

        Iterator<Map.Entry<String, HashMap<String, String>>> it = qaList.entrySet().iterator();
        while (it.hasNext()) {

            Map.Entry<String, HashMap<String, String>> pair = it.next();
            System.out.println("*************************************");
            System.out.println(" The QUESTION IS :" + pair.getKey());
            HashMap<String, String> responseList = pair.getValue();

            String currentQuestion = pair.getKey();

            // Invoking QA Core to fetch Responses
            String contextID = InvokeQACoreAPI.getContextID(projectID).replaceAll("\n", "");
            System.out.println(" The contextID is :[" + contextID + "]");
            String answerString = InvokeQACoreAPI.getAnswer(contextID, projectID, currentQuestion);
            JSONArray jsonArray = new JSONArray(answerString);

            for (int i = 0; i < jsonArray.length(); i++) {
                JSONObject job = jsonArray.getJSONObject(i);
                String answerText = job.getJSONObject("answer").getString("text");
                String evidence = html2text(
                        job.getJSONObject("answer").getJSONObject("evidence").getString("text"));
                Double confidence = job.getJSONObject("answer").getDouble("confidence");
                Double score = job.getJSONObject("answer").getDouble("score");
                int Rank = i;
                String Found = "YES";
                String Rating = "Not Rated";

                if (responseList.containsKey(answerText)) {
                    Rating = responseList.get(answerText);
                    responseList.remove(answerText);
                }

                label = new Label(0, row, currentQuestion);
                sheet.addCell(label);
                label = new Label(1, row, answerText);
                sheet.addCell(label);
                label = new Label(2, row, String.valueOf(score));
                sheet.addCell(label);
                label = new Label(3, row, String.valueOf(confidence));
                sheet.addCell(label);
                label = new Label(4, row, Rating);
                sheet.addCell(label);
                label = new Label(5, row, String.valueOf(Rank));
                sheet.addCell(label);
                label = new Label(6, row, Found);
                sheet.addCell(label);

                row++;

            }

            Iterator<Map.Entry<String, String>> it2 = responseList.entrySet().iterator();
            while (it2.hasNext()) {

                Map.Entry<String, String> pair2 = it2.next();
                System.out.println(pair2.getKey() + " = " + pair2.getValue());

                String answerText = pair2.getKey();
                String evidence = "null";
                Double confidence = 0.0;
                Double score = 0.0;
                int Rank = -1;
                String Found = "NO";
                String Rating = pair2.getValue();

                label = new Label(0, row, currentQuestion);
                sheet.addCell(label);
                label = new Label(1, row, answerText);
                sheet.addCell(label);
                label = new Label(2, row, String.valueOf(score));
                sheet.addCell(label);
                label = new Label(3, row, String.valueOf(confidence));
                sheet.addCell(label);
                label = new Label(4, row, Rating);
                sheet.addCell(label);
                label = new Label(5, row, String.valueOf(Rank));
                sheet.addCell(label);
                label = new Label(6, row, Found);
                sheet.addCell(label);

                row++;

                it2.remove(); // avoids a ConcurrentModificationException
            }

            System.out.println("*************************************");
            it.remove(); // avoids a ConcurrentModificationException    
        }

        workbook.write();
        workbook.close();

        return true;

    } catch (Exception e) {
        System.out.println("Cannot access the workbook: " + e.getMessage());
        return false;

    }

}

From source file:com.tremolosecurity.embedd.EmbPostProc.java

protected HashMap<String, Attribute> setHeadersCookiesEmb(HttpFilterRequest req) throws Exception {
    Iterator<String> names;

    names = req.getHeaderNames();

    HashMap<String, Attribute> reqHeaders = new HashMap<String, Attribute>();

    while (names.hasNext()) {
        String name = names.next();
        if (name.equalsIgnoreCase("Cookie")) {

            continue;
        }

        if (logger.isDebugEnabled()) {
            logger.debug("Header : " + name);
        }

        Attribute attrib = req.getHeader(name);
        ArrayList<String> vals = new ArrayList<String>();

        vals.addAll(attrib.getValues());
        //logger.info("Header : '" + name + "'='" + vals + "'");

        if (name.equalsIgnoreCase("Content-Type")) {
            continue;
        } else if (name.equalsIgnoreCase("If-Range")) {
            continue;
        } else if (name.equalsIgnoreCase("Range")) {
            continue;
        } else if (name.equalsIgnoreCase("If-None-Match")) {
            continue;
        }

        if (this.addHeader(name)) {
            Attribute header = reqHeaders.get(name);
            if (header == null) {
                header = new Attribute(name);
                reqHeaders.put(name, header);
            }

            header.getValues().addAll(vals);
        }

    }

    HashMap<String, Attribute> fromResults = (HashMap<String, Attribute>) req
            .getAttribute(AzSys.AUTO_IDM_HTTP_HEADERS);
    if (fromResults != null) {
        names = fromResults.keySet().iterator();

        while (names.hasNext()) {
            String name = names.next();
            reqHeaders.remove(name); // drop any collected values so the result headers replace them instead of merging

            Attribute attrib = fromResults.get(name);

            Attribute header = reqHeaders.get(name);
            if (header == null) {
                header = new Attribute(name);
                reqHeaders.put(name, header);
            }

            header.getValues().addAll(attrib.getValues());

            //logger.info("Header2 : '" + name + "'='" + header.getValues() + "'");
        }
    }

    return reqHeaders;

}

From source file:org.apache.sysml.hops.cost.CostEstimator.java

private void maintainCPInstVariableStatistics(CPInstruction inst, HashMap<String, VarStats> stats) {
    if (inst instanceof VariableCPInstruction) {
        String optype = inst.getOpcode();
        String[] parts = InstructionUtils.getInstructionParts(inst.toString());

        if (optype.equals("createvar")) {
            if (parts.length < 11)
                return;
            String varname = parts[1];
            long rlen = Long.parseLong(parts[6]);
            long clen = Long.parseLong(parts[7]);
            long brlen = Long.parseLong(parts[8]);
            long bclen = Long.parseLong(parts[9]);
            long nnz = Long.parseLong(parts[10]);
            VarStats vs = new VarStats(rlen, clen, brlen, bclen, nnz, false);
            stats.put(varname, vs);

            //System.out.println(varname+" "+vs);
        } else if (optype.equals("cpvar")) {
            String varname = parts[1];
            String varname2 = parts[2];
            VarStats vs = stats.get(varname);
            stats.put(varname2, vs);
        } else if (optype.equals("mvvar")) {
            String varname = parts[1];
            String varname2 = parts[2];
            VarStats vs = stats.remove(varname);
            stats.put(varname2, vs);
        } else if (optype.equals("rmvar")) {
            String varname = parts[1];
            stats.remove(varname);
        }
    } else if (inst instanceof DataGenCPInstruction) {
        DataGenCPInstruction randInst = (DataGenCPInstruction) inst;
        String varname = randInst.output.getName();
        long rlen = randInst.getRows();
        long clen = randInst.getCols();
        long brlen = randInst.getRowsInBlock();
        long bclen = randInst.getColsInBlock();
        long nnz = (long) (randInst.getSparsity() * rlen * clen);
        VarStats vs = new VarStats(rlen, clen, brlen, bclen, nnz, true);
        stats.put(varname, vs);
    } else if (inst instanceof StringInitCPInstruction) {
        StringInitCPInstruction iinst = (StringInitCPInstruction) inst;
        String varname = iinst.output.getName();
        long rlen = iinst.getRows();
        long clen = iinst.getCols();
        VarStats vs = new VarStats(rlen, clen, ConfigurationManager.getBlocksize(),
                ConfigurationManager.getBlocksize(), rlen * clen, true);
        stats.put(varname, vs);
    } else if (inst instanceof FunctionCallCPInstruction) {
        FunctionCallCPInstruction finst = (FunctionCallCPInstruction) inst;
        ArrayList<String> outVars = finst.getBoundOutputParamNames();
        for (String varname : outVars) {
            stats.put(varname, _unknownStats);
            //System.out.println(varname+" "+vs);
        }
    }
}
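
One detail worth noting in the mvvar branch above: remove hands back the value that was mapped, so a rename is simply a remove followed by a put. A standalone sketch of that move-a-key idiom (a hypothetical helper, not part of SystemML):

import java.util.HashMap;

public class MoveKeyDemo {
    // Re-keys an entry: remove returns the old value (or null), which is
    // then stored under the new key.
    static <K, V> void moveKey(HashMap<K, V> map, K from, K to) {
        V value = map.remove(from);
        if (value != null) {
            map.put(to, value);
        }
    }

    public static void main(String[] args) {
        HashMap<String, Integer> stats = new HashMap<String, Integer>();
        stats.put("a", 42);
        moveKey(stats, "a", "b");
        System.out.println(stats); // {b=42}
    }
}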

From source file:edu.ku.brc.ui.UIRegistry.java

/**
 * Unregisters a UI component.
 * @param category the category to be registered
 * @param name the name
 * @throws UIException throws exception if it is not registered
 */
public static void unregisterUI(final String category, final String name) throws UIException {
    HashMap<String, JComponent> compsHash = instance.uiItems.get(category);
    if (compsHash == null) {
        throw new UIException("Couldn't find UI Category with Name[" + category + "].");
    }
    JComponent comp = compsHash.get(name);
    if (comp == null) {
        throw new UIException("Couldn't find UI component with Name[" + name + "].");
    }
    compsHash.remove(name); // the registry map is keyed by name
}

From source file:com.example.android.basicsyncadapter.SyncAdapter.java

/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult) throws IOException,
        XmlPullParserException, RemoteException, OperationApplicationException, ParseException {
    //final FeedParser feedParser = new FeedParser();
    final CAPFeedParser feedParser = new CAPFeedParser();
    final ContentResolver contentResolver = getContext().getContentResolver();

    //Log.i(TAG, "Parsing stream as Atom feed");
    final List<CAPFeedParser.Entry> entries = feedParser.parse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, CAPFeedParser.Entry> entryMap = new HashMap<String, CAPFeedParser.Entry>();
    for (CAPFeedParser.Entry e : entries) {
        entryMap.put(e.id, e);
    }

    // Get list of all items
    //Log.i(TAG, "Fetching local entries for merge");
    Uri uri = FeedContract.Entry.CONTENT_URI; // Get all entries
    Cursor c = contentResolver.query(uri, PROJECTION, null, null, null);
    assert c != null;
    //Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");

    // Find stale data
    int id;
    String entryId;
    String title;
    String description;
    String headline;
    String url;
    String areas;
    String issued;
    while (c.moveToNext()) {
        syncResult.stats.numEntries++;

        id = c.getInt(COLUMN_ID);
        entryId = c.getString(COLUMN_ENTRY_ID);
        title = c.getString(COLUMN_TITLE);
        description = c.getString(COLUMN_DESCRIPTION);
        headline = c.getString(COLUMN_HEADLINE);
        areas = c.getString(COLUMN_AREAS);
        url = c.getString(COLUMN_LINK);
        issued = c.getString(COLUMN_ISSUED);

        CAPFeedParser.Entry match = entryMap.get(entryId);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(entryId);
            // Check to see if the entry needs to be updated
            Uri existingUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id))
                    .build();
            if ((match.title != null && !match.title.equals(title))
                    || (match.link != null && !match.link.equals(url))
                    || (match.issued != null && !match.issued.equals(issued))) {
                // Update existing record
                //Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri)
                        .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, title)
                        .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, description)
                        .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, headline)
                        .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, issued)
                        .withValue(FeedContract.Entry.COLUMN_NAME_LINK, url)
                        .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, areas).build());
                syncResult.stats.numUpdates++;
            } else {
                //Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = FeedContract.Entry.CONTENT_URI.buildUpon().appendPath(Integer.toString(id)).build();
            //Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (CAPFeedParser.Entry e : entryMap.values()) {
        //Log.i(TAG, "Scheduling insert: entry_id=" + e.id);
        batch.add(ContentProviderOperation.newInsert(FeedContract.Entry.CONTENT_URI)
                .withValue(FeedContract.Entry.COLUMN_NAME_ENTRY_ID, e.id)
                .withValue(FeedContract.Entry.COLUMN_NAME_TITLE, e.title)
                .withValue(FeedContract.Entry.COLUMN_NAME_DESCRIPTION, e.description)
                .withValue(FeedContract.Entry.COLUMN_NAME_HEADLINE, e.headline)
                .withValue(FeedContract.Entry.COLUMN_NAME_ISSUED, e.issued)
                .withValue(FeedContract.Entry.COLUMN_NAME_LINK, e.link)
                .withValue(FeedContract.Entry.COLUMN_NAME_AREAS, e.areas).build());
        syncResult.stats.numInserts++;
    }
    //Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(FeedContract.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(FeedContract.Entry.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}

From source file:com.example.android.network.sync.basicsyncadapter.SyncAdapter.java

/**
 * Read XML from an input stream, storing it into the content provider.
 *
 * <p>This is where incoming data is persisted, committing the results of a sync. In order to
 * minimize (expensive) disk operations, we compare incoming data with what's already in our
 * database, and compute a merge. Only changes (insert/update/delete) will result in a database
 * write.
 *
 * <p>As an additional optimization, we use a batch operation to perform all database writes at
 * once.
 *
 * <p>Merge strategy:
 * 1. Get cursor to all items in feed<br/>
 * 2. For each item, check if it's in the incoming data.<br/>
 *    a. YES: Remove from "incoming" list. Check if data has mutated, if so, perform
 *            database UPDATE.<br/>
 *    b. NO: Schedule DELETE from database.<br/>
 * (At this point, incoming database only contains missing items.)<br/>
 * 3. For any items remaining in incoming list, ADD to database.
 */
public void updateLocalFeedData(final InputStream stream, final SyncResult syncResult) throws IOException,
        XmlPullParserException, RemoteException, OperationApplicationException, ParseException {

    final ContentResolver contentResolver = getContext().getContentResolver();

    Log.i(TAG, "Parsing stream as Atom feed");
    final List<Transformer> entries = this.parseTransformersResponse(stream);
    Log.i(TAG, "Parsing complete. Found " + entries.size() + " entries");

    ArrayList<ContentProviderOperation> batch = new ArrayList<ContentProviderOperation>();

    // Build hash table of incoming entries
    HashMap<String, Transformer> entryMap = new HashMap<String, Transformer>();
    for (Transformer e : entries) {
        entryMap.put(e.transformerID, e);
    }
    Cursor c = null;

    try {

        // Get list of all items
        Log.i(TAG, "Fetching local entries for merge");
        Uri uri = Transformer.CONTENT_URI; // Get all entries
        c = contentResolver.query(uri, null, null, null, null);
        assert c != null;
        Log.i(TAG, "Found " + c.getCount() + " local entries. Computing merge solution...");
    } catch (Exception ex) {
        Log.e(TAG, "Error querying local entries", ex);
        return;
    }
    // Find stale data
    String id;
    String name;
    String location;

    while (c.moveToNext()) {
        syncResult.stats.numEntries++;

        id = c.getString(COLUMN_ID);
        name = c.getString(COLUMN_ENTRY_ID);
        location = c.getString(COLUMN_TITLE);

        Transformer match = entryMap.get(id);
        if (match != null) {
            // Entry exists. Remove from entry map to prevent insert later.
            entryMap.remove(id);
            // Check to see if the entry needs to be updated
            Uri existingUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
            if ((match.trsName != null && !match.trsName.equals(name))
                    || (match.trsLocation != null && !match.trsLocation.equals(location))) {
                // Update existing record
                Log.i(TAG, "Scheduling update: " + existingUri);
                batch.add(ContentProviderOperation.newUpdate(existingUri).withValue(Transformer.KEY_NAME, name)
                        .withValue(Transformer.KEY_LOCATION, location)

                        .build());
                syncResult.stats.numUpdates++;
            } else {
                Log.i(TAG, "No action: " + existingUri);
            }
        } else {
            // Entry doesn't exist. Remove it from the database.
            Uri deleteUri = Transformer.CONTENT_URI.buildUpon().appendPath(id).build();
            Log.i(TAG, "Scheduling delete: " + deleteUri);
            batch.add(ContentProviderOperation.newDelete(deleteUri).build());
            syncResult.stats.numDeletes++;
        }
    }
    c.close();

    // Add new items
    for (Transformer e : entryMap.values()) {
        Log.i(TAG, "Scheduling insert: entry_id=" + e.transformerID);
        batch.add(ContentProviderOperation.newInsert(Transformer.CONTENT_URI)
                .withValue(Transformer.KEY_TRANSFORMER_ID, e.transformerID)
                .withValue(Transformer.KEY_NAME, e.trsName).withValue(Transformer.KEY_LOCATION, e.trsLocation)
                .withValue(Transformer.KEY_CURRENT_TEMP, e.trsCurrentTemp)
                .withValue(Transformer.KEY_LAST_SERVER_SYNC_DATE, e.lastServerSyncDate)
                .withValue(Transformer.KEY_LAST_UPDATED_TIME, e.lastServerSyncDate)
                .withValue(Transformer.KEY_SYNC_STATUS, 0).withValue(Transformer.KEY_MAKE, e.trsMake)
                .withValue(Transformer.KEY_WINDING_MAKE, e.trsWindingMake)
                .withValue(Transformer.KEY_WINDING_COUNT, e.trsWindingCount)
                .withValue(Transformer.KEY_OIL_LEVEL, e.trsOilLevel)
                .withValue(Transformer.KEY_OPERATING_POWER, e.trsOperatingPower)
                .withValue(Transformer.KEY_TYPE, e.trsType)

                .build());
        syncResult.stats.numInserts++;
    }
    Log.i(TAG, "Merge solution ready. Applying batch update");
    mContentResolver.applyBatch(Transformer.CONTENT_AUTHORITY, batch);
    mContentResolver.notifyChange(Transformer.CONTENT_URI, // URI where data was modified
            null, // No local observer
            false); // IMPORTANT: Do not sync to network
    // This sample doesn't support uploads, but if *your* code does, make sure you set
    // syncToNetwork=false in the line above to prevent duplicate syncs.
}