Example usage for java.util.HashMap.clear()

List of usage examples for java.util.HashMap.clear()

Introduction

On this page you can find example usage for java.util.HashMap.clear().

Prototype

public void clear() 

Document

Removes all of the mappings from this map.
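
A minimal, self-contained sketch (hypothetical map contents, not taken from the examples below): clear() empties the map in place, so the same instance can be reused afterwards instead of allocating a new HashMap.

import java.util.HashMap;

public class HashMapClearExample {
    public static void main(String[] args) {
        HashMap<String, String> values = new HashMap<>();
        values.put("first_name", "Jane");
        values.put("last_name", "Doe");
        System.out.println(values.size());    // 2

        // Removes all of the mappings from this map; the instance itself stays usable.
        values.clear();
        System.out.println(values.isEmpty()); // true

        // The cleared map can be refilled without creating a new HashMap.
        values.put("country_code", "US");
        System.out.println(values.size());    // 1
    }
}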

Usage

From source file: org.telegram.ui.PassportActivity.java

private void setTypeValue(TLRPC.TL_secureRequiredType requiredType, String text, String json,
        TLRPC.TL_secureRequiredType documentRequiredType, String documentsJson, boolean documentOnly,
        int availableDocumentTypesCount) {
    TextDetailSecureCell view = typesViews.get(requiredType);
    if (view == null) {
        if (currentActivityType == TYPE_MANAGE) {
            ArrayList<TLRPC.TL_secureRequiredType> documentTypes = new ArrayList<>();
            if (documentRequiredType != null) {
                documentTypes.add(documentRequiredType);
            }
            View prev = linearLayout2.getChildAt(linearLayout2.getChildCount() - 6);
            if (prev instanceof TextDetailSecureCell) {
                ((TextDetailSecureCell) prev).setNeedDivider(true);
            }
            view = addField(getParentActivity(), requiredType, documentTypes, true, true);
            updateManageVisibility();
        } else {
            return;
        }
    }
    HashMap<String, String> values = typesValues.get(requiredType);
    HashMap<String, String> documentValues = documentRequiredType != null
            ? typesValues.get(documentRequiredType)
            : null;
    TLRPC.TL_secureValue requiredTypeValue = getValueByType(requiredType, true);
    TLRPC.TL_secureValue documentRequiredTypeValue = getValueByType(documentRequiredType, true);

    if (json != null && languageMap == null) {
        languageMap = new HashMap<>();
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(
                    ApplicationLoader.applicationContext.getResources().getAssets().open("countries.txt")));
            String line;
            while ((line = reader.readLine()) != null) {
                String[] args = line.split(";");
                languageMap.put(args[1], args[2]);
            }
            reader.close();
        } catch (Exception e) {
            FileLog.e(e);
        }
    } else {
        languageMap = null;
    }

    String value = null;
    if (text != null) {
        if (requiredType.type instanceof TLRPC.TL_secureValueTypePhone) {
            value = PhoneFormat.getInstance().format("+" + text);
        } else if (requiredType.type instanceof TLRPC.TL_secureValueTypeEmail) {
            value = text;
        }
    } else {
        StringBuilder stringBuilder = null;
        if (currentActivityType != TYPE_MANAGE && documentRequiredType != null
                && (!TextUtils.isEmpty(documentsJson) || documentRequiredTypeValue != null)) {
            if (stringBuilder == null) {
                stringBuilder = new StringBuilder();
            }
            if (availableDocumentTypesCount > 1) {
                stringBuilder.append(getTextForType(documentRequiredType.type));
            } else if (TextUtils.isEmpty(documentsJson)) {
                stringBuilder
                        .append(LocaleController.getString("PassportDocuments", R.string.PassportDocuments));
            }
        }
        if (json != null || documentsJson != null) {
            if (values == null) {
                return;
            }
            values.clear();
            String[] keys = null;
            String[] documentKeys = null;
            if (requiredType.type instanceof TLRPC.TL_secureValueTypePersonalDetails) {
                if (currentActivityType == TYPE_REQUEST && !documentOnly
                        || currentActivityType == TYPE_MANAGE && documentRequiredType == null) {
                    keys = new String[] { "first_name", "middle_name", "last_name", "first_name_native",
                            "middle_name_native", "last_name_native", "birth_date", "gender", "country_code",
                            "residence_country_code" };
                }
                if (currentActivityType == TYPE_REQUEST
                        || currentActivityType == TYPE_MANAGE && documentRequiredType != null) {
                    documentKeys = new String[] { "document_no", "expiry_date" };
                }
            } else if (requiredType.type instanceof TLRPC.TL_secureValueTypeAddress) {
                if (currentActivityType == TYPE_REQUEST && !documentOnly
                        || currentActivityType == TYPE_MANAGE && documentRequiredType == null) {
                    keys = new String[] { "street_line1", "street_line2", "post_code", "city", "state",
                            "country_code" };
                }
            }
            if (keys != null || documentKeys != null) {
                try {
                    JSONObject jsonObject = null;
                    String[] currentKeys = null;
                    for (int b = 0; b < 2; b++) {
                        if (b == 0) {
                            if (json != null) {
                                jsonObject = new JSONObject(json);
                                currentKeys = keys;
                            }
                        } else {
                            if (documentValues == null) {
                                continue;
                            }
                            if (documentsJson != null) {
                                jsonObject = new JSONObject(documentsJson);
                                currentKeys = documentKeys;
                            }
                        }
                        if (currentKeys == null || jsonObject == null) {
                            continue;
                        }
                        try {
                            Iterator<String> iter = jsonObject.keys();
                            while (iter.hasNext()) {
                                String key = iter.next();
                                if (b == 0) {
                                    values.put(key, jsonObject.getString(key));
                                } else {
                                    documentValues.put(key, jsonObject.getString(key));
                                }
                            }
                        } catch (Throwable e) {
                            FileLog.e(e);
                        }

                        for (int a = 0; a < currentKeys.length; a++) {
                            if (jsonObject.has(currentKeys[a])) {
                                if (stringBuilder == null) {
                                    stringBuilder = new StringBuilder();
                                }
                                String jsonValue = jsonObject.getString(currentKeys[a]);
                                if (jsonValue != null) {
                                    if (!TextUtils.isEmpty(jsonValue)) {
                                        if ("first_name_native".equals(currentKeys[a])
                                                || "middle_name_native".equals(currentKeys[a])
                                                || "last_name_native".equals(currentKeys[a])) {
                                            continue;
                                        }
                                        if (stringBuilder.length() > 0) {
                                            if ("last_name".equals(currentKeys[a])
                                                    || "last_name_native".equals(currentKeys[a])
                                                    || "middle_name".equals(currentKeys[a])
                                                    || "middle_name_native".equals(currentKeys[a])) {
                                                stringBuilder.append(" ");
                                            } else {
                                                stringBuilder.append(", ");
                                            }
                                        }
                                        switch (currentKeys[a]) {
                                        case "country_code":
                                        case "residence_country_code":
                                            String country = languageMap.get(jsonValue);
                                            if (country != null) {
                                                stringBuilder.append(country);
                                            }
                                            break;
                                        case "gender":
                                            if ("male".equals(jsonValue)) {
                                                stringBuilder.append(LocaleController.getString("PassportMale",
                                                        R.string.PassportMale));
                                            } else if ("female".equals(jsonValue)) {
                                                stringBuilder.append(LocaleController
                                                        .getString("PassportFemale", R.string.PassportFemale));
                                            }
                                            break;
                                        default:
                                            stringBuilder.append(jsonValue);
                                            break;
                                        }
                                    }
                                }
                            }
                        }
                    }
                } catch (Exception ignore) {

                }
            }
        }
        if (stringBuilder != null) {
            value = stringBuilder.toString();
        }
    }

    boolean isError = false;
    HashMap<String, String> errors = !documentOnly ? errorsMap.get(getNameForType(requiredType.type)) : null;
    HashMap<String, String> documentsErrors = documentRequiredType != null
            ? errorsMap.get(getNameForType(documentRequiredType.type))
            : null;
    if (errors != null && errors.size() > 0 || documentsErrors != null && documentsErrors.size() > 0) {
        value = null;
        if (!documentOnly) {
            value = mainErrorsMap.get(getNameForType(requiredType.type));
        }
        if (value == null) {
            value = mainErrorsMap.get(getNameForType(documentRequiredType.type));
        }
        isError = true;
    } else {
        if (requiredType.type instanceof TLRPC.TL_secureValueTypePersonalDetails) {
            if (TextUtils.isEmpty(value)) {
                if (documentRequiredType == null) {
                    value = LocaleController.getString("PassportPersonalDetailsInfo",
                            R.string.PassportPersonalDetailsInfo);
                } else {
                    if (currentActivityType == TYPE_MANAGE) {
                        value = LocaleController.getString("PassportDocuments", R.string.PassportDocuments);
                    } else {
                        if (availableDocumentTypesCount == 1) {
                            if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypePassport) {
                                value = LocaleController.getString("PassportIdentityPassport",
                                        R.string.PassportIdentityPassport);
                            } else if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypeInternalPassport) {
                                value = LocaleController.getString("PassportIdentityInternalPassport",
                                        R.string.PassportIdentityInternalPassport);
                            } else if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypeDriverLicense) {
                                value = LocaleController.getString("PassportIdentityDriverLicence",
                                        R.string.PassportIdentityDriverLicence);
                            } else if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypeIdentityCard) {
                                value = LocaleController.getString("PassportIdentityID",
                                        R.string.PassportIdentityID);
                            }
                        } else {
                            value = LocaleController.getString("PassportIdentityDocumentInfo",
                                    R.string.PassportIdentityDocumentInfo);
                        }
                    }
                }
            }
        } else if (requiredType.type instanceof TLRPC.TL_secureValueTypeAddress) {
            if (TextUtils.isEmpty(value)) {
                if (documentRequiredType == null) {
                    value = LocaleController.getString("PassportAddressNoUploadInfo",
                            R.string.PassportAddressNoUploadInfo);
                } else {
                    if (currentActivityType == TYPE_MANAGE) {
                        value = LocaleController.getString("PassportDocuments", R.string.PassportDocuments);
                    } else {
                        if (availableDocumentTypesCount == 1) {
                            if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypeRentalAgreement) {
                                value = LocaleController.getString("PassportAddAgreementInfo",
                                        R.string.PassportAddAgreementInfo);
                            } else if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypeUtilityBill) {
                                value = LocaleController.getString("PassportAddBillInfo",
                                        R.string.PassportAddBillInfo);
                            } else if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypePassportRegistration) {
                                value = LocaleController.getString("PassportAddPassportRegistrationInfo",
                                        R.string.PassportAddPassportRegistrationInfo);
                            } else if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypeTemporaryRegistration) {
                                value = LocaleController.getString("PassportAddTemporaryRegistrationInfo",
                                        R.string.PassportAddTemporaryRegistrationInfo);
                            } else if (documentRequiredType.type instanceof TLRPC.TL_secureValueTypeBankStatement) {
                                value = LocaleController.getString("PassportAddBankInfo",
                                        R.string.PassportAddBankInfo);
                            }
                        } else {
                            value = LocaleController.getString("PassportAddressInfo",
                                    R.string.PassportAddressInfo);
                        }
                    }
                }
            }
        } else if (requiredType.type instanceof TLRPC.TL_secureValueTypePhone) {
            if (TextUtils.isEmpty(value)) {
                value = LocaleController.getString("PassportPhoneInfo", R.string.PassportPhoneInfo);
            }
        } else if (requiredType.type instanceof TLRPC.TL_secureValueTypeEmail) {
            if (TextUtils.isEmpty(value)) {
                value = LocaleController.getString("PassportEmailInfo", R.string.PassportEmailInfo);
            }
        }
    }
    view.setValue(value);
    view.valueTextView.setTextColor(Theme.getColor(
            isError ? Theme.key_windowBackgroundWhiteRedText3 : Theme.key_windowBackgroundWhiteGrayText2));
    view.setChecked(!isError && currentActivityType != TYPE_MANAGE
            && (documentOnly && documentRequiredType != null || !documentOnly && requiredTypeValue != null)
            && (documentRequiredType == null || documentRequiredTypeValue != null));
}

From source file: hu.sztaki.lpds.pgportal.portlets.workflow.EasyWorkflowPortlet.java

private Vector getEConfParam(String pUser, String pWorkflow, PortletSession ps) throws Exception {
    if (((Hashtable) ps.getAttribute("findwf")).containsKey(pWorkflow)) {//findinWF.get(pWorkflow) != null
        //System.out.println("during processing:"+pWorkflow);
        return new Vector();
    }
    ((Hashtable) ps.getAttribute("findwf")).put(pWorkflow, "true");
    //System.out.println("WFs during processing:"+findinWF);
    Vector eparam = new Vector();//end user parameters
    Hashtable hsh = new Hashtable();
    hsh.put("url", PortalCacheService.getInstance().getUser(pUser).getWorkflow(pWorkflow).getWfsID());
    ServiceType st = InformationBase.getI().getService("wfs", "portal", hsh, new Vector());
    // try
    // {
    PortalWfsClient pc = (PortalWfsClient) Class.forName(st.getClientObject()).newInstance();
    pc.setServiceURL(st.getServiceUrl());
    pc.setServiceID(st.getServiceID());
    ComDataBean tmp = new ComDataBean();
    tmp.setPortalID(PropertyLoader.getInstance().getProperty("service.url"));
    tmp.setUserID(pUser);
    tmp.setWorkflowID(pWorkflow);
    Vector wfconfigdt = pc.getWorkflowConfigData(tmp);

    for (int i = 0; i < wfconfigdt.size(); i++) {
        // replace special characters...
        String jobtxt = new String(((JobPropertyBean) wfconfigdt.get(i)).getTxt());
        ((JobPropertyBean) wfconfigdt.get(i)).setTxt(replaceTextS(jobtxt));

        JobPropertyBean jobprop = (JobPropertyBean) wfconfigdt.get(i);

        String gridtype = "" + jobprop.getExe().get("gridtype");
        if (GEMLCA.equals("" + jobprop.getExe().get("gridtype"))) {//GLC list refresh + access check
            Vector v = GemlcaCacheService.getInstance().getGLCList(
                    "" + PropertyLoader.getInstance().getProperty("portal.prefix.dir") + "/users/" + pUser
                            + "/x509up." + jobprop.getExe().get("grid"),
                    "" + jobprop.getExe().get("grid"), 0, 0);
        }

        // input
        for (int j = 0; j < ((JobPropertyBean) wfconfigdt.get(i)).getInputs().size(); j++) {
            PortDataBean ptmp = (PortDataBean) ((JobPropertyBean) wfconfigdt.get(i)).getInputs().get(j);
            // replace special characters...
            ptmp.setTxt(replaceTextS(ptmp.getTxt()));
            /*   System.out.println(j+" job input port getName "+ ptmp.getName());
            System.out.println(" job input port getLabel "+ ptmp.getLabel());//if not empty or not null
            System.out.println(" job input port getData "+ ptmp.getData());
            System.out.println(" job input port getDataDisabled "+ ptmp.getDataDisabled());
            System.out.println(" job input port getDesc "+ ptmp.getDesc());*/

            Set keys = ptmp.getLabel().keySet();
            Iterator it = keys.iterator();
            while (it.hasNext()) {
                String key = (String) it.next();
                String label = (String) ptmp.getLabel().get(key);
                String inh = (String) ptmp.getInherited().get(key);
                if (!(label.equals("") || label.equals("null")) && (inh.equals("---") || inh.equals("null"))) {
                    Hashtable ph = new Hashtable();
                    ph.put("wfID", pWorkflow);
                    ph.put("jobID", "" + ((JobPropertyBean) wfconfigdt.get(i)).getId());
                    ph.put("jobName", "" + ((JobPropertyBean) wfconfigdt.get(i)).getName());
                    ph.put("type", "iport");
                    ph.put("typeID", "" + ptmp.getId());
                    ph.put("name", key);
                    ph.put("value", "" + ptmp.getData().get(key));
                    ph.put("label", label);
                    ph.put("desc", "" + ptmp.getDesc().get(key));
                    ph.put("gridtype", gridtype);
                    if (key.equals("file")) {
                        try {
                            ph.put("storageID", PortalCacheService.getInstance().getUser(pUser)
                                    .getWorkflow(pWorkflow).getStorageID());
                            ph.put("userID", pUser);
                            ph.put("portalID", PropertyLoader.getInstance().getProperty("service.url"));
                            ph.put("inputID", "" + ptmp.getSeq());
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                    eparam.add(ph);
                }
            }

        }
        // outputs
        for (int jo = 0; jo < ((JobPropertyBean) wfconfigdt.get(i)).getOutputs().size(); jo++) {
            PortDataBean ptmpo = (PortDataBean) ((JobPropertyBean) wfconfigdt.get(i)).getOutputs().get(jo);
            // replace special characters...
            ptmpo.setTxt(replaceTextS(ptmpo.getTxt()));

            Set keys = ptmpo.getLabel().keySet();
            Iterator it = keys.iterator();
            while (it.hasNext()) {
                String key = (String) it.next();
                String label = (String) ptmpo.getLabel().get(key);
                String inh = (String) ptmpo.getInherited().get(key);
                if (!(label.equals("") || label.equals("null")) && (inh.equals("---") || inh.equals("null"))) {
                    Hashtable ph = new Hashtable();
                    ph.put("wfID", pWorkflow);
                    ph.put("jobID", "" + ((JobPropertyBean) wfconfigdt.get(i)).getId());
                    ph.put("jobName", "" + ((JobPropertyBean) wfconfigdt.get(i)).getName());
                    ph.put("type", "oport");
                    ph.put("typeID", "" + ptmpo.getId());
                    ph.put("name", key);
                    ph.put("value", "" + ptmpo.getData().get(key));
                    ph.put("label", label);
                    ph.put("desc", "" + ptmpo.getDesc().get(key));
                    ph.put("gridtype", gridtype);
                    eparam.add(ph);
                }
            }
        }
        //JobPropertyBean jobprop = (JobPropertyBean) wfconfigdt.get(i);
        /*   System.out.println(" job getName "+ jobprop.getName());
        System.out.println(" job getTxt "+ jobprop.getTxt());
        System.out.println(" job getDesc "+ jobprop.getDesc());
        System.out.println(" job getDesc0 "+ jobprop.getDesc0());
        System.out.println(" job getExe "+ jobprop.getExe());
        System.out.println(" job getExeDisabled "+ jobprop.getExeDisabled());
        System.out.println(" job getId "+ jobprop.getId());
        System.out.println(" job getInherited "+ jobprop.getInherited());
        System.out.println(" job getLabel "+ jobprop.getLabel());*/

        if (((String) jobprop.getExe().get("jobistype")).equals("workflow")) {//jobistype=workflow // if embedded
            System.out.println(" ----embedded job start - getName:" + jobprop.getName());
            Vector ep = getEConfParam(pUser, (String) jobprop.getExe().get("iworkflow"), ps);
            if (ep != null) {
                for (int ie = 0; ie < ep.size(); ie++) {
                    eparam.add(ep.get(ie));
                }
            }
            System.out.println(" ----embedded job end  - job getName:" + jobprop.getName());
        } else {
            Set keys = jobprop.getLabel().keySet();
            Iterator it = keys.iterator();
            int isGridtype = -1;
            int isGrid = -1;
            int isResource = -1;
            int isjobmanager = -1;
            HashMap resconf = new HashMap();

            while (it.hasNext()) {
                String key = (String) it.next();
                String label = (String) jobprop.getLabel().get(key);
                String inh = (String) jobprop.getInherited().get(key);
                if (!(label.equals("") || label.equals("null")) && (inh.equals("---") || inh.equals("null"))) {
                    Hashtable ph = new Hashtable();
                    ph.put("wfID", pWorkflow);
                    ph.put("jobID", "" + ((JobPropertyBean) wfconfigdt.get(i)).getId());
                    ph.put("jobName", "" + ((JobPropertyBean) wfconfigdt.get(i)).getName());
                    ph.put("type", "exe");
                    ph.put("typeID", "" + jobprop.getId());
                    ph.put("name", key);
                    ph.put("value", "" + jobprop.getExe().get(key));
                    ph.put("label", label);
                    ph.put("desc", "" + jobprop.getDesc0().get(key));
                    ph.put("gridtype", gridtype);

                    if (key.equals("binary")) {
                        try {
                            ph.put("storageID", PortalCacheService.getInstance().getUser(pUser)
                                    .getWorkflow(pWorkflow).getStorageID());
                            ph.put("userID", pUser);
                            ph.put("portalID", PropertyLoader.getInstance().getProperty("service.url"));
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    } else if (key.equals("gridtype")) {
                        if (!jobprop.getExe().get("gridtype").equals(EDGI)) {//filter out edgi config
                            Hashtable vGridTypes = new Hashtable();
                            Vector t = ConfigHandler.getGridMidlewares(
                                    (List<Middleware>) ps.getAttribute("resources", ps.APPLICATION_SCOPE));
                            for (int gt = 0; gt < t.size(); gt++) {
                                if (!(t.get(gt).equals(GEMLCA) || t.get(gt).equals(EDGI))) {//filter out GEMLCA and EDGI
                                    vGridTypes.put("" + t.get(gt), "0");
                                }
                            }
                            ph.put("data", vGridTypes);
                            resconf.put("gridtype", ph);
                        }
                        ph = null;
                    } else if (key.equals("grid")) {
                        try {
                            if (!jobprop.getExe().get("gridtype").equals(EDGI)) {//filter out edgi config
                                ph.put("data", ConfigHandler.getGroups(
                                        (List<Middleware>) ps.getAttribute("resources", ps.APPLICATION_SCOPE),
                                        (String) jobprop.getExe().get("gridtype")));
                                resconf.put("grid", ph);
                            }
                            ph = null;
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    } else if (key.equals("resource")) {
                        try {
                            ph.put("data", ConfigHandler.getResources(
                                    (List<Middleware>) ps.getAttribute("resources", ps.APPLICATION_SCOPE),
                                    (String) jobprop.getExe().get("gridtype"),
                                    (String) jobprop.getExe().get("grid")));
                        } catch (Exception e0) {
                            ph.put("data", new Vector());
                        }

                        if (!jobprop.getExe().get("gridtype").equals(EDGI)) {//filter out edgi config
                            resconf.put("resource", ph);
                        }
                        ph = null;
                    } else if (key.equals("jobmanager")) {
                        if (jobprop.getExe().get("gridtype").equals(GEMLCA)) {
                            try {
                                //gemlcaquery
                                Vector v = GemlcaCacheService.getInstance().getGLCList(
                                        "" + PropertyLoader.getInstance().getProperty("portal.prefix.dir")
                                                + "/users/" + pUser + "/x509up." + jobprop.getExe().get("grid"),
                                        "" + jobprop.getExe().get("grid"), 0, 0);
                                //gemlca sites
                                Vector vgsites = GemlcaCacheService.getInstance().getGLCsites(
                                        "" + jobprop.getExe().get("grid"),
                                        "" + jobprop.getExe().get("resource"));
                                ph.put("data", vgsites);
                            } catch (Exception e0) {
                                ph.put("data", new Vector());
                            }
                        } else {
                            try {
                                if (SshResourceService.getI().isGroupforSshKey(ps,
                                        "" + jobprop.getExe().get("gridtype"))) {
                                    ph.put("data", SshResourceService.getI().getUserforHost(pUser,
                                            "" + jobprop.getExe().get("grid")));
                                } else {
                                    ph.put("data",
                                            ConfigHandler.getServices(
                                                    (List<Middleware>) ps.getAttribute("resources",
                                                            ps.APPLICATION_SCOPE),
                                                    (String) jobprop.getExe().get("gridtype"),
                                                    (String) jobprop.getExe().get("grid"),
                                                    (String) jobprop.getExe().get("resource")));
                                }
                            } catch (Exception e0) {
                                ph.put("data", new Vector());
                            }
                        }

                        if (!jobprop.getExe().get("gridtype").equals(EDGI)) {//filter out edgi config
                            resconf.put("jobmanager", ph);
                        }
                        ph = null;
                    } else if ((key.equals("params")) && (jobprop.getExe().get("gridtype").equals(GEMLCA))) {
                        // GEMLCA parameters
                        Vector vgparams = GemlcaCacheService.getInstance().getGLCparams(
                                "" + jobprop.getExe().get("grid"), "" + jobprop.getExe().get("resource"));
                        String ggparams = "";
                        if (ph.get("value") == null) {// default values
                            for (int x = 0; x < vgparams.size(); x++) {
                                ((HashMap) vgparams.get(x)).put("svalue",
                                        "" + ((HashMap) vgparams.get(x)).get("value"));
                                ((HashMap) vgparams.get(x)).put("nbr", "" + x);
                                ggparams += ((HashMap) vgparams.get(x)).get("value") + " ";
                            }
                            ph.put("value", ggparams.trim());
                        } else {
                            String[] gsparams = ((String) ph.get("value")).split(" ");
                            if (gsparams.length == vgparams.size()) {
                                for (int x = 0; x < vgparams.size(); x++) {
                                    ((HashMap) vgparams.get(x)).put("svalue", "" + gsparams[x]);
                                    ((HashMap) vgparams.get(x)).put("nbr", "" + x);
                                }
                            } else {// default values
                                for (int x = 0; x < vgparams.size(); x++) {
                                    ((HashMap) vgparams.get(x)).put("svalue",
                                            "" + ((HashMap) vgparams.get(x)).get("value"));
                                    ((HashMap) vgparams.get(x)).put("nbr", "" + x);
                                    ggparams += ((HashMap) vgparams.get(x)).get("value") + " ";
                                }
                                ph.put("value", ggparams.trim());
                            }
                        }
                        ph.put("gparams", vgparams);
                    }

                    //filter out edgi config
                    if (jobprop.getExe().get("gridtype").equals(EDGI)) {
                    }

                    if (ph != null) {
                        eparam.add(ph);
                    }
                }
            }

            if (resconf.size() > 0) {// sort 
                if (resconf.containsKey("gridtype")) {
                    isGridtype = eparam.size();
                    eparam.add(resconf.get("gridtype"));
                }
                if (resconf.containsKey("grid")) {
                    isGrid = eparam.size();
                    eparam.add(resconf.get("grid"));
                }
                if (resconf.containsKey("resource")) {
                    ((Hashtable) resconf.get("resource")).put("grid", "" + jobprop.getExe().get("grid"));
                    isResource = eparam.size();
                    eparam.add(resconf.get("resource"));
                }
                if (resconf.containsKey("jobmanager")) {
                    ((Hashtable) resconf.get("jobmanager")).put("grid", "" + jobprop.getExe().get("grid"));
                    isjobmanager = eparam.size();
                    eparam.add(resconf.get("jobmanager"));
                }
                resconf.clear();
            }
            //dynamic management of grid, resource, jobmanager
            //i= next parameter                
            if (isGridtype > -1) {
                if (isGrid > -1) {
                    ((Hashtable) eparam.get(isGrid)).put("egridt", isGridtype);
                    ((Hashtable) eparam.get(isGridtype)).put("i", isGrid);
                }
                if (isResource > -1) {
                    ((Hashtable) eparam.get(isResource)).put("egridt", isGridtype);
                }
                if (isjobmanager > -1) {
                    ((Hashtable) eparam.get(isjobmanager)).put("egridt", isGridtype);
                }
            }
            if (isGrid > -1) {
                if (isResource > -1) {
                    ((Hashtable) eparam.get(isResource)).put("egrid", isGrid);
                    ((Hashtable) eparam.get(isGrid)).put("i", isResource);
                }
                if (isjobmanager > -1) {
                    ((Hashtable) eparam.get(isjobmanager)).put("egrid", isGrid);
                }
            }
            if (isResource > -1) {
                if (isjobmanager > -1) {
                    ((Hashtable) eparam.get(isjobmanager)).put("eresource", isResource);
                    ((Hashtable) eparam.get(isResource)).put("i", "" + isjobmanager);
                }
            }

        }
    }
    //}
    // catch(Exception e){e.printStackTrace();}        
    return eparam;
}

From source file: org.atomserver.core.AbstractAtomCollection.java

/**
 * {@inheritDoc}
 */
public java.util.Collection<UpdateCreateOrDeleteEntry> updateEntries(final RequestContext request)
        throws AtomServerException {

    Document<Feed> document;
    try {
        document = request.getDocument();
    } catch (IOException e) {
        throw new AtomServerException(e);
    }

    if (document.getRoot().getEntries().size() > getMaxFullEntriesPerPage()) {
        throw new BadRequestException(MessageFormat.format("too many entries ({0}) in batch - max is {1}",
                document.getRoot().getEntries().size(), getMaxFullEntriesPerPage()));
    }

    final List<EntryTarget> entriesToUpdate = new ArrayList<EntryTarget>();
    final List<EntryTarget> entriesToDelete = new ArrayList<EntryTarget>();
    final EntryMap<String> entryXmlMap = new EntryMap<String>();
    final Map<EntryTarget, Entry> entryMap = new HashMap<EntryTarget, Entry>();
    final HashMap<EntryTarget, Integer> orderMap = new HashMap<EntryTarget, Integer>();

    Operation defaultOperationExtension = document.getRoot().getExtension(AtomServerConstants.OPERATION);
    String defaultOperation = defaultOperationExtension == null ? "update"
            : defaultOperationExtension.getType();

    List<Entry> entries = document.getRoot().getEntries();

    UpdateCreateOrDeleteEntry[] updateEntries = new UpdateCreateOrDeleteEntry[entries.size()];
    Set<RelaxedEntryTarget> relaxedEntryTargetSet = new HashSet<RelaxedEntryTarget>();

    int order = 0;
    for (Entry entry : entries) {
        try {
            IRI baseIri = new IRI(getServiceBaseUri());
            IRI iri = baseIri.relativize(entry.getLink("edit").getHref());
            EntryTarget entryTarget = null;
            try {
                // The request is always a PUT, so we will get back a FeedTarget when we want an insert
                URITarget uriTarget = getURIHandler().parseIRI(request, iri);
                if (uriTarget instanceof FeedTarget) {
                    entryTarget = new EntryTarget((FeedTarget) uriTarget);

                    // determine if we are creating the entryId -- i.e. if this was a POST
                    if (getEntryIdGenerator() == null) {
                        throw new AtomServerException("No EntryIdGenerator was wired into the Collection ("
                                + entryTarget.toString() + ")");
                    } else {
                        entryTarget.setEntryId(getEntryIdGenerator().generateId());
                    }

                } else {
                    entryTarget = (EntryTarget) uriTarget;
                }
            } catch (Exception e) {
                throw new BadRequestException("Bad request URI: " + iri, e);
            }
            if (entryTarget == null) {
                throw new BadRequestException("Bad request URI: " + iri);
            }

            String collection = entryTarget.getCollection();
            ensureCollectionExists(collection);

            // Verify that we do not have multiple <operation> elements
            List<Operation> operationExtensions = entry.getExtensions(AtomServerConstants.OPERATION);

            if (operationExtensions != null && operationExtensions.size() > 1) {
                throw new BadRequestException("Multiple operations applied to one entry");
            }

            // Set to the default operation if none is set.
            String operation = operationExtensions == null || operationExtensions.isEmpty() ? defaultOperation
                    : operationExtensions.get(0).getType();
            if (log.isDebugEnabled()) {
                log.debug("operation : " + operation);
            }

            // We do not allow an Entry to occur twice in the batch.
            //   NOTE: the first one wins !!
            RelaxedEntryTarget relaxedEntryTarget = new RelaxedEntryTarget(entryTarget);
            if (relaxedEntryTargetSet.contains(relaxedEntryTarget)) {
                throw new BadRequestException(
                        "You may not include the same Entry twice (" + entryTarget + ").");
            } else {
                relaxedEntryTargetSet.add(relaxedEntryTarget);
            }

            entryMap.put(entryTarget, entry);

            // Add to the processing lists.
            if ("delete".equalsIgnoreCase(operation)) {
                entriesToDelete.add(entryTarget);
                orderMap.put(entryTarget, order);
            } else if ("update".equalsIgnoreCase(operation) || "insert".equalsIgnoreCase(operation)) {
                String entryXml = validateAndPreprocessEntryContents(entry, entryTarget);
                entriesToUpdate.add(entryTarget);
                entryXmlMap.put(entryTarget, entryXml);
                orderMap.put(entryTarget, order);
                setTargetContentHashCode(entryTarget, entry, entryXml);
            }

        } catch (AtomServerException e) {
            UpdateCreateOrDeleteEntry.CreateOrUpdateEntry updateEntry = new UpdateCreateOrDeleteEntry.CreateOrUpdateEntry(
                    entry, false);
            updateEntry.setException(e);
            updateEntries[order] = updateEntry;
        }
        order++;
    }

    // update entry count
    if (getEntriesMonitor() != null) {
        getEntriesMonitor().updateNumberOfEntriesToUpdate(entries.size());
    }
    Abdera abdera = request.getServiceContext().getAbdera();

    // ---------------- process updates ------------------
    if (!entriesToUpdate.isEmpty()) {
        java.util.Collection<BatchEntryResult> results = executeTransactionally(
                new TransactionalTask<java.util.Collection<BatchEntryResult>>() {
                    public Collection<BatchEntryResult> execute() {
                        java.util.Collection<BatchEntryResult> results = modifyEntries(request,
                                entriesToUpdate);
                        for (BatchEntryResult result : results) {
                            boolean categoriesUpdated = false;
                            if (result.getMetaData() != null) {
                                categoriesUpdated = postProcessEntryContents(
                                        entryXmlMap.get(result.getMetaData()), result.getMetaData());
                            }
                            if (!result.isModified() && !categoriesUpdated) {
                                // Same contents and categories
                                if (getEntriesMonitor() != null) {
                                    getEntriesMonitor().updateNumberOfEntriesNotUpdatedDueToSameContent(1);
                                }
                                continue;
                            }
                            // if contents is the same but the categories have changed,
                            // go back and update the entry so that it'll have a new revision and timestamp.
                            if (!result.isModified()) {
                                EntryMetaDataStatus mdStatus = reModifyEntry(null, result.getEntryTarget());
                                // update the result to indicate Entry has been modified.
                                result.setMetaData(mdStatus.getEntryMetaData());
                                result.setModified(true);
                            }

                            if (result.getException() == null) {
                                String entryXml = entryXmlMap.get(result.getEntryTarget());
                                getContentStorage().putContent(entryXml, result.getMetaData());
                            }
                            if (getEntriesMonitor() != null) {
                                getEntriesMonitor().updateNumberOfEntriesActuallyUpdated(1);
                            }
                        }
                        return results;
                    }
                });

        for (BatchEntryResult result : results) {
            EntryMetaData metaData = result.getMetaData();
            if (metaData == null) {
                EntryTarget target = result.getEntryTarget().cloneWithNewRevision(URIHandler.REVISION_OVERRIDE);
                try {
                    metaData = getEntry(target);
                } catch (AtomServerException e) {
                    metaData = null;
                }
            }
            Entry entry = metaData == null ? newEntryWithCommonContentOnly(abdera, result.getEntryTarget())
                    : newEntry(abdera, metaData, EntryType.full);

            entry.addSimpleExtension(AtomServerConstants.ENTRY_UPDATED,
                    (result.isModified()) ? "true" : "false");
            if (metaData != null && metaData.getContentHashCode() != null) {
                entry.addSimpleExtension(AtomServerConstants.CONTENT_HASH, metaData.getContentHashCode());
            }

            UpdateCreateOrDeleteEntry.CreateOrUpdateEntry updateEntry = new UpdateCreateOrDeleteEntry.CreateOrUpdateEntry(
                    entry, metaData != null && metaData.isNewlyCreated());
            if (result.getException() != null) {
                updateEntry.setException(result.getException());
            }

            Integer listOrder = orderMap.get(result.getEntryTarget());
            if (listOrder == null) {
                // This should never happen....
                String msg = "Could not map (" + result.getEntryTarget() + ") in Batch Order Map";
                log.error(msg);
                throw new AtomServerException(msg);
            }
            updateEntries[listOrder] = updateEntry;
        }
    }

    // ---------------- process deletes ------------------
    if (!entriesToDelete.isEmpty()) {
        java.util.Collection<BatchEntryResult> results = executeTransactionally(
                new TransactionalTask<Collection<BatchEntryResult>>() {
                    public Collection<BatchEntryResult> execute() {
                        java.util.Collection<BatchEntryResult> results = deleteEntries(request,
                                entriesToDelete);
                        for (BatchEntryResult result : results) {
                            if (result.getException() == null) {
                                EntryMetaData entryMetaDataClone = (EntryMetaData) (result.getMetaData()
                                        .clone());
                                int currentRevision = result.getMetaData().getRevision();
                                entryMetaDataClone.setRevision((currentRevision - 1));
                                String deletedEntryXml = createDeletedEntryXML(entryMetaDataClone);

                                getContentStorage().deleteContent(deletedEntryXml, result.getMetaData());
                            }
                        }
                        return results;
                    }
                });

        for (BatchEntryResult result : results) {
            // TODO: WRONG!
            EntryMetaData metaData = result.getMetaData();
            UpdateCreateOrDeleteEntry.DeleteEntry deleteEntry = null;
            if (metaData == null) {
                Factory factory = AtomServer.getFactory(abdera);

                Entry entry = factory.newEntry();
                String workspace = result.getEntryTarget().getWorkspace();
                String collection = result.getEntryTarget().getCollection();
                String entryId = result.getEntryTarget().getEntryId();
                Locale locale = result.getEntryTarget().getLocale();
                String fileURI = getURIHandler().constructURIString(workspace, collection, entryId, locale);
                setEntryId(factory, entry, fileURI);

                setEntryTitle(factory, entry,
                        isLocalized() ? (" Entry: " + collection + " " + entryId + "." + locale)
                                : (" Entry: " + collection + " " + entryId));

                addAuthorToEntry(factory, entry, "AtomServer APP Service");

                addLinkToEntry(factory, entry, fileURI, "self");

                String editURL = fileURI + "/" + (result.getEntryTarget().getRevision() + 1);
                addLinkToEntry(factory, entry, editURL, "edit");

                deleteEntry = new UpdateCreateOrDeleteEntry.DeleteEntry(entry);
            } else {
                deleteEntry = new UpdateCreateOrDeleteEntry.DeleteEntry(
                        newEntry(abdera, metaData, EntryType.full));
            }
            if (result.getException() != null) {
                deleteEntry.setException(result.getException());
            }

            Integer listOrder = orderMap.get(result.getEntryTarget());
            if (listOrder == null) {
                // This should never happen....
                String msg = "Could not map (" + result.getEntryTarget() + ") in Batch Order Map";
                log.error(msg);
                throw new AtomServerException(msg);
            }
            updateEntries[listOrder] = deleteEntry;
        }
    }

    // Clear the maps to help out the Garbage Collector
    entryXmlMap.clear();
    entriesToUpdate.clear();
    entriesToDelete.clear();
    orderMap.clear();
    relaxedEntryTargetSet.clear();

    return Arrays.asList(updateEntries);
}

From source file: edu.ku.brc.specify.conversion.AgentConverter.java

/**
 * Specify 5.x points at AgentAddress instead of an Agent. The idea was to point at an Agent
 * and possibly a different address that represents what that person does. This was really
 * confusing, so we are changing it to point at an Agent instead.
 *
 * So that means we need to pull apart these relationships and have all foreign keys that point
 * to an AgentAddress now point at an Agent and we then need to add in the Agents and then add
 * the Address to the Agents.
 * 
 * The AgentAddress, Agent and Address (triple) can have a NULL Address but it cannot have a NULL
 * Agent. If there is a NULL Agent then this method will throw a RuntimeException.
 */
public boolean convertAgents(final boolean doFixAgents) {
    boolean debugAgents = false;

    log.debug("convert Agents");

    BasicSQLUtils.removeForeignKeyConstraints(newDBConn, BasicSQLUtils.myDestinationServerType);

    // Create the mappers here, but fill them in during the AgentAddress Process
    IdTableMapper agentIDMapper = idMapperMgr.addTableMapper("agent", "AgentID");
    IdTableMapper agentAddrIDMapper = idMapperMgr.addTableMapper("agentaddress", "AgentAddressID");

    agentIDMapper.setInitialIndex(4);

    if (shouldCreateMapTables) {
        log.info("Mapping Agent Ids");
        agentIDMapper.mapAllIds("SELECT AgentID FROM agent ORDER BY AgentID");
    }

    // Just like in the conversion of the CollectionObjects we need to build up our own
    // SELECT clause, because the metadata of column names returned from a query doesn't
    // include the table names for all columns; this is far more predictable.
    List<String> oldFieldNames = new ArrayList<String>();

    StringBuilder agtAdrSQL = new StringBuilder("SELECT ");
    List<String> agentAddrFieldNames = getFieldNamesFromSchema(oldDBConn, "agentaddress");
    agtAdrSQL.append(buildSelectFieldList(agentAddrFieldNames, "agentaddress"));
    agtAdrSQL.append(", ");
    GenericDBConversion.addNamesWithTableName(oldFieldNames, agentAddrFieldNames, "agentaddress");

    List<String> agentFieldNames = getFieldNamesFromSchema(oldDBConn, "agent");
    agtAdrSQL.append(buildSelectFieldList(agentFieldNames, "agent"));
    log.debug("MAIN: " + agtAdrSQL);
    agtAdrSQL.append(", ");
    GenericDBConversion.addNamesWithTableName(oldFieldNames, agentFieldNames, "agent");

    List<String> addrFieldNames = getFieldNamesFromSchema(oldDBConn, "address");
    log.debug(agtAdrSQL);
    agtAdrSQL.append(buildSelectFieldList(addrFieldNames, "address"));
    GenericDBConversion.addNamesWithTableName(oldFieldNames, addrFieldNames, "address");

    // Create a Map FROM the full table/fieldname to the index in the resultset (start at 1 not zero)
    HashMap<String, Integer> indexFromNameMap = new HashMap<String, Integer>();

    agtAdrSQL.append(
            " FROM agent INNER JOIN agentaddress ON agentaddress.AgentID = agent.AgentID INNER JOIN address ON agentaddress.AddressID = address.AddressID Order By agentaddress.AgentAddressID Asc");

    // These represent the New columns of Agent Table
    // So the order of the names are for the new table
    // the names reference the old table
    String[] agentColumns = { "agent.AgentID", "agent.TimestampModified", "agent.AgentType",
            "agentaddress.JobTitle", "agent.FirstName", "agent.LastName", "agent.MiddleInitial", "agent.Title",
            "agent.Interests", "agent.Abbreviation", "agentaddress.Email", "agentaddress.URL", "agent.Remarks",
            "agent.TimestampCreated", // User/Security changes
            "agent.ParentOrganizationID" };

    HashMap<Integer, AddressInfo> addressHash = new HashMap<Integer, AddressInfo>();

    // Create a HashMap to track which IDs have been handled during the conversion process
    try {
        log.info("Hashing Address Ids");

        Integer agentCnt = BasicSQLUtils.getCount(oldDBConn,
                "SELECT COUNT(AddressID) FROM address ORDER BY AddressID");

        // So first we hash each AddressID and the value is set to 0 (false)
        Statement stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);
        ResultSet rsX = stmtX
                .executeQuery("SELECT AgentAddressID, AddressID FROM agentaddress ORDER BY AgentAddressID");

        conv.setProcess(0, agentCnt);

        int cnt = 0;
        // Needed in case the AgentAddress table wasn't used.
        while (rsX.next()) {
            int agentAddrId = rsX.getInt(1);
            int addrId = rsX.getInt(2);
            addressHash.put(addrId, new AddressInfo(agentAddrId, addrId));

            if (cnt % 100 == 0) {
                conv.setProcess(0, cnt);
            }
            cnt++;
        }
        rsX.close();
        stmtX.close();

        conv.setProcess(0, 0);

        // Next we hash all the Agents and set their values to 0 (false)
        log.info("Hashing Agent Ids");
        stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(*) FROM agent ORDER BY AgentID");
        rsX = stmtX.executeQuery(
                "SELECT AgentID, AgentType, LastName, Name, FirstName FROM agent ORDER BY AgentID");

        conv.setProcess(0, agentCnt);

        cnt = 0;
        while (rsX.next()) {
            int agentId = rsX.getInt(1);
            agentHash.put(agentId, new AgentInfo(agentId, agentIDMapper.get(agentId), rsX.getByte(2),
                    rsX.getString(3), rsX.getString(4), rsX.getString(5)));
            if (cnt % 100 == 0) {
                conv.setProcess(0, cnt);
            }
            cnt++;
        }

        rsX.close();
        stmtX.close();

        conv.setProcess(0, 0);

        // Now we map all the Agents to their Addresses AND
        // All the Addresses to their Agents.
        //
        // NOTE: A single Address Record May be used by more than one Agent so
        // we will need to Duplicate the Address records later
        //
        log.info("Cross Mapping Agents and Addresses");

        String post = " FROM agentaddress WHERE AddressID IS NOT NULL and AgentID IS NOT NULL";
        agentCnt = BasicSQLUtils.getCount(oldDBConn, "SELECT COUNT(AgentAddressID)" + post);

        stmtX = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);

        String asSQL = "SELECT AgentAddressID, AgentID" + post;
        log.debug(asSQL);
        rsX = stmtX.executeQuery(asSQL);

        conv.setProcess(0, agentCnt);
        cnt = 0;
        // Needed in case the AgentAddress table wasn't used.
        while (rsX.next()) {
            int agentAddrId = rsX.getInt(1);
            int agentId = rsX.getInt(2);

            // ///////////////////////
            // Add Address to Agent
            // ///////////////////////
            AgentInfo agentInfo = agentHash.get(agentId);
            if (agentInfo == null) {
                String msg = "The AgentID [" + agentId + "] in AgentAddress table id[" + agentAddrId
                        + "] doesn't exist";
                log.error(msg);
                tblWriter.logError(msg);
            } else {
                agentInfo.add(agentAddrId, agentAddrId);
            }

            if (cnt % 100 == 0) {
                conv.setProcess(0, cnt);
            }
            cnt++;
        }
        rsX.close();
        stmtX.close();

        //dumpInfo("beforeInfo.txt", addressHash);

        conv.setProcess(0, 0);

        // It is OK for the Address to be NULL, but the Agent CANNOT be NULL
        log.info("Checking for null Agents");

        agentCnt = BasicSQLUtils.getCount(oldDBConn,
                "SELECT COUNT(AgentAddressID) FROM agentaddress a where AddressID IS NOT NULL and AgentID is null");
        // If there is a Single Record With a NULL Agent this would be BAD!
        if (agentCnt != null && agentCnt > 0) {
            showError("There are " + agentCnt
                    + " AgentAddress Records where the AgentID is null and the AddressId IS NOT NULL!");
        }

        // ////////////////////////////////////////////////////////////////////////////////
        // This does the part of AgentAddress where it has both an Address AND an Agent
        // ////////////////////////////////////////////////////////////////////////////////

        log.info(agtAdrSQL.toString());

        Statement stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                ResultSet.CONCUR_READ_ONLY);

        log.debug("AgentAddress: " + agtAdrSQL.toString());

        // Create Map of column name to column index number
        int inx = 1;
        for (String fldName : oldFieldNames) {
            // log.info("["+fldName+"] "+inx+" ["+rsmd.getColumnName(inx)+"]");
            indexFromNameMap.put(fldName, inx++);
        }
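        // With the map in place, values can be read by column name, e.g.
        // rs.getObject(indexFromNameMap.get("agent.LastName")), regardless of the column order in the SELECT.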

        Statement updateStatement = newDBConn.createStatement();

        // Figure out certain column indexes we will need later
        int agentIdInx = indexFromNameMap.get("agent.AgentID");
        int agentTypeInx = indexFromNameMap.get("agent.AgentType");
        int lastEditInx = indexFromNameMap.get("agent.LastEditedBy");
        int nameInx = indexFromNameMap.get("agent.Name");
        int lastNameInx = indexFromNameMap.get("agent.LastName");
        int firstNameInx = indexFromNameMap.get("agent.FirstName");

        int recordCnt = 0;
        ResultSet rs = stmt.executeQuery(agtAdrSQL.toString());
        while (rs.next()) {
            int agentAddressId = rs.getInt(1);
            int agentId = rs.getInt(agentIdInx);
            String lastEditedBy = rs.getString(lastEditInx);

            AgentInfo agentInfo = agentHash.get(agentId);

            // Deal with Agent FirstName, LastName and Name
            String lastName = rs.getString(lastNameInx);
            String name = rs.getString(nameInx);

            namePair.second = StringUtils.isNotEmpty(name) && StringUtils.isEmpty(lastName) ? name : lastName;
            namePair.first = rs.getString(firstNameInx);

            // Now tell the AgentAddress Mapper the New ID to the Old AgentAddressID
            if (shouldCreateMapTables) {
                agentAddrIDMapper.setShowLogErrors(false);
                if (debugAgents)
                    log.info(String.format("Map - agentAddressId (Old) %d  to Agent -> New ID: %d",
                            agentAddressId, agentInfo.getNewAgentId()));

                if (agentAddrIDMapper.get(agentAddressId) == null) {
                    agentAddrIDMapper.put(agentAddressId, agentInfo.getNewAgentId());
                } else {
                    log.debug(String.format("ERROR - agentAddressId %d  Already mapped to  New ID:  %d",
                            agentAddressId, agentInfo.getNewAgentId()));
                }
                agentAddrIDMapper.setShowLogErrors(true);
            }

            // Because of the old DB relationships we want to make sure we only add each agent
            // once, so start by checking the HashMap to see if it has already been added
            if (!agentInfo.wasAdded()) {
                agentInfo.setWasAdded(true);
                //agentInfo.addWrittenAddrOldId(addrInfo.getOldAddrId());

                BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "agent",
                        BasicSQLUtils.myDestinationServerType);

                // It has not been added yet so Add it
                StringBuilder sqlStr = new StringBuilder();
                sqlStr.append("INSERT INTO agent ");
                sqlStr.append(
                        "(AgentID, DivisionId, TimestampModified, AgentType, JobTitle, FirstName, LastName, MiddleInitial, ");
                sqlStr.append("Title, Interests, Abbreviation, Email, URL, Remarks, TimestampCreated, ");
                sqlStr.append("ParentOrganizationID, CreatedByAgentID, ModifiedByAgentID, Version)");
                sqlStr.append(" VALUES (");

                for (int i = 0; i < agentColumns.length; i++) {
                    if (i > 0) {
                        sqlStr.append(",");
                    }

                    if (i == 0) {
                        if (debugAgents)
                            log.info("Adding: " + agentColumns[i] + "  New ID: " + agentInfo.getNewAgentId());
                        sqlStr.append(agentInfo.getNewAgentId());
                        sqlStr.append(",");
                        sqlStr.append(conv.getCurDivisionID());

                    } else if (agentColumns[i].equals("agent.ParentOrganizationID")) {
                        Object obj = rs.getObject(indexFromNameMap.get(agentColumns[i]));
                        if (obj != null) {
                            int oldId = rs.getInt(agentColumns[i]);
                            Integer newID = agentIDMapper.get(oldId);
                            if (newID == null) {
                                log.error("Couldn't map ParentOrganizationID [" + oldId + "]");
                            }
                            sqlStr.append(BasicSQLUtils.getStrValue(newID));

                        } else {
                            sqlStr.append("NULL");
                        }

                    } else if (agentColumns[i].equals("agent.LastName") || agentColumns[i].equals("LastName")) {

                        int lastNameLen = 120;
                        String lstName = namePair.second;
                        lstName = lstName == null ? null
                                : lstName.length() <= lastNameLen ? lstName : lstName.substring(0, lastNameLen);
                        sqlStr.append(BasicSQLUtils.getStrValue(lstName));

                    } else if (agentColumns[i].equals("agent.FirstName")
                            || agentColumns[i].equals("FirstName")) {
                        sqlStr.append(BasicSQLUtils.getStrValue(namePair.first));

                    } else {
                        inx = indexFromNameMap.get(agentColumns[i]);
                        sqlStr.append(BasicSQLUtils.getStrValue(rs.getObject(inx)));
                    }
                }
                sqlStr.append("," + conv.getCreatorAgentIdForAgent(lastEditedBy) + ","
                        + conv.getModifiedByAgentIdForAgent(lastEditedBy) + ",0");
                sqlStr.append(")");

                try {
                    if (debugAgents) {
                        log.info(sqlStr.toString());
                    }
                    updateStatement.executeUpdate(sqlStr.toString(), Statement.RETURN_GENERATED_KEYS);

                    Integer newAgentId = BasicSQLUtils.getInsertedId(updateStatement);
                    if (newAgentId == null) {
                        throw new RuntimeException("Couldn't get the Agent's inserted ID");
                    }

                    //conv.addAgentDisciplineJoin(newAgentId, conv.getDisciplineId());

                } catch (SQLException e) {
                    log.error(sqlStr.toString());
                    log.error("Count: " + recordCnt);
                    e.printStackTrace();
                    log.error(e);
                    System.exit(0);
                    throw new RuntimeException(e);
                }

            }

            BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent",
                    BasicSQLUtils.myDestinationServerType);

            if (recordCnt % 250 == 0) {
                log.info("AgentAddress Records: " + recordCnt);
            }
            recordCnt++;
        } // while

        BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "address",
                BasicSQLUtils.myDestinationServerType);

        log.info("AgentAddress Records: " + recordCnt);
        rs.close();
        stmt.close();

        // ////////////////////////////////////////////////////////////////////////////////
        // This does the part of AgentAddress where it has JUST Agent
        // ////////////////////////////////////////////////////////////////////////////////
        log.info("******** Doing AgentAddress JUST Agent");

        int newRecordsAdded = 0;

        StringBuilder justAgentSQL = new StringBuilder();
        justAgentSQL.setLength(0);
        justAgentSQL.append("SELECT ");
        justAgentSQL.append(buildSelectFieldList(agentAddrFieldNames, "agentaddress"));
        justAgentSQL.append(", ");

        getFieldNamesFromSchema(oldDBConn, "agent", agentFieldNames);
        justAgentSQL.append(buildSelectFieldList(agentFieldNames, "agent"));

        justAgentSQL.append(
                " FROM agent INNER JOIN agentaddress ON agentaddress.AgentID = agent.AgentID WHERE agentaddress.AddressID IS NULL ORDER BY agentaddress.AgentAddressID ASC");

        log.info(justAgentSQL.toString());

        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(justAgentSQL.toString());

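        // The "agent only" query returns a different column layout, so the existing name list and
        // column-index map are emptied and rebuilt rather than allocating new collections.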
        oldFieldNames.clear();
        GenericDBConversion.addNamesWithTableName(oldFieldNames, agentAddrFieldNames, "agentaddress");
        GenericDBConversion.addNamesWithTableName(oldFieldNames, agentFieldNames, "agent");

        indexFromNameMap.clear();
        inx = 1;
        for (String fldName : oldFieldNames) {
            indexFromNameMap.put(fldName, inx++);
        }

        agentIdInx = indexFromNameMap.get("agent.AgentID");
        lastEditInx = indexFromNameMap.get("agent.LastEditedBy");
        agentTypeInx = indexFromNameMap.get("agent.AgentType");
        // The name indexes must also be refreshed for the new column layout.
        nameInx = indexFromNameMap.get("agent.Name");
        lastNameInx = indexFromNameMap.get("agent.LastName");

        recordCnt = 0;
        while (rs.next()) {
            byte agentType = rs.getByte(agentTypeInx);
            int agentAddressId = rs.getInt(1);
            int agentId = rs.getInt(agentIdInx);
            String lastEditedBy = rs.getString(lastEditInx);

            AgentInfo agentInfo = agentHash.get(agentId);

            // Now tell the AgentAddress Mapper the New ID to the Old AgentAddressID
            if (shouldCreateMapTables) {
                agentAddrIDMapper.put(agentAddressId, agentInfo.getNewAgentId());
            }

            recordCnt++;

            if (!agentInfo.wasAdded()) {
                agentInfo.setWasAdded(true);
                BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "agent",
                        BasicSQLUtils.myDestinationServerType);

                // Create Agent
                StringBuilder sqlStr = new StringBuilder("INSERT INTO agent ");
                sqlStr.append(
                        "(AgentID, DivisionID, TimestampModified, AgentType, JobTitle, FirstName, LastName, MiddleInitial, Title, Interests, ");
                sqlStr.append("Abbreviation, Email, URL, Remarks, TimestampCreated, ParentOrganizationID, ");
                sqlStr.append("CreatedByAgentID, ModifiedByAgentID, Version)");
                sqlStr.append(" VALUES (");
                for (int i = 0; i < agentColumns.length; i++) {
                    if (i > 0)
                        sqlStr.append(",");

                    if (i == 0) {
                        if (debugAgents)
                            log.info(agentColumns[i]);
                        sqlStr.append(agentInfo.getNewAgentId());
                        sqlStr.append(",");
                        sqlStr.append(conv.getCurDivisionID());

                    } else if (i == lastEditInx) {
                        // Skip the field

                    } else if (agentColumns[i].equals("agent.LastName")) {
                        if (debugAgents)
                            log.info(agentColumns[i]);
                        int srcColInx = agentType != 1 ? nameInx : lastNameInx;
                        String lName = BasicSQLUtils.getStrValue(rs.getObject(srcColInx));
                        sqlStr.append(lName);

                    } else {
                        if (debugAgents)
                            log.info(agentColumns[i]);
                        inx = indexFromNameMap.get(agentColumns[i]);
                        sqlStr.append(BasicSQLUtils.getStrValue(rs.getObject(inx)));
                    }
                }
                sqlStr.append("," + conv.getCreatorAgentIdForAgent(lastEditedBy) + ","
                        + conv.getModifiedByAgentIdForAgent(lastEditedBy) + ", 0"); // '0' is Version
                sqlStr.append(")");

                try {
                    if (debugAgents) {
                        log.info(sqlStr.toString());
                    }
                    updateStatement.executeUpdate(sqlStr.toString(), Statement.RETURN_GENERATED_KEYS);

                    Integer newAgentId = BasicSQLUtils.getInsertedId(updateStatement);
                    if (newAgentId == null) {
                        throw new RuntimeException("Couldn't get the Agent's inserted ID");
                    }

                    newRecordsAdded++;

                } catch (SQLException e) {
                    log.error(sqlStr.toString());
                    log.error("Count: " + recordCnt);
                    e.printStackTrace();
                    log.error(e);
                    throw new RuntimeException(e);
                }

            }

            if (recordCnt % 250 == 0) {
                log.info("AgentAddress (Agent Only) Records: " + recordCnt);
            }
        } // while
        log.info("AgentAddress (Agent Only) Records: " + recordCnt + "  newRecordsAdded " + newRecordsAdded);

        rs.close();

        updateStatement.close();

        conv.setProcess(0, BasicSQLUtils.getNumRecords(oldDBConn, "agent"));
        conv.setDesc("Adding Agents");

        // Now Copy all the Agents that were part of the AgentAddress conversion
        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery("SELECT AgentID FROM agent");
        recordCnt = 0;
        while (rs.next()) {
            Integer agentId = rs.getInt(1);
            AgentInfo agentInfo = agentHash.get(agentId);
            if (agentInfo == null || !agentInfo.wasAdded()) {
                copyAgentFromOldToNew(agentId, agentIDMapper);
            }
            recordCnt++;
            if (recordCnt % 50 == 0) {
                conv.setProcess(recordCnt);
            }
        }

        conv.setProcess(recordCnt);
        BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent",
                BasicSQLUtils.myDestinationServerType);

        //------------------------------------------------------------
        // Now Copy all the Agents that were missed
        //------------------------------------------------------------
        conv.setProcess(0);
        stmt = oldDBConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery("SELECT AgentID FROM agent");
        recordCnt = 0;
        while (rs.next()) {
            Integer agentId = rs.getInt(1);
            Integer newId = agentIDMapper.get(agentId);
            if (newId != null) {
                Integer isThere = BasicSQLUtils.getCount(newDBConn,
                        "SELECT COUNT(*) FROM agent WHERE AgentID = " + newId);
                if (isThere == null || isThere == 0) {
                    copyAgentFromOldToNew(agentId, agentIDMapper);
                }
            } else {
                tblWriter.logError("Mapping missing for old Agent id[" + agentId + "]");
            }
            recordCnt++;
            if (recordCnt % 50 == 0) {
                conv.setProcess(recordCnt);
            }
        }
        conv.setProcess(recordCnt);

        if (doFixAgents) {
            fixAgentsLFirstLastName();
        }

        //----------------------------------------------------------------------------------------------------------------------------------
        // Now loop through the Agents hash and write the addresses. If an address has already been written,
        // it will need to be duplicated in the second step.
        //----------------------------------------------------------------------------------------------------------------------------------
        StringBuilder sqlStr1 = new StringBuilder("INSERT INTO address ");
        sqlStr1.append(
                "(TimestampModified, Address, Address2, City, State, Country, PostalCode, Remarks, TimestampCreated, ");
        sqlStr1.append(
                "IsPrimary, IsCurrent, Phone1, Phone2, Fax, RoomOrBuilding, PositionHeld, AgentID, CreatedByAgentID, ModifiedByAgentID, Version, Ordinal)");
        sqlStr1.append(" VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");

        PreparedStatement pStmt = newDBConn.prepareStatement(sqlStr1.toString(),
                Statement.RETURN_GENERATED_KEYS);

        //                               1                2         3        4        5           6            7              8                9          10      11           12                13            14            15
        String addrSQL = "SELECT a.TimestampModified, a.Address, a.City, a.State, a.Country, a.Postalcode, a.Remarks, a.TimestampCreated, aa.Phone1, aa.Phone2, aa.Fax, aa.RoomOrBuilding , aa.IsCurrent, a.LastEditedBy, aa.JobTitle "
                + "FROM address AS a "
                + "INNER JOIN agentaddress AS aa ON a.AddressID = aa.AddressID WHERE aa.AgentAddressID = %d";

        BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "address",
                BasicSQLUtils.myDestinationServerType);

        int fixCnt = 0;
        for (AgentInfo agentInfo : agentHash.values()) {
            HashMap<Integer, Integer> addrs = agentInfo.getAddrs();

            for (Integer oldAgentAddrId : addrs.keySet()) {
                String adrSQL = String.format(addrSQL, oldAgentAddrId);
                rs = stmt.executeQuery(adrSQL);
                if (!rs.next()) {
                    rs.close();
                    continue;
                }

                String lastEditedBy = rs.getString(14);
                String posHeld = rs.getString(15);
                if (posHeld != null && posHeld.length() > 32) {
                    posHeld = posHeld.substring(0, 32);
                }

                String addr1 = rs.getString(2);
                String addr2 = null;
                if (addr1 != null && addr1.length() > 255) {
                    addr2 = addr1.substring(255); // keep the overflow before truncating addr1
                    addr1 = addr1.substring(0, 255);
                }

                pStmt.setTimestamp(1, rs.getTimestamp(1));
                pStmt.setString(2, addr1);
                pStmt.setString(3, addr2); // Address 2
                pStmt.setString(4, rs.getString(3));
                pStmt.setString(5, rs.getString(4));
                pStmt.setString(6, rs.getString(5));
                pStmt.setString(7, rs.getString(6));
                pStmt.setString(8, rs.getString(7));
                pStmt.setTimestamp(9, rs.getTimestamp(8));
                pStmt.setBoolean(10, rs.getByte(13) != 0);
                pStmt.setBoolean(11, rs.getByte(13) != 0);
                pStmt.setString(12, rs.getString(9));
                pStmt.setString(13, rs.getString(10));
                pStmt.setString(14, rs.getString(11));
                pStmt.setString(15, rs.getString(12));
                pStmt.setString(16, posHeld);
                pStmt.setInt(17, agentInfo.getNewAgentId());
                pStmt.setInt(18, conv.getCreatorAgentIdForAgent(lastEditedBy));
                pStmt.setInt(19, conv.getModifiedByAgentIdForAgent(lastEditedBy));
                pStmt.setInt(20, 0);

                pStmt.setInt(21, agentInfo.addrOrd);

                //agentInfo.addWrittenAddrOldId(addrInfo.getOldAddrId());

                agentInfo.addrOrd++;

                rs.close();

                try {
                    if (debugAgents) {
                        log.info(sqlStr1.toString());
                    }

                    if (pStmt.executeUpdate() != 1) {
                        log.error("Error inserting address.");
                    } else {
                        fixCnt++;
                    }

                    // The generated key is only available after the insert has executed.
                    Integer newID = BasicSQLUtils.getInsertedId(pStmt);
                    log.debug(String.format("Saved New Id %d", newID));
                    //addrInfo.setWasAdded(true);

                } catch (SQLException e) {
                    log.error(sqlStr1.toString());
                    log.error("Count: " + recordCnt);
                    e.printStackTrace();
                    log.error(e);
                    throw new RuntimeException(e);
                }
            }
        }
        log.info(String.format("Added %d new Addresses", fixCnt));

        pStmt.close();

        //------------------------------------------------------------------
        // Step #2 - Now duplicate the addresses for the agents that had 
        // already been written to the database
        //------------------------------------------------------------------

        /*fixCnt = 0;
        for (AgentInfo agentInfo : agentHash.values())
        {
        for (Integer oldAgentAddrId : agentInfo.getUnwrittenOldAddrIds())
        {
            Integer     oldAddrId = agentInfo.getAddrs().get(oldAgentAddrId);
            //AddressInfo addrInfo  = addressHash.get(oldAddrId);
            System.out.println(String.format("%d  %d", oldAgentAddrId, oldAddrId));
            //duplicateAddress(newDBConn, addrInfo.getOldAddrId(), addrInfo.getNewAddrId(), agentInfo.getNewAgentId());
        }
        }
        log.info(String.format("Duplicated %d new Addresses", fixCnt));
        */

        //----------------------------------------------------------------------------------------------------------------------------------
        // Now loop through the Agents hash and write the addresses. If an address has already been written,
        // it will need to be duplicated in the second step.
        //----------------------------------------------------------------------------------------------------------------------------------
        /*BasicSQLUtils.setIdentityInsertONCommandForSQLServer(newDBConn, "address", BasicSQLUtils.myDestinationServerType);
                
        sqlStr1 = new StringBuilder("INSERT INTO address ");
        sqlStr1.append("(TimestampModified, Address, Address2, City, State, Country, PostalCode, Remarks, TimestampCreated, ");
        sqlStr1.append("IsPrimary, IsCurrent, Phone1, Phone2, Fax, RoomOrBuilding, AgentID, CreatedByAgentID, ModifiedByAgentID, Version, Ordinal)");
        sqlStr1.append(" VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
                
        pStmt = newDBConn.prepareStatement(sqlStr1.toString());
                
        //                               1                2         3        4        5           6            7              8                9          10      11           12                13            14                 15
        String addrOnlySQL = "SELECT aa.TimestampModified, a.Address, a.City, a.State, a.Country, a.Postalcode, a.Remarks, aa.TimestampCreated, aa.Phone1, aa.Phone2, aa.Fax, aa.RoomOrBuilding , aa.IsCurrent, a.LastEditedBy, aa.AgentID " +
                         "FROM agentaddress AS aa " +
                         "LEFT JOIN address AS a ON a.AddressID = aa.AddressID " +
                         "WHERE a.addressID IS NULL AND aa.AgentID IS NOT NULL";
                
        fixCnt = 0;
        rs = stmt.executeQuery(addrOnlySQL);
        while (rs.next())
        {
        int agentId    = rs.getInt(15);
        int newAgentId = agentIDMapper.get(agentId);
                
        String lastEditedBy = rs.getString(14);
                
        pStmt.setTimestamp(1, rs.getTimestamp(1));
        pStmt.setString(2,    rs.getString(2));
        pStmt.setString(3,    null);                 // Address 2
        pStmt.setString(4,    rs.getString(3));
        pStmt.setString(5,    rs.getString(4));
        pStmt.setString(6,    rs.getString(5));
        pStmt.setString(7,    rs.getString(6));
        pStmt.setString(8,    rs.getString(7));
        pStmt.setTimestamp(9, rs.getTimestamp(8));
        pStmt.setBoolean(10,  rs.getByte(13) != 0);
        pStmt.setBoolean(11,  rs.getByte(13) != 0);
        pStmt.setString(12,   rs.getString(9));
        pStmt.setString(13,   rs.getString(10));
        pStmt.setString(14,   rs.getString(11));
        pStmt.setString(15,   rs.getString(12));
        pStmt.setInt(16,      newAgentId);
        pStmt.setInt(17,      conv.getCreatorAgentIdForAgent(lastEditedBy));
        pStmt.setInt(18,      conv.getModifiedByAgentIdForAgent(lastEditedBy));
        pStmt.setInt(19,      0);
        pStmt.setInt(20,      1);
                
        try
        {
            if (debugAgents)
            {
                log.info(sqlStr1.toString());
            }
                    
            if (pStmt.executeUpdate() != 1)
            {
                log.error("Error inserting address.)");
            } else
            {
                fixCnt++;
            }
                
        } catch (SQLException e)
        {
            log.error(sqlStr1.toString());
            log.error("Count: " + recordCnt);
            e.printStackTrace();
            log.error(e);
            throw new RuntimeException(e);
        }
        }
        rs.close();
        log.info(String.format("Added %d new Addresses", fixCnt));
                
        pStmt.close();*/

        stmt.close();

        //dumpInfo("afterInfo.txt", addressHash);

        BasicSQLUtils.setIdentityInsertOFFCommandForSQLServer(newDBConn, "agent",
                BasicSQLUtils.myDestinationServerType);

        return true;

    } catch (SQLException ex) {
        log.error(ex);
        ex.printStackTrace();
        System.exit(0);
        throw new RuntimeException(ex);
    }
}
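
The converter above clears and refills a single HashMap (indexFromNameMap) each time the column layout of its source query changes, instead of allocating a fresh map. Below is a minimal, self-contained sketch of that clear-and-rebuild pattern; the class name and the column lists are illustrative placeholders and are not taken from the converter.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class ColumnIndexMapSketch {
    public static void main(String[] args) {
        // Map of column name -> 1-based column index, reused across queries.
        HashMap<String, Integer> indexFromNameMap = new HashMap<String, Integer>();

        // Column layout of a first (hypothetical) query.
        List<String> firstQueryColumns = Arrays.asList("agent.AgentID", "agent.LastName", "agent.FirstName");
        int inx = 1;
        for (String fldName : firstQueryColumns) {
            indexFromNameMap.put(fldName, inx++);
        }
        System.out.println("agent.LastName -> " + indexFromNameMap.get("agent.LastName")); // prints 2

        // A second query returns a different layout, so the same map is emptied with
        // clear() and rebuilt instead of allocating a new HashMap.
        List<String> secondQueryColumns = Arrays.asList("agentaddress.AgentAddressID", "agent.AgentID",
                "agent.LastName");
        indexFromNameMap.clear();
        inx = 1;
        for (String fldName : secondQueryColumns) {
            indexFromNameMap.put(fldName, inx++);
        }
        System.out.println("agent.LastName -> " + indexFromNameMap.get("agent.LastName")); // prints 3
    }
}

Reusing one map this way keeps lookups such as rs.getObject(indexFromNameMap.get(colName)) consistent with whichever query is currently being read, at the cost of having to remember to call clear() before repopulating.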

From source file:edu.mit.mobile.android.locast.sync.SyncEngine.java

/**
 * @param toSync
 * @param account
 * @param extras
 * @param provider
 * @param syncResult
 * @return true if the item was sync'd successfully. Soft errors will cause this to return
 *         false.
 * @throws RemoteException
 * @throws SyncException
 * @throws JSONException
 * @throws IOException
 * @throws NetworkProtocolException
 * @throws NoPublicPath
 * @throws OperationApplicationException
 * @throws InterruptedException
 */
public boolean sync(Uri toSync, Account account, Bundle extras, ContentProviderClient provider,
        SyncResult syncResult) throws RemoteException, SyncException, JSONException, IOException,
        NetworkProtocolException, NoPublicPath, OperationApplicationException, InterruptedException {

    String pubPath = null;

    //
    // Handle http or https uris separately. These require the
    // destination uri.
    //
    if ("http".equals(toSync.getScheme()) || "https".equals(toSync.getScheme())) {
        pubPath = toSync.toString();

        if (!extras.containsKey(EXTRA_DESTINATION_URI)) {
            throw new IllegalArgumentException("missing EXTRA_DESTINATION_URI when syncing HTTP URIs");
        }
        toSync = Uri.parse(extras.getString(EXTRA_DESTINATION_URI));
    }

    final String type = provider.getType(toSync);
    final boolean isDir = type.startsWith(CONTENT_TYPE_PREFIX_DIR);

    final boolean manualSync = extras.getBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, false);

    // skip any items already sync'd
    if (!manualSync && mLastUpdated.isUpdatedRecently(toSync)) {
        if (DEBUG) {
            Log.d(TAG, "not syncing " + toSync + " as it's been updated recently");
        }
        syncResult.stats.numSkippedEntries++;
        return false;
    }

    // the sync map will convert the json data to ContentValues
    final SyncMap syncMap = MediaProvider.getSyncMap(provider, toSync);

    final Uri toSyncWithoutQuerystring = toSync.buildUpon().query(null).build();

    final HashMap<String, SyncStatus> syncStatuses = new HashMap<String, SyncEngine.SyncStatus>();
    final ArrayList<ContentProviderOperation> cpo = new ArrayList<ContentProviderOperation>();
    final LinkedList<String> cpoPubUris = new LinkedList<String>();

    //
    // first things first, upload any content that needs to be
    // uploaded.
    //

    try {
        uploadUnpublished(toSync, account, provider, syncMap, syncStatuses, syncResult);

        if (Thread.interrupted()) {
            throw new InterruptedException();
        }

        // this should ensure that all items have a pubPath when we
        // query it below.

        if (pubPath == null) {
            // we should avoid calling this too much as it
            // can be expensive
            pubPath = MediaProvider.getPublicPath(mContext, toSync);
        }
    } catch (final NoPublicPath e) {
        // TODO this is a special case and this is probably not the best place to handle this.
        // Ideally, this should be done in such a way as to reduce any extra DB queries -
        // perhaps by doing a join with the parent.
        if (syncMap.isFlagSet(SyncMap.FLAG_PARENT_MUST_SYNC_FIRST)) {
            if (DEBUG) {
                Log.d(TAG, "skipping " + toSync + " whose parent hasn't been sync'd first");
            }
            syncResult.stats.numSkippedEntries++;
            return false;
        }

        // if it's an item, we can handle it.
        if (isDir) {
            throw e;
        }
    }

    if (pubPath == null) {

        // this should have been updated already by the initial
        // upload, so something must be wrong
        throw new SyncException("never got a public path for " + toSync);
    }

    if (DEBUG) {
        Log.d(TAG, "sync(toSync=" + toSync + ", account=" + account + ", extras=" + extras + ", manualSync="
                + manualSync + ",...)");
        Log.d(TAG, "pubPath: " + pubPath);
    }

    final long request_time = System.currentTimeMillis();

    HttpResponse hr = mNetworkClient.get(pubPath);

    final long response_time = System.currentTimeMillis();

    // the time compensation below allows a time-based synchronization to function even if the
    // local clock is entirely wrong. The server's time is extracted using the Date header and
    // all comparisons are made relative to the respective clock's own reference. Any data stored
    // on the mobile side is kept relative to the local clock, and the server does the same with
    // its own clock.
    long serverTime;

    try {
        serverTime = getServerTime(hr);
    } catch (final DateParseException e) {
        Log.w(TAG, "could not retrieve date from server. Using local time, which may be incorrect.", e);
        serverTime = System.currentTimeMillis();
    }

    // TODO check out
    // http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html
    final long response_delay = response_time - request_time;
    if (DEBUG) {
        Log.d(TAG, "request took " + response_delay + "ms");
    }
    final long localTime = request_time;

    // add this to the server time to get the local time
    final long localOffset = (localTime - serverTime);

    if (Math.abs(localOffset) > 30 * 60 * 1000) {
        Log.w(TAG, "local clock is off by " + localOffset + "ms");
    }
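    // Worked example (illustrative numbers): if the request went out at local time 10:00:05 and the
    // server's Date header parses to 10:00:00, localOffset is +5,000 ms; adding it to any server
    // timestamp expresses that instant on the local clock.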

    if (Thread.interrupted()) {
        throw new InterruptedException();
    }

    final HttpEntity ent = hr.getEntity();

    String selection;
    String selectionInverse;
    String[] selectionArgs;

    if (isDir) {

        final JSONArray ja = new JSONArray(StreamUtils.inputStreamToString(ent.getContent()));
        ent.consumeContent();

        final int len = ja.length();
        selectionArgs = new String[len];

        // build the query to see which items are already in the
        // database
        final StringBuilder sb = new StringBuilder();

        sb.append("(");

        for (int i = 0; i < len; i++) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }

            final SyncStatus syncStatus = loadItemFromJsonObject(ja.getJSONObject(i), syncMap, serverTime);

            syncStatuses.put(syncStatus.remote, syncStatus);

            selectionArgs[i] = syncStatus.remote;

            // add in a placeholder for the query
            sb.append('?');
            if (i != (len - 1)) {
                sb.append(',');
            }

        }
        sb.append(")");

        final String placeholders = sb.toString();
        selection = JsonSyncableItem._PUBLIC_URI + " IN " + placeholders;
        selectionInverse = JsonSyncableItem._PUBLIC_URI + " NOT IN " + placeholders;
    } else {

        final JSONObject jo = new JSONObject(StreamUtils.inputStreamToString(ent.getContent()));
        ent.consumeContent();
        final SyncStatus syncStatus = loadItemFromJsonObject(jo, syncMap, serverTime);

        syncStatuses.put(syncStatus.remote, syncStatus);

        selection = JsonSyncableItem._PUBLIC_URI + "=?";
        selectionInverse = JsonSyncableItem._PUBLIC_URI + "!=?";
        selectionArgs = new String[] { syncStatus.remote };
    }

    // first check without the querystring. This will ensure that we
    // properly mark things that we already have in the database.
    final Cursor check = provider.query(toSyncWithoutQuerystring, SYNC_PROJECTION, selection, selectionArgs,
            null);

    // these items are on both sides
    try {
        final int pubUriCol = check.getColumnIndex(JsonSyncableItem._PUBLIC_URI);
        final int idCol = check.getColumnIndex(JsonSyncableItem._ID);

        // All the items in this cursor should be found on both
        // the client and the server.
        for (check.moveToFirst(); !check.isAfterLast(); check.moveToNext()) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }

            final long id = check.getLong(idCol);
            final Uri localUri = ContentUris.withAppendedId(toSync, id);

            final String pubUri = check.getString(pubUriCol);

            final SyncStatus itemStatus = syncStatuses.get(pubUri);

            itemStatus.state = SyncState.BOTH_UNKNOWN;

            itemStatus.local = localUri;

            // make the status searchable by both remote and
            // local uri
            syncStatuses.put(localUri.toString(), itemStatus);
        }
    } finally {
        check.close();
    }

    Cursor c = provider.query(toSync, SYNC_PROJECTION, selection, selectionArgs, null);

    // these items are on both sides
    try {
        final int pubUriCol = c.getColumnIndex(JsonSyncableItem._PUBLIC_URI);
        final int localModifiedCol = c.getColumnIndex(JsonSyncableItem._MODIFIED_DATE);
        final int serverModifiedCol = c.getColumnIndex(JsonSyncableItem._SERVER_MODIFIED_DATE);
        final int idCol = c.getColumnIndex(JsonSyncableItem._ID);

        // All the items in this cursor should be found on both
        // the client and the server.
        for (c.moveToFirst(); !c.isAfterLast(); c.moveToNext()) {
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }

            final long id = c.getLong(idCol);
            final Uri localUri = ContentUris.withAppendedId(toSync, id);

            final String pubUri = c.getString(pubUriCol);

            final SyncStatus itemStatus = syncStatuses.get(pubUri);

            if (itemStatus.state == SyncState.ALREADY_UP_TO_DATE
                    || itemStatus.state == SyncState.NOW_UP_TO_DATE) {
                if (DEBUG) {
                    Log.d(TAG, localUri + "(" + pubUri + ")" + " is already up to date.");
                }
                continue;
            }

            itemStatus.local = localUri;

            // make the status searchable by both remote and local uri
            syncStatuses.put(localUri.toString(), itemStatus);

            // last modified as stored in the DB, in phone time
            final long itemLocalModified = c.getLong(localModifiedCol);

            // last modified as stored in the DB, in server time
            final long itemServerModified = c.getLong(serverModifiedCol);
            final long localAge = localTime - itemLocalModified;

            final long remoteAge = serverTime - itemStatus.remoteModifiedTime;

            final long ageDifference = Math.abs(localAge - remoteAge);
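            // Comparing ages (how long ago each side was modified, by its own clock) rather than the
            // raw timestamps keeps the decision below meaningful even when the two clocks disagree.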

            // up to date, as far as remote -> local goes
            if (itemServerModified == itemStatus.remoteModifiedTime) {
                itemStatus.state = SyncState.ALREADY_UP_TO_DATE;
                if (DEBUG) {
                    Log.d(TAG, pubUri + " is up to date.");
                }

                // need to download
            } else if (localAge > remoteAge) {
                if (DEBUG) {
                    final long serverModified = itemStatus.remoteModifiedTime;

                    Log.d(TAG,
                            pubUri + " : local is " + ageDifference + "ms older ("
                                    + android.text.format.DateUtils.formatDateTime(mContext, itemLocalModified,
                                            FORMAT_ARGS_DEBUG)
                                    + ") than remote (" + android.text.format.DateUtils.formatDateTime(mContext,
                                            serverModified, FORMAT_ARGS_DEBUG)
                                    + "); updating local copy...");
                }

                itemStatus.state = SyncState.REMOTE_DIRTY;

                final ContentProviderOperation.Builder b = ContentProviderOperation.newUpdate(localUri);

                // update this so it's in the local timescale
                correctServerOffset(itemStatus.remoteCVs, JsonSyncableItem._CREATED_DATE,
                        JsonSyncableItem._CREATED_DATE, localOffset);
                correctServerOffset(itemStatus.remoteCVs, JsonSyncableItem._SERVER_MODIFIED_DATE,
                        JsonSyncableItem._MODIFIED_DATE, localOffset);

                b.withValues(itemStatus.remoteCVs);
                b.withExpectedCount(1);

                cpo.add(b.build());
                cpoPubUris.add(pubUri);

                syncResult.stats.numUpdates++;

                // need to upload
            } else if (localAge < remoteAge) {
                if (DEBUG) {
                    final long serverModified = itemStatus.remoteModifiedTime;

                    Log.d(TAG,
                            pubUri + " : local is " + ageDifference + "ms newer ("
                                    + android.text.format.DateUtils.formatDateTime(mContext, itemLocalModified,
                                            FORMAT_ARGS_DEBUG)
                                    + ") than remote (" + android.text.format.DateUtils.formatDateTime(mContext,
                                            serverModified, FORMAT_ARGS_DEBUG)
                                    + "); publishing to server...");
                }
                itemStatus.state = SyncState.LOCAL_DIRTY;

                mNetworkClient.putJson(pubPath, JsonSyncableItem.toJSON(mContext, localUri, c, syncMap));
            }

            mLastUpdated.markUpdated(localUri);

            syncResult.stats.numEntries++;
        } // end for
    } finally {

        c.close();
    }

    /*
     * Apply updates in bulk
     */
    if (cpo.size() > 0) {
        if (DEBUG) {
            Log.d(TAG, "applying " + cpo.size() + " bulk updates...");
        }

        final ContentProviderResult[] r = provider.applyBatch(cpo);
        if (DEBUG) {
            Log.d(TAG, "Done applying updates. Running postSync handler...");
        }

        for (int i = 0; i < r.length; i++) {
            final ContentProviderResult res = r[i];
            final SyncStatus ss = syncStatuses.get(cpoPubUris.get(i));
            if (ss == null) {
                Log.e(TAG, "can't get sync status for " + res.uri);
                continue;
            }
            syncMap.onPostSyncItem(mContext, account, ss.local, ss.remoteJson,
                    res.count != null ? res.count == 1 : true);

            ss.state = SyncState.NOW_UP_TO_DATE;
        }

        if (DEBUG) {
            Log.d(TAG, "done running postSync handler.");
        }

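        // The operation list and its parallel list of public URIs are reused for the insert pass
        // below, so both are emptied here rather than re-allocated.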
        cpo.clear();
        cpoPubUris.clear();
    }

    if (Thread.interrupted()) {
        throw new InterruptedException();
    }

    /*
     * Look through the SyncState.state values and find ones that need to be stored.
     */

    for (final Map.Entry<String, SyncStatus> entry : syncStatuses.entrySet()) {
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }

        final String pubUri = entry.getKey();
        final SyncStatus status = entry.getValue();
        if (status.state == SyncState.REMOTE_ONLY) {
            if (DEBUG) {
                Log.d(TAG, pubUri + " is not yet stored locally, adding...");
            }

            // update this so it's in the local timescale
            correctServerOffset(status.remoteCVs, JsonSyncableItem._CREATED_DATE,
                    JsonSyncableItem._CREATED_DATE, localOffset);
            correctServerOffset(status.remoteCVs, JsonSyncableItem._SERVER_MODIFIED_DATE,
                    JsonSyncableItem._MODIFIED_DATE, localOffset);

            final ContentProviderOperation.Builder b = ContentProviderOperation.newInsert(toSync);
            b.withValues(status.remoteCVs);

            cpo.add(b.build());
            cpoPubUris.add(pubUri);
            syncResult.stats.numInserts++;

        }
    }

    /*
     * Execute the content provider operations in bulk.
     */
    if (cpo.size() > 0) {
        if (DEBUG) {
            Log.d(TAG, "bulk inserting " + cpo.size() + " items...");
        }
        final ContentProviderResult[] r = provider.applyBatch(cpo);
        if (DEBUG) {
            Log.d(TAG, "applyBatch completed. Processing results...");
        }

        int successful = 0;
        for (int i = 0; i < r.length; i++) {
            final ContentProviderResult res = r[i];
            if (res.uri == null) {
                syncResult.stats.numSkippedEntries++;
                Log.e(TAG, "result from content provider bulk operation returned null");
                continue;
            }
            final String pubUri = cpoPubUris.get(i);
            final SyncStatus ss = syncStatuses.get(pubUri);

            if (ss == null) {
                syncResult.stats.numSkippedEntries++;
                Log.e(TAG, "could not find sync status for " + cpoPubUris.get(i));
                continue;
            }

            ss.local = res.uri;
            if (DEBUG) {
                Log.d(TAG, "onPostSyncItem(" + res.uri + ", ...); pubUri: " + pubUri);
            }

            syncMap.onPostSyncItem(mContext, account, res.uri, ss.remoteJson,
                    res.count != null ? res.count == 1 : true);

            ss.state = SyncState.NOW_UP_TO_DATE;
            successful++;
        }
        if (DEBUG) {
            Log.d(TAG, successful + " batch inserts successfully applied.");
        }
    } else {
        if (DEBUG) {
            Log.d(TAG, "no updates to perform.");
        }
    }

    /**
     * Look through all the items that we didn't already find on the server side, but which
     * still have a public uri. They should be checked to make sure they're not deleted.
     */
    c = provider.query(toSync, SYNC_PROJECTION,
            ProviderUtils.addExtraWhere(selectionInverse, JsonSyncableItem._PUBLIC_URI + " NOT NULL"),
            selectionArgs, null);

    try {
        final int idCol = c.getColumnIndex(JsonSyncableItem._ID);
        final int pubUriCol = c.getColumnIndex(JsonSyncableItem._PUBLIC_URI);

        cpo.clear();

        for (c.moveToFirst(); !c.isAfterLast(); c.moveToNext()) {
            final String pubUri = c.getString(pubUriCol);
            SyncStatus ss = syncStatuses.get(pubUri);

            final Uri item = isDir ? ContentUris.withAppendedId(toSyncWithoutQuerystring, c.getLong(idCol))
                    : toSync;

            if (ss == null) {
                ss = syncStatuses.get(item.toString());
            }

            if (DEBUG) {
                Log.d(TAG, item + " was not found in the main list of items on the server (" + pubPath
                        + "), but appears to be a child of " + toSync);

                if (ss != null) {
                    Log.d(TAG, "found sync status for " + item + ": " + ss);
                }
            }

            if (ss != null) {
                switch (ss.state) {
                case ALREADY_UP_TO_DATE:
                case NOW_UP_TO_DATE:
                    if (DEBUG) {
                        Log.d(TAG, item + " is already up to date. No need to see if it was deleted.");
                    }
                    continue;

                case BOTH_UNKNOWN:
                    if (DEBUG) {
                        Log.d(TAG,
                                item + " was found on both sides, but has an unknown sync status. Skipping...");
                    }
                    continue;

                default:

                    Log.w(TAG, "got an unexpected state for " + item + ": " + ss);
                }

            } else {
                ss = new SyncStatus(pubUri, SyncState.LOCAL_ONLY);
                ss.local = item;

                hr = mNetworkClient.head(pubUri);

                switch (hr.getStatusLine().getStatusCode()) {
                case 200:
                    if (DEBUG) {
                        Log.d(TAG, "HEAD " + pubUri + " returned 200");
                    }
                    ss.state = SyncState.BOTH_UNKNOWN;
                    break;

                case 404:
                    if (DEBUG) {
                        Log.d(TAG, "HEAD " + pubUri + " returned 404. Deleting locally...");
                    }
                    ss.state = SyncState.DELETED_REMOTELY;
                    final ContentProviderOperation deleteOp = ContentProviderOperation
                            .newDelete(ContentUris.withAppendedId(toSyncWithoutQuerystring, c.getLong(idCol)))
                            .build();
                    cpo.add(deleteOp);

                    break;

                default:
                    syncResult.stats.numIoExceptions++;
                    Log.w(TAG, "HEAD " + pubUri + " got unhandled result: " + hr.getStatusLine());
                }
            }
            syncStatuses.put(pubUri, ss);
        } // for cursor

        if (cpo.size() > 0) {
            final ContentProviderResult[] results = provider.applyBatch(cpo);

            for (final ContentProviderResult result : results) {
                if (result.count != 1) {
                    throw new SyncException("Error deleting item");
                }
            }
        }

    } finally {
        c.close();
    }

    syncStatuses.clear();

    mLastUpdated.markUpdated(toSync);

    return true;
}
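
The sync method above reuses its working collections across passes: cpo and cpoPubUris are emptied with clear() after the bulk update pass so the same lists can collect the inserts, and syncStatuses is cleared once the whole pass is done. The sketch below isolates that reuse pattern without any Android dependencies; the applyBatch helper and the status strings are placeholders, not part of the SyncEngine API.

import java.util.ArrayList;
import java.util.HashMap;

public class BatchReuseSketch {
    // Placeholder for a bulk-apply call such as ContentProviderClient.applyBatch().
    private static void applyBatch(ArrayList<String> ops) {
        System.out.println("applying " + ops.size() + " operation(s): " + ops);
    }

    public static void main(String[] args) {
        HashMap<String, String> statusByUri = new HashMap<String, String>();
        ArrayList<String> pendingOps = new ArrayList<String>();

        // Pass 1: queue updates, apply them, then empty the list so it can be reused.
        pendingOps.add("update:/item/1");
        statusByUri.put("/item/1", "REMOTE_DIRTY");
        applyBatch(pendingOps);
        pendingOps.clear();

        // Pass 2: queue inserts against the same, now empty, list.
        pendingOps.add("insert:/item/2");
        statusByUri.put("/item/2", "REMOTE_ONLY");
        applyBatch(pendingOps);
        pendingOps.clear();

        // Once the whole pass is finished the per-item statuses are no longer needed.
        statusByUri.clear();
        System.out.println("statuses remaining: " + statusByUri.size()); // prints 0
    }
}

Clearing and reusing the same ArrayList and HashMap avoids re-allocating them on every pass and, just as in the method above, makes it explicit at which point the previously collected operations and statuses stop being relevant.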