Example usage for org.hibernate Session clear

List of usage examples for org.hibernate Session clear

Introduction

On this page you can find example usage for org.hibernate Session clear.

Prototype

void clear();

Document

Completely clear the session. Evict all loaded instances and cancel all pending saves, updates and deletions. Do not close open iterators or instances of ScrollableResults.
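
The usual reason to call clear() is batch processing: without periodic clearing, every entity the session saves or loads stays in its first-level cache, and memory grows with the size of the job. The following minimal sketch shows that pattern; it is not taken from the examples below, and the sessionFactory field, the mapped Item entity, and the batch size of 50 are all assumptions for illustration.

// Minimal batch-insert sketch (hypothetical sessionFactory and Item entity).
// flush() executes the queued SQL; clear() then evicts every instance so the
// session's first-level cache does not grow without bound.
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
try {
    for (int i = 0; i < 100000; i++) {
        session.save(new Item("item-" + i));
        if (i % 50 == 0) { // a batch size matching hibernate.jdbc.batch_size works best
            session.flush();
            session.clear();
        }
    }
    tx.commit();
} finally {
    session.close();
}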

Usage

From source file:com.job5156.jsDateJoin.event.InputCompanyData.java

public static void saveDate(Session localSession, List list) {
    // copy the list into an array for EntityManager's batch API
    Object[] obj = list.toArray();

    EntityManager.batchSave(obj, localSession);
    localSession.flush(); // push the batched inserts to the database
    localSession.clear(); // then detach everything to free session memory
}
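
Note the order of the two calls: flush() first pushes the batched statements to the database, and only then does clear() detach the saved objects. Calling clear() alone would discard any pending, unflushed changes. The other saveDate variants on this page follow the same pattern.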

From source file:com.job5156.jsDateJoin.event.InputCompanyTO29.java

public static void runCompanyDate(Session localsession, Session session29) {
    String hqlCount = "SELECT COUNT(u.id) FROM unit u"; // ORDER BY is pointless (and invalid on some databases) in a COUNT query
    String hql = "SELECT u FROM unit u ORDER BY u.id";

    int allRecord = EntityManager.getAllEntityNumberByHql(hqlCount, localsession);
    int dataLoadNumber = 2000;
    int allPage = allRecord > 0 ? 1 : 0;
    if (allRecord > dataLoadNumber) {
        allPage = allRecord / dataLoadNumber + ((allRecord % dataLoadNumber == 0) ? 0 : 1);
    }

    for (int m = 5; m < allPage; m++) { // starts at page 5, apparently resuming an earlier run
        System.out.println("Processing page " + m);
        int k = 0;
        List list = EntityManager.getEntityByHqlAndStartRecords(hql, localsession, m * dataLoadNumber,
                dataLoadNumber);
        if (list != null && list.size() > 0) {
            for (int i = 0; i < list.size(); i++) {
                unit u = (unit) list.get(i);
                String name = StringUtil.getNotNullStr(u.getName());
                boolean flag = isTrue(name, 1, session29);

                if (flag) {
                    try {
                        // create and save the ComBaseInfo record
                        System.out.println("Processing record " + i);
                        ComBaseInfo cbi = getCBI(u);
                        EntityManager.saveEntity(cbi, session29);
                        int comID = StringUtil.parseInt(cbi.getId());

                        // create and save the ComUserInfo record
                        ComUserInfo cui = getCUI(u, comID, session29);
                        EntityManager.saveEntity(cui, session29);
                        k++;
                        if (k > 100) {
                            // every 100 saves, detach everything to keep the session small;
                            // note the original clears without flushing first
                            session29.clear();
                            k = 0;
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                        log.error("Failed to migrate record " + (m * dataLoadNumber + i), e);
                    }

                }

                try {
                    Thread.sleep(500); // throttle: pause half a second between records
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // restore the interrupt flag
                    e.printStackTrace();
                }
            }
        }
    }
}

From source file:com.job5156.jsDateJoin.event.InputCompanyTO29.java

public static void saveDate(Session session29, List list) {
    // copy the list into an array for EntityManager's batch API
    Object[] obj = list.toArray();

    EntityManager.batchSave(obj, session29);
    session29.flush(); // push the batched inserts to the database
    session29.clear(); // then detach everything to free session memory
}

From source file:com.job5156.jsDateJoin.event.sqlTomysql.java

public static void run(Session localsession) {
    Connection con = null;
    try {
        // connect to the source SQL Server database
        Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
        con = DriverManager.getConnection("jdbc:sqlserver://218.94.11.46:1433;DatabaseName=rc", "njzt",
                "njztnjztnjzt");

        //         Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
        //         con = DriverManager.getConnection(
        //               "jdbc:sqlserver://CHITONE-486:1433;DatabaseName=test11",
        //               "sa", "123");

        int allRecord = getRecordCount(con);
        int dataLoadNumber = 2000;
        int allPage = allRecord > 0 ? 1 : 0;
        if (allRecord > dataLoadNumber) {
            allPage = allRecord / dataLoadNumber + ((allRecord % dataLoadNumber == 0) ? 0 : 1);
        }
        //allPage = 1;

        for (int m = 49; m < allPage; m++) { // starts at page 49, apparently resuming an earlier run
            System.out.println("sqlToMysql: processing page " + m);
            List list = getResumeData(localsession, con, dataLoadNumber, m * dataLoadNumber);
            if (list != null && list.size() > 0) {
                saveDate(localsession, list);
                list.clear(); // only clear when a page was actually returned, avoiding an NPE
            }
            localsession.flush();
            localsession.clear();
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.out.println("sqlToMysql failed: " + e.getMessage());
    } finally {
        if (con != null) {
            try {
                con.close(); // release the JDBC connection
            } catch (SQLException se) {
                se.printStackTrace();
            }
        }
    }
}

From source file:com.job5156.jsDateJoin.event.sqlTomysql.java

public static void saveDate(Session localsession, List list) {
    if (list != null && list.size() > 0) {
        Object[] obj = list.toArray();

        EntityManager.batchSave(obj, localsession);
        localsession.flush();
        localsession.clear();
    }
}

From source file:com.knowbout.epg.processor.ScheduleParser.java

License:Apache License

public void parseSchedule(InputStream stream, Configuration config) throws IOException {
    this.config = config;
    loadLineups();
    processChannels();
    processNetworks();
    Date firstSchedule = processSchedules(stream);
    TransactionManager.beginTransaction();
    log.info("About to delete from NetworkSchedule.");
    NetworkSchedule.deleteAfter(firstSchedule);
    log.info("Done deleting from NetworkSchedule. Now deleting from Schedule");
    int deleted = Schedule.deleteAfter(firstSchedule);
    log.info("Deleted schedules after " + firstSchedule + ", " + deleted + " total schedules.");
    int count = 0;
    int newSchedules = 0;
    List<NetworkLineup> lineups = NetworkLineup.selectSearchableLineups();
    for (ChannelSchedule schedule : programSchedules.values()) {
        //Now if it is a split channel (east & west coast) then dump any show that does not show on both
        schedule.removeSingleProgramming();
        int airings = schedule.getValidAirings().size();
        if (airings > 0) {
            newSchedules += airings;
            createSchedule(schedule, lineups, false);
        }
        if (++count % 50 == 0) {
            Session session = HibernateUtil.currentSession();
            session.flush();
            session.clear();
        }
        if (count % 1000 == 0) {
            log.debug("Processed " + count + " programs for a total of " + newSchedules + " schedules");
        }
    }
    log.info("Added " + newSchedules + " new schedules (note this these may be on one or more lineups)");
    log.info("Processing special SDTW-C lineup to preserve local data for cc data processing");
    Session session = HibernateUtil.currentSession();
    NetworkLineup networkLineup = (NetworkLineup) session.get(NetworkLineup.class, SAN_DIEGO_TW_CABLE);
    if (networkLineup != null) {
        count = 0;
        List<NetworkLineup> lineupList = new ArrayList<NetworkLineup>();
        lineupList.add(networkLineup);
        newSchedules = 0;
        for (ChannelSchedule schedule : programSchedules.values()) {
            int airings = schedule.getPacificAirings().size();
            airings += schedule.getSingleStationAirings().size();
            if (airings > 0) {
                createSchedule(schedule, lineupList, true);
                newSchedules += airings;
            }
            if (++count % 50 == 0) {
                session.flush();
                session.clear();
            }
            if (count % 1000 == 0) {
                log.debug("Processed " + count + " programs for a total of " + newSchedules + " schedules");
            }
        }
    }
    TransactionManager.commitTransaction();
    log.info("Finished processing special SDTW-C lineup to preserve local data for cc data processing");
}
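
Here the session is flushed and cleared every 50 processed programs, a common cadence for long-running imports: frequent enough to keep the first-level cache small while the surrounding transaction stays open.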

From source file:com.krawler.dao.BaseDAO.java

License:Open Source License

/**
 * Executes an update query using the provided hql, query parameters and
 * paging parameters
 * 
 * @param hql
 *            Query to execute
 * @param params
 *            Query parameters
 * @param pagingParam
 *            paging parameters
 * @return the number of rows affected by the update
 */
public int executeUpdatePaging(final String hql, final Object[] params, final Integer[] pagingParam) {
    int numRow = 0;
    numRow = (Integer) getHibernateTemplate().execute(new HibernateCallback() {

        public Object doInHibernate(Session session) {
            int numRows = 0;
            Query query = session.createQuery(hql);
            if (params != null) {
                for (int i = 0; i < params.length; i++) {
                    query.setParameter(i, params[i]);
                }
            }
            query.setFirstResult(pagingParam[0]);
            query.setMaxResults(pagingParam[1]);
            numRows = query.executeUpdate();
            session.flush();
            session.clear();
            return numRows;
        }
    });

    return numRow;
}
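
In this DAO the flush()/clear() pair serves a different purpose than in the batch-insert examples: a bulk executeUpdate() goes straight to the database and bypasses the session's first-level cache, so clearing the session afterwards prevents later reads in the same session from returning stale, already-loaded instances.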

From source file:com.krawler.spring.importFunctionality.ImportUtil.java

License:Open Source License

/**
 * @param requestParams
 * @param txnManager
 * @param kwlCommonTablesDAOObj
 * @param importDao
 * @param fieldManagerDAOobj
 * @return JSONObject summarizing the validation result
 */
public static JSONObject validateFileData(HashMap<String, Object> requestParams,
        HibernateTransactionManager txnManager, kwlCommonTablesDAO kwlCommonTablesDAOObj, ImportDAO importDao,
        fieldManagerDAO fieldManagerDAOobj) {
    JSONObject jobj = new JSONObject();
    String msg = "";
    boolean issuccess = true;

    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("import_Tx");
    def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
    TransactionStatus status = txnManager.getTransaction(def);
    boolean commitedEx = false;

    int total = 0, failed = 0, fileSize = 0;
    String fileName = "", extn = "";
    Modules module = null;
    String exceededLimit = "no", channelName = "";
    try {
        String companyid = (String) requestParams.get("companyid");
        String mode = (String) requestParams.get("modName");
        fileName = (String) requestParams.get("filename");
        extn = fileName.substring(fileName.lastIndexOf(".") + 1);
        channelName = "/ValidateFile/" + fileName;

        Object extraObj = requestParams.get("extraObj");
        JSONObject extraParams = (JSONObject) requestParams.get("extraParams");

        String jsondata = (String) requestParams.get("resjson");
        JSONObject rootcsvjobj = new JSONObject(jsondata);
        JSONArray mapping = rootcsvjobj.getJSONArray("root");

        String dateFormat = null, dateFormatId = (String) requestParams.get("dateFormat");
        if (extn.equalsIgnoreCase("csv") && !StringUtil.isNullOrEmpty(dateFormatId)) {
            KWLDateFormat kdf = (KWLDateFormat) kwlCommonTablesDAOObj
                    .getClassObject(KWLDateFormat.class.getName(), dateFormatId);
            dateFormat = kdf != null ? kdf.getJavaForm() : null;
        }

        String classPath = "", primaryKey = "", uniqueKeyMethodName = "", uniqueKeyHbmName = "";
        try {
            List list = importDao.getModuleObject(mode);
            module = (Modules) list.get(0); //Will throw null pointer if no module entry found
        } catch (Exception ex) {
            throw new DataInvalidateException("Column config not available for module " + mode);
        }

        try {
            classPath = module.getPojoClassPathFull().toString();
            primaryKey = module.getPrimaryKey_MethodName().toString();
        } catch (Exception ex) {
            throw new DataInvalidateException("Please set proper properties for module " + mode);
        }
        uniqueKeyMethodName = module.getUniqueKey_MethodName();
        uniqueKeyHbmName = module.getUniqueKey_HbmName();

        JSONArray columnConfig = getModuleColumnConfig1(module.getId(), companyid, fieldManagerDAOobj,
                module.getModuleName(), false);
        String tableName = importDao.getTableName(fileName);
        KwlReturnObject kresult = importDao.getFileData(tableName, new HashMap<String, Object>());
        List fileDataList = kresult.getEntityList();
        Iterator itr = fileDataList.iterator();

        importDao.markRecordValidation(tableName, -1, 1, "", ""); //reset all invalidation
        JSONArray recordJArr = new JSONArray(), columnsJArr = new JSONArray(), DataJArr = new JSONArray();
        if (itr.hasNext()) { // the first row holds the column headers
            Object[] fileData = (Object[]) itr.next();
            JSONObject jtemp = new JSONObject();
            jtemp.put("header", "Row No.");
            jtemp.put("dataIndex", "col0");
            jtemp.put("width", 50);
            columnsJArr.put(jtemp);

            for (int i = 1; i < fileData.length - 3; i++) { // skip the id column at index 0 and the trailing isvalid/validationlog columns
                jtemp = new JSONObject();
                jtemp.put("header", fileData[i] == null ? "" : fileData[i].toString());
                jtemp.put("dataIndex", "col" + i);
                columnsJArr.put(jtemp);
            }

            jtemp = new JSONObject();
            jtemp.put("header", "Validation Log");
            //                jtemp.put("hidden", true);
            jtemp.put("dataIndex", "validateLog");
            columnsJArr.put(jtemp);

            //Create record Obj for grid's store
            for (int i = 0; i < fileData.length - 1; i++) {
                jtemp = new JSONObject();
                jtemp.put("name", "col" + i);
                recordJArr.put(jtemp);
            }
            jtemp = new JSONObject();
            jtemp.put("name", "validateLog");
            recordJArr.put(jtemp);
        }

        try {
            jobj.put("record", recordJArr);
            jobj.put("columns", columnsJArr);
            jobj.put("data", DataJArr);
            jobj.put("count", failed);
            jobj.put("valid", 0);
            jobj.put("totalrecords", total);
            jobj.put("isHeader", true);
            jobj.put("finishedValidation", false);
            ServerEventManager.publish(channelName, jobj.toString(),
                    (ServletContext) requestParams.get("servletContext"));
        } catch (Exception ex) {
            throw ex;
        }

        fileSize = fileDataList.size() - 1;
        fileSize = fileSize >= IMPORT_LIMIT ? IMPORT_LIMIT : fileSize; // fileSize used for showing progress bar[Client Side]

        jobj.put("isHeader", false);
        int recIndex = 0;
        Session session = txnManager.getSessionFactory().getCurrentSession();
        int batchCounter = 0;
        while (itr.hasNext()) {
            Object[] fileData = (Object[]) itr.next();
            tempFileData = fileData;
            recIndex = (Integer) fileData[0];
            HashMap<String, Object> dataMap = new HashMap<String, Object>();
            HashMap<String, Object> columnHeaderMap = new HashMap<String, Object>();
            HashMap<String, Object> columnCSVindexMap = new HashMap<String, Object>();
            JSONArray customfield = new JSONArray();
            for (int k = 0; k < mapping.length(); k++) {
                JSONObject mappingJson = mapping.getJSONObject(k);
                String datakey = mappingJson.getString("columnname");
                Object dataValue = cleanHTML((String) fileData[mappingJson.getInt("csvindex") + 1]); //+1 for id column at index-0
                dataMap.put(datakey, dataValue);
                columnHeaderMap.put(datakey, mappingJson.getString("csvheader"));
                columnCSVindexMap.put(datakey, mappingJson.getInt("csvindex") + 1);
            }

            for (int j = 0; j < extraParams.length(); j++) {
                String datakey = (String) extraParams.names().get(j);
                Object dataValue = extraParams.get(datakey);
                dataMap.put(datakey, dataValue);
            }

            try {
                if (total >= IMPORT_LIMIT) {
                    exceededLimit = "yes";
                    break;
                }
                //Update processing status at client side
                if (total > 0 && total % 10 == 0) {
                    try {
                        ServerEventManager.publish(channelName,
                                "{parsedCount:" + total + ",invalidCount:" + failed + ", fileSize:" + fileSize
                                        + ", finishedValidation:false}",
                                (ServletContext) requestParams.get("servletContext"));
                    } catch (Exception ex) {
                        throw ex;
                    }
                }

                //                    CheckUniqueRecord(requestParams, dataMap, classPath, uniqueKeyMethodName, uniqueKeyHbmName);
                validateDataMAP2(requestParams, dataMap, columnConfig, customfield, columnHeaderMap,
                        columnCSVindexMap, dateFormat, importDao, new HashMap<String, String>());
            } catch (Exception ex) {
                failed++;
                String errorMsg = ex.getMessage(), invalidColumns = "";
                try {
                    JSONObject errorLog = new JSONObject(errorMsg);
                    errorMsg = errorLog.getString("errorMsg");
                    invalidColumns = errorLog.getString("invalidColumns");
                } catch (JSONException jex) {
                }

                importDao.markRecordValidation(tableName, recIndex, 0, errorMsg, invalidColumns);
                if (batchCounter % 30 == 0) {
                    session.flush();
                    session.clear();
                }
                batchCounter++;
                JSONObject jtemp = new JSONObject();
                if (tempFileData != null) {
                    for (int i = 0; i < tempFileData.length - 2; i++) {
                        jtemp.put("col" + i, tempFileData[i] == null ? "" : tempFileData[i].toString());
                    }
                } else {
                    for (int i = 0; i < fileData.length - 2; i++) {
                        jtemp.put("col" + i, fileData[i] == null ? "" : fileData[i].toString());
                    }
                }
                jtemp.put("invalidcolumns", invalidColumns);
                jtemp.put("validateLog", errorMsg);
                DataJArr.put(jtemp);

                //                    try {
                //                        jtemp.put("count", failed);
                //                        jtemp.put("totalrecords", total+1);
                //                        jtemp.put("fileSize", fileSize);
                //                        jtemp.put("finishedValidation", false);
                //                        ServerEventManager.publish(channelName, jtemp.toString(), (ServletContext) requestParams.get("servletContext"));
                //                    } catch(Exception dex) {
                //                        throw dex;
                //                    }
            }
            total++;
        }

        int success = total - failed;
        if (total == 0) {
            msg = "Empty file.";
        } else if (success == 0) {
            msg = "All the records are invalid.";
        } else if (success == total) {
            msg = "All the records are valid.";
        } else {
            msg = "" + success + " valid record" + (success > 1 ? "s" : "") + "";
            msg += (failed == 0 ? "." : " and " + failed + " invalid record" + (failed > 1 ? "s" : "") + ".");
        }

        jobj.put("record", recordJArr);
        jobj.put("columns", columnsJArr);
        jobj.put("data", DataJArr);
        jobj.put("count", failed);
        jobj.put("valid", success);
        jobj.put("totalrecords", total);

        try {
            ServerEventManager.publish(channelName,
                    "{parsedCount:" + total + ",invalidCount:" + failed + ", fileSize:" + fileSize
                            + ", finishedValidation:true}",
                    (ServletContext) requestParams.get("servletContext"));
        } catch (Exception ex) {
            throw ex;
        }

        try {
            txnManager.commit(status);
        } catch (Exception ex) {
            commitedEx = true;
            throw ex;
        }
    } catch (Exception e) {
        if (!commitedEx) { //if exception occurs during commit then dont call rollback
            txnManager.rollback(status);
        }
        issuccess = false;
        msg = "" + e.getMessage();
        Logger.getLogger(ImportHandler.class.getName()).log(Level.SEVERE, null, e);
    } finally {
        try {
            jobj.put("success", issuccess);
            jobj.put("msg", msg);
            jobj.put("exceededLimit", exceededLimit);
        } catch (JSONException ex) {
            Logger.getLogger(ImportHandler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return jobj;
}

From source file:com.krawler.spring.importFunctionality.ImportUtil.java

License:Open Source License

/**
 * @param requestParams
 * @param txnManager
 * @param KwlCommonTablesDAOObj
 * @param importDao
 * @param fieldManagerDAOobj
 * @return JSONObject summarizing the import result
 */
public static JSONObject importFileData(HashMap<String, Object> requestParams,
        HibernateTransactionManager txnManager, kwlCommonTablesDAO KwlCommonTablesDAOObj, ImportDAO importDao,
        fieldManagerDAO fieldManagerDAOobj) {

    JSONObject jobj = new JSONObject();
    String msg = "";
    boolean issuccess = true;

    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("import_Tx");
    def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
    TransactionStatus status = txnManager.getTransaction(def);
    boolean commitedEx = false;

    int total = 0, failed = 0;
    String fileName = "", tableName = "", extn = "";
    Modules module = null;

    try {
        String moduleID = "";
        String mode = (String) requestParams.get("modName");
        fileName = (String) requestParams.get("filename");
        String companyid = (String) requestParams.get("companyid");
        extn = fileName.substring(fileName.lastIndexOf(".") + 1);
        StringBuilder failedRecords = new StringBuilder();

        String dateFormat = null, dateFormatId = (String) requestParams.get("dateFormat");
        if (extn.equalsIgnoreCase("csv") && !StringUtil.isNullOrEmpty(dateFormatId)) {
            KWLDateFormat kdf = (KWLDateFormat) KwlCommonTablesDAOObj
                    .getClassObject(KWLDateFormat.class.getName(), dateFormatId);
            dateFormat = kdf != null ? kdf.getJavaForm() : null;
        }

        Object extraObj = requestParams.get("extraObj");
        JSONObject extraParams = (JSONObject) requestParams.get("extraParams");

        String jsondata = (String) requestParams.get("resjson");
        JSONObject rootcsvjobj = new JSONObject(jsondata);
        JSONArray mapping = rootcsvjobj.getJSONArray("root");

        String classPath = "", primaryKey = "", uniqueKeyMethodName = "", uniqueKeyHbmName = "";
        try {
            List list = importDao.getModuleObject(mode);
            module = (Modules) list.get(0); //Will throw null pointer if no module entry found
        } catch (Exception ex) {
            throw new DataInvalidateException("Column config not available for module " + mode);
        }

        try {
            classPath = module.getPojoClassPathFull().toString();
            primaryKey = module.getPrimaryKey_MethodName().toString();
            moduleID = module.getId();
        } catch (Exception ex) {
            throw new DataInvalidateException("Please set proper properties for module " + mode);
        }
        uniqueKeyMethodName = module.getUniqueKey_MethodName();
        uniqueKeyHbmName = module.getUniqueKey_HbmName();

        JSONArray columnConfig = getModuleColumnConfig1(module.getId(), companyid, fieldManagerDAOobj,
                module.getModuleName(), true);
        tableName = importDao.getTableName(fileName);
        HashMap<String, Object> filterParams = new HashMap<String, Object>();
        //            filterParams.put("isvalid", 1); //To fetch valid records
        KwlReturnObject kresult = importDao.getFileData(tableName, filterParams); //Fetch all valid records
        List fileDataList = kresult.getEntityList();
        Iterator itr = fileDataList.iterator();
        if (itr.hasNext()) {
            Object[] fileData = (Object[]) itr.next(); //Skip header row
            failedRecords.append(createCSVrecord(fileData) + "\"Error Message\"");//failedRecords.append("\"Row No.\","+createCSVrecord(fileData)+"\"Error Message\"");
        }
        int recIndex = 0;
        importDao.markRecordValidation(tableName, -1, 1, "", ""); //reset all invalidation
        int batchCounter = 0;
        Session session = txnManager.getSessionFactory().getCurrentSession();

        /* Auto-number custom column changes */
        String customdataclasspath = "";
        int intModuleId = 0;
        if (moduleID.equals(Constants.MODULEID_LEAD)) {
            intModuleId = Constants.Crm_lead_moduleid;
            customdataclasspath = Constants.Crm_lead_custom_data_classpath;
        } else if (moduleID.equals(Constants.MODULEID_ACCOUNT)) {
            intModuleId = Constants.Crm_account_moduleid;
            customdataclasspath = Constants.Crm_account_custom_data_classpath;
        } else if (moduleID.equals(Constants.MODULEID_CONTACT)) {
            intModuleId = Constants.Crm_contact_moduleid;
            customdataclasspath = Constants.Crm_contact_custom_data_classpath;
        } else if (moduleID.equals(Constants.MODULEID_OPPORTUNITY)) {
            intModuleId = Constants.Crm_opportunity_moduleid;
            customdataclasspath = Constants.Crm_opportunity_custom_data_classpath;
        } else if (moduleID.equals(Constants.MODULEID_CASE)) {
            intModuleId = Constants.Crm_case_moduleid;
            customdataclasspath = Constants.Crm_case_custom_data_classpath;
        } else if (moduleID.equals(Constants.MODULEID_PRODUCT)) {
            intModuleId = Constants.Crm_product_moduleid;
            customdataclasspath = Constants.Crm_product_custom_data_classpath;
        }
        List autoNoFieldName = new ArrayList();
        HashMap<String, String> autoNoMap = new HashMap<String, String>();
        HashMap<String, Object> fieldrequestParams = new HashMap<String, Object>();
        fieldrequestParams.put("isexport", true);
        fieldrequestParams.put("filter_names", Arrays.asList("companyid", "moduleid", "fieldtype"));
        fieldrequestParams.put("filter_values",
                Arrays.asList(companyid, intModuleId, Constants.CUSTOM_FIELD_AUTONUMBER));
        KwlReturnObject AutoNoFieldMap = fieldManagerDAOobj.getFieldParams(fieldrequestParams);
        if (AutoNoFieldMap.getEntityList().size() != 0) {
            List<FieldParams> autNoList = AutoNoFieldMap.getEntityList();
            for (FieldParams obj : autNoList) {
                String maxNo = fieldManagerDAOobj.getMaxAutoNumber(
                        Constants.Custom_column_Prefix + obj.getColnum(), customdataclasspath, companyid,
                        obj.getPrefix(), obj.getSuffix());
                Integer maxNumber = Integer.parseInt(maxNo) + 1;
                autoNoMap.put(obj.getFieldname(), maxNumber.toString());
                autoNoFieldName.add(obj.getFieldname());
                autoNoMap.put(obj.getFieldname() + "_" + Constants.CUSTOM_FIELD_PREFIX, obj.getPrefix());
                autoNoMap.put(obj.getFieldname() + "_" + Constants.CUSTOM_FIELD_SUFFIX, obj.getSuffix());
            }
        }
        // End
        while (itr.hasNext()) {
            total++;
            Object[] fileData = (Object[]) itr.next();
            recIndex = (Integer) fileData[0];
            HashMap<String, Object> dataMap = new HashMap<String, Object>();
            HashMap<String, Object> columnHeaderMap = new HashMap<String, Object>();
            HashMap<String, Object> columnCSVindexMap = new HashMap<String, Object>();
            JSONArray customfield = new JSONArray();
            for (int k = 0; k < mapping.length(); k++) {
                JSONObject mappingJson = mapping.getJSONObject(k);
                String datakey = mappingJson.getString("columnname");
                Object dataValue = cleanHTML(fileData[mappingJson.getInt("csvindex") + 1] == null ? null
                        : String.valueOf(fileData[mappingJson.getInt("csvindex") + 1])); //+1 for id column at index-0
                dataMap.put(datakey, dataValue);
                columnHeaderMap.put(datakey, mappingJson.getString("csvheader"));
                columnCSVindexMap.put(datakey, mappingJson.getInt("csvindex") + 1);
            }

            for (int j = 0; j < extraParams.length(); j++) {
                String datakey = (String) extraParams.names().get(j);
                Object dataValue = extraParams.get(datakey);
                dataMap.put(datakey, dataValue);
            }

            Object object = null;
            try {
                //                    CheckUniqueRecord(requestParams, dataMap, classPath, uniqueKeyMethodName, uniqueKeyHbmName);
                validateDataMAP2(requestParams, dataMap, columnConfig, customfield, columnHeaderMap,
                        columnCSVindexMap, dateFormat, importDao, autoNoMap);
                object = importDao.saveRecord(requestParams, dataMap, null, mode, classPath, primaryKey,
                        extraObj, customfield);
                if (batchCounter % 100 == 0) {
                    session.flush();
                    session.clear();
                }
                batchCounter++;
            } catch (Exception ex) {
                failed++;
                String errorMsg = ex.getMessage(), invalidColumns = "";
                try {
                    JSONObject errorLog = new JSONObject(errorMsg);
                    errorMsg = errorLog.getString("errorMsg");
                    invalidColumns = errorLog.getString("invalidColumns");
                } catch (JSONException jex) {
                }
                failedRecords.append("\n" + createCSVrecord(fileData) + "\"" + errorMsg + "\"");//failedRecords.append("\n"+(total)+","+createCSVrecord(fileData)+"\""+ex.getMessage()+"\"");
                importDao.markRecordValidation(tableName, recIndex, 0, errorMsg, invalidColumns);
            }
        }

        if (failed > 0) {
            createFailureFiles(fileName, failedRecords, ".csv");
        }

        int success = total - failed;
        if (total == 0) {
            msg = "Empty file.";
        } else if (success == 0) {
            msg = "Failed to import all the records.";
        } else if (success == total) {
            msg = "All records are imported successfully.";
        } else {
            msg = "Imported " + success + " record" + (success > 1 ? "s" : "") + " successfully";
            msg += (failed == 0 ? "."
                    : " and failed to import " + failed + " record" + (failed > 1 ? "s" : "") + ".");
        }

        try {
            txnManager.commit(status);
            importDao.linkCustomData(mode);
        } catch (Exception ex) {
            commitedEx = true;
            throw ex;
        }
    } catch (Exception e) {
        if (!commitedEx) { //if exception occurs during commit then dont call rollback
            txnManager.rollback(status);
        }
        issuccess = false;
        msg = "" + e.getMessage();
        Logger.getLogger(ImportHandler.class.getName()).log(Level.SEVERE, null, e);
    } finally {
        DefaultTransactionDefinition ldef = new DefaultTransactionDefinition();
        ldef.setName("import_Tx");
        ldef.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
        TransactionStatus lstatus = txnManager.getTransaction(ldef);
        boolean exCommit = false;
        try {
            //Insert Integration log

            requestParams.put("modulename", module.getModuleName());
            requestParams.put("validflag", 2);
            if (!module.getModuleName().equals("Target") && !module.getModuleName().equals("Calibration"))
                fieldManagerDAOobj.validateimportrecords(requestParams);

            HashMap<String, Object> logDataMap = new HashMap<String, Object>();
            String logId = (String) requestParams.get("logId");
            if (!StringUtil.isNullOrEmpty(logId)) {
                logDataMap.put("Id", logId);
            }
            failed = issuccess ? failed : total;
            logDataMap.put("FileName", ImportLog.getActualFileName(fileName));
            logDataMap.put("StorageName", fileName);
            logDataMap.put("Log", msg);
            logDataMap.put("Type", fileName.substring(fileName.lastIndexOf(".") + 1));
            logDataMap.put("TotalRecs", total);
            logDataMap.put("Rejected", failed);
            logDataMap.put("Module", module.getId());
            logDataMap.put("ImportDate", new Date());
            logDataMap.put("User", (String) requestParams.get("userid"));
            logDataMap.put("Company", (String) requestParams.get("companyid"));
            importDao.saveImportLog(logDataMap);
            importDao.removeFileTable(tableName);//Remove table after importing all records
            try {
                txnManager.commit(lstatus);
            } catch (Exception ex) {
                exCommit = true;
                throw ex;
            }
        } catch (Exception ex) {
            if (!exCommit) { //if exception occurs during commit then dont call rollback
                txnManager.rollback(lstatus);
            }
            Logger.getLogger(ImportHandler.class.getName()).log(Level.SEVERE, null, ex);
        }

        try {
            jobj.put("success", issuccess);
            jobj.put("msg", msg);
            jobj.put("totalrecords", total);
            jobj.put("successrecords", total - failed);
            jobj.put("failedrecords", failed);
            jobj.put("filename", ImportLog.getActualFileName(fileName));
        } catch (JSONException ex) {
            Logger.getLogger(ImportHandler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return jobj;
}

From source file:com.krawler.spring.importFunctionality.ImportUtil.java

License:Open Source License

/**
 * @param filename
 * @param delimiterType
 * @param startindex
 * @param importDao
 * @param txnManager
 * @throws ServiceException
 */
public static void dumpCSVFileData(String filename, String delimiterType, int startindex, ImportDAO importDao,
        HibernateTransactionManager txnManager) throws ServiceException {
    boolean commitedEx = false;
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("import_Tx");
    def.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
    TransactionStatus status = txnManager.getTransaction(def);
    try {
        CsvReader csvReader = null;
        FileInputStream fstream = null;
        String destinationDirectory = storageHandlerImpl.GetDocStorePath() + "importplans";
        File csv = new File(destinationDirectory + "/" + filename);
        fstream = new FileInputStream(csv);
        csvReader = new CsvReader(new InputStreamReader(fstream), delimiterType);
        //            csvReader.readHeaders();
        String tableName = importDao.getTableName(filename);
        Session session = txnManager.getSessionFactory().getCurrentSession();
        int flushCounter = 0;
        while (csvReader.readRecord()) {
            ArrayList<String> dataArray = new ArrayList<String>();
            for (int i = 0; i < csvReader.getColumnCount(); i++) {
                dataArray.add(cleanHTML(csvReader.get(i)));
            }
            importDao.dumpFileRow(tableName, dataArray.toArray());
            if (flushCounter % 30 == 0) {
                session.flush();
                session.clear();
            }
            flushCounter++;
        }
        try {
            txnManager.commit(status);
        } catch (Exception ex) {
            commitedEx = true;
            throw ex;
        }
    } catch (IOException ex) {
        txnManager.rollback(status); // roll back the still-open transaction on a read failure
        throw ServiceException.FAILURE("dumpCSVFileData: " + ex.getMessage(), ex);
    } catch (Exception ex) {
        if (!commitedEx) { //if exception occurs during commit then dont call rollback
            txnManager.rollback(status);
        }
        throw ServiceException.FAILURE("dumpCSVFileData: " + ex.getMessage(), ex);
    }
}