Example usage for java.util HashMap entrySet

List of usage examples for java.util HashMap entrySet

Introduction

On this page you can find example usages of java.util.HashMap entrySet.

Prototype

public Set<Map.Entry<K, V>> entrySet()


Document

Returns a Set view of the mappings contained in this map.
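
A minimal, self-contained sketch of the method in action (class and variable names are illustrative): entrySet() returns a Set<Map.Entry<K,V>> view backed by the map, so iterating it visits every mapping once, and Entry.setValue writes straight through to the map.

import java.util.HashMap;
import java.util.Map;

public class EntrySetDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        counts.put("alpha", 1);
        counts.put("beta", 2);

        // iterate the view: one Map.Entry per mapping
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // setValue writes through to the backing map
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            entry.setValue(entry.getValue() * 10);
        }
        System.out.println(counts); // {beta=20, alpha=10} (iteration order not guaranteed)
    }
}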

Usage

From source file:it.unibas.spicygui.controllo.datasource.ActionAddTargetInstanceCsv.java

@Override
public void performAction() {
    Scenario scenario = (Scenario) modello.getBean(Costanti.CURRENT_SCENARIO);
    MappingTask mappingTask = scenario.getMappingTask();
    IDataSourceProxy dataSource = mappingTask.getTargetProxy();
    LoadCsvInstancesMainFrame jd = new LoadCsvInstancesMainFrame(dataSource);
    InstancesTopComponent viewInstancesTopComponent = scenario.getInstancesTopComponent();
    HashMap<String, String> absolutePaths = jd.getResponse();
    if (!absolutePaths.isEmpty()) {
        if (scenario.getMappingTask().getTargetProxy().getType().equalsIgnoreCase("CSV")) {
            try {
                //pathHashMap maps the file path String (as key) to an ArrayList with three values:
                //a) the table name,
                //b) a boolean indicating whether the file contains column names, and
                //c) a boolean indicating whether the instance file has already been loaded
                HashMap<String, ArrayList<Object>> pathHashMap = new HashMap<String, ArrayList<Object>>();
                for (Map.Entry<String, String> entry : absolutePaths.entrySet()) {
                    ArrayList<Object> valSet = new ArrayList<Object>();
                    valSet.add(entry.getValue());
                    valSet.add(jd.getColNames());
                    valSet.add(false);
                    pathHashMap.put(entry.getKey(), valSet);
                }
                DAOCsv daoCsv = new DAOCsv();
                daoCsv.addInstances(dataSource, pathHashMap);

                if (!viewInstancesTopComponent.isRipulito()) {
                    viewInstancesTopComponent.createTargetInstanceTree();
                    viewInstancesTopComponent.requestActive();
                }
                DialogDisplayer.getDefault().notify(new NotifyDescriptor.Message(
                        NbBundle.getMessage(Costanti.class, Costanti.ADD_INSTANCE_OK)));
            } catch (DAOException ex) {
                DialogDisplayer.getDefault().notify(new NotifyDescriptor.Message(
                        NbBundle.getMessage(Costanti.class, Costanti.OPEN_ERROR) + " : " + ex.getMessage(),
                        DialogDescriptor.ERROR_MESSAGE));
                logger.error(ex);
            }
        } else {
            DialogDisplayer.getDefault()
                    .notify(new NotifyDescriptor.Message(
                            NbBundle.getMessage(Costanti.class, Costanti.CSV_INST_NOTIF),
                            DialogDescriptor.ERROR_MESSAGE));
        }
    } else {
        DialogDisplayer.getDefault()
                .notify(new NotifyDescriptor.Message(
                        NbBundle.getMessage(Costanti.class, Costanti.CSV_EMPTY_INST_NOTIF),
                        DialogDescriptor.ERROR_MESSAGE));
    }
}
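
The ArrayList<Object> triple above is a positional record: downstream code has to remember that index 0 is the table name and indexes 1 and 2 are booleans, and cast accordingly. A small value class would carry the same data type-safely; a hedged sketch (CsvInstanceInfo is illustrative, not part of the project):

// Illustrative alternative to the positional ArrayList<Object> triple.
public class CsvInstanceInfo {
    private final String tableName;
    private final boolean hasColumnNames;
    private boolean loaded;

    public CsvInstanceInfo(String tableName, boolean hasColumnNames, boolean loaded) {
        this.tableName = tableName;
        this.hasColumnNames = hasColumnNames;
        this.loaded = loaded;
    }

    public String getTableName() { return tableName; }
    public boolean hasColumnNames() { return hasColumnNames; }
    public boolean isLoaded() { return loaded; }
    public void setLoaded(boolean loaded) { this.loaded = loaded; }
}

With it, the map becomes HashMap<String, CsvInstanceInfo> and the unchecked positional casts disappear.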

From source file:ch.icclab.cyclops.resource.impl.GenerateResource.java

/**
 * Gets sum of usage of clientID/instanceID mappings and returns ArrayList of last events.
 *
 * @param clientInstanceMap the clientID to instanceID mappings
 * @param dbClient
 */
private TSDBData getBillingModel(HashMap<String, ArrayList<String>> clientInstanceMap,
        InfluxDBClient dbClient) {
    logger.debug("Attempting to get the Billing Models");
    ArrayList<TSDBData> UDRs = new ArrayList<TSDBData>();
    Iterator<Map.Entry<String, ArrayList<String>>> it = clientInstanceMap.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<String, ArrayList<String>> pair = it.next();
        String clientId = pair.getKey();
        ArrayList<String> instances = pair.getValue();
        logger.debug("Attempting to get the Billing Model for: " + clientId);
        for (String instance : instances) {
            String queryString = "SELECT sum(usage) FROM UDR WHERE clientId='" + clientId + "' AND instanceId='"
                    + instance + "' GROUP BY clientID,instanceID";
            //note: the query is only logged here and never sent to dbClient, so UDRs
            //stays empty and the get(0) below will throw IndexOutOfBoundsException
            logger.trace("DATA TSDBData sumUsage(...): query=" + queryString);
        }
        it.remove(); // avoids a ConcurrentModificationException
    }
    return UDRs.get(0);
}
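
The it.remove() call above is the supported way to drop entries while walking an entrySet() iterator; calling the map's own remove (or removing inside a for-each loop) would fail fast with ConcurrentModificationException on the next iteration step. A minimal sketch (names are illustrative):

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class RemoveWhileIterating {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<>();
        map.put("a", 1);
        map.put("b", 2);

        Iterator<Map.Entry<String, Integer>> it = map.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, Integer> entry = it.next();
            if (entry.getValue() % 2 == 0) {
                it.remove(); // removes the current entry from the backing map
            }
        }
        System.out.println(map); // {a=1}
    }
}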

From source file:ANNFileDetect.detectFile.java

private void DetectionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_DetectionActionPerformed
    ResField.setText("");
    EncogTestClass ec = new EncogTestClass();
    String[] nets = sql.GetNetworkNames();
    HashMap resulthm = new HashMap();
    scores = new ArrayList();
    for (String net : nets) {
        ec.removeStdDirs();
        String netfile = sql.GetNetworkFile(net);
        String FPnetfile = sql.GetNetworkFPFile(net);
        ec.runNet(FileBox.getText(), nndir + "/" + netfile, false, 0);
        double out = ec.testSingleFPfile(nndir + "/" + FPnetfile);
        HashMap hm = sql.GetValuescore(net, out);
        resulthm.put(net, hm);
        scores.add("Net: " + net + " Score: " + out);
    }
    //make an inventory of all possible files
    Iterator it = resulthm.entrySet().iterator();
    HashMap inventory = new HashMap();
    while (it.hasNext()) {
        Map.Entry entries = (Map.Entry) it.next();
        Iterator itb = ((HashMap) entries.getValue()).entrySet().iterator();
        while (itb.hasNext()) {
            Map.Entry pair = (Map.Entry) itb.next();
            String file = (String) pair.getKey();
            String[] tmpvw = ((String) pair.getValue()).split(",");
            double score = Double.parseDouble(tmpvw[0]);
            double weight = Double.parseDouble(tmpvw[1]);

            if (inventory.containsKey(file)) {
                String caz = inventory.get(file).toString();
                double curscore = Double.parseDouble(caz);
                double out = 0.0;
                if (score > 1)
                    out = ((curscore + ((score * weight) / 100)) / 2);
                if (score == 0)
                    out = (curscore / 2);
                inventory.put(file, out);
            } else {
                inventory.put(file, (score * weight) / 100);
            }

        }
    }
    String file = sql.GetFinalResultsMetrics(inventory);
    if (file.length() > 0)
        ResField.setText("File is likely to be a " + file);
    else
        ResField.setText("No file detected");
    JFreeChart jf = new GraphingClass().chartOutcome(inventory);
    ChartPanel cp = new ChartPanel(jf);
    cp.setSize(new Dimension(GPanel.getWidth(), GPanel.getHeight()));
    cp.setVisible(true);
    GPanel.removeAll();
    GPanel.add(cp);
    GPanel.repaint();
    GPanel.setVisible(true);
    System.out.println();
    invt = resulthm;

}

From source file:it.unibas.spicy.persistence.csv.DAOCsv.java

@SuppressWarnings("unchecked")
public void loadInstance(int scenarioNo, IDataSourceProxy dataSource, boolean source)
        throws DAOException, SQLException {

    IConnectionFactory connectionFactory = null;
    Connection connection = null;
    AccessConfiguration accessConfiguration = new AccessConfiguration();
    accessConfiguration.setDriver(SpicyEngineConstants.ACCESS_CONFIGURATION_DRIVER);
    accessConfiguration
            .setUri(SpicyEngineConstants.ACCESS_CONFIGURATION_URI + SpicyEngineConstants.MAPPING_TASK_DB_NAME);
    accessConfiguration.setLogin(SpicyEngineConstants.ACCESS_CONFIGURATION_LOGIN);
    accessConfiguration.setPassword(SpicyEngineConstants.ACCESS_CONFIGURATION_PASS);
    try {
        connectionFactory = new SimpleDbConnectionFactory();
        connection = connectionFactory.getConnection(accessConfiguration);
        Statement statement = connection.createStatement();

        HashMap<String, ArrayList<Object>> strfullPath = (HashMap<String, ArrayList<Object>>) dataSource
                .getAnnotation(SpicyEngineConstants.INSTANCE_PATH_LIST);

        for (Map.Entry<String, ArrayList<Object>> entry : strfullPath.entrySet()) {
            String filePath = entry.getKey();
            //the list entry.getValue() contains a)the table name 
            //b)a boolean that contains the info if the instance file includes column names 
            //and c) a boolean that contains the info if the instance file has been already loaded 
            boolean loaded = (Boolean) entry.getValue().get(2);
            if (!loaded) {
                String tableName = (String) entry.getValue().get(0);
                if (source) {
                    tableName = SpicyEngineConstants.SOURCE_SCHEMA_NAME + scenarioNo + ".\"" + tableName + "\"";
                } else {
                    tableName = SpicyEngineConstants.TARGET_SCHEMA_NAME + scenarioNo + ".\"" + tableName + "\"";
                }
                boolean colNames = (Boolean) entry.getValue().get(1);

                //avenet
                //CSVReader reader = new CSVReader(new FileReader(filePath));
                Reader r = new FileReader(filePath);
                CSVReader reader = new com.opencsv.CSVReaderBuilder(r)
                        .withFieldAsNull(CSVReaderNullFieldIndicator.EMPTY_SEPARATORS).build();

                try {
                    //ignore the first line if file includes column names
                    if (colNames) {
                        reader.readNext();
                    }
                    String[] nextLine;
                    String values;

                    ArrayList<String> stmnt_list = new ArrayList<String>();
                    String sql_insert_stmnt = "";
                    int line = 0;
                    while ((nextLine = reader.readNext()) != null) {//for each line in the file   
                        line++;
                        //skip empty lines at the end of the csv file
                        if (nextLine.length != 1 || !nextLine[0].isEmpty()) {
                            //insert into batches (of 500 rows)
                            if (line % BATCH_SIZE == 0) {
                                //take out the last ',' character           
                                sql_insert_stmnt = sql_insert_stmnt.substring(0, sql_insert_stmnt.length() - 1);
                                stmnt_list.add(sql_insert_stmnt);
                                sql_insert_stmnt = "";
                            }
                            values = "";
                            for (int i = 0; i < nextLine.length; i++) {
                                //avenet 20/7
                                if (nextLine[i] != null) {
                                    //                                    if (!nextLine[i].equalsIgnoreCase("null")){
                                    //                                        //replace double quotes with single quotes
                                    //                                        //while first escape the character ' for SQL (the "replaceAll" method call)
                                    values += "'" + nextLine[i].trim().replaceAll("'", "''") + "',";
                                    //                                    }
                                    //                                    //do not put quotes if value is the string null
                                    //                                    else{
                                    //                                        values += nextLine[i].trim().replaceAll("'", "''") + ",";   
                                    //                                    }
                                } else {
                                    values += "null,";
                                }
                            }
                            //take out the last ',' character
                            values = values.substring(0, values.length() - 1);
                            sql_insert_stmnt += "(" + values + "),";
                        }
                    }
                    reader.close();
                    if (!sql_insert_stmnt.isEmpty()) {
                        //take out the last ',' character
                        sql_insert_stmnt = sql_insert_stmnt.substring(0, sql_insert_stmnt.length() - 1);
                        stmnt_list.add(sql_insert_stmnt);
                        for (String stmnmt : stmnt_list) {
                            statement.executeUpdate("insert into " + tableName + " values " + stmnmt + ";");
                        }
                    }

                    //change the "loaded" value of the entry by replacing it in the hashmap
                    ArrayList<Object> valSet = new ArrayList<Object>();
                    valSet.add(tableName);
                    valSet.add(colNames);
                    valSet.add(true);
                    strfullPath.put(filePath, valSet);

                } catch (IOException ex) {
                    Logger.getLogger(DAOCsv.class.getName()).log(Level.SEVERE, null, ex);
                    throw new DAOException(ex);
                }
                dataSource.addAnnotation(SpicyEngineConstants.LOADED_INSTANCES_FLAG, true);
            }
        }
    } catch (FileNotFoundException ex) {
        Logger.getLogger(DAOCsv.class.getName()).log(Level.SEVERE, null, ex);
        throw new DAOException(ex);
    } finally {
        if (connection != null)
            connection.close();
    }
}
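
The loader above assembles multi-row INSERT strings by hand and escapes quotes with replaceAll("'", "''"). Where the driver allows it, a parameterized batch avoids the manual quoting entirely while keeping the same flush-every-N-rows shape. A hedged sketch of that alternative (the helper, its table argument, and the fixed column count are assumptions for illustration, not part of DAOCsv):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class CsvBatchLoader {
    //inserts rows of a fixed width using a parameterized statement,
    //so values need no manual quoting or escaping
    static void insertRows(Connection conn, String table, List<String[]> rows, int cols)
            throws SQLException {
        StringBuilder sql = new StringBuilder("insert into " + table + " values (");
        for (int i = 0; i < cols; i++) {
            sql.append(i == 0 ? "?" : ",?");
        }
        sql.append(")");
        try (PreparedStatement ps = conn.prepareStatement(sql.toString())) {
            int pending = 0;
            for (String[] row : rows) {
                for (int i = 0; i < cols; i++) {
                    ps.setString(i + 1, row[i]); // setString accepts null values
                }
                ps.addBatch();
                if (++pending % 500 == 0) { // flush in batches, as the original does
                    ps.executeBatch();
                }
            }
            ps.executeBatch(); // flush the remainder
        }
    }
}

Note that the table name still has to be concatenated: JDBC placeholders bind values, not identifiers.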

From source file:it.polito.tellmefirst.web.rest.clients.ClientEpub.java

/**
 * Classify each chapter (the top-level section defined in the Toc file) of an Epub document.
 *
 * @param file the input file
 * @param fileName the input filename
 * @param url the url of the resource
 * @param numOfTopics number of topics to be returned
 * @param lang the language of the text to be classified ("italian" or "english")
 * @return A HashMap in which the key is a string with the title of the chapter and the value
 *         is a list of the results of the classification process
 */
public HashMap<String, List<String[]>> classifyEPubChapters(File file, String fileName, String url,
        int numOfTopics, String lang) throws TMFVisibleException, IOException {

    //The classifyEPubChapters() method works only if the Toc file has a well-defined structure.
    //Otherwise you can use the usual classify() method.

    LOG.debug("[classifyEPubChapters] - BEGIN");

    if (!(file != null && fileName.toLowerCase().endsWith(".epub") || urlIsEpub(url))) {
        throw new TMFVisibleException("Resource not valid: only epub files allowed.");
    }

    dBpediaManager = new DBpediaManager();
    if (!lang.equals("english") && !dBpediaManager.isDBpediaEnglishUp()) {
        //comment for local use
        throw new TMFVisibleException("DBpedia English service seems to be down, so TellMeFirst can't work "
                + "properly. Please try later!");
    } else {
        if (lang.equals("italian") && !dBpediaManager.isDBpediaItalianUp()) {
            //comment for local use
            throw new TMFVisibleException("DBpedia Italian service seems to be down, so TellMeFirst can't work"
                    + " properly. Please try later!");
        }
    }
    File epubFile;
    if (url != null) {
        epubFile = fromURLtoFile(url);
    } else {
        epubFile = file;
    }
    HashMap<String, List<String[]>> results = new LinkedHashMap<>();
    HashMap<String, String> parserResults = parseEpub(epubFile);
    for (Map.Entry<String, String> me : parserResults.entrySet()) {
        Text text = new Text((me.getValue().toString()));
        LOG.debug("* Title of the chapter");
        LOG.debug(me.getKey().toString());
        LOG.debug("* Text of the chapter");
        LOG.debug(me.getValue().toString().substring(0, 100));
        String textString = text.getText();
        int totalNumWords = TMFUtils.countWords(textString);
        LOG.debug("TOTAL WORDS: " + totalNumWords);
        try {
            if (totalNumWords > 1000) {
                LOG.debug("Text contains " + totalNumWords + " words. We'll use Classify for long texts.");
                List<String[]> classificationResults = classifier.classify(textString, numOfTopics);
                results.put(me.getKey().toString(), classificationResults);
            } else {
                LOG.debug("Text contains " + totalNumWords + " words. We'll use Classify for short texts.");
                List<String[]> classificationResults = classifier.classifyShortText(textString, numOfTopics);
                results.put(me.getKey().toString(), classificationResults);
            }
        } catch (Exception e) {
            LOG.error("[classifyEpub] - EXCEPTION: ", e);
            throw new TMFVisibleException("Unable to extract topics from specified text.");
        }
    }

    LOG.debug("[classifyEPubChapters] - END");

    return results;
}

From source file:it.unibas.spicygui.controllo.datasource.ActionAddSourceInstanceCsv.java

@Override
public void performAction() {
    Scenario scenario = (Scenario) modello.getBean(Costanti.CURRENT_SCENARIO);
    MappingTask mappingTask = scenario.getMappingTask();
    IDataSourceProxy dataSource = mappingTask.getSourceProxy();
    LoadCsvInstancesMainFrame jd = new LoadCsvInstancesMainFrame(dataSource);
    InstancesTopComponent viewInstancesTopComponent = scenario.getInstancesTopComponent();
    //jd.getResponse() returns the file path and the table name
    HashMap<String, String> absolutePaths = jd.getResponse();
    if (!absolutePaths.isEmpty()) {
        if (scenario.getMappingTask().getSourceProxy().getType().equalsIgnoreCase("CSV")) {
            try {
                //pathHashMap maps the file path String (as key) to an ArrayList with three values:
                //a) the table name,
                //b) a boolean indicating whether the file contains column names, and
                //c) a boolean indicating whether the instance file has already been loaded
                HashMap<String, ArrayList<Object>> pathHashMap = new HashMap<String, ArrayList<Object>>();
                for (Map.Entry<String, String> entry : absolutePaths.entrySet()) {
                    ArrayList<Object> valSet = new ArrayList<Object>();
                    //table name
                    valSet.add(entry.getValue());
                    valSet.add(jd.getColNames());
                    valSet.add(false);
                    pathHashMap.put(entry.getKey(), valSet);
                }
                //dataSource.getInstances().clear();
                //dataSource.getOriginalInstances().clear();
                DAOCsv daoCsv = new DAOCsv();
                daoCsv.addInstances(dataSource, pathHashMap);

                if (!viewInstancesTopComponent.isRipulito()) {
                    viewInstancesTopComponent.clearSource();
                    viewInstancesTopComponent.createSourceInstanceTree();
                    viewInstancesTopComponent.requestActive();
                }
                //StatusDisplayer.getDefault().setStatusText(NbBundle.getMessage(Costanti.class, Costanti.ADD_INSTANCE_OK));
                DialogDisplayer.getDefault().notify(new NotifyDescriptor.Message(
                        NbBundle.getMessage(Costanti.class, Costanti.ADD_INSTANCE_OK)));
            } catch (DAOException ex) {
                DialogDisplayer.getDefault().notify(new NotifyDescriptor.Message(
                        NbBundle.getMessage(Costanti.class, Costanti.OPEN_ERROR) + " : " + ex.getMessage(),
                        DialogDescriptor.ERROR_MESSAGE));
                logger.error(ex);
            }
        } else {
            DialogDisplayer.getDefault()
                    .notify(new NotifyDescriptor.Message(
                            NbBundle.getMessage(Costanti.class, Costanti.CSV_INST_NOTIF),
                            DialogDescriptor.ERROR_MESSAGE));
        }
    } else {
        DialogDisplayer.getDefault()
                .notify(new NotifyDescriptor.Message(
                        NbBundle.getMessage(Costanti.class, Costanti.CSV_EMPTY_INST_NOTIF),
                        DialogDescriptor.ERROR_MESSAGE));
    }
}

From source file:de.ingrid.importer.udk.strategy.v1.IDCStrategy1_0_6_fixSysListInspire.java

protected void fixSysList5200() throws Exception {
    if (log.isInfoEnabled()) {
        log.info("Fixing syslist 5200 values in sys_list and t011_obj_serv_type ...");
    }

    // new syslist 5200 values
    HashMap<Integer, String> mapKeyToNewValueListDE = new HashMap<Integer, String>();
    mapKeyToNewValueListDE.put(101, "Katalogdienst (Viewer)");
    mapKeyToNewValueListDE.put(207, "Katalogdienst (Service)");

    HashMap<Integer, String> mapKeyToNewValueListEN = new HashMap<Integer, String>();
    mapKeyToNewValueListEN.put(415, "Feature generalisation service (spatial)");
    mapKeyToNewValueListEN.put(513, "Multiband image manipulation");

    String[] langToProcess = new String[] { "de", "en" };
    for (String lang : langToProcess) {
        Iterator<Entry<Integer, String>> entryIt = null;
        if (lang.equals("de")) {
            entryIt = mapKeyToNewValueListDE.entrySet().iterator();
        } else if (lang.equals("en")) {
            entryIt = mapKeyToNewValueListEN.entrySet().iterator();
        }
        String catLang = UtilsLanguageCodelist.getShortcutFromCode(readCatalogLanguageKey());

        while (entryIt.hasNext()) {
            Entry<Integer, String> entry = entryIt.next();

            // fix sys_list
            int numUpdated = jdbc.executeUpdate(
                    "UPDATE sys_list SET " + "name = '" + entry.getValue() + "' " + "where " + "lst_id = 5200"
                            + " and lang_id = '" + lang + "'" + " and entry_id = " + entry.getKey());
            if (log.isDebugEnabled()) {
                log.debug("sys_list 5100: updated " + numUpdated + " rows -> entry_id(" + entry.getKey() + "), "
                        + "new name(" + entry.getValue() + ")");
            }

            // fix data: existing keys with wrong value ! ONLY IF CATALOGLANGUAGE MAPS !
            if (lang.equals(catLang)) {
                numUpdated = jdbc.executeUpdate("UPDATE t011_obj_serv_type SET " + "serv_type_value = '"
                        + entry.getValue() + "' " + "where " + "serv_type_key = " + entry.getKey());
                if (log.isDebugEnabled()) {
                    log.debug("t011_obj_serv_type: updated " + numUpdated + " rows -> existing serv_type_key("
                            + entry.getKey() + "), " + "new serv_type_value(" + entry.getValue() + ")");
                }
            }
        }
    }
}

From source file:de.ingrid.importer.udk.strategy.v1.IDCStrategy1_0_6_fixSysListInspire.java

protected void fixSysList528() throws Exception {
    if (log.isInfoEnabled()) {
        log.info("Fixing syslist 528 values in sys_list ...");
    }

    HashMap<Integer, String> mapKeyToNewValueListDE = new HashMap<Integer, String>();
    mapKeyToNewValueListDE.put(1, "Geometrie ohne Topologie");
    mapKeyToNewValueListDE.put(2, "Linien");
    mapKeyToNewValueListDE.put(3, "geschlossene Linien eben");
    mapKeyToNewValueListDE.put(4, "Flächen");
    mapKeyToNewValueListDE.put(5, "geschlossene Linien flächendeckend");
    mapKeyToNewValueListDE.put(6, "Flächen flächendeckend");
    mapKeyToNewValueListDE.put(7, "Körper");
    mapKeyToNewValueListDE.put(8, "3D-Oberfläche");
    mapKeyToNewValueListDE.put(9, "topologisches Gebilde ohne geometrischen Raumbezug");

    Iterator<Entry<Integer, String>> entryIt = mapKeyToNewValueListDE.entrySet().iterator();

    while (entryIt.hasNext()) {
        Entry<Integer, String> entry = entryIt.next();

        // fix sys_list
        int numUpdated = jdbc.executeUpdate("UPDATE sys_list SET " + "name = '" + entry.getValue() + "' "
                + "where " + "lst_id = 528" + " and lang_id = 'de'" + " and entry_id = " + entry.getKey());
        if (log.isDebugEnabled()) {
            log.debug("sys_list 528: updated " + numUpdated + " rows -> entry_id(" + entry.getKey() + "), "
                    + "new name(" + entry.getValue() + ")");
        }

        // NO fixing of data values. In object data only entry id is stored !
    }
}
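
Both fix methods splice entry keys and values directly into the UPDATE string. The values here are fixed in code, so that is safe, but the same entrySet() loop also works with a prepared statement re-bound per entry; a sketch assuming a plain java.sql.Connection is available (the jdbc wrapper used above is project-specific):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;

public class SysListFixer {
    //illustrative variant of the fix loop: one statement, re-bound per map entry
    static void fixSysList(Connection conn, int lstId, String langId,
            Map<Integer, String> newValues) throws SQLException {
        String sql = "UPDATE sys_list SET name = ? WHERE lst_id = ? AND lang_id = ? AND entry_id = ?";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (Map.Entry<Integer, String> entry : newValues.entrySet()) {
                ps.setString(1, entry.getValue());
                ps.setInt(2, lstId);
                ps.setString(3, langId);
                ps.setInt(4, entry.getKey());
                ps.executeUpdate();
            }
        }
    }
}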

From source file:com.jci.job.repo.JobRepoImpl.java

@Override
public synchronized List<Object> batchInsert(BatchInsertReq request) {
    LOG.info("### Starting in JobRepoImpl.batchInsert ###");

    String erpName = request.getErpName();
    HashMap<String, List<TableEntity>> tableNameToEntityMap = request.getTableNameToEntityMap();
    CloudTable cloudTable = null;

    String partitionKey = erpName.toUpperCase();
    //List<String> rowKeys = request.getRowKeyList();

    Map<String, List<String>> tableNameToRowkeyListMap = request.getTableNameToRowkeyListMap();
    List<Map<String, Integer>> rowKeyData = null;
    for (Map.Entry<String, List<TableEntity>> entry : tableNameToEntityMap.entrySet()) {
        String tableName = null;
        List<TableEntity> value = null;
        int valueSize = 0;
        try {
            cloudTable = azureStorage.getTable(entry.getKey());
            tableName = entry.getKey();
            value = entry.getValue();
            valueSize = value == null ? 0 : value.size();

            LOG.info("tableName-->" + tableName);
            if (request.isDummyGrData() && Constants.TABLE_PO_DETAILS.equals(tableName)) {
                rowKeyData = null;
            } else {
                rowKeyData = CommonUtils.getNewRowkeys(partitionKey, tableName,
                        tableNameToRowkeyListMap.get(tableName), cloudTable);
            }
        } catch (Exception e) {
            LOG.error("### Exception in JobRepoImpl.batchInsert.getTable ###" + e);
            continue;
        }
        TableBatchOperation batchOperation = new TableBatchOperation();
        for (int i = 0; i < value.size(); i++) {
            TableEntity entity = value.get(i);
            batchOperation.insertOrMerge(entity);
            if (i != 0 && i % batchSize == 0) {
                try {
                    cloudTable.execute(batchOperation);
                    batchOperation.clear();
                } catch (Exception e) {
                    LOG.error("### Exception in JobRepoImpl.batchInsert.execute ###" + e);
                    continue;
                }
            }
        }

        if (batchOperation.size() > 0) {
            try {
                cloudTable.execute(batchOperation);
            } catch (Exception e) {
                LOG.error("### Exception in JobRepoImpl.batchInsert.execute ###" + e);
                continue;
            }
        }

        //Insert MIsc data
        MiscDataEntity miscEntity = null;
        try {
            miscEntity = getStatusCountEntity(Constants.PARTITION_KEY_MISCDATA, erpName);
        } catch (Exception e) {
            LOG.error("### Exception in JobRepoImpl.addMiscEntity ####", e);
        }

        if (miscEntity == null) {
            miscEntity = new MiscDataEntity(Constants.PARTITION_KEY_MISCDATA, erpName);
        }

        if (rowKeyData != null) {
            miscEntity = CommonUtils.getMiscEntity(miscEntity, tableName, rowKeyData);
            LOG.info("miscEntity--->" + miscEntity);

            if (miscEntity != null) {
                updateStatusCountEntity(miscEntity);
            }
        } else if (request.isDummyGrData() && Constants.TABLE_PO_DETAILS.equals(tableName)) {
            miscEntity = CommonUtils.getMiscEntity(miscEntity, valueSize);
            if (miscEntity != null) {
                updateStatusCountEntity(miscEntity);
            }
        }
    }

    LOG.info("### Ending in JobRepoImpl.batchInsert ###");
    return request.getReq();
}

From source file:mase.generic.WeightedClusterSCPostEval.java

@Override
protected void updateClusters(EvolutionState state) {
    // the update exerted by each data point is weighted by the importance
    // of the point. irrelevant points are not taken into account in the
    // clustering process

    // Compute the weights of the individual states
    HashMap<Integer, Double> pointWeight = stateCorrelations(state);

    // Cache the centers nearest to the elements of buffer
    HashMap<Integer, Integer> centerCache = new HashMap<Integer, Integer>(buffer.size() * 2);
    int[] genCounts = new int[clusters.length];
    double[] weightTotals = new double[clusters.length];
    for (Integer key : buffer.keySet()) {
        int cluster = assignements.containsKey(key) ? assignements.get(key)
                : closestCluster(globalKey.get(key));
        centerCache.put(key, cluster);
        genCounts[cluster]++;
        counts[cluster]++;
        weightTotals[cluster] += pointWeight.get(key);
    }

    // Normalize weights
    for (Entry<Integer, Double> e : pointWeight.entrySet()) {
        int closest = centerCache.get(e.getKey());
        e.setValue(e.getValue() * genCounts[closest] / weightTotals[closest]);
    }

    // Calculate per-cluster adjustment rates
    float[] adjWeights = new float[clusters.length];
    for (int i = 0; i < genCounts.length; i++) {
        if (genCounts[i] > 0) {
            adjWeights[i] = Math.max(1f / counts[i], minLearningRate / genCounts[i]);
        }
    }

    // Update clusters
    for (Integer key : buffer.keySet()) {
        int c = centerCache.get(key); // get closest cluster
        double[] cluster = clusters[c];
        double learningRate = adjWeights[c] * pointWeight.get(key);
        if (learningRate >= 1) {
            System.out.println("Warning: " + learningRate);
        }
        byte[] x = globalKey.get(key); // new data point
        for (int i = 0; i < cluster.length; i++) {
            cluster[i] += learningRate * (x[i] - cluster[i]); // gradient step
        }
    }

    buffer.clear();

}
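
The inner update cluster[i] += learningRate * (x[i] - cluster[i]) is a weighted running-mean step: with a rate of 1/n for the n-th point, the centroid lands exactly on the mean of the points seen so far, and the pointWeight factor above simply scales how far each point pulls the centroid. A small numeric sketch of the unweighted case:

public class RunningMeanStep {
    public static void main(String[] args) {
        double[] centroid = new double[] { 0.0 };
        double[][] points = { { 2.0 }, { 4.0 }, { 6.0 } };
        int n = 0;
        for (double[] x : points) {
            n++;
            double rate = 1.0 / n; // same shape as adjWeights[c] * pointWeight above
            for (int i = 0; i < centroid.length; i++) {
                centroid[i] += rate * (x[i] - centroid[i]); // gradient step
            }
        }
        System.out.println(centroid[0]); // prints 4.0, the mean of 2, 4 and 6
    }
}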