Example usage for java.util HashMap clear

List of usage examples for java.util HashMap clear

Introduction

This page collects example usages of java.util.HashMap.clear() from open-source projects.

Prototype

public void clear() 

Document

Removes all of the mappings from this map.
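
Before the real-world examples, here is a minimal, self-contained sketch of what clear() does (the map and its contents are illustrative, not taken from the projects below):

import java.util.HashMap;

public class HashMapClearDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        counts.put("apples", 3);
        counts.put("oranges", 5);

        counts.clear(); // removes every mapping; the map object itself stays usable

        System.out.println(counts.size());    // 0
        System.out.println(counts.isEmpty()); // true

        counts.put("pears", 2); // a cleared map can be repopulated and reused
    }
}

A pattern worth noticing in the examples below: rather than allocating a fresh HashMap on every loop iteration, the code often reuses one map and calls clear() at the end of each pass.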

Usage

From source file:com.yangtsaosoftware.pebblemessenger.services.PebbleCenter.java

private void split_message_to_packages_add_to_sendQue(List<PebbleMessage> listPM) {
    sendLock.lock();
    PebbleDictionary dataMsg = new PebbleDictionary();
    dataMsg.addUint8(ID_COMMAND, REMOTE_EXCUTE_NEW_MESSAGE);

    dataMsg.addUint8(ID_CLOSE_DELAY_SEC, (byte) (timeOut / 1000));
    dataMsg.addUint8(ID_CHAR_SCALE, char_scale);
    Constants.log(TAG_NAME, "add char_scale:" + String.valueOf(char_scale));
    dataMsg.addUint32(ID_INFO_ID, listPM.get(0).get_id().intValue());
    byte[] packInfo = new byte[] { 1, 1, 2, 1 };
    dataMsg.addBytes(ID_PAGE_INFO, packInfo);
    dataMsg.addUint8(ID_WHITE_BACKGROUND, whiteBackground ? (byte) 1 : (byte) 0);
    sendQueue.add(dataMsg);
    HashMap<Object, PebbleDictionary> unicodeMap = new HashMap<>();

    Constants.log(TAG_NAME, "listpm.size:" + String.valueOf(listPM.size()));
    for (int page = 1; page <= listPM.size(); page++) {
        PebbleMessage dealPM = listPM.get(page - 1);
        int strPackages = bigInt((float) dealPM.getAscMsg().length() / (float) MAX_CHARS_PACKAGE_CONTAIN);
        byte totalPackages = (byte) (dealPM.getCharacterQueue().size() + strPackages);
        Constants.log(TAG_NAME, "total Packages:" + String.valueOf(totalPackages));
        for (int pg = 1; pg <= strPackages; pg++) {
            dataMsg = new PebbleDictionary();
            dataMsg.addUint8(ID_COMMAND, REMOTE_EXCUTE_CONTINUE_MESSAGE);
            byte[] strPackInfo = new byte[] { (byte) listPM.size(), (byte) page, 0, 0 };
            dataMsg.addBytes(ID_PAGE_INFO, strPackInfo);
            //       dataMsg.addUint8(ID_TOTAL_PAGES,(byte)listPM.size());
            //       dataMsg.addUint8(ID_PAGE_NUM,(byte)page);
            //       dataMsg.addUint8(ID_TOTAL_PACKAGES,totalPackages);
            //       dataMsg.addUint8(ID_PACKAGE_NUM,(byte) pg);
            dataMsg.addString(ID_ASCSTR,
                    dealPM.getAscMsg().substring((pg - 1) * MAX_CHARS_PACKAGE_CONTAIN,
                            (pg * MAX_CHARS_PACKAGE_CONTAIN > dealPM.getAscMsg().length()
                                    ? (dealPM.getAscMsg().length())
                                    : (pg * MAX_CHARS_PACKAGE_CONTAIN))));
            Constants.log(TAG_NAME, "Add Queue a strmsg:<" + dataMsg.getString(ID_ASCSTR) + ">");
            unicodeMap.put(pg, dataMsg);
        }
        for (int pg = strPackages + 1; pg <= totalPackages; pg++) {
            CharacterMatrix cm = dealPM.getCharacterQueue().pollFirst();
            PebbleDictionary duoPD = unicodeMap.get(cm.getCode());
            if (duoPD == null) {
                dataMsg = new PebbleDictionary();
                dataMsg.addUint8(ID_COMMAND, REMOTE_EXCUTE_CONTINUE_MESSAGE);
                byte[] uniPackInfo = new byte[] { (byte) listPM.size(), (byte) page, 0, 0 };
                dataMsg.addBytes(ID_PAGE_INFO, uniPackInfo);
                // dataMsg.addUint8(ID_TOTAL_PAGES,(byte)listPM.size());
                // dataMsg.addUint8(ID_PAGE_NUM,(byte)page);
                //        dataMsg.addUint8(ID_TOTAL_PACKAGES,totalPackages);
                //dataMsg.addUint8(ID_PACKAGE_NUM,(byte) pg);
                Constants.log(TAG_NAME,
                        "There are " + String.valueOf(dealPM.getCharacterQueue().size()) + "unicode in queue");
                int size = cm.getByteList().size();
                byte[] b2 = new byte[size];
                cm.getbyteArray(b2, size);
                byte[] uniInfo = new byte[] { (byte) cm.getWidthBytes(), cm.getPos()[0], cm.getPos()[1] };
                dataMsg.addBytes(ID_UNICHR_INFO, uniInfo);
                //dataMsg.addUint8(ID_UNICHR_WIDTH,(byte) cm.getWidthBytes());
                //dataMsg.addBytes(ID_UNICHR_POS,cm.getPos());
                Constants.log(TAG_NAME,
                        "row:" + String.valueOf(cm.getPos()[0]) + " col:" + String.valueOf(cm.getPos()[1]));
                dataMsg.addBytes(ID_UNICHR_BYTES, b2);
                Constants.log(TAG_NAME, "b2 length:" + String.valueOf(b2.length));
                //        Constants.log(TAG_NAME,"Add Queue a unimsg:" + dataMsg.getUnsignedInteger(ID_PACKAGE_NUM).toString());
                unicodeMap.put(cm.getCode(), dataMsg);
            } else {
                if (duoPD.contains(ID_EXTRA_POS_NUM)) {
                    byte[] soubyte = duoPD.getBytes(ID_EXTRA_POS_NUM);
                    byte num = soubyte[0];
                    byte[] tarbyte = new byte[num + 2];
                    tarbyte[0] = (byte) (num + 1);
                    for (int i = 1; i <= num; i++) {
                        tarbyte[i] = soubyte[i];
                    }
                    tarbyte[num + 1] |= (cm.getPos()[1] - 1) << 4;
                    tarbyte[num + 1] |= (cm.getPos()[0] - 1);
                    duoPD.addBytes(ID_EXTRA_POS_NUM, tarbyte);

                } else {
                    byte[] tmpbyte = new byte[2];
                    tmpbyte[0] = 1;
                    tmpbyte[1] |= (cm.getPos()[1] - 1) << 4;
                    tmpbyte[1] |= (cm.getPos()[0] - 1);
                    duoPD.addBytes(ID_EXTRA_POS_NUM, tmpbyte);
                }
            }

        }

        totalPackages = (byte) unicodeMap.size();
        Iterator<Object> it = unicodeMap.keySet().iterator();
        int i = 0;
        while (it.hasNext()) {
            PebbleDictionary tmpPD = unicodeMap.get(it.next());
            byte[] tmpPackInfo = tmpPD.getBytes(ID_PAGE_INFO);
            tmpPackInfo[2] = totalPackages;
            tmpPackInfo[3] = (byte) ++i;
            tmpPD.addBytes(ID_PAGE_INFO, tmpPackInfo);
            //tmpPD.addUint8(ID_TOTAL_PACKAGES, totalPackages);
            //tmpPD.addUint8(ID_PACKAGE_NUM,(byte)++i);
            sendQueue.add(tmpPD);
        }
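        // Reuse the same map for the next page: clear() removes every entry but keeps the instance.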
        unicodeMap.clear();
    }
    sendLock.unlock();
}

From source file:algorithm.NQueens.java

public void doMain(String[] args) throws InterruptedException, IOException {
    CmdLineParser parser = new CmdLineParser(this);

    try {
        /* Parse the arguments */
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println(e.getMessage());
        System.err.println("java NQueens [options...] arguments...");
        parser.printUsage(System.err);

        /* Print program sample showing all of the options */
        System.err.println("\n Example: java NQueens" + parser.printExample(ALL));
        System.exit(1);
    }

    try {
        String resultantPath = "/" + numQueens + "_q" + "/";
        if (mutation == null) {
            resultantPath += "variable/";
        } else {
            resultantPath += mutation.toString() + "/";
        }

        outputDir += resultantPath;

        File dir = new File(outputDir);
        File figureDir = new File(outputDir + "/figures/");
        /*
         * mkdirs() returns true only if it actually created the directories;
         * it returns false when they already exist, so the return value is
         * intentionally ignored here (the directories are in place either way)
         */
        dir.mkdirs();
        figureDir.mkdirs();
    } catch (Exception e) {
        System.err.println(e.getMessage());
        System.exit(1);
    }

    for (int count = 0; count < numRuns; ++count) {
        /* Create an initial population of uniformly random chromosomes */
        initPopulation();

        /* Initialize the Breed operation */
        if (mutation != null) {
            Breed.init(new Random(), mutation);
        } else {
            Breed.init(new Random());
        }

        /* Iterate until all of the solutions for the N queens problem have been found */
        while (solutions.size() < distinctSolutions[numQueens - 1] && numGenerations <= maxGenerations) {
            /* If the percentage of similar chromosomes due to in-breeding exceeds
             * the minimum threshold value, increase the amount of mutation
             */
            curSimilarity = similarChromosomes(population);
            if (mutation == null) {
                if (curSimilarity >= inbreedingThreshold) {
                    Breed.inBreeding(true);
                } else {
                    Breed.inBreeding(false);
                }
            }

            /* Calculate the fitness distribution of the current population */
            HashMap<Chromosome, Double> fitness = Fitness.calculate(population);

            /* Instantiate the selection iterator using the fitness distribution,
             * the selection iterator uses roulette wheel selection to select
             * each chromosome.
             */
            Selection selection = new Selection(new Random());
            selection.init(fitness);

            /* Generate the next population by selecting chromosomes from the current
             * population using selection iterator and applying the cloning, crossover,
             * and mutation operations.
             */
            ArrayList<Chromosome> nextPopulation = new ArrayList<Chromosome>(populationSize);
            ArrayList<Chromosome> chromosomes = new ArrayList<Chromosome>(2);

            while (nextPopulation.size() < populationSize) {
                /* Select a random number and apply the breeding operation */
                Integer randomNum = random.nextInt(100);

                /* Pair of parent chromosomes continue on to the next generation.*/
                if (Breed.CLONING.contains(randomNum)) {
                    chromosomes.addAll(Breed.cloning(selection));
                }
                /* Pair of parent chromosomes are cross-overed to create new pair */
                else if (Breed.CROSSOVER.contains(randomNum)) {
                    chromosomes.addAll(Breed.crossover(selection));
                }

                /* Apply the background mutation operator to the chromosomes */
                for (Chromosome chromosome : chromosomes) {
                    randomNum = random.nextInt(100);

                    if (Breed.MUTATION.contains(randomNum)) {
                        nextPopulation.add(Breed.mutation(chromosome));
                    } else {
                        nextPopulation.add(chromosome);
                    }
                }
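                /* Empty the scratch list so the next pair of selected parents starts clean */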
                chromosomes.clear();
            }

            /* If there are any solutions (fitness of 1) that are unique save them */
            for (Chromosome chromosome : fitness.keySet()) {
                if (fitness.get(chromosome) == 1.0) {
                    if (uniqueSolution(chromosome)) {
                        /* Save a copy of the chromosome */
                        Chromosome solution = new Chromosome(new ArrayList<Integer>(chromosome.get()),
                                chromosome.size());
                        solutions.add(solution);
                        solutionGeneration.put(solutions.size(), numGenerations);

                        /* Perform three rotations then a reflection followed by three more rotations */
                        for (int i = 0; i < 6; ++i) {
                            rotation = Transformation.rotate(solutions.get(solutions.size() - 1));

                            if (uniqueSolution(rotation)) {
                                solutions.add(rotation);
                                solutionGeneration.put(solutions.size(), numGenerations);
                            } else {
                                if (rotationMiss.containsKey(numGenerations)) {
                                    rotationMiss.put(numGenerations, rotationMiss.get(numGenerations) + 1);
                                } else {
                                    rotationMiss.put(numGenerations, 1);
                                }
                            }

                            if (i == 2) {
                                reflection = Transformation.reflect(solution);

                                if (uniqueSolution(reflection)) {
                                    solutions.add(reflection);
                                    solutionGeneration.put(solutions.size(), numGenerations);
                                } else {
                                    if (reflectionMiss.containsKey(numGenerations)) {
                                        reflectionMiss.put(numGenerations,
                                                reflectionMiss.get(numGenerations) + 1);
                                    } else {
                                        reflectionMiss.put(numGenerations, 1);
                                    }
                                }
                            }
                        }
                    } else {
                        if (duplicateBuffer.containsKey(numGenerations)) {
                            duplicateBuffer.put(numGenerations, duplicateBuffer.get(numGenerations) + 1);
                        } else {
                            duplicateBuffer.put(numGenerations, 1);
                        }
                    }
                }
            }

            /* Save average fitness for the current generation */
            DescriptiveStatistics descStats = new DescriptiveStatistics(Doubles.toArray(fitness.values()));
            fitnessBuffer.add(descStats.getMean());

            /* Save chromosome similarity and mutation rate for current generation */
            similarityBuffer.add(curSimilarity);

            /* Save the variable mutation rate */
            if (mutation == null) {
                mutationBuffer.add((Breed.MUTATION.upperEndpoint() - Breed.MUTATION.lowerEndpoint()) / 100.0);
            }

            /* Calculate statistics for the fitness, similarity, and mutation buffers every 1,000 generations */
            if ((numGenerations % 1000) == 0) {
                calcStatistics(1000);
            }

            /* Write the current results to file every 10,000 generations */
            if ((numGenerations % 10000) == 0) {
                writeResults();
            }

            /* Set the current population as the NEXT population */
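            /* The fitness map for this generation is no longer needed; clear() removes all of its mappings */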
            fitness.clear();
            population = nextPopulation;

            ++numGenerations;
        }

        /* Calculate statistics and write any remaining results */
        if (fitnessBuffer.size() > 0) {
            calcStatistics(1000);
        }
        writeResults();

        /* Display random solutions for the number of solutions specified */
        for (int j = 0; j < numDisplay; ++j) {
            /* Display a random solution */
            Chromosome solution = solutions.get(random.nextInt(solutions.size()));

            try {
                QueenGame myGame = new QueenGame(new QueenBoard(Ints.toArray(solution.get()), numQueens));
                myGame.playGame(
                        outputDir + "/figures/" + "figure_run_" + String.valueOf(runNumber) + "_" + j + ".png");
            } catch (Exception e) {
                System.out.println("Bad set of Queens");
            }
        }

        /* Reset the current state for the next run and increment run number */
        reset();
        ++runNumber;
    }
}

From source file:ddf.catalog.impl.CatalogFrameworkImpl.java

@Override
public UpdateResponse update(UpdateStorageRequest streamUpdateRequest)
        throws IngestException, SourceUnavailableException {

    validateUpdateStorageRequest(streamUpdateRequest);

    setFlagsOnRequest(streamUpdateRequest);

    if (fanoutEnabled) {
        throw new IngestException(FANOUT_MESSAGE);
    }

    if (Requests.isLocal(streamUpdateRequest)
            && (!sourceIsAvailable(catalog) || !storageIsAvailable(storage))) {
        SourceUnavailableException sourceUnavailableException = new SourceUnavailableException(
                "Local provider is not available, cannot perform create operation.");
        if (INGEST_LOGGER.isWarnEnabled()) {
            INGEST_LOGGER.warn("Error on create operation, local provider not available.",
                    sourceUnavailableException);
        }
        throw sourceUnavailableException;
    }

    Map<String, Metacard> metacardMap = new HashMap<>();
    List<ContentItem> contentItems = new ArrayList<>(streamUpdateRequest.getContentItems().size());
    HashMap<String, Path> tmpContentPaths = new HashMap<>(streamUpdateRequest.getContentItems().size());
    generateMetacardAndContentItems(streamUpdateRequest, streamUpdateRequest.getContentItems(), metacardMap,
            contentItems, tmpContentPaths);
    streamUpdateRequest.getProperties().put(CONTENT_PATHS, tmpContentPaths);

    UpdateResponse updateResponse;
    UpdateStorageRequest updateStorageRequest = null;
    try {
        if (contentItems.size() > 0) {
            updateStorageRequest = new UpdateStorageRequestImpl(contentItems, streamUpdateRequest.getId(),
                    streamUpdateRequest.getProperties());

            for (final PreUpdateStoragePlugin plugin : frameworkProperties.getPreUpdateStoragePlugins()) {
                try {
                    updateStorageRequest = plugin.process(updateStorageRequest);
                } catch (PluginExecutionException e) {
                    LOGGER.warn("Plugin processing failed. This is allowable. Skipping to next plugin.", e);
                }
            }

            UpdateStorageResponse updateStorageResponse;
            try {
                updateStorageResponse = storage.update(updateStorageRequest);
                updateStorageResponse.getProperties().put(CONTENT_PATHS, tmpContentPaths);
            } catch (StorageException e) {
                throw new IngestException("Could not store content items. Removed created metacards.", e);
            }

            for (final PostUpdateStoragePlugin plugin : frameworkProperties.getPostUpdateStoragePlugins()) {
                try {
                    updateStorageResponse = plugin.process(updateStorageResponse);
                } catch (PluginExecutionException e) {
                    LOGGER.warn("Plugin processing failed. This is allowable. Skipping to next plugin.", e);
                }
            }

            for (ContentItem contentItem : updateStorageResponse.getUpdatedContentItems()) {
                metacardMap.put(contentItem.getId(), contentItem.getMetacard());
            }
        }

        UpdateRequestImpl updateRequest = new UpdateRequestImpl(Iterables.toArray(
                metacardMap.values().stream().map(Metacard::getId).collect(Collectors.toList()), String.class),
                new ArrayList<>(metacardMap.values()));
        updateRequest.setProperties(streamUpdateRequest.getProperties());
        updateResponse = update(updateRequest);
    } catch (Exception e) {
        if (updateStorageRequest != null) {
            try {
                storage.rollback(updateStorageRequest);
            } catch (StorageException e1) {
                LOGGER.error("Unable to remove temporary content for id: " + streamUpdateRequest.getId(), e1);
            }
        }
        throw new IngestException("Unable to store products for request: " + streamUpdateRequest.getId(), e);
    } finally {
        if (updateStorageRequest != null) {
            try {
                storage.commit(updateStorageRequest);
            } catch (StorageException e) {
                LOGGER.error("Unable to commit content changes for id: " + updateStorageRequest.getId(), e);
                try {
                    storage.rollback(updateStorageRequest);
                } catch (StorageException e1) {
                    LOGGER.error("Unable to remove temporary content for id: " + updateStorageRequest.getId(),
                            e1);
                }
            }
        }
        tmpContentPaths.values().stream().forEach(path -> FileUtils.deleteQuietly(path.toFile()));
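        // The temporary files are deleted above; clear() then discards the stale Path mappings.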
        tmpContentPaths.clear();
    }

    return updateResponse;
}

From source file:initializers.FSInitializer.java

@Override
public void doAnalysis(Federation federation, List<?> dataProviders, boolean fedFlag, String[] elements2Analyze,
        String elmtVoc) throws InstantiationException, IllegalAccessException, ClassNotFoundException,
        SAXException, ParserConfigurationException {
    // TODO Auto-generated method stub

    // Vector<String> xmlElements = new Vector<>();
    HashMap<String, Double> xmlElements = new HashMap<>();
    Vector<String> xmlElementsDistinct = new Vector<>();

    //HashMap<String,HashMap<HashMap<String, String>, Integer>> attributes = new HashMap<>();
    HashMap<String, Integer> attributes = new HashMap<>();

    HashMap<String, Integer> elementDims = new HashMap<>();
    HashMap<String, Integer> elementCompletness = new HashMap<>();
    Vector<String> elementEntropy = new Vector<>();
    HashMap<String, Double> elementImportance = new HashMap<>();

    Properties props = new Properties();
    try {
        props.load(new FileInputStream("configure.properties"));

    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.exit(-1);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.exit(-1);
    }

    String resultsPath = props.getProperty(AnalyticsConstants.resultsPath);
    String[] elementVocs = elmtVoc.split(",");

    ConfigureLogger conf = new ConfigureLogger();
    Logger logger = conf.getLogger("vocAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "vocAnalysis.log");

    Logger loggerAtt = conf.getLogger("attributeAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "attributeAnalysis.log");

    Logger loggerEl = conf.getLogger("elementAnalysis",
            resultsPath + "Analysis_Results" + File.separator + "elementAnalysis.log");

    for (int i = 0; i < dataProviders.size(); i++) {

        String[] extensions = { "xml" };
        //FileUtils utils = new FileUtils();
        Collection<File> xmls = FileUtils.listFiles((File) dataProviders.get(i), extensions, true);

        String filterXMLs = props.getProperty(AnalyticsConstants.filteringEnabled);

        if (filterXMLs.equalsIgnoreCase("true")) {
            Filtering filtering = new Filtering();
            String expression = props.getProperty(AnalyticsConstants.xpathExpression);
            System.out.println("Filtering is enabled.");
            Iterator<File> iterator = xmls.iterator();
            while (iterator.hasNext()) {
                File next = iterator.next();
                if (!filtering.filterXML(next, expression)) {
                    System.out.println("File:" + next.getName() + " is filtered out.");
                    iterator.remove();
                } else
                    System.out.println("File:" + next.getName() + " is kept in xmls' collection.");

            }
        }

        try {

            // Repository repo = new Repository(xmls, elements2Analyze);

            //distinctAtts, 
            Repository repo = new Repository(xmls, attributes, xmlElements, xmlElementsDistinct, elementDims,
                    elementCompletness, elementEntropy, elementImportance, props);

            repo.setRepoName(((File) dataProviders.get(i)).getName());
            repo.setRecordsNum(xmls.size());

            if (fedFlag) {

                federation.addRepoName(((File) dataProviders.get(i)).getName());

                System.out.println("######################################");
                System.out.println("Analysing repository:" + repo.getRepoName());
                System.out.println("Number of records:" + xmls.size());
                repo.parseXMLs(elements2Analyze, elementVocs);

                federation.appendFreqElements(repo.getElementFrequency());

                federation.appendCompletnessElements(repo.getElementCompleteness(), dataProviders.size());
                federation.appendImportanceElements(repo.getElementImportance(), dataProviders.size());

                federation.appendDimensionalityElements(repo.getElementDimensions());

                federation.appendEntropyElements(repo.computeElementEntropy(), dataProviders.size());

                this.logElementAnalysis(loggerEl, repo.getRepoName(), resultsPath);

                repo.computeElementValueFreq(elementVocs, logger);

                // FileUtils.deleteDirectory(new File("buffer"));

                repo.getAttributeFrequency(loggerAtt);

                federation.appendFileSize(repo.getFileSizeDistribution());

                federation.appendNoRecords(repo.getXmls().size());
                repo.storeRepoGeneralInfo(true);
                federation.appendInformativeness(repo.getAvgRepoInformativeness());
                federation.appendSchemas(repo.getSchema(false));
                federation.appendRequirements(repo.getRequirements());

                System.out.println("Repository:" + repo.getRepoName() + " analysis completed.");
                System.out.println("======================================");
            } else {
                System.out.println("######################################");
                System.out.println("Analysing repository:" + repo.getRepoName());
                System.out.println("Number of records:" + repo.getXmls().size());
                repo.parseXMLs(elements2Analyze, elementVocs);
                repo.getElementFrequency();
                repo.getElementCompleteness();
                repo.getElementDimensions();
                repo.getElementImportance();

                repo.computeElementEntropy();

                this.logElementAnalysis(loggerEl, repo.getRepoName(), resultsPath);
                // System.out.println(repo.getVocabularies());

                repo.computeElementValueFreq(elementVocs, logger);

                repo.storeRepoGeneralInfo(false);

                // FileUtils.deleteDirectory(new File("buffer"));

                repo.getAttributeFrequency(loggerAtt);

                System.out.println("======================================");
                System.out.println("Repository:" + repo.getRepoName() + " analysis completed.");
                System.out.println("======================================");

            }
        } catch (IOException ex) {

            ex.printStackTrace();
        }
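        // Reset all of the per-repository maps and buffers so the next data provider starts from scratch.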
        xmlElements.clear();
        xmlElementsDistinct.clear();
        attributes.clear();
        //   distinctAtts.clear();
        elementDims.clear();
        elementCompletness.clear();

        elementEntropy.clear();
        elementImportance.clear();

    }

    if (fedFlag) {
        try {
            federation.getElementsSFrequency();
            federation.getElementsMCompletness();
            federation.getElementsMImportance();
            federation.getElementsMaxDimensionality();
            federation.getElementsMEntropy();
            federation.getAttributesSumFreq(loggerAtt);
            federation.getElementValueSumFreq(elmtVoc, logger);
            System.out.println("Average file size:" + federation.getAverageFileSize() + " Bytes");
            System.out.println("Sum number of records:" + federation.getRecordsSum() + " records");
            System.out.println("Sum storage requirements:" + federation.getRequirements() + " bytes");
            System.out.println("AVG informativeness(bits):" + federation.getAVGInformativeness());

            federation.storeGeneralInfo2CSV();
            this.logElementAnalysis(loggerEl, "Federation", resultsPath);

        } catch (IOException ex) {
            ex.printStackTrace();
        }

    }

}

From source file:edu.ku.brc.specify.dbsupport.SpecifySchemaUpdateService.java

/**
 * Fixes the Schema for Database Version 1.2
 * @param conn
 * @throws Exception
 */
private boolean doFixesForDBSchemaVersions(final Connection conn, final String databaseName) throws Exception {
    /////////////////////////////
    // PaleoContext
    /////////////////////////////
    getTableNameAndTitleForFrame(PaleoContext.getClassTableId());
    Integer len = getFieldLength(conn, databaseName, "paleocontext", "Text1");
    alterFieldLength(conn, databaseName, "paleocontext", "Text1", 32, 64);
    alterFieldLength(conn, databaseName, "paleocontext", "Text2", 32, 64);

    len = getFieldLength(conn, databaseName, "paleocontext", "Remarks");
    if (len == null) {
        int count = BasicSQLUtils.getCountAsInt("SELECT COUNT(*) FROM paleocontext");
        int rv = update(conn, "ALTER TABLE paleocontext ADD Remarks VARCHAR(60)");
        if (rv != count) {
            errMsgList.add("Error updating PaleoContext.Remarks");
            return false;
        }
    }
    frame.incOverall();

    DBConnection dbc = DBConnection.getInstance();

    /////////////////////////////
    // FieldNotebookPage
    /////////////////////////////
    getTableNameAndTitleForFrame(FieldNotebookPage.getClassTableId());
    len = getFieldLength(conn, databaseName, "fieldnotebookpage", "PageNumber");
    if (len != null && len == 16) {
        alterFieldLength(conn, databaseName, "fieldnotebookpage", "PageNumber", 16, 32);
        update(conn, "ALTER TABLE fieldnotebookpage ALTER COLUMN ScanDate DROP DEFAULT");
    }
    frame.incOverall();

    /////////////////////////////
    // Project Table
    /////////////////////////////
    alterFieldLength(conn, databaseName, "project", "projectname", 50, 128);
    frame.incOverall();

    /////////////////////////////
    // AttachmentImageAttribute Table
    /////////////////////////////
    if (doesTableExist(databaseName, "attachmentimageattribute")) {
        alterFieldLength(conn, databaseName, "attachmentimageattribute", "CreativeCommons", 128, 500);
        frame.incOverall();
    }

    /////////////////////////////
    // LocalityDetail
    /////////////////////////////

    String tblName = getTableNameAndTitleForFrame(LocalityDetail.getClassTableId());

    boolean statusOK = true;
    String sql = String.format(
            "SELECT COUNT(*) FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = 'localitydetail' AND COLUMN_NAME = 'UtmScale' AND DATA_TYPE = 'varchar'",
            dbc.getDatabaseName());
    int count = BasicSQLUtils.getCountAsInt(sql);
    if (count > 0) {
        Vector<Object[]> values = query("SELECT ld.LocalityDetailID, ld.UtmScale, l.LocalityName "
                + "FROM localitydetail ld INNER JOIN locality l ON ld.LocalityID = l.LocalityID WHERE ld.UtmScale IS NOT NULL");

        update(conn, "ALTER TABLE localitydetail DROP COLUMN UtmScale");
        addColumn(conn, databaseName, tblName, "UtmScale", "FLOAT", "UtmOrigLongitude");
        addColumn(conn, databaseName, tblName, "MgrsZone", "VARCHAR(4)", "UtmScale");

        HashMap<String, String> badLocalitiesHash = new HashMap<String, String>();

        try {
            PreparedStatement pStmt = conn
                    .prepareStatement("UPDATE localitydetail SET UtmScale=? WHERE LocalityDetailID=?");

            for (Object[] row : values) {
                Integer locDetailId = (Integer) row[0];
                String scale = (String) row[1];
                String locName = (String) row[2];

                scale = StringUtils.contains(scale, ',') ? StringUtils.replace(scale, ",", "") : scale;
                if (!StringUtils.isNumeric(scale)) {
                    badLocalitiesHash.put(locName, scale);
                    continue;
                }

                float scaleFloat = 0.0f;
                try {
                    scaleFloat = Float.parseFloat(scale);

                } catch (NumberFormatException ex) {
                    badLocalitiesHash.put(locName, scale);
                    continue;
                }

                pStmt.setFloat(1, scaleFloat);
                pStmt.setInt(2, locDetailId);
                pStmt.execute();
            }
            pStmt.close();

        } catch (SQLException ex) {
            statusOK = false;
        }

        if (badLocalitiesHash.size() > 0) {
            try {
                File file = new File(
                        UIRegistry.getUserHomeDir() + File.separator + "localitydetailerrors.html");
                TableWriter tblWriter = new TableWriter(file.getAbsolutePath(), "Locality Detail Errors");
                tblWriter.startTable();
                tblWriter.logHdr(new String[] { "Locality Name", "Scale" });

                for (String key : badLocalitiesHash.keySet()) {
                    tblWriter.log(key, badLocalitiesHash.get(key));
                }
                tblWriter.endTable();
                tblWriter.flush();
                tblWriter.close();

                UIRegistry.showLocalizedError("LOC_DETAIL_ERRORS", badLocalitiesHash.size(),
                        file.getAbsoluteFile());

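                // The bad localities have been written to the report; empty the map.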
                badLocalitiesHash.clear();

                if (file.exists()) {
                    try {
                        AttachmentUtils.openURI(file.toURI());

                    } catch (Exception ex) {
                        ex.printStackTrace();
                    }
                }
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    } else {
        addColumn(conn, databaseName, tblName, "UtmScale", "FLOAT", "UtmOrigLongitude");
    }
    frame.incOverall();

    //////////////////////////////////////////////
    // collectingeventattribute Schema 1.3
    //////////////////////////////////////////////
    DBMSUserMgr dbmsMgr = DBMSUserMgr.getInstance();
    if (dbmsMgr.connectToDBMS(itUserNamePassword.first, itUserNamePassword.second, dbc.getServerName())) {
        boolean status = true;

        Connection connection = dbmsMgr.getConnection();
        try {
            // Add New Fields to Determination
            tblName = getTableNameAndTitleForFrame(Determination.getClassTableId());
            addColumn(conn, databaseName, tblName, "VarQualifier",
                    "ALTER TABLE %s ADD COLUMN %s VARCHAR(16) AFTER Qualifier");
            addColumn(conn, databaseName, tblName, "SubSpQualifier",
                    "ALTER TABLE %s ADD COLUMN %s VARCHAR(16) AFTER VarQualifier");
            frame.incOverall();

            // CollectingEventAttributes
            sql = String.format(
                    "SELECT COUNT(*) FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = 'collectingeventattribute' AND COLUMN_NAME = 'CollectionMemberID'",
                    dbc.getDatabaseName());
            count = BasicSQLUtils.getCountAsInt(sql);

            connection.setCatalog(dbc.getDatabaseName());

            //int numCEAttrs = BasicSQLUtils.getCountAsInt("SELECT COUNT(*) FROM collectingeventattribute");
            if (count > 0) {
                HashMap<Integer, Integer> collIdToDispIdHash = new HashMap<Integer, Integer>();
                sql = "SELECT UserGroupScopeId, DisciplineID FROM collection";
                for (Object[] cols : query(sql)) {
                    Integer colId = (Integer) cols[0];
                    Integer dspId = (Integer) cols[1];
                    collIdToDispIdHash.put(colId, dspId);
                }

                count = BasicSQLUtils.getCountAsInt("SELECT COUNT(*) FROM collectingeventattribute");

                IdMapperMgr.getInstance().setDBs(connection, connection);
                IdTableMapper mapper = new IdTableMapper("ceattrmapper", "id",
                        "SELECT CollectingEventAttributeID, CollectionMemberID FROM collectingeventattribute",
                        true, false);
                mapper.setFrame(frame);
                mapper.mapAllIdsNoIncrement(count > 0 ? count : null);

                Statement stmt = null;
                try {
                    getTableNameAndTitleForFrame(CollectingEventAttribute.getClassTableId());

                    stmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                            ResultSet.CONCUR_READ_ONLY);
                    update(conn, "DROP INDEX COLEVATSColMemIDX on collectingeventattribute");
                    update(conn, "ALTER TABLE collectingeventattribute DROP COLUMN CollectionMemberID");
                    update(conn, "ALTER TABLE collectingeventattribute ADD COLUMN DisciplineID int(11)");
                    update(conn, "CREATE INDEX COLEVATSDispIDX ON collectingeventattribute(DisciplineID)");

                    double inc = count > 0 ? (100.0 / (double) count) : 0;
                    double cnt = 0;
                    int percent = 0;
                    frame.setProcess(0, 100);
                    frame.setProcessPercent(true);

                    PreparedStatement pStmt = conn.prepareStatement(
                            "UPDATE collectingeventattribute SET DisciplineID=? WHERE CollectingEventAttributeID=?");
                    ResultSet rs = stmt
                            .executeQuery("SELECT CollectingEventAttributeID FROM collectingeventattribute");
                    while (rs.next()) {
                        Integer ceAttrId = rs.getInt(1);
                        Integer oldColId = mapper.get(ceAttrId);
                        if (oldColId != null) {
                            Integer dispId = collIdToDispIdHash.get(oldColId);
                            if (dispId != null) {
                                pStmt.setInt(1, dispId);
                                pStmt.setInt(2, ceAttrId);
                                pStmt.execute();

                            } else {
                                log.debug("Error getting hashed DisciplineID from Old Collection ID[" + oldColId
                                        + "]  ceAttrId[" + ceAttrId + "]");
                            }
                        } else {
                            log.debug("Error getting mapped  Collection ID[" + oldColId + "]  ceAttrId["
                                    + ceAttrId + "]");
                        }

                        cnt += inc;
                        if (((int) cnt) > percent) {
                            percent = (int) cnt;
                            frame.setProcess(percent);
                        }
                    }
                    rs.close();
                    pStmt.close();

                    frame.setProcess(100);

                } catch (SQLException ex) {
                    ex.printStackTrace();

                } finally {
                    if (stmt != null)
                        stmt.close();
                }
                mapper.cleanup();
            }
            frame.incOverall();

            //-----------------------------
            // Collectors
            //-----------------------------
            sql = String.format(
                    "SELECT COUNT(*) FROM `INFORMATION_SCHEMA`.`COLUMNS` WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = 'collector' AND COLUMN_NAME = 'CollectionMemberID'",
                    dbc.getDatabaseName());
            count = BasicSQLUtils.getCountAsInt(sql);
            if (count > 0) {
                HashMap<Integer, Integer> collIdToDivIdHash = new HashMap<Integer, Integer>();
                sql = "SELECT c.UserGroupScopeId, d.DivisionID FROM collection c INNER JOIN discipline d ON c.DisciplineID = d.UserGroupScopeId";
                for (Object[] cols : query(sql)) {
                    Integer colId = (Integer) cols[0];
                    Integer divId = (Integer) cols[1];
                    collIdToDivIdHash.put(colId, divId);
                }

                count = BasicSQLUtils.getCountAsInt("SELECT COUNT(*) FROM collector");

                IdMapperMgr.getInstance().setDBs(connection, connection);
                IdTableMapper mapper = new IdTableMapper("collectormap", "id",
                        "SELECT CollectorID, CollectionMemberID FROM collector", true, false);
                mapper.setFrame(frame);
                mapper.mapAllIdsNoIncrement(count > 0 ? count : null);

                getTableNameAndTitleForFrame(Collector.getClassTableId());
                Statement stmt = null;
                try {
                    stmt = connection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                            ResultSet.CONCUR_READ_ONLY);
                    update(conn, "DROP INDEX COLTRColMemIDX on collector");
                    update(conn, "ALTER TABLE collector DROP COLUMN CollectionMemberID");
                    update(conn, "ALTER TABLE collector ADD COLUMN DivisionID INT(11)");
                    update(conn, "CREATE INDEX COLTRDivIDX ON collector(DivisionID)");

                    double inc = count > 0 ? (100.0 / (double) count) : 0;
                    double cnt = 0;
                    int percent = 0;
                    frame.setProcess(0, 100);
                    frame.setProcessPercent(true);

                    PreparedStatement pStmt = conn
                            .prepareStatement("UPDATE collector SET DivisionID=? WHERE CollectorID=?");
                    ResultSet rs = stmt.executeQuery("SELECT CollectorID FROM collector");
                    while (rs.next()) {
                        Integer coltrId = rs.getInt(1);
                        Integer oldColId = mapper.get(coltrId);
                        if (oldColId != null) {
                            Integer divId = collIdToDivIdHash.get(oldColId);
                            if (divId != null) {
                                pStmt.setInt(1, divId);
                                pStmt.setInt(2, coltrId);
                                pStmt.execute();

                            } else {
                                log.debug("Error getting hashed DisciplineID from Old Collection ID[" + oldColId
                                        + "]");
                            }
                        } else {
                            log.debug("Error getting mapped Collector ID[" + oldColId + "]");
                        }

                        cnt += inc;
                        if (((int) cnt) > percent) {
                            percent = (int) cnt;
                            frame.setProcess(percent);
                        }
                    }
                    rs.close();
                    pStmt.close();

                    frame.setProcess(100);

                } catch (SQLException ex) {
                    ex.printStackTrace();

                } finally {
                    if (stmt != null)
                        stmt.close();
                }
                mapper.cleanup();

                frame.incOverall();
            }

        } catch (Exception ex) {
            ex.printStackTrace();

        } finally {
            frame.getProcessProgress().setIndeterminate(true);
            frame.setDesc("Loading updated schema...");

            if (!status) {
                //UIRegistry.showLocalizedError("SCHEMA_UPDATE_ERROR", errMsgStr);
                JTextArea ta = UIHelper.createTextArea();
                ta.setText(errMsgStr);
                CellConstraints cc = new CellConstraints();
                PanelBuilder pb = new PanelBuilder(new FormLayout("f:p:g", "f:p:g"));
                pb.add(new JScrollPane(ta, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
                        JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED), cc.xy(1, 1));
                pb.setDefaultDialogBorder();

                CustomDialog dlg = new CustomDialog((Frame) UIRegistry.getTopWindow(),
                        getResourceString("SCHEMA_UPDATE_ERROR"), true, pb.getPanel());
                UIHelper.centerAndShow(dlg);
            }

            dbmsMgr.close();
        }
    }

    return statusOK;
}

From source file:sbu.srl.rolextract.ArgumentClassifier.java

public void distributeCrossValidationByProcess(String outputDir, int nbFold)
        throws FileNotFoundException, IOException, InterruptedException {
    Map<String, List<Sentence>> processSentPair = sentences.stream()
            .collect(Collectors.groupingBy(s -> s.getProcessName()));
    int partitionSize = sentences.size() / nbFold;
    int blockSize = 0;
    int currentFoldCnt = 1;
    Thread.sleep(10000);
    System.out.println("Total sentences : " + sentences.size());
    ArrayList<Sentence> trainingData = new ArrayList<Sentence>();
    ArrayList<Sentence> testingData = new ArrayList<Sentence>();
    HashMap<String, String> testProcessName = new HashMap<String, String>();
    HashMap<String, String> trainingProcessName = new HashMap<String, String>();
    for (String testingProcess : processSentPair.keySet()) {
        System.out.println(
                "Process " + testingProcess + " Nb Sentence :" + processSentPair.get(testingProcess).size());
        // if foldNumber is equal to totalFold then
        // keep adding to testData
        if (currentFoldCnt == nbFold) {
            System.out.println("Processing last fold");
            testingData.addAll(processSentPair.get(testingProcess));
            testProcessName.put(testingProcess, testingProcess);
        } // if the block counter is still less than the partition size AND foldNumber is less than totalFold
          // keep adding to testingData
        else if (blockSize < partitionSize && currentFoldCnt < nbFold) {
            System.out.println("Has not reached the boundary, keep adding testing data");
            blockSize += processSentPair.get(testingProcess).size();
            testingData.addAll(processSentPair.get(testingProcess));
            testProcessName.put(testingProcess, testingProcess);
            System.out.println("BLOCK SIZE : " + blockSize);
        } else {
            System.out.println("Boundary reached, get the training data and flush everything");
            for (String trainingProcess : processSentPair.keySet()) {
                if (testProcessName.get(trainingProcess) == null) {
                    trainingData.addAll(processSentPair.get(trainingProcess));
                    trainingProcessName.put(trainingProcess, trainingProcess);
                }
            }
            System.out.println("Flushing fold " + currentFoldCnt);
            // serialize training & testing processes
            String trainingProcessesStr = Joiner.on("\t").join(trainingProcessName.keySet().iterator());
            String testingProcessessStr = Joiner.on("\t").join(testProcessName.keySet().iterator());
            FileUtil.dumpToFile(trainingProcessesStr,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train_process_name"));
            FileUtil.dumpToFile(testingProcessessStr,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test_process_name"));
            System.out.println("Nb Sentence in train" + trainingData.size());
            System.out.println("Nb Sentence in test" + testingData.size());
            FileUtil.serializeToFile(trainingData,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train.ser"));

            // ==============================================   SEMAFOR ==============================================================================================================================================
            // ============================================================================================================================================================================================
            SpockDataReader.generateSEMAFORFrameAnnotation(trainingData,
                    outputDir.concat("/fold-" + currentFoldCnt)
                            .concat("/train/cv." + currentFoldCnt + ".train.sentences.frame.elements.sbu"),
                    outputDir.concat("/fold-" + currentFoldCnt)
                            .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
                    semOffset); // DUMP REQUIRED DATA FOR SEMAFOR
            SpockDataReader.dumpRawSentences(testingData, outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/test/cv." + currentFoldCnt + ".test.sentence.sbu"));
            SpockDataReader.dumpSentenceLexTargetIdxs(testingData, outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/test/cv." + currentFoldCnt + ".test.process.target"));
            // EXECUTE ./runMalt.sh here
            try {
                ProcessBuilder pb = new ProcessBuilder(MALT_PARSER_PATH,
                        outputDir.concat("/fold-" + currentFoldCnt)
                                .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train"));
                //pb.environment().put("param1", )
                Process p = pb.start(); // Start the process.
                p.waitFor(); // Wait for the process to finish.
                StdUtil.printOutput(p);
                System.out.println("Script executed successfully");
                AllAnnotationsMergingWithoutNE.mergeAllAnnotations(
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tokenized"),
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train/conll"),
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tmp"),
                        outputDir.concat("/fold-" + currentFoldCnt)
                                .concat("/train/cv." + currentFoldCnt + ".train.sentences.all.lemma.tags.sbu"));
            } catch (Exception e) {
                e.printStackTrace();
            }
            // ============================================================================================================================================================================================
            // ==============================================   END OF SEMAFOR ==========================================================================================

            FileUtil.serializeToFile(testingData,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test.arggold.ser"));
            trainingData.clear();
            testingData.clear();
            blockSize = 0;
            currentFoldCnt++;
            testProcessName.clear();
            trainingProcessName.clear();
        }
    }

    // handle the last fold
    for (String trainingProcess : processSentPair.keySet()) {
        if (testProcessName.get(trainingProcess) == null) {
            trainingData.addAll(processSentPair.get(trainingProcess));
            trainingProcessName.put(trainingProcess, trainingProcess);
        }
    }
    // serialize training & testing processes
    System.out.println("Flushing fold " + currentFoldCnt);
    String trainingProcessesStr = Joiner.on("\t").join(trainingProcessName.keySet().iterator());
    String testingProcessessStr = Joiner.on("\t").join(testProcessName.keySet().iterator());
    FileUtil.dumpToFile(trainingProcessesStr,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train_process_name"));
    FileUtil.dumpToFile(testingProcessessStr,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test_process_name"));
    System.out.println("Nb Sentence in train" + trainingData.size());
    System.out.println("Nb Sentence in test" + testingData.size());
    FileUtil.serializeToFile(trainingData,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train.ser"));

    // ==============================================   SEMAFOR ==============================================================================================================================================
    // ============================================================================================================================================================================================
    SpockDataReader.generateSEMAFORFrameAnnotation(trainingData,
            outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/train/cv." + currentFoldCnt + ".train.sentences.frame.elements.sbu"),
            outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
            semOffset); // DUMP REQUIRED DATA FOR SEMAFOR
    SpockDataReader.dumpRawSentences(testingData, outputDir.concat("/fold-" + currentFoldCnt)
            .concat("/test/cv." + currentFoldCnt + ".test.sentence.sbu"));
    SpockDataReader.dumpSentenceLexTargetIdxs(testingData, outputDir.concat("/fold-" + currentFoldCnt)
            .concat("/test/cv." + currentFoldCnt + ".test.process.target"));
    // EXECUTE ./runMalt.sh here
    try {
        ProcessBuilder pb = new ProcessBuilder(MALT_PARSER_PATH,
                outputDir.concat("/fold-" + currentFoldCnt)
                        .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train"));
        //pb.environment().put("param1", )
        Process p = pb.start(); // Start the process.
        p.waitFor(); // Wait for the process to finish.
        StdUtil.printOutput(p);
        System.out.println("Script executed successfully");
        AllAnnotationsMergingWithoutNE.mergeAllAnnotations(
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tokenized"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/conll"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tmp"),
                outputDir.concat("/fold-" + currentFoldCnt)
                        .concat("/train/cv." + currentFoldCnt + ".train.sentences.all.lemma.tags.sbu"));
    } catch (Exception e) {
        e.printStackTrace();
    }
    // ============================================================================================================================================================================================
    // ==============================================   END OF SEMAFOR ==========================================================================================

    FileUtil.serializeToFile(testingData,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test.arggold.ser"));
}

From source file:sbu.srl.rolextract.ArgumentClassifier.java

public void generateDevSet(String outputDir, int nbFold, List<String> processes)
        throws FileNotFoundException, IOException {

    sentences = (ArrayList<Sentence>) sentences.stream().filter(s -> processes.contains(s.getProcessName()))
            .collect(Collectors.toList());
    Map<String, List<Sentence>> processSentPair = sentences.stream()
            .collect(Collectors.groupingBy(s -> s.getProcessName()));
    int partitionSize = sentences.size() / nbFold;
    int blockSize = 0;
    int currentFoldCnt = 1;

    ArrayList<Sentence> trainingData = new ArrayList<Sentence>();
    ArrayList<Sentence> testingData = new ArrayList<Sentence>();
    HashMap<String, String> testProcessName = new HashMap<String, String>();
    HashMap<String, String> trainingProcessName = new HashMap<String, String>();
    for (String testingProcess : processSentPair.keySet()) {
        System.out.println(
                "Process " + testingProcess + " Nb Sentence :" + processSentPair.get(testingProcess).size());
        // if foldNumber is equal to totalFold then
        // keep adding to testData
        if (currentFoldCnt == nbFold) {
            System.out.println("Processing last fold");
            testingData.addAll(processSentPair.get(testingProcess));
            testProcessName.put(testingProcess, testingProcess);
        } // if the block counter is still less than the partition size AND foldNumber is less than totalFold
          // keep adding to testingData
        else if (blockSize < partitionSize && currentFoldCnt < nbFold) {
            System.out.println("Has not reached the boundary, keep adding testing data");
            blockSize += processSentPair.get(testingProcess).size();
            testingData.addAll(processSentPair.get(testingProcess));
            testProcessName.put(testingProcess, testingProcess);
            System.out.println("BLOCK SIZE : " + blockSize);
        } else {
            System.out.println("Boundary reached, get the training data and flush everything");
            for (String trainingProcess : processSentPair.keySet()) {
                if (testProcessName.get(trainingProcess) == null) {
                    trainingData.addAll(processSentPair.get(trainingProcess));
                    trainingProcessName.put(trainingProcess, trainingProcess);
                }
            }
            System.out.println("Flushing fold " + currentFoldCnt);
            // serialize training & testing processes
            String trainingProcessesStr = Joiner.on("\t").join(trainingProcessName.keySet().iterator());
            String testingProcessesStr = Joiner.on("\t").join(testProcessName.keySet().iterator());
            FileUtil.dumpToFile(trainingProcessesStr,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train_process_name"));
            FileUtil.dumpToFile(testingProcessesStr,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test_process_name"));
            System.out.println("Nb Sentence in train" + trainingData.size());
            System.out.println("Nb Sentence in test" + testingData.size());
            FileUtil.serializeToFile(trainingData,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train.ser"));

            // ====================   SEMAFOR   ====================
            SpockDataReader.generateSEMAFORFrameAnnotation(trainingData,
                    outputDir.concat("/fold-" + currentFoldCnt)
                            .concat("/train/cv." + currentFoldCnt + ".train.sentences.frame.elements.sbu"),
                    outputDir.concat("/fold-" + currentFoldCnt)
                            .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
                    semOffset); // DUMP REQUIRED DATA FOR SEMAFOR
            SpockDataReader.dumpRawSentences(testingData, outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/test/cv." + currentFoldCnt + ".test.sentence.sbu"));
            SpockDataReader.dumpSentenceLexTargetIdxs(testingData, outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/test/cv." + currentFoldCnt + ".test.process.target"));
            // EXECUTE ./runMalt.sh here
            try {
                ProcessBuilder pb = new ProcessBuilder(MALT_PARSER_PATH,
                        outputDir.concat("/fold-" + currentFoldCnt)
                                .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train"));
                //pb.environment().put("param1", )
                Process p = pb.start(); // Start the process.
                p.waitFor(); // Wait for the process to finish.
                StdUtil.printOutput(p);
                System.out.println("Script executed successfully");
                AllAnnotationsMergingWithoutNE.mergeAllAnnotations(
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tokenized"),
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train/conll"),
                        outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tmp"),
                        outputDir.concat("/fold-" + currentFoldCnt)
                                .concat("/train/cv." + currentFoldCnt + ".train.sentences.all.lemma.tags.sbu"));
            } catch (Exception e) {
                e.printStackTrace();
            }
            // ====================   END OF SEMAFOR   ====================

            FileUtil.serializeToFile(testingData,
                    outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test.arggold.ser"));
            trainingData.clear();
            testingData.clear();

            blockSize = 0;
            currentFoldCnt++;
            testProcessName.clear();
            trainingProcessName.clear();

        }
    }

    // handle the last fold
    for (String trainingProcess : processSentPair.keySet()) {
        if (testProcessName.get(trainingProcess) == null) {
            trainingData.addAll(processSentPair.get(trainingProcess));
            trainingProcessName.put(trainingProcess, trainingProcess);
        }
    }
    // serialize training & testing processes
    System.out.println("Flushing fold " + currentFoldCnt);
    String trainingProcessesStr = Joiner.on("\t").join(trainingProcessName.keySet().iterator());
    String testingProcessesStr = Joiner.on("\t").join(testProcessName.keySet().iterator());
    FileUtil.dumpToFile(trainingProcessesStr,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train_process_name"));
    FileUtil.dumpToFile(testingProcessesStr,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test_process_name"));
    System.out.println("Nb Sentence in train" + trainingData.size());
    System.out.println("Nb Sentence in test" + testingData.size());
    FileUtil.serializeToFile(trainingData,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/train/train.ser"));

    // ====================   SEMAFOR   ====================
    SpockDataReader.generateSEMAFORFrameAnnotation(trainingData,
            outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/train/cv." + currentFoldCnt + ".train.sentences.frame.elements.sbu"),
            outputDir.concat("/fold-" + currentFoldCnt)
                    .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
            semOffset); // DUMP REQUIRED DATA FOR SEMAFOR
    SpockDataReader.dumpRawSentences(testingData, outputDir.concat("/fold-" + currentFoldCnt)
            .concat("/test/cv." + currentFoldCnt + ".test.sentence.sbu"));
    SpockDataReader.dumpSentenceLexTargetIdxs(testingData, outputDir.concat("/fold-" + currentFoldCnt)
            .concat("/test/cv." + currentFoldCnt + ".test.process.target"));
    // EXECUTE ./runMalt.sh here
    try {
        ProcessBuilder pb = new ProcessBuilder(MALT_PARSER_PATH,
                outputDir.concat("/fold-" + currentFoldCnt)
                        .concat("/train/cv." + currentFoldCnt + ".train.sentence.sbu"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train"));
        //pb.environment().put("param1", )
        Process p = pb.start(); // Start the process.
        p.waitFor(); // Wait for the process to finish.
        StdUtil.printOutput(p);
        System.out.println("Script executed successfully");
        AllAnnotationsMergingWithoutNE.mergeAllAnnotations(
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tokenized"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/conll"),
                outputDir.concat("/fold-" + currentFoldCnt).concat("/train/tmp"),
                outputDir.concat("/fold-" + currentFoldCnt)
                        .concat("/train/cv." + currentFoldCnt + ".train.sentences.all.lemma.tags.sbu"));
    } catch (Exception e) {
        e.printStackTrace();
    }
    // ====================   END OF SEMAFOR   ====================

    FileUtil.serializeToFile(testingData,
            outputDir.concat("/fold-" + currentFoldCnt).concat("/test/test.arggold.ser"));
}
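
The fold loop above reuses its accumulators rather than reallocating them: once a fold has been flushed to disk, trainingData.clear(), testingData.clear(), testProcessName.clear() and trainingProcessName.clear() reset the collections for the next fold. Below is a minimal, self-contained sketch of that clear-and-reuse pattern; the fold count and item names are made up for illustration.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class FoldAccumulatorDemo {
    public static void main(String[] args) {
        List<String> testingData = new ArrayList<>();
        HashMap<String, String> testProcessName = new HashMap<>();
        int nbFold = 3;

        for (int fold = 1; fold <= nbFold; fold++) {
            // accumulate this fold's items
            testingData.add("sentence-" + fold);
            testProcessName.put("process-" + fold, "process-" + fold);

            System.out.println("Flushing fold " + fold + ": " + testingData + " " + testProcessName.keySet());

            // reset the shared accumulators before the next fold;
            // clear() removes all entries but keeps the backing capacity
            testingData.clear();
            testProcessName.clear();
        }
    }
}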

From source file:com.krawler.spring.hrms.rec.job.hrmsRecJobController.java

public ModelAndView jobsearch(HttpServletRequest request, HttpServletResponse response) {
    KwlReturnObject result = null;
    JSONObject jobj = new JSONObject();
    JSONObject jobj1 = new JSONObject();
    String jobtype = "Internal";
    int count = 0;
    String status = "";
    String userid = request.getParameter("userid");
    String ss = request.getParameter("ss");
    int start = 0;
    int limit = 15;
    HashMap<String, Object> requestParams = new HashMap<String, Object>();
    ArrayList filter_names = new ArrayList(), filter_values = new ArrayList();
    if (request.getParameter("start") != null) {
        start = Integer.parseInt(request.getParameter("start"));
        limit = Integer.parseInt(request.getParameter("limit"));
    }

    try {
        List lst = null;
        if (StringUtil.isNullOrEmpty(request.getParameter("position"))) {
            filter_names.add("!jobtype");
            filter_names.add("company.companyID");
            filter_names.add("delflag");
            filter_names.add("<=startdate");
            filter_names.add(">=enddate");

            filter_values.add(jobtype);
            filter_values.add(sessionHandlerImplObj.getCompanyid(request));
            filter_values.add(0);
            filter_values.add(new Date());
            filter_values.add(new Date());

        } else {
            filter_names.add("position.id");
            filter_names.add("!jobtype");
            filter_names.add("company.companyID");
            filter_names.add("delflag");
            filter_names.add("<=startdate");
            filter_names.add(">=enddate");

            filter_values.add(request.getParameter("position"));
            filter_values.add(jobtype);
            filter_values.add(sessionHandlerImplObj.getCompanyid(request));
            filter_values.add(0);
            filter_values.add(new Date());
            filter_values.add(new Date());
        }

        requestParams.put("filter_names", filter_names);
        requestParams.put("filter_values", filter_values);
        requestParams.put("searchcol", new String[] { "jobid" });
        requestParams.put("ss", ss);
        requestParams.put("allflag", false);
        requestParams.put("start", start);
        requestParams.put("limit", limit);
        result = hrmsRecJobDAOObj.getPositionmain(requestParams);
        lst = result.getEntityList();
        count = result.getRecordTotalCount();
        // iterate over the returned page (lst), not the total record count, which can exceed the page size
        for (int ctr = 0; ctr < lst.size(); ctr++) {
            Positionmain extmt = (Positionmain) lst.get(ctr);
            JSONObject tmpObj = new JSONObject();
            tmpObj.put("jid", extmt.getPositionid());
            //                status = getappPositionstatus(userid,extmt.getPositionid(), session, request);
            filter_names.clear();
            filter_values.clear();
            filter_names.add("configjobapplicant.id");
            filter_names.add("position.positionid");
            filter_names.add("delflag");
            filter_values.add(userid);
            filter_values.add(extmt.getPositionid());
            filter_values.add(0);
            requestParams.clear();
            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);
            result = hrmsRecJobDAOObj.getPositionstatus(requestParams);
            Allapplications app = null;
            if (StringUtil.checkResultobjList(result)) {
                app = (Allapplications) result.getEntityList().get(0);
                status = app.getStatus();
            } else {
                status = "none";
            }

            if (status.equalsIgnoreCase("none")) {
                tmpObj.put("status", 0);
                tmpObj.put("selectionstatus", messageSource.getMessage("hrms.recruitment.not.applied", null,
                        RequestContextUtils.getLocale(request)));
            } else {
                tmpObj.put("status", 1);
                tmpObj.put("applicationid", app.getId());
                tmpObj.put("selectionstatus", status);
            }
            tmpObj.put("jobname", extmt.getPosition().getValue());
            tmpObj.put("jobpositionid", extmt.getJobid());
            tmpObj.put("jdescription", extmt.getDetails());
            requestParams.clear();
            requestParams.put("request", request);
            DateFormat df = kwlCommonTablesDAOObj.getUserDateFormatter(
                    sessionHandlerImplObj.getDateFormatID(request),
                    sessionHandlerImplObj.getUserTimeFormat(request),
                    sessionHandlerImplObj.getTimeZoneDifference(request));
            tmpObj.put("jstartdate", df.format(extmt.getStartdate()));
            tmpObj.put("jenddate", df.format(extmt.getEnddate()));

            tmpObj.put("jdepartment", extmt.getDepartmentid().getValue());
            tmpObj.put("posmasterid", extmt.getPosition().getId());
            jobj.append("data", tmpObj);
        }
        if (jobj.isNull("data")) {
            jobj.put("data", new com.krawler.utils.json.JSONArray());
        }
        jobj.put("count", count);
        jobj1.put("data", jobj.toString());
        jobj1.put("valid", true);
    } catch (Exception e) {
        e.printStackTrace(); // log the failure instead of silently swallowing it
    } finally {
        return new ModelAndView("jsonView", "model", jobj1.toString());
    }
}
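
jobsearch rebuilds its filter state for every position in the result page: filter_names.clear(), filter_values.clear() and requestParams.clear() wipe the previous query's parameters before the next lookup is assembled. A minimal sketch of that rebuild cycle, with a hypothetical runQuery standing in for the DAO call (hrmsRecJobDAOObj.getPositionstatus in the original):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class FilterReuseDemo {
    // hypothetical stand-in for a DAO lookup
    static void runQuery(HashMap<String, Object> params) {
        System.out.println("query with " + params);
    }

    public static void main(String[] args) {
        HashMap<String, Object> requestParams = new HashMap<>();
        List<String> filterNames = new ArrayList<>();
        List<Object> filterValues = new ArrayList<>();

        for (String positionId : new String[] { "pos-1", "pos-2" }) {
            // start each lookup from a clean slate
            requestParams.clear();
            filterNames.clear();
            filterValues.clear();

            filterNames.add("position.positionid");
            filterValues.add(positionId);
            filterNames.add("delflag");
            filterValues.add(0);

            requestParams.put("filter_names", filterNames);
            requestParams.put("filter_values", filterValues);
            runQuery(requestParams);
        }
    }
}

Clearing rather than reallocating keeps the same list instances alive across iterations, which is why the original can keep re-putting them into requestParams without leaking stale filters.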

From source file:ddf.catalog.impl.CatalogFrameworkImpl.java

@Override
public CreateResponse create(CreateStorageRequest streamCreateRequest)
        throws IngestException, SourceUnavailableException {
    validateCreateStorageRequest(streamCreateRequest);

    setFlagsOnRequest(streamCreateRequest);

    if (fanoutEnabled) {
        throw new IngestException(FANOUT_MESSAGE);
    }

    if (Requests.isLocal(streamCreateRequest)
            && (!sourceIsAvailable(catalog) || !storageIsAvailable(storage))) {
        SourceUnavailableException sourceUnavailableException = new SourceUnavailableException(
                "Local provider is not available, cannot perform create operation.");
        if (INGEST_LOGGER.isWarnEnabled()) {
            INGEST_LOGGER.warn("Error on create operation, local provider not available.",
                    sourceUnavailableException);
        }
        throw sourceUnavailableException;
    }

    Map<String, Metacard> metacardMap = new HashMap<>();
    List<ContentItem> contentItems = new ArrayList<>(streamCreateRequest.getContentItems().size());
    HashMap<String, Path> tmpContentPaths = new HashMap<>(streamCreateRequest.getContentItems().size());
    generateMetacardAndContentItems(streamCreateRequest, streamCreateRequest.getContentItems(), metacardMap,
            contentItems, tmpContentPaths);
    streamCreateRequest.getProperties().put(CONTENT_PATHS, tmpContentPaths);

    // Get attributeOverrides, apply them and then remove them from the streamCreateRequest so they are not exposed to plugins
    Map<String, String> attributeOverrideHeaders = (HashMap<String, String>) streamCreateRequest.getProperties()
            .get(Constants.ATTRIBUTE_OVERRIDES_KEY);
    applyAttributeOverridesToMetacardMap(attributeOverrideHeaders, metacardMap);
    streamCreateRequest.getProperties().remove(Constants.ATTRIBUTE_OVERRIDES_KEY);

    CreateStorageRequest createStorageRequest = null;
    CreateResponse createResponse;
    try {
        if (contentItems.size() > 0) {
            createStorageRequest = new CreateStorageRequestImpl(contentItems, streamCreateRequest.getId(),
                    streamCreateRequest.getProperties());
            for (final PreCreateStoragePlugin plugin : frameworkProperties.getPreCreateStoragePlugins()) {
                try {
                    createStorageRequest = plugin.process(createStorageRequest);
                } catch (PluginExecutionException e) {
                    LOGGER.warn("Plugin processing failed. This is allowable. Skipping to next plugin.", e);
                }
            }

            CreateStorageResponse createStorageResponse;
            try {
                createStorageResponse = storage.create(createStorageRequest);
                createStorageResponse.getProperties().put(CONTENT_PATHS, tmpContentPaths);
            } catch (StorageException e) {
                throw new IngestException("Could not store content items.", e);
            }

            for (final PostCreateStoragePlugin plugin : frameworkProperties.getPostCreateStoragePlugins()) {
                try {
                    createStorageResponse = plugin.process(createStorageResponse);
                } catch (PluginExecutionException e) {
                    LOGGER.warn("Plugin processing failed. This is allowable. Skipping to next plugin.", e);
                }
            }

            for (ContentItem contentItem : createStorageResponse.getCreatedContentItems()) {
                if (contentItem.getMetacard().getResourceURI() == null) {
                    contentItem.getMetacard()
                            .setAttribute(new AttributeImpl(Metacard.RESOURCE_URI, contentItem.getUri()));
                    contentItem.getMetacard().setAttribute(
                            new AttributeImpl(Metacard.RESOURCE_SIZE, String.valueOf(contentItem.getSize())));
                }
                metacardMap.put(contentItem.getId(), contentItem.getMetacard());
            }
        }

        CreateRequest createRequest = new CreateRequestImpl(new ArrayList<>(metacardMap.values()),
                streamCreateRequest.getProperties());

        createResponse = create(createRequest);
    } catch (Exception e) {
        if (createStorageRequest != null) {
            try {
                storage.rollback(createStorageRequest);
            } catch (StorageException e1) {
                LOGGER.error("Unable to remove temporary content for id: " + createStorageRequest.getId(), e1);
            }
        }
        throw new IngestException("Unable to store products for request: " + streamCreateRequest.getId(), e);
    } finally {
        if (createStorageRequest != null) {
            try {
                storage.commit(createStorageRequest);
            } catch (StorageException e) {
                LOGGER.error("Unable to commit content changes for id: " + createStorageRequest.getId(), e);
                try {
                    storage.rollback(createStorageRequest);
                } catch (StorageException e1) {
                    LOGGER.error("Unable to remove temporary content for id: " + createStorageRequest.getId(),
                            e1);
                }
            }
        }
        tmpContentPaths.values().forEach(path -> FileUtils.deleteQuietly(path.toFile()));
        tmpContentPaths.clear();
    }

    return createResponse;
}
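
Here tmpContentPaths lives only for the duration of the request: the finally block deletes every staged file and then calls clear() so the map holds no dangling Path entries regardless of how the create operation ended. A minimal sketch of that cleanup idiom (the item key and file prefix are illustrative):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;

public class TmpPathCleanupDemo {
    public static void main(String[] args) throws IOException {
        HashMap<String, Path> tmpContentPaths = new HashMap<>();
        try {
            // stage a temporary file per content item
            tmpContentPaths.put("item-1", Files.createTempFile("content", ".bin"));
            // ... process the staged content ...
        } finally {
            // always delete the staged files, then drop the now-stale references
            for (Path p : tmpContentPaths.values()) {
                Files.deleteIfExists(p);
            }
            tmpContentPaths.clear();
        }
    }
}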

From source file:com.krawler.spring.hrms.common.hrmsCommonController.java

public ModelAndView getAllUserDetailsHrms(HttpServletRequest request, HttpServletResponse response) {
    KwlReturnObject kmsg = null;
    JSONObject jobj = new JSONObject();
    JSONArray jarr = new JSONArray();
    JSONObject countobj = new JSONObject();
    JSONObject jobj1 = new JSONObject();
    try {
        String Searchjson = request.getParameter("searchJson");
        String appendCase = "and";
        String companyid = sessionHandlerImplObj.getCompanyid(request);
        String lid = StringUtil.checkForNull(request.getParameter("lid"));
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        ArrayList filter_names = new ArrayList(
                Arrays.asList("ua.user.company.companyID", "ua.user.deleteflag"));
        ArrayList filter_values = new ArrayList(Arrays.asList(companyid, 0));
        requestParams.put("ss", StringUtil.checkForNull(request.getParameter("ss")));
        requestParams.put("allflag", false);
        requestParams.put("searchcol",
                new String[] { "u.firstName", "u.lastName", "ua.role.name", "u.emailID" });
        if (request.getParameter("combo") != null) {
            requestParams.put("combo", request.getParameter("combo"));
            requestParams.put("allflag", true);
        } else {
            requestParams.put("combo", "");
        }
        StringUtil.checkpaging(requestParams, request);
        SimpleDateFormat df = new SimpleDateFormat("yyyy/MM/dd");
        if (!StringUtil.isNullOrEmpty(request.getParameter("stdate"))) {
            filter_names.add(">=emp.joindate");
            filter_values.add(new Date(df.format(new Date(request.getParameter("stdate")))));
            filter_names.add("<=emp.joindate");
            filter_values.add(new Date(df.format(new Date(request.getParameter("enddate")))));
        }

        if (!StringUtil.isNullOrEmpty(Searchjson)) {
            getMyAdvanceSearchparams(Searchjson, filter_names);
            insertParamAdvanceSearchString(filter_values, Searchjson);
        }
        requestParams.put("filter_names", filter_names);
        requestParams.put("filter_values", filter_values);

        kmsg = hrmsCommonDAOObj.getUserDetailsHrms(requestParams);
        List lst = kmsg.getEntityList();
        jarr = kwlCommonTablesDAOObj.getDetailsJson(lst, 0, "com.krawler.common.admin.User");

        int count = 0;
        for (int ctr = 0; ctr < jarr.length(); ctr++) {
            jobj = jarr.getJSONObject(ctr);
            Object[] row = (Object[]) lst.get(ctr);
            User u = (User) jobj.get("instance");
            Useraccount ua = (Useraccount) kwlCommonTablesDAOObj
                    .getObject("com.krawler.common.admin.Useraccount", row[0].toString());
            if (row[1] != null) {
                Empprofile e = (Empprofile) kwlCommonTablesDAOObj.getObject("com.krawler.hrms.ess.Empprofile",
                        row[1].toString());
                if (!StringUtil.isNullOrEmpty(e.getStatus())) {
                    jobj.put("status", e.getStatus());
                } else {
                    jobj.put("status", "Pending");
                }
                jobj.put("joindate", (e.getJoindate() == null ? ""
                        : sessionHandlerImplObj.getDateFormatter(request).format(e.getJoindate())));
            } else {
                jobj.put("status", "Incomplete");
            }
            jobj.put("department", (ua.getDepartment() == null ? "" : ua.getDepartment().getId()));
            jobj.put("departmentname", (ua.getDepartment() == null ? "" : ua.getDepartment().getValue()));
            jobj.put("role", (ua.getRole() == null ? "" : ua.getRole().getID()));
            String name = "";
            if (ua.getRole() != null && ua.getRole().getCompany() != null) {
                name = ua.getRole().getName();
            } else {
                name = messageSource.getMessage("hrms.common.role." + ua.getRole().getID(), null,
                        ua.getRole().getName(), RequestContextUtils.getLocale(request));
            }
            jobj.put("rolename", (ua.getRole() == null ? "" : name));
            jobj.put("username", u.getUserLogin().getUserName());
            jobj.put("fullname", u.getFirstName() + " " + (u.getLastName() == null ? "" : u.getLastName()));
            jobj.put("lastlogin",
                    (u.getUserLogin().getLastActivityDate() == null ? ""
                            : sessionHandlerImplObj.getDateFormatter(request)
                                    .format(u.getUserLogin().getLastActivityDate())));
            jobj.put("designation", ua.getDesignationid() == null ? "" : ua.getDesignationid().getValue());
            jobj.put("designationid", ua.getDesignationid() == null ? "" : ua.getDesignationid().getId());
            jobj.put("templateid", ua.getTemplateid() != null ? ua.getTemplateid() : "");
            jobj.put("salary", ua.getSalary());
            jobj.put("accno", ua.getAccno());
            jobj.put("frequency", u.getFrequency());
            requestParams.clear();
            requestParams.put("companyid", sessionHandlerImplObj.getCompanyid(request));
            requestParams.put("empid", ua.getEmployeeid());
            KwlReturnObject result;
            //                KwlReturnObject result = profileHandlerDAOObj.getEmpidFormatEdit(requestParams);
            if (ua.getEmployeeIdFormat() == null) {
                jobj.put("employeeid", ua.getEmployeeid() == null ? ""
                        : profileHandlerDAOObj.getEmpidFormatEdit(requestParams).getEntityList().get(0));
            } else {
                requestParams.put("standardEmpId", profileHandlerDAOObj.getEmpidFormatEdit(requestParams)
                        .getEntityList().get(0).toString());
                requestParams.put("employeeIdFormat", ua.getEmployeeIdFormat());
                jobj.put("employeeid", profileHandlerDAOObj.getNewEmployeeIdFormat(requestParams));
            }

            requestParams.clear();
            filter_names.clear();
            filter_values.clear();
            filter_names.add("assignemp.userID");
            filter_values.add(u.getUserID());

            filter_names.add("assignman.deleteflag");
            filter_values.add(0);

            filter_names.add("managerstatus");
            filter_values.add(1);

            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);

            result = hrmsCommonDAOObj.getAssignmanager(requestParams);
            List lst1 = result.getEntityList();
            Iterator itr1 = lst1.iterator();

            if (itr1.hasNext()) {
                while (itr1.hasNext()) {
                    Assignmanager asm = (Assignmanager) itr1.next();
                    if (asm.getAssignman() != null) {
                        jobj.append("managerid", asm.getAssignman().getUserID());
                        jobj.append("manager",
                                asm.getAssignman().getFirstName() + " " + asm.getAssignman().getLastName());
                    }
                }
            } else {
                jobj.put("manager", " ");
                jobj.put("managerid", " ");
            }

            requestParams.clear();
            filter_names.clear();
            filter_values.clear();
            filter_names.add("employee.userID");
            filter_values.add(u.getUserID());

            filter_names.add("reviewer.deleteflag");
            filter_values.add(0);

            filter_names.add("reviewerstatus");
            filter_values.add(1);

            requestParams.put("filter_names", filter_names);
            requestParams.put("filter_values", filter_values);

            result = hrmsCommonDAOObj.getAssignreviewer(requestParams);
            lst1 = result.getEntityList();
            itr1 = lst1.iterator();

            if (itr1.hasNext()) {
                while (itr1.hasNext()) {
                    Assignreviewer rev = (Assignreviewer) itr1.next();
                    if (rev.getReviewer() != null) {
                        jobj.append("reviewerid", rev.getReviewer().getUserID());
                        jobj.append("reviewer",
                                rev.getReviewer().getFirstName() + " " + rev.getReviewer().getLastName());
                    }
                }
            } else {
                jobj.put("reviewer", " ");
                jobj.put("reviewerid", " ");
            }
            jarr.put(ctr, jobj);
            count++;
        }

        countobj.put("data", jarr);
        countobj.put("count", kmsg.getRecordTotalCount());
        jobj1.put("data", countobj);
        jobj1.put("valid", true);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        return new ModelAndView("jsonView", "model", jobj1.toString());
    }
}
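
Note that getAllUserDetailsHrms stores the same filter_names and filter_values instances inside requestParams and then clears them before the next lookup; because a HashMap holds references rather than copies, clearing the lists also empties what any still-held map entry sees. A small sketch of that aliasing behaviour, worth keeping in mind when reusing collections this way:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class SharedReferenceDemo {
    public static void main(String[] args) {
        HashMap<String, Object> requestParams = new HashMap<>();
        List<String> filterNames = new ArrayList<>();
        filterNames.add("assignemp.userID");
        requestParams.put("filter_names", filterNames);

        // clearing the list also empties the value seen through the map,
        // because the map stores a reference to the same ArrayList
        filterNames.clear();
        System.out.println(requestParams.get("filter_names")); // prints []

        // requestParams.clear() then removes the mapping itself
        requestParams.clear();
        System.out.println(requestParams.containsKey("filter_names")); // prints false
    }
}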