Example usage for java.util NoSuchElementException getMessage

List of usage examples for java.util NoSuchElementException getMessage

Introduction

On this page you can find example usage of java.util.NoSuchElementException.getMessage().

Prototype

public String getMessage() 

Source Link

Document

Returns the detail message string of this throwable.
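
Before the examples drawn from real projects, here is a minimal, self-contained sketch (written for this page, not taken from any of the sources below) showing the typical pattern: catch the NoSuchElementException and log the detail message returned by getMessage().

import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;

public class GetMessageDemo {
    public static void main(String[] args) {
        Iterator<String> it = Arrays.asList("only").iterator();
        it.next(); // consumes the single element
        try {
            it.next(); // nothing left, throws NoSuchElementException
        } catch (NoSuchElementException e) {
            // getMessage() returns the detail message, which may be null
            // if the code that threw the exception did not supply one.
            System.err.println("NoSuchElementException: " + e.getMessage());
        }
    }
}

Note that many JDK iterators throw NoSuchElementException without a detail message, so the examples below generally log getMessage() together with extra context (retry counts, pool state, the failing key, and so on).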

Usage

From source file:org.mobicents.servlet.sip.example.SimpleSipServlet.java

/**
 * {@inheritDoc}
 */
protected void doNotify(SipServletRequest request) throws ServletException, IOException {

    Channel channel = null;
    String routingKey = "";

    //a trick to change routingKey value.
    //routingKey = getBindingKey();

    try {

        channel = pool.borrowObject();
        String message = request.getCallId();

        channel.exchangeDeclare(EXCHANGE_NAME, "topic", true);
        channel.basicPublish(EXCHANGE_NAME, routingKey, null, message.getBytes());
        logger.info("PUBLISH A MESSAGE : " + message);

        logger.info("*************************************");
        logger.info("**" + "Number active : " + pool.getNumActive() + " ***");
        logger.info("**" + "Number idle  : " + pool.getNumIdle() + " ***");
        logger.info("*************************************");

    } catch (NoSuchElementException e) {

        logger.error(e.getMessage());
        throw new ServletException(e);

    } catch (IllegalStateException e) {

        logger.error(e.getMessage());
        throw new ServletException(e);
    } catch (Exception e) {

        logger.error(e.getMessage());
        throw new ServletException(e);
    } finally {
        if (channel != null) {
            try {
                pool.returnObject(channel);
            } catch (Exception e) {
                e.printStackTrace();
                logger.error("Failed to return channel back to pool. Exception message: " + e.getMessage());
            }
            //logger.info("RETURN CHANNEL TO THE POOL");
        }

    }
    SipServletResponse sipServletResponse = request.createResponse(SipServletResponse.SC_OK);
    sipServletResponse.send();

}

From source file:org.geogig.geoserver.config.ConfigStoreTest.java

@Test
public void loadNonExistent() throws Exception {
    final String dummyId = "94bcb762-9ee9-4b43-a912-063509966989";
    try {
        store.get(dummyId);
        fail("Expected NoSuchElementException");
    } catch (NoSuchElementException e) {
        assertTrue(e.getMessage().startsWith("Repository not found: "));
    }

    String path = ConfigStore.path(dummyId);
    Resource resource = dataDir.get(path);
    assertEquals(Type.UNDEFINED, resource.getType());
}

From source file:edu.cornell.mannlib.vitro.webapp.visualization.persongrantcount.PersonGrantCountVisCodeGenerator.java

/**
 * This method is used to set up parameters for the sparkline value object. These parameters
 * will be used in the template to construct the actual HTML/JavaScript code.
 * @param visMode
 * @param providedVisContainerID
 * @return the populated SparklineData
 */
private SparklineData setupSparklineParameters(String visMode, String providedVisContainerID) {

    SparklineData sparklineData = new SparklineData();
    sparklineData.setYearToActivityCount(yearToGrantCount);

    int numOfYearsToBeRendered = 0;

    /*
     * Cutting off the sparkline at the current year produces a misleading downward curve when
     * no grants have been recorded for it yet, so we consider only the last 10 complete years.
     */
    int currentYear = Calendar.getInstance().get(Calendar.YEAR) - 1;
    int shortSparkMinYear = currentYear - VisConstants.MINIMUM_YEARS_CONSIDERED_FOR_SPARKLINE + 1;

    /*
     * This is required because, when deciding the range of years over which
     * the vis is rendered, we don't want to be influenced by the
     * "DEFAULT_GRANT_YEAR".
     */
    Set<String> grantYears = new HashSet<String>(yearToGrantCount.keySet());
    grantYears.remove(VOConstants.DEFAULT_GRANT_YEAR);

    /*
     * We set the default value of minGrantYear to 10 years before the
     * current year (which is suitably represented by shortSparkMinYear),
     * in case we run into an invalid set of grant years.
     */
    int minGrantYear = shortSparkMinYear;

    String visContainerID = null;

    if (yearToGrantCount.size() > 0) {
        try {
            minGrantYear = Integer.parseInt(Collections.min(grantYears));
        } catch (NoSuchElementException e1) {
            log.debug("vis: " + e1.getMessage() + " error occurred for " + yearToGrantCount.toString());
        } catch (NumberFormatException e2) {
            log.debug("vis: " + e2.getMessage() + " error occurred for " + yearToGrantCount.toString());
        }
    }

    int minGrantYearConsidered = 0;

    /*
     * There might be a case that the author investigated his first grant
     * within the last 10 years but we want to make sure that the sparkline
     * is representative of at least the last 10 years, so we will set the
     * minGrantYearConsidered to "currentYear - 10" which is also given by
     * "shortSparkMinYear".
     */
    if (minGrantYear > shortSparkMinYear) {
        minGrantYearConsidered = shortSparkMinYear;
    } else {
        minGrantYearConsidered = minGrantYear;
    }

    numOfYearsToBeRendered = currentYear - minGrantYearConsidered + 1;

    sparklineData.setNumOfYearsToBeRendered(numOfYearsToBeRendered);

    int grantCounter = 0;

    /*
     * For the purpose of this visualization the term "Sparks" essentially
     * means data points. Sparks rendered in full mode are always the ones
     * which have a year associated with them.
     */
    int renderedFullSparks = 0;

    List<YearToEntityCountDataElement> yearToGrantCountDataTable = new ArrayList<YearToEntityCountDataElement>();

    for (int grantYear = minGrantYearConsidered; grantYear <= currentYear; grantYear++) {

        String stringInvestigatedYear = String.valueOf(grantYear);
        Integer currentGrants = yearToGrantCount.get(stringInvestigatedYear);

        if (currentGrants == null) {
            currentGrants = 0;
        }

        yearToGrantCountDataTable
                .add(new YearToEntityCountDataElement(grantCounter, stringInvestigatedYear, currentGrants));

        /*
         * Sparks that will be rendered are always the ones which have a
         * year associated with them.
         */
        renderedFullSparks += currentGrants;
        grantCounter++;

    }

    sparklineData.setYearToEntityCountDataTable(yearToGrantCountDataTable);
    sparklineData.setRenderedSparks(renderedFullSparks);

    /*
     * The total grant count also includes grants that have no year
     * associated with them.
     */
    Integer unknownYearGrants = 0;
    if (yearToGrantCount.get(VOConstants.DEFAULT_GRANT_YEAR) != null) {
        unknownYearGrants = yearToGrantCount.get(VOConstants.DEFAULT_GRANT_YEAR);
    }

    sparklineData.setUnknownYearGrants(unknownYearGrants);

    if (providedVisContainerID != null) {
        visContainerID = providedVisContainerID;
    } else {
        visContainerID = DEFAULT_VIS_CONTAINER_DIV_ID;
    }

    sparklineData.setVisContainerDivID(visContainerID);

    /*
     * By default these represent the range of the rendered sparks. Only in
     * the case of "short" sparkline mode do we set the earliest rendered
     * grant year to "currentYear - 10".
     */
    sparklineData.setEarliestYearConsidered(minGrantYearConsidered);
    sparklineData.setEarliestRenderedGrantYear(minGrantYear);
    sparklineData.setLatestRenderedGrantYear(currentYear);

    /*
     * The full sparkline is rendered by default. The short sparkline is
     * rendered only if the URL explicitly mentions
     * SHORT_SPARKLINE_MODE_URL_HANDLE.
     */
    if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE.equalsIgnoreCase(visMode)) {

        sparklineData.setEarliestRenderedGrantYear(shortSparkMinYear);
        sparklineData.setShortVisMode(true);

    } else {
        sparklineData.setShortVisMode(false);
    }

    if (yearToGrantCount.size() > 0) {

        sparklineData.setFullTimelineNetworkLink(UtilityFunctions.getCollaboratorshipNetworkLink(individualURI,
                VisualizationFrameworkConstants.PERSON_LEVEL_VIS,
                VisualizationFrameworkConstants.COPI_VIS_MODE));

        sparklineData.setDownloadDataLink(UtilityFunctions.getCSVDownloadURL(individualURI,
                VisualizationFrameworkConstants.PERSON_GRANT_COUNT_VIS, ""));
    }
    return sparklineData;
}

From source file:com.epam.catgenome.manager.wig.WigManager.java

private double queryWig(BigWigFile bigWigFile, String chrName, int start, int end) throws IOException {
    List<BigSummary> summarize;
    try {
        summarize = bigWigFile.summarize(chrName, start, end, 1, true);
    } catch (NoSuchElementException e) {
        LOGGER.info(e.getMessage(), e);
        return 0;
    }
    double res = 0.0;
    for (BigSummary summary : summarize) {
        if (!Double.isNaN(summary.getMaxValue()) && !Double.isInfinite(summary.getMaxValue())) {
            res += summary.getMaxValue();
        }
    }
    return res;
}

From source file:com.att.cspd.SimpleSipServlet.java

/**
 * {@inheritDoc}
 */
protected void doNotify(SipServletRequest request) throws ServletException, IOException {

    Channel channel = null;
    String routingKey = "";

    //a trick to change routingKey value.
    //routingKey = getBindingKey();

    try {

        channel = pool.borrowObject();
        String message = request.getCallId();
        logger.info("doNotify method: Request dump: " + request.toString());
        Iterator itr = request.getHeaderNames();
        while (itr.hasNext()) {
            logger.info("Header Name : " + itr.next() + "\n");
        }
        String toHdr = request.getHeader("To");

        Matcher matcher = Pattern.compile("sip:(.*)@.+").matcher(toHdr);
        if (matcher.find()) {
            String userpart = matcher.group(1);
            logger.info("user part of the sip url : " + userpart);
            routingKey = userpart;
        }

        channel.exchangeDeclare(EXCHANGE_NAME, "topic", true);
        channel.basicPublish(EXCHANGE_NAME, routingKey, null, message.getBytes());
        logger.info("PUBLISH A MESSAGE : " + message);

        logger.info("*************************************");
        logger.info("**" + "Number active : " + pool.getNumActive() + " ***");
        logger.info("**" + "Number idle  : " + pool.getNumIdle() + " ***");
        logger.info("*************************************");

    } catch (NoSuchElementException e) {

        logger.error(e.getMessage());
        throw new ServletException(e);

    } catch (IllegalStateException e) {

        logger.error(e.getMessage());
        throw new ServletException(e);
    } catch (Exception e) {

        logger.error(e.getMessage());
        throw new ServletException(e);
    } finally {
        if (channel != null) {
            try {
                pool.returnObject(channel);
            } catch (Exception e) {
                e.printStackTrace();
                logger.error("Failed to return channel back to pool. Exception message: " + e.getMessage());
            }
            //logger.info("RETURN CHANNEL TO THE POOL");
        }

    }
    SipServletResponse sipServletResponse = request.createResponse(SipServletResponse.SC_OK);
    sipServletResponse.send();

}

From source file:com.cyberway.issue.io.WriterPool.java

/**
 * Check out a {@link WriterPoolMember}.
 *
 * This method must be answered by a call to
 * {@link #returnFile(WriterPoolMember)} else pool starts leaking.
 * 
 * @return Writer checked out of a pool of files.
 * @throws IOException Problem getting Writer from pool (Converted
 * from Exception to IOException so this pool can live as a good citizen
 * down in depths of ARCSocketFactory).
 * @throws NoSuchElementException If we time out waiting on a pool member.
 */
public WriterPoolMember borrowFile() throws IOException {
    WriterPoolMember f = null;
    for (int i = 0; f == null; i++) {
        long waitStart = System.currentTimeMillis();
        try {
            f = (WriterPoolMember) this.pool.borrowObject();
            if (logger.getLevel() == Level.FINE) {
                logger.fine("Borrowed " + f + " (Pool State: " + getPoolState(waitStart) + ").");
            }
        } catch (NoSuchElementException e) {
            // Let this exception out. Unit test at least depends on it.
            // Log current state of the pool.
            logger.warning(e.getMessage() + ": Retry #" + i + " of max " + arbitraryRetryMax
                    + ": NSEE Pool State: " + getPoolState(waitStart));
            if (i >= arbitraryRetryMax) {
                logger.log(Level.SEVERE, "maximum retries exceeded; rethrowing", e);
                throw e;
            }
        } catch (Exception e) {
            // Convert.
            logger.log(Level.SEVERE, "E Pool State: " + getPoolState(waitStart), e);
            throw new IOException("Failed getting writer from pool: " + e.getMessage());
        }
    }
    return f;
}
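
The javadoc above stresses the contract: every successful borrowFile() must eventually be answered by returnFile(WriterPoolMember), otherwise the pool starts leaking members. A hypothetical caller (the "write a record" step is assumed for illustration and is not part of the source) would typically pair the two calls with try/finally:

public void writeWithPool(WriterPool pool) throws IOException {
    // borrowFile() may throw NoSuchElementException if the pool times out.
    WriterPoolMember writer = pool.borrowFile();
    try {
        // ... use the checked-out writer here, e.g. write a record ...
    } finally {
        // Always return the member, or the pool starts leaking.
        pool.returnFile(writer);
    }
}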

From source file:edu.cornell.mannlib.vitro.webapp.visualization.coauthorship.CoAuthorshipVisCodeGenerator.java

/**
 * This method is used to set up parameters for the sparkline value object. These parameters
 * will be used in the template to construct the actual HTML/JavaScript code.
 * @param visMode
 * @param providedVisContainerID
 * @return the populated SparklineData
 */
private SparklineData setupSparklineParameters(String visMode, String providedVisContainerID) {

    SparklineData sparklineData = new SparklineData();

    int numOfYearsToBeRendered = 0;

    /*
     * Cutting off the sparkline at the current year produces a misleading downward curve when
     * no publications have been recorded for it yet, so we consider only the last 10 complete
     * years.
     */
    int currentYear = Calendar.getInstance().get(Calendar.YEAR) - 1;
    int shortSparkMinYear = currentYear - VisConstants.MINIMUM_YEARS_CONSIDERED_FOR_SPARKLINE + 1;

    /*
     * This is required because, when deciding the range of years over which the vis
     * is rendered, we don't want to be influenced by the "DEFAULT_PUBLICATION_YEAR".
     */
    Set<String> publishedYears = new HashSet<String>(yearToUniqueCoauthors.keySet());
    publishedYears.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);

    /*
     * We set the default value of minPublishedYear to 10 years before the current
     * year (which is suitably represented by shortSparkMinYear), in case we run
     * into an invalid set of published years.
     */
    int minPublishedYear = shortSparkMinYear;

    String visContainerID = null;

    if (yearToUniqueCoauthors.size() > 0) {
        try {
            minPublishedYear = Integer.parseInt(Collections.min(publishedYears));
        } catch (NoSuchElementException e1) {
            log.debug("vis: " + e1.getMessage() + " error occurred for " + yearToUniqueCoauthors.toString());
        } catch (NumberFormatException e2) {
            log.debug("vis: " + e2.getMessage() + " error occurred for " + yearToUniqueCoauthors.toString());
        }
    }

    int minPubYearConsidered = 0;

    /*
     * There might be a case that the author has made his first publication within the 
     * last 10 years but we want to make sure that the sparkline is representative of 
     * at least the last 10 years, so we will set the minPubYearConsidered to 
     * "currentYear - 10" which is also given by "shortSparkMinYear".
     * */
    if (minPublishedYear > shortSparkMinYear) {
        minPubYearConsidered = shortSparkMinYear;
    } else {
        minPubYearConsidered = minPublishedYear;
    }

    numOfYearsToBeRendered = currentYear - minPubYearConsidered + 1;

    sparklineData.setNumOfYearsToBeRendered(numOfYearsToBeRendered);

    int uniqueCoAuthorCounter = 0;
    Set<Collaborator> allCoAuthorsWithKnownAuthorshipYears = new HashSet<Collaborator>();
    List<YearToEntityCountDataElement> yearToUniqueCoauthorsCountDataTable = new ArrayList<YearToEntityCountDataElement>();

    for (int publicationYear = minPubYearConsidered; publicationYear <= currentYear; publicationYear++) {

        String publicationYearAsString = String.valueOf(publicationYear);
        Set<Collaborator> currentCoAuthors = yearToUniqueCoauthors.get(publicationYearAsString);

        Integer currentUniqueCoAuthors = null;

        if (currentCoAuthors != null) {
            currentUniqueCoAuthors = currentCoAuthors.size();
            allCoAuthorsWithKnownAuthorshipYears.addAll(currentCoAuthors);
        } else {
            currentUniqueCoAuthors = 0;
        }

        yearToUniqueCoauthorsCountDataTable.add(new YearToEntityCountDataElement(uniqueCoAuthorCounter,
                publicationYearAsString, currentUniqueCoAuthors));
        uniqueCoAuthorCounter++;
    }

    /*
     * For the purpose of this visualization the term "Sparks" essentially means data points.
     * Sparks rendered in full mode are always the ones which have a year associated
     * with them.
     */
    sparklineData.setRenderedSparks(allCoAuthorsWithKnownAuthorshipYears.size());

    sparklineData.setYearToEntityCountDataTable(yearToUniqueCoauthorsCountDataTable);

    /*
     * This is required only for the sparklines which convey collaborationships like
     * co-investigatorships and co-authorship. There are edge cases where a collaborator can be
     * present in collaborations with both known and unknown years. We do not want to count
     * this collaborator twice when we present the data in the front-end.
     */
    Set<Collaborator> totalUniqueCoInvestigators = new HashSet<Collaborator>(
            allCoAuthorsWithKnownAuthorshipYears);

    /*
     * The total publication count also includes publications that have no year associated
     * with them.
     */
    Integer unknownYearCoauthors = 0;
    if (yearToUniqueCoauthors.get(VOConstants.DEFAULT_PUBLICATION_YEAR) != null) {
        unknownYearCoauthors = yearToUniqueCoauthors.get(VOConstants.DEFAULT_PUBLICATION_YEAR).size();

        // Use DEFAULT_PUBLICATION_YEAR here as well; looking up DEFAULT_GRANT_YEAR in this
        // map could return null and cause a NullPointerException in addAll().
        totalUniqueCoInvestigators.addAll(yearToUniqueCoauthors.get(VOConstants.DEFAULT_PUBLICATION_YEAR));
    }

    sparklineData.setUnknownYearPublications(unknownYearCoauthors);

    sparklineData.setTotalCollaborationshipCount(totalUniqueCoInvestigators.size());

    if (providedVisContainerID != null) {
        visContainerID = providedVisContainerID;
    } else {
        visContainerID = DEFAULT_VISCONTAINER_DIV_ID;
    }

    sparklineData.setVisContainerDivID(visContainerID);

    /*
     * By default these represent the range of the rendered sparks. Only in the case of
     * "short" sparkline mode do we set the earliest rendered publication year to
     * "currentYear - 10".
     */
    sparklineData.setEarliestYearConsidered(minPubYearConsidered);
    sparklineData.setEarliestRenderedPublicationYear(minPublishedYear);
    sparklineData.setLatestRenderedPublicationYear(currentYear);

    /*
     * The full sparkline is rendered by default. The short sparkline is rendered only if the
     * URL explicitly mentions SHORT_SPARKLINE_MODE_KEY.
     */
    if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE.equalsIgnoreCase(visMode)) {

        sparklineData.setEarliestRenderedPublicationYear(shortSparkMinYear);
        sparklineData.setShortVisMode(true);

    } else {
        sparklineData.setShortVisMode(false);
    }

    if (yearToUniqueCoauthors.size() > 0) {

        sparklineData.setFullTimelineNetworkLink(UtilityFunctions.getCollaboratorshipNetworkLink(individualURI,
                VisualizationFrameworkConstants.PERSON_LEVEL_VIS,
                VisualizationFrameworkConstants.COAUTHOR_VIS_MODE));

        sparklineData.setDownloadDataLink(UtilityFunctions.getCSVDownloadURL(individualURI,
                VisualizationFrameworkConstants.COAUTHORSHIP_VIS,
                VisualizationFrameworkConstants.COAUTHORS_COUNT_PER_YEAR_VIS_MODE));

        Map<String, Integer> yearToUniqueCoauthorsCount = new HashMap<String, Integer>();

        for (Map.Entry<String, Set<Collaborator>> currentYearToCoAuthors : yearToUniqueCoauthors.entrySet()) {
            yearToUniqueCoauthorsCount.put(currentYearToCoAuthors.getKey(),
                    currentYearToCoAuthors.getValue().size());
        }

        sparklineData.setYearToActivityCount(yearToUniqueCoauthorsCount);
    }

    return sparklineData;
}

From source file:com.ephesoft.dcma.da.common.UpgradePatchPreparation.java

/**
 * This method creates patch for batch class.
 *
 * @param service {@link BatchClassService}
 * @param batchClassInfo {@link String}
 */
private static void createPatchForBatchClass(BatchClassService service, String batchClassInfo) {
    StringTokenizer batchClassTokens = new StringTokenizer(batchClassInfo, DataAccessConstant.SEMI_COLON);
    while (batchClassTokens.hasMoreTokens()) {
        String batchClassName = batchClassTokens.nextToken();
        try {
            BatchClass createdBatchClass = createPatchForBatchClass(batchClassName, service);
            if (createdBatchClass != null) {
                batchClassNameVsBatchClassMap.put(createdBatchClass.getName(), createdBatchClass);
            }

        } catch (NoSuchElementException e) {
            LOG.error("Incomplete data specified in properties file.", e);
        }
    }

    try {
        File serializedExportFile = new File(
                upgradePatchFolderPath + File.separator + "BatchClassUpdate" + SERIALIZATION_EXT);
        SerializationUtils.serialize(batchClassNameVsBatchClassMap, new FileOutputStream(serializedExportFile));
    } catch (FileNotFoundException e) {
        // Unable to create or write the serialized file
        LOG.error(ERROR_OCCURRED_WHILE_CREATING_THE_SERIALIZABLE_FILE + e.getMessage(), e);
    }
}

From source file:com.ephesoft.dcma.da.common.UpgradePatchPreparation.java

/**
 * This method creates patch for module.
 *
 * @param service {@link BatchClassService}
 * @param moduleInfo {@link String}
 */
private static void createPatchForModule(BatchClassService service, String moduleInfo) {
    StringTokenizer moduleTokens = new StringTokenizer(moduleInfo, DataAccessConstant.SEMI_COLON);
    while (moduleTokens.hasMoreTokens()) {
        String moduleToken = moduleTokens.nextToken();
        StringTokenizer pluginConfigTokens = new StringTokenizer(moduleToken, DataAccessConstant.COMMA);
        String batchClassName = null;
        String moduleId = null;
        try {
            batchClassName = pluginConfigTokens.nextToken();
            moduleId = pluginConfigTokens.nextToken();
            BatchClassModule createdModule = createPatchForModule(batchClassName, moduleId, service);
            if (createdModule != null) {
                BatchClass batchClass = service.getBatchClassByIdentifier(batchClassName);
                ArrayList<BatchClassModule> bcmList = batchClassNameVsModulesMap.get(batchClass.getName());
                if (bcmList == null) {
                    bcmList = new ArrayList<BatchClassModule>();
                    batchClassNameVsModulesMap.put(batchClass.getName(), bcmList);
                }
                bcmList.add(createdModule);
            }

        } catch (NoSuchElementException e) {
            LOG.error("Incomplete data specified in properties file.", e);
        }
    }

    try {
        File serializedExportFile = new File(
                upgradePatchFolderPath + File.separator + "ModuleUpdate" + SERIALIZATION_EXT);
        SerializationUtils.serialize(batchClassNameVsModulesMap, new FileOutputStream(serializedExportFile));
    } catch (FileNotFoundException e) {
        // Unable to create or write the serialized file
        LOG.error(ERROR_OCCURRED_WHILE_CREATING_THE_SERIALIZABLE_FILE + e.getMessage(), e);
    }

}

From source file:com.ephesoft.dcma.da.common.UpgradePatchPreparation.java

/**
 * This method creates patch for plugin.
 *
 * @param service {@link BatchClassService}
 * @param pluginInfo {@link String}
 */
private static void createPatchForPlugin(final BatchClassService service, final String pluginInfo) {
    final StringTokenizer pluginTokens = new StringTokenizer(pluginInfo, DataAccessConstant.SEMI_COLON);
    while (pluginTokens.hasMoreTokens()) {
        String pluginToken = pluginTokens.nextToken();
        StringTokenizer pluginConfigTokens = new StringTokenizer(pluginToken, DataAccessConstant.COMMA);
        String batchClassIdentifier = null;
        String moduleId = null;
        String pluginId = null;
        try {
            batchClassIdentifier = pluginConfigTokens.nextToken();
            moduleId = pluginConfigTokens.nextToken();
            pluginId = pluginConfigTokens.nextToken();
            BatchClassPlugin createdPlugin = createPatch(batchClassIdentifier, moduleId, pluginId, service);
            if (createdPlugin != null) {
                BatchClass batchClass = service.getBatchClassByIdentifier(batchClassIdentifier);
                Module module = moduleService.getModulePropertiesForModuleId(Long.valueOf(moduleId));
                String key = batchClass.getName() + DataAccessConstant.COMMA + module.getName();
                ArrayList<BatchClassPlugin> pluginsList = batchClassNameVsPluginsMap.get(key);
                if (pluginsList == null) {
                    pluginsList = new ArrayList<BatchClassPlugin>();
                    batchClassNameVsPluginsMap.put(key, pluginsList);
                }
                pluginsList.add(createdPlugin);
            }

        } catch (NoSuchElementException e) {
            LOG.info("Incomplete data specified in properties file.", e);
        }
    }

    try {
        File serializedExportFile = new File(
                upgradePatchFolderPath + File.separator + "PluginUpdate" + SERIALIZATION_EXT);
        SerializationUtils.serialize(batchClassNameVsPluginsMap, new FileOutputStream(serializedExportFile));
    } catch (FileNotFoundException e) {
        // Unable to create or write the serialized file
        LOG.error(ERROR_OCCURRED_WHILE_CREATING_THE_SERIALIZABLE_FILE + e.getMessage(), e);
    }
}