List of usage examples for java.util.NoSuchElementException.getMessage()
public String getMessage()
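NoSuchElementException extends RuntimeException, so getMessage() simply returns the detail message that the throwing code supplied to the constructor, or null when none was set. The minimal sketch below illustrates the pattern shared by all of the examples on this page: catch the exception and report its message. The iterator and the printed text are illustrative only and are not taken from any of the projects listed here.

import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;

public class GetMessageExample {
    public static void main(String[] args) {
        // An exhausted iterator: the next call to next() throws NoSuchElementException.
        Iterator<String> iterator = Collections.<String>emptyList().iterator();
        try {
            iterator.next();
        } catch (NoSuchElementException e) {
            // getMessage() returns the detail message, or null if the thrower set none.
            System.out.println("Lookup failed: " + e.getMessage());
        }
    }
}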
From source file:edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator.CoPIVisCodeGenerator.java
/**
 * This method is used to setup parameters for the sparkline value object. These parameters
 * will be used in the template to construct the actual html/javascript code.
 * @param visMode
 * @param providedVisContainerID
 */
private SparklineData setupSparklineParameters(String visMode, String providedVisContainerID) {

    SparklineData sparklineData = new SparklineData();

    int numOfYearsToBeRendered = 0;

    /*
     * It was decided that to prevent downward curve that happens if there are no publications
     * in the current year seems a bit harsh, so we consider only publications from the last 10
     * complete years.
     */
    int currentYear = Calendar.getInstance().get(Calendar.YEAR) - 1;
    int shortSparkMinYear = currentYear - VisConstants.MINIMUM_YEARS_CONSIDERED_FOR_SPARKLINE + 1;

    /*
     * This is required because when deciding the range of years over which
     * the vis was rendered we don't want to be influenced by the
     * "DEFAULT_GRANT_YEAR".
     */
    Set<String> investigatedYears = new HashSet<String>(yearToUniqueCoPIs.keySet());
    investigatedYears.remove(VOConstants.DEFAULT_GRANT_YEAR);

    /*
     * We are setting the default value of minGrantYear to be 10 years
     * before the current year (which is suitably represented by the
     * shortSparkMinYear), in case we run into an invalid set of investigated
     * years.
     */
    int minGrantYear = shortSparkMinYear;

    String visContainerID = null;

    if (yearToUniqueCoPIs.size() > 0) {
        try {
            minGrantYear = Integer.parseInt(Collections.min(investigatedYears));
        } catch (NoSuchElementException e1) {
            log.debug("vis: " + e1.getMessage() + " error occurred for "
                    + yearToUniqueCoPIs.toString());
        } catch (NumberFormatException e2) {
            log.debug("vis: " + e2.getMessage() + " error occurred for "
                    + yearToUniqueCoPIs.toString());
        }
    }

    int minGrantYearConsidered = 0;

    /*
     * There might be a case that the person investigated his first grant
     * within the last 10 years but we want to make sure that the sparkline
     * is representative of at least the last 10 years, so we will set the
     * minGrantYearConsidered to "currentYear - 10" which is also given by
     * "shortSparkMinYear".
     */
    if (minGrantYear > shortSparkMinYear) {
        minGrantYearConsidered = shortSparkMinYear;
    } else {
        minGrantYearConsidered = minGrantYear;
    }

    numOfYearsToBeRendered = currentYear - minGrantYearConsidered + 1;

    sparklineData.setNumOfYearsToBeRendered(numOfYearsToBeRendered);

    int uniqueCoPICounter = 0;
    Set<Collaborator> allCoPIsWithKnownGrantShipYears = new HashSet<Collaborator>();
    List<YearToEntityCountDataElement> yearToUniqueInvestigatorsCountDataTable =
            new ArrayList<YearToEntityCountDataElement>();

    for (int grantYear = minGrantYearConsidered; grantYear <= currentYear; grantYear++) {

        String grantYearAsString = String.valueOf(grantYear);
        Set<Collaborator> currentCoPIs = yearToUniqueCoPIs.get(grantYearAsString);

        Integer currentUniqueCoPIs = null;

        if (currentCoPIs != null) {
            currentUniqueCoPIs = currentCoPIs.size();
            allCoPIsWithKnownGrantShipYears.addAll(currentCoPIs);
        } else {
            currentUniqueCoPIs = 0;
        }

        yearToUniqueInvestigatorsCountDataTable.add(new YearToEntityCountDataElement(
                uniqueCoPICounter, grantYearAsString, currentUniqueCoPIs));

        uniqueCoPICounter++;
    }

    /*
     * For the purpose of this visualization I have come up with a term
     * "Sparks" which essentially means data points. Sparks that will be
     * rendered in full mode will always be the one's which have any year
     * associated with it. Hence.
     */
    sparklineData.setRenderedSparks(allCoPIsWithKnownGrantShipYears.size());

    sparklineData.setYearToEntityCountDataTable(yearToUniqueInvestigatorsCountDataTable);

    /*
     * This is required only for the sparklines which convey collaborationships like
     * coinvestigatorships and coauthorship. There are edge cases where a collaborator can be
     * present for in a collaboration with known & unknown year. We do not want to repeat the
     * count for this collaborator when we present it in the front-end.
     */
    Set<Collaborator> totalUniqueCoInvestigators =
            new HashSet<Collaborator>(allCoPIsWithKnownGrantShipYears);

    /*
     * Total grants will also consider grants that have no year
     * associated with them. Hence.
     */
    Integer unknownYearGrants = 0;
    if (yearToUniqueCoPIs.get(VOConstants.DEFAULT_GRANT_YEAR) != null) {
        unknownYearGrants = yearToUniqueCoPIs.get(VOConstants.DEFAULT_GRANT_YEAR).size();
        totalUniqueCoInvestigators.addAll(yearToUniqueCoPIs.get(VOConstants.DEFAULT_GRANT_YEAR));
    }

    sparklineData.setTotalCollaborationshipCount(totalUniqueCoInvestigators.size());
    sparklineData.setUnknownYearGrants(unknownYearGrants);

    if (providedVisContainerID != null) {
        visContainerID = providedVisContainerID;
    } else {
        visContainerID = DEFAULT_VISCONTAINER_DIV_ID;
    }

    sparklineData.setVisContainerDivID(visContainerID);

    /*
     * By default these represent the range of the rendered sparks. Only in
     * case of "short" sparkline mode we will set the Earliest
     * RenderedGrant year to "currentYear - 10".
     */
    sparklineData.setEarliestYearConsidered(minGrantYearConsidered);
    sparklineData.setEarliestRenderedGrantYear(minGrantYear);
    sparklineData.setLatestRenderedGrantYear(currentYear);

    /*
     * The Full Sparkline will be rendered by default. Only if the url has
     * specific mention of SHORT_SPARKLINE_MODE_KEY then we render the short
     * sparkline and not otherwise.
     */
    if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE.equalsIgnoreCase(visMode)) {
        sparklineData.setEarliestRenderedGrantYear(shortSparkMinYear);
        sparklineData.setShortVisMode(true);
    } else {
        sparklineData.setShortVisMode(false);
    }

    if (yearToUniqueCoPIs.size() > 0) {

        sparklineData.setFullTimelineNetworkLink(UtilityFunctions.getCollaboratorshipNetworkLink(
                individualURI,
                VisualizationFrameworkConstants.PERSON_LEVEL_VIS,
                VisualizationFrameworkConstants.COPI_VIS_MODE));

        sparklineData.setDownloadDataLink(UtilityFunctions.getCSVDownloadURL(
                individualURI,
                VisualizationFrameworkConstants.CO_PI_VIS,
                VisualizationFrameworkConstants.COPIS_COUNT_PER_YEAR_VIS_MODE));

        Map<String, Integer> yearToUniqueCoPIsCount = new HashMap<String, Integer>();

        for (Map.Entry<String, Set<Collaborator>> currentYearToUniqueCoPIsCount
                : yearToUniqueCoPIs.entrySet()) {
            yearToUniqueCoPIsCount.put(currentYearToUniqueCoPIsCount.getKey(),
                    currentYearToUniqueCoPIsCount.getValue().size());
        }

        sparklineData.setYearToActivityCount(yearToUniqueCoPIsCount);
    }

    return sparklineData;
}
From source file:com.ephesoft.dcma.da.common.UpgradePatchPreparation.java
/**
 * This method creates patch for plugin config.
 *
 * @param service {@link BatchClassService}
 * @param pluginConfigInfo {@link String}
 */
private static void createPatchForPluginConfig(BatchClassService service, final String pluginConfigInfo) {
    StringTokenizer pluginTokens = new StringTokenizer(pluginConfigInfo, DataAccessConstant.SEMI_COLON);
    while (pluginTokens.hasMoreTokens()) {
        String pluginToken = pluginTokens.nextToken();
        StringTokenizer pluginConfigTokens = new StringTokenizer(pluginToken, DataAccessConstant.COMMA);
        String pluginId = null;
        String pluginConfigId = null;
        try {
            pluginId = pluginConfigTokens.nextToken();
            pluginConfigId = pluginConfigTokens.nextToken();
            createPatch(pluginId, pluginConfigId, service);
        } catch (NoSuchElementException e) {
            LOG.error("Incomplete data specified in properties file.", e);
        }
    }
    try {
        File serializedExportFile = new File(
                upgradePatchFolderPath + File.separator + "PluginConfigUpdate" + SERIALIZATION_EXT);
        SerializationUtils.serialize(pluginNameVsBatchPluginConfigList,
                new FileOutputStream(serializedExportFile));
    } catch (FileNotFoundException e) {
        // Unable to read serializable file
        LOG.error(ERROR_OCCURRED_WHILE_CREATING_THE_SERIALIZABLE_FILE + e.getMessage(), e);
    }
}
From source file:com.pushinginertia.commons.net.client.AbstractHttpPostClient.java
/**
 * Retrieves the response message from the remote host.
 *
 * @param con instantiated connection
 * @param encoding encoding to use in the response
 * @return response message from the remote host or null if none exists
 * @throws HttpConnectException if there is a problem retrieving the message
 */
protected String getResponseMessage(final HttpURLConnection con, final String encoding)
        throws HttpConnectException {
    try {
        final InputStream is = con.getInputStream();
        final Scanner s = new Scanner(is, encoding);
        s.useDelimiter("\\A"); // \A is the beginning of input
        if (s.hasNext())
            return s.next();
        return null;
    } catch (NoSuchElementException e) {
        // no input
        return null;
    } catch (Exception e) {
        final String msg = "An unexpected error occurred while trying to retrieve the response message from ["
                + getUrl() + "]: " + e.getMessage();
        LOG.error(getClass().getSimpleName(), msg, e);
        throw new HttpConnectException(msg, e);
    }
}
From source file:com.sampas.socbs.core.data.arcsde.impl.ArcSDEConnectionPool.java
/**
 * DOCUMENT ME!
 *
 * @return DOCUMENT ME!
 *
 * @throws DataSourceException
 *             DOCUMENT ME!
 * @throws UnavailableArcSDEConnectionException
 * @throws IllegalStateException
 *             DOCUMENT ME!
 */
public ArcSDEPooledConnection getConnection()
        throws DataSourceException, UnavailableArcSDEConnectionException {
    if (pool == null) {
        throw new IllegalStateException("The ConnectionPool has been closed.");
    }

    try {
        // String caller = null;
        // if (LOGGER.isLoggable(Level.FINER)) {
        //     StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
        //     caller = stackTrace[3].getClassName() + "." + stackTrace[3].getMethodName();
        // }

        ArcSDEPooledConnection connection = (ArcSDEPooledConnection) this.pool.borrowObject();

        if (LOGGER.isLoggable(Level.FINER)) {
            // System.err.println("-> " + caller + " got " + connection);
            LOGGER.finer(connection + " out of connection pool");
        }
        connection.markActive();
        return connection;
    } catch (NoSuchElementException e) {
        LOGGER.log(Level.WARNING, "Out of connections: " + e.getMessage(), e);
        throw new UnavailableArcSDEConnectionException(this.pool.getNumActive(), this.config);
    } catch (SeException se) {
        LOGGER.log(Level.WARNING, "ArcSDE error getting connection: " + se.getSeError().getErrDesc(), se);
        throw new DataSourceException("ArcSDE Error Message: " + se.getSeError().getErrDesc(), se);
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Unknown problem getting connection: " + e.getMessage(), e);
        throw new DataSourceException("Unknown problem fetching connection from connection pool", e);
    }
}
From source file:com.xpn.xwiki.internal.plugin.rightsmanager.ReferenceUserIteratorTest.java
@Test
public void getMembersWhenCallingNextWithNoMoreReference() throws Exception {
    Iterator<DocumentReference> iterator = setUpSingleUser();

    assertEquals(new DocumentReference("userwiki", "XWiki", "userpage"), iterator.next());
    try {
        iterator.next();
    } catch (NoSuchElementException expected) {
        assertEquals("No more users to extract from the passed references [[userwiki:XWiki.userpage]]",
                expected.getMessage());
    }
}
From source file:com.litwan.yanel.impl.resources.textedit.TextEditResource.java
public String getResToEditMimeType() {
    String mimeType = null;
    try {
        mimeType = getEnvironment().getRequest().getSession().getServletContext()
                .getMimeType(getEditPath());
    } catch (NoSuchElementException e) {
        log.warn("mimetype util could not guess the mimetype.");
    }
    if (mimeType == null) {
        try {
            mimeType = getRealm().getRepository().getNode(getEditPath()).getMimeType();
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }
    return mimeType;
}
From source file:forge.deck.io.OldDeckParser.java
private void convertConstructedAndSealed() {
    boolean allowDeleteUnsupportedConstructed = false;
    final Map<String, Pair<DeckGroup, MutablePair<File, File>>> sealedDecks =
            new TreeMap<String, Pair<DeckGroup, MutablePair<File, File>>>(String.CASE_INSENSITIVE_ORDER);

    for (final File f : this.deckDir.listFiles(DeckStorage.DCK_FILE_FILTER)) {
        boolean importedOk = false;

        final List<String> fileLines = FileUtil.readFile(f);
        final Map<String, List<String>> sections = FileSection.parseSections(fileLines);
        final DeckFileHeader dh = DeckSerializer.readDeckMetadata(sections);
        String name = dh.getName();

        if (dh.isCustomPool()) {
            try {
                this.cube.add(DeckSerializer.fromSections(sections));
                importedOk = true;
            } catch (final NoSuchElementException ex) {
                if (!allowDeleteUnsupportedConstructed) {
                    final String msg = String.format(
                            "Can not convert deck '%s' for some unsupported cards it contains. %n%s%n%nMay Forge delete all such decks?",
                            name, ex.getMessage());
                    allowDeleteUnsupportedConstructed =
                            SOptionPane.showConfirmDialog(msg, "Problem converting decks");
                }
            }
            if (importedOk || allowDeleteUnsupportedConstructed) {
                f.delete();
            }
            continue;
        }

        switch (dh.getDeckType()) {
        case Constructed:
            try {
                this.constructed.add(DeckSerializer.fromSections(sections));
                importedOk = true;
            } catch (final NoSuchElementException ex) {
                if (!allowDeleteUnsupportedConstructed) {
                    final String msg = String.format(
                            "Can not convert deck '%s' for some unsupported cards it contains. %n%s%n%nMay Forge delete all such decks?",
                            name, ex.getMessage());
                    allowDeleteUnsupportedConstructed =
                            SOptionPane.showConfirmDialog(msg, "Problem converting decks");
                }
            }
            if (importedOk || allowDeleteUnsupportedConstructed) {
                f.delete();
            }
            break;

        case Limited:
            name = name.startsWith("AI_") ? name.replace("AI_", "") : name;

            Pair<DeckGroup, MutablePair<File, File>> stored = sealedDecks.get(name);
            if (null == stored) {
                stored = ImmutablePair.of(new DeckGroup(name), MutablePair.of((File) null, (File) null));
            }

            final Deck deck = DeckSerializer.fromSections(sections);
            if (dh.isIntendedForAi()) {
                stored.getLeft().addAiDeck(deck);
                stored.getRight().setRight(f);
            } else {
                stored.getLeft().setHumanDeck(deck);
                stored.getRight().setLeft(f);
            }

            if ((stored.getLeft().getHumanDeck() != null) && !stored.getLeft().getAiDecks().isEmpty()) {
                // have both parts of sealed deck, may convert
                this.sealed.add(stored.getLeft());
                stored.getRight().getLeft().delete();
                stored.getRight().getRight().delete();

                // there stay only orphans
                sealedDecks.remove(name);
            } else {
                sealedDecks.put(name, stored);
            }
            break;

        default:
            break;
        }
    }

    // advise to kill orphaned decks
    if (!sealedDecks.isEmpty()) {
        final StringBuilder sb = new StringBuilder();
        for (final Pair<DeckGroup, MutablePair<File, File>> s : sealedDecks.values()) {
            final String missingPart = s.getRight().getLeft() == null ? "human" : "computer";
            sb.append(String.format("Sealed deck '%s' has no matching '%s' deck.%n",
                    s.getKey().getName(), missingPart));
        }
        sb.append(System.getProperty("line.separator"));
        sb.append("May Forge delete these decks?");

        if (SOptionPane.showConfirmDialog(sb.toString(), "Some of your sealed decks are orphaned")) {
            for (final Pair<DeckGroup, MutablePair<File, File>> s : sealedDecks.values()) {
                if (s.getRight().getLeft() != null) {
                    s.getRight().getLeft().delete();
                }
                if (s.getRight().getRight() != null) {
                    s.getRight().getRight().delete();
                }
            }
        }
    }
}
From source file:de.zib.gndms.dspace.service.SliceServiceImpl.java
@Override
@RequestMapping(value = "/_{subspaceId}/_{sliceKindId}/_{sliceId}/config", method = RequestMethod.GET)
@Secured("ROLE_USER")
public ResponseEntity<SliceInformation> getSliceInformation(@PathVariable final String subspaceId,
        @PathVariable final String sliceKindId, @PathVariable final String sliceId,
        @RequestHeader("DN") final String dn) {

    GNDMSResponseHeader headers = setHeaders(subspaceId, sliceKindId, sliceId, dn);

    try {
        Slice sliceModel = findSliceOfKind(subspaceId, sliceKindId, sliceId);
        de.zib.gndms.infra.dspace.Slice slice = new de.zib.gndms.infra.dspace.Slice(sliceModel);

        return new ResponseEntity<SliceInformation>(slice.getSliceInformation(), headers, HttpStatus.OK);
    } catch (NoSuchElementException e) {
        logger.warn(e.getMessage());
        return new ResponseEntity<SliceInformation>(new SliceInformation(), headers, HttpStatus.NOT_FOUND);
    }
}
From source file:de.zib.gndms.dspace.service.SliceServiceImpl.java
@Override
@RequestMapping(value = "/_{subspaceId}/_{sliceKindId}/_{sliceId}/gsiftp", method = RequestMethod.GET)
@Secured("ROLE_USER")
public ResponseEntity<String> getGridFtpUrl(@PathVariable final String subspaceId,
        @PathVariable final String sliceKindId, @PathVariable final String sliceId,
        @RequestHeader("DN") final String dn) {

    GNDMSResponseHeader headers = setHeaders(subspaceId, sliceKindId, sliceId, dn);

    try {
        Subspace space = subspaceProvider.get(subspaceId);
        Slice slice = findSliceOfKind(subspaceId, sliceKindId, sliceId);

        return new ResponseEntity<String>(space.getGsiFtpPathForSlice(slice), headers, HttpStatus.OK);
    } catch (NoSuchElementException ne) {
        logger.warn(ne.getMessage());
        return new ResponseEntity<String>(null, headers, HttpStatus.NOT_FOUND);
    }
}