List of usage examples for java.util.logging Level FINER
Level FINER
The examples below are taken from real open-source files that call Level.FINER; the originating source file is named above each listing so the logging call can be seen in the context of the surrounding method.
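Before the project examples, here is a minimal, self-contained sketch of how FINER records are enabled and emitted with java.util.logging. The class and logger names are illustrative only and are not taken from any of the projects below.

import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FinerExample {
    private static final Logger LOG = Logger.getLogger(FinerExample.class.getName());

    public static void main(String[] args) {
        // By default only INFO and above is published, so both the logger and its handler
        // must be lowered to FINER (or below) for FINER records to appear.
        Handler handler = new ConsoleHandler();
        handler.setLevel(Level.FINER);
        LOG.addHandler(handler);
        LOG.setLevel(Level.FINER);
        LOG.setUseParentHandlers(false); // avoid duplicate output through the root logger's handler

        // Guard expensive message construction when FINER may be disabled.
        if (LOG.isLoggable(Level.FINER)) {
            LOG.finer("Detailed tracing message built only when FINER is enabled");
        }

        // Parameterized form, as used throughout the examples below.
        LOG.log(Level.FINER, "Processing {0} of {1}", new Object[] { 3, 10 });
    }
}

The isLoggable(Level.FINER) guard mirrors the pattern used in the SessionPool example further down.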
From source file:Peer.java
@Override
public synchronized void notify(Key _pred) throws Exception {
    lg.log(Level.FINEST, "notify entry");
    pred = _pred;
    lg.log(Level.FINER, "Notified by " + _pred.toString());
    succ = superpeer.getSuccessor(nodeid);
    // Start the notify cycle ... end the cycle with the initiator.
    PeerInterface peer = getPeer(succ);
    if (!lock) {
        lock = true;
        constructFingerTable(hasher.getBitSize());
        peer.notify(nodeid);
        lock = false;
        return;
    } else {
        lock = false;
        return;
    }
}
From source file:org.jenkinsci.modules.optpluginhelper.PluginHelper.java
/**
 * Refreshes the list of plugins that should be loaded. This will re-examine the full list of plugins provided
 * by all the {@link PluginSource} extensions and filter them through all the {@link PluginWrapperFilter}
 * extensions to see if there are any plugins that can be installed. An attempt will be made to dynamically load
 * the plugins.
 *
 * @return {@code true} if a restart is required to complete activation, {@code false} if either nothing changed
 *         or the additional plugins were successfully dynamically loaded.
 */
public boolean refresh() {
    final Jenkins jenkins = Jenkins.getInstance();
    if (jenkins == null) {
        return false;
    }
    PluginManager pm = jenkins.getPluginManager();
    PluginStrategy ps = pm.getPluginStrategy();
    // now figure out which plugins are included
    LOGGER.log(Level.FINE, "Enumerating available optional plugins and filtering to determine set for activation");
    Map<PluginWrapper, File> wrapperToFile = new HashMap<PluginWrapper, File>();
    Map<PluginWrapper, PluginWrapperFilter.Decision> wrapperToDecision =
            new HashMap<PluginWrapper, PluginWrapperFilter.Decision>();
    for (File plugin : listPlugins()) {
        try {
            PluginWrapper wrapper = ps.createPluginWrapper(plugin);
            final PluginWrapper existing = pm.getPlugin(wrapper.getShortName());
            if (existing != null && (existing.isEnabled() || existing.isActive())
                    && !(wrapper.getVersionNumber().isNewerThan(existing.getVersionNumber()))) {
                LOGGER.log(Level.FINER, "Excluding {0} version {1} as version {2} is already installed",
                        new Object[] { wrapper.getShortName(), wrapper.getVersion(), existing.getVersion() });
                continue;
            }
            final PluginWrapperFilter.Decision decision = PluginWrapperFilter.decide(wrapper, plugin);
            if (decision == PluginWrapperFilter.Decision.EXCLUDE) {
                LOGGER.log(Level.FINER, "Excluding {0} version {1} based on decision from filters",
                        new Object[] { wrapper.getShortName(), wrapper.getVersion() });
            } else {
                wrapperToFile.put(wrapper, plugin);
                wrapperToDecision.put(wrapper, decision);
            }
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, "IO exception processing " + plugin, e);
        }
    }
    LOGGER.log(Level.FINE, "Initial filtered set determined: {0}", wrapperToDecision);
    // now any non-optional dependencies of an included plugin get upped to included
    boolean changed = true;
    while (changed) {
        changed = false;
        Set<String> upscale = new HashSet<String>();
        for (Map.Entry<PluginWrapper, PluginWrapperFilter.Decision> entry : wrapperToDecision.entrySet()) {
            if (entry.getValue() != PluginWrapperFilter.Decision.INCLUDE) {
                continue;
            }
            for (PluginWrapper.Dependency d : entry.getKey().getDependencies()) {
                // we need all non-optional dependencies
                upscale.add(d.shortName);
            }
            for (PluginWrapper.Dependency d : entry.getKey().getOptionalDependencies()) {
                // we only need optional dependencies if they are already installed and are an incompatible version
                final PluginWrapper existing = pm.getPlugin(d.shortName);
                if (existing != null && (existing.isEnabled() || existing.isActive())) {
                    if (existing.isOlderThan(new VersionNumber(d.version))) {
                        upscale.add(d.shortName);
                    }
                }
            }
        }
        for (Map.Entry<PluginWrapper, PluginWrapperFilter.Decision> entry : wrapperToDecision.entrySet()) {
            if (entry.getValue() == PluginWrapperFilter.Decision.INCLUDE) {
                continue;
            }
            if (upscale.contains(entry.getKey().getShortName())) {
                changed = true;
                entry.setValue(PluginWrapperFilter.Decision.INCLUDE);
            }
        }
    }
    for (Iterator<Map.Entry<PluginWrapper, PluginWrapperFilter.Decision>> iterator =
            wrapperToDecision.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<PluginWrapper, PluginWrapperFilter.Decision> entry = iterator.next();
        if (entry.getValue() == PluginWrapperFilter.Decision.INCLUDE) {
            continue;
        }
        wrapperToFile.remove(entry.getKey());
        iterator.remove();
    }
    LOGGER.log(Level.FINE, "After adding required dependencies: {0}", wrapperToDecision.keySet());
    if (wrapperToFile.isEmpty()) {
        // bail early if the list is empty
        LOGGER.log(Level.FINE, "No new optional plugins to install");
        return false;
    }
    LOGGER.log(Level.FINE, "Checking if dynamic loading of plugins is possible...");
    boolean cannotDynamicLoad = false;
    for (PluginWrapper wrapper : wrapperToFile.keySet()) {
        final PluginWrapper existing = pm.getPlugin(wrapper.getShortName());
        if (existing != null && (existing.isActive() || existing.isEnabled()) && !existing.isPinned()) {
            LOGGER.log(Level.INFO, "Cannot dynamically load optional plugins because {0} is already installed",
                    existing.getShortName());
            cannotDynamicLoad = true;
        } else if (YesNoMaybe.NO == wrapper.supportsDynamicLoad()) {
            LOGGER.log(Level.INFO, "Cannot dynamically load optional plugins because {0} does not support dynamic load",
                    wrapper.getShortName());
            cannotDynamicLoad = true;
        }
    }
    Map<String, VersionNumber> finalVersions = new HashMap<String, VersionNumber>();
    // start with the active/enabled plugins that are currently installed
    for (PluginWrapper w : pm.getPlugins()) {
        if (w.isActive() || w.isEnabled()) {
            finalVersions.put(w.getShortName(), w.getVersionNumber());
        }
    }
    // now add any new versions
    for (PluginWrapper w : wrapperToFile.keySet()) {
        VersionNumber existing = finalVersions.get(w.getShortName());
        if (existing == null || w.getVersionNumber().isNewerThan(existing)) {
            finalVersions.put(w.getShortName(), w.getVersionNumber());
        }
    }
    LOGGER.log(Level.FINE, "Expected final plugin version map: {0}", finalVersions);
    Set<String> pluginsToEnable = new HashSet<String>();
    for (PluginWrapper w : wrapperToFile.keySet()) {
        LOGGER.log(Level.FINE, "Checking if {0} can be enabled, i.e. all dependencies can be satisfied",
                w.getShortName());
        boolean missingDependency = false;
        for (PluginWrapper.Dependency d : w.getDependencies()) {
            VersionNumber v = finalVersions.get(d.shortName);
            if (v == null || v.isOlderThan(new VersionNumber(d.version))) {
                missingDependency = true;
                LOGGER.log(Level.FINER, "{0} is missing a dependency on {1} version {2}",
                        new Object[] { w.getShortName(), d.shortName, d.version });
            }
        }
        for (PluginWrapper.Dependency d : w.getOptionalDependencies()) {
            VersionNumber v = finalVersions.get(d.shortName);
            if (v != null && v.isOlderThan(new VersionNumber(d.version))) {
                missingDependency = true;
                LOGGER.log(Level.FINER, "{0} is missing a dependency on {1} version {2}",
                        new Object[] { w.getShortName(), d.shortName, d.version });
            }
        }
        if (missingDependency) {
            LOGGER.log(Level.FINE, "{0} cannot be enabled due to missing dependencies", w.getShortName());
        } else {
            LOGGER.log(Level.FINE, "{0} can be enabled", w.getShortName());
            pluginsToEnable.add(w.getShortName());
        }
    }
    Map<String, File> newPlugins = new HashMap<String, File>();
    for (Map.Entry<PluginWrapper, File> entry : wrapperToFile.entrySet()) {
        final String shortName = entry.getKey().getShortName();
        final PluginWrapper existing = pm.getPlugin(shortName);
        final PluginWrapper proposed = entry.getKey();
        if (existing != null && existing.isActive()) {
            if (existing.getVersionNumber().equals(proposed.getVersionNumber())) {
                LOGGER.log(Level.FINE, "Ignoring installing plugin {0} as current version is desired", shortName);
                // ignore as we are fine
                continue;
            }
            if (existing.getVersionNumber().isNewerThan(proposed.getVersionNumber())) {
                LOGGER.log(Level.INFO, "Ignoring installing plugin {0} as current version {1} is newer that bundled "
                        + "version {2}", new Object[] { shortName, existing.getVersion(), proposed.getVersion() });
                continue;
            }
            if (existing.isPinned()) {
                LOGGER.log(Level.INFO,
                        "Ignoring installing plugin {0} as it is pinned. You might want to unpin this plugin.",
                        new Object[] { shortName });
                continue;
            }
            LOGGER.log(Level.INFO, "Restart required as plugin {0} is already installed", shortName);
            cannotDynamicLoad = true;
        }
        String fileName = shortName + ".jpi";
        String legacyName = fileName.replace(".jpi", ".hpi");
        File file = new File(pm.rootDir, fileName);
        File pinFile = new File(pm.rootDir, fileName + ".pinned");
        File disableFile = new File(pm.rootDir, fileName + ".disabled");
        // normalization first, if the old file exists.
        try {
            rename(new File(pm.rootDir, legacyName), file);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, String.format("Could not move legacy %s.hpi to %s.jpi", shortName, shortName), e);
        }
        try {
            rename(new File(pm.rootDir, legacyName + ".pinned"), pinFile);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING,
                    String.format("Could not move legacy %s.hpi.pinned to %s.jpi.pinned", shortName, shortName), e);
        }
        try {
            rename(new File(pm.rootDir, legacyName + ".disabled"), disableFile);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING,
                    String.format("Could not move legacy %s.hpi.disabled to %s.jpi.disabled", shortName, shortName), e);
        }
        // update file if:
        //  - no file exists today
        //  - bundled version and current version differs (by timestamp), and the file isn't pinned.
        final long lastModified = entry.getValue().lastModified();
        if (!file.exists() || (file.lastModified() != lastModified && !pinFile.exists())) {
            try {
                FileUtils.copyFile(entry.getValue(), file);
                if (lastModified != -1 && !file.setLastModified(lastModified)) {
                    LOGGER.log(Level.WARNING, "Could not set last modified timestamp on {0}.jpi", shortName);
                }
                // lastModified is set for two reasons:
                //  - to avoid unpacking as much as possible, but still do it on both upgrade and downgrade
                //  - to make sure the value is not changed after each restart, so we can avoid
                //    unpacking the plugin itself in ClassicPluginStrategy.explode
                newPlugins.put(shortName, file);
            } catch (IOException e) {
                LOGGER.log(Level.WARNING, String.format("Could not write %s.jpi", shortName), e);
            }
        }
        if (!pluginsToEnable.contains(shortName)) {
            try {
                new FileOutputStream(disableFile).close();
            } catch (IOException e) {
                LOGGER.log(Level.WARNING, String.format("Could not flag %s as a disabled plugin", shortName), e);
            }
        }
    }
    if (cannotDynamicLoad) {
        return true;
    }
    LOGGER.log(Level.FINE, "Sorting plugins to determine loading order...");
    // now we need to sort plugins and try and dynamically load them
    final List<PluginWrapper> plugins = new ArrayList<PluginWrapper>(newPlugins.size());
    for (File p : newPlugins.values()) {
        try {
            plugins.add(ps.createPluginWrapper(p));
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, "IO exception processing " + p, e);
            cannotDynamicLoad = true;
        }
    }
    if (cannotDynamicLoad) {
        return true;
    }
    CyclicGraphDetector<PluginWrapper> cgd = new CyclicGraphDetector<PluginWrapper>() {
        @Override
        protected List<PluginWrapper> getEdges(PluginWrapper p) {
            List<PluginWrapper> next = new ArrayList<PluginWrapper>();
            addTo(p.getDependencies(), next);
            addTo(p.getOptionalDependencies(), next);
            return next;
        }

        private void addTo(List<PluginWrapper.Dependency> dependencies, List<PluginWrapper> r) {
            for (PluginWrapper.Dependency d : dependencies) {
                for (PluginWrapper p : plugins) {
                    if (p.getShortName().equals(d.shortName)) {
                        r.add(p);
                    }
                }
            }
        }
    };
    try {
        cgd.run(plugins);
    } catch (CyclicGraphDetector.CycleDetectedException e) {
        LOGGER.log(Level.WARNING, "Cyclic reference detected amongst bundled plugins: " + plugins, e);
        cannotDynamicLoad = true;
    }
    LOGGER.log(Level.FINE, "Sorted plugin load order: {0}", cgd.getSorted());
    LOGGER.log(Level.INFO, "Starting dynamic loading of optional bundled plugins");
    for (PluginWrapper plugin : cgd.getSorted()) {
        File archive = newPlugins.get(plugin.getShortName());
        if (archive == null) {
            // cannot happen, we put only plugins from newPlugins into the list and sorting should never
            // add, so the sorting should be a 1:1 mapping. We have this NPE check for safety only.
            continue;
        }
        try {
            pm.dynamicLoad(archive);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, String.format("Failed to dynamic load plugin %s version %s",
                    plugin.getShortName(), plugin.getVersion()), e);
            cannotDynamicLoad = true;
            break;
        } catch (InterruptedException e) {
            LOGGER.log(Level.WARNING, String.format("Interrupted while trying to dynamic load plugin %s version %s",
                    plugin.getShortName(), plugin.getVersion()), e);
            cannotDynamicLoad = true;
            break;
        } catch (RestartRequiredException e) {
            LOGGER.log(Level.WARNING, String.format("Plugin %s version %s does not support dynamic loading",
                    plugin.getShortName(), plugin.getVersion()), e);
            cannotDynamicLoad = true;
            break;
        }
    }
    LOGGER.log(Level.INFO, "Finished dynamic loading of optional bundled plugins, restart required {0}",
            cannotDynamicLoad);
    return cannotDynamicLoad;
}
From source file:org.geotools.data.ngi.NGIReader.java
private Geometry getNextGeometry(BufferedReader reader) {
    try {
        String gtype = reader.readLine().toUpperCase().trim();
        if (gtype.startsWith("POINT")) {
            return gf.createPoint(parseCoordinate(reader.readLine()));
        } else if (gtype.startsWith("TEXT")) {
            return gf.createPoint(parseCoordinate(reader.readLine()));
        } else if (gtype.startsWith("LINE")) {
            int numofPoints = parseInteger(reader.readLine());
            Coordinate[] coordinates = new Coordinate[numofPoints];
            for (int index = 0; index < numofPoints; index++) {
                coordinates[index] = parseCoordinate(reader.readLine());
            }
            return gf.createLineString(coordinates);
        } else if (gtype.startsWith("POLYGON")) {
            int numofRing = parseInteger(reader.readLine().replace("NUMPARTS", ""));
            LinearRing shell = null;
            LinearRing[] holes = numofRing > 1 ? new LinearRing[numofRing - 1] : null;
            for (int ringIndex = 0; ringIndex < numofRing; ringIndex++) {
                int numofPoints = parseInteger(reader.readLine());
                // NGI rings do not repeat the first coordinate, so reserve one extra slot
                // and close the ring explicitly after reading all points.
                Coordinate[] coordinates = new Coordinate[numofPoints + 1];
                for (int index = 0; index < numofPoints; index++) {
                    coordinates[index] = parseCoordinate(reader.readLine());
                }
                coordinates[numofPoints] = coordinates[0];
                if (ringIndex == 0) {
                    shell = gf.createLinearRing(coordinates);
                } else {
                    holes[ringIndex - 1] = gf.createLinearRing(coordinates);
                }
            }
            return gf.createPolygon(shell, holes);
        } else if (gtype.startsWith("MULTIPOINT")) {
            int numofPoints = parseInteger(reader.readLine());
            Coordinate[] coordinates = new Coordinate[numofPoints];
            for (int index = 0; index < numofPoints; index++) {
                coordinates[index] = parseCoordinate(reader.readLine());
            }
            return gf.createMultiPoint(coordinates);
        } else if (gtype.startsWith("MULTILINE")) {
            int numofParts = parseInteger(reader.readLine().replace("NUMPARTS", ""));
            LineString[] lineStrings = new LineString[numofParts];
            for (int partIndex = 0; partIndex < numofParts; partIndex++) {
                int numofPoints = parseInteger(reader.readLine());
                Coordinate[] coordinates = new Coordinate[numofPoints];
                for (int index = 0; index < numofPoints; index++) {
                    coordinates[index] = parseCoordinate(reader.readLine());
                }
                lineStrings[partIndex] = gf.createLineString(coordinates);
            }
            return gf.createMultiLineString(lineStrings);
        } else if (gtype.startsWith("MULTIPOLY")) {
            int numofParts = parseInteger(reader.readLine().replace("NUMPARTS", ""));
            Polygon[] polygons = new Polygon[numofParts];
            for (int partIndex = 0; partIndex < numofParts; partIndex++) {
                int numofRing = parseInteger(reader.readLine().replace("NUMPARTS", ""));
                LinearRing shell = null;
                LinearRing[] holes = numofRing > 1 ? new LinearRing[numofRing - 1] : null;
                for (int ringIndex = 0; ringIndex < numofRing; ringIndex++) {
                    int numofPoints = parseInteger(reader.readLine());
                    // NGI rings do not repeat the first coordinate, so reserve one extra slot
                    // and close the ring explicitly after reading all points.
                    Coordinate[] coordinates = new Coordinate[numofPoints + 1];
                    for (int index = 0; index < numofPoints; index++) {
                        coordinates[index] = parseCoordinate(reader.readLine());
                    }
                    coordinates[numofPoints] = coordinates[0];
                    if (ringIndex == 0) {
                        shell = gf.createLinearRing(coordinates);
                    } else {
                        holes[ringIndex - 1] = gf.createLinearRing(coordinates);
                    }
                }
                polygons[partIndex] = gf.createPolygon(shell, holes);
            }
            return gf.createMultiPolygon(polygons);
        }
    } catch (IOException e) {
        LOGGER.log(Level.FINER, e.getMessage(), e);
    }
    return null;
}
From source file:org.jafer.zclient.AbstractClient.java
/**
 * Description of the Method
 *
 * @param query Description of Parameter
 * @return Description of the Returned Value
 */
public int submitQuery(Object query) throws JaferException {
    logger.entering("ZClient", "public int submitQuery(Object query)");
    try {
        // reset the last search exception
        setSearchException((String[]) null, null);
        resultsByDB.clear();
        setDefaults();
        // check if query needs parsing
        if (isParseQuery())
            query = QueryParser.parseQuery(query);
        setQuery(query);
        // if a cache is not already configured then create a HashtableCache as default,
        // otherwise clear the current cache
        if (getCache() == null) {
            logger.log(Level.FINER, "No supplied cache, creating default HashtableCache");
            setCache(new HashtableCacheFactory(getDataCacheSize()).getCache());
        } else {
            getCache().clear();
        }
        connect();
        logger.exiting("ZClient", "public int submitQuery(Object query)");
        return search();
    } catch (QueryException e) {
        String message = userIP + "ZClient submitQuery(Object query); " + e.getMessage();
        logger.log(Level.SEVERE, message);
        setSearchException((String[]) null, new JaferException(e));
        throw e;
    } catch (JaferException exc) {
        // store the exception and throw it on
        setSearchException((String[]) null, exc);
        throw exc;
    }
}
From source file:org.geotools.arcsde.session.SessionPool.java
/**
 * @see org.geotools.arcsde.session.ISessionPool#getSession(boolean)
 */
public ISession getSession(final boolean transactional) throws IOException, UnavailableConnectionException {
    checkOpen();
    try {
        Session connection = null;
        if (transactional) {
            LOGGER.finest("Borrowing session from pool for transactional access");
            connection = (Session) pool.borrowObject();
        } else {
            synchronized (openSessionsNonTransactional) {
                try {
                    if (LOGGER.isLoggable(Level.FINER)) {
                        LOGGER.finer("Grabbing session from pool on " + Thread.currentThread().getName());
                    }
                    connection = (Session) pool.borrowObject();
                    if (LOGGER.isLoggable(Level.FINER)) {
                        LOGGER.finer("Got session from the pool on " + Thread.currentThread().getName());
                    }
                } catch (NoSuchElementException e) {
                    if (LOGGER.isLoggable(Level.FINER)) {
                        LOGGER.finer("No available sessions in the pool, falling back to queued session");
                    }
                    connection = openSessionsNonTransactional.remove();
                }
                openSessionsNonTransactional.add(connection);
                if (LOGGER.isLoggable(Level.FINER)) {
                    LOGGER.finer("Got session from the in use queue on " + Thread.currentThread().getName());
                }
            }
        }
        connection.markActive();
        return connection;
    } catch (NoSuchElementException e) {
        LOGGER.log(Level.WARNING, "Out of connections: " + e.getMessage() + ". Config: " + this.config);
        throw new UnavailableConnectionException(config.getMaxConnections(), this.config);
    } catch (SeException se) {
        ArcSdeException sdee = new ArcSdeException(se);
        LOGGER.log(Level.WARNING, "ArcSDE error getting connection for " + config, sdee);
        throw sdee;
    } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Unknown problem getting connection: " + e.getMessage(), e);
        throw (IOException) new IOException("Unknown problem fetching connection from connection pool").initCause(e);
    }
}
From source file:org.b3log.latke.plugin.PluginManager.java
/**
 * Registers the specified plugin into the specified holder.
 *
 * @param plugin the specified plugin
 * @param holder the specified holder
 */
private void register(final AbstractPlugin plugin, final HashMap<String, HashSet<AbstractPlugin>> holder) {
    final String rendererId = plugin.getRendererId();
    /*
     * A rendererId may hold multiple ids separated by ';'. Matching plugins via a Map is not very
     * flexible; a regular expression match pattern may be needed in the future.
     */
    final String[] redererIds = rendererId.split(";");
    for (String rid : redererIds) {
        HashSet<AbstractPlugin> set = holder.get(rid);
        if (null == set) {
            set = new HashSet<AbstractPlugin>();
            holder.put(rid, set);
        }
        set.add(plugin);
    }
    LOGGER.log(Level.FINER,
            "Registered plugin[name={0}, version={1}] for rendererId[name={2}], [{3}] plugins totally",
            new Object[] { plugin.getName(), plugin.getVersion(), rendererId, holder.size() });
}
From source file:org.geotools.data.ngi.NGISchemaReader.java
private Class<?> getGeometryType(BufferedReader reader) {
    try {
        String line = reader.readLine();
        while (line != null) {
            if (line.trim().equalsIgnoreCase("$GEOMETRIC_METADATA")) {
                String shapetypelist = reader.readLine().toUpperCase();
                shapetypelist = shapetypelist.substring(5, shapetypelist.length() - 1);
                // MASK(LINESTRING,POLYGON), MASK(LINESTRING,TEXT), MASK(LINESTRING)
                int pos = shapetypelist.indexOf("POLYGON");
                String shapetype;
                if (pos != -1) {
                    shapetype = "POLYGON";
                } else {
                    pos = shapetypelist.indexOf(",");
                    if (pos > 0) {
                        shapetype = shapetypelist.substring(0, pos);
                    } else {
                        shapetype = shapetypelist;
                    }
                }
                return getGeometryTypeFromName(shapetype);
            }
            line = reader.readLine();
        }
    } catch (IOException e) {
        LOGGER.log(Level.FINER, e.getMessage(), e);
    }
    return null;
}
From source file:com.esri.gpt.control.search.ServletSavedSearch.java
/**
 * Write saved searches.
 *
 * @param request the request
 * @param response the response
 * @param context the context
 * @param errorMessage the error message
 * @throws SearchException the search exception
 * @throws JSONException the JSON exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
private void writeSavedSearches(HttpServletRequest request, HttpServletResponse response, RequestContext context,
        String errorMessage) throws SearchException, JSONException, IOException {
    ISearchSaveRepository saveRpstry = SearchSaveRpstryFactory.getSearchSaveRepository();
    if (saveRpstry instanceof GptRepository) {
        ((GptRepository) saveRpstry).setRequestContext(context);
    }
    SavedSearchCriterias savedCriterias = saveRpstry.getSavedList(context.getUser());
    JSONArray resultsArray = new JSONArray();
    for (SavedSearchCriteria savedCriteria : savedCriterias) {
        JSONObject jObj = new JSONObject();
        jObj.put("id", savedCriteria.getId());
        jObj.put("name", savedCriteria.getName());
        String criteria = savedCriteria.getCriteria();
        try {
            InputStream inputStream = new ByteArrayInputStream(criteria.getBytes());
            Document doc = XMLUtils.newDocument(new InputSource(inputStream));
            SearchCriteria searchCriteria = new SearchCriteria(doc);
            MessageBroker messageBroker = new FacesContextBroker(request, response).extractMessageBroker();
            RestUrlBuilder builder = RestUrlBuilder.newBuilder(context, request, messageBroker);
            String id = SearchEngineLocal.ID;
            for (ISearchFilter filter : searchCriteria.getMiscelleniousFilters()) {
                if (filter instanceof SearchFilterHarvestSites) {
                    SearchFilterHarvestSites hFilter = (SearchFilterHarvestSites) filter;
                    id = hFilter.getSelectedHarvestSiteId();
                }
            }
            String params = builder.buildParameters(searchCriteria, "searchPage", id);
            criteria = params;
        } catch (Exception e) {
            LOG.log(Level.FINER, "", e);
        }
        jObj.put("criteria", criteria);
        resultsArray.put(jObj);
    }
    String contentType = "application/json";
    int indent = 0;
    if (Val.chkStr(request.getParameter("f")).equals("pjson")) {
        indent = 2;
        contentType = "text/plain";
    }
    JSONObject resultObj = new JSONObject();
    errorMessage = Val.chkStr(errorMessage);
    if (!errorMessage.equals("")) {
        resultObj.put("error", errorMessage);
    }
    resultObj.put("resultRecords", resultsArray);
    String content = resultObj.toString(indent);
    String callBack = Val.chkStr(request.getParameter("callBack"));
    if (callBack.equals("") == false) {
        // note: this appends the JSONP-wrapped payload after the raw JSON rather than replacing it
        content += callBack + "(" + content + ")";
    }
    writeCharacterResponse(response, content, "UTF-8", contentType);
}
From source file:org.geotools.data.ngi.NGIReader.java
private boolean nextRecord(BufferedReader reader) {
    try {
        String line = reader.readLine();
        while (line != null) {
            if (line.toUpperCase().indexOf("$RECORD") != -1) {
                return true;
            }
            if (line.toUpperCase().indexOf("<LAYER_END>") != -1) {
                return false;
            }
            line = reader.readLine();
        }
    } catch (IOException e) {
        LOGGER.log(Level.FINER, e.getMessage(), e);
    }
    return false;
}
From source file:diet.gridr.g5k.gui.G5kSummaryChart.java
/**
 * CardPanel displaying the data for the Grid5000 summary view
 *
 * @return card panel
 */
private JPanel getCardPanel() {
    if (cardPanel == null) {
        cardPanel = new JPanel();
        carder = new CardLayout();
        cardPanel.setLayout(carder);
        cardPanel.add("Stack Bar Chart", new StackBarChart());
        cardPanel.add("Bar Chart 3D", new BarChart3D());
        cardPanel.add("Layered Bar Chart", new LayeredBarChart());
        cardPanel.add("Pie Chart", new PieChart());
        cardPanel.add("Pie Chart 3D", new PieChart3D());
        carder.show(cardPanel, "Stack Bar Chart");
        LoggingManager.log(Level.FINER, LoggingManager.RESOURCESTOOL, this.getClass().getName(),
                "getCardPanel", "Charts added");
    }
    return cardPanel;
}