List of usage examples for java.util.logging.Logger.warning()
public void warning(String msg)
public void warning(Supplier<String> msgSupplier)
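Most of the examples below call the String overload; the Supplier overload (Java 8+) defers building the message until the WARNING level check passes. A minimal standalone sketch, not taken from the sources below (the logger name "demo", the messages, and the computeUsage() helper are illustrative only):

import java.util.logging.Logger;

public class WarningDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("demo");

        // String overload: the message is always constructed
        logger.warning("Disk usage above 90%");

        // Supplier overload (Java 8+): the message is only built if WARNING is loggable
        logger.warning(() -> "Disk usage above " + computeUsage() + "%");
    }

    private static int computeUsage() {
        return 92; // placeholder value for the example
    }
}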
From source file:com.codelanx.codelanxlib.util.auth.UUIDFetcher.java
/**
 * Makes a request to mojang's servers of a sublist of at most 100 player's
 * names. Additionally can provide progress outputs
 *
 * @since 0.0.1
 * @version 0.1.0
 *
 * @param output Whether or not to print output
 * @param log The {@link Logger} to print to
 * @param doOutput A {@link Predicate} representing when to output a number
 * @return A {@link Map} of player names to their {@link UUID}s
 * @throws IOException If there's a problem sending or receiving the request
 * @throws ParseException If the request response cannot be read
 * @throws InterruptedException If the thread is interrupted while sleeping
 */
public Map<String, UUID> callWithProgessOutput(boolean output, Logger log,
        Predicate<? super Integer> doOutput) throws IOException, ParseException, InterruptedException {
    Map<String, UUID> uuidMap = new HashMap<>();
    int totalNames = this.names.size();
    int completed = 0;
    int failed = 0;
    int requests = (int) Math.ceil(this.names.size() / UUIDFetcher.PROFILES_PER_REQUEST);
    for (int i = 0; i < requests; i++) {
        List<String> request = names.subList(i * 100, Math.min((i + 1) * 100, this.names.size()));
        String body = JSONArray.toJSONString(request);
        HttpURLConnection connection = UUIDFetcher.createConnection();
        UUIDFetcher.writeBody(connection, body);
        if (connection.getResponseCode() == 429 && this.rateLimiting) {
            log.warning("[UUIDFetcher] Rate limit hit! Waiting 10 minutes until continuing conversion...");
            Thread.sleep(TimeUnit.MINUTES.toMillis(10));
            connection = UUIDFetcher.createConnection();
            UUIDFetcher.writeBody(connection, body);
        }
        JSONArray array = (JSONArray) this.jsonParser.parse(new InputStreamReader(connection.getInputStream()));
        completed += array.size();
        failed += request.size() - array.size();
        for (Object profile : array) {
            JSONObject jsonProfile = (JSONObject) profile;
            UUID uuid = UUIDFetcher.getUUID((String) jsonProfile.get("id"));
            uuidMap.put((String) jsonProfile.get("name"), uuid);
        }
        if (output) {
            int processed = completed + failed;
            if (doOutput.test(processed) || processed == totalNames) {
                log.info(String.format("[UUIDFetcher] Progress: %d/%d, %.2f%%, Failed names: %d",
                        processed, totalNames, ((double) processed / totalNames) * 100D, failed));
            }
        }
    }
    return uuidMap;
}
From source file:org.usrz.libs.logging.LevelDebugTest.java
@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasLastEvent("at Fine level");
    assertTrue(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasLastEvent("at Config level");
    assertTrue(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasLastEvent("at Info level");
    assertTrue(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));
}
From source file:org.usrz.libs.logging.LevelInfoTest.java
@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasNoLastEvent("at Fine level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasLastEvent("at Config level");
    assertTrue(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasLastEvent("at Info level");
    assertTrue(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));
}
From source file:org.usrz.libs.logging.LevelErrorTest.java
@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasNoLastEvent("at Fine level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasNoLastEvent("at Config level");
    assertFalse(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasNoLastEvent("at Info level");
    assertFalse(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasNoLastEvent("at Warning level");
    assertFalse(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));
}
From source file:org.usrz.libs.logging.LevelWarningTest.java
@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasNoLastEvent("at Fine level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasNoLastEvent("at Config level");
    assertFalse(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasNoLastEvent("at Info level");
    assertFalse(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));
}
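The four tests above issue the same calls against logging back-ends configured for the Debug, Info, Error, and Warning test cases and assert which records are actually published. As a point of comparison, here is a minimal standalone sketch using plain java.util.logging only (no test framework or custom appender; the logger name and messages are illustrative) of how the configured level gates warning():

import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class LevelGateDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger("level.gate.demo");
        logger.setUseParentHandlers(false);
        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.ALL);
        logger.addHandler(handler);

        logger.setLevel(Level.WARNING);
        System.out.println(logger.isLoggable(Level.WARNING)); // true
        logger.warning("published");                          // reaches the handler

        logger.setLevel(Level.SEVERE);
        System.out.println(logger.isLoggable(Level.WARNING)); // false
        logger.warning("suppressed");                         // dropped by the level check
    }
}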
From source file:org.protempa.backend.dsb.relationaldb.EventResultProcessor.java
@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<Event> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    //boolean hasRefs = entitySpec.getInboundRefSpecs().length > 0;
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    JDBCPositionFormat positionParser = entitySpec.getPositionParser();

    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }
        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
                continue;
            }
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);
        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    continue;
                }
            }
        } else {
            i++;
        }
        ColumnSpec finishTimeSpec = entitySpec.getFinishTimeSpec();
        Granularity gran = entitySpec.getGranularity();
        Interval interval = null;
        if (finishTimeSpec == null) {
            Long d = null;
            try {
                d = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
                i++;
            } catch (SQLException e) {
                logger.log(Level.WARNING,
                        "Could not parse timestamp. Leaving the start time/timestamp unset.", e);
            }
            interval = intervalFactory.getInstance(d, gran);
        } else {
            Long start = null;
            try {
                start = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
            } catch (SQLException e) {
                logger.log(Level.WARNING,
                        "Could not parse start time. Leaving the start time/timestamp unset.", e);
            } finally {
                i++;
            }
            Long finish = null;
            try {
                finish = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
            } catch (SQLException e) {
                logger.log(Level.WARNING, "Could not parse start time. Leaving the finish time unset.", e);
            } finally {
                i++;
            }
            if (finish != null && start != null && finish.compareTo(start) < 0) {
                logger.log(Level.WARNING, "Finish {0} is before start {1}: Leaving time unset",
                        new Object[] { finish, start });
                interval = intervalFactory.getInstance(null, gran, null, gran);
            } else {
                interval = intervalFactory.getInstance(start, gran, finish, gran);
            }
        }
        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);
        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }
        Event event = new Event(propId, uniqueId);
        event.setSourceSystem(dsType);
        event.setInterval(interval);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            event.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        logger.log(Level.FINEST, "Created event {0}", event);
        results.add(keyId, event);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total", "Retrieved {0} records total");
    }
}
From source file:me.realized.tm.utilities.profile.UUIDFetcher.java
/**
 * Makes a request to mojang's servers of a sublist of at most 100 player's
 * names. Additionally can provide progress outputs
 *
 * @param output Whether or not to print output
 * @param log The {@link Logger} to print to
 * @param doOutput A {@link Predicate} representing when to output a number
 * @return A {@link Map} of player names to their {@link UUID}s
 * @throws IOException If there's a problem sending or receiving the request
 * @throws ParseException If the request response cannot be read
 * @throws InterruptedException If the thread is interrupted while sleeping
 * @version 0.1.0
 * @since 0.0.1
 */
public Map<String, UUID> callWithProgressOutput(boolean output, Logger log,
        Predicate<? super Integer> doOutput) throws IOException, ParseException, InterruptedException {
    Map<String, UUID> uuidMap = new HashMap<>();
    int totalNames = this.names.size();
    int completed = 0;
    int failed = 0;
    int requests = (int) Math.ceil(this.names.size() / UUIDFetcher.PROFILES_PER_REQUEST);
    for (int i = 0; i < requests; i++) {
        List<String> request = names.subList(i * 100, Math.min((i + 1) * 100, this.names.size()));
        String body = JSONArray.toJSONString(request);
        HttpURLConnection connection = UUIDFetcher.createConnection();
        UUIDFetcher.writeBody(connection, body);
        if (connection.getResponseCode() == 429 && this.rateLimiting) {
            String out = "[UUIDFetcher] Rate limit hit! Waiting 10 minutes until continuing conversion...";
            if (log != null) {
                log.warning(out);
            } else {
                Bukkit.getLogger().warning(out);
            }
            Thread.sleep(TimeUnit.MINUTES.toMillis(10));
            connection = UUIDFetcher.createConnection();
            UUIDFetcher.writeBody(connection, body);
        }
        JSONArray array = (JSONArray) this.jsonParser.parse(new InputStreamReader(connection.getInputStream()));
        completed += array.size();
        failed += request.size() - array.size();
        for (Object profile : array) {
            JSONObject jsonProfile = (JSONObject) profile;
            UUID uuid = UUIDFetcher.getUUID((String) jsonProfile.get("id"));
            uuidMap.put((String) jsonProfile.get("name"), uuid);
        }
        if (output) {
            int processed = completed + failed;
            if (doOutput.apply(processed) || processed == totalNames) {
                if (log != null) {
                    log.info(String.format("[UUIDFetcher] Progress: %d/%d, %.2f%%, Failed names: %d",
                            processed, totalNames, ((double) processed / totalNames) * 100D, failed));
                }
            }
        }
    }
    return uuidMap;
}
From source file:hudson.cli.CLI.java
public static int _main(String[] _args) throws Exception {
    List<String> args = Arrays.asList(_args);
    PrivateKeyProvider provider = new PrivateKeyProvider();
    boolean sshAuthRequestedExplicitly = false;
    String httpProxy = null;

    String url = System.getenv("JENKINS_URL");

    if (url == null)
        url = System.getenv("HUDSON_URL");

    boolean tryLoadPKey = true;

    Mode mode = null;

    String user = null;
    String auth = null;

    String userIdEnv = System.getenv("JENKINS_USER_ID");
    String tokenEnv = System.getenv("JENKINS_API_TOKEN");

    boolean strictHostKey = false;

    while (!args.isEmpty()) {
        String head = args.get(0);
        if (head.equals("-version")) {
            System.out.println("Version: " + computeVersion());
            return 0;
        }
        if (head.equals("-http")) {
            if (mode != null) {
                printUsage("-http clashes with previously defined mode " + mode);
                return -1;
            }
            mode = Mode.HTTP;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-ssh")) {
            if (mode != null) {
                printUsage("-ssh clashes with previously defined mode " + mode);
                return -1;
            }
            mode = Mode.SSH;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-remoting")) {
            if (mode != null) {
                printUsage("-remoting clashes with previously defined mode " + mode);
                return -1;
            }
            mode = Mode.REMOTING;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-s") && args.size() >= 2) {
            url = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-noCertificateCheck")) {
            LOGGER.info("Skipping HTTPS certificate checks altogether. Note that this is not secure at all.");
            SSLContext context = SSLContext.getInstance("TLS");
            context.init(null, new TrustManager[] { new NoCheckTrustManager() }, new SecureRandom());
            HttpsURLConnection.setDefaultSSLSocketFactory(context.getSocketFactory());
            // bypass host name check, too.
            HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
                public boolean verify(String s, SSLSession sslSession) {
                    return true;
                }
            });
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-noKeyAuth")) {
            tryLoadPKey = false;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-i") && args.size() >= 2) {
            File f = new File(args.get(1));
            if (!f.exists()) {
                printUsage(Messages.CLI_NoSuchFileExists(f));
                return -1;
            }
            provider.readFrom(f);
            args = args.subList(2, args.size());
            sshAuthRequestedExplicitly = true;
            continue;
        }
        if (head.equals("-strictHostKey")) {
            strictHostKey = true;
            args = args.subList(1, args.size());
            continue;
        }
        if (head.equals("-user") && args.size() >= 2) {
            user = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-auth") && args.size() >= 2) {
            auth = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-p") && args.size() >= 2) {
            httpProxy = args.get(1);
            args = args.subList(2, args.size());
            continue;
        }
        if (head.equals("-logger") && args.size() >= 2) {
            Level level = parse(args.get(1));
            for (Handler h : Logger.getLogger("").getHandlers()) {
                h.setLevel(level);
            }
            for (Logger logger : new Logger[] { LOGGER, FullDuplexHttpStream.LOGGER, PlainCLIProtocol.LOGGER,
                    Logger.getLogger("org.apache.sshd") }) { // perhaps also Channel
                logger.setLevel(level);
            }
            args = args.subList(2, args.size());
            continue;
        }
        break;
    }

    if (url == null) {
        printUsage(Messages.CLI_NoURL());
        return -1;
    }

    if (auth == null) {
        // -auth option not set
        if (StringUtils.isNotBlank(userIdEnv) && StringUtils.isNotBlank(tokenEnv)) {
            auth = StringUtils.defaultString(userIdEnv).concat(":").concat(StringUtils.defaultString(tokenEnv));
        } else if (StringUtils.isNotBlank(userIdEnv) || StringUtils.isNotBlank(tokenEnv)) {
            printUsage(Messages.CLI_BadAuth());
            return -1;
        }
        // Otherwise, none credentials were set
    }

    if (!url.endsWith("/")) {
        url += '/';
    }

    if (args.isEmpty())
        args = Arrays.asList("help"); // default to help

    if (tryLoadPKey && !provider.hasKeys())
        provider.readFromDefaultLocations();

    if (mode == null) {
        mode = Mode.HTTP;
    }

    LOGGER.log(FINE, "using connection mode {0}", mode);

    if (user != null && auth != null) {
        LOGGER.warning("-user and -auth are mutually exclusive");
    }

    if (mode == Mode.SSH) {
        if (user == null) {
            // TODO SshCliAuthenticator already autodetects the user based on public key; why cannot AsynchronousCommand.getCurrentUser do the same?
            LOGGER.warning("-user required when using -ssh");
            return -1;
        }
        return SSHCLI.sshConnection(url, user, args, provider, strictHostKey);
    }

    if (strictHostKey) {
        LOGGER.warning("-strictHostKey meaningful only with -ssh");
    }

    if (user != null) {
        LOGGER.warning("Warning: -user ignored unless using -ssh");
    }

    CLIConnectionFactory factory = new CLIConnectionFactory().url(url).httpsProxyTunnel(httpProxy);
    String userInfo = new URL(url).getUserInfo();
    if (userInfo != null) {
        factory = factory.basicAuth(userInfo);
    } else if (auth != null) {
        factory = factory.basicAuth(
                auth.startsWith("@") ? FileUtils.readFileToString(new File(auth.substring(1))).trim() : auth);
    }

    if (mode == Mode.HTTP) {
        return plainHttpConnection(url, args, factory);
    }

    CLI cli = factory.connect();
    try {
        if (provider.hasKeys()) {
            try {
                // TODO: server verification
                cli.authenticate(provider.getKeys());
            } catch (IllegalStateException e) {
                if (sshAuthRequestedExplicitly) {
                    LOGGER.warning("The server doesn't support public key authentication");
                    return -1;
                }
            } catch (UnsupportedOperationException e) {
                if (sshAuthRequestedExplicitly) {
                    LOGGER.warning("The server doesn't support public key authentication");
                    return -1;
                }
            } catch (GeneralSecurityException e) {
                if (sshAuthRequestedExplicitly) {
                    LOGGER.log(WARNING, null, e);
                    return -1;
                }
                LOGGER.warning("Failed to authenticate with your SSH keys. Proceeding as anonymous");
                LOGGER.log(FINE, null, e);
            }
        }

        // execute the command
        // Arrays.asList is not serializable --- see 6835580
        args = new ArrayList<String>(args);
        return cli.execute(args, System.in, System.out, System.err);
    } finally {
        cli.close();
    }
}
From source file:net.roboconf.target.docker.internal.DockerUtils.java
/**
 * Finds the options and tries to configure them on the creation command.
 * @param options the options (key = name, value = option value)
 * @param cmd a non-null command to create a container
 * @throws TargetException
 */
public static void configureOptions(Map<String, String> options, CreateContainerCmd cmd) throws TargetException {

    Logger logger = Logger.getLogger(DockerUtils.class.getName());

    // Basically, we had two choices:
    // 1. Map our properties to the Java REST API.
    // 2. By-pass it and send our custom JSon object.
    //
    // The second option is much more complicated.
    // So, we use Java reflection and some hacks to match Docker properties
    // with the setter methods available in the API. The API remains in charge
    // of generating the right JSon objects.
    Map<String, List<String>> hackedSetterNames = new HashMap<>();

    // Remains from Docker-Java 2.x (the mechanism still works)
    //
    // List<String> list = new ArrayList<> ();
    // list.add( "withMemoryLimit" );
    // hackedSetterNames.put( "withMemory", list );

    // List known types
    List<Class<?>> types = new ArrayList<>();
    types.add(String.class);
    types.add(String[].class);
    types.add(long.class);
    types.add(Long.class);
    types.add(int.class);
    types.add(Integer.class);
    types.add(boolean.class);
    types.add(Boolean.class);
    types.add(Capability[].class);

    // Deal with the options
    for (Map.Entry<String, String> entry : options.entrySet()) {
        String optionValue = entry.getValue();

        // Now, guess what option to set
        String methodName = entry.getKey().replace("-", " ").trim();
        methodName = WordUtils.capitalize(methodName);
        methodName = methodName.replace(" ", "");
        methodName = "with" + methodName;

        Method _m = null;
        for (Method m : cmd.getClass().getMethods()) {
            boolean sameMethod = methodName.equalsIgnoreCase(m.getName());
            boolean methodWithAlias = hackedSetterNames.containsKey(methodName)
                    && hackedSetterNames.get(methodName).contains(m.getName());

            if (sameMethod || methodWithAlias) {

                // Only one parameter?
                if (m.getParameterTypes().length != 1) {
                    logger.warning("A method was found for " + entry.getKey()
                            + " but it does not have the right number of parameters.");
                    continue;
                }

                // The right type?
                if (!types.contains(m.getParameterTypes()[0])) {
                    // Since Docker-java 3.x, there are two methods to set cap-add and cap-drop.
                    // One takes an array as parameter, the other takes a list.
                    logger.warning("A method was found for " + entry.getKey()
                            + " but it does not have the right parameter type. "
                            + "Skipping it. You may want to add a feature request.");
                    continue;
                }

                // That's probably the right one.
                _m = m;
                break;
            }
        }

        // Handle errors
        if (_m == null)
            throw new TargetException(
                    "Nothing matched the " + entry.getKey() + " option in the REST API. Please, report it.");

        // Try to set the option in the REST client
        try {
            Object o = prepareParameter(optionValue, _m.getParameterTypes()[0]);
            _m.invoke(cmd, o);

        } catch (ReflectiveOperationException | IllegalArgumentException e) {
            throw new TargetException("Option " + entry.getKey() + " could not be set.");
        }
    }
}
From source file:org.torproject.ernie.db.ArchiveReader.java
public ArchiveReader(RelayDescriptorParser rdp, String archivesDir, boolean keepImportHistory) {
    int parsedFiles = 0, ignoredFiles = 0;
    Logger logger = Logger.getLogger(ArchiveReader.class.getName());
    SortedSet<String> archivesImportHistory = new TreeSet<String>();
    File archivesImportHistoryFile = new File("stats/archives-import-history");
    if (keepImportHistory && archivesImportHistoryFile.exists()) {
        try {
            BufferedReader br = new BufferedReader(new FileReader(archivesImportHistoryFile));
            String line = null;
            while ((line = br.readLine()) != null) {
                archivesImportHistory.add(line);
            }
            br.close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Could not read in archives import " + "history file. Skipping.");
        }
    }
    if (new File(archivesDir).exists()) {
        logger.fine("Importing files in directory " + archivesDir + "/...");
        Stack<File> filesInInputDir = new Stack<File>();
        filesInInputDir.add(new File(archivesDir));
        List<File> problems = new ArrayList<File>();
        while (!filesInInputDir.isEmpty()) {
            File pop = filesInInputDir.pop();
            if (pop.isDirectory()) {
                for (File f : pop.listFiles()) {
                    filesInInputDir.add(f);
                }
            } else {
                if (rdp != null) {
                    try {
                        BufferedInputStream bis = null;
                        if (keepImportHistory && archivesImportHistory.contains(pop.getName())) {
                            ignoredFiles++;
                            continue;
                        } else if (pop.getName().endsWith(".tar.bz2")) {
                            logger.warning(
                                    "Cannot parse compressed tarball " + pop.getAbsolutePath() + ". Skipping.");
                            continue;
                        } else if (pop.getName().endsWith(".bz2")) {
                            FileInputStream fis = new FileInputStream(pop);
                            BZip2CompressorInputStream bcis = new BZip2CompressorInputStream(fis);
                            bis = new BufferedInputStream(bcis);
                        } else {
                            FileInputStream fis = new FileInputStream(pop);
                            bis = new BufferedInputStream(fis);
                        }
                        if (keepImportHistory) {
                            archivesImportHistory.add(pop.getName());
                        }
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        int len;
                        byte[] data = new byte[1024];
                        while ((len = bis.read(data, 0, 1024)) >= 0) {
                            baos.write(data, 0, len);
                        }
                        bis.close();
                        byte[] allData = baos.toByteArray();
                        rdp.parse(allData);
                        parsedFiles++;
                    } catch (IOException e) {
                        problems.add(pop);
                        if (problems.size() > 3) {
                            break;
                        }
                    }
                }
            }
        }
        if (problems.isEmpty()) {
            logger.fine("Finished importing files in directory " + archivesDir + "/.");
        } else {
            StringBuilder sb = new StringBuilder(
                    "Failed importing files in " + "directory " + archivesDir + "/:");
            int printed = 0;
            for (File f : problems) {
                sb.append("\n " + f.getAbsolutePath());
                if (++printed >= 3) {
                    sb.append("\n ... more");
                    break;
                }
            }
        }
    }
    if (keepImportHistory) {
        try {
            archivesImportHistoryFile.getParentFile().mkdirs();
            BufferedWriter bw = new BufferedWriter(new FileWriter(archivesImportHistoryFile));
            for (String line : archivesImportHistory) {
                bw.write(line + "\n");
            }
            bw.close();
        } catch (IOException e) {
            logger.log(Level.WARNING, "Could not write archives import " + "history file.");
        }
    }
    logger.info("Finished importing relay descriptors from local " + "directory:\nParsed " + parsedFiles
            + ", ignored " + ignoredFiles + " files.");
}