Usage examples for java.util.Timer.cancel()
public void cancel()
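Terminates this timer, discarding any currently scheduled tasks. It does not interfere with a task that is already executing; once cancelled, no further tasks may be scheduled on the timer, and calling cancel() again has no effect. A minimal, self-contained sketch of the usual arm-then-disarm pattern (the class name, message, and timeout are illustrative):

import java.util.Timer;
import java.util.TimerTask;

public class CancelSketch {
    public static void main(String[] args) {
        // Daemon timer: its background thread cannot keep the JVM alive on its own.
        final Timer timer = new Timer(true);
        try {
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    System.out.println("timeout fired");
                }
            }, 5_000L); // one-shot task, 5 seconds from now
            // ... guarded work goes here ...
        } finally {
            // Discard the pending task; calling cancel() repeatedly is harmless.
            timer.cancel();
        }
    }
}

The examples below show the same idea in production code: watchdogs that interrupt a worker thread, periodic progress or logging tasks, and one-shot process timeouts, each disarmed with cancel() once the guarded work completes.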
From source file: org.apache.cloud.rdf.web.sail.RdfController.java

@RequestMapping(value = "/queryrdf", method = { RequestMethod.GET, RequestMethod.POST })
public void queryRdf(@RequestParam("query") final String query,
        @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, required = false) String auth,
        @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_CV, required = false) final String vis,
        @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_INFER, required = false) final String infer,
        @RequestParam(value = "nullout", required = false) final String nullout,
        @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_RESULT_FORMAT, required = false) final String emit,
        @RequestParam(value = "padding", required = false) final String padding,
        @RequestParam(value = "callback", required = false) final String callback,
        final HttpServletRequest request, final HttpServletResponse response) {
    // WARNING: if you add to the above request variables,
    // be sure to validate and encode since they come from the outside and could contain odd damaging character sequences.
    SailRepositoryConnection conn = null;
    final Thread queryThread = Thread.currentThread();
    auth = StringUtils.arrayToCommaDelimitedString(provider.getUserAuths(request));
    final Timer timer = new Timer();
    timer.schedule(new TimerTask() {
        @Override
        public void run() {
            log.debug("interrupting");
            queryThread.interrupt();
        }
    }, QUERY_TIME_OUT_SECONDS * 1000);
    try {
        final ServletOutputStream os = response.getOutputStream();
        conn = repository.getConnection();

        final Boolean isBlankQuery = StringUtils.isEmpty(query);
        final ParsedOperation operation = QueryParserUtil.parseOperation(QueryLanguage.SPARQL, query, null);

        final Boolean requestedCallback = !StringUtils.isEmpty(callback);
        final Boolean requestedFormat = !StringUtils.isEmpty(emit);

        if (!isBlankQuery) {
            if (operation instanceof ParsedGraphQuery) {
                // Perform Graph Query
                final RDFHandler handler = new RDFXMLWriter(os);
                response.setContentType("text/xml");
                performGraphQuery(query, conn, auth, infer, nullout, handler);
            } else if (operation instanceof ParsedTupleQuery) {
                // Perform Tuple Query
                TupleQueryResultHandler handler;
                if (requestedFormat && emit.equalsIgnoreCase("json")) {
                    handler = new SPARQLResultsJSONWriter(os);
                    response.setContentType("application/json");
                } else {
                    handler = new SPARQLResultsXMLWriter(os);
                    response.setContentType("text/xml");
                }
                performQuery(query, conn, auth, infer, nullout, handler);
            } else if (operation instanceof ParsedUpdate) {
                // Perform Update Query
                performUpdate(query, conn, os, infer, vis);
            } else {
                throw new MalformedQueryException("Cannot process query. Query type not supported.");
            }
        }

        if (requestedCallback) {
            os.print(")");
        }
    } catch (final Exception e) {
        log.error("Error running query", e);
        throw new RuntimeException(e);
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (final RepositoryException e) {
                log.error("Error closing connection", e);
            }
        }
    }
    timer.cancel();
}
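In the handler above, timer.cancel() sits after the finally block, so the interrupting watchdog stays armed if the catch clause rethrows. A minimal sketch of the same watchdog with the cancel moved into finally so it runs on both success and failure (doQuery() and the timeout constant are hypothetical stand-ins):

import java.util.Timer;
import java.util.TimerTask;

public class QueryWatchdog {
    static final long QUERY_TIME_OUT_SECONDS = 120;

    public static void main(String[] args) {
        final Thread queryThread = Thread.currentThread();
        final Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                queryThread.interrupt(); // abort a query that ran too long
            }
        }, QUERY_TIME_OUT_SECONDS * 1000);
        try {
            doQuery();
        } finally {
            timer.cancel(); // disarm the watchdog on success and failure alike
        }
    }

    static void doQuery() {
        // placeholder for the actual query work
    }
}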
From source file: org.getobjects.eoaccess.EOAdaptor.java

public void maintainPool() {
    final boolean debugOn = log.isDebugEnabled();

    /* scan pool for connections which should be closed */
    if (debugOn)
        log.debug("running maintenance ...");

    List<EOAdaptorChannel> toBeClosed = null;
    Timer timer = null;

    /* collect expired connections */
    synchronized (this) {
        /* scan for connections which should be closed */
        for (EOAdaptorChannel entry : this.availableChannels) {
            if (!this.shouldKeepChannel(entry)) {
                if (toBeClosed == null)
                    toBeClosed = new ArrayList<EOAdaptorChannel>(4);
                toBeClosed.add(entry);
            }
        }

        /* remove expired entries */
        if (toBeClosed != null) {
            if (debugOn)
                log.debug(" removing expired entries ...");
            for (EOAdaptorChannel entry : toBeClosed)
                this.availableChannels.remove(entry);
        }

        /* stop timer if we don't need it anymore */
        if (this.maintenanceTimer != null && this.availableChannels.size() == 0
                && this.checkedOutChannels.size() == 0) {
            timer = this.maintenanceTimer;
            this.maintenanceTimer = null;
        }

        this.openCountSinceLastMaintenance = 0;
        this.releaseCountSinceLastMaintenance = 0;
    }

    /* close expired entries */
    if (toBeClosed != null) {
        if (debugOn)
            log.debug(" closing expired entries ...");
        for (EOAdaptorChannel entry : toBeClosed)
            this.closeChannel(entry);
    }

    /* stop timer */
    if (timer != null) {
        /* Hm, will this do a hard cancel to our thread? Not really relevant
         * since we are done anyways ;-). */
        timer.cancel();
        timer = null;
    }

    if (debugOn)
        log.debug("pool maintenance done.");
}
From source file: password.pwm.util.localdb.LocalDBUtility.java

private void importLocalDB(final InputStream inputStream, final Appendable out, final long totalBytes)
        throws PwmOperationalException, IOException {
    this.prepareForImport();

    importLineCounter = 0;
    if (totalBytes > 0) {
        writeStringToOut(out, "total bytes in localdb import source: " + totalBytes);
    }

    writeStringToOut(out, "beginning localdb import...");

    final Instant startTime = Instant.now();
    final TransactionSizeCalculator transactionCalculator = new TransactionSizeCalculator(
            new TransactionSizeCalculator.SettingsBuilder()
                    .setDurationGoal(new TimeDuration(100, TimeUnit.MILLISECONDS)).setMinTransactions(50)
                    .setMaxTransactions(5 * 1000).createSettings());

    final Map<LocalDB.DB, Map<String, String>> transactionMap = new HashMap<>();
    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
        transactionMap.put(loopDB, new TreeMap<>());
    }

    final CountingInputStream countingInputStream = new CountingInputStream(inputStream);
    final EventRateMeter eventRateMeter = new EventRateMeter(TimeDuration.MINUTE);

    final Timer statTimer = new Timer(true);
    statTimer.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            String output = "";
            if (totalBytes > 0) {
                final ProgressInfo progressInfo = new ProgressInfo(startTime, totalBytes,
                        countingInputStream.getByteCount());
                output += progressInfo.debugOutput();
            } else {
                output += "recordsImported=" + importLineCounter;
            }
            output += ", avgTransactionSize=" + transactionCalculator.getTransactionSize()
                    + ", recordsPerMinute="
                    + eventRateMeter.readEventRate().setScale(2, BigDecimal.ROUND_DOWN);
            writeStringToOut(out, output);
        }
    }, 30 * 1000, 30 * 1000);

    Reader csvReader = null;
    try {
        csvReader = new InputStreamReader(new GZIPInputStream(countingInputStream, GZIP_BUFFER_SIZE),
                PwmConstants.DEFAULT_CHARSET);
        for (final CSVRecord record : PwmConstants.DEFAULT_CSV_FORMAT.parse(csvReader)) {
            importLineCounter++;
            eventRateMeter.markEvents(1);
            final String dbName_recordStr = record.get(0);
            final LocalDB.DB db = JavaHelper.readEnumFromString(LocalDB.DB.class, null, dbName_recordStr);
            final String key = record.get(1);
            final String value = record.get(2);
            if (db == null) {
                writeStringToOut(out, "ignoring localdb import record #" + importLineCounter
                        + ", invalid DB name '" + dbName_recordStr + "'");
            } else {
                transactionMap.get(db).put(key, value);
                int cachedTransactions = 0;
                for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
                    cachedTransactions += transactionMap.get(loopDB).size();
                }
                if (cachedTransactions >= transactionCalculator.getTransactionSize()) {
                    final long startTxnTime = System.currentTimeMillis();
                    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
                        localDB.putAll(loopDB, transactionMap.get(loopDB));
                        transactionMap.get(loopDB).clear();
                    }
                    transactionCalculator.recordLastTransactionDuration(TimeDuration.fromCurrent(startTxnTime));
                }
            }
        }
    } finally {
        LOGGER.trace("import process completed");
        statTimer.cancel();
        IOUtils.closeQuietly(csvReader);
        IOUtils.closeQuietly(countingInputStream);
    }

    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
        localDB.putAll(loopDB, transactionMap.get(loopDB));
        transactionMap.get(loopDB).clear();
    }

    this.markImportComplete();

    writeStringToOut(out, "restore complete, restored " + importLineCounter + " records in "
            + TimeDuration.fromCurrent(startTime).asLongString());
    statTimer.cancel();
}
From source file: com.delphix.session.test.ServiceTest.java

@Test
public void testSessionReset() {
    int numThreads = 4;

    // Configure the server
    Server server = serverManager.locate(helloService.getServiceName());

    final Timer timer = new Timer();

    server.addListener(new SessionEventListener(new SessionEventRunnable() {
        @Override
        public void work() {
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    nexus.close();
                }
            }, 500);
        }
    }));

    // Create the session
    ClientConfig spec = initServiceSpec(new HelloService(helloService));
    ClientNexus client = clientManager.create(spec);

    login(client);

    // Issue commands over the fore channel
    issueCommands(client, numThreads, 32, 0);

    // Wait for the login to complete
    awaitDone(numThreads + 1);

    timer.cancel();
}
From source file: com.gochinatv.datasync.util.Shell.java

/**
 * Run a command
 */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);

    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }

    process = builder.start();
    if (timeOutInterval > 0) {
        timeOutTimer = new Timer();
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        // One-time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
    BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    final StringBuffer errMsg = new StringBuffer();

    // Read the error and input streams as this would free up the buffers.
    // Free the error stream buffer.
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    errMsg.append(line);
                    errMsg.append(System.getProperty("line.separator"));
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ignored) {
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        try {
            // make sure that the error thread exits
            errThread.join();
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted while reading the error stream", ie);
        }
        completed.set(true);
        // The timeout thread handling is taken care of in the finally block.
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if ((timeOutTimer != null) && !timedOut.get()) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            inReader.close();
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        if (!completed.get()) {
            errThread.interrupt();
        }
        try {
            errReader.close();
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = System.currentTimeMillis();
    }
}
From source file: org.apache.hadoop.mapred.util.Shell.java

/** Run a command */
private void runCommand() throws IOException {
    ProcessBuilder builder = new ProcessBuilder(getExecString());
    Timer timeOutTimer = null;
    ShellTimeoutTimerTask timeoutTimerTask = null;
    timedOut = new AtomicBoolean(false);
    completed = new AtomicBoolean(false);

    if (environment != null) {
        builder.environment().putAll(this.environment);
    }
    if (dir != null) {
        builder.directory(this.dir);
    }

    process = builder.start();
    if (timeOutInterval > 0) {
        timeOutTimer = new Timer();
        timeoutTimerTask = new ShellTimeoutTimerTask(this);
        // One-time scheduling.
        timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
    }
    final BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
    BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
    final StringBuffer errMsg = new StringBuffer();

    // Read the error and input streams as this would free up the buffers.
    // Free the error stream buffer.
    Thread errThread = new Thread() {
        @Override
        public void run() {
            try {
                String line = errReader.readLine();
                while ((line != null) && !isInterrupted()) {
                    errMsg.append(line);
                    errMsg.append(System.getProperty("line.separator"));
                    line = errReader.readLine();
                }
            } catch (IOException ioe) {
                LOG.warn("Error reading the error stream", ioe);
            }
        }
    };
    try {
        errThread.start();
    } catch (IllegalStateException ise) {
    }
    try {
        parseExecResult(inReader); // parse the output
        // clear the input stream buffer
        String line = inReader.readLine();
        while (line != null) {
            line = inReader.readLine();
        }
        // wait for the process to finish and check the exit code
        exitCode = process.waitFor();
        try {
            // make sure that the error thread exits
            errThread.join();
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted while reading the error stream", ie);
        }
        completed.set(true);
        // The timeout thread handling is taken care of in the finally block.
        if (exitCode != 0) {
            throw new ExitCodeException(exitCode, errMsg.toString());
        }
    } catch (InterruptedException ie) {
        throw new IOException(ie.toString());
    } finally {
        if ((timeOutTimer != null) && !timedOut.get()) {
            timeOutTimer.cancel();
        }
        // close the input stream
        try {
            inReader.close();
        } catch (IOException ioe) {
            LOG.warn("Error while closing the input stream", ioe);
        }
        if (!completed.get()) {
            errThread.interrupt();
        }
        try {
            errReader.close();
        } catch (IOException ioe) {
            LOG.warn("Error while closing the error stream", ioe);
        }
        process.destroy();
        lastTime = System.currentTimeMillis();
    }
}
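The two Shell classes above are near-identical copies of the same idea: arm a one-shot timeout before launching the process, and disarm it in the finally block unless it has already fired. A distilled, self-contained sketch of that pattern (the names are illustrative, not the Hadoop API):

import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicBoolean;

public class OneShotTimeout {
    public static void main(String[] args) throws InterruptedException {
        final AtomicBoolean timedOut = new AtomicBoolean(false);
        final Thread worker = Thread.currentThread();
        Timer timeOutTimer = new Timer();
        timeOutTimer.schedule(new TimerTask() {
            @Override
            public void run() {
                timedOut.set(true);
                worker.interrupt(); // abort the run that exceeded its budget
            }
        }, 10_000L); // one-shot: fires once, 10 seconds from now
        try {
            Thread.sleep(100); // stand-in for waiting on the external process
        } finally {
            if (!timedOut.get()) {
                timeOutTimer.cancel(); // finished in time; disarm the timeout
            }
        }
    }
}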
From source file: com.delphix.session.test.ServiceTest.java

@Test
public void testSyncDispatchReset() {
    int numThreads = 8;

    // Configure the server
    Server server = serverManager.locate(delayService.getServiceName());

    HelloDelayService delay = (HelloDelayService) server.getService();
    delay.setDelay(1000);

    final Timer timer = new Timer();

    server.addListener(new SessionEventListener(new SessionEventRunnable() {
        @Override
        public void work() {
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    nexus.close();
                }
            }, 500);
        }
    }));

    // Create the session
    ClientConfig spec = initServiceSpec(new HelloDelayService(delayService));

    // Set a small fore channel queue depth to test sync dispatch
    ServiceOptions proposal = spec.getOptions();
    proposal.setOption(FORE_QUEUE_DEPTH, 2);
    proposal.setOption(SYNC_DISPATCH, true);

    ClientNexus client = clientManager.create(spec);

    login(client);

    // Issue commands over the fore channel
    issueCommands(client, numThreads, 4096, 0);

    // Wait for the test to complete
    awaitDone(numThreads + 1);

    timer.cancel();
}
From source file: gate.util.reporting.DocTimeReporter.java

/**
 * Calls store, calculate and printReport for generating the actual report.
 */
private void generateReport()
        throws BenchmarkReportInputFileFormatException, BenchmarkReportFileAccessException {
    Timer timer = null;
    try {
        TimerTask task = new FileWatcher(getBenchmarkFile()) {
            @Override
            protected void onChange(File file) {
                throw new BenchmarkReportExecutionException(
                        getBenchmarkFile() + " file has been modified while generating the report.");
            }
        };
        timer = new Timer();
        // repeat the check every second
        timer.schedule(task, new Date(), 1000);

        if (reportFile == null) {
            reportFile = new File(System.getProperty("java.io.tmpdir"),
                    "report." + ((printMedia.equals(MEDIA_HTML)) ? "html" : "txt"));
        }
        splitBenchmarkFile(getBenchmarkFile(), reportFile);
        if (validEntries == 0) {
            if (logicalStart != null) {
                throw new BenchmarkReportInputFileFormatException("No valid log entries present in "
                        + getBenchmarkFile() + " does not contain a marker named " + logicalStart + ".");
            } else {
                throw new BenchmarkReportInputFileFormatException(
                        "No valid log entries present in " + getBenchmarkFile().getAbsolutePath());
            }
        }
        File dir = temporaryDirectory;
        // Folder already exists; then delete all files in the temporary folder
        if (dir.isDirectory()) {
            File files[] = dir.listFiles();
            for (int count = 0; count < files.length; count++) {
                File inFile = files[count];
                Object report2Container1 = store(inFile);
                Object report2Container2 = calculate(report2Container1);
                printReport(report2Container2, reportFile);
            }
            if (files.length > 0 && files[0].exists()) {
                if (!files[0].delete()) {
                    System.err.println(files[0] + " was not possible to delete.");
                }
            }
        }
    } finally {
        if (timer != null) {
            timer.cancel();
        }
    }
}
From source file: org.pentaho.di.job.Job.java

/**
 * Handle logging at start
 *
 * @return true if it went OK.
 *
 * @throws KettleException
 */
public boolean beginProcessing() throws KettleException {
    currentDate = new Date();
    logDate = new Date();
    startDate = Const.MIN_DATE;
    endDate = currentDate;

    resetErrors();

    final JobLogTable jobLogTable = jobMeta.getJobLogTable();
    int intervalInSeconds = Const.toInt(environmentSubstitute(jobLogTable.getLogInterval()), -1);

    if (jobLogTable.isDefined()) {
        DatabaseMeta logcon = jobMeta.getJobLogTable().getDatabaseMeta();
        String schemaName = environmentSubstitute(jobMeta.getJobLogTable().getActualSchemaName());
        String tableName = environmentSubstitute(jobMeta.getJobLogTable().getActualTableName());
        String schemaAndTable = jobMeta.getJobLogTable().getDatabaseMeta()
                .getQuotedSchemaTableCombination(schemaName, tableName);
        Database ldb = new Database(this, logcon);
        ldb.shareVariablesWith(this);
        ldb.connect();
        ldb.setCommit(logCommitSize);

        try {
            // See if we have to add a batch id...
            Long id_batch = new Long(1);
            if (jobMeta.getJobLogTable().isBatchIdUsed()) {
                id_batch = logcon.getNextBatchId(ldb, schemaName, tableName,
                        jobLogTable.getKeyField().getFieldName());
                setBatchId(id_batch.longValue());
                if (getPassedBatchId() <= 0) {
                    setPassedBatchId(id_batch.longValue());
                }
            }

            Object[] lastr = ldb.getLastLogDate(schemaAndTable, jobMeta.getName(), true, LogStatus.END);
            if (!Const.isEmpty(lastr)) {
                Date last;
                try {
                    last = ldb.getReturnRowMeta().getDate(lastr, 0);
                } catch (KettleValueException e) {
                    throw new KettleJobException(
                            BaseMessages.getString(PKG, "Job.Log.ConversionError", "" + tableName), e);
                }
                if (last != null) {
                    startDate = last;
                }
            }

            depDate = currentDate;

            ldb.writeLogRecord(jobMeta.getJobLogTable(), LogStatus.START, this, null);
            if (!ldb.isAutoCommit()) {
                ldb.commitLog(true, jobMeta.getJobLogTable());
            }
            ldb.disconnect();

            // If we need to do periodic logging, make sure to install a timer for this...
            //
            if (intervalInSeconds > 0) {
                final Timer timer = new Timer(getName() + " - interval logging timer");
                TimerTask timerTask = new TimerTask() {
                    public void run() {
                        try {
                            endProcessing();
                        } catch (Exception e) {
                            log.logError(
                                    BaseMessages.getString(PKG, "Job.Exception.UnableToPerformIntervalLogging"),
                                    e);
                            // Also stop the show...
                            //
                            errors.incrementAndGet();
                            stopAll();
                        }
                    }
                };
                timer.schedule(timerTask, intervalInSeconds * 1000, intervalInSeconds * 1000);

                addJobListener(new JobAdapter() {
                    public void jobFinished(Job job) {
                        timer.cancel();
                    }
                });
            }

            // Add a listener at the end of the job to take care of writing the final job log record...
            //
            addJobListener(new JobAdapter() {
                public void jobFinished(Job job) throws KettleException {
                    try {
                        endProcessing();
                    } catch (KettleJobException e) {
                        log.logError(BaseMessages.getString(PKG, "Job.Exception.UnableToWriteToLoggingTable",
                                jobLogTable.toString()), e);
                        // Do not skip the exception here: the job fails if
                        // writing the log database record fails!
                        throw new KettleException(e);
                    }
                }
            });
        } catch (KettleDatabaseException dbe) {
            addErrors(1); // This is even before actual execution
            throw new KettleJobException(
                    BaseMessages.getString(PKG, "Job.Log.UnableToProcessLoggingStart", "" + tableName), dbe);
        } finally {
            ldb.disconnect();
        }
    }

    // If we need to write out the job entry logging information, do so at the end of the job:
    //
    JobEntryLogTable jobEntryLogTable = jobMeta.getJobEntryLogTable();
    if (jobEntryLogTable.isDefined()) {
        addJobListener(new JobAdapter() {
            public void jobFinished(Job job) throws KettleException {
                try {
                    writeJobEntryLogInformation();
                } catch (KettleException e) {
                    throw new KettleException(BaseMessages.getString(PKG,
                            "Job.Exception.UnableToPerformJobEntryLoggingAtJobEnd"), e);
                }
            }
        });
    }

    // If we need to write the log channel hierarchy and lineage information,
    // add a listener for that too...
    //
    ChannelLogTable channelLogTable = jobMeta.getChannelLogTable();
    if (channelLogTable.isDefined()) {
        addJobListener(new JobAdapter() {
            public void jobFinished(Job job) throws KettleException {
                try {
                    writeLogChannelInformation();
                } catch (KettleException e) {
                    throw new KettleException(
                            BaseMessages.getString(PKG, "Job.Exception.UnableToPerformLoggingAtTransEnd"), e);
                }
            }
        });
    }

    JobExecutionExtension extension = new JobExecutionExtension(this, result, null, false);
    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.JobBeginProcessing.id, extension);

    return true;
}
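In beginProcessing() above, the interval-logging timer outlives the method that created it and is cancelled from a completion callback. A self-contained sketch of that listener-driven cancellation (JobListener here is a stand-in for Kettle's JobAdapter, and the log record is simulated with a println):

import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;

public class ListenerCancelSketch {
    interface JobListener { void jobFinished(); }

    private final List<JobListener> listeners = new ArrayList<>();

    void beginProcessing(int intervalInSeconds) {
        final Timer timer = new Timer("interval logging timer");
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                System.out.println("periodic log record"); // stand-in for endProcessing()
            }
        }, intervalInSeconds * 1000L, intervalInSeconds * 1000L);
        listeners.add(() -> timer.cancel()); // disarm the timer when the job finishes
    }

    void finish() {
        for (JobListener l : listeners) l.jobFinished();
    }

    public static void main(String[] args) throws InterruptedException {
        ListenerCancelSketch job = new ListenerCancelSketch();
        job.beginProcessing(1);
        Thread.sleep(2500); // let the timer fire a couple of times
        job.finish(); // the listener cancels the timer, so the JVM can exit
    }
}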
From source file: org.jivesoftware.sparkimpl.updater.CheckUpdates.java

public void downloadUpdate(final File downloadedFile, final SparkVersion version) {
    final java.util.Timer timer = new java.util.Timer();

    // Prepare HTTP post
    final GetMethod post = new GetMethod(version.getDownloadURL());

    // Get HTTP client
    Protocol.registerProtocol("https", new Protocol("https", new EasySSLProtocolSocketFactory(), 443));
    final HttpClient httpclient = new HttpClient();
    String proxyHost = System.getProperty("http.proxyHost");
    String proxyPort = System.getProperty("http.proxyPort");
    if (ModelUtil.hasLength(proxyHost) && ModelUtil.hasLength(proxyPort)) {
        try {
            httpclient.getHostConfiguration().setProxy(proxyHost, Integer.parseInt(proxyPort));
        } catch (NumberFormatException e) {
            Log.error(e);
        }
    }

    // Execute request
    try {
        int result = httpclient.executeMethod(post);
        if (result != 200) {
            return;
        }
        long length = post.getResponseContentLength();
        int contentLength = (int) length;
        bar = new JProgressBar(0, contentLength);
    } catch (IOException e) {
        Log.error(e);
    }

    final JFrame frame = new JFrame(Res.getString("title.downloading.im.client"));
    frame.setIconImage(SparkRes.getImageIcon(SparkRes.SMALL_MESSAGE_IMAGE).getImage());

    titlePanel = new TitlePanel(Res.getString("title.upgrading.client"),
            Res.getString("message.version", version.getVersion()),
            SparkRes.getImageIcon(SparkRes.SEND_FILE_24x24), true);

    final Thread thread = new Thread(new Runnable() {
        public void run() {
            try {
                InputStream stream = post.getResponseBodyAsStream();
                long size = post.getResponseContentLength();
                ByteFormat formater = new ByteFormat();
                sizeText = formater.format(size);
                titlePanel.setDescription(Res.getString("message.version", version.getVersion()) + " \n"
                        + Res.getString("message.file.size", sizeText));
                downloadedFile.getParentFile().mkdirs();
                FileOutputStream out = new FileOutputStream(downloadedFile);
                copy(stream, out);
                out.close();
                if (!cancel) {
                    downloadComplete = true;
                    promptForInstallation(downloadedFile, Res.getString("title.download.complete"),
                            Res.getString("message.restart.spark"));
                } else {
                    out.close();
                    downloadedFile.delete();
                }
                UPDATING = false;
                frame.dispose();
            } catch (Exception ex) {
                // Nothing to do
            } finally {
                timer.cancel();
                // Release current connection to the connection pool once you are done
                post.releaseConnection();
            }
        }
    });

    frame.getContentPane().setLayout(new GridBagLayout());
    frame.getContentPane().add(titlePanel, new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0,
            GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(5, 5, 5, 5), 0, 0));
    frame.getContentPane().add(bar, new GridBagConstraints(0, 1, 1, 1, 1.0, 0.0, GridBagConstraints.WEST,
            GridBagConstraints.HORIZONTAL, new Insets(5, 5, 5, 5), 0, 0));

    JEditorPane pane = new JEditorPane();
    boolean displayContentPane = version.getChangeLogURL() != null || version.getDisplayMessage() != null;

    try {
        pane.setEditable(false);
        if (version.getChangeLogURL() != null) {
            pane.setEditorKit(new HTMLEditorKit());
            pane.setPage(version.getChangeLogURL());
        } else if (version.getDisplayMessage() != null) {
            pane.setText(version.getDisplayMessage());
        }
        if (displayContentPane) {
            frame.getContentPane().add(new JScrollPane(pane), new GridBagConstraints(0, 2, 1, 1, 1.0, 1.0,
                    GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(5, 5, 5, 5), 0, 0));
        }
    } catch (IOException e) {
        Log.error(e);
    }

    frame.getContentPane().setBackground(Color.WHITE);
    frame.pack();
    if (displayContentPane) {
        frame.setSize(600, 400);
    } else {
        frame.setSize(400, 100);
    }
    frame.setLocationRelativeTo(SparkManager.getMainWindow());
    GraphicUtils.centerWindowOnScreen(frame);

    frame.addWindowListener(new WindowAdapter() {
        public void windowClosing(WindowEvent windowEvent) {
            thread.interrupt();
            cancel = true;
            UPDATING = false;
            if (!downloadComplete) {
                JOptionPane.showMessageDialog(SparkManager.getMainWindow(),
                        Res.getString("message.updating.cancelled"), Res.getString("title.cancelled"),
                        JOptionPane.ERROR_MESSAGE);
            }
        }
    });

    frame.setVisible(true);
    thread.start();

    timer.scheduleAtFixedRate(new TimerTask() {
        int seconds = 1;

        public void run() {
            ByteFormat formatter = new ByteFormat();
            long value = bar.getValue();
            long average = value / seconds;
            String text = formatter.format(average) + "/Sec";
            String total = formatter.format(value);
            titlePanel.setDescription(Res.getString("message.version", version.getVersion()) + " \n"
                    + Res.getString("message.file.size", sizeText) + "\n"
                    + Res.getString("message.transfer.rate") + ": " + text + "\n"
                    + Res.getString("message.total.downloaded") + ": " + total);
            seconds++;
        }
    }, 1000, 1000);
}