List of usage examples for java.lang.Thread.setDaemon
public final void setDaemon(boolean on)
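The daemon flag must be set before the thread is started; calling setDaemon on a thread that is already alive throws IllegalThreadStateException, and the JVM exits once only daemon threads remain. A minimal, self-contained sketch of the typical pattern (the class and thread names here are illustrative, not taken from the examples below):

public class DaemonThreadSketch {
  public static void main(String[] args) throws InterruptedException {
    Thread housekeeping = new Thread(() -> {
      // background work the JVM should not wait for on shutdown
      while (!Thread.currentThread().isInterrupted()) {
        try {
          Thread.sleep(1000);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          return;
        }
      }
    }, "housekeeping");
    housekeeping.setDaemon(true); // must be called before start()
    housekeeping.start();
    Thread.sleep(3000);
    // main() returns here and the JVM exits even though "housekeeping" is still running
  }
}

The examples that follow come from real projects; most mark short-lived helper threads (loggers, pollers, flushers) as daemons so they never keep the application alive on exit.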
From source file:pt.lsts.neptus.console.bathymLayer.TidePanel.java
@Override
public void initSubPanel() {
  storedMenuPath = I18n.text("Tools") + ">" + I18n.text("Tides") + ">"
      + I18n.textf("Using '%file'", GeneralPreferences.tidesFile.getName());
  tidesItem = addMenuItem(storedMenuPath, null, new ActionListener() {
    @Override
    public void actionPerformed(ActionEvent e) {
      JMenuItem menu = (JMenuItem) e.getSource();
      Thread t = new Thread("Tide chooser") {
        @Override
        public void run() {
          try {
            File usedTidesSource = GeneralPreferences.tidesFile;
            String currentSource = usedTidesSource == null || !usedTidesSource.exists() ? null
                : usedTidesSource.getName();
            Date startDate = new Date(System.currentTimeMillis() - 2 * DateTimeUtil.DAY);
            Date endDate = new Date(System.currentTimeMillis() + 3 * DateTimeUtil.DAY);
            String harbor = TidePredictionFactory.showTidesSourceChooserGuiPopup(getConsole(),
                currentSource, startDate, endDate);
            if (harbor != null && !harbor.isEmpty()
                && TidePredictionFactory.getTidesSourceFileFrom(harbor).exists()) {
              GeneralPreferences.tidesFile = TidePredictionFactory.getTidesSourceFileFrom(harbor);
              GeneralPreferences.saveProperties();
              preferencesUpdated();
              // Force the tide file reload
              TidePredictionFactory.getTideLevel(System.currentTimeMillis());
            }
          } catch (Exception e) {
            e.printStackTrace();
          } finally {
            menu.setEnabled(true);
          }
        }
      };
      t.setDaemon(true);
      menu.setEnabled(false);
      // start(), not run(): run() would execute the task synchronously on the EDT
      // and the daemon flag would have no effect
      t.start();
    }
  });
  // tidesItem.setEnabled(false);
  addMenuItem(I18n.text("Tools") + ">" + I18n.text("Tides") + ">" + I18n.text("Update Predictions"), null,
      new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          JMenuItem menu = (JMenuItem) e.getSource();
          Thread t = new Thread("Tide fetcher") {
            public void run() {
              try {
                String harbor = TidePredictionFactory.fetchData(getConsole());
                if (harbor != null && !harbor.isEmpty()) {
                  File used = GeneralPreferences.tidesFile;
                  File f = new File(ConfigFetch.getConfFolder() + "/tides/" + harbor + "."
                      + TidePredictionFactory.defaultTideFormat);
                  if (f.exists() && !f.getAbsolutePath().equals(used.getAbsolutePath())) {
                    int resp = GuiUtils.confirmDialog(getConsole(), I18n.text("Tide Predictions"),
                        I18n.textf(
                            "The selected location does not match the current location in use (%harbor). Do you wish to set the current location as %selection?",
                            used.getName(), harbor + "." + TidePredictionFactory.defaultTideFormat),
                        ModalityType.DOCUMENT_MODAL);
                    if (resp == JOptionPane.YES_OPTION) {
                      GeneralPreferences.tidesFile = f;
                      GeneralPreferences.saveProperties();
                      preferencesUpdated();
                      // Force the tide file reload
                      TidePredictionFactory.getTideLevel(System.currentTimeMillis());
                    }
                  }
                }
              } catch (Exception e) {
                e.printStackTrace();
              } finally {
                menu.setEnabled(true);
              }
            }
          };
          t.setDaemon(true);
          menu.setEnabled(false);
          t.start();
        }
      });
  ts = new TimeSeries(I18n.text("Tide level"));
  tsc.addSeries(ts);
  for (double i = -12; i < 12; i += 0.25) {
    Date d = new Date(System.currentTimeMillis() + (long) (i * 1000 * 3600));
    ts.addOrUpdate(new Millisecond(d), TidePredictionFactory.getTideLevel(d));
  }
  timeSeriesChart.getXYPlot().addDomainMarker(marker);
  levelMarker.setValue(TidePredictionFactory.getTideLevel(new Date()));
  timeSeriesChart.getXYPlot().addRangeMarker(levelMarker);
}
From source file:com.intuit.tank.perfManager.workLoads.JobManager.java
private void startTest(final JobInfo info) {
  LOG.info("Sending start command asynchronously.");
  Thread t = new Thread(new Runnable() {
    @Override
    public void run() {
      LOG.info("Sleeping for one minute before starting test to give time for all agents to download files.");
      try {
        Thread.sleep(60000); // sixty seconds
      } catch (InterruptedException e) {
        // ignore
      }
      try {
        LOG.info("Sending start commands on executer.");
        List<FutureTask<AgentData>> futures = new ArrayList<FutureTask<AgentData>>();
        for (AgentData agent : info.agentData) {
          futures.add(sendCommand(agent, WatsAgentCommand.start, true));
        }
        LOG.info("waiting for agentFutures to return.");
        for (FutureTask<AgentData> future : futures) {
          AgentData dataFuture = future.get();
          if (dataFuture != null) {
            // error happened. TODO: message system that agent did not start.
            tracker.setStatus(crateFailureStatus(dataFuture));
            tracker.stopJob(info.jobRequest.getId());
          }
        }
        LOG.info("All agents received start command.");
      } catch (Exception e) {
        LOG.error("Error sending start: " + e, e);
        tracker.stopJob(info.jobRequest.getId());
      }
    }
  });
  t.setDaemon(true);
  t.start();
}
From source file:com.baifendian.swordfish.execserver.job.ProcessJob.java
/**
 * Reads the process's standard output and forwards buffered lines to the log handler.
 */
private void readProcessOutput() {
  String threadLoggerInfoName = String.format("LoggerInfo-%s", jobAppId);
  Thread loggerInfoThread = new Thread(() -> {
    BufferedReader reader = null;
    try {
      reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
      String line;
      long preFlushTime = System.currentTimeMillis();
      while ((line = reader.readLine()) != null) {
        logs.add(line);
        long now = System.currentTimeMillis();
        // flush when the buffer is full or the flush interval has elapsed
        if (logs.size() >= Constants.defaultLogBufferSize
            || now - preFlushTime > Constants.defaultLogFlushInterval) {
          preFlushTime = now;
          // hand the buffered lines to the log handler
          logHandler.accept(logs);
          logs.clear();
        }
      }
    } catch (Exception e) {
      // Do Nothing
    } finally {
      // flush any remaining buffered lines
      if (!logs.isEmpty()) {
        logHandler.accept(logs);
        logs.clear();
      }
      if (reader != null) {
        try {
          reader.close();
        } catch (IOException e) {
          logger.error(e.getMessage(), e);
        }
      }
    }
  }, threadLoggerInfoName);
  try {
    loggerInfoThread.setDaemon(true);
    loggerInfoThread.start();
  } catch (Exception e) {
    logger.error(e.getMessage(), e);
  }
}
From source file:com.couchbase.client.CouchbaseClient.java
/**
 * Flushes all caches from all servers after the given delay.
 *
 * @param delay the period of time to delay, in seconds
 * @return whether or not the operation was accepted
 */
@Override
public OperationFuture<Boolean> flush(final int delay) {
  final CountDownLatch latch = new CountDownLatch(1);
  final FlushRunner flushRunner = new FlushRunner(latch);

  final OperationFuture<Boolean> rv = new OperationFuture<Boolean>("", latch, operationTimeout) {

    private CouchbaseConnectionFactory factory = (CouchbaseConnectionFactory) connFactory;

    @Override
    public boolean cancel() {
      throw new UnsupportedOperationException("Flush cannot be canceled");
    }

    @Override
    public boolean isDone() {
      return flushRunner.status();
    }

    @Override
    public Boolean get(long duration, TimeUnit units)
        throws InterruptedException, TimeoutException, ExecutionException {
      if (!latch.await(duration, units)) {
        throw new TimeoutException("Flush not completed within timeout.");
      }
      return flushRunner.status();
    }

    @Override
    public Boolean get() throws InterruptedException, ExecutionException {
      try {
        return get(factory.getViewTimeout(), TimeUnit.MILLISECONDS);
      } catch (TimeoutException e) {
        throw new RuntimeException("Timed out waiting for operation", e);
      }
    }

    @Override
    public Long getCas() {
      throw new UnsupportedOperationException("Flush has no CAS value.");
    }

    @Override
    public String getKey() {
      throw new UnsupportedOperationException("Flush has no associated key.");
    }

    @Override
    public OperationStatus getStatus() {
      throw new UnsupportedOperationException("Flush has no OperationStatus.");
    }

    @Override
    public boolean isCancelled() {
      throw new UnsupportedOperationException("Flush cannot be canceled.");
    }
  };

  Thread flusher = new Thread(flushRunner, "Temporary Flusher");
  flusher.setDaemon(true);
  flusher.start();

  return rv;
}
From source file:com.twinsoft.convertigo.eclipse.wizards.setup.SetupWizard.java
public void postRegisterState(final String page) {
  if (!page.equals(previousPageName)) {
    previousPageName = page;
    Thread th = new Thread(new Runnable() {
      public void run() {
        synchronized (SetupWizard.this) {
          try {
            String[] url = { "http://www.google-analytics.com/collect" };
            HttpClient client = prepareHttpClient(url);
            PostMethod method = new PostMethod(url[0]);
            HeaderName.ContentType.setRequestHeader(method, MimeType.WwwForm.value());

            // set parameters for POST method
            method.setParameter("v", "1");
            method.setParameter("tid", "UA-660091-6");
            method.setParameter("cid", getUniqueID());
            method.setParameter("t", "pageview");
            method.setParameter("dh", "http://www.convertigo.com");
            method.setParameter("dp", "/StudioRegistrationWizard_" + page + ".html");
            method.setParameter("dt", page + "_" + ProductVersion.productVersion);

            // execute HTTP post with parameters
            if (client != null) {
              client.executeMethod(method);
            }
          } catch (Exception e) {
            // ConvertigoPlugin.logWarning(e, "Error while trying to send registration");
          }
        }
      }
    });
    th.setDaemon(true);
    th.setName("SetupWizard.register_steps");
    th.start();
  } else {
    previousPageName = page;
  }
}
From source file:org.dawnsci.commandserver.core.producer.AliveConsumer.java
protected void startNotifications() throws Exception {
  if (uri == null)
    throw new NullPointerException("Please set the URI before starting notifications!");
  this.cbean = new ConsumerBean();
  cbean.setStatus(ConsumerStatus.STARTING);
  cbean.setName(getName());
  cbean.setConsumerId(consumerId);
  cbean.setVersion(consumerVersion);
  cbean.setStartTime(System.currentTimeMillis());
  try {
    cbean.setHostName(InetAddress.getLocalHost().getHostName());
  } catch (UnknownHostException e) {
    // Not fatal but would be nice...
    e.printStackTrace();
  }

  System.out.println("Running events on topic " + Constants.ALIVE_TOPIC + " to notify of '" + getName()
      + "' service being available.");

  final Thread aliveThread = new Thread(new Runnable() {
    public void run() {
      try {
        ConnectionFactory connectionFactory = ConnectionFactoryFacade.createConnectionFactory(uri);
        aliveConnection = connectionFactory.createConnection();
        aliveConnection.start();

        final Session session = aliveConnection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        final Topic topic = session.createTopic(Constants.ALIVE_TOPIC);
        final MessageProducer producer = session.createProducer(topic);
        final ObjectMapper mapper = new ObjectMapper();

        // Here we are sending the message out to the topic
        while (isActive()) {
          try {
            Thread.sleep(Constants.NOTIFICATION_FREQUENCY);
            TextMessage temp = session.createTextMessage(mapper.writeValueAsString(cbean));
            producer.send(temp, DeliveryMode.NON_PERSISTENT, 1, 5000);
            if (ConsumerStatus.STARTING.equals(cbean.getStatus())) {
              cbean.setStatus(ConsumerStatus.RUNNING);
            }
          } catch (InterruptedException ne) {
            break;
          } catch (Exception neOther) {
            neOther.printStackTrace();
          }
        }
      } catch (Exception ne) {
        ne.printStackTrace();
        setActive(false);
      }
    }
  });
  aliveThread.setName("Alive Notification Topic");
  aliveThread.setDaemon(true);
  aliveThread.setPriority(Thread.MIN_PRIORITY);
  aliveThread.start();
}
From source file:com.actuate.development.tool.task.InstallBRDPro.java
private void interruptAntTaskErrorMessage(final Process process, final StringBuffer buffer) {
  Thread errThread = new Thread("Monitor Ant Task") {
    public void run() {
      try {
        BufferedReader input = new BufferedReader(new InputStreamReader(process.getErrorStream()));
        String line;
        while ((line = input.readLine()) != null) {
          if (buffer != null)
            buffer.append(line).append("\r\n");
        }
        input.close();
      } catch (Exception e) {
      }
    }
  };
  errThread.setDaemon(true);
  errThread.start();
}
From source file:com.blackberry.logdriver.admin.LogMaintenance.java
@Override
public int run(String[] args) throws Exception {
  Configuration conf = getConf();
  // If run by Oozie, then load the Oozie conf too
  if (System.getProperty("oozie.action.conf.xml") != null) {
    conf.addResource(new URL("file://" + System.getProperty("oozie.action.conf.xml")));
  }

  // For some reason, Oozie needs some options to be set in system instead of
  // in the configuration. So copy the configs over.
  {
    Iterator<Entry<String, String>> i = conf.iterator();
    while (i.hasNext()) {
      Entry<String, String> next = i.next();
      System.setProperty(next.getKey(), next.getValue());
    }
  }

  if (args.length < 3) {
    printUsage();
    return 1;
  }

  String userName = args[0];
  String dcNumber = args[1];
  String service = args[2];
  String date = null;
  String hour = null;
  if (args.length >= 4) {
    date = args[3];
  }
  if (args.length >= 5) {
    hour = args[4];
  }

  // Set from environment variables
  String mergeJobPropertiesFile = getConfOrEnv(conf, "MERGEJOB_CONF");
  String filterJobPropertiesFile = getConfOrEnv(conf, "FILTERJOB_CONF");
  String daysBeforeArchive = getConfOrEnv(conf, "DAYS_BEFORE_ARCHIVE");
  String daysBeforeDelete = getConfOrEnv(conf, "DAYS_BEFORE_DELETE");
  String maxConcurrentMR = getConfOrEnv(conf, "MAX_CONCURRENT_MR", "-1");
  String zkConnectString = getConfOrEnv(conf, "ZK_CONNECT_STRING");
  String logdir = getConfOrEnv(conf, "logdriver.logdir.name");
  boolean resetOrphanedJobs = Boolean.parseBoolean(getConfOrEnv(conf, "reset.orphaned.jobs", "true"));
  String rootDir = getConfOrEnv(conf, "service.root.dir");
  String maxTotalMR = getConfOrEnv(conf, "MAX_TOTAL_MR", "-1");

  boolean doMerge = true;
  boolean doArchive = true;
  boolean doDelete = true;

  if (zkConnectString == null) {
    LOG.error("ZK_CONNECT_STRING is not set. Exiting.");
    return 1;
  }
  if (mergeJobPropertiesFile == null) {
    LOG.info("MERGEJOB_CONF is not set. Not merging.");
    doMerge = false;
  }
  if (filterJobPropertiesFile == null) {
    LOG.info("FILTERJOB_CONF is not set. Not archiving.");
    doArchive = false;
  }
  if (daysBeforeArchive == null) {
    LOG.info("DAYS_BEFORE_ARCHIVE is not set. Not archiving.");
    doArchive = false;
  }
  if (doArchive && Integer.parseInt(daysBeforeArchive) < 0) {
    LOG.info("DAYS_BEFORE_ARCHIVE is negative. Not archiving.");
    doArchive = false;
  }
  if (daysBeforeDelete == null) {
    LOG.info("DAYS_BEFORE_DELETE is not set. Not deleting.");
    doDelete = false;
  }
  if (doDelete && Integer.parseInt(daysBeforeDelete) < 0) {
    LOG.info("DAYS_BEFORE_DELETE is negative. Not deleting.");
    doDelete = false;
  }
  if (logdir == null) {
    LOG.info("LOGDRIVER_LOGDIR_NAME is not set. Using default value of 'logs'.");
    logdir = "logs";
  }
  if (rootDir == null) {
    LOG.info("SERVICE_ROOT_DIR is not set. Using default value of 'service'.");
    rootDir = "/service";
  }

  // We can hang if this fails. So make sure we abort if it fails.
  fs = null;
  try {
    fs = FileSystem.get(conf);
    fs.exists(new Path("/")); // Test if it works.
  } catch (IOException e) {
    LOG.error("Error getting filesystem.", e);
    return 1;
  }

  // Create the LockUtil instance
  lockUtil = new LockUtil(zkConnectString);

  // Now it's safe to create our Job Runner
  JobRunner jobRunner = new JobRunner(Integer.parseInt(maxConcurrentMR), Integer.parseInt(maxTotalMR));
  Thread jobRunnerThread = new Thread(jobRunner);
  jobRunnerThread.setName("JobRunner");
  jobRunnerThread.setDaemon(false);
  jobRunnerThread.start();

  // Figure out what date we start filters on.
  String filterCutoffDate = "";
  if (doArchive) {
    Calendar cal = Calendar.getInstance();
    cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeArchive));
    filterCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
        (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
    LOG.info("Archiving logs from before {}", filterCutoffDate);
  }
  String deleteCutoffDate = "";
  if (doDelete) {
    Calendar cal = Calendar.getInstance();
    cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeDelete));
    deleteCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
        (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
    LOG.info("Deleting logs from before {}", deleteCutoffDate);
  }

  long now = System.currentTimeMillis();

  // Various exceptions have been popping up here. So make sure I catch them all.
  try {
    // Patterns to recognize hour, day and incoming directories, so that they
    // can be processed.
    Pattern datePathPattern;
    Pattern hourPathPattern;
    Pattern incomingPathPattern;
    Pattern dataPathPattern;
    Pattern archivePathPattern;
    Pattern workingPathPattern;
    if (hour != null) {
      datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
      hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
          + Pattern.quote(hour) + ")");
      incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
          + Pattern.quote(hour) + ")/([^/]+)/incoming");
      dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
          + Pattern.quote(hour) + ")/([^/]+)/data");
      archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
          + Pattern.quote(hour) + ")/([^/]+)/archive");
      workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
          + Pattern.quote(hour) + ")/([^/]+)/working/([^/]+)_(\\d+)");
    } else if (date != null) {
      datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
      hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})");
      incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
          + ")/(\\d{2})/([^/]+)/incoming");
      dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
          + ")/(\\d{2})/([^/]+)/data");
      archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
          + ")/(\\d{2})/([^/]+)/archive");
      workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
          + ")/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
    } else {
      datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})");
      hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})");
      incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/incoming");
      dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/data");
      archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/archive");
      workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
          + Pattern.quote(service) + "/" + Pattern.quote(logdir)
          + "/(\\d{8})/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
    }

    // Do a depth first search of the directory, processing anything that
    // looks interesting along the way
    Deque<Path> paths = new ArrayDeque<Path>();
    Path rootPath = new Path(rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/");
    paths.push(rootPath);

    while (paths.size() > 0) {
      Path p = paths.pop();
      LOG.debug("{}", p.toString());

      if (!fs.exists(p)) {
        continue;
      }

      FileStatus dirStatus = fs.getFileStatus(p);
      FileStatus[] children = fs.listStatus(p);
      boolean addChildren = true;

      boolean old = dirStatus.getModificationTime() < now - WAIT_TIME;
      LOG.debug(" Was last modified {}ms ago", now - dirStatus.getModificationTime());

      if (!old) {
        LOG.debug(" Skipping, since it's not old enough.");
      } else if ((!rootPath.equals(p)) && (children.length == 0
          || (children.length == 1 && children[0].getPath().getName().equals(READY_MARKER)))) {
        // old and no children? Delete!
        LOG.info(" Deleting empty directory {}", p.toString());
        fs.delete(p, true);
      } else {
        Matcher matcher = datePathPattern.matcher(p.toUri().getPath());
        if (matcher.matches()) {
          LOG.debug("Checking date directory");

          // If this is already done, then skip it. So only process if it
          // doesn't exist.
          if (fs.exists(new Path(p, READY_MARKER)) == false) {
            // Check each subdirectory. If they all have ready markers, then I
            // guess we're ready.
            boolean ready = true;
            for (FileStatus c : children) {
              if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                ready = false;
                break;
              }
            }
            if (ready) {
              fs.createNewFile(new Path(p, READY_MARKER));
            }
          }
        }

        matcher = hourPathPattern.matcher(p.toUri().getPath());
        if (matcher.matches()) {
          LOG.debug("Checking hour directory");

          // If this is already done, then skip it. So only process if it
          // doesn't exist.
          if (fs.exists(new Path(p, READY_MARKER)) == false) {
            // Check each subdirectory. If they all have ready markers, then I
            // guess we're ready.
            boolean ready = true;
            for (FileStatus c : children) {
              if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                ready = false;
                break;
              }
            }
            if (ready) {
              fs.createNewFile(new Path(p, READY_MARKER));
            }
          }
        }

        // Check to see if we have to run a merge
        matcher = incomingPathPattern.matcher(p.toUri().getPath());
        if (matcher.matches()) {
          LOG.debug("Checking incoming directory");
          String matchDate = matcher.group(1);
          String matchHour = matcher.group(2);
          String matchComponent = matcher.group(3);

          String timestamp = matchDate + matchHour;

          if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
            LOG.info("Deleting old directory: {}", p);
            fs.delete(p, true);
            addChildren = false;
          } else if (doMerge) {
            // old, looks right, and has children? Run it!
            boolean hasMatchingChildren = false;
            boolean subdirTooYoung = false;

            for (FileStatus child : children) {
              if (!hasMatchingChildren) {
                FileStatus[] grandchildren = fs.listStatus(child.getPath());
                for (FileStatus gc : grandchildren) {
                  if (VALID_FILE.matcher(gc.getPath().getName()).matches()) {
                    hasMatchingChildren = true;
                    break;
                  }
                }
              }
              if (!subdirTooYoung) {
                if (child.getModificationTime() >= now - WAIT_TIME) {
                  subdirTooYoung = true;
                  LOG.debug(" Subdir {} is too young.", child.getPath());
                }
              }
            }

            if (!hasMatchingChildren) {
              LOG.debug(" No files match the expected pattern ({})", VALID_FILE.pattern());
            }

            if (hasMatchingChildren && !subdirTooYoung) {
              LOG.info(" Run Merge job {} :: {} {} {} {} {}",
                  new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent });

              Properties jobProps = new Properties();
              jobProps.load(new FileInputStream(mergeJobPropertiesFile));

              jobProps.setProperty("jobType", "merge");
              jobProps.setProperty("rootDir", rootDir);
              jobProps.setProperty("dcNumber", dcNumber);
              jobProps.setProperty("service", service);
              jobProps.setProperty("date", matchDate);
              jobProps.setProperty("hour", matchHour);
              jobProps.setProperty("component", matchComponent);
              jobProps.setProperty("user.name", userName);
              jobProps.setProperty("logdir", logdir);

              jobRunner.submit(jobProps);

              addChildren = false;
            }
          }
        }

        // Check to see if we need to run a filter and archive
        matcher = dataPathPattern.matcher(p.toUri().getPath());
        if (matcher.matches()) {
          String matchDate = matcher.group(1);
          String matchHour = matcher.group(2);
          String matchComponent = matcher.group(3);

          String timestamp = matchDate + matchHour;

          if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
            LOG.info("Deleting old directory: {}", p);
            fs.delete(p, true);
            addChildren = false;
          } else if (doArchive && timestamp.compareTo(filterCutoffDate) < 0) {
            Properties jobProps = new Properties();
            jobProps.load(new FileInputStream(filterJobPropertiesFile));

            jobProps.setProperty("jobType", "filter");
            jobProps.setProperty("rootDir", rootDir);
            jobProps.setProperty("dcNumber", dcNumber);
            jobProps.setProperty("service", service);
            jobProps.setProperty("date", matchDate);
            jobProps.setProperty("hour", matchHour);
            jobProps.setProperty("component", matchComponent);
            jobProps.setProperty("user.name", userName);
            jobProps.setProperty("logdir", logdir);

            // Check to see if we should just keep all or delete all here.
            // The filter file should be here
            String appPath = jobProps.getProperty("oozie.wf.application.path");
            appPath = appPath.replaceFirst("\\$\\{.*?\\}", "");
            Path filterFile = new Path(appPath + "/" + conf.get("filter.definition.file", service + ".yaml"));
            LOG.info("Filter file is {}", filterFile);
            if (fs.exists(filterFile)) {
              List<BoomFilterMapper.Filter> filters = BoomFilterMapper.loadFilters(matchComponent,
                  fs.open(filterFile));

              if (filters == null) {
                LOG.warn(" Got null when getting filters. Not processing. {} :: {} {} {} {} {}",
                    new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent });
              } else if (filters.size() == 0) {
                LOG.warn(" Got no filters. Not processing. {} :: {} {} {} {} {}",
                    new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent });
              } else if (filters.size() == 1 && filters.get(0) instanceof BoomFilterMapper.KeepAllFilter) {
                LOG.info(" Keeping everything. {} :: {} {} {} {} {}",
                    new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent });
                // Move files from data to archive
                // delete it all!
                String destination = rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/"
                    + matchDate + "/" + matchHour + "/" + matchComponent + "/archive/";

                PathInfo pathInfo = new PathInfo();
                pathInfo.setDcNumber(dcNumber);
                pathInfo.setService(service);
                pathInfo.setLogdir(logdir);
                pathInfo.setDate(matchDate);
                pathInfo.setHour(matchHour);
                pathInfo.setComponent(matchComponent);

                try {
                  lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                  fs.mkdirs(new Path(destination));
                  for (FileStatus f : fs.listStatus(p)) {
                    fs.rename(f.getPath(), new Path(destination));
                  }
                } finally {
                  lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                }
              } else if (filters.size() == 1 && filters.get(0) instanceof BoomFilterMapper.DropAllFilter) {
                LOG.info(" Dropping everything. {} :: {} {} {} {} {}",
                    new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent });

                PathInfo pathInfo = new PathInfo();
                pathInfo.setDcNumber(dcNumber);
                pathInfo.setService(service);
                pathInfo.setLogdir(logdir);
                pathInfo.setDate(matchDate);
                pathInfo.setHour(matchHour);
                pathInfo.setComponent(matchComponent);

                try {
                  lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                  fs.delete(p, true);
                } finally {
                  lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                }
              } else {
                LOG.info(" Run Filter/Archive job {} :: {} {} {} {} {}",
                    new Object[] { p.toString(), dcNumber, service, matchDate, matchHour, matchComponent });
                jobRunner.submit(jobProps);
              }
            } else {
              LOG.warn("Skipping filter job, since no filter file exists");
            }

            addChildren = false;
          }
        }

        matcher = archivePathPattern.matcher(p.toUri().getPath());
        if (matcher.matches()) {
          String matchDate = matcher.group(1);
          String matchHour = matcher.group(2);

          String timestamp = matchDate + matchHour;

          if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
            LOG.info("Deleting old directory: {}", p);
            fs.delete(p, true);
            addChildren = false;
          }
        }

        matcher = workingPathPattern.matcher(p.toUri().getPath());
        if (matcher.matches()) {
          LOG.info(" Matches working pattern ({})", p);
          if (resetOrphanedJobs) {
            String matchDate = matcher.group(1);
            String matchHour = matcher.group(2);
            String matchComponent = matcher.group(3);

            // Move everything from working/xxx/incoming/ to incoming/
            PathInfo lockPathInfo = new PathInfo(logdir, rootDir + "/" + dcNumber + "/" + service + "/"
                + logdir + "/" + matchDate + "/" + matchHour + "/" + matchComponent);
            lockUtil.acquireWriteLock(lockUtil.getLockPath(lockPathInfo));

            FileStatus[] fileStatuses = fs.listStatus(new Path(p.toUri().getPath() + "/incoming/"));
            if (fileStatuses != null) {
              for (FileStatus fileStatus : fileStatuses) {
                Path toPath = new Path(fileStatus.getPath().getParent().getParent().getParent().getParent(),
                    "incoming/" + fileStatus.getPath().getName());

                LOG.info(" Moving data from {} to {}", fileStatus.getPath(), toPath);
                LOG.info(" mkdir {}", toPath);
                fs.mkdirs(toPath);

                Path fromDir = new Path(p.toUri().getPath(), "incoming/" + fileStatus.getPath().getName());
                LOG.info(" moving from {}", fromDir);
                FileStatus[] files = fs.listStatus(fromDir);
                if (files == null || files.length == 0) {
                  LOG.info(" Nothing to move from {}", fromDir);
                } else {
                  for (FileStatus f : files) {
                    LOG.info(" rename {} {}", f.getPath(), new Path(toPath, f.getPath().getName()));
                    fs.rename(f.getPath(), new Path(toPath, f.getPath().getName()));
                  }
                }

                LOG.info(" rm {}", fileStatus.getPath());
                fs.delete(fileStatus.getPath(), true);
              }

              lockUtil.releaseWriteLock(lockUtil.getLockPath(lockPathInfo));

              fs.delete(new Path(p.toUri().getPath()), true);
            }
          }

          addChildren = false;
        }
      }

      // Add any children which are directories to the stack.
      if (addChildren) {
        for (int i = children.length - 1; i >= 0; i--) {
          FileStatus child = children[i];
          if (child.isDirectory()) {
            paths.push(child.getPath());
          }
        }
      }
    }

    // Since we may have deleted a bunch of directories, delete any unused
    // locks from ZooKeeper.
    {
      LOG.info("Checking for unused locks in ZooKeeper");
      String scanPath = rootDir + "/" + dcNumber + "/" + service + "/" + logdir;
      if (date != null) {
        scanPath += "/" + date;
        if (hour != null) {
          scanPath += "/" + hour;
        }
      }

      List<LockInfo> lockInfo = lockUtil.scan(scanPath);

      for (LockInfo li : lockInfo) {
        // Check if the lock path still exists in HDFS. If it doesn't, then
        // delete it from ZooKeeper.
        String path = li.getPath();
        String hdfsPath = path.substring(LockUtil.ROOT.length());
        if (!fs.exists(new Path(hdfsPath))) {
          ZooKeeper zk = lockUtil.getZkClient();

          while (!path.equals(LockUtil.ROOT)) {
            try {
              zk.delete(path, -1);
            } catch (KeeperException.NotEmptyException e) {
              // That's fine. Just stop trying then.
              break;
            } catch (Exception e) {
              LOG.error("Caught exception trying to delete from ZooKeeper.", e);
              break;
            }
            LOG.info("Deleted from ZooKeeper: {}", path);
            path = path.substring(0, path.lastIndexOf('/'));
          }
        }
      }
    }

    // Now that we're done, wait for the Oozie Runner to stop, and print the
    // results.
    LOG.info("Waiting for Oozie jobs to complete.");
    jobRunner.shutdown();
    jobRunnerThread.join();
    LOG.info("Job Stats : Started={} Succeeded={} failed={} errors={}", new Object[] { jobRunner.getStarted(),
        jobRunner.getSucceeded(), jobRunner.getFailed(), jobRunner.getErrors() });

    lockUtil.close();

  } catch (Exception e) {
    LOG.error("Unexpected exception caught.", e);
    return 1;
  }

  return 0;
}
From source file:net.sf.profiler4j.console.util.task.LongTaskExecutorDialog.java
public void runTask(final LongTask task) {
  label.setText("Executing long-running task...");
  task.setDialog(this);
  Thread t = new Thread("PROFILER4J_TASK") {
    public void run() {
      log.debug("TASK STARTED");
      try {
        task.executeInBackground();
      } catch (final Exception e) {
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            log.error("Caught task error", e);
            error = e;
          }
        });
      } finally {
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            log.debug("TASK COMPLETED");
            if (error != null) {
              setDefaultCloseOperation(DISPOSE_ON_CLOSE);
              progressBar.setIndeterminate(false);
              setSize(486, 379);
              setVisible(true);
              toFront();
              ((JComponent) statusTextArea.getParent()).revalidate();
              StringWriter sw = new StringWriter();
              PrintWriter pw = new PrintWriter(sw);
              error.printStackTrace(pw);
              statusTextArea.setText(sw.toString());
              statusTextArea.setCaretPosition(0);
              task.setError(error);
            } else {
              setVisible(false);
              dispose();
            }
          }
        });
      }
    }
  };
  t.setName("LongTaskRunner");
  t.setPriority(Thread.MIN_PRIORITY);
  t.setDaemon(false);
  t.start();
  setLocation(getLocation().x, getLocation().y - 120);
  setVisible(true);
}