List of usage examples for java.util.Timer.scheduleAtFixedRate
public void scheduleAtFixedRate(TimerTask task, Date firstTime, long period)
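Before the project examples below, here is a minimal, self-contained sketch of the call (the class name and the five-second period are illustrative only). Several of the examples also use the overload scheduleAtFixedRate(TimerTask task, long delay, long period), which takes an initial delay in milliseconds instead of a start Date.

import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;

public class FixedRateDemo {
    public static void main(String[] args) {
        // Non-daemon timer thread keeps the JVM alive until cancel() is called.
        Timer timer = new Timer("demo-timer");
        TimerTask task = new TimerTask() {
            @Override
            public void run() {
                System.out.println("tick at " + new Date());
            }
        };
        // Start immediately, then repeat every 5 seconds at a fixed rate;
        // if an execution is delayed, later ones fire in quick succession to catch up.
        timer.scheduleAtFixedRate(task, new Date(), 5_000L);
    }
}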
From source file:edu.hawaii.soest.hioos.storx.StorXDispatcher.java
/**
 * The main method for running the code.
 *
 * @param args the command line list of string arguments; none are needed
 */
public static void main(String args[]) {

    try {
        // create a new instance of the StorXDispatcher object, and parse the
        // command line arguments as settings for this instance
        final StorXDispatcher storXDispatcher = new StorXDispatcher();

        // Handle ctrl-c's and other abrupt death signals to the process
        Runtime.getRuntime().addShutdownHook(new Thread() {
            // stop the streaming process
            public void run() {
                Collection sourceCollection = storXDispatcher.sourceMap.values();
                for (Iterator iterator = sourceCollection.iterator(); iterator.hasNext();) {
                    Object sourceObject = iterator.next();
                    try {
                        // disconnect StorX sources
                        StorXSource source = (StorXSource) sourceObject;
                        logger.info("Disconnecting source: " + source.getRBNBClientName());
                        source.stopConnection();
                    } catch (java.lang.ClassCastException cce) {
                        // disconnect ISUS sources
                        try {
                            ISUSSource source = (ISUSSource) sourceObject;
                            logger.info("Disconnecting source: " + source.getRBNBClientName());
                            source.stopConnection();
                        } catch (java.lang.ClassCastException cce2) {
                            // disconnect CTD sources
                            CTDSource source = (CTDSource) sourceObject;
                            logger.info("Disconnecting source: " + source.getRBNBClientName());
                            source.stopConnection();
                        } // end try/catch
                    } // end try/catch
                } // end for()
            } // end run()
        } // end new Thread()
        ); // end addShutDownHook()

        // parse the command line arguments to configure the connection, then
        // start the streaming connection between the source and the RBNB server.
        if (storXDispatcher.parseArgs(args) && storXDispatcher.parseConfiguration()) {

            // establish the individual source connections with the RBNB
            if (storXDispatcher.connect()) {

                // fetch data on a schedule
                TimerTask fetchData = new TimerTask() {
                    public void run() {
                        logger.debug("TimerTask.run() called.");
                        storXDispatcher.execute();
                    }
                };

                Timer executeTimer = new Timer("Execute Timer");

                // run the fetchData timer task at the default interval
                executeTimer.scheduleAtFixedRate(fetchData, new Date(), storXDispatcher.executeInterval);

            } else {
                logger.info("Could not establish a connection to the DataTurbine. Exiting.");
                System.exit(0);
            }
        }

    } catch (Exception e) {
        logger.info("Error in main(): " + e.getMessage());
        e.printStackTrace();
    }
}
From source file:ctlogger.CTlogger.java
public static void main(String args[]) {
    /*
     * Original code for command line parsing
     * (This has been replaced by code using Apache Commons CLI, see below)
     *
    String helpMsg = "CTlogger -x -r -z -g -k <skiplines> -f <flush_sec> -p <poll_sec> -n <nanVal> -i <leadingID> -s <SourceName> -H <HeaderLine> <logger.dat> <CTfolder>";
    int dirArg = 0;
    while((dirArg<args.length) && args[dirArg].startsWith("-")) { // arg parsing
        if(args[dirArg].equals("-h")) { System.err.println(helpMsg); System.exit(0); }
        if(args[dirArg].equals("-x")) { debug = true; }
        if(args[dirArg].equals("-b")) { noBackwards = true; }
        if(args[dirArg].equals("-g")) { gzipmode = true; }      // default false
        if(args[dirArg].equals("-a")) { appendMode = false; }   // default true
        if(args[dirArg].equals("-z")) { zipmode = false; }      // default true
        if(args[dirArg].equals("-N")) { newFileMode = true; }   // default false
        if(args[dirArg].equals("-f")) { autoflush = Long.parseLong(args[++dirArg]); }
        if(args[dirArg].equals("-p")) { pollInterval = Long.parseLong(args[++dirArg]); }
        if(args[dirArg].equals("-k")) { skipLines = Long.parseLong(args[++dirArg]); }
        if(args[dirArg].equals("-r")) { repeatFetch = true; }
        if(args[dirArg].equals("-B")) { blockMode = true; }
        if(args[dirArg].equals("-t")) { storeTime = true; }
        if(args[dirArg].equals("-T")) { trimTime = Double.parseDouble(args[++dirArg]); }
        if(args[dirArg].equals("-n")) { nanVal = args[++dirArg]; }
        if(args[dirArg].equals("-i")) { leadingID = args[++dirArg]; }
        if(args[dirArg].equals("-s")) { SourceName = args[++dirArg]; }
        if(args[dirArg].equals("-H")) { HeaderLine = args[++dirArg]; }
        dirArg++;
    }
    if(args.length < (dirArg+2)) { System.err.println(helpMsg); System.exit(0); }
    loggerFileName = args[dirArg++]; // args[0]: logger.dat file
    CTrootfolder = args[dirArg++];   // args[1]: CT destination folder
    */

    //
    // Parse command line arguments
    //
    // 1. Setup command line options
    //
    Options options = new Options();
    // Boolean options (only the flag, no argument)
    options.addOption("h", "help", false, "Print this message");
    options.addOption("x", "debug", false, "turn on debug output");
    options.addOption("b", "nobackwards", false, "no backwards-going time allowed");
    options.addOption("g", "gzipmode", false, "turn on gzip for extra compression");
    options.addOption("a", "noappend", false, "turn off append mode (i.e., do not append to end of existing CT data)");
    options.addOption("z", "nozip", false, "turn off zip mode (it is on by default)");
    options.addOption("N", "newfilemode", false, "re-parse entire logger file every time it is checked");
    options.addOption("r", "repeatFetch", false, "turn on repeat fetch (auto-fetch data loop)");
    options.addOption("B", "blockMode", false, "turn on CloudTurbine writer block mode (multiple points per output data file, packed data)");
    options.addOption("t", "storeTime", false, "store time string as a channel; time is the first data entry in each line; if this option is not specified, then the time channel is skipped/not saved to CloudTurbine");
    // Options with an argument
    Option outputFolderOption = Option.builder("f").argName("autoflush").hasArg()
            .desc("flush interval (sec); default = \"" + autoflush + "\"").build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("p").argName("pollInterval").hasArg()
            .desc("if repeatFetch option has been specified, recheck the logger data file at this polling interval (sec); default = \"" + pollInterval + "\"")
            .build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("k").argName("skipLines").hasArg()
            .desc("in logger file, the num lines to skip after the header line to get to the first line of data; default = \"" + skipLines + "\"")
            .build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("T").argName("trimTime").hasArg()
            .desc("trim (ring-buffer loop) time (sec) (trimTime=0 for indefinite); default = \"" + trimTime + "\"")
            .build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("n").argName("nanVal").hasArg()
            .desc("replace NAN with this; default = \"" + nanVal + "\"").build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("i").argName("leadingID").hasArg()
            .desc("leading ID string (IWG1 compliant)").build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("s").argName("sourceName").hasArg()
            .desc("CloudTurbine source name; default = \"" + SourceName + "\"").build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("H").argName("HeaderLine").hasArg()
            .desc("optional CSV list of channel names; if not supplied, this is read from the first line in the logger file")
            .build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("l").argName("loggerfilename").hasArg()
            .desc("name of the logger data file; required argument").build();
    options.addOption(outputFolderOption);
    outputFolderOption = Option.builder("o").longOpt("outputfolder").argName("folder").hasArg()
            .desc("Location of output files (source is created under this folder); default = " + CTrootfolder)
            .build();
    options.addOption(outputFolderOption);

    //
    // 2. Parse command line options
    //
    CommandLineParser parser = new DefaultParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
    } catch (org.apache.commons.cli.ParseException exp) {
        // oops, something went wrong
        System.err.println("Command line argument parsing failed: " + exp.getMessage());
        return;
    }

    //
    // 3. Retrieve the command line values
    //
    if (line.hasOption("help")) {
        // Display help message and quit
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("CTlogger", options);
        return;
    }
    debug = line.hasOption("x");
    noBackwards = line.hasOption("b");
    gzipmode = line.hasOption("g");
    appendMode = !line.hasOption("a");
    zipmode = !line.hasOption("z");
    newFileMode = line.hasOption("N");
    repeatFetch = line.hasOption("r");
    blockMode = line.hasOption("B");
    storeTime = line.hasOption("t");
    autoflush = Long.parseLong(line.getOptionValue("f", Long.toString(autoflush)));
    pollInterval = Long.parseLong(line.getOptionValue("p", Long.toString(pollInterval)));
    skipLines = Long.parseLong(line.getOptionValue("k", Long.toString(skipLines)));
    trimTime = Double.parseDouble(line.getOptionValue("T", Double.toString(trimTime)));
    nanVal = line.getOptionValue("n", nanVal);
    if (line.hasOption("i")) {
        leadingID = line.getOptionValue("i");
    }
    SourceName = line.getOptionValue("s", SourceName);
    if (line.hasOption("H")) {
        HeaderLine = line.getOptionValue("H");
    }
    if (line.hasOption("l")) {
        loggerFileName = line.getOptionValue("l");
    } else {
        System.err.println("ERROR: you must supply the logger file name.");
        return;
    }
    CTrootfolder = line.getOptionValue("o", CTrootfolder);

    if (!debug) {
        System.err.println("CTlogger: " + loggerFileName + ", CTrootfolder: " + CTrootfolder
                + ", pollInterval: " + pollInterval);
    } else {
        System.err.println("debug = " + debug);
        System.err.println("noBackwards = " + noBackwards);
        System.err.println("gzipmode = " + gzipmode);
        System.err.println("appendMode = " + appendMode);
        System.err.println("zipmode = " + zipmode);
        System.err.println("newFileMode = " + newFileMode);
        System.err.println("repeatFetch = " + repeatFetch);
        System.err.println("blockMode = " + blockMode);
        System.err.println("storeTime = " + storeTime);
        System.err.println("autoflush = " + autoflush);
        System.err.println("pollInterval = " + pollInterval);
        System.err.println("skipLines = " + skipLines);
        System.err.println("trimTime = " + trimTime);
        System.err.println("nanVal = " + nanVal);
        System.err.println("leadingID = " + leadingID);
        System.err.println("SourceName = " + SourceName);
        System.err.println("HeaderLine = " + HeaderLine);
        System.err.println("loggerFileName = " + loggerFileName);
        System.err.println("CTrootfolder = " + CTrootfolder);
    }

    //
    // Run CTlogger
    //
    if (!repeatFetch)
        getData(true); // run once
    else {
        Timer timer = new Timer();
        TimerTask fetchTask = new TimerTask() {
            @Override
            public void run() {
                if (newFileMode)
                    getData(true);
                else if (getData(false)) { // pick up from old data if you can
                    System.err.println("Failed to pick up from old data, refetch from start of file...");
                    boolean status = getData(true);
                    System.err.println("refetch status: " + status);
                }
                if (debug)
                    System.err.println("Waiting for data, pollInterval: " + pollInterval + " sec...");
            };
        };
        // repeatFetch@autoflush interval, convert to msec
        if ((autoflush > 0) && (pollInterval > autoflush))
            pollInterval = autoflush;
        timer.scheduleAtFixedRate(fetchTask, 0, pollInterval * 1000);
    }
}
From source file:org.apache.metron.performance.load.LoadGenerator.java
public static void main(String[] args) throws Exception {
    CommandLine cli = LoadOptions.parse(new PosixParser(), args);
    EnumMap<LoadOptions, Optional<Object>> evaluatedArgs = LoadOptions.createConfig(cli);
    Map<String, Object> kafkaConfig = new HashMap<>();
    kafkaConfig.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    kafkaConfig.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    kafkaConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    kafkaConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    if (LoadOptions.ZK.has(cli)) {
        String zkQuorum = (String) evaluatedArgs.get(LoadOptions.ZK).get();
        kafkaConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                Joiner.on(",").join(KafkaUtils.INSTANCE.getBrokersFromZookeeper(zkQuorum)));
    }
    String groupId = evaluatedArgs.get(LoadOptions.CONSUMER_GROUP).get().toString();
    System.out.println("Consumer Group: " + groupId);
    kafkaConfig.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
    if (LoadOptions.KAFKA_CONFIG.has(cli)) {
        kafkaConfig.putAll((Map<String, Object>) evaluatedArgs.get(LoadOptions.KAFKA_CONFIG).get());
    }
    kafkaProducer = ThreadLocal.withInitial(() -> new KafkaProducer<>(kafkaConfig));
    int numThreads = (int) evaluatedArgs.get(LoadOptions.NUM_THREADS).get();
    System.out.println("Thread pool size: " + numThreads);
    pool = Executors.newFixedThreadPool(numThreads);
    Optional<Object> eps = evaluatedArgs.get(LoadOptions.EPS);
    Optional<Object> outputTopic = evaluatedArgs.get(LoadOptions.OUTPUT_TOPIC);
    Optional<Object> monitorTopic = evaluatedArgs.get(LoadOptions.MONITOR_TOPIC);
    long sendDelta = (long) evaluatedArgs.get(LoadOptions.SEND_DELTA).get();
    long monitorDelta = (long) evaluatedArgs.get(LoadOptions.MONITOR_DELTA).get();
    if ((eps.isPresent() && outputTopic.isPresent()) || monitorTopic.isPresent()) {
        Timer timer = new Timer(false);
        long startTimeMs = System.currentTimeMillis();
        if (outputTopic.isPresent() && eps.isPresent()) {
            List<String> templates = (List<String>) evaluatedArgs.get(LoadOptions.TEMPLATE).get();
            if (templates.isEmpty()) {
                System.out.println("Empty templates, so nothing to do.");
                return;
            }
            Optional<Object> biases = evaluatedArgs.get(LoadOptions.BIASED_SAMPLE);
            Sampler sampler = new UnbiasedSampler();
            if (biases.isPresent()) {
                sampler = new BiasedSampler((List<Map.Entry<Integer, Integer>>) biases.get(), templates.size());
            }
            MessageGenerator generator = new MessageGenerator(templates, sampler);
            Long targetLoad = (Long) eps.get();
            int periodsPerSecond = (int) (1000 / sendDelta);
            long messagesPerPeriod = targetLoad / periodsPerSecond;
            String outputTopicStr = (String) outputTopic.get();
            System.out.println("Generating data to " + outputTopicStr + " at " + targetLoad + " events per second");
            System.out.println("Sending " + messagesPerPeriod + " messages to " + outputTopicStr + " every "
                    + sendDelta + "ms");
            timer.scheduleAtFixedRate(new SendToKafka(outputTopicStr, messagesPerPeriod, numThreads, generator,
                    pool, numSent, kafkaProducer), 0, sendDelta);
        }
        List<AbstractMonitor> monitors = new ArrayList<>();
        if (outputTopic.isPresent() && monitorTopic.isPresent()) {
            System.out.println("Monitoring " + monitorTopic.get() + " every " + monitorDelta + " ms");
            monitors.add(new EPSGeneratedMonitor(outputTopic, numSent));
            monitors.add(new EPSThroughputWrittenMonitor(monitorTopic, kafkaConfig));
        } else if (outputTopic.isPresent() && !monitorTopic.isPresent()) {
            System.out.println("Monitoring " + outputTopic.get() + " every " + monitorDelta + " ms");
            monitors.add(new EPSGeneratedMonitor(outputTopic, numSent));
            monitors.add(new EPSThroughputWrittenMonitor(outputTopic, kafkaConfig));
        } else if (!outputTopic.isPresent() && monitorTopic.isPresent()) {
            System.out.println("Monitoring " + monitorTopic.get() + " every " + monitorDelta + " ms");
            monitors.add(new EPSThroughputWrittenMonitor(monitorTopic, kafkaConfig));
        } else if (!outputTopic.isPresent() && !monitorTopic.isPresent()) {
            System.out.println("You have not specified an output topic or a monitoring topic, so I have nothing to do here.");
        }
        int lookback = (int) evaluatedArgs.get(LoadOptions.SUMMARY_LOOKBACK).get();
        if (lookback > 0) {
            System.out.println("Summarizing over the last " + lookback + " monitoring periods ("
                    + lookback * monitorDelta + "ms)");
        } else {
            System.out.println("Turning off summarization.");
        }
        final CSVWriter csvWriter = new CSVWriter((File) evaluatedArgs.get(LoadOptions.CSV).orElse(null));
        Writer writer = new Writer(monitors, lookback, new ArrayList<Consumer<Writable>>() {
            {
                add(new ConsoleWriter());
                add(csvWriter);
            }
        });
        timer.scheduleAtFixedRate(new MonitorTask(writer), 0, monitorDelta);
        Optional<Object> timeLimit = evaluatedArgs.get(LoadOptions.TIME_LIMIT);
        if (timeLimit.isPresent()) {
            System.out.println("Ending in " + timeLimit.get() + " ms.");
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    timer.cancel();
                    long durationS = (System.currentTimeMillis() - startTimeMs) / 1000;
                    System.out.println("\nGenerated " + numSent.get() + " in " + durationS + " seconds.");
                    csvWriter.close();
                    System.exit(0);
                }
            }, (Long) timeLimit.get());
        }
    }
}
From source file:CSSDFarm.UserInterface.java
/**
 * @param args the command line arguments
 */
public static void main(String args[]) {
    /* Set the Nimbus look and feel */
    //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
    /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
     * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
     */
    try {
        for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) {
                javax.swing.UIManager.setLookAndFeel(info.getClassName());
                break;
            }
        }
    } catch (ClassNotFoundException ex) {
        java.util.logging.Logger.getLogger(UserInterface.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (InstantiationException ex) {
        java.util.logging.Logger.getLogger(UserInterface.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (IllegalAccessException ex) {
        java.util.logging.Logger.getLogger(UserInterface.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    } catch (javax.swing.UnsupportedLookAndFeelException ex) {
        java.util.logging.Logger.getLogger(UserInterface.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
    }
    //</editor-fold>

    /* Create and display the form */
    UserInterface userint = new UserInterface();
    java.awt.EventQueue.invokeLater(new Runnable() {
        public void run() {
            userint.setVisible(true);
        }
    });

    Timer t = new Timer();
    t.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            // ADD INTERVALL CALLS HERE
            Vector<FieldStation> fieldStations = server.loadData();
            if (fieldStations != null) {
                for (FieldStation aFieldStations : fieldStations) {
                    for (Sensor aSensor : aFieldStations.getSetOfSensors().getSensors()) {
                        if (aSensor.onInterval()) {
                            if (server.getServerIsOn()) {
                                userint.updateReport();
                            }
                        }
                    }
                }
            }
        }
    }, 1000, 1000);
}
From source file:com.pureinfo.force.util.TimerUtil.java
/**
 * @param _sTime must be "HH:mm" format
 */
public static Timer scheduleFrom(String _sTime, long _lPeriod, TimerTask _task) {
    Calendar nowTime = new GregorianCalendar();
    Calendar firstTime = getTimeToday(nowTime, _sTime);
    firstTime.setTimeInMillis(getNext(firstTime.getTimeInMillis(), nowTime.getTimeInMillis(), _lPeriod));
    Timer timer = new Timer();
    timer.scheduleAtFixedRate(_task, firstTime.getTime(), _lPeriod);
    return timer;
}
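A hypothetical call to the helper above (the time string, period, and task body are made up for illustration): start at the next 03:00-aligned slot, then repeat hourly.

TimerTask cleanup = new TimerTask() {
    @Override
    public void run() {
        System.out.println("hourly cleanup");
    }
};
// First run aligned from 03:00 today, then every 3,600,000 ms (one hour).
Timer t = TimerUtil.scheduleFrom("03:00", 60 * 60 * 1000L, cleanup);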
From source file:org.apache.pulsar.functions.runtime.LocalRunner.java
protected static void startLocalRun(org.apache.pulsar.functions.proto.Function.FunctionDetails functionDetails,
        int parallelism, int instanceIdOffset, String brokerServiceUrl, String stateStorageServiceUrl,
        AuthenticationConfig authConfig, String userCodeFile) throws Exception {

    String serviceUrl = DEFAULT_SERVICE_URL;
    if (brokerServiceUrl != null) {
        serviceUrl = brokerServiceUrl;
    }

    try (ProcessRuntimeFactory containerFactory = new ProcessRuntimeFactory(serviceUrl, stateStorageServiceUrl,
            authConfig,
            null, /* java instance jar file */
            null, /* python instance file */
            null, /* log directory */
            null, /* extra dependencies dir */
            new DefaultSecretsProviderConfigurator())) {

        List<RuntimeSpawner> spawners = new LinkedList<>();
        for (int i = 0; i < parallelism; ++i) {
            InstanceConfig instanceConfig = new InstanceConfig();
            instanceConfig.setFunctionDetails(functionDetails);
            // TODO: correctly implement function version and id
            instanceConfig.setFunctionVersion(UUID.randomUUID().toString());
            instanceConfig.setFunctionId(UUID.randomUUID().toString());
            instanceConfig.setInstanceId(i + instanceIdOffset);
            instanceConfig.setMaxBufferedTuples(1024);
            instanceConfig.setPort(Utils.findAvailablePort());
            instanceConfig.setClusterName("local");
            RuntimeSpawner runtimeSpawner = new RuntimeSpawner(instanceConfig, userCodeFile, null,
                    containerFactory, 30000);
            spawners.add(runtimeSpawner);
            runtimeSpawner.start();
        }

        java.lang.Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                log.info("Shutting down the localrun runtimeSpawner ...");
                for (RuntimeSpawner spawner : spawners) {
                    spawner.close();
                }
            }
        });

        Timer statusCheckTimer = new Timer();
        statusCheckTimer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                CompletableFuture<String>[] futures = new CompletableFuture[spawners.size()];
                int index = 0;
                for (RuntimeSpawner spawner : spawners) {
                    futures[index] = spawner.getFunctionStatusAsJson(index);
                    index++;
                }
                try {
                    CompletableFuture.allOf(futures).get(5, TimeUnit.SECONDS);
                    for (index = 0; index < futures.length; ++index) {
                        String json = futures[index].get();
                        Gson gson = new GsonBuilder().setPrettyPrinting().create();
                        log.info(gson.toJson(new JsonParser().parse(json)));
                    }
                } catch (Exception ex) {
                    log.error("Could not get status from all local instances");
                }
            }
        }, 30000, 30000);

        java.lang.Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                statusCheckTimer.cancel();
            }
        });

        for (RuntimeSpawner spawner : spawners) {
            spawner.join();
            log.info("RuntimeSpawner quit because of", spawner.getRuntime().getDeathException());
        }
    }
}
From source file:de.eod.jliki.users.utils.UserDBHelper.java
/**
 * Initializes the database for user management.<br/>
 */
public static void initializeDB() {
    final Permission adminsDocroot = new Permission("*", PermissionCategoryMap.CATEGORY_DOCROOT,
            PermissionType.READWRITER);
    final Permission adminsFiles = new Permission("*", PermissionCategoryMap.CATEGORY_FILES,
            PermissionType.READWRITER);
    final Permission adminDocroot = new Permission("*", PermissionCategoryMap.CATEGORY_DOCROOT,
            PermissionType.OWNER);
    final Permission adminFiles = new Permission("*", PermissionCategoryMap.CATEGORY_FILES,
            PermissionType.OWNER);

    final UserGroup admins = new UserGroup("admins");
    admins.addPermission(new Permission("*", PermissionCategoryMap.CATEGORY_CONFIG, PermissionType.READWRITER));
    admins.addPermission(adminsDocroot);
    admins.addPermission(adminsFiles);

    final User admin = new User("admin", "password", "", "", "");
    admin.addPermission(new Permission("*", PermissionCategoryMap.CATEGORY_CONFIG, PermissionType.OWNER));
    admin.addPermission(new Permission("*", PermissionCategoryMap.CATEGORY_DOCROOT, PermissionType.OWNER));
    admin.addPermission(new Permission("*", PermissionCategoryMap.CATEGORY_FILES, PermissionType.OWNER));

    final UserGroup users = new UserGroup("users");
    // duplicates can occur here ...
    final Permission usersDocroot = ConfigManager.getInstance().getConfig().getBaseConfig()
            .userDocrootPermission();
    final Permission usersFiles = ConfigManager.getInstance().getConfig().getBaseConfig().userFilePermission();

    if (usersDocroot.equals(adminsDocroot) && usersDocroot.getType() == adminsDocroot.getType()) {
        users.addPermission(adminsDocroot);
    } else if (usersDocroot.equals(adminDocroot) && usersDocroot.getType() == adminDocroot.getType()) {
        users.addPermission(adminDocroot);
    } else {
        users.addPermission(usersDocroot);
    }

    if (usersFiles.equals(adminsFiles) && usersFiles.getType() == adminsFiles.getType()) {
        users.addPermission(adminsFiles);
    } else if (usersFiles.equals(adminFiles) && usersFiles.getType() == adminFiles.getType()) {
        users.addPermission(adminFiles);
    } else {
        users.addPermission(usersFiles);
    }

    admin.getGroups().add(admins);
    admin.getGroups().add(users);
    admin.setActive(ActiveState.ACTIVE);
    UserDBHelper.addUserToDB(admin);

    final Timer timer = new Timer();
    final TimerTask task = new UserDBScheduler();
    task.run();
    final int onceADay = 1000 * 60 * 60 * 24;
    timer.scheduleAtFixedRate(new UserDBScheduler(), new Date(), onceADay);
}
From source file:org.regenstrief.util.StopWatch.java
/**
 * Causes the given TimerTask to run every midnight, useful for servers that run constantly.
 *
 * @param task the TimerTask
 * @return the Timer running the task
 **/
public final static java.util.Timer scheduleNightlyTask(final TimerTask task) {
    final java.util.Timer timer = new java.util.Timer(true);
    final Calendar start = Calendar.getInstance();
    start.setTimeInMillis(start.getTimeInMillis() + MILLIS_PER_DAY);
    start.set(Calendar.HOUR_OF_DAY, 0);
    start.set(Calendar.MINUTE, 0);
    // Might want to parameterize start time and frequency
    timer.scheduleAtFixedRate(task, start.getTime(), MILLIS_PER_DAY);
    return timer;
}
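A hypothetical caller of the method above; the task body is illustrative only.

// Run nightly maintenance just after midnight every day.
Timer nightly = StopWatch.scheduleNightlyTask(new TimerTask() {
    @Override
    public void run() {
        System.out.println("running nightly maintenance"); // e.g., roll logs, purge caches
    }
});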
From source file:org.wso2.carbon.analytics.dataservice.core.indexing.AnalyticsDataIndexingStatsCollector.java
public AnalyticsDataIndexingStatsCollector() {
    Timer timer = new Timer(true);
    timer.scheduleAtFixedRate(this, INTERVAL, INTERVAL);
}
From source file:org.apache.synapse.transport.dynamicconfigurations.FileUpdateNotificationHandler.java
/**
 * Schedule the handler with given interval to check files for changes
 *
 * @param interval Long time interval for the timer
 */
private void scheduleTimer(long interval) {
    Timer timer = new Timer(true);
    timer.scheduleAtFixedRate(this, interval, interval);
}
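The last two examples pass this as the task because the enclosing class itself extends TimerTask. A minimal sketch of that pattern, assuming a made-up ConfigWatcher class and interval:

import java.util.Timer;
import java.util.TimerTask;

public class ConfigWatcher extends TimerTask {

    public ConfigWatcher(long intervalMs) {
        // Daemon timer so the watcher does not keep the JVM alive at shutdown.
        Timer timer = new Timer(true);
        timer.scheduleAtFixedRate(this, intervalMs, intervalMs);
    }

    @Override
    public void run() {
        System.out.println("checking watched files for changes");
    }
}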