List of usage examples for java.util Calendar HOUR
int HOUR
To view the source code for java.util Calendar HOUR, click the source link.
Calendar.HOUR is the field number for get and set indicating the hour of the morning or afternoon. HOUR is used for the 12-hour clock and ranges from 0 to 11 (use HOUR_OF_DAY for the 24-hour clock).

From source file: cognitivabrasil.obaa.Technical.Duration.java
/**
 * Sets one time component of this duration and re-renders the ISO-8601
 * duration text (e.g. "PT1H30M") into the underlying text field.
 *
 * @param value the value of the field
 * @param field the Java Calendar constant that represents the field; must
 *              be Calendar.HOUR, Calendar.MINUTE or Calendar.SECOND (any
 *              other constant is silently ignored, preserving the current
 *              state)
 */
public void set(int value, int field) {
    // The field constants are mutually exclusive, so chain the checks.
    if (field == Calendar.HOUR) {
        hours = value;
    } else if (field == Calendar.MINUTE) {
        minutes = value;
    } else if (field == Calendar.SECOND) {
        seconds = value;
    }
    StringBuilder builder = new StringBuilder("PT");
    if (hours != 0) {
        builder.append(hours);
        builder.append("H");
    }
    if (minutes != 0) {
        builder.append(minutes);
        builder.append("M");
    }
    if (seconds != 0) {
        builder.append(seconds);
        builder.append("S");
    }
    // BUG FIX: an all-zero duration used to render as "PT", which is not a
    // valid ISO-8601 duration. At least one component is required, so emit
    // the canonical zero duration "PT0S" instead.
    if (builder.length() == 2) {
        builder.append("0S");
    }
    super.setText(builder.toString());
}
From source file:eu.smartfp7.foursquare.AttendanceCrawler.java
/**
 * The main takes an undefined number of cities as arguments, then initializes
 * the specific crawling of all the trending venues of these cities.
 * The trending venues must have been previously identified using the `DownloadPages`
 * program.
 *
 * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco.
 *
 * Runs forever: each venue is polled at most once per hour, results are
 * appended to per-venue ".ts" CSV files, and broken time series are repaired
 * once a day between 0am and 2am.
 */
public static void main(String[] args) throws Exception {
    Settings settings = Settings.getInstance();
    String folder = settings.getFolder();

    // We keep info and error logs, so that we know what happened in case
    // of incoherence in the time series. One writer per city, append mode.
    Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>();
    Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>();

    // For each city we monitor, we store the venue IDs that we got from
    // a previous crawl.
    Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>();

    // Contains the epoch time when the last API call has been made for each
    // venue. Ensures that we get data only once each hour.
    Map<String, Long> venue_last_call = new HashMap<String, Long>();

    // Contains the epoch time when we last checked if time series were broken
    // for each city.
    // We do these checks once every day before the batch forecasting begins.
    Map<String, Long> sanity_checks = new HashMap<String, Long>();

    // We also keep in memory the number of checkins for the last hour for
    // each venue, so that per-hour checkin deltas can be computed.
    Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>();

    // Number of API calls made during each (truncated) hour, for logging.
    Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>();

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    int total_venues = 0;
    long total_calls = 0;
    long time_spent_on_API = 0;

    // Per-city initialization: load venues, open logs, and recover state.
    for (String c : args) {
        settings.checkFileHierarchy(c);

        city_venues.put(c, loadVenues(c));
        total_venues += city_venues.get(c).size();

        info_logs.put(c, new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true));
        error_logs.put(c, new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true));

        Calendar cal = Calendar.getInstance();

        info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". "
                + city_venues.get(c).size() + " venues loaded.\n");
        info_logs.get(c).flush();

        // If we interrupted the program for some reason, we can get back
        // the in-memory data (last call time and last checkin count) from
        // the last line of each venue's time-series file.
        // Important: the program must not be interrupted for more than one
        // hour, or we will lose time series data.
        for (String venue_id : city_venues.get(c)) {
            String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id
                    + ".ts";

            if (new File(ts_file).exists()) {
                BufferedReader buffer = new BufferedReader(new FileReader(ts_file));
                String mem = null, line = null;
                // Scan to the last line of the file; `mem` keeps the
                // previously read line when the loop terminates.
                for (; (line = buffer.readLine()) != null; mem = line)
                    ;
                buffer.close();

                if (mem == null)
                    continue;

                // CSV layout: Date,here_now,hour_checkins,total_checkins
                String[] tmp = mem.split(",");
                venue_last_call.put(venue_id, df.parse(tmp[0]).getTime());
                venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3]));

                VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file));
            } // if
        } // for

        sanity_checks.put(c, cal.getTimeInMillis());
    } // for

    // A single Foursquare API account is limited to 5000 venues per hour.
    if (total_venues > 5000) {
        System.out.println(
                "Too much venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use these credentials.\nExiting now.");
        return;
    }

    // Main crawl loop — runs until the process is killed.
    while (true) {
        for (String c : args) {
            // We create a FIFO queue and pop venue IDs one at a time.
            LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c));
            String venue_id = null;

            // Artificial wait to avoid processors looping at 100% of their capacity
            // when there is no more venues to crawl for the current hour.
            Thread.sleep(3000);

            while ((venue_id = city_venues_buffer.pollFirst()) != null) {
                // We get the current time according to the city's time zone
                // by shifting a local calendar by the offset difference.
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.MILLISECOND,
                        TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime())
                                - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime()));
                //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime()));

                // Truncate to the start of the hour: all rows written this
                // hour share the same timestamp.
                long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime();

                // We query Foursquare only once per hour per venue.
                if (venue_last_call.get(venue_id) != null
                        && current_time < venue_last_call.get(venue_id) + 3600000)
                    continue;

                // Pace the calls so the hourly quota is spread evenly.
                intelligentWait(total_venues, cal.getTime().getTime(),
                        (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls)));

                Venue venue = null;

                try {
                    long beforeCall = System.currentTimeMillis();
                    venue = new Venue(getFoursquareVenueById(venue_id, c));

                    // If there is no last call, this is the beginning of the time series
                    // for this venue. We get the number of people "here now" to initialize
                    // the series.
                    if (venue_last_call.get(venue_id) == null) {
                        /** TODO: by doing this, we keep a representation of the venue dating from the beginning
                         *  of the specific crawl. we might want to change this and update this file once
                         *  in a while.
                         */
                        FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues"
                                + File.separator + venue_id + ".info");
                        info.write(venue.getFoursquareJson());
                        info.close();

                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts");
                        out.write("Date,here_now,hour_checkins,total_checkins\n");
                        out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow()
                                + "," + venue.getCheckincount() + "\n");
                        out.close();
                    } else {
                        // Append one row; hour_checkins is the delta of the
                        // venue's total checkin count since the last call.
                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts", true);
                        int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id);
                        out.write(df.format(current_time) + "," + venue.getHereNow() + ","
                                + Integer.toString(checks) + "," + venue.getCheckincount() + "\n");
                        out.close();
                    }

                    if (APICallsCount.get(current_time) == null)
                        APICallsCount.put(current_time, 1);
                    else
                        APICallsCount.put(current_time, APICallsCount.get(current_time) + 1);

                    total_calls++;

                    venue_last_call.put(venue_id, current_time);
                    venue_last_checkin.put(venue_id, venue.getCheckincount());

                    time_spent_on_API += System.currentTimeMillis() - beforeCall;
                } catch (Exception e) {
                    // If something bad happens (crawler not available, IO error, ...), we put the
                    // venue_id in the FIFO queue so that it gets reevaluated later.
                    //e.printStackTrace();
                    error_logs.get(c)
                            .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id
                                    + " (" + e.getMessage() + "). " + APICallsCount.get(current_time)
                                    + " API calls so far this hour, " + city_venues_buffer.size()
                                    + " venues remaining in the buffer.\n");
                    error_logs.get(c).flush();

                    System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- "
                            + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size()
                            + " venues remaining " + " (" + e.getMessage() + ")");

                    // A deleted venue is dropped for good; any other error
                    // re-queues the venue for a later retry.
                    if (e instanceof FoursquareAPIException)
                        if (((FoursquareAPIException) e).getHttp_code().equals("400")
                                && ((FoursquareAPIException) e).getError_detail()
                                        .equals("Venue " + venue_id + " has been deleted")) {
                            city_venues.get(c).remove(venue_id);
                            removeVenue(venue_id, c);
                        } else
                            city_venues_buffer.add(venue_id);

                    continue;
                }
            } // while

            // Every day between 0am and 2am, we repair all the broken time series (if there
            // is something to repair).
            Calendar cal = Calendar.getInstance();

            if (city_venues_buffer.peekFirst() == null
                    && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000
                    && cal.get(Calendar.HOUR_OF_DAY) < 2) {
                VenueUtil.fixBrokenTimeSeriesCity(c, folder);
                sanity_checks.put(c, cal.getTimeInMillis());
                info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n");
                info_logs.get(c).flush();
            }
        } // for
    } // while
}
From source file:org.opensafety.hishare.managers.implementation.http.UserManagerImpl.java
public String renewUserAuthentication(String username) { Calendar expirationDate = Calendar.getInstance(); expirationDate.add(Calendar.HOUR, authenticationExpiration); User authenticatee = userDao.getByName(username); authenticatee.setAuthenticationId(UUID.randomUUID().toString()); authenticatee.setAuthenticationExpiration(expirationDate.getTime()); userDao.updateUser(authenticatee);//ww w . j a v a 2s . c o m return authenticatee.getAuthenticationId(); }
From source file:com.linuxbox.enkive.teststats.StatsHourGrainTest.java
@SuppressWarnings("unchecked") @BeforeClass//from w w w . ja v a 2 s . co m public static void setUp() throws ParseException, GathererException { gatherTester = TestHelper.BuildGathererService(); coll = TestHelper.GetTestCollection(); client = TestHelper.BuildClient(); grain = new HourConsolidator(client); Calendar cal = Calendar.getInstance(); cal.set(Calendar.MILLISECOND, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MINUTE, 0); for (int i = 0; i < 10; i++) { List<RawStats> stats = gatherTester.gatherStats(); List<Map<String, Object>> statsToStore = createListOfMaps(); if (i == 5) { cal.add(Calendar.HOUR, -1); } for (RawStats data : stats) { Map<String, Object> temp = data.toMap(); Map<String, Object> date = (Map<String, Object>) temp.get(STAT_TIMESTAMP); date.put(CONSOLIDATION_MIN, cal.getTime()); date.put(CONSOLIDATION_MAX, cal.getTime()); date.put(STAT_TS_POINT, cal.getTime()); statsToStore.add(temp); } client.storeData(statsToStore); } dataCount = coll.count(); }
From source file:com.github.jjYBdx4IL.utils.parser.BerlinAirQualityParserTest.java
@Test public void testParseValueBerlin1Txt() throws IOException, ParseException { @SuppressWarnings("deprecation") String source = IOUtils.toString(getClass().getResourceAsStream("berlin1.txt")); AirQualityParseResult result = parser.setSourceDoc(source).parse().getResult(ID_PM10_VERKEHR); assertEquals(24, Integer.parseInt(result.getValue())); // 01.07.2014 - 11:00 Uhr MESZ assertEquals(2014, result.getTime().get(Calendar.YEAR)); assertEquals(6, result.getTime().get(Calendar.MONTH)); assertEquals(1, result.getTime().get(Calendar.DAY_OF_MONTH)); assertEquals(11, result.getTime().get(Calendar.HOUR)); assertEquals(0, result.getTime().get(Calendar.MINUTE)); assertTrue(result.getDesc().contains("Verkehrsmessstelle")); assertEquals("PM10", result.getType().toString()); }
From source file:org.openehealth.coala.converter.PXSDateConverterTest.java
/**
 * Reads the date patterns from the "coala-document" bundle and builds the
 * two reference dates used by the conversion tests:
 * 2011-01-15 03:36:50.000 (long pattern) and 2011-01-15 00:00:00.000
 * (short pattern).
 *
 * @throws java.lang.Exception if the resource bundle cannot be loaded
 */
@Before
public void setUp() throws Exception {
    ResourceBundle properties = ResourceBundle.getBundle("coala-document");
    longPattern = properties.getString("coala.consent.longdatepattern");
    shortPattern = properties.getString("coala.consent.shortdatepattern");

    GregorianCalendar cal = new GregorianCalendar();
    cal.set(Calendar.YEAR, 2011);
    cal.set(Calendar.MONTH, Calendar.JANUARY);
    cal.set(Calendar.DAY_OF_MONTH, 15);
    // BUG FIX: Calendar.HOUR is the 12-hour field and leaves AM_PM at the
    // current wall-clock value, so the old code produced 03:36:50 or
    // 15:36:50 depending on WHEN the test ran (a flaky fixture). HOUR_OF_DAY
    // pins it to 03:36:50 — assumed to be the intended reference; confirm
    // against the long-pattern fixture data.
    cal.set(Calendar.HOUR_OF_DAY, 3);
    cal.set(Calendar.MINUTE, 36);
    cal.set(Calendar.SECOND, 50);
    cal.set(Calendar.MILLISECOND, 0);
    referenceDateLong = cal.getTime();

    cal = new GregorianCalendar();
    cal.set(Calendar.YEAR, 2011);
    cal.set(Calendar.MONTH, Calendar.JANUARY);
    cal.set(Calendar.DAY_OF_MONTH, 15);
    // Same fix: HOUR_OF_DAY = 0 is unambiguously midnight, whereas
    // HOUR = 0 could resolve to 12:00 when the test ran in the afternoon.
    cal.set(Calendar.HOUR_OF_DAY, 0);
    cal.set(Calendar.MINUTE, 0);
    cal.set(Calendar.SECOND, 0);
    cal.set(Calendar.MILLISECOND, 0);
    referenceDateShort = cal.getTime();
}
From source file:com.seajas.search.profiler.task.LoggingCleanupTask.java
/** * Perform the actual cleaning.//from w w w . j a v a 2 s . c om */ public void cleanup() { logger.info("Started log cleaner job"); // Keep track of the start date for cache clean-up Calendar currentDate = Calendar.getInstance(); currentDate.add(Calendar.HOUR, -loggingRetentionTime); List<Logging> entries = profilerService.cleanLogging(currentDate.getTime()); logger.info("Moving " + entries.size() + " entries from the logging database to on-disk storage"); // Create a date-formatter SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy"); SimpleDateFormat fullFormatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss.SSS"); // We keep a writer per day Map<String, Writer> writerMap = new HashMap<String, Writer>(); // Write the removed logging entries to their respective files for (Logging entry : entries) { String date = formatter.format(entry.getCreationDate()); try { Writer writer = writerMap.get(date); if (writer == null) writerMap.put(date, writer = new FileWriter(loggingPath + File.separator + date + ".log", true)); writer.write(fullFormatter.format(entry.getCreationDate()) + " " + StringUtils.rightPad(entry.getLevel().toUpperCase(), RIGHT_PAD_SPACES) + " " + entry.getMessage().trim() + "\n"); } catch (IOException e) { logger.error("Could not write the logging entry to the log file for date " + date); } } // Flush and then close the writers for (Map.Entry<String, Writer> entry : writerMap.entrySet()) { try { entry.getValue().flush(); entry.getValue().close(); } catch (IOException e) { logger.error("Could not flush and close the given log file", e); } } logger.info("Finishing log cleaner job"); }
From source file:com.seajas.search.attender.service.task.LoggingCleanupTask.java
/** * Perform the actual cleaning.//from w w w.j a v a2 s .c om */ public void cleanup() { logger.info("Started log cleaner job"); // Keep track of the start date for cache clean-up Calendar currentDate = Calendar.getInstance(); currentDate.add(Calendar.HOUR, -loggingRetentionTime); List<Logging> entries = attenderService.cleanLogging(currentDate.getTime()); logger.info("Moving " + entries.size() + " entries from the logging database to on-disk storage"); // Create a date-formatter SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy"); SimpleDateFormat fullFormatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss.SSS"); // We keep a writer per day Map<String, Writer> writerMap = new HashMap<String, Writer>(); // Write the removed logging entries to their respective files for (Logging entry : entries) { String date = formatter.format(entry.getCreationDate()); try { Writer writer = writerMap.get(date); if (writer == null) writerMap.put(date, writer = new FileWriter(loggingPath + File.separator + date + ".log", true)); writer.write(fullFormatter.format(entry.getCreationDate()) + " " + StringUtils.rightPad(entry.getLevel().toUpperCase(), RIGHT_PAD_SPACES) + " " + entry.getMessage().trim() + "\n"); } catch (IOException e) { logger.error("Could not write the logging entry to the log file for date " + date); } } // Flush and then close the writers for (Map.Entry<String, Writer> entry : writerMap.entrySet()) { try { entry.getValue().flush(); entry.getValue().close(); } catch (IOException e) { logger.error("Could not flush and close the given log file", e); } } logger.info("Finishing log cleaner job"); }
From source file:net.chrisrichardson.foodToGo.restaurantNotificationService.impl.hibernateImpl.SpringHibernateRestaurantNotificationUsingPessOfflineLockTests.java
/**
 * Test fixture: recreates the pessimistic-offline-lock table and persists
 * an order placed five hours in the past.
 */
protected void onSetUp() throws Exception {
    super.onSetUp();

    // Start every test from a clean lock table.
    jdbcTemplate.execute("DROP TABLE FTGO_LOCK IF EXISTS");
    jdbcTemplate.execute(
            "CREATE TABLE FTGO_LOCK(CLASS_ID VARCHAR, PK VARCHAR, OWNER VARCHAR, PRIMARY KEY (CLASS_ID, PK))");

    // Build the order with a timestamp five hours before now.
    Calendar fiveHoursAgo = Calendar.getInstance();
    fiveHoursAgo.add(Calendar.HOUR, -5);

    order = OrderMother.makeOrder(fiveHoursAgo.getTime());
    save(order.getRestaurant());
    save(order);
}
From source file:com.square.core.agent.desactivation.relation.DesactivationRelationsAgentJmxThead.java
@Override public void run() { agent.setEtat("Agent DesactivationRelations debut de traitement"); try {// ww w . ja v a 2 s .c om final Calendar date = Calendar.getInstance(); if (StringUtils.isNotBlank(agent.getDate())) { final SimpleDateFormat formatDate = new SimpleDateFormat("dd/MM/yyyy"); date.setTime(formatDate.parse(agent.getDate())); } date.clear(Calendar.HOUR); date.clear(Calendar.MINUTE); date.clear(Calendar.SECOND); date.clear(Calendar.MILLISECOND); final SimpleDateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss"); agent.setEtat("Lancement desactivation relations la date du " + format.format(date.getTime())); int counter = 0; // Cration des criteres des relations dsactiver final RelationCriteresRechercheDto criterias = new RelationCriteresRechercheDto(); criterias.setDateFinMax(date); criterias.setActif(true); criterias.setSupprime(null); // Calcul du nombre total final int nbTotal = personneService.countRelationsParCriteres(criterias); agent.setEtat("Traitement des relations : " + nbTotal + " lments"); // Recuparation des ids de relations List<Long> listeRelationsADesactiver = personneService.rechercherIdsRelationsADesactiver(date, agent.getPagination()); while (listeRelationsADesactiver.size() > 0 && !agent.isStopping()) { counter += listeRelationsADesactiver.size(); logger.debug("Dsactivation des relations en cours " + counter + " / " + nbTotal); personneService.desactiverRelations(listeRelationsADesactiver); // Rcupration des relations dsactiver listeRelationsADesactiver = personneService.rechercherIdsRelationsADesactiver(date, agent.getPagination()); } } catch (Exception e) { agent.setEtat(e.getMessage()); e.printStackTrace(); } finally { agent.getSessionFactory().openSession().close(); agent.stop(); } }