Example usage for java.text DateFormat parse

Introduction

This page collects usage examples for java.text.DateFormat.parse(String).

Prototype

public Date parse(String source) throws ParseException 

Document

Parses text from the beginning of the given string to produce a date.
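
A minimal sketch of the prototype in isolation: the text is produced by the same DateFormat instance that parses it, so the round trip is expected to succeed in any default locale.

import java.text.DateFormat;
import java.text.ParseException;
import java.util.Date;

public class ParseRoundTrip {
    public static void main(String[] args) throws ParseException {
        DateFormat df = DateFormat.getDateInstance();   // default style, default locale
        String text = df.format(new Date());            // text produced by df itself
        Date parsed = df.parse(text);                   // expected to succeed for that reason
        System.out.println(text + " -> " + parsed);
    }
}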

Usage

From source file:MainClass.java

public static void main(String[] args) {
    DateFormat shortDf = DateFormat.getDateInstance(DateFormat.SHORT);

    DateFormat mediumDf = DateFormat.getDateInstance(DateFormat.MEDIUM);
    DateFormat longDf = DateFormat.getDateInstance(DateFormat.LONG);
    DateFormat fullDf = DateFormat.getDateInstance(DateFormat.FULL);
    System.out.println(shortDf.format(new Date()));
    System.out.println(mediumDf.format(new Date()));
    System.out.println(longDf.format(new Date()));
    System.out.println(fullDf.format(new Date()));

    // parsing
    try {
        Date date = shortDf.parse("12/12/2006");
    } catch (ParseException e) {
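        // ignored for this example; the SHORT pattern is locale-dependent, so
        // "12/12/2006" may not parse under every default locale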
    }
}
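
The SHORT-style parse above only succeeds when the default locale's short date pattern accepts "12/12/2006" (for en_US it is read as month/day/year). A sketch of the same call with the locale pinned, so the example behaves identically everywhere (Locale.US chosen purely for illustration):

    DateFormat usShortDf = DateFormat.getDateInstance(DateFormat.SHORT, java.util.Locale.US);
    try {
        System.out.println(usShortDf.parse("12/12/2006"));
    } catch (ParseException e) {
        e.printStackTrace();
    }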

From source file:com.appeligo.ccdataindexer.Indexer.java

public static void main(String args[]) throws Exception {
    ConfigurationService.setRootDir(new File("config"));
    ConfigurationService.setEnvName("live");
    ConfigurationService.init();
    if (args.length < 4 || args.length > 5) {
        usage();
        System.exit(-1);
    }
    File file = new File(args[0]);
    if (!file.isDirectory()) {
        usage();
        System.exit(-1);
    }

    Date date = null;
    if (args.length == 5) {
        DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
        date = format.parse(args[4]);
    }
    long now = System.currentTimeMillis();
    String epgUrl = args[1];
    String indexLocation = args[2];
    String compositeIndexLocation = args[3];
    if (indexLocation.toLowerCase().trim().equals("null")) {
        indexLocation = null;
    }
    if (compositeIndexLocation.toLowerCase().trim().equals("null")) {
        compositeIndexLocation = null;
    }
    if (indexLocation == null && compositeIndexLocation == null) {
        log.error("indexLocation and compositeIndexLocation cannot both be null.");
        usage();
    }
    Indexer indexer = new Indexer(file, epgUrl, indexLocation, compositeIndexLocation);
    if (date != null) {
        indexer.setAfterDate(date);
    }
    if (date == null) {
        log.info("Processing all files");
    } else {
        log.info("Processing all files after " + date + " sanityCheck " + indexer.getAfterDate());

    }
    int count = indexer.index();
    long after = System.currentTimeMillis();
    log.info("Processing took " + ((after - now) / (60 * 1000)) + " minutes to index the programs.");
    log.info("Time spent looking up scheduled programs in epg: " + (lookupTime / (60 * 1000)) + " minutes");

    log.info("Indexed " + count + " programs");
}
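
Note that the SimpleDateFormat above is lenient by default, so a malformed fifth argument such as "2007-13-45" would be silently rolled over into a later date rather than rejected. A stricter variant (a sketch, not part of the original class):

    DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
    format.setLenient(false);        // reject impossible dates instead of rolling them over
    date = format.parse(args[4]);    // now throws ParseException for input like "2007-13-45"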

From source file:com.radvision.icm.service.vcm.ICMService.java

public static void main(String[] args) {
    long currTime = System.currentTimeMillis();
    //      long tmpTime = currTime - 24 * 60 * 60 * 1000;
    Date dt = new Date(currTime);
    //      Calendar cal = Calendar.getInstance();
    //      cal.setTime(dt);
    //      System.out.println(cal);

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
    String s = df.format(dt);
    System.out.println(s);
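    // Formatting to "yyyy-MM-dd" and parsing the result back drops the time of day,
    // so dtNew below is local midnight and the printed value is hours:minutes since midnight.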
    try {
        Date dtNew = df.parse(s);
        long newTime = dtNew.getTime();
        long minutes = (currTime - newTime) / (1000 * 60);
        System.out.println(minutes / 60 + ":" + minutes % 60);
    } catch (ParseException e) {
        e.printStackTrace();
    }
}

From source file:de.uniwue.dmir.heatmap.EntryPointIncremental.java

@SuppressWarnings({ "rawtypes", "unchecked" })
public static void main(String[] args) throws IOException, ParseException {

    DateFormat df = new SimpleDateFormat(DATE_FORMAT);
    SimpleDateFormat backupDf = new SimpleDateFormat(BACKUP_DATE_FORMAT);

    String workDir = System.getProperty("workDir", ".");
    LOGGER.debug("Work dir: {}", workDir);
    String configDir = System.getProperty("configDir", ".");
    LOGGER.debug("Config dir: {}", configDir);

    File seedDir = new File(workDir, SEED_DIR);
    LOGGER.debug("Seed dir: {}", seedDir);
    File currentDir = new File(workDir, CURRENT_DIR);
    LOGGER.debug("Current dir: {}", currentDir);
    File backupDir = new File(workDir, BACKUP_DIR);
    LOGGER.debug("Backup dir: {}", backupDir);

    String initialMinTimeString = System.getProperty("minTime");
    LOGGER.debug("Initial minimal time parameter: {}", initialMinTimeString);
    Date initialMinTime = initialMinTimeString == null ? new Date(0) : df.parse(initialMinTimeString);
    LOGGER.debug("Initial minimal time: {}", df.format(initialMinTime));

    String absoluteMaxTimeString = System.getProperty("maxTime");
    LOGGER.debug("Absolute maximal time parameter: {}", absoluteMaxTimeString);
    Date absoluteMaxTime = absoluteMaxTimeString == null ? new Date()
            : new SimpleDateFormat(DATE_FORMAT).parse(absoluteMaxTimeString);
    LOGGER.debug("Absolute maximal time: {}", df.format(absoluteMaxTime));

    String incrementalFile = new File("file:" + configDir, INCREMENTAL_FILE).getPath();
    String settingsFile = new File("file:" + configDir, HEATMAP_PROCESSOR__FILE).getPath();

    LOGGER.debug("Initializing incremental control file: {}", incrementalFile);
    FileSystemXmlApplicationContext incrementalContext = new FileSystemXmlApplicationContext(incrementalFile);

    // get point limit
    int pointLimit = Integer
            .parseInt(incrementalContext.getBeanFactory().resolveEmbeddedValue("${point.limit}"));
    LOGGER.debug("Point limit: {}", pointLimit);

    // get backups to keep
    int backupsToKeep = Integer
            .parseInt(incrementalContext.getBeanFactory().resolveEmbeddedValue("${backups.to.keep}"));
    LOGGER.debug("Backups to keep: {}", backupsToKeep);

    LOGGER.debug("Initializing process components (manager and limiter).");
    IProcessManager processManager = incrementalContext.getBean(IProcessManager.class);
    IProcessLimiter processLimiter = incrementalContext.getBean(IProcessLimiter.class);

    LOGGER.debug("Starting incremental loop.");
    while (true) { // break as soon as no new points are available

        // cleanup --- just in case
        LOGGER.debug("Deleting \"current\" dir.");
        FileUtils.deleteDirectory(currentDir);

        // copy from seed to current
        LOGGER.debug("Copying seed.");
        seedDir.mkdirs();
        FileUtils.copyDirectory(seedDir, currentDir);

        // get min time
        LOGGER.debug("Getting minimal time ...");
        Date minTime = initialMinTime;
        ProcessManagerEntry entry = processManager.getEntry();
        if (entry != null && entry.getMaxTime() != null) {
            minTime = entry.getMaxTime();
        }
        LOGGER.debug("Minimal time: {}", new SimpleDateFormat(DATE_FORMAT).format(minTime));

        // break if we processed all available points (minTime is greater than or equal to absoluteMaxTime)
        if (minTime.getTime() >= absoluteMaxTime.getTime()) {
            LOGGER.debug("Processed all points.");
            break;
        }

        // get the maximal time
        LOGGER.debug("Get maximal time.");

        // get the time from the newest point in our point range (pointMaxTime) ...
        Date pointMaxTime = processLimiter.getMaxTime(minTime, pointLimit);

        // ... and possibly break the loop if no new points are available
        if (pointMaxTime == null)
            break;

        // set the max time and make sure we are not taking too many points
        // (set max time to the minimum of pointMaxTime and absoluteMaxTime)
        Date maxTime = pointMaxTime.getTime() > absoluteMaxTime.getTime() ? absoluteMaxTime : pointMaxTime;

        LOGGER.debug("Maximal time: {}", new SimpleDateFormat(DATE_FORMAT).format(maxTime));

        // start process
        processManager.start(minTime);

        System.setProperty("minTimestamp", new SimpleDateFormat(DATE_FORMAT).format(minTime));

        System.setProperty("maxTimestamp", new SimpleDateFormat(DATE_FORMAT).format(maxTime));

        FileSystemXmlApplicationContext heatmapContext = new FileSystemXmlApplicationContext(settingsFile);

        IHeatmap heatmap = heatmapContext.getBean(HEATMAP_BEAN, IHeatmap.class);

        ITileProcessor tileProcessor = heatmapContext.getBean(WRITER_BEAN, ITileProcessor.class);

        heatmap.processTiles(tileProcessor);

        tileProcessor.close();
        heatmapContext.close();

        // finish process
        processManager.finish(maxTime);

        // move old seed
        if (backupsToKeep > 0) {
            FileUtils.moveDirectory(seedDir, new File(backupDir, backupDf.format(minTime))); // minTime is the maxTime of the seed

            // cleanup backups
            String[] backups = backupDir.list(DirectoryFileFilter.DIRECTORY);
            File oldestBackup = null;
            if (backups.length > backupsToKeep) {
                for (String bs : backups) {
                    File b = new File(backupDir, bs);
                    if (oldestBackup == null || oldestBackup.lastModified() > b.lastModified()) {
                        oldestBackup = b;
                    }
                }
                FileUtils.deleteDirectory(oldestBackup);
            }

        } else {
            FileUtils.deleteDirectory(seedDir);
        }

        // move new seed
        FileUtils.moveDirectory(currentDir, seedDir);

    }

    incrementalContext.close();

}

From source file:eu.smartfp7.foursquare.AttendanceCrawler.java

/**
 * The main takes an undefined number of cities as arguments, then initializes
 * the specific crawling of all the trending venues of these cities.
 * The trending venues must have been previously identified using the `DownloadPages`
 * program.
 * 
 * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco.
 * 
 */
public static void main(String[] args) throws Exception {
    Settings settings = Settings.getInstance();
    String folder = settings.getFolder();

    // We keep info and error logs, so that we know what happened in case
    // of incoherence in the time series.
    Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>();
    Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>();

    // For each city we monitor, we store the venue IDs that we got from
    // a previous crawl.
    Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>();

    // Contains the epoch time when the last API call has been made for each 
    // venue. Ensures that we get data only once each hour. 
    Map<String, Long> venue_last_call = new HashMap<String, Long>();

    // Contains the epoch time when we last checked if time series were broken
    // for each city.
    // We do these checks once every day before the batch forecasting begins.
    Map<String, Long> sanity_checks = new HashMap<String, Long>();

    // We also keep in memory the number of checkins for the last hour for
    // each venue.
    Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>();

    Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>();

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    int total_venues = 0;
    long total_calls = 0;
    long time_spent_on_API = 0;

    for (String c : args) {
        settings.checkFileHierarchy(c);

        city_venues.put(c, loadVenues(c));
        total_venues += city_venues.get(c).size();

        info_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true));
        error_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true));

        Calendar cal = Calendar.getInstance();

        info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". "
                + city_venues.get(c).size() + " venues loaded.\n");
        info_logs.get(c).flush();

        // If we interrupted the program for some reason, we can get back
        // the in-memory data.
        // Important: the program must not be interrupted for more than one
        // hour, or we will lose time series data.
        for (String venue_id : city_venues.get(c)) {
            String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id
                    + ".ts";

            if (new File(ts_file).exists()) {
                BufferedReader buffer = new BufferedReader(new FileReader(ts_file));
                String mem = null, line = null;
                for (; (line = buffer.readLine()) != null; mem = line)
                    ;
                buffer.close();

                if (mem == null)
                    continue;

                String[] tmp = mem.split(",");
                venue_last_call.put(venue_id, df.parse(tmp[0]).getTime());
                venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3]));

                VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file));
            } // if
        } // for

        sanity_checks.put(c, cal.getTimeInMillis());
    } // for

    if (total_venues > 5000) {
        System.out.println(
                "Too many venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use its credentials.\nExiting now.");
        return;
    }

    while (true) {

        for (String c : args) {
            // We create a FIFO queue and pop venue IDs one at a time.
            LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c));
            String venue_id = null;

            // Artificial wait to avoid processors looping at 100% of their capacity
            // when there are no more venues to crawl for the current hour.
            Thread.sleep(3000);

            while ((venue_id = city_venues_buffer.pollFirst()) != null) {
                // We get the current time according to the city's time zone
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.MILLISECOND,
                        TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime())
                                - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime()));
                //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime()));

                long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime();

                // We query Foursquare only once per hour per venue.
                if (venue_last_call.get(venue_id) != null
                        && current_time < venue_last_call.get(venue_id) + 3600000)
                    continue;

                intelligentWait(total_venues, cal.getTime().getTime(),
                        (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls)));

                Venue venue = null;

                try {
                    long beforeCall = System.currentTimeMillis();
                    venue = new Venue(getFoursquareVenueById(venue_id, c));

                    // If there is no last call, this is the beginning of the time series
                    // for this venue. We get the number of people "here now" to initialize
                    // the series.
                    if (venue_last_call.get(venue_id) == null) {
                        /** TODO: by doing this, we keep a representation of the venue dating from the beginning
                         *       of the specific crawl. we might want to change this and update this file once
                         *      in a while.
                         */
                        FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues"
                                + File.separator + venue_id + ".info");
                        info.write(venue.getFoursquareJson());
                        info.close();

                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts");
                        out.write("Date,here_now,hour_checkins,total_checkins\n");
                        out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow()
                                + "," + venue.getCheckincount() + "\n");
                        out.close();
                    } else {
                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts", true);
                        int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id);
                        out.write(df.format(current_time) + "," + venue.getHereNow() + ","
                                + Integer.toString(checks) + "," + venue.getCheckincount() + "\n");
                        out.close();
                    }

                    if (APICallsCount.get(current_time) == null)
                        APICallsCount.put(current_time, 1);
                    else
                        APICallsCount.put(current_time, APICallsCount.get(current_time) + 1);

                    total_calls++;

                    venue_last_call.put(venue_id, current_time);
                    venue_last_checkin.put(venue_id, venue.getCheckincount());

                    time_spent_on_API += System.currentTimeMillis() - beforeCall;
                } catch (Exception e) {
                    // If something bad happens (crawler not available, IO error, ...), we put the
                    // venue_id in the FIFO queue so that it gets reevaluated later.
                    //e.printStackTrace();
                    error_logs.get(c)
                            .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id
                                    + " (" + e.getMessage() + "). " + APICallsCount.get(current_time)
                                    + " API calls so far this hour, " + city_venues_buffer.size()
                                    + " venues remaining in the buffer.\n");
                    error_logs.get(c).flush();

                    System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- "
                            + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size()
                            + " venues remaining " + " (" + e.getMessage() + ")");

                    if (e instanceof FoursquareAPIException)
                        if (((FoursquareAPIException) e).getHttp_code().equals("400")
                                && ((FoursquareAPIException) e).getError_detail()
                                        .equals("Venue " + venue_id + " has been deleted")) {
                            city_venues.get(c).remove(venue_id);
                            removeVenue(venue_id, c);
                        } else
                            city_venues_buffer.add(venue_id);

                    continue;
                }
            } // while

            // Every day between midnight and 2 am, we repair all the broken time series (if there
            // is something to repair).
            Calendar cal = Calendar.getInstance();
            if (city_venues_buffer.peekFirst() == null
                    && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000
                    && cal.get(Calendar.HOUR_OF_DAY) < 2) {
                VenueUtil.fixBrokenTimeSeriesCity(c, folder);
                sanity_checks.put(c, cal.getTimeInMillis());
                info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n");
                info_logs.get(c).flush();
            }
        } // for
    } // while
}

From source file:Main.java

public static boolean isValidDateStr(String date) throws Exception {

    DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT);
    df.setLenient(false);
    df.parse(date);

    return true;
}
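
Because the method throws on bad input rather than returning false, a caller typically wraps it. A minimal sketch (the sample strings are illustrative, and which ones pass depends on the default locale's SHORT pattern):

    for (String candidate : new String[] { "12/12/2006", "13/45/2006", "not a date" }) {
        boolean valid;
        try {
            valid = isValidDateStr(candidate);
        } catch (Exception e) {
            valid = false;   // strict (non-lenient) parsing rejected the string
        }
        System.out.println(candidate + " -> " + valid);
    }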

From source file:Main.java

public static Date parse(String value, DateFormat... formatters) {
    Date date = null;
    for (DateFormat formatter : formatters) {
        try {
            date = formatter.parse(value);
            break;
        } catch (ParseException e) {
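            // not a match for this formatter; fall through and try the next one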
        }
    }
    return date;
}
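
A possible call site for the fall-through helper above (assuming java.text.SimpleDateFormat is imported; both patterns are illustrative):

    Date d = parse("2006-12-12",
            new SimpleDateFormat("yyyy-MM-dd"),
            new SimpleDateFormat("MM/dd/yyyy"));
    System.out.println(d);   // null only if none of the formatters accepted the string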

From source file:Main.java

public static String previousDateString(String dateString) throws ParseException {
    DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd");

    Date myDate = dateFormat.parse(dateString);

    Calendar calendar = Calendar.getInstance();
    calendar.setTime(myDate);
    calendar.add(Calendar.DAY_OF_YEAR, -1);

    Date previousDate = calendar.getTime();
    String result = dateFormat.format(previousDate);

    return result;
}
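
For example, the Calendar arithmetic handles month and leap-year rollover:

    System.out.println(previousDateString("20240301"));   // 20240229 (2024 is a leap year)
    System.out.println(previousDateString("20240101"));   // 20231231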

From source file:Main.java

public static Date getDate(String dateStr, DateFormat dateFormat) {
    try {
        return dateFormat.parse(dateStr);
    } catch (ParseException e) {
        return null;
    }
}
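
This variant swallows the ParseException and signals failure with null, so callers need an explicit check. A minimal sketch (assuming java.text.SimpleDateFormat is imported):

    Date d = getDate("2006-12-12", new SimpleDateFormat("yyyy-MM-dd"));
    if (d == null) {
        System.out.println("unparseable input");
    } else {
        System.out.println(d);
    }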

From source file:Main.java

public static Date getDate(String format, String strDate) throws ParseException {
    DateFormat dateFormat = new SimpleDateFormat(format);
    Date date = dateFormat.parse(strDate);
    return date;
}
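
Note that the parameter order here is (format, strDate), the reverse of the previous helper, and bad input surfaces as a ParseException rather than a null return:

    Date d = getDate("yyyy-MM-dd", "2006-12-12");   // throws ParseException on malformed input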