Example usage for org.joda.time.format DateTimeFormatter parseMillis

Introduction

This page shows example usage of org.joda.time.format.DateTimeFormatter.parseMillis.

Prototype

public long parseMillis(String text) 

Document

Parses a datetime from the given text, returning the number of milliseconds since the epoch, 1970-01-01T00:00:00Z.
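
A quick illustration before the project examples below (a minimal sketch; the pattern, input, and class name are illustrative):

import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class ParseMillisDemo {
    public static void main(String[] args) {
        // parseMillis interprets the text in the formatter's zone (UTC here)
        // and returns milliseconds since 1970-01-01T00:00:00Z.
        DateTimeFormatter f = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZoneUTC();
        long millis = f.parseMillis("1970-01-01 00:00:01");
        System.out.println(millis); // 1000, one second after the epoch
    }
}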

Usage

From source file:com.metawiring.generation.longfuncs.DateRange.java

License:Apache License

private long parsedEpochTime(String timeString) {
    List<Exception> exceptions = new ArrayList<>();
    for (DateTimeFormatter dtf : formatters) {
        try {
            return dtf.parseMillis(timeString);
        } catch (Exception e) {
            exceptions.add(e);
        }
    }
    String message = "";
    for (Exception e : exceptions) {
        message += e.getMessage() + "\n";
    }
    throw new RuntimeException(
            "Unable to parse [" + timeString + "] with any of the parsers. exceptions:" + message);
}

From source file:com.oneops.search.msg.processor.CIMessageProcessor.java

License:Apache License

private static void convertIllegalDateFormat(Map<String, String> ciAttributes, String name) {
    if (ciAttributes != null && ciAttributes.containsKey(name)) {
        String date = ciAttributes.get(name);

        //Nov  5 21:08:38 2019 GMT
        //Jan 22 18:21:47 2020 GMT
        // Some of the `expires_one` date fields apparently have a date format with two spaces, so while this conversion is a hack,
        // until we migrate to a newer ES version and change the mapping we need to accommodate both, since a single DateTimeFormat can't handle it.
        DateTimeFormatter wrongFormat = DateTimeFormat.forPattern("MMM dd HH:mm:ss yyyy z");
        DateTimeFormatter wrongFormat2 = DateTimeFormat.forPattern("MMM  dd HH:mm:ss yyyy z");

        // 2020-01-22T18:21:47
        DateTimeFormatter rightFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss");
        try {
            ciAttributes.put(name, rightFormat.print(wrongFormat.parseMillis(date)));
        } catch (Exception e) {
            try {
                ciAttributes.put(name, rightFormat.print(wrongFormat2.parseMillis(date)));
            } catch (Exception ignore) {
                // do nothing, unexpected date format
            }
        }
    }
}
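
The two examples above fall back across several patterns by catching parse failures. Joda-Time can also express this as a single formatter that tries alternative parsers in order, via DateTimeFormatterBuilder.append(DateTimePrinter, DateTimeParser[]). A minimal sketch, with illustrative patterns rather than the ones used above:

import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;

public class MultiPatternParse {
    public static void main(String[] args) {
        // Each candidate pattern contributes only its parser.
        DateTimeParser[] parsers = {
                DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss").getParser(),
                DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").getParser() };
        // A null printer is acceptable when the formatter is only used for parsing.
        DateTimeFormatter multi = new DateTimeFormatterBuilder()
                .append(null, parsers).toFormatter().withZoneUTC();
        // parseMillis succeeds with whichever parser matches.
        System.out.println(multi.parseMillis("1970-01-02T00:00:00")); // 86400000
        System.out.println(multi.parseMillis("1970-01-02 00:00:00")); // 86400000
    }
}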

From source file:com.tuplejump.stargate.lucene.query.RangeCondition.java

License:Apache License

/**
 * {@inheritDoc}
 */
@Override
public Query query(Options schema) throws Exception {
    Query query;
    if (field == null || field.trim().isEmpty()) {
        throw new IllegalArgumentException("Field name required");
    }
    NumericConfig numericConfig = schema.numericFieldOptions.get(field);

    Properties properties = schema.getProperties(field);
    Properties.Type fieldType = properties != null ? properties.getType() : Properties.Type.text;
    //TODO Range on TimeUUID type
    if (fieldType.isCharSeq()) {
        String lowerVal = null, upperVal = null;
        if (this.lower != null) {
            lowerVal = analyze(field, this.lower.toString(), schema.analyzer);
        }
        if (this.upper != null) {
            upperVal = analyze(field, this.upper.toString(), schema.analyzer);
        }
        query = TermRangeQuery.newStringRange(field, lowerVal, upperVal, includeLower, includeUpper);
    } else if (fieldType == Properties.Type.integer) {
        assert numericConfig != null;
        Integer lower = this.lower == null ? Integer.MIN_VALUE
                : numericConfig.getNumberFormat().parse(this.lower.toString()).intValue();
        Integer upper = this.upper == null ? Integer.MAX_VALUE
                : numericConfig.getNumberFormat().parse(this.upper.toString()).intValue();
        query = NumericRangeQuery.newIntRange(field, lower, upper, includeLower, includeUpper);
    } else if (fieldType == Properties.Type.bigint) {
        assert numericConfig != null;
        Long lower = this.lower == null ? Long.MIN_VALUE
                : numericConfig.getNumberFormat().parse(this.lower.toString()).longValue();
        Long upper = this.upper == null ? Long.MAX_VALUE
                : numericConfig.getNumberFormat().parse(this.upper.toString()).longValue();
        query = NumericRangeQuery.newLongRange(field, lower, upper, includeLower, includeUpper);
    } else if (fieldType == Properties.Type.decimal) {
        assert numericConfig != null;
        Float lower = this.lower == null ? Float.MIN_VALUE
                : numericConfig.getNumberFormat().parse(this.lower.toString()).floatValue();
        Float upper = this.upper == null ? Float.MAX_VALUE
                : numericConfig.getNumberFormat().parse(this.upper.toString()).floatValue();
        query = NumericRangeQuery.newFloatRange(field, lower, upper, includeLower, includeUpper);
    } else if (fieldType == Properties.Type.bigdecimal) {
        assert numericConfig != null;
        Double lower = this.lower == null ? Double.MIN_VALUE
                : numericConfig.getNumberFormat().parse(this.lower.toString()).doubleValue();
        Double upper = this.upper == null ? Double.MAX_VALUE
                : numericConfig.getNumberFormat().parse(this.upper.toString()).doubleValue();
        query = NumericRangeQuery.newDoubleRange(field, lower, upper, includeLower, includeUpper);
    } else if (fieldType == Properties.Type.date) {
        Long lower;
        Long upper;
        if ("millis".equals(format)) {
            lower = this.lower == null ? Long.MIN_VALUE : Long.valueOf(this.lower.toString());
            upper = this.upper == null ? Long.MAX_VALUE : Long.valueOf(this.upper.toString());
        } else {
            FormatDateTimeFormatter formatter = Dates.forPattern(format, Locale.getDefault());
            DateTimeFormatter parser = formatter.parser();
            lower = this.lower == null ? Long.MIN_VALUE : parser.parseMillis(this.lower.toString());
            upper = this.upper == null ? Long.MAX_VALUE : parser.parseMillis(this.upper.toString());
        }
        query = NumericRangeQuery.newLongRange(field, lower, upper, includeLower, includeUpper);
    } else {
        String message = String.format("Range queries are not supported by %s mapper", fieldType);
        throw new UnsupportedOperationException(message);
    }
    query.setBoost(boost);
    return query;
}

From source file:com.twitter.elephanttwin.util.DateUtil.java

License:Apache License

/**
 * Uses the given formatter to create a calendar object from the timestamp
 * @param timestamp
 * @param formatter
 * @return
 */
public static Calendar formattedTimestampToCalendar(String timestamp, DateTimeFormatter formatter) {
    long millis = 0;
    try {
        millis = formatter.parseMillis(timestamp);
        return fromMilliseconds(millis);
    } catch (IllegalArgumentException e) {
        // Turns out MYSQL timestamps can sometimes contain milliseconds, and sometimes not.
        // Regular Java date parsing is insensitive to that, but Joda refuses to parse with a
        // non-matching format. Hence, the ugliness below.
        // Formatters don't define a legit equals method, so we just check if they format the
        // current timestamp to the same string.
        long ts = System.currentTimeMillis();
        if (formatter.print(ts).equals(MYSQL_TIMESTAMP_FORMATTER.print(ts))) {
            return formattedTimestampToCalendar(timestamp, MYSQL_TIMESTAMP_FORMATTER_MILLIS);
        } else {
            // SUPPRESS CHECKSTYLE string multiple times
            LOG.debug("Could not parse date " + timestamp + " with dateformat " + formatter, e);
            return null;
        }
    }
}

From source file:com.twitter.elephanttwin.util.DateUtil.java

License:Apache License

/**
 * Parses a date using the given format.
 * Returns null in case of errors.
 */
public static Calendar fromString(String dateStr, DateTimeFormatter df) {
    try {
        return fromMilliseconds(df.parseMillis(dateStr));
    } catch (IllegalArgumentException e) {
        // SUPPRESS CHECKSTYLE string multiple times
        LOG.warn("Could not parse date " + dateStr + " with dateformat " + df, e);
        return null;
    }
}

From source file:edu.gslis.ts.DumpThriftData.java

License:Apache License

/**
 * @param infile the thrift file to read
 */
public void filter(File infile, String parser, GQuery gquery, CollectionStats bgstats) {
    try {
        InputStream in = null;

        if (infile.getName().endsWith(".gz"))
            in = new GZIPInputStream(new FileInputStream(infile));
        else if (infile.getName().endsWith("xz"))
            in = new XZInputStream(new FileInputStream(infile));
        else {
            System.err.println("Regular FileInputStream");
            in = new FileInputStream(infile);
        }

        TTransport inTransport = new TIOStreamTransport(new BufferedInputStream(in));
        TBinaryProtocol inProtocol = new TBinaryProtocol(inTransport);
        inTransport.open();

        try {
            // Run through items in the thrift file
            while (true) {
                final StreamItem item = new StreamItem();
                item.read(inProtocol);
                if (item.body == null || item.body.clean_visible == null) {
                    System.err.println("Body is null.");
                    continue;
                }

                String streamId = "";
                if (item.stream_id != null) {
                    streamId = item.stream_id;
                }

                String dateTime = "";
                long epochTime = 0;
                if (item.stream_time != null && item.stream_time.zulu_timestamp != null) {
                    dateTime = item.stream_time.zulu_timestamp;
                    DateTimeFormatter dtf = ISODateTimeFormat.dateTime();
                    epochTime = dtf.parseMillis(dateTime);
                }

                String source = "";
                if (item.source != null) {
                    source = item.source;
                }

                Map<String, List<Sentence>> parsers = item.body.sentences;
                List<Sentence> sentenceParser = parsers.get(parser);

                QueryDocScorer scorer = new ScorerDirichlet();
                scorer.setCollectionStats(bgstats);
                scorer.setQuery(gquery);

                List<Double> sentenceScores = new ArrayList<Double>();
                List<String> sentences = new ArrayList<String>();
                String sentencesText = "";
                if (sentenceParser != null && sentenceParser.size() > 0) {

                    for (Sentence s : sentenceParser) {
                        try {
                            List<Token> tokens = s.tokens;
                            String sentence = "";
                            for (Token token : tokens) {
                                String tok = token.token;
                                sentence += tok + " ";
                            }
                            FeatureVector sentenceVector = new FeatureVector(sentence, null);
                            SearchHit sentenceHit = new SearchHit();
                            sentenceHit.setFeatureVector(sentenceVector);
                            sentenceHit.setLength(sentenceVector.getLength());
                            double score = scorer.score(sentenceHit);

                            sentenceScores.add(score);
                            sentences.add(sentence);

                            sentencesText += sentence + "\n";
                        } catch (Exception e) {
                            System.err
                                    .println("Issue with sentence " + sentences.size() + " in doc " + streamId);
                            System.err.println("File: " + infile.getAbsolutePath());
                        }
                    }
                    SearchHit docHit = new SearchHit();
                    docHit.setFeatureVector(new FeatureVector(sentencesText, null));
                    double docscore = scorer.score(docHit);
                    for (int i = 0; i < sentenceScores.size(); i++) {
                        System.out.println(infile.getAbsolutePath() + "\t" + source + "\t" + epochTime + "\t"
                                + streamId + "\t" + docscore + "\t" + i + "\t" + sentenceScores.get(i) + "\t"
                                + sentences.get(i));
                    }
                } else if (sentenceParser == null) {
                    System.err.println("Sentence parser null");
                } else if (sentenceParser.size() == 0) {
                    System.err.println("Sentence length 0");
                } else {
                    System.err.println("Other sentence error.");
                }

            }
        } catch (TTransportException te) {
            // END_OF_FILE just signals the end of the stream; rethrow anything else.
            if (te.getType() != TTransportException.END_OF_FILE) {
                throw te;
            }
        }

        inTransport.close();

    } catch (Exception e) {
        System.err.println("Error processing " + infile.getAbsolutePath() + " " + infile.getName());
        e.printStackTrace();
    }
}
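
The epoch conversion above relies on ISODateTimeFormat.dateTime(), whose pattern (yyyy-MM-dd'T'HH:mm:ss.SSSZZ) matches full ISO-8601 strings like the zulu_timestamp values in the stream items. A minimal sketch with an illustrative input:

import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public class ZuluToEpoch {
    public static void main(String[] args) {
        // The trailing 'Z' fixes the offset, so the instant is unambiguous
        // regardless of the JVM's default time zone.
        DateTimeFormatter dtf = ISODateTimeFormat.dateTime();
        long epochTime = dtf.parseMillis("1970-01-01T00:00:01.000Z");
        System.out.println(epochTime); // 1000
    }
}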

From source file:edu.gslis.ts.ThriftToTREC.java

License:Apache License

/**
 * @param infile the thrift file to read
 */
public Map<String, String> filter(File infile, File outfile, String parser) {
    Map<String, String> results = new TreeMap<String, String>();
    try {
        InputStream in = null;

        if (infile.getName().endsWith(".gz"))
            in = new GZIPInputStream(new FileInputStream(infile));
        else if (infile.getName().endsWith("xz"))
            in = new XZInputStream(new FileInputStream(infile));
        else
            in = new FileInputStream(infile);

        TTransport inTransport = new TIOStreamTransport(new BufferedInputStream(in));
        TBinaryProtocol inProtocol = new TBinaryProtocol(inTransport);
        inTransport.open();

        OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream(outfile, false), "UTF-8");
        try {
            Charset charset = Charset.forName("UTF-8");
            CharsetDecoder decoder = charset.newDecoder();

            // Run through items in the thrift file
            while (true) {
                final StreamItem item = new StreamItem();
                item.read(inProtocol);
                if (item.body == null || item.body.clean_visible == null) {
                    continue;
                }

                String streamId = "";
                if (item.stream_id != null) {
                    streamId = item.stream_id;
                }

                String dateTime = "";
                long epochTime = 0;
                if (item.stream_time != null && item.stream_time.zulu_timestamp != null) {
                    dateTime = item.stream_time.zulu_timestamp;
                    DateTimeFormatter dtf = ISODateTimeFormat.dateTime();
                    epochTime = dtf.parseMillis(dateTime);
                }

                String source = "";
                if (item.source != null) {
                    source = item.source;
                }

                String url = "";
                if (item.abs_url != null) {
                    url = decoder.decode(item.abs_url).toString();
                }

                Map<String, List<Sentence>> parsers = item.body.sentences;
                List<Sentence> sentenceParser = parsers.get(parser);

                String sentencesText = "";
                int sentenceNum = 0;
                if (sentenceParser != null && sentenceParser.size() > 0) {

                    for (Sentence s : sentenceParser) {
                        List<Token> tokens = s.tokens;
                        String sentence = "";
                        for (Token token : tokens) {
                            String tok = token.token;
                            sentence += tok + " ";
                        }
                        sentencesText += sentenceNum + " " + sentence + "\n";
                        sentenceNum++;
                    }
                }

                try {
                    String hourDayDir = outfile.getName().replace(".txt", "");
                    out.write("<DOC>\n");
                    out.write("<DOCNO>" + streamId + "</DOCNO>\n");
                    out.write("<SOURCE>" + source + "</SOURCE>\n");
                    out.write("<URL>" + url + "</URL>\n");
                    out.write("<DATETIME>" + dateTime + "</DATETIME>\n");
                    out.write("<HOURDAYDIR>" + hourDayDir + "</HOURDAYDIR>\n");
                    out.write("<EPOCH>" + epochTime + "</EPOCH>\n");
                    out.write("<TEXT>\n" + sentencesText + "\n</TEXT>\n");
                    out.write("</DOC>\n");
                } catch (Exception e) {
                    System.out.println("Error processing " + infile.getAbsolutePath() + " " + item.stream_id);
                    e.printStackTrace();
                }

            }
        } catch (TTransportException te) {
            // END_OF_FILE just signals the end of the stream; rethrow anything else.
            if (te.getType() != TTransportException.END_OF_FILE) {
                throw te;
            }
        }
        inTransport.close();
        out.close();

    } catch (Exception e) {
        System.out.println("Error processing " + infile.getName());
        e.printStackTrace();
    }
    return results;
}

From source file:gov.noaa.pfel.coastwatch.Projects.java

License:Open Source License

/** 
 * This was used by Bob to convert the source NewportCTD .csv data into .nc files
 * suitable for ERDDAP EDDTableFromNcFiles.
 * <br>Lynn made the .csv files 2009-12-31.
 *    ftp://192.168.31.10/outgoing/ldewitt/Simons/northwest/
 * <br>Source URL http://192.168.31.13/cgi-bin/ERDserver/northwest.sql.das .
 */
public static void convertNewportCTD() throws Exception {
    String2.log("\n*** EDDTableFromNcFiles.convertNewportCTD");
    String sourceDir = "c:/data/rawSource/newportCTD2009-12/";
    String sourceCsv = "CTD_NH.csv";
    String sourceLatLon = "NH_Target_LatLong.csv";
    String destDir = "c:/u00/data/points/erdNewportCtd/";
    float mv = -9999;
    int factor = 10000;

    String dataColNames[] = String2
            .split("station_code, date,         station,      local_time,   depth_or_pressure, "
                    + "temperature,  salinity,     density,      fluorescence, project, " + "transect", ',');
    Class dataColTypes[] = { String.class, String.class, String.class, String.class, float.class, float.class,
            float.class, float.class, float.class, String.class, String.class };
    String dataUnits[] = { //date will be ...
            null, "seconds since 1970-01-01T00:00:00Z", null, null, "meters", "degree_C", "1e-3", "sigma",
            "volts", null, //1e-3 replaces PSU in CF std names 25
            null };

    Test.ensureEqual(dataColNames.length, dataColTypes.length, "dataColNames.length != dataColTypes.length");

    String latLonColNames[] = String2.split("line,        station,      latitude,    longitude,   transect",
            ',');
    //schema has double.class for lat, lon, but I think not necessary
    Class latLonColTypes[] = { String.class, String.class, float.class, float.class, String.class };

    //find timezone   America/Los_Angeles
    //String2.log(String2.toCSSVString(DateTimeZone.getAvailableIDs().toArray()));
    //Test.ensureTrue(false, "");

    //recursively delete any files in destDir 
    File2.deleteAllFiles(destDir, true, true);

    //read the data source file
    String2.log("\nreading the data source file");
    Table dataTable = new Table();
    dataTable.readASCII(sourceDir + sourceCsv, String2.readLinesFromFile(sourceDir + sourceCsv, null, 3), -1, 0,
            null, null, null, null, false); //don't simplify
    Test.ensureEqual(dataTable.nColumns(), dataColNames.length, "dataTable.nColumns() != dataColNames.length");
    String2.log("");

    //find bad rows?   there is 1, 48358, remove it
    PrimitiveArray depthPa = dataTable.getColumn(4);
    for (int row = 0; row < depthPa.size(); row++) {
        if (depthPa.getString(row).length() == 0) {
            String2.log("!!! row=" + row + " has no depth_or_pressure.  Removing it...");
            dataTable.removeRow(row);
        }
    }
    String2.log("");

    for (int col = 0; col < dataColNames.length; col++) {
        //set the column name
        dataTable.setColumnName(col, dataColNames[col]);

        //set the units 
        if (dataUnits[col] != null)
            dataTable.columnAttributes(col).set("units", dataUnits[col]);

        //change the columnType
        if (dataColTypes[col] != String.class) {
            PrimitiveArray pa = PrimitiveArray.factory(dataColTypes[col], 1, false);
            pa.append(dataTable.getColumn(col));
            dataTable.setColumn(col, pa);
        }

        //set data mv to mv (-9999)
        if (col >= 4 && col <= 8) {
            PrimitiveArray pa = dataTable.getColumn(col);
            String2.log(pa.switchFromTo("", "" + mv) + //the mv is packed, too
                    " " + dataColNames[col] + " values converted from '' to " + mv);
            if (col == 8) {
                //fluorescence has mv of -999999 and ""  
                //  and bruised double values (obviously originally floats)
                String2.log(pa.switchFromTo("-999999", "" + mv)
                        + " fluorescence values converted from -999999 to " + mv);
            }
            pa.scaleAddOffset(factor, 0);
            pa = new IntArray(pa);
            dataTable.setColumn(col, pa);
            dataTable.columnAttributes(col).set("missing_value", Math2.roundToInt(mv * factor)); //missing_value is packed, too
            dataTable.columnAttributes(col).set("scale_factor", 1 / (float) factor); //float
        }

        //convert "Ship of Opportu" 
        if (col == 9) {
            PrimitiveArray pa = dataTable.getColumn(col);
            String2.log(pa.switchFromTo("Ship of Opportu", "Ship of Opportunity")
                    + " project values converted from \"Ship of Opportu\".");
        }

        //convert transect "" to "Newport Hydrographic" ???
        if (col == 10) {
            PrimitiveArray pa = dataTable.getColumn(col);
            String2.log(pa.switchFromTo("", "Newport Hydrographic")
                    + " transect values converted from \"\" to \"Newport Hydrographic\".");
        }

    }

    //sort   (so all data for a given stationCode will be stored together)
    String2.log("\nsorting\n");
    dataTable.sort(new int[] { 0, 4 }, new boolean[] { true, true }); //stationCode, depth

    //make time (Z) from date and local_time "04/20/2007 12:00:00 AM,NH125,12/30/1899 12:04:00 AM"
    StringArray datePa = (StringArray) dataTable.findColumn("date");
    StringArray localTimePa = (StringArray) dataTable.findColumn("local_time");
    DoubleArray timePa = new DoubleArray();
    DateTimeFormatter dateTimeFormatter = DateTimeFormat.forPattern("MM/dd/yyyy hh:mm:ss aa")
            .withZone(DateTimeZone.forID("America/Los_Angeles"));
    for (int row = 0; row < datePa.size(); row++) {
        String tDate = datePa.get(row);
        if (tDate.length() == 0) {
            timePa.add(Double.NaN);
            continue;
        }
        Test.ensureEqual(tDate.substring(10), " 12:00:00 AM", "Unexpected date on row=" + row);
        String tLocal = localTimePa.get(row);
        if (tLocal.length() > 0) {
            Test.ensureEqual(tLocal.substring(0, 11), "12/30/1899 ",
                    "Unexpected local_time date on row=" + row);
            tDate = tDate.substring(0, 10) + tLocal.substring(10);
        }
        //Newport, OR is same time zone as Pacific Grove. so just use default local time zone.
        double sec = Math2.roundToDouble(dateTimeFormatter.parseMillis(tDate) / 1000.0); //timeInMillis is zulu time
        if (row == 0 || row == 6053)
            String2.log("time example: row=" + row + " \"" + tDate + "\" was converted to "
                    + Calendar2.safeEpochSecondsToIsoStringTZ(sec, ""));
        timePa.add(sec);
    }
    dataTable.setColumn(1, timePa);
    dataTable.setColumnName(1, "time");
    //remove local_time
    dataTable.removeColumn("local_time");

    //read the latLon file
    String2.log("\nreading the latLon source file");
    Table latLonTable = new Table();
    latLonTable.readASCII(sourceDir + sourceLatLon, -1, 0, null, null, null, null);
    Test.ensureEqual(latLonTable.nColumns(), latLonColNames.length,
            "latLonTable.nColumns() != latLonColNames.length");
    for (int col = 0; col < latLonColNames.length; col++) {
        //set the column name
        latLonTable.setColumnName(col, latLonColNames[col]);

        //change the columnType
        if (latLonColTypes[col] != String.class) {
            PrimitiveArray pa = PrimitiveArray.factory(latLonColTypes[col], 1, false);
            pa.append(latLonTable.getColumn(col));
            latLonTable.setColumn(col, pa);
        }
    }

    //make/insert lon lat columns
    String2.log("\nmake/insert lon lat columns");
    StringArray llLinePa = (StringArray) latLonTable.findColumn("line");
    StringArray llStationPa = (StringArray) latLonTable.findColumn("station");
    PrimitiveArray lla = latLonTable.findColumn("latitude");
    lla.scaleAddOffset(factor, 0);
    IntArray llLatPa = new IntArray(lla);
    lla = latLonTable.findColumn("longitude");
    lla.scaleAddOffset(factor, 0);
    IntArray llLonPa = new IntArray(lla);

    //add some missing stations   
    //(location calculated by interpolation - Roy said number is distance in km)
    for (int i = 0; i < 4; i++) {
        llLinePa.add("NH");
        llLatPa.add(446517);
    }
    llStationPa.add("NH02");
    llLonPa.add(-1241150);
    llStationPa.add("NH12");
    llLonPa.add(-1243416);
    llStationPa.add("NH30");
    llLonPa.add(-1247667);
    llStationPa.add("NH75");
    llLonPa.add(-1258250);

    StringArray newPlainStationPa = new StringArray();
    StringArray newLinePa = new StringArray();
    StringArray oldStationPa = (StringArray) dataTable.findColumn("station");
    IntArray newLatPa = new IntArray();
    IntArray newLonPa = new IntArray();

    String oPlainStation = "";
    for (int row = 0; row < oldStationPa.size(); row++) {
        String plainStation = oldStationPa.getString(row);
        //remove suffix letter
        while (String2.isLetter(plainStation.charAt(plainStation.length() - 1)))
            plainStation = plainStation.substring(0, plainStation.length() - 1);
        newPlainStationPa.add(plainStation);
        int po = llStationPa.indexOf(plainStation);
        Test.ensureTrue(po >= 0, "plainStation=" + plainStation + " not found starting on row=" + row);
        newLinePa.add(po < 0 ? "" : llLinePa.get(po));
        newLatPa.add(po < 0 ? Math2.roundToInt(mv * factor) : llLatPa.get(po));
        newLonPa.add(po < 0 ? Math2.roundToInt(mv * factor) : llLonPa.get(po));
        oPlainStation = plainStation;
    }
    dataTable.addColumn(3, "plain_station", newPlainStationPa,
            new Attributes().add("description", "The station without the suffix."));
    dataTable.addColumn(0, "line", newLinePa, new Attributes());
    dataTable.addColumn(1, "longitude", newLonPa,
            (new Attributes()).add("units", "degrees_east").add("scale_factor", 1 / (float) factor));
    dataTable.addColumn(2, "latitude", newLatPa,
            (new Attributes()).add("units", "degrees_north").add("scale_factor", 1 / (float) factor));

    String2.log("\ncolumnNames=" + String2.toCSSVString(dataTable.getColumnNames()) + "\n");

    //save in files
    StringArray oldStationCodePa = (StringArray) dataTable.findColumn("station_code");
    String lastStationCode = oldStationCodePa.get(0);
    int startRow = 0;
    int nRows = oldStationCodePa.size();
    for (int row = 0; row < nRows; row++) {
        if (row == nRows - 1 || !oldStationCodePa.get(row).equals(lastStationCode)) {
            int lastRow = row == nRows - 1 ? row : row - 1;
            Test.ensureTrue(oldStationPa.get(row).length() > 0, "row=" + row + " station=''");
            Test.ensureTrue(oldStationCodePa.get(row).length() > 0, "row=" + row + " oldStation=''");
            String eStation = String2.encodeFileNameSafe(oldStationPa.get(row));
            String eStationCode = String2.encodeFileNameSafe(oldStationCodePa.get(row));
            String fullName = destDir + eStation + "/" + eStationCode + ".nc";
            File2.makeDirectory(destDir + eStation + "/");

            Table table = new Table();
            for (int col = 0; col < dataTable.nColumns(); col++) {
                PrimitiveArray oldPa = dataTable.getColumn(col);
                PrimitiveArray newPa = PrimitiveArray.factory(oldPa.elementClass(), lastRow - startRow + 1,
                        false);
                for (int tRow = startRow; tRow <= lastRow; tRow++)
                    newPa.addString(oldPa.getString(tRow));
                table.addColumn(col, dataTable.getColumnName(col), newPa,
                        (Attributes) (dataTable.columnAttributes(col).clone()));
            }
            table.saveAsFlatNc(fullName, "row", false);

            if (startRow < 100 || row == nRows - 1)
                String2.log(table.toCSVString());
            //if (startRow > 100) Test.ensureTrue(false, "Evaluate the tables.");

            lastStationCode = oldStationCodePa.get(row);
            startRow = lastRow + 1;
        }
    }
    String2.log("Finished!");

}

From source file:gov.noaa.pfel.coastwatch.Projects.java

License:Open Source License

/** Convert FED Rockfish CTD .csv data files to .nc (from Lynn 2013-03-28
 * from /Volumes/PFEL_Shared_Space/PFEL_Share/Lynn2Bob/Rockfish_CTD.tar.gz).
 */
public static void convertRockfish20130328() throws Throwable {
    String2.log("\nProjects.convertRockfish20130328()");
    String dir = "C:/u00/data/points/rockfish20130328/";
    String outerName = "ERD_CTD_HEADER_2008_to_2011"; //.csv -> .nc
    String innerName = "ERD_CTD_CAST_2008_to_2011"; //.csv -> .nc

    //read the outer .csv files
    Table outer = new Table();
    outer.readASCII(dir + outerName + ".csv", 0, 1, null, null, null, null, false); //simplify
    Test.ensureEqual(outer.getColumnNamesCSVString(),
            "CRUISE,CTD_INDEX,CTD_NO,STATION,CTD_DATE,CTD_LAT,CTD_LONG,CTD_BOTTOM_DEPTH,BUCKET_TEMP,BUCKET_SAL,TS_TEMP,TS_SAL",
            "Unexpected outer column names");
    String2.log("outer (5 rows) as read:\n" + outer.dataToCSVString(5));

    //convert to short 
    String colNames[] = { "CTD_INDEX", "CTD_NO", "STATION", "CTD_BOTTOM_DEPTH" };
    for (int coli = 0; coli < colNames.length; coli++) {
        int col = outer.findColumnNumber(colNames[coli]);
        outer.setColumn(col, new ShortArray(outer.getColumn(col)));
    }

    //convert to floats 
    colNames = new String[] { "BUCKET_TEMP", "BUCKET_SAL", "TS_TEMP", "TS_SAL" };
    for (int coli = 0; coli < colNames.length; coli++) {
        int col = outer.findColumnNumber(colNames[coli]);
        outer.setColumn(col, new FloatArray(outer.getColumn(col)));
    }

    //convert date time "5/5/2008 9:10" to time   
    DateTimeFormatter dtf = DateTimeFormat.forPattern("M/d/yyyy H:mm")
            .withZone(DateTimeZone.forID("America/Los_Angeles"));
    //GMT: erddap/convert/time.html says "5/5/2008 19:10" = 1.2100146E9
    //  if 7 hours different in summer...
    Test.ensureEqual(dtf.parseMillis("5/5/2008 12:10") / 1000.0, 1.2100146E9, //erddap/convert/time.html
            "trouble with DateTimeFormatter");
    int nOuter = outer.nRows();
    {
        int col = outer.findColumnNumber("CTD_DATE");
        PrimitiveArray oldTimePA = outer.getColumn(col);
        DoubleArray newTimePA = new DoubleArray();
        for (int row = 0; row < nOuter; row++)
            newTimePA.add(dtf.parseMillis(oldTimePA.getString(row)) / 1000.0);
        outer.setColumn(col, newTimePA);
        outer.columnAttributes(col).set("units", "seconds since 1970-01-01T00:00:00Z");
    }

    //convert lat and lon from dddmm.mmmm to decimal degrees
    colNames = new String[] { "CTD_LAT", "CTD_LONG" };
    for (int coli = 0; coli < colNames.length; coli++) {
        int col = outer.findColumnNumber(colNames[coli]);
        PrimitiveArray pa = outer.getColumn(col);
        FloatArray fa = new FloatArray();
        Test.ensureEqual(Math.floor(1234.5 / 100.0) + (1234.5 % 100.0) / 60.0, 12.575, "");
        float scale = coli == 0 ? 1 : -1; //lon are originally degrees_west!
        for (int row = 0; row < nOuter; row++) {
            double d = pa.getDouble(row);
            if (d < 0)
                throw new SimpleException("d<0 requires more testing");
            fa.add(scale * Math2.doubleToFloatNaN(Math.floor(d / 100.0) + (d % 100.0) / 60.0));
        }
        outer.setColumn(col, fa);
    }

    //save the outer as .nc
    String2.log("outer (5 rows) before save:\n" + outer.toCSVString(5));
    outer.saveAsFlatNc(dir + outerName + ".nc", "row", false); //convertToFakeMissingValues

    //just keep the outer columns needed for inner table
    StringArray desired = StringArray.fromCSV("CRUISE,CTD_INDEX,CTD_NO,STATION,CTD_DATE,CTD_LAT,CTD_LONG");
    Test.ensureEqual(outer.reorderColumns(desired, true), desired.size(),
            "Not all desired columns were found.");

    //read inner table
    Table inner = new Table();
    inner.readASCII(dir + innerName + ".csv", 0, 1, null, null, null, null, false); //simplify
    Test.ensureEqual(inner.getColumnNamesCSVString(),
            "CRUISE,CTD_INDEX,CTD_DEPTH,TEMPERATURE,SALINITY,DENSITY,DYN_HGT,IRRAD,FLUOR_VOLT,TRANSMISSIVITY,CHLOROPHYLL,OXYGEN_VOLT,OXYGEN",
            "Unexpected inner column names");

    //convert to short 
    colNames = new String[] { "CTD_INDEX", "CTD_DEPTH" };
    for (int coli = 0; coli < colNames.length; coli++) {
        int col = inner.findColumnNumber(colNames[coli]);
        inner.setColumn(col, new ShortArray(inner.getColumn(col)));
    }

    //convert to floats 
    colNames = new String[] { "TEMPERATURE", "SALINITY", "DENSITY", "DYN_HGT", "IRRAD", "FLUOR_VOLT",
            "TRANSMISSIVITY", "CHLOROPHYLL", "OXYGEN_VOLT", "OXYGEN" };
    for (int coli = 0; coli < colNames.length; coli++) {
        int col = inner.findColumnNumber(colNames[coli]);
        inner.setColumn(col, new FloatArray(inner.getColumn(col)));
    }

    //add outer info to inner table
    inner.join(2, 0, "", outer); //nKeys, keyCol, String mvKey, Table lookUpTable

    //save inner table
    String2.log("inner (5 rows) before save:\n" + inner.toCSVString(5));
    inner.saveAsFlatNc(dir + innerName + ".nc", "row", false); //convertToFakeMissingValues

    String2.log("\n*** Projects.convertRockfish20130328() finished successfully");
}
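
Both NOAA conversions attach a time zone to the formatter, so parseMillis interprets local wall-clock text and still returns UTC epoch millis. A minimal sketch of that behavior, reusing the value the source itself asserts:

import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class ZonedParse {
    public static void main(String[] args) {
        // The text carries no zone, so the formatter's zone decides the instant:
        // 2008-05-05 12:10 Pacific (PDT, UTC-7) is 19:10 UTC.
        DateTimeFormatter local = DateTimeFormat.forPattern("M/d/yyyy H:mm")
                .withZone(DateTimeZone.forID("America/Los_Angeles"));
        System.out.println(local.parseMillis("5/5/2008 12:10") / 1000.0); // 1.2100146E9
    }
}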

From source file:io.prestosql.operator.scalar.DateTimeFunctions.java

License:Apache License

@ScalarFunction
@LiteralParameters({ "x", "y" })
@SqlType(StandardTypes.TIMESTAMP)
public static long dateParse(ConnectorSession session, @SqlType("varchar(x)") Slice dateTime,
        @SqlType("varchar(y)") Slice formatString) {
    DateTimeFormatter formatter = DATETIME_FORMATTER_CACHE.get(formatString)
            .withChronology(
                    session.isLegacyTimestamp() ? getChronology(session.getTimeZoneKey()) : UTC_CHRONOLOGY)
            .withLocale(session.getLocale());

    try {
        return formatter.parseMillis(dateTime.toStringUtf8());
    } catch (IllegalArgumentException e) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, e);
    }
}