List of usage examples for org.joda.time.format DateTimeFormatter print
public String print(ReadablePartial partial)
From source file:org.codice.ddf.spatial.ogc.wfs.v2_0_0.catalog.source.WfsFilterDelegate.java
License:Open Source License
/**
 * Converts a {@link Date} to its ISO 8601 representation at second precision
 * in UTC (e.g. {@code 2014-01-01T12:00:00Z}).
 *
 * @param inputDate the date to convert; must not be null
 * @return the ISO 8601 string for {@code inputDate} in UTC
 */
private String convertDateToIso8601Format(Date inputDate) {
    DateTimeFormatter dtf = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC);
    // print(long) already returns a String; the original's extra toString() was
    // redundant, and wrapping in a new DateTime is unnecessary — print(long)
    // produces the same output for the same instant.
    return dtf.print(inputDate.getTime());
}
From source file:org.codice.pubsub.server.SubscriptionServer.java
License:Open Source License
/**
 * Deserializes {@code msg} into a {@code SearchQueryMessage}, narrows its CQL query to
 * records modified between {@code timestamp} and (now - 1s), builds a filter from the
 * resulting CQL, and submits an asynchronous {@code QueryAndSend} task whose Future is
 * tracked in {@code processMap} under the subscription id.
 *
 * @param msg       JSON-serialized SearchQueryMessage
 * @param timestamp lower bound of the modified-time window (last poll time)
 */
public void runQuery(String msg, DateTime timestamp) {
    SearchQueryMessage queryMsg = null;
    try {
        queryMsg = new ObjectMapper().readValue(msg, SearchQueryMessage.class);
        /*
         * Set date / time filter to only get results from last time processor polled
         * Fetch content between last fetch and current time - 1 second (don't want to miss last millisecond updates).
         */
        String origQueryMsg = queryMsg.getQueryString();
        DateTimeFormatter fmt2 = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
        String after = fmt2.print(timestamp);
        DateTime newTime = DateTime.now().minusSeconds(1);
        String before = fmt2.print(newTime);
        // NOTE(review): the 'Z' in the pattern is a quoted literal, so the printed value
        // is labeled Zulu regardless of the DateTime's actual zone — confirm timestamps
        // handed to this method are UTC.
        String cqlSnippet = " AND modified BEFORE " + before + " AND modified AFTER " + after;
        LOGGER.debug("String to be processed: {}", origQueryMsg + cqlSnippet);
        queryMsg.setQueryString(origQueryMsg + cqlSnippet);
        //Build Query
        String cqlText = queryMsg.getQueryString();
        if (StringUtils.isNotEmpty(cqlText)) {
            Filter filter = null;
            try {
                filter = CQL.toFilter(cqlText);
                LOGGER.debug("CQL sets filter: {}", filter.toString());
            } catch (CQLException ce) {
                // NOTE(review): cause 'ce' is dropped here, and 'filter' remains null —
                // the filter.accept(...) call below will then throw NullPointerException,
                // because the null check only happens further down.
                LOGGER.error("Fatal error while trying to build CQL-based Filter from cqlText : " + cqlText);
            }
            // Run through CSW filter as it has better CQL support
            try {
                FilterVisitor f = new CswRecordMapperFilterVisitor();
                filter = (Filter) filter.accept(f, null);
            } catch (UnsupportedOperationException ose) {
                // NOTE(review): throw-and-immediately-catch is a roundabout way to log;
                // the CswException is never propagated.
                try {
                    throw new CswException(ose.getMessage(), CswConstants.INVALID_PARAMETER_VALUE, null);
                } catch (CswException cwe) {
                    LOGGER.error(cwe.getMessage());
                }
            }
            if (catalogFramework != null && filter != null) {
                LOGGER.trace("Catalog Frameowork: " + catalogFramework.getVersion());
                //Starts QueryAndSend in a thread
                QueryAndSend qasInst = queryAndSend.newInstance();
                qasInst.setEnterprise(DEFAULT_IS_ENTERPRISE);
                qasInst.setFilter(filter);
                qasInst.setSubscriptionId(queryMsg.getSubscriptionId());
                qasInst.setNewTime(newTime);
                Callable<QueryControlInfo> worker = qasInst;
                Future<QueryControlInfo> ctrlInfo = executor.submit(worker);
                processMap.put(queryMsg.getSubscriptionId(), ctrlInfo);
            }
        }
    } catch (IOException ex) {
        // NOTE(review): prefer LOGGER.error("...", ex) over message-only logging plus
        // printStackTrace(); the stack trace currently bypasses the logging framework.
        LOGGER.error("Issues processing incoming query subscription: " + ex.getMessage());
        ex.printStackTrace();
    }
}
From source file:org.craftercms.search.service.impl.DateTimeConverter.java
License:Open Source License
/**
 * Converts {@code value} from the configured incoming pattern
 * ({@code dateTimeFieldPattern}, interpreted as UTC) into an ISO 8601
 * date-time string with milliseconds.
 *
 * @param name  field name (not used by this converter)
 * @param value date/time string in the incoming pattern
 * @return the ISO 8601 representation of {@code value}
 */
@Override
public Object convert(String name, String value) {
    DateTimeFormatter incoming = DateTimeFormat.forPattern(dateTimeFieldPattern).withZoneUTC();
    return ISODateTimeFormat.dateTime().print(incoming.parseDateTime(value));
}
From source file:org.craftercms.search.service.impl.SolrDocumentBuilder.java
License:Open Source License
/**
 * Re-formats {@code dateTimeStr} from the configured
 * {@code dateTimeFieldPattern} into an ISO 8601 date-time string.
 *
 * @param dateTimeStr date/time string in the configured incoming pattern
 * @return the ISO 8601 representation of {@code dateTimeStr}
 */
protected String convertToISODateTimeString(String dateTimeStr) {
    DateTimeFormatter parser = DateTimeFormat.forPattern(dateTimeFieldPattern);
    return ISODateTimeFormat.dateTime().print(parser.parseDateTime(dateTimeStr));
}
From source file:org.dashbuilder.dataprovider.backend.elasticsearch.ElasticSearchValueTypeMapper.java
License:Apache License
/**
 * Formats {@code date} for the given column, using the column's configured
 * pattern when one is set, or the default date printer otherwise.
 *
 * @param definition data set definition holding per-column patterns
 * @param columnId   id of the column whose pattern applies
 * @param date       the date to format
 * @return the formatted date, or an empty string when {@code date} is null
 */
public String formatDate(ElasticSearchDataSetDef definition, String columnId, Date date) {
    if (date == null) {
        return "";
    }
    String columnPattern = definition.getPattern(columnId);
    DateTimeFormatter printer;
    if (isEmpty(columnPattern) || columnPattern.equalsIgnoreCase(DATE_DEFAULT_FORMAT_KEY)) {
        // No usable custom pattern — fall back to the shared default printer.
        printer = DATE_DEFAULT_FORMAT_PRINTER;
    } else {
        printer = DateTimeFormat.forPattern(columnPattern);
    }
    return printer.print(date.getTime());
}
From source file:org.dashbuilder.dataprovider.backend.elasticsearch.rest.client.impl.jest.ElasticSearchJestClient.java
License:Apache License
/** * Formats the given value in a String type in order to send the JSON query body to the EL server. *//* w w w .ja v a 2s . co m*/ public static String formatValue(String columnId, ElasticSearchDataSetMetadata metadata, Object value) { if (value == null) return null; ColumnType columnType = metadata.getColumnType(columnId); if (ColumnType.DATE.equals(columnType)) { DateTimeFormatter formatter = null; String pattern = metadata.getFieldPattern(columnId); if (pattern == null || pattern.trim().length() == 0) { // If no custom pattern for date field, use the default by EL -> org.joda.time.format.ISODateTimeFormat#dateOptionalTimeParser formatter = ElasticSearchDataSetProvider.EL_DEFAULT_DATETIME_FORMATTER; } else { // Obtain the date value by parsing using the EL pattern specified for this field. formatter = DateTimeFormat.forPattern(pattern); } return formatter.print(((Date) value).getTime()); } else if (ColumnType.NUMBER.equals(columnType)) { return Double.toString((Double) value); } return value.toString(); }
From source file:org.dataconservancy.mhf.model.builder.xstream.AttributeFormatUtil.java
License:Apache License
public String formatDateTime(DateTime dateTime, String pattern) throws IllegalArgumentException { String formattedDateTime = null; try {// w w w. ja v a 2 s.c om DateTimeFormatter previewDateTimeFormatter = DateTimeFormat.forPattern(pattern); formattedDateTime = previewDateTimeFormatter.print(dateTime); } catch (IllegalArgumentException except) { log.warn("Could not parse GQM date time string: " + except); throw except; } return formattedDateTime; }
From source file:org.dataconservancy.ui.api.FileController.java
License:Apache License
/**
 * Accepts the parameters as specified below and returns the content of the file with
 * matching id that qualifies the criteria specified in the request headers.
 *
 * @param idpart        id string that uniquely identifies the requested file
 * @param mimeType      acceptable type of the response's content ("Accept" header)
 * @param modifiedSince only return the file if it has been modified since this date
 *                      ("If-Modified-Since" header); otherwise the request is rejected
 * @param request       the http request
 * @param resp          the http response written to by this handler
 * @throws IOException when writing to the {@link HttpServletResponse} fails
 */
@RequestMapping(value = "/{idpart}", method = { RequestMethod.GET })
public void handleFileGetRequest(@PathVariable String idpart,
        @RequestHeader(value = "Accept", required = false) String mimeType,
        @RequestHeader(value = "If-Modified-Since", required = false) @DateTimeFormat(iso = DATE_TIME) Date modifiedSince,
        HttpServletRequest request, HttpServletResponse resp) throws IOException {
    // this impl does not require authz at the moment, but need a user for the fileBizService getFile method
    // ok if null
    Person user = getAuthenticatedUser();
    // Get file id from the request
    String id = requestUtil.buildRequestUrl(request);
    // Resolve the Request URL to the ID of the file (in this case URL == ID)
    if (id == null || id.trim().isEmpty()) {
        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    }
    try {
        archiveService.pollArchive();
        // Get file's last modified date via the fileBizService if it is a DataFile, or
        // via the metadataFileBizService if it is a MetadataFile
        DateTime lastModifiedDate = (fileBizService.getLastModifiedDate(id) != null)
                ? fileBizService.getLastModifiedDate(id)
                : metadataFileBizService.getLastModifiedDate(id);
        // Handle if-modified-since header
        if (failIfModifiedSinceHeader(request, resp, modifiedSince, lastModifiedDate)) {
            return;
        }
        // Get file via fileBizService if it is a DataFile, or
        // via the metadataFileBizService if it is a MetadataFile
        // NOTE(review): getFile is invoked twice (null probe + assignment); a single
        // call stored in a local would halve the lookups.
        DataFile file = null;
        if (fileBizService.getFile(id, user) != null) {
            file = fileBizService.getFile(id, user);
        } else {
            file = metadataFileBizService.retrieveMetadataFile(id);
        }
        //If file is not found
        if (file == null) {
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, "No file matching this id " + id + " was found.");
        } else {
            //if file is found
            String fileMimeType = file.getFormat();
            if (fileMimeType == null) {
                fileMimeType = "application/octet-stream";
            }
            //Handling mimeType header
            if (failAcceptHeader(request, resp, mimeType, fileMimeType)) {
                return;
            }
            //Set the Content-Length
            // NOTE(review): the int cast truncates sizes above Integer.MAX_VALUE;
            // files larger than ~2GB would advertise a wrong Content-Length.
            resp.setContentLength((int) file.getSize());
            resp.setStatus(HttpStatus.SC_OK);
            //Calculate ETAG
            resp.setHeader(ETAG, ETagCalculator.calculate(Integer.toString(file.hashCode())));
            // NOTE(review): this formatter uses the JVM default locale and zone; the
            // HTTP Last-Modified header expects English names and GMT — confirm the
            // deployment environment guarantees that, or pin locale/zone explicitly.
            DateTimeFormatter fmt = new DateTimeFormatterBuilder().appendPattern("EEE, dd MMM yyyy HH:mm:ss Z")
                    .toFormatter();
            String rfcDate = fmt.print(lastModifiedDate);
            resp.setHeader(LAST_MODIFIED, rfcDate);
            //Set Content-Disposition
            resp.setHeader(CONTENT_DISPOSITION,
                    getResponseContentDispositionString("\"" + file.getName() + "\"", file.getSize()));
            //Set Content-Type
            resp.setContentType(fileMimeType);
            // NOTE(review): 'is' is not closed if IOUtils.copy throws — wrap in
            // try-with-resources to avoid leaking the stream.
            InputStream is = new URL(file.getSource()).openStream();
            IOUtils.copy(is, resp.getOutputStream());
            is.close();
        }
    } catch (BizPolicyException be) {
        handleException(be, request, resp);
    } catch (ArchiveServiceException ae) {
        handleException(ae, request, resp);
    } catch (RelationshipConstraintException re) {
        handleException(re, request, resp);
    }
}
From source file:org.datavec.api.transform.ui.HtmlAnalysis.java
License:Apache License
/**
 * Renders a {@code DataAnalysis} as an HTML report: a summary table of all columns
 * plus one histogram div per column type that provides histogram data, processed
 * through the Freemarker template {@code /templates/analysis.ftl}.
 *
 * @param analysis the analysis to render
 * @return the rendered HTML page as a string
 * @throws Exception on template-loading or template-processing failure
 */
public static String createHtmlAnalysisString(DataAnalysis analysis) throws Exception {
    Configuration cfg = new Configuration(new Version(2, 3, 23));
    // Where do we load the templates from:
    cfg.setClassForTemplateLoading(HtmlAnalysis.class, "/templates/");
    // Some other recommended settings:
    cfg.setIncompatibleImprovements(new Version(2, 3, 23));
    cfg.setDefaultEncoding("UTF-8");
    cfg.setLocale(Locale.US);
    cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
    Map<String, Object> input = new HashMap<>();
    ObjectMapper ret = new ObjectMapper();
    ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
    ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
    ret.enable(SerializationFeature.INDENT_OUTPUT);
    List<ColumnAnalysis> caList = analysis.getColumnAnalysis();
    Schema schema = analysis.getSchema();
    int n = caList.size();
    String[][] table = new String[n][3];
    List<DivObject> divs = new ArrayList<>();
    List<String> histogramDivNames = new ArrayList<>();
    for (int i = 0; i < n; i++) {
        ColumnAnalysis ca = caList.get(i);
        String name = schema.getName(i); //namesList.get(i);
        ColumnType type = schema.getType(i);
        table[i][0] = name;
        table[i][1] = type.toString();
        table[i][2] = ca.toString().replaceAll(",", ", "); //Hacky work-around to improve display in HTML table
        double[] buckets;
        long[] counts;
        // Only String/Integer/Long/Double analyses expose histogram data here;
        // Categorical/Time/Bytes columns get no histogram.
        switch (type) {
        case String:
            StringAnalysis sa = (StringAnalysis) ca;
            buckets = sa.getHistogramBuckets();
            counts = sa.getHistogramBucketCounts();
            break;
        case Integer:
            IntegerAnalysis ia = (IntegerAnalysis) ca;
            buckets = ia.getHistogramBuckets();
            counts = ia.getHistogramBucketCounts();
            break;
        case Long:
            LongAnalysis la = (LongAnalysis) ca;
            buckets = la.getHistogramBuckets();
            counts = la.getHistogramBucketCounts();
            break;
        case Double:
            DoubleAnalysis da = (DoubleAnalysis) ca;
            buckets = da.getHistogramBuckets();
            counts = da.getHistogramBucketCounts();
            break;
        case Categorical:
        case Time:
        case Bytes:
            buckets = null;
            counts = null;
            break;
        default:
            throw new RuntimeException("Invalid/unknown column type: " + type);
        }
        if (buckets != null) {
            RenderableComponentHistogram.Builder histBuilder = new RenderableComponentHistogram.Builder();
            // NOTE(review): reading buckets[j + 1] assumes buckets has
            // counts.length + 1 entries (bin edges) — confirm against the
            // analysis implementations.
            for (int j = 0; j < counts.length; j++) {
                histBuilder.addBin(buckets[j], buckets[j + 1], counts[j]);
            }
            histBuilder.margins(60, 60, 90, 20);
            RenderableComponentHistogram hist = histBuilder.title(name).build();
            // Strip non-word characters so the name is a usable HTML div id.
            String divName = "histdiv_" + name.replaceAll("\\W", "");
            divs.add(new DivObject(divName, ret.writeValueAsString(hist)));
            histogramDivNames.add(divName);
        }
    }
    //Create the summary table
    RenderableComponentTable rct = new RenderableComponentTable.Builder().table(table)
            .header("Column Name", "Column Type", "Column Analysis").backgroundColor("#FFFFFF")
            .headerColor("#CCCCCC").colWidthsPercent(20, 10, 70).border(1).padLeftPx(4).padRightPx(4).build();
    divs.add(new DivObject("tablesource", ret.writeValueAsString(rct)));
    input.put("divs", divs);
    input.put("histogramIDs", histogramDivNames);
    //Current date/time, UTC
    // NOTE(review): Joda 'Y' is year-of-era — identical to 'y' for CE dates,
    // but 'y' is the conventional choice for a calendar year.
    DateTimeFormatter formatter = DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss zzz")
            .withZone(DateTimeZone.UTC);
    long currTime = System.currentTimeMillis();
    String dateTime = formatter.print(currTime);
    input.put("datetime", dateTime);
    Template template = cfg.getTemplate("analysis.ftl");
    //Process template to String
    Writer stringWriter = new StringWriter();
    template.process(input, stringWriter);
    return stringWriter.toString();
}
From source file:org.datavec.api.transform.ui.HtmlSequencePlotting.java
License:Apache License
/**
 * Create a HTML file with plots for the given sequence.
 *
 * Renders a schema summary table plus one line chart per plottable column
 * (numeric, time, and categorical columns), processed through the Freemarker
 * template {@code /templates/sequenceplot.ftl}.
 *
 * @param title    Title of the page
 * @param schema   Schema for the data
 * @param sequence Sequence to plot
 * @return HTML file as a string
 * @throws Exception on template-loading or template-processing failure
 */
public static String createHtmlSequencePlots(String title, Schema schema, List<List<Writable>> sequence)
        throws Exception {
    Configuration cfg = new Configuration(new Version(2, 3, 23));
    // Where do we load the templates from:
    cfg.setClassForTemplateLoading(HtmlSequencePlotting.class, "/templates/");
    // Some other recommended settings:
    cfg.setIncompatibleImprovements(new Version(2, 3, 23));
    cfg.setDefaultEncoding("UTF-8");
    cfg.setLocale(Locale.US);
    cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
    Map<String, Object> input = new HashMap<>();
    input.put("pagetitle", title);
    ObjectMapper ret = new ObjectMapper();
    ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    ret.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
    ret.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
    ret.enable(SerializationFeature.INDENT_OUTPUT);
    List<DivObject> divs = new ArrayList<>();
    List<String> divNames = new ArrayList<>();
    //First: create table for schema
    int n = schema.numColumns();
    String[][] table = new String[n / 2 + n % 2][6]; //Number, name, type; 2 columns
    List<ColumnMetaData> meta = schema.getColumnMetaData();
    for (int i = 0; i < meta.size(); i++) {
        // Columns alternate between the left (o=0) and right (o=1) halves of the table row.
        int o = i % 2;
        table[i / 2][o * 3] = String.valueOf(i);
        table[i / 2][o * 3 + 1] = meta.get(i).getName();
        table[i / 2][o * 3 + 2] = meta.get(i).getColumnType().toString();
    }
    // Blank out any unfilled cells (odd column counts leave the last right half null).
    for (int i = 0; i < table.length; i++) {
        for (int j = 0; j < table[i].length; j++) {
            if (table[i][j] == null) {
                table[i][j] = "";
            }
        }
    }
    RenderableComponentTable rct = new RenderableComponentTable.Builder().table(table)
            .header("#", "Name", "Type", "#", "Name", "Type").backgroundColor("#FFFFFF").headerColor("#CCCCCC")
            .colWidthsPercent(8, 30, 12, 8, 30, 12).border(1).padLeftPx(4).padRightPx(4).build();
    divs.add(new DivObject("tablesource", ret.writeValueAsString(rct)));
    //Create the plots
    double[] x = new double[sequence.size()];
    for (int i = 0; i < x.length; i++) {
        x[i] = i;
    }
    for (int i = 0; i < n; i++) {
        double[] lineData;
        switch (meta.get(i).getColumnType()) {
        case Integer:
        case Long:
        case Double:
        case Float:
        case Time:
            lineData = new double[sequence.size()];
            for (int j = 0; j < lineData.length; j++) {
                lineData[j] = sequence.get(j).get(i).toDouble();
            }
            break;
        case Categorical:
            //This is a quick-and-dirty way to plot categorical variables as a line chart
            // NOTE(review): indexOf returns -1 for a state not present in
            // stateNames, so unknown states plot at y = -1.
            List<String> stateNames = ((CategoricalMetaData) meta.get(i)).getStateNames();
            lineData = new double[sequence.size()];
            for (int j = 0; j < lineData.length; j++) {
                String state = sequence.get(j).get(i).toString();
                int idx = stateNames.indexOf(state);
                lineData[j] = idx;
            }
            break;
        case Bytes:
        case String:
        case Boolean:
        default:
            //Skip
            continue;
        }
        String name = meta.get(i).getName();
        String chartTitle = "Column: \"" + name + "\" - Column Type: " + meta.get(i).getColumnType();
        if (meta.get(i).getColumnType() == ColumnType.Categorical) {
            // Append a legend mapping each y-value index to its state name.
            List<String> stateNames = ((CategoricalMetaData) meta.get(i)).getStateNames();
            StringBuilder sb = new StringBuilder(chartTitle);
            sb.append(" - (");
            for (int j = 0; j < stateNames.size(); j++) {
                if (j > 0) {
                    sb.append(", ");
                }
                sb.append(j).append("=").append(stateNames.get(j));
            }
            sb.append(")");
            chartTitle = sb.toString();
        }
        RenderableComponentLineChart lc = new RenderableComponentLineChart.Builder().title(chartTitle)
                .addSeries(name, x, lineData).build();
        String divname = "plot_" + i;
        divs.add(new DivObject(divname, ret.writeValueAsString(lc)));
        divNames.add(divname);
    }
    input.put("divs", divs);
    input.put("divnames", divNames);
    //Current date/time, UTC
    // NOTE(review): Joda 'Y' is year-of-era — same as 'y' for CE dates, but
    // 'y' is the conventional pattern letter for a calendar year.
    DateTimeFormatter formatter = DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss zzz")
            .withZone(DateTimeZone.UTC);
    long currTime = System.currentTimeMillis();
    String dateTime = formatter.print(currTime);
    input.put("datetime", dateTime);
    Template template = cfg.getTemplate("sequenceplot.ftl");
    //Process template to String
    Writer stringWriter = new StringWriter();
    template.process(input, stringWriter);
    return stringWriter.toString();
}