List of usage examples for java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME
DateTimeFormatter.ISO_OFFSET_DATE_TIME
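As a quick orientation before the project examples, here is a minimal, self-contained sketch of the constant itself. DateTimeFormatter.ISO_OFFSET_DATE_TIME formats and parses a date-time together with its UTC offset, such as 2011-12-03T10:15:30+01:00; the class name and sample values below are illustrative only and are not taken from any of the projects listed.

import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class IsoOffsetDateTimeDemo {
    public static void main(String[] args) {
        // Formatting: renders the date-time together with its offset, e.g. 2024-05-01T10:15:30+02:00
        OffsetDateTime now = OffsetDateTime.now();
        System.out.println(now.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));

        // Parsing: the same formatter reads the offset back, so no offset information is lost
        OffsetDateTime parsed = OffsetDateTime.parse("2011-12-03T10:15:30+01:00",
                DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        System.out.println(parsed.withOffsetSameInstant(ZoneOffset.UTC)); // 2011-12-03T09:15:30Z
    }
}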
From source file:com.inqool.dcap.office.indexer.indexer.SolrBulkIndexer.java
private SolrInputDocument recursivelyIndex(final ModelTreeNode data) throws IOException {
    ZdoModel model;
    model = data.getModel();
    if (model == null) {
        return null;
    }
    // if (!model.isIndexable()) {
    //     logger.debug("Resource: {} retrieved without indexable type.", uri);
    //     return null;
    // }
    logger.debug("Resource: {} retrieved with indexable type.", store.removeTransactionFromUrl(model.getUrl()));
    if (!allowableTypes.contains(model.get(ZdoTerms.zdoType))) {
        return null;
    }
    if (!ZdoGroup.ZDO.name().equals(model.get(ZdoTerms.group))) {
        logger.info("Not indexing this document as it is not published.");
        return null;
    }
    final SolrInputDocument inputDoc = modelToSolrInputDoc(model);
    // inputDoc.addField("datePublished", OffsetDateTime.now().withOffsetSameInstant(ZoneOffset.UTC).format(DateTimeFormatter.ISO_DATE_TIME));
    String datePublished = model.get(ZdoTerms.datePublished);
    if (datePublished != null) {
        //If reindexing, we just read data about when it was originally published from Fedora
        inputDoc.addField("datePublished", datePublished);
    } else {
        datePublished = LocalDateTime.now().atZone(ZoneOffset.systemDefault())
                .withZoneSameInstant(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        inputDoc.addField("datePublished", datePublished); //solr needs UTC time
        ZdoModel patchmodel = new ZdoModel();
        patchmodel.setUrl(model.getUrl());
        patchmodel.add(ZdoTerms.datePublished, datePublished);
        store.patchMetadata(patchmodel);
    }
    //Get all children's uris, parse them recursively, and add them to result
    //If we are an almost-leaf node, also search for children bound on the original object
    String originalObjectUrl = model.get(ZdoTerms.kdrObject);
    if (!ZdoType.isBranchEndCategory(model.get(ZdoTerms.zdoType))) {
        for (ModelTreeNode child : data.getChildren()) {
            SolrInputDocument childDoc = recursivelyIndex(child);
            if (childDoc != null) {
                inputDoc.addChildDocument(childDoc);
            }
        }
    } else { //we are end branch category
        //Treat born digital documents differently as they don't have pages but whole PDF
        if (ZdoType.bornDigital.name().equals(model.get(ZdoTerms.zdoType))) {
            //Retrieve the usercopy - PDF
            String queryString = "SELECT ?userCopy ?thumb WHERE {\n"
                    + "?userCopy <http://purl.org/dc/terms/isPartOf> <" + originalObjectUrl + ">.\n"
                    + "?userCopy <" + ZdoTerms.zdoType.getURI() + "> \"" + ZdoType.binary.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "?userCopy <" + ZdoTerms.fileType.getURI() + "> \"" + ZdoFileType.userCopy.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "}";
            QueryExecution queryExecution = QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT, queryString);
            ResultSet resultSet = queryExecution.execSelect();
            if (resultSet.hasNext()) {
                QuerySolution querySolution = resultSet.next();
                String userCopyUrl = querySolution.getResource("userCopy").getURI();
                inputDoc.addField("pdfId", store.getOnlyIdFromUrl(userCopyUrl));
            } else {
                throw new RuntimeException("Damn this pdf has no pdf or thumbnail.");
            }
        } else { //Other than born-digital branch end node
            //These are to sort pages based on their index
            SortedMap<Integer, String> imageMap = new TreeMap<>();
            SortedMap<Integer, String> thumbMap = new TreeMap<>();
            SortedMap<Integer, String> txtMap = new TreeMap<>();
            SortedMap<Integer, String> altoMap = new TreeMap<>();
            String videoUrl = null;
            //Retrieve image, thumbnail and ocr text info
            String queryString = "SELECT ?pageIndex ?userCopy ?ucMime ?thumb ?txt ?alto WHERE {\n"
                    //first find pages - children of the node
                    + "?page <" + ZdoTerms.zdoType.getURI() + "> \"" + ZdoType.page.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "?page <http://purl.org/dc/terms/isPartOf> <" + originalObjectUrl + ">.\n"
                    + "?page <" + ZdoTerms.pageIndex.getURI() + "> ?pageIndex.\n"
                    + "OPTIONAL {\n"
                    //then children of those pages that are binary usercopy images
                    + "?userCopy <http://purl.org/dc/terms/isPartOf> ?page.\n"
                    + "?userCopy <" + ZdoTerms.zdoType.getURI() + "> \"" + ZdoType.binary.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "?userCopy <" + ZdoTerms.fileType.getURI() + "> \"" + ZdoFileType.userCopy.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "?userCopy <" + ZdoTerms.mimeType.getURI() + "> ?ucMime.\n"
                    + "}\nOPTIONAL {\n"
                    //and their thumbnails
                    + "?thumb <http://purl.org/dc/terms/isPartOf> ?page.\n"
                    + "?thumb <" + ZdoTerms.zdoType.getURI() + "> \"" + ZdoType.binary.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "?thumb <" + ZdoTerms.fileType.getURI() + "> \"" + ZdoFileType.thumb.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "}\nOPTIONAL {\n"
                    //and also children of those pages that are binary text
                    + "?txt <http://purl.org/dc/terms/isPartOf> ?page.\n"
                    + "?txt <" + ZdoTerms.zdoType.getURI() + "> \"" + ZdoType.binary.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "?txt <" + ZdoTerms.fileType.getURI() + "> \"" + ZdoFileType.txt.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "}\nOPTIONAL {\n"
                    //and also alto children with ocr text
                    + "?alto <http://purl.org/dc/terms/isPartOf> ?page.\n"
                    + "?alto <" + ZdoTerms.zdoType.getURI() + "> \"" + ZdoType.binary.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "?alto <" + ZdoTerms.fileType.getURI() + "> \"" + ZdoFileType.alto.name() + "\"^^<http://www.w3.org/2001/XMLSchema#string>.\n"
                    + "}\n}";
            QueryExecution queryExecution = QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT, queryString);
            ResultSet resultSet = queryExecution.execSelect();
            while (resultSet.hasNext()) {
                QuerySolution querySolution = resultSet.next();
                Integer pageIndex = Integer.valueOf(querySolution.getLiteral("pageIndex").getString());
                Resource userCopyResource = querySolution.getResource("userCopy");
                if (userCopyResource != null) {
                    String userCopyUrl = userCopyResource.getURI();
                    if (userCopyUrl != null) {
                        if ("video/mp4".equals(querySolution.getLiteral("ucMime").getString())) {
                            if (videoUrl != null) {
                                logger.error("More than one video per document encountered. There can only be one.");
                            }
                            videoUrl = userCopyUrl;
                        } else {
                            imageMap.put(pageIndex, userCopyUrl);
                        }
                    }
                }
                Resource thumbnailResource = querySolution.getResource("thumb");
                if (thumbnailResource != null) {
                    String thumbUrl = thumbnailResource.getURI();
                    if (thumbUrl != null) {
                        thumbMap.put(pageIndex, thumbUrl);
                    }
                }
                Resource txtResource = querySolution.getResource("txt");
                if (txtResource != null) {
                    String txtUrl = txtResource.getURI();
                    if (txtUrl != null) {
                        txtMap.put(pageIndex, txtUrl);
                    }
                }
                Resource altoResource = querySolution.getResource("alto");
                if (altoResource != null) {
                    String altoUrl = altoResource.getURI();
                    if (altoUrl != null) {
                        altoMap.put(pageIndex, altoUrl);
                    }
                }
            }
            if (videoUrl != null) {
                inputDoc.addField("videoId", store.getOnlyIdFromUrl(videoUrl));
            }
            List<String> imageIds = new ArrayList<>();
            if (!imageMap.isEmpty()) {
                for (String userCopyUrl : imageMap.values()) {
                    imageIds.add(store.getOnlyIdFromUrl(userCopyUrl));
                }
                inputDoc.addField("imageIds", imageIds);
            }
            if (!thumbMap.isEmpty()) {
                List<String> thumbIds = new ArrayList<>();
                for (String thumbUrl : thumbMap.values()) {
                    thumbIds.add(store.getOnlyIdFromUrl(thumbUrl));
                }
                inputDoc.addField("thumbIds", thumbIds);
            }
            List<String> txtIds = new ArrayList<>();
            if (!txtMap.isEmpty()) {
                String fulltext = "";
                for (String txtUrl : txtMap.values()) {
                    txtIds.add(store.getOnlyIdFromUrl(txtUrl));
                    InputStream in = new URL(txtUrl).openStream();
                    StringWriter writer = new StringWriter();
                    IOUtils.copy(in, writer, "utf-8");
                    String text = writer.toString();
                    fulltext += text + " ";
                }
                inputDoc.addField("fullText", fulltext.trim());
            }
            List<String> altoIds = new ArrayList<>();
            if (!altoMap.isEmpty()) {
                for (String altoUrl : altoMap.values()) {
                    altoIds.add(store.getOnlyIdFromUrl(altoUrl));
                }
            }
            ZdoModel kdrObject = store.get(model.get(ZdoTerms.kdrObject));
            String origPdfUrl = kdrObject.get(ZdoTerms.pdfUrl);
            String origEpubUrl = kdrObject.get(ZdoTerms.epubUrl);
            ZdoModel patchModel = new ZdoModel(); //Used to add new pdf and epub data to Fedora
            patchModel.setUrl(model.get(ZdoTerms.kdrObject));
            if ("true".equals(model.get(ZdoTerms.allowPdfExport)) && !imageIds.isEmpty()) {
                if (origPdfUrl == null) {
                    String pdfId = UUID.randomUUID().toString();
                    patchModel.add(ZdoTerms.pdfUrl, store.createUrl(pdfId));
                    String orgId = model.get(ZdoTerms.organization);
                    String watermarkId = null;
                    if ("true".equals(model.get(ZdoTerms.watermark))) {
                        watermarkId = organizationSettingsAccess.fetchOrgWatermark(orgId);
                        if (watermarkId == null) {
                            watermarkId = portalSettingsAccess.fetchPortalSettings().getWatermarkId();
                        }
                    }
                    PdfCreatorDto pdfCreatorDto = new PdfCreatorDto(pdfId, imageIds, altoIds, watermarkId,
                            model.get(ZdoTerms.watermarkPosition));
                    Response response = ClientBuilder.newClient().target(IP_ENDPOINT + "pdf").request()
                            .post(Entity.json(pdfCreatorDto));
                    if (response.getStatusInfo().getFamily() != Response.Status.Family.SUCCESSFUL) {
                        throw new RuntimeException("Failed to call pdf creator in image processing war.");
                    }
                    inputDoc.addField("pdfId", pdfId);
                } else { //When reindexing, pdf already exists
                    inputDoc.addField("pdfId", store.getOnlyIdFromUrl(origPdfUrl));
                }
            }
            if ("true".equals(model.get(ZdoTerms.allowEpubExport)) && !txtIds.isEmpty()) {
                if (origEpubUrl == null) {
                    String epubId = UUID.randomUUID().toString();
                    patchModel.add(ZdoTerms.epubUrl, store.createUrl(epubId));
                    epubCreator.createBook(epubId, model.get(DCTerms.title), model.get(DCTerms.creator), txtIds);
                    inputDoc.addField("epubId", epubId);
                } else {
                    inputDoc.addField("epubId", store.getOnlyIdFromUrl(origEpubUrl));
                }
            }
            store.patchMetadata(patchModel); //warning, this does not go to triplestore
        }
    }
    logger.debug("Executing update of: {}...", store.removeTransactionFromUrl(model.getUrl()));
    return inputDoc;
}
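The ISO_OFFSET_DATE_TIME usage in the indexer above reduces to one conversion chain: take local "now", attach the system zone, shift it to UTC, and render it with an explicit offset so Solr receives a UTC timestamp. A standalone sketch of just that chain follows; the class and variable names are made up for illustration.

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class UtcPublishDate {
    public static void main(String[] args) {
        String datePublished = LocalDateTime.now()
                .atZone(ZoneId.systemDefault())            // interpret local time in the system zone
                .withZoneSameInstant(ZoneOffset.UTC)       // same instant, expressed in UTC
                .format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        System.out.println(datePublished);                 // e.g. 2024-05-01T08:15:30Z
    }
}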
From source file:io.stallion.utils.GeneralUtils.java
@Deprecated
public static String formatLocalDateFromZonedDate(ZonedDateTime date, String formatPattern) {
    if (date == null) {
        return "";
    }
    ZonedDateTime localDt = date.withZoneSameInstant(Context.getSettings().getTimeZoneId());
    DateTimeFormatter formatter;
    if (StringUtils.isEmpty(formatPattern)) {
        formatter = DEFAULT_FORMAT;
    } else if ("iso".equals(formatPattern.toLowerCase())) {
        formatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME;
    } else {
        formatter = DateTimeFormatter.ofPattern(formatPattern);
    }
    return localDt.format(formatter);
}
From source file:org.ng200.openolympus.ContestTest.java
public Contest createContestUsingAPI(int duration) throws Exception {
    // @formatter:off
    final ZonedDateTime now = ZonedDateTime.now();
    final String result = this.mockMvc
            .perform(MockMvcRequestBuilders.post("/api/contests/create")
                    .param("name", "TestContest_" + ContestTest.id++)
                    .param("startTime", now.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME))
                    .param("duration", Integer.toString(duration)))
            .andDo(MockMvcResultHandlers.print())
            .andExpect(MockMvcResultMatchers.status().isOk())
            .andExpect(MockMvcResultMatchers.content().contentType(this.contentType))
            .andExpect(MockMvcResultMatchers.jsonPath("$.status").value("OK"))
            .andExpect(MockMvcResultMatchers.jsonPath("$.data.id").exists())
            .andReturn().getResponse().getContentAsString();
    return this.contestRepository.findOne(Long.valueOf(JsonPath.read(result, "$.data.id").toString()));
    // @formatter:on
}
From source file:edu.purdue.cybercenter.dm.service.CqlService.java
private Object getSystemValue(String name, Map<String, Object> context) {
    Object value;
    TermName termName = new TermName(name);
    String alias = termName.getAlias();
    String queryString = termName.getQueryString();
    Integer id = null;
    if (queryString != null) {
        Map<String, Object> queryMap = (Map<String, Object>) Helper.deserialize(queryString, Map.class);
        id = (Integer) queryMap.get(MetaField.Id);
    }
    if (CONST_SYS_VAR_PROJECTS.equals(alias)) {
        if (id == null) {
            List<Project> items = Project.findAllProjects();
            value = items;
        } else {
            Project item = projectRepository.findOne(id);
            value = item;
        }
    } else if (CONST_SYS_VAR_EXPERIMENTS.equals(alias)) {
        if (id == null) {
            List<Experiment> items = experimentRepository.findAll();
            value = items;
        } else {
            Experiment item = experimentRepository.findOne(id);
            value = item;
        }
    } else if (alias.startsWith(MetaField.Current)) {
        String path = alias.substring(MetaField.Current.length());
        int dotIndex = path.indexOf(".");
        int dashIndex = path.indexOf("_", 1);
        String objectName;
        String fieldName;
        if (dashIndex != -1) { // e.g. _job_id for compatibility
            if (context != null && context.get(path) != null) {
                value = context.get(path);
            } else {
                value = null;
            }
        } else {
            if (dotIndex == -1 && dashIndex == -1) { // e.g. _job
                objectName = path;
                fieldName = null;
            } else { // e.g. _job.id
                objectName = path.substring(0, dotIndex);
                fieldName = path.substring(dotIndex + 1);
            }
            if (objectName != null) {
                Object object = context.get(objectName);
                if (MetaField.Date.equals(objectName)) {
                    DateTimeFormatter formatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME;
                    value = formatter.format(OffsetDateTime.now());
                } else if (fieldName != null) {
                    try {
                        value = getPropertyValue(object, fieldName);
                    } catch (SecurityException | IllegalArgumentException ex) {
                        throw new RuntimeException("Object: " + objectName + " does not have field: " + fieldName, ex);
                    } catch (IllegalAccessException ex) {
                        throw new RuntimeException("Object: " + objectName + " unable to access field: " + fieldName, ex);
                    }
                } else {
                    value = object;
                }
            } else {
                value = null;
            }
        }
    } else {
        value = null;
    }
    return value;
}
From source file:com.esri.geoportal.commons.csw.client.impl.Client.java
/**
 * Formats ISO date.
 * @param date date to format
 * @return ISO date
 */
private static String formatIsoDate(Date date) {
    ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
    return DateTimeFormatter.ISO_OFFSET_DATE_TIME.format(zonedDateTime);
}
From source file:com.inqool.dcap.office.indexer.indexer.SolrBulkIndexer.java
protected SolrInputDocument modelToSolrInputDoc(ZdoModel model) {
    logger.debug("Constructing new SolrInputDocument...");
    final Map<String, SolrInputField> fields = new HashMap<>();
    //Add all Dublin Core terms
    for (String property : DCTools.getDcTermList()) {
        SolrInputField field = new SolrInputField(property);
        List<String> values = model.getAll(new PropertyImpl("http://purl.org/dc/terms/" + property));
        if (values.isEmpty())
            continue;
        //Skip fields that were not ticked to be published
        String visible = model.get(new PropertyImpl("http://purl.org/dc/terms/" + property + "_visibility"));
        if ("false".equals(visible) || "0".equals(visible)) { //0 should not occur any more
            continue;
        }
        if ("isPartOf".equals(property)) { //remove ip address from isPartOf
            values.set(0, store.getOnlyIdFromUrl(values.get(0)));
        }
        if ("".equals(values.get(0))) {
            values.set(0, "unknown");
        }
        field.addValue(values, INDEX_TIME_BOOST);
        fields.put(property, field);
        //Suggester data
        if ("title".equals(property) || "creator".equals(property)) {
            SolrInputDocument suggesterDoc = new SolrInputDocument();
            String suggestVal = values.get(0).trim();
            if (!suggestVal.isEmpty() && !suggestVal.equals("unknown")) {
                suggesterDoc.addField("suggesterData", values.get(0).trim());
                dataForSuggester.add(suggesterDoc);
            }
        }
    }
    //Add system fields
    SolrInputField field = new SolrInputField("id");
    field.addValue(store.getOnlyIdFromUrl(model.getUrl()), INDEX_TIME_BOOST);
    fields.put("id", field);
    addSolrFieldFromFedoraProperty("inventoryId", ZdoTerms.inventoryId, model, fields);
    addSolrFieldFromFedoraProperty("zdoType", ZdoTerms.zdoType, model, fields);
    addSolrFieldFromFedoraProperty("zdoGroup", ZdoTerms.group, model, fields);
    addSolrFieldFromFedoraProperty("orgIdmId", ZdoTerms.organization, model, fields);
    addSolrFieldFromFedoraProperty("allowContentPublicly", ZdoTerms.allowContentPublicly, model, fields);
    addSolrFieldFromFedoraProperty("allowPdfExport", ZdoTerms.allowPdfExport, model, fields);
    addSolrFieldFromFedoraProperty("allowEpubExport", ZdoTerms.allowEpubExport, model, fields);
    addSolrFieldFromFedoraProperty("watermark", ZdoTerms.watermark, model, fields);
    addSolrFieldFromFedoraProperty("watermarkPosition", ZdoTerms.watermarkPosition, model, fields);
    addSolrFieldFromFedoraProperty("imgThumb", ZdoTerms.imgThumb, model, fields);
    addSolrFieldFromFedoraProperty("imgNormal", ZdoTerms.imgNormal, model, fields);
    String publishFromStr = model.get(ZdoTerms.publishFrom);
    if (publishFromStr != null) {
        String publishFromUtc = ZonedDateTime
                .ofInstant(Instant.ofEpochSecond(Long.valueOf(publishFromStr)), ZoneId.systemDefault())
                .withZoneSameInstant(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        addSolrField("publishFrom", publishFromUtc, fields);
    }
    String publishToStr = model.get(ZdoTerms.publishTo);
    if (publishToStr != null) {
        String publishToUtc = ZonedDateTime
                .ofInstant(Instant.ofEpochSecond(Long.valueOf(publishToStr)), ZoneId.systemDefault())
                .withZoneSameInstant(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        addSolrField("publishTo", publishToUtc, fields);
    }
    String created = model.get(DCTerms.created);
    if (created != null) {
        AtomicInteger yearStart = new AtomicInteger();
        AtomicInteger yearEnd = new AtomicInteger();
        AtomicBoolean startValid = new AtomicBoolean();
        AtomicBoolean endValid = new AtomicBoolean();
        YearNormalizer.normalizeCreatedYear(created, yearStart, startValid, yearEnd, endValid);
        if (startValid.get()) {
            addSolrField("yearStart", yearStart.get(), fields);
        } else {
            logger.warn("Year could not be normalized for input string " + created);
        }
        if (endValid.get()) {
            addSolrField("yearEnd", yearEnd.get(), fields);
        }
    }
    String orgName = orgNameMapping.get(model.get(ZdoTerms.organization));
    if (orgName == null) {
        orgName = "Neznámá";
    }
    addSolrField("organization", orgName, fields);
    String documentTypeId = model.get(ZdoTerms.documentType); //type and subtype names must be found for id
    String documentSubTypeId = model.get(ZdoTerms.documentSubType);
    if (documentTypeId != null) {
        addSolrField("documentType", documentTypeAccess.getTypeNameForId(Integer.valueOf(documentTypeId)), fields);
    }
    if (documentSubTypeId != null) {
        addSolrField("documentSubType", documentTypeAccess.getSubTypeNameForId(Integer.valueOf(documentSubTypeId)), fields);
    }
    //Add customFields
    int fieldIndex = 0; //we actually start from 1
    do {
        fieldIndex++;
        String fieldName = model.get(new PropertyImpl("http://inqool.cz/zdo/1.0/customField_" + fieldIndex + "_name"));
        if (fieldName == null)
            break;
        fieldName = "customField_" + fieldName;
        String visible = model.get(new PropertyImpl("http://inqool.cz/zdo/1.0/customField_" + fieldIndex + "_visibility"));
        if ("false".equals(visible) || "0".equals(visible))
            continue;
        List<String> fieldValues = model.getAll(new PropertyImpl("http://inqool.cz/zdo/1.0/customField_" + fieldIndex));
        if ("".equals(fieldValues.get(0))) {
            fieldValues.set(0, "unknown");
        }
        SolrInputField customField = new SolrInputField(fieldName);
        customField.addValue(fieldValues, INDEX_TIME_BOOST);
        fields.put(fieldName, customField);
    } while (true);
    SolrInputDocument solrInputDocument = new SolrInputDocument(fields);
    return solrInputDocument;
}
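The publishFrom/publishTo handling above is another recurring pattern: an epoch-seconds value stored as a string is turned into a UTC ISO-8601 offset timestamp for Solr. Below is a minimal sketch of that step alone, with a hypothetical epoch value and an illustrative class name.

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class EpochSecondsToIsoUtc {
    public static void main(String[] args) {
        long publishFromEpochSeconds = 1_400_000_000L;     // hypothetical stored value
        String publishFromUtc = ZonedDateTime
                .ofInstant(Instant.ofEpochSecond(publishFromEpochSeconds), ZoneId.systemDefault())
                .withZoneSameInstant(ZoneOffset.UTC)
                .format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        System.out.println(publishFromUtc);                // 2014-05-13T16:53:20Z
    }
}

Since Instant.ofEpochSecond already identifies a unique instant, the detour through the system zone is not strictly necessary; ZonedDateTime.ofInstant(instant, ZoneOffset.UTC) would produce the same string. The sketch keeps the original call sequence for easier comparison.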
From source file:org.codice.ddf.confluence.source.ConfluenceInputTransformer.java
private Date getDate(Object dateTime) {
    if (dateTime instanceof Date) {
        return (Date) dateTime;
    }
    return Date.from(Instant.from(DateTimeFormatter.ISO_OFFSET_DATE_TIME.parse((String) dateTime)));
}
From source file:org.codice.ddf.confluence.source.ConfluenceInputTransformerTest.java
private Date getDate(String dateTime) {
    return Date.from(Instant.from(DateTimeFormatter.ISO_OFFSET_DATE_TIME.parse(dateTime)));
}
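Both Confluence examples parse an ISO offset timestamp back into a legacy java.util.Date. A small standalone sketch of that bridge, with an illustrative input string and class name:

import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Date;

public class IsoStringToLegacyDate {
    public static void main(String[] args) {
        String dateTime = "2011-12-03T10:15:30+01:00";     // illustrative input
        // ISO_OFFSET_DATE_TIME.parse yields a TemporalAccessor that carries a complete instant,
        // so Instant.from(...) and Date.from(...) can bridge to the legacy API
        Date legacy = Date.from(Instant.from(DateTimeFormatter.ISO_OFFSET_DATE_TIME.parse(dateTime)));
        System.out.println(legacy.toInstant());            // 2011-12-03T09:15:30Z
    }
}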
From source file:org.codice.ddf.security.idp.client.IdpMetadataTest.java
/**
 * Return a modified version of the (XML) input. The cache duration and valid-until time are
 * modified to match the respective input parameters. If null is passed for the cache duration,
 * the value of the cache duration already in the XML is used. Because of how the substitution
 * works, this method can be called only once per test. Otherwise, it will create multiple
 * "validUntil" XML attributes.
 *
 * @param validUntil the validUntil instant
 * @param iso8601Duration the cache duration as an ISO 8601 duration string, or null to keep the value already in the XML
 * @param xml the SAML entity description document
 * @return SAML entity description document with a validUntil date
 */
private String setValidUntil(Instant validUntil, String iso8601Duration, String xml) {
    Pattern pattern = Pattern.compile("cacheDuration=\"(\\w*)\"");
    Matcher matcher = pattern.matcher(xml);
    assertThat("Cannot setup test data - precondition not met", matcher.find(), is(true));
    assertThat("Cannot setup test data - precondition not met", matcher.groupCount(), is(1));
    String duration = iso8601Duration == null ? matcher.group(1) : iso8601Duration;
    DateTimeFormatter formatter = DateTimeFormatter.ISO_OFFSET_DATE_TIME;
    ZonedDateTime temporalAccessor = ZonedDateTime.ofInstant(validUntil, ZoneId.systemDefault());
    String isoTimestamp = formatter.format(temporalAccessor);
    return xml.replaceFirst(CACHE_DURATION_REGEX,
            String.format("cacheDuration=\"%s\" validUntil=\"%s\"", duration, isoTimestamp));
}
From source file:org.onosproject.drivers.polatis.netconf.PolatisAlarmConsumer.java
private long getTimeRaised(HierarchicalConfiguration cfg) {
    long timeRaised;
    String alarmTime = cfg.getString(ALARM_TIME);
    try {
        OffsetDateTime date = OffsetDateTime.parse(alarmTime, DateTimeFormatter.ISO_OFFSET_DATE_TIME);
        timeRaised = date.toInstant().toEpochMilli();
        return timeRaised;
    } catch (DateTimeException e) {
        log.error("Cannot parse exception {} {}", alarmTime, e);
    }
    return System.currentTimeMillis();
}