List of usage examples for org.joda.time DateTimeZone UTC
DateTimeZone UTC
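Before the project examples below, here is a minimal standalone sketch of the two most common uses of DateTimeZone.UTC: creating a DateTime directly in UTC and converting an existing instant to UTC (illustrative only, not taken from any of the files below).

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class UtcExample {
    public static void main(String[] args) {
        // Current instant, expressed in the UTC time zone.
        DateTime nowUtc = new DateTime(DateTimeZone.UTC);

        // Convert a zoned DateTime to UTC without changing the instant.
        DateTime berlin = new DateTime(2016, 10, 25, 9, 11, DateTimeZone.forID("Europe/Berlin"));
        DateTime inUtc = berlin.withZone(DateTimeZone.UTC);

        System.out.println(nowUtc + " / " + inUtc);
    }
}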
From source file:com.godaddy.pubsub.ServiceApplication.java
License:Apache License
public static void main(String[] args) throws Exception {
    DateTimeZone.setDefault(DateTimeZone.UTC);
    ServiceApplication serviceApplication = new ServiceApplication(new GuiceBundleProvider());
    try {
        serviceApplication.run(args);
    } catch (Throwable ex) {
        ex.printStackTrace();
        System.exit(1);
    }
}
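Calling DateTimeZone.setDefault(DateTimeZone.UTC) at startup makes every zone-less Joda-Time construction in the JVM use UTC. A minimal sketch of the effect (standalone fragment, assuming the usual org.joda.time imports):

DateTimeZone.setDefault(DateTimeZone.UTC);
// With the default set, a DateTime created without an explicit zone is in UTC.
DateTime now = new DateTime();
System.out.println(now.getZone());  // prints UTC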
From source file:com.gooddata.auditevent.AuditEventPageRequest.java
License:Open Source License
@Override
public UriComponentsBuilder updateWithPageParams(final UriComponentsBuilder builder) {
    UriComponentsBuilder builderWithPaging = super.updateWithPageParams(builder);
    if (from != null) {
        builderWithPaging.queryParam("from", from.toDateTime(DateTimeZone.UTC));
    }
    if (to != null) {
        builderWithPaging.queryParam("to", to.toDateTime(DateTimeZone.UTC));
    }
    if (type != null) {
        builderWithPaging.queryParam("type", type);
    }
    return builderWithPaging;
}
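Assuming from and to are Joda LocalDateTime values (an assumption; the class may declare them differently), the conversion that ends up in the query string looks like this sketch, where builder stands for a Spring UriComponentsBuilder:

// Hypothetical value standing in for the request's "from" field.
LocalDateTime from = new LocalDateTime(2016, 10, 25, 9, 11);
DateTime fromUtc = from.toDateTime(DateTimeZone.UTC);  // 2016-10-25T09:11:00.000Z
builder.queryParam("from", fromUtc);                   // rendered via toString() in ISO-8601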
From source file:com.google.api.ads.adwords.jaxws.extensions.report.model.util.DateUtil.java
License:Open Source License
/**
 * Attempts to parse the given {@code String} to a {@code DateTime} using one of the known
 * formatters.
 *
 * The attempt falls back to all the formatters, and if the format is unknown, {@code null} is
 * returned.
 *
 * @param timestamp the time stamp in {@code String} format.
 * @return the parsed {@code DateTime}, or {@code null} in case that the format is unknown.
 */
public static DateTime parseDateTime(String timestamp) {
    if (timestamp != null) {
        for (DateTimeFormatter formatter : DateUtil.formatters) {
            try {
                LocalDateTime localDateTime = formatter.parseLocalDateTime(timestamp);
                return localDateTime.plusHours(12).toDateTime(DateTimeZone.UTC);
            } catch (IllegalArgumentException e) {
                // silently skips to the next formatter
            }
        }
    }
    return null;
}
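The formatter list is project-specific, but the core conversion can be tried in isolation. A sketch with an illustrative pattern (not one of DateUtil's actual formatters): the parsed value is anchored at noon and pinned to UTC.

DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyyMMdd");  // illustrative pattern only
LocalDateTime localDateTime = formatter.parseLocalDateTime("20161025");
DateTime parsed = localDateTime.plusHours(12).toDateTime(DateTimeZone.UTC);  // 2016-10-25T12:00:00.000Z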
From source file:com.google.cloud.dataflow.examples.opinionanalysis.IndexerPipelineUtils.java
License:Apache License
public static int getDateIdFromTimestamp(long millis) {
    int result;
    DateTime dt = new DateTime(millis, DateTimeZone.UTC);
    int year = dt.getYear();
    int month = dt.getMonthOfYear();
    int day = dt.getDayOfMonth();
    result = day + (100 * month) + (10000 * year);
    return result;
}
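As a quick sanity check of the yyyyMMdd encoding, a timestamp that falls on 2017-03-05 UTC produces the integer 20170305:

long millis = new DateTime(2017, 3, 5, 0, 0, DateTimeZone.UTC).getMillis();
int dateId = getDateIdFromTimestamp(millis);  // 5 + 100 * 3 + 10000 * 2017 = 20170305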
From source file:com.google.cloud.dataflow.sdk.runners.DataflowPipelineRunner.java
License:Apache License
@Override
public DataflowPipelineJob run(Pipeline pipeline) {
    logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline);

    LOG.info("Executing pipeline on the Dataflow Service, which will have billing implications "
            + "related to Google Compute Engine usage and other Google Cloud Services.");

    List<DataflowPackage> packages = options.getStager().stageFiles();

    // Set a unique client_request_id in the CreateJob request.
    // This is used to ensure idempotence of job creation across retried
    // attempts to create a job. Specifically, if the service returns a job with
    // a different client_request_id, it means the returned one is a different
    // job previously created with the same job name, and that the job creation
    // has been effectively rejected. The SDK should return
    // Error::Already_Exists to user in that case.
    int randomNum = new Random().nextInt(9000) + 1000;
    String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm").withZone(DateTimeZone.UTC)
            .print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum;

    // Try to create a debuggee ID. This must happen before the job is translated since it may
    // update the options.
    DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
    maybeRegisterDebuggee(dataflowOptions, requestId);

    JobSpecification jobSpecification = translator.translate(pipeline, this, packages);
    Job newJob = jobSpecification.getJob();
    newJob.setClientRequestId(requestId);

    String version = DataflowReleaseInfo.getReleaseInfo().getVersion();
    System.out.println("Dataflow SDK version: " + version);

    newJob.getEnvironment().setUserAgent(DataflowReleaseInfo.getReleaseInfo());
    // The Dataflow Service may write to the temporary directory directly, so
    // must be verified.
    if (!Strings.isNullOrEmpty(options.getTempLocation())) {
        newJob.getEnvironment()
                .setTempStoragePrefix(dataflowOptions.getPathValidator().verifyPath(options.getTempLocation()));
    }
    newJob.getEnvironment().setDataset(options.getTempDatasetId());
    newJob.getEnvironment().setExperiments(options.getExperiments());

    // Set the Docker container image that executes Dataflow worker harness, residing in Google
    // Container Registry. Translator is guaranteed to create a worker pool prior to this point.
    String workerHarnessContainerImage = options.as(DataflowPipelineWorkerPoolOptions.class)
            .getWorkerHarnessContainerImage();
    for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) {
        workerPool.setWorkerHarnessContainerImage(workerHarnessContainerImage);
    }

    // Requirements about the service.
    Map<String, Object> environmentVersion = new HashMap<>();
    environmentVersion.put(PropertyNames.ENVIRONMENT_VERSION_MAJOR_KEY, ENVIRONMENT_MAJOR_VERSION);
    newJob.getEnvironment().setVersion(environmentVersion);
    // Default jobType is JAVA_BATCH_AUTOSCALING: A Java job with workers that the job can
    // autoscale if specified.
    String jobType = "JAVA_BATCH_AUTOSCALING";
    if (options.isStreaming()) {
        jobType = "STREAMING";
    }
    environmentVersion.put(PropertyNames.ENVIRONMENT_VERSION_JOB_TYPE_KEY, jobType);

    if (hooks != null) {
        hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment());
    }

    if (!Strings.isNullOrEmpty(options.getDataflowJobFile())) {
        runJobFileHooks(newJob);
    }

    if (hooks != null && !hooks.shouldActuallyRunJob()) {
        return null;
    }

    String jobIdToUpdate = null;
    if (options.getUpdate()) {
        jobIdToUpdate = getJobIdFromName(options.getJobName());
        newJob.setTransformNameMapping(options.getTransformNameMapping());
        newJob.setReplaceJobId(jobIdToUpdate);
    }

    Job jobResult;
    try {
        jobResult = dataflowClient.projects().jobs().create(options.getProject(), newJob).execute();
    } catch (GoogleJsonResponseException e) {
        String errorMessages = "Unexpected errors";
        if (e.getDetails() != null) {
            if (Utf8.encodedLength(newJob.toString()) >= CREATE_JOB_REQUEST_LIMIT_BYTES) {
                errorMessages = "The size of the serialized JSON representation of the pipeline "
                        + "exceeds the allowable limit. "
                        + "For more information, please check the FAQ link below:\n"
                        + "https://cloud.google.com/dataflow/faq";
            } else {
                errorMessages = e.getDetails().getMessage();
            }
        }
        throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e);
    } catch (IOException e) {
        throw new RuntimeException("Failed to create a workflow job", e);
    }

    // Obtain all of the extractors from the PTransforms used in the pipeline so the
    // DataflowPipelineJob has access to them.
    AggregatorPipelineExtractor aggregatorExtractor = new AggregatorPipelineExtractor(pipeline);
    Map<Aggregator<?, ?>, Collection<PTransform<?, ?>>> aggregatorSteps = aggregatorExtractor
            .getAggregatorSteps();
    DataflowAggregatorTransforms aggregatorTransforms = new DataflowAggregatorTransforms(aggregatorSteps,
            jobSpecification.getStepNames());

    // Use a raw client for post-launch monitoring, as status calls may fail
    // regularly and need not be retried automatically.
    DataflowPipelineJob dataflowPipelineJob = new DataflowPipelineJob(options.getProject(), jobResult.getId(),
            Transport.newRawDataflowClient(options).build(), aggregatorTransforms);

    // If the service returned client request id, the SDK needs to compare it
    // with the original id generated in the request, if they are not the same
    // (i.e., the returned job is not created by this request), throw
    // DataflowJobAlreadyExistsException or DataflowJobAlreadyUpdatedException
    // depending on whether this is a reload or not.
    if (jobResult.getClientRequestId() != null && !jobResult.getClientRequestId().isEmpty()
            && !jobResult.getClientRequestId().equals(requestId)) {
        // If updating a job.
        if (options.getUpdate()) {
            throw new DataflowJobAlreadyUpdatedException(dataflowPipelineJob, String.format(
                    "The job named %s with id: %s has already been updated into job id: %s "
                            + "and cannot be updated again.",
                    newJob.getName(), jobIdToUpdate, jobResult.getId()));
        } else {
            throw new DataflowJobAlreadyExistsException(dataflowPipelineJob,
                    String.format("There is already an active job named %s with id: %s. If you want "
                            + "to submit a second job, try again by setting a different name using --jobName.",
                            newJob.getName(), jobResult.getId()));
        }
    }

    LOG.info("To access the Dataflow monitoring console, please navigate to {}",
            MonitoringUtil.getJobMonitoringPageURL(options.getProject(), jobResult.getId()));
    System.out.println("Submitted job: " + jobResult.getId());

    LOG.info("To cancel the job using the 'gcloud' tool, run:\n> {}",
            MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId()));

    return dataflowPipelineJob;
}
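The only Joda-Time usage in this long method is the UTC-pinned client request id. In isolation, the pattern looks like the following sketch (standalone fragment, assuming java.util.Random and the usual org.joda.time imports):

int randomNum = new Random().nextInt(9000) + 1000;
String requestId = DateTimeFormat.forPattern("YYYYMMddHHmmssmmm")
        .withZone(DateTimeZone.UTC)
        .print(DateTimeUtils.currentTimeMillis()) + "_" + randomNum;
// Produces something like "20161025091135011_4711"; withZone(DateTimeZone.UTC)
// keeps the id independent of the machine's local time zone.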
From source file:com.google.cloud.dataflow.sdk.transforms.windowing.CalendarWindows.java
License:Apache License
/**
 * Returns a {@link WindowFn} that windows elements into periods measured by days.
 *
 * <p>For example, {@code CalendarWindows.days(1)} will window elements into
 * separate windows for each day.
 */
public static DaysWindows days(int number) {
    return new DaysWindows(number, DEFAULT_START_DATE, DateTimeZone.UTC);
}
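In a pipeline, the returned WindowFn is typically applied with the SDK's Window transform. A minimal sketch, assuming a PCollection<String> named input:

PCollection<String> daily = input.apply(Window.<String>into(CalendarWindows.days(1)));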
From source file:com.google.cloud.dataflow.sdk.transforms.windowing.CalendarWindows.java
License:Apache License
/**
 * Returns a {@link WindowFn} that windows elements into periods measured by weeks.
 *
 * <p>For example, {@code CalendarWindows.weeks(1, DateTimeConstants.TUESDAY)} will
 * window elements into week-long windows starting on Tuesdays.
 */
public static DaysWindows weeks(int number, int startDayOfWeek) {
    return new DaysWindows(7 * number, DEFAULT_START_DATE.withDayOfWeek(startDayOfWeek), DateTimeZone.UTC);
}
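Following the Javadoc's own example, week-long windows that begin on Tuesdays look like this sketch (DateTimeConstants is Joda-Time's day-of-week constants class; input is again an assumed PCollection<String>):

PCollection<String> weekly = input.apply(
        Window.<String>into(CalendarWindows.weeks(1, DateTimeConstants.TUESDAY)));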
From source file:com.google.cloud.dataflow.sdk.transforms.windowing.CalendarWindows.java
License:Apache License
/**
 * Returns a {@link WindowFn} that windows elements into periods measured by months.
 *
 * <p>For example,
 * {@code CalendarWindows.months(8).withStartingMonth(2014, 1).beginningOnDay(10)}
 * will window elements into 8 month windows where that start on the 10th day of month,
 * and the first window begins in January 2014.
 */
public static MonthsWindows months(int number) {
    return new MonthsWindows(number, 1, DEFAULT_START_DATE, DateTimeZone.UTC);
}
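The Javadoc's example spelled out as a sketch: 8-month windows anchored to the 10th of the month, with the first window starting in January 2014 (input is an assumed PCollection<String>):

PCollection<String> everyEightMonths = input.apply(Window.<String>into(
        CalendarWindows.months(8).withStartingMonth(2014, 1).beginningOnDay(10)));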
From source file:com.google.cloud.dataflow.sdk.transforms.windowing.CalendarWindows.java
License:Apache License
/**
 * Returns a {@link WindowFn} that windows elements into periods measured by years.
 *
 * <p>For example,
 * {@code CalendarWindows.years(1).withTimeZone(DateTimeZone.forId("America/Los_Angeles"))}
 * will window elements into year-long windows that start at midnight on Jan 1, in the
 * America/Los_Angeles time zone.
 */
public static YearsWindows years(int number) {
    return new YearsWindows(number, 1, 1, DEFAULT_START_DATE, DateTimeZone.UTC);
}
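And the Javadoc's year example as a sketch, overriding the default UTC zone so that windows start at midnight on Jan 1 in the given zone (note Joda-Time's method is spelled forID; input is an assumed PCollection<String>):

PCollection<String> yearly = input.apply(Window.<String>into(
        CalendarWindows.years(1).withTimeZone(DateTimeZone.forID("America/Los_Angeles"))));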
From source file:com.google.gerrit.server.mail.receive.data.AttachmentMessage.java
License:Apache License
@Override
public MailMessage expectedMailMessage() {
    System.out.println("\uD83D\uDE1B test");
    MailMessage.Builder expect = MailMessage.builder();
    expect.id("<CAM7sg=3meaAVUxW3KXeJEVs8sv_ADw1BnvpcHHiYVR2TQQi__w@mail.gmail.com>")
            .from(new Address("Patrick Hiesel", "hiesel@google.com"))
            .addTo(new Address("Patrick Hiesel", "hiesel@google.com"))
            .textContent("Contains unwanted attachment")
            .htmlContent("<div dir=\"ltr\">Contains unwanted attachment</div>")
            .subject("Test Subject")
            .addAdditionalHeader("MIME-Version: 1.0")
            .dateReceived(new DateTime(2016, 10, 25, 9, 11, 35, 0, DateTimeZone.UTC));
    return expect.build();
}
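The full eight-argument DateTime constructor used here pins the expected received date to an exact UTC instant. Standalone, it behaves like this sketch:

DateTime received = new DateTime(2016, 10, 25, 9, 11, 35, 0, DateTimeZone.UTC);
System.out.println(received);  // 2016-10-25T09:11:35.000Z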