Usage examples for java.time ZoneId.systemDefault()
public static ZoneId systemDefault()
From source file:com.ccserver.digital.service.LOSService.java
private XMLGregorianCalendar dateToXMLGregorianCalendar(LocalDateTime localDateTime) { if (localDateTime == null) { return null; }//from ww w .j a va 2s . c om Date date = Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant()); return dateToXMLGregorianCalendar(date); }
From source file:alfio.manager.EventManager.java
/**
 * Loads every promo code defined for the given organization, wrapping each
 * one with formatted-time data rendered in the JVM's default time zone.
 *
 * @param organizationId id of the organization whose promo codes are fetched
 * @return the organization's promo codes with formatted times
 */
public List<PromoCodeDiscountWithFormattedTime> findPromoCodesInOrganization(int organizationId) {
    final ZoneId defaultZone = ZoneId.systemDefault();
    return promoCodeRepository.findAllInOrganization(organizationId).stream()
            .map(promoCode -> new PromoCodeDiscountWithFormattedTime(promoCode, defaultZone))
            .collect(toList());
}
From source file:org.openhab.binding.amazonechocontrol.internal.handler.EchoHandler.java
public void updateNotifications(ZonedDateTime currentTime, ZonedDateTime now, @Nullable JsonCommandPayloadPushNotificationChange pushPayload, JsonNotificationResponse[] notifications) { Device device = this.device; if (device == null) { return;//from w w w . ja v a2 s.c om } ZonedDateTime nextReminder = null; ZonedDateTime nextAlarm = null; ZonedDateTime nextMusicAlarm = null; ZonedDateTime nextTimer = null; for (JsonNotificationResponse notification : notifications) { if (StringUtils.equals(notification.deviceSerialNumber, device.serialNumber)) { // notification for this device if (StringUtils.equals(notification.status, "ON")) { if ("Reminder".equals(notification.type)) { String offset = ZoneId.systemDefault().getRules().getOffset(Instant.now()).toString(); ZonedDateTime alarmTime = ZonedDateTime .parse(notification.originalDate + "T" + notification.originalTime + offset); if (StringUtils.isNotBlank(notification.recurringPattern) && alarmTime.isBefore(now)) { continue; // Ignore recurring entry if alarm time is before now } if (nextReminder == null || alarmTime.isBefore(nextReminder)) { nextReminder = alarmTime; } } else if ("Timer".equals(notification.type)) { // use remaining time ZonedDateTime alarmTime = currentTime.plus(notification.remainingTime, ChronoUnit.MILLIS); if (nextTimer == null || alarmTime.isBefore(nextTimer)) { nextTimer = alarmTime; } } else if ("Alarm".equals(notification.type)) { String offset = ZoneId.systemDefault().getRules().getOffset(Instant.now()).toString(); ZonedDateTime alarmTime = ZonedDateTime .parse(notification.originalDate + "T" + notification.originalTime + offset); if (StringUtils.isNotBlank(notification.recurringPattern) && alarmTime.isBefore(now)) { continue; // Ignore recurring entry if alarm time is before now } if (nextAlarm == null || alarmTime.isBefore(nextAlarm)) { nextAlarm = alarmTime; } } else if ("MusicAlarm".equals(notification.type)) { String offset = 
ZoneId.systemDefault().getRules().getOffset(Instant.now()).toString(); ZonedDateTime alarmTime = ZonedDateTime .parse(notification.originalDate + "T" + notification.originalTime + offset); if (StringUtils.isNotBlank(notification.recurringPattern) && alarmTime.isBefore(now)) { continue; // Ignore recurring entry if alarm time is before now } if (nextMusicAlarm == null || alarmTime.isBefore(nextMusicAlarm)) { nextMusicAlarm = alarmTime; } } } } } updateState(CHANNEL_NEXT_REMINDER, nextReminder == null ? UnDefType.UNDEF : new DateTimeType(nextReminder)); updateState(CHANNEL_NEXT_ALARM, nextAlarm == null ? UnDefType.UNDEF : new DateTimeType(nextAlarm)); updateState(CHANNEL_NEXT_MUSIC_ALARM, nextMusicAlarm == null ? UnDefType.UNDEF : new DateTimeType(nextMusicAlarm)); updateState(CHANNEL_NEXT_TIMER, nextTimer == null ? UnDefType.UNDEF : new DateTimeType(nextTimer)); }
From source file:io.hops.hopsworks.common.project.ProjectController.java
public String[] forceCleanup(String projectName, String userEmail, String sessionId) { CleanupLogger cleanupLogger = new CleanupLogger(projectName); DistributedFileSystemOps dfso = null; YarnClientWrapper yarnClientWrapper = null; try {/*from ww w . j a v a 2 s . c om*/ dfso = dfs.getDfsOps(); yarnClientWrapper = ycs.getYarnClientSuper(settings.getConfiguration()); Project project = projectFacade.findByName(projectName); if (project != null) { cleanupLogger.logSuccess("Project not found in the database"); // Run custom handler for project deletion for (ProjectHandler projectHandler : projectHandlers) { try { projectHandler.preDelete(project); cleanupLogger.logSuccess("Handler " + projectHandler.getClassName() + " successfully run"); } catch (Exception e) { cleanupLogger.logError("Error running handler: " + projectHandler.getClassName() + " during project cleanup"); cleanupLogger.logError(e.getMessage()); } } // Remove from Project team try { updateProjectTeamRole(project, ProjectRoleTypes.UNDER_REMOVAL); cleanupLogger.logSuccess("Updated team role"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Get Yarn applications List<ApplicationReport> projectApps = null; try { Collection<ProjectTeam> team = project.getProjectTeamCollection(); Set<String> hdfsUsers = new HashSet<>(); for (ProjectTeam pt : team) { String hdfsUsername = hdfsUsersController.getHdfsUserName(project, pt.getUser()); hdfsUsers.add(hdfsUsername); } hdfsUsers.add(project.getProjectGenericUser()); projectApps = getYarnApplications(hdfsUsers, yarnClientWrapper.getYarnClient()); cleanupLogger.logSuccess("Gotten Yarn applications"); } catch (Exception ex) { cleanupLogger.logError("Error when reading YARN apps during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Kill Zeppelin jobs try { killZeppelin(project.getId(), sessionId); cleanupLogger.logSuccess("Killed Zeppelin"); } catch (Exception ex) { LOGGER.log(Level.SEVERE, "Error when killing Zeppelin during project 
cleanup", ex); cleanupLogger.logError(ex.getMessage()); } // Stop Jupyter try { jupyterProcessFacade.stopProject(project); cleanupLogger.logSuccess("Stopped Jupyter"); } catch (Exception ex) { cleanupLogger.logError("Error when killing Jupyter during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Kill Yarn Jobs try { killYarnJobs(project); cleanupLogger.logSuccess("Killed Yarn jobs"); } catch (Exception ex) { cleanupLogger.logError("Error when killing YARN jobs during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Wait for Yarn logs try { waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient()); cleanupLogger.logSuccess("Gotten logs for jobs"); } catch (Exception ex) { cleanupLogger.logError("Error when getting Yarn logs during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Log removal try { logProject(project, OperationType.Delete); cleanupLogger.logSuccess("Logged project removal"); } catch (Exception ex) { cleanupLogger.logError("Error when logging project removal during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Change ownership of root dir try { Path path = new Path(File.separator + Settings.DIR_ROOT + File.separator + project.getName()); changeOwnershipToSuperuser(path, dfso); cleanupLogger.logSuccess("Changed ownership of root Project dir"); } catch (Exception ex) { cleanupLogger .logError("Error when changing ownership of root Project dir during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Change ownership of tmp file Path dummy = new Path("/tmp/" + project.getName()); try { changeOwnershipToSuperuser(dummy, dfso); cleanupLogger.logSuccess("Changed ownership of dummy inode"); } catch (Exception ex) { cleanupLogger.logError("Error when changing ownership of dummy inode during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Remove Kafka try { removeKafkaTopics(project); cleanupLogger.logSuccess("Removed Kafka topics"); } catch (Exception ex) 
{ cleanupLogger.logError("Error when removing kafka topics during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Remove certificates try { certificatesController.deleteProjectCertificates(project); cleanupLogger.logSuccess("Removed certificates"); } catch (CAException ex) { if (ex.getErrorCode() != RESTCodes.CAErrorCode.CERTNOTFOUND) { cleanupLogger.logError("Error when removing certificates during project cleanup"); } } catch (IOException ex) { cleanupLogger.logError("Error when removing certificates during project cleanup"); cleanupLogger.logError(ex.getMessage()); } List<HdfsUsers> usersToClean = getUsersToClean(project); List<HdfsGroups> groupsToClean = getGroupsToClean(project); // Remove project related files try { removeProjectRelatedFiles(usersToClean, dfso); cleanupLogger.logSuccess("Removed project related files"); } catch (Exception ex) { cleanupLogger.logError("Error when removing project-related files during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Remove quotas try { removeQuotas(project); cleanupLogger.logSuccess("Removed quotas"); } catch (Exception ex) { cleanupLogger.logError("Error when removing quota during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Change owner for files in shared datasets try { fixSharedDatasets(project, dfso); cleanupLogger.logSuccess("Fixed shared datasets"); } catch (Exception ex) { cleanupLogger.logError("Error when changing ownership during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // 16) Delete Hive database - will automatically cleanup all the Hive's metadata try { hiveController.dropDatabase(project, dfso, true); cleanupLogger.logSuccess("Removed Hive db"); } catch (Exception ex) { cleanupLogger.logError("Error when removing hive db during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Delete elasticsearch template for this project try { removeElasticsearch(project); cleanupLogger.logSuccess("Removed ElasticSearch"); } 
catch (Exception ex) { cleanupLogger.logError("Error when removing elastic during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // delete project group and users try { removeGroupAndUsers(groupsToClean, usersToClean); cleanupLogger.logSuccess("Removed HDFS Groups and Users"); } catch (Exception ex) { cleanupLogger.logError("Error when removing HDFS groups/users during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // remove anaconda repos try { removeJupyter(project); cleanupLogger.logSuccess("Removed Jupyter"); } catch (Exception ex) { cleanupLogger.logError("Error when removing Anaconda during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // remove running tensorboards repos try { removeTensorBoard(project); cleanupLogger.logSuccess("Removed local TensorBoards"); } catch (Exception ex) { cleanupLogger.logError("Error when removing running TensorBoards during project cleanup"); } try { tfServingController.deleteTfServings(project); cleanupLogger.logSuccess("Removed Tf Servings"); } catch (Exception ex) { cleanupLogger.logError("Error when removing Tf Serving instances"); cleanupLogger.logError(ex.getMessage()); } // remove dumy Inode try { dfso.rm(dummy, true); cleanupLogger.logSuccess("Removed dummy Inode"); } catch (Exception ex) { cleanupLogger.logError("Error when removing dummy Inode during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // remove folder try { removeProjectFolder(project.getName(), dfso); cleanupLogger.logSuccess("Removed root Project folder"); } catch (Exception ex) { cleanupLogger.logError("Error when removing root Project dir during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Run custom handler for project deletion for (ProjectHandler projectHandler : projectHandlers) { try { projectHandler.postDelete(project); cleanupLogger.logSuccess("Handler " + projectHandler.getClassName() + " successfully run"); } catch (Exception e) { cleanupLogger.logError("Error 
running handler: " + projectHandler.getClassName() + " during project cleanup"); cleanupLogger.logError(e.getMessage()); } } } else { // Create /tmp/Project and add to database so we lock in case someone tries to create a Project // with the same name at the same time cleanupLogger.logSuccess("Project is *NOT* in the database, going to remove as much as possible"); Date now = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant()); Users user = userFacade.findByEmail(userEmail); Project toDeleteProject = new Project(projectName, user, now, PaymentType.PREPAID); toDeleteProject.setKafkaMaxNumTopics(settings.getKafkaMaxNumTopics()); Path tmpInodePath = new Path(File.separator + "tmp" + File.separator + projectName); try { if (!dfso.exists(tmpInodePath.toString())) { dfso.touchz(tmpInodePath); } Inode tmpInode = inodes.getInodeAtPath(tmpInodePath.toString()); if (tmpInode != null) { toDeleteProject.setInode(tmpInode); projectFacade.persistProject(toDeleteProject); projectFacade.flushEm(); cleanupLogger.logSuccess("Created dummy Inode"); } } catch (IOException ex) { cleanupLogger.logError("Could not create dummy Inode, moving on unsafe"); } // Kill jobs List<HdfsUsers> projectHdfsUsers = hdfsUsersController.getAllProjectHdfsUsers(projectName); try { Set<String> hdfsUsersStr = new HashSet<>(); for (HdfsUsers hdfsUser : projectHdfsUsers) { hdfsUsersStr.add(hdfsUser.getName()); } hdfsUsersStr.add(projectName + "__" + Settings.PROJECT_GENERIC_USER_SUFFIX); List<ApplicationReport> projectApps = getYarnApplications(hdfsUsersStr, yarnClientWrapper.getYarnClient()); waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient()); cleanupLogger.logSuccess("Killed all Yarn Applications"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Cleanup Jupyter project try { jupyterProcessFacade.stopProject(toDeleteProject); cleanupLogger.logSuccess("Cleaned Jupyter environment"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } 
// Remove project related files try { removeProjectRelatedFiles(projectHdfsUsers, dfso); cleanupLogger.logSuccess("Removed project related files from HDFS"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove Hive database try { hiveController.dropDatabase(toDeleteProject, dfso, true); cleanupLogger.logSuccess("Dropped Hive database"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove ElasticSearch index try { removeElasticsearch(project); cleanupLogger.logSuccess("Removed ElasticSearch"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Remove HDFS Groups and Users try { List<HdfsGroups> projectHdfsGroups = hdfsUsersController.getAllProjectHdfsGroups(projectName); removeGroupAndUsers(projectHdfsGroups, projectHdfsUsers); cleanupLogger.logSuccess("Removed HDFS Groups and Users"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove Yarn project quota try { removeQuotas(toDeleteProject); cleanupLogger.logSuccess("Removed project quota"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Remove Certificates try { opensslOperations.deleteProjectCertificate(projectName); userCertsFacade.removeAllCertsOfAProject(projectName); cleanupLogger.logSuccess("Deleted certificates"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove root project directory try { removeProjectFolder(projectName, dfso); cleanupLogger.logSuccess("Removed root project directory"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove /tmp/project try { dfso.rm(new Path(File.separator + "tmp" + File.separator + projectName), true); cleanupLogger.logSuccess("Removed /tmp"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } } } finally { dfs.closeDfsClient(dfso); ycs.closeYarnClient(yarnClientWrapper); LOGGER.log(Level.INFO, cleanupLogger.getSuccessLog().toString()); LOGGER.log(Level.SEVERE, 
cleanupLogger.getErrorLog().toString()); sendInbox(cleanupLogger.getSuccessLog().append("\n").append(cleanupLogger.getErrorLog()).append("\n") .toString(), userEmail); } String[] logs = new String[2]; logs[0] = cleanupLogger.getSuccessLog().toString(); logs[1] = cleanupLogger.getErrorLog().toString(); return logs; }
From source file:org.qcert.camp.translator.SemRule2CAMP.java
/** * Incomplete but evolving translation for object allocations involving temporal constructs * @param ast the object allocation (SemNewObject) * @return the translation/*from w w w. j av a 2s . c om*/ */ private CampPattern translateTemporalAllocation(SemNewObject ast) { List<SemValue> args = ast.getArguments(); if (DATE_TYPE.equals(ast.getType().getDisplayName())) { Iterator<SemValue> argIter = args.iterator(); ZonedDateTime translation = ZonedDateTime.ofInstant(Instant.EPOCH, ZoneId.systemDefault()) .withYear(intFrom(argIter.next())).withMonth(intFrom(argIter.next())) .withDayOfMonth(intFrom(argIter.next())); if (argIter.hasNext()) translation = translation.withHour(intFrom(argIter.next())); if (argIter.hasNext()) translation = translation.withMinute(intFrom(argIter.next())); if (argIter.hasNext()) translation = translation.withSecond(intFrom(argIter.next())); ConstPattern constant = new ConstPattern(translation.toString()); return new UnaryPattern(UnaryOperator.ATimeFromString, constant); } if (TIME_TYPE.equals(ast.getType().getDisplayName()) && args.size() == 1) { long epochMilli = longFrom(args.get(0)); LocalTime translation = ZonedDateTime.ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC) .toLocalTime(); // TODO this should really be a unique CAMP type corresponding to the TTRL type for LocalTimeComponentValue return new ConstPattern(translation.toString()); } return notImplemented("Translation of temporal allocation: " + ast); }
From source file:org.sleuthkit.autopsy.experimental.autoingest.FileExporterSettingsPanel.java
/** * Get the artifact condition from the user's input * * @return the ArtifactCondition, or null if there isn't one. *//* w w w . j a v a 2s . c om*/ ArtifactCondition getArtifactConditionFromInput(Rule rule) throws IllegalArgumentException { ArtifactCondition artifactCondition = null; if (cbAttributeType.isSelected()) { String selectedAttribute = comboBoxAttributeName.getSelectedItem().toString(); BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE typeFromComboBox = BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE .fromLabel(comboBoxValueType.getSelectedItem().toString()); BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE intrinsicType = attributeTypeMap .get(comboBoxAttributeName.getSelectedItem().toString()); // if we don't have a type in the map, but they have set the combobox, put it in the map if (intrinsicType == null && typeFromComboBox != null) { intrinsicType = typeFromComboBox; attributeTypeMap.put(selectedAttribute, typeFromComboBox); } if (intrinsicType == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) { LocalDateTime localDateTime = dateTimePicker.getDateTime(); if (localDateTime == null) { throw new IllegalArgumentException("Bad date/time combination"); } Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant(); String stringValue = Long.toString(Date.from(instant).getTime()); artifactCondition = new Rule.ArtifactCondition(comboBoxArtifactName.getSelectedItem().toString(), comboBoxAttributeName.getSelectedItem().toString(), stringValue, intrinsicType, RelationalOp.fromSymbol(comboBoxAttributeComparison.getSelectedItem().toString())); } else if (intrinsicType == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) { try { String stringValue = tbAttributeValue.getText(); byte[] hexValue = Hex.decodeHex(stringValue.toCharArray()); String finalValue = new String(Hex.encodeHex(hexValue)); artifactCondition = new Rule.ArtifactCondition( comboBoxArtifactName.getSelectedItem().toString(), 
comboBoxAttributeName.getSelectedItem().toString(), finalValue, intrinsicType, RelationalOp.fromSymbol(comboBoxAttributeComparison.getSelectedItem().toString())); } catch (DecoderException ex) { throw new IllegalArgumentException(ex); } } else if (intrinsicType != null) { artifactCondition = new Rule.ArtifactCondition(comboBoxArtifactName.getSelectedItem().toString(), comboBoxAttributeName.getSelectedItem().toString(), tbAttributeValue.getText(), intrinsicType, RelationalOp.fromSymbol(comboBoxAttributeComparison.getSelectedItem().toString())); } else { throw new IllegalArgumentException(); } } return artifactCondition; }
From source file:edu.usu.sdl.openstorefront.service.ComponentServiceImpl.java
@Override public void processComponentIntegration(String componentId, String integrationConfigId) { ComponentIntegration integrationExample = new ComponentIntegration(); integrationExample.setActiveStatus(ComponentIntegration.ACTIVE_STATUS); integrationExample.setComponentId(componentId); ComponentIntegration integration = persistenceService.queryOneByExample(ComponentIntegration.class, integrationExample);/* w w w.j a v a2s . c o m*/ if (integration != null) { boolean run = true; if (RunStatus.WORKING.equals(integration.getStatus())) { //check for override String overrideTime = PropertiesManager.getValue(PropertiesManager.KEY_JOB_WORKING_STATE_OVERRIDE, "30"); if (integration.getLastStartTime() != null) { LocalDateTime maxLocalDateTime = LocalDateTime .ofInstant(integration.getLastStartTime().toInstant(), ZoneId.systemDefault()); maxLocalDateTime.plusMinutes(Convert.toLong(overrideTime)); if (maxLocalDateTime.compareTo(LocalDateTime.now()) <= 0) { log.log(Level.FINE, "Overriding the working state...assume it was stuck."); run = true; } else { run = false; } } else { throw new OpenStorefrontRuntimeException("Missing Last Start time. 
Data is corrupt.", "Delete the job (Integration) and recreate it.", ErrorTypeCode.INTEGRATION); } } if (run) { Component component = persistenceService.findById(Component.class, integration.getComponentId()); ComponentIntegration liveIntegration = persistenceService.findById(ComponentIntegration.class, integration.getComponentId()); log.log(Level.FINE, MessageFormat.format("Processing Integration for: {0}", component.getName())); liveIntegration.setStatus(RunStatus.WORKING); liveIntegration.setLastStartTime(TimeUtil.currentDate()); liveIntegration.setUpdateDts(TimeUtil.currentDate()); liveIntegration.setUpdateUser(OpenStorefrontConstant.SYSTEM_USER); persistenceService.persist(liveIntegration); ComponentIntegrationConfig integrationConfigExample = new ComponentIntegrationConfig(); integrationConfigExample.setActiveStatus(ComponentIntegrationConfig.ACTIVE_STATUS); integrationConfigExample.setComponentId(componentId); integrationConfigExample.setIntegrationConfigId(integrationConfigId); List<ComponentIntegrationConfig> integrationConfigs = persistenceService .queryByExample(ComponentIntegrationConfig.class, integrationConfigExample); boolean errorConfig = false; if (integrationConfigs.isEmpty() == false) { for (ComponentIntegrationConfig integrationConfig : integrationConfigs) { ComponentIntegrationConfig liveConfig = persistenceService.findById( ComponentIntegrationConfig.class, integrationConfig.getIntegrationConfigId()); try { log.log(Level.FINE, MessageFormat.format("Working on {1} Configuration for Integration for: {0}", component.getName(), integrationConfig.getIntegrationType())); liveConfig.setStatus(RunStatus.WORKING); liveConfig.setLastStartTime(TimeUtil.currentDate()); liveConfig.setUpdateDts(TimeUtil.currentDate()); liveConfig.setUpdateUser(OpenStorefrontConstant.SYSTEM_USER); persistenceService.persist(liveConfig); BaseIntegrationHandler baseIntegrationHandler = BaseIntegrationHandler .getIntegrationHandler(integrationConfig); if (baseIntegrationHandler 
!= null) { baseIntegrationHandler.processConfig(); } else { throw new OpenStorefrontRuntimeException( "Intergration handler not supported for " + integrationConfig.getIntegrationType(), "Add handler", ErrorTypeCode.INTEGRATION); } liveConfig.setStatus(RunStatus.COMPLETE); liveConfig.setLastEndTime(TimeUtil.currentDate()); liveConfig.setUpdateDts(TimeUtil.currentDate()); liveConfig.setUpdateUser(OpenStorefrontConstant.SYSTEM_USER); persistenceService.persist(liveConfig); log.log(Level.FINE, MessageFormat.format("Completed {1} Configuration for Integration for: {0}", component.getName(), integrationConfig.getIntegrationType())); } catch (Exception e) { errorConfig = true; //This is a critical loop ErrorInfo errorInfo = new ErrorInfo(e, null); SystemErrorModel errorModel = getSystemService().generateErrorTicket(errorInfo); //put in fail state liveConfig.setStatus(RunStatus.ERROR); liveConfig.setErrorMessage(errorModel.getMessage()); liveConfig.setErrorTicketNumber(errorModel.getErrorTicketNumber()); liveConfig.setLastEndTime(TimeUtil.currentDate()); liveConfig.setUpdateDts(TimeUtil.currentDate()); liveConfig.setUpdateUser(OpenStorefrontConstant.SYSTEM_USER); persistenceService.persist(liveConfig); log.log(Level.FINE, MessageFormat.format("Failed on {1} Configuration for Integration for: {0}", component.getName(), integrationConfig.getIntegrationType()), e); } } } else { log.log(Level.WARNING, MessageFormat.format( "No Active Integration configs for: {0} (Integration is doing nothing)", component.getName())); } if (errorConfig) { liveIntegration.setStatus(RunStatus.ERROR); } else { liveIntegration.setStatus(RunStatus.COMPLETE); } liveIntegration.setLastEndTime(TimeUtil.currentDate()); liveIntegration.setUpdateDts(TimeUtil.currentDate()); liveIntegration.setUpdateUser(OpenStorefrontConstant.SYSTEM_USER); persistenceService.persist(liveIntegration); log.log(Level.FINE, MessageFormat.format("Completed Integration for: {0}", component.getName())); } else { 
log.log(Level.FINE, MessageFormat.format( "Not time to run integration or the system is currently working on the integration. Component Id: {0}", componentId)); } } else { log.log(Level.WARNING, MessageFormat .format("There is no active integration for this component. Id: {0}", componentId)); } }