List of usage examples for the java.util.Date.from(Instant) factory method
public static Date from(Instant instant)
From source file:org.codice.ddf.rest.impl.CatalogServiceImpl.java
/**
 * Parses the content of {@code inputStream} according to {@code attributeFormat} and appends the
 * resulting value to the attribute named {@code parsedName} in {@code attributeMap}, creating the
 * attribute entry if it does not exist yet.
 *
 * <p>Input is capped at {@code MAX_INPUT_SIZE} bytes; oversized input, OBJECT-format attributes,
 * and unparseable values are skipped with a debug log rather than an error (best-effort override).
 *
 * @param attributeMap    accumulator of attributes being overridden, keyed by attribute name
 * @param parsedName      name of the attribute to set
 * @param inputStream     raw attribute content; closed by this method
 * @param attributeFormat declared format of the content, which drives value conversion
 */
private void parseAttribute(Map<String, AttributeImpl> attributeMap, String parsedName,
        InputStream inputStream, AttributeType.AttributeFormat attributeFormat) {
    // try-with-resources guarantees the caller's stream is closed on every path.
    try (InputStream is = inputStream;
            InputStream boundedStream = new BoundedInputStream(is, MAX_INPUT_SIZE + 1L)) {
        if (attributeFormat == OBJECT) {
            LOGGER.debug("Object type not supported for override");
            return;
        }
        byte[] bytes = IOUtils.toByteArray(boundedStream);
        // The bounded stream reads at most MAX_INPUT_SIZE + 1 bytes, so this detects overflow.
        if (bytes.length > MAX_INPUT_SIZE) {
            LOGGER.debug("Attribute length is limited to {} bytes", MAX_INPUT_SIZE);
            return;
        }
        AttributeImpl attribute;
        if (attributeMap.containsKey(parsedName)) {
            attribute = attributeMap.get(parsedName);
        } else {
            attribute = new AttributeImpl(parsedName, Collections.emptyList());
            attributeMap.put(parsedName, attribute);
        }
        if (attributeFormat == BINARY) {
            attribute.addValue(bytes);
            return;
        }
        // NOTE(review): uses the platform default charset — assumes input was produced on the
        // same platform; confirm whether UTF-8 should be forced here.
        String value = new String(bytes, Charset.defaultCharset());
        switch (attributeFormat) {
        case XML:
        case GEOMETRY:
        case STRING:
            attribute.addValue(value);
            break;
        case BOOLEAN:
            attribute.addValue(Boolean.valueOf(value));
            break;
        case SHORT:
            attribute.addValue(Short.valueOf(value));
            break;
        case LONG:
            attribute.addValue(Long.valueOf(value));
            break;
        case INTEGER:
            attribute.addValue(Integer.valueOf(value));
            break;
        case FLOAT:
            attribute.addValue(Float.valueOf(value));
            break;
        case DOUBLE:
            attribute.addValue(Double.valueOf(value));
            break;
        case DATE:
            try {
                Instant instant = Instant.parse(value);
                attribute.addValue(Date.from(instant));
            } catch (DateTimeParseException e) {
                // BUG FIX: previously logged the AttributeImpl object instead of the text
                // that failed to parse, making the debug message useless.
                LOGGER.debug("Unable to parse instant '{}'", value, e);
            }
            break;
        default:
            LOGGER.debug("Attribute format '{}' not supported", attributeFormat);
            break;
        }
    } catch (IOException e) {
        LOGGER.debug("Unable to read attribute to override", e);
    }
}
From source file:alfio.manager.EventManager.java
/**
 * Reconciles the pool of tickets after a category's size was changed.
 *
 * <p>When {@code addedTickets > 0}, unallocated tickets are locked and moved into the updated
 * category; when negative, the surplus tickets of the category are invalidated and replaced by
 * freshly generated RELEASED placeholders. A zero delta is a no-op.
 *
 * @param event        the event the category belongs to
 * @param original     the category before modification (currently unused in the body)
 * @param updated      the category after modification
 * @param addedTickets delta between new and old ticket count (may be negative)
 * @param resetToFree  when true, skip resetting the moved tickets (public categories only)
 * @throws IllegalStateException when shrinking and fewer free tickets exist than must be removed
 */
void handleTicketNumberModification(Event event, TicketCategory original, TicketCategory updated,
        int addedTickets, boolean resetToFree) {
    if (addedTickets == 0) {
        log.debug("ticket handling not required since the number of ticket wasn't modified");
        return;
    }
    log.debug("modification detected in ticket number. The difference is: {}", addedTickets);
    if (addedTickets > 0) {
        //the updated category contains more tickets than the older one
        // SELECT ... FOR UPDATE: rows are locked so the batch update below cannot race with
        // concurrent allocations.
        List<Integer> lockedTickets = ticketRepository.selectNotAllocatedTicketsForUpdate(event.getId(),
                addedTickets, asList(TicketStatus.FREE.name(), TicketStatus.RELEASED.name()));
        Validate.isTrue(addedTickets == lockedTickets.size(),
                "Cannot add %d tickets. There are only %d free tickets.", addedTickets,
                lockedTickets.size());
        // Re-home every locked ticket into the updated category with its new source price.
        jdbc.batchUpdate(ticketRepository.bulkTicketUpdate(), lockedTickets.stream()
                .map(id -> new MapSqlParameterSource("id", id).addValue("categoryId", updated.getId())
                        .addValue("srcPriceCts", updated.getSrcPriceCts()))
                .toArray(MapSqlParameterSource[]::new));
        if (updated.isAccessRestricted()) {
            //since the updated category is not public, the tickets shouldn't be distributed to waiting people.
            ticketRepository.revertToFree(event.getId(), updated.getId(), lockedTickets);
        } else if (!resetToFree) {
            ticketRepository.resetTickets(lockedTickets);
        }
    } else {
        // Shrinking: lock victims first, then verify enough free tickets exist before touching data.
        int absDifference = Math.abs(addedTickets);
        final List<Integer> ids = ticketRepository.lockTicketsToInvalidate(event.getId(),
                updated.getId(), absDifference);
        int actualDifference = ids.size();
        if (actualDifference < absDifference) {
            throw new IllegalStateException("Cannot invalidate " + absDifference
                    + " tickets. There are only " + actualDifference + " free tickets");
        }
        ticketRepository.invalidateTickets(ids);
        // Replace the invalidated tickets with empty RELEASED placeholders stamped with the
        // event-local current time.
        final MapSqlParameterSource[] params = generateEmptyTickets(event,
                Date.from(ZonedDateTime.now(event.getZoneId()).toInstant()), absDifference,
                TicketStatus.RELEASED).toArray(MapSqlParameterSource[]::new);
        jdbc.batchUpdate(ticketRepository.bulkTicketInitialization(), params);
    }
}
From source file:org.codice.ddf.catalog.ui.metacard.MetacardApplication.java
/**
 * Applies a list of attribute changes to the referenced metacards and submits them to the catalog
 * framework as a single bulk update.
 *
 * <p>All referenced metacards are fetched up front; a change referencing a metacard that is
 * missing (or not visible to the subject) aborts the whole patch with {@link NotFoundException},
 * so the update is all-or-nothing at the request level.
 *
 * @param metacardChanges   changes to apply; each change carries target ids, an attribute name
 *                          and the new values
 * @param subjectIdentifer  identifier of the requesting subject, used for logging only
 *                          (note: parameter name typo "Identifer" kept — renaming is a wider change)
 * @return the framework's response for the bulk update
 * @throws SourceUnavailableException propagated from the catalog framework
 * @throws IngestException            propagated from the catalog framework
 * @throws NotFoundException          when a referenced metacard cannot be resolved
 */
protected UpdateResponse patchMetacards(List<MetacardChanges> metacardChanges, String subjectIdentifer)
        throws SourceUnavailableException, IngestException {
    // Collect every id touched by any changeset so they can be fetched in one query.
    Set<String> changedIds = metacardChanges.stream().flatMap(mc -> mc.getIds().stream())
            .collect(Collectors.toSet());
    Map<String, Result> results = util.getMetacardsWithTagById(changedIds, "*");
    for (MetacardChanges changeset : metacardChanges) {
        for (AttributeChange attributeChange : changeset.getAttributes()) {
            for (String id : changeset.getIds()) {
                List<String> values = attributeChange.getValues();
                Result result = results.get(id);
                if (result == null) {
                    LOGGER.debug(
                            "Metacard {} either does not exist or user {} does not have permission to see it",
                            id, subjectIdentifer);
                    throw new NotFoundException("Result was not found");
                }
                Metacard resultMetacard = result.getMetacard();
                // Identity by default; date-typed attributes additionally go through the
                // string -> Instant -> Date conversion.
                Function<Serializable, Serializable> mapFunc = Function.identity();
                if (isChangeTypeDate(attributeChange, resultMetacard)) {
                    mapFunc = mapFunc.andThen(serializable -> Date.from(util.parseDate(serializable)));
                }
                // Nulls are dropped from the value list before the attribute is replaced.
                resultMetacard.setAttribute(new AttributeImpl(attributeChange.getAttribute(),
                        values.stream().filter(Objects::nonNull).map(mapFunc).collect(Collectors.toList())));
            }
        }
    }
    // The fetched (now mutated in place) metacards are sent back in a single update request.
    List<Metacard> changedMetacards = results.values().stream().map(Result::getMetacard)
            .collect(Collectors.toList());
    return catalogFramework.update(new UpdateRequestImpl(
            changedMetacards.stream().map(Metacard::getId).toArray(String[]::new), changedMetacards));
}
From source file:com.ccserver.digital.service.LOSService.java
private XMLGregorianCalendar dateToXMLGregorianCalendar(LocalDateTime localDateTime) { if (localDateTime == null) { return null; }//w ww . j av a2 s . co m Date date = Date.from(localDateTime.atZone(ZoneId.systemDefault()).toInstant()); return dateToXMLGregorianCalendar(date); }
From source file:net.nikr.eve.jeveasset.gui.tabs.tracker.TrackerTab.java
private Date getFromDate() { LocalDate date = jFrom.getDate(); if (date == null) { return null; }//ww w . j av a 2s . c o m Instant instant = date.atStartOfDay().atZone(ZoneId.of("GMT")).toInstant(); //Start of day - GMT return Date.from(instant); }
From source file:net.nikr.eve.jeveasset.gui.tabs.tracker.TrackerTab.java
private Date getToDate() { LocalDate date = jTo.getDate(); if (date == null) { return null; }/*from w w w . j a v a 2s . com*/ Instant instant = date.atTime(23, 59, 59).atZone(ZoneId.of("GMT")).toInstant(); //End of day - GMT return Date.from(instant); }
From source file:org.sakaiproject.contentreview.turnitin.oc.ContentReviewServiceTurnitinOC.java
public void processUnsubmitted() { // Submission process phase 1 // 1. Establish submission object, get ID // 2. Upload original file to submission // 3. Start originality report int errors = 0; int success = 0; Optional<ContentReviewItem> nextItem = null; while ((nextItem = crqs.getNextItemInQueueToSubmit(getProviderId())).isPresent()) { try {//from ww w. ja va 2 s.c o m ContentReviewItem item = nextItem.get(); if (!incrementItem(item)) { errors++; continue; } // Handle items that only generate reports on due date // Get assignment associated with current item's task Id Assignment assignment = assignmentService .getAssignment(entityManager.newReference(item.getTaskId())); String reportGenSpeed = null; if (assignment != null) { Date assignmentDueDate = Date.from(assignment.getDueDate()); reportGenSpeed = assignment.getProperties().get("report_gen_speed"); // If report gen speed is set to due date, and it's before the due date right now, do not process item if (assignmentDueDate != null && GENERATE_REPORTS_ON_DUE_DATE.equals(reportGenSpeed) && assignmentDueDate.after(new Date())) { log.info("Report generate speed is 2, skipping for now. 
ItemID: " + item.getId()); // We don't items with gen speed 2 items to exceed retry count maximum // Reset retry count to zero item.setRetryCount(Long.valueOf(0)); item.setNextRetryTime(getDueDateRetryTime(assignmentDueDate)); crqs.update(item); continue; } } // EXTERNAL ID DOES NOT EXIST, CREATE SUBMISSION AND UPLOAD CONTENTS TO TCA // (STAGE 1) if (StringUtils.isEmpty(item.getExternalId())) { //Paper is ready to be submitted ContentResource resource = null; try { // Get resource with current item's content Id resource = contentHostingService.getResource(item.getContentId()); } catch (IdUnusedException e4) { log.error("IdUnusedException: no resource with id " + item.getContentId(), e4); item.setLastError("IdUnusedException: no resource with id " + item.getContentId()); item.setStatus(ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_NO_RETRY_CODE); crqs.update(item); errors++; continue; } catch (PermissionException e) { log.error("PermissionException: no resource with id " + item.getContentId(), e); item.setLastError("PermissionException: no resource with id " + item.getContentId()); item.setStatus(ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_NO_RETRY_CODE); crqs.update(item); errors++; continue; } catch (TypeException e) { log.error("TypeException: no resource with id " + item.getContentId(), e); item.setLastError("TypeException: no resource with id " + item.getContentId()); item.setStatus(ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_NO_RETRY_CODE); crqs.update(item); errors++; continue; } // Get filename of submission String fileName = resource.getProperties().getProperty(ResourceProperties.PROP_DISPLAY_NAME); // If fileName is empty set default if (StringUtils.isEmpty(fileName)) { fileName = "submission_" + item.getUserId() + "_" + item.getSiteId(); log.info("Using Default Filename " + fileName); } // Add .html for inline submissions if ("true".equals( resource.getProperties().getProperty(AssignmentConstants.PROP_INLINE_SUBMISSION)) && 
FilenameUtils.getExtension(fileName).isEmpty()) { fileName += HTML_EXTENSION; } boolean updateLastError = true; try { log.info("Submission starting..."); // Retrieve submissionId from TCA and set to externalId //get site title Site site = null; try { site = siteService.getSite(item.getSiteId()); } catch (Exception e) { //no worries, just log it log.error("Site not found for item: " + item.getId() + ", site: " + item.getSiteId(), e); } String externalId = getSubmissionId(item, fileName, site, assignment); if (StringUtils.isEmpty(externalId)) { // getSubmissionId sets the item's lastError accurately in accordance with the Turnitin response updateLastError = false; throw new Exception("Failed to obtain a submission ID from Turnitin"); } else { // Add filename to content upload headers CONTENT_UPLOAD_HEADERS.put(HEADER_DISP, "inline; filename=\"" + fileName + "\""); // Upload submission contents of to TCA uploadExternalContent(externalId, resource.getContent()); // Set item externalId to externalId item.setExternalId(externalId); // Reset retry count item.setRetryCount(new Long(0)); Calendar cal = Calendar.getInstance(); // Reset cal to current time cal.setTime(new Date()); // Reset delay time cal.add(Calendar.MINUTE, getDelayTime(item.getRetryCount())); // Schedule next retry time item.setNextRetryTime(cal.getTime()); item.setDateSubmitted(new Date()); crqs.update(item); success++; } } catch (Exception e) { log.error(e.getMessage(), e); if (updateLastError) { item.setLastError(e.getMessage()); } item.setStatus(ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_RETRY_CODE); crqs.update(item); errors++; } } else { // EXTERNAL ID EXISTS, START SIMILARITY REPORT GENERATION PROCESS (STAGE 2) try { // Get submission status, returns the state of the submission as string JSONObject submissionJSON = getSubmissionJSON(item.getExternalId()); if (!submissionJSON.containsKey("status")) { throw new TransientSubmissionException( "Response from Turnitin is missing expected 
data"); } String submissionStatus = submissionJSON.getString("status"); if (COMPLETE_STATUS.equals(submissionStatus)) { success++; } else if (CREATED_STATUS.equals(submissionStatus) || PROCESSING_STATUS.equals(submissionStatus)) { // do nothing item is still being processes } else { // returned with an error status errors++; } handleSubmissionStatus(submissionJSON, item, assignment); } catch (Exception e) { log.error(e.getMessage(), e); item.setLastError(e.getMessage()); item.setStatus(ContentReviewConstants.CONTENT_REVIEW_SUBMISSION_ERROR_RETRY_CODE); crqs.update(item); errors++; } } } catch (Exception e) { log.error(e.getMessage(), e); } } log.info("Turnitin submission queue completed: " + success + " items submitted, " + errors + " errors."); }
From source file:Presentacion.FUsuarios.java
private void btnRegistrarClienteMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_btnRegistrarClienteMouseClicked if (fc == null || !fc.getSelectedFile().isFile()) { JOptionPane.showMessageDialog(this, "Debe seleccionar una imagen", "Alerta", JOptionPane.WARNING_MESSAGE); } else {//from w w w . j av a2 s . c o m if (txtApellido.getText().isEmpty() || txtDireccion.getText().isEmpty() || txtEmail.getText().isEmpty() || txtNickName.getText().isEmpty() || txtNombre.getText().isEmpty()) { JOptionPane.showMessageDialog(this, "Debe ingresar todos los datos", "Alerta", JOptionPane.WARNING_MESSAGE); } else { webservice.Cliente user = new Cliente(); user.setDireccion(txtDireccion.getText()); user.setEmail(txtEmail.getText()); user.setNickname(txtNickName.getText()); user.setNombre(txtNombre.getText()); user.setApellido(txtApellido.getText()); //PORCEDIMIENTO PARA ENCRIPTAR LA CLAVE INGRESADA CUANDO INICIA SESIN UN USUARIO. String pass = jPasswordField2.getText(); String encriptMD5 = DigestUtils.md5Hex(pass); pass = "md5:" + encriptMD5; user.setPassword(pass); try { int year = Integer.getInteger(drpYear.getToolTipText()); int day = Integer.getInteger(drpDay.getModel().getValue().toString()); int month = Integer.getInteger(drpMes.getValue().toString()); GregorianCalendar c = new GregorianCalendar(); c.setTime(new Date(year, month, day)); XMLGregorianCalendar date2 = DatatypeFactory.newInstance().newXMLGregorianCalendar(c); user.setFechaNac(date2); } catch (Exception ex) { GregorianCalendar c = new GregorianCalendar(); c.setTime(Date.from(Instant.EPOCH)); try { XMLGregorianCalendar date2 = DatatypeFactory.newInstance().newXMLGregorianCalendar(c); user.setFechaNac(date2); } catch (Exception ex2) { System.out.println("Error en fecha"); } } user.setImagen(txtNickName.getText() + ".jpg"); QuickOrderWebService webService = new QuickOrderWebService(); ControllerInterface port = webService.getQuickOrderWebServicePort(); String result = port.registrarCliente(user); if 
(result.isEmpty()) { JOptionPane.showMessageDialog(this, "Usuario ingresado correctamente"); txtApellido.setText(""); txtDireccion.setText(""); txtEmail.setText(""); txtNickName.setText(""); txtNombre.setText(""); jPasswordField2.setText(""); } else { if (result.equals("emailError1")) { JOptionPane.showMessageDialog(this, "El email ya se encuentra registrado, por favor ingrese otro.", "Alerta", JOptionPane.WARNING_MESSAGE); } if (result.equals("nicknameError1")) { JOptionPane.showMessageDialog(this, "El nickname no se encuentra disponible, por favor ingrese otro.", "Alerta", JOptionPane.WARNING_MESSAGE); } else { JOptionPane.showMessageDialog(this, "Error al ingresar el cliente", "Error", JOptionPane.ERROR_MESSAGE); } } } } }
From source file:io.hops.hopsworks.common.project.ProjectController.java
public String[] forceCleanup(String projectName, String userEmail, String sessionId) { CleanupLogger cleanupLogger = new CleanupLogger(projectName); DistributedFileSystemOps dfso = null; YarnClientWrapper yarnClientWrapper = null; try {/*w w w. j a v a 2 s .co m*/ dfso = dfs.getDfsOps(); yarnClientWrapper = ycs.getYarnClientSuper(settings.getConfiguration()); Project project = projectFacade.findByName(projectName); if (project != null) { cleanupLogger.logSuccess("Project not found in the database"); // Run custom handler for project deletion for (ProjectHandler projectHandler : projectHandlers) { try { projectHandler.preDelete(project); cleanupLogger.logSuccess("Handler " + projectHandler.getClassName() + " successfully run"); } catch (Exception e) { cleanupLogger.logError("Error running handler: " + projectHandler.getClassName() + " during project cleanup"); cleanupLogger.logError(e.getMessage()); } } // Remove from Project team try { updateProjectTeamRole(project, ProjectRoleTypes.UNDER_REMOVAL); cleanupLogger.logSuccess("Updated team role"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Get Yarn applications List<ApplicationReport> projectApps = null; try { Collection<ProjectTeam> team = project.getProjectTeamCollection(); Set<String> hdfsUsers = new HashSet<>(); for (ProjectTeam pt : team) { String hdfsUsername = hdfsUsersController.getHdfsUserName(project, pt.getUser()); hdfsUsers.add(hdfsUsername); } hdfsUsers.add(project.getProjectGenericUser()); projectApps = getYarnApplications(hdfsUsers, yarnClientWrapper.getYarnClient()); cleanupLogger.logSuccess("Gotten Yarn applications"); } catch (Exception ex) { cleanupLogger.logError("Error when reading YARN apps during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Kill Zeppelin jobs try { killZeppelin(project.getId(), sessionId); cleanupLogger.logSuccess("Killed Zeppelin"); } catch (Exception ex) { LOGGER.log(Level.SEVERE, "Error when killing Zeppelin during project 
cleanup", ex); cleanupLogger.logError(ex.getMessage()); } // Stop Jupyter try { jupyterProcessFacade.stopProject(project); cleanupLogger.logSuccess("Stopped Jupyter"); } catch (Exception ex) { cleanupLogger.logError("Error when killing Jupyter during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Kill Yarn Jobs try { killYarnJobs(project); cleanupLogger.logSuccess("Killed Yarn jobs"); } catch (Exception ex) { cleanupLogger.logError("Error when killing YARN jobs during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Wait for Yarn logs try { waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient()); cleanupLogger.logSuccess("Gotten logs for jobs"); } catch (Exception ex) { cleanupLogger.logError("Error when getting Yarn logs during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Log removal try { logProject(project, OperationType.Delete); cleanupLogger.logSuccess("Logged project removal"); } catch (Exception ex) { cleanupLogger.logError("Error when logging project removal during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Change ownership of root dir try { Path path = new Path(File.separator + Settings.DIR_ROOT + File.separator + project.getName()); changeOwnershipToSuperuser(path, dfso); cleanupLogger.logSuccess("Changed ownership of root Project dir"); } catch (Exception ex) { cleanupLogger .logError("Error when changing ownership of root Project dir during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Change ownership of tmp file Path dummy = new Path("/tmp/" + project.getName()); try { changeOwnershipToSuperuser(dummy, dfso); cleanupLogger.logSuccess("Changed ownership of dummy inode"); } catch (Exception ex) { cleanupLogger.logError("Error when changing ownership of dummy inode during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Remove Kafka try { removeKafkaTopics(project); cleanupLogger.logSuccess("Removed Kafka topics"); } catch (Exception ex) 
{ cleanupLogger.logError("Error when removing kafka topics during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Remove certificates try { certificatesController.deleteProjectCertificates(project); cleanupLogger.logSuccess("Removed certificates"); } catch (CAException ex) { if (ex.getErrorCode() != RESTCodes.CAErrorCode.CERTNOTFOUND) { cleanupLogger.logError("Error when removing certificates during project cleanup"); } } catch (IOException ex) { cleanupLogger.logError("Error when removing certificates during project cleanup"); cleanupLogger.logError(ex.getMessage()); } List<HdfsUsers> usersToClean = getUsersToClean(project); List<HdfsGroups> groupsToClean = getGroupsToClean(project); // Remove project related files try { removeProjectRelatedFiles(usersToClean, dfso); cleanupLogger.logSuccess("Removed project related files"); } catch (Exception ex) { cleanupLogger.logError("Error when removing project-related files during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Remove quotas try { removeQuotas(project); cleanupLogger.logSuccess("Removed quotas"); } catch (Exception ex) { cleanupLogger.logError("Error when removing quota during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Change owner for files in shared datasets try { fixSharedDatasets(project, dfso); cleanupLogger.logSuccess("Fixed shared datasets"); } catch (Exception ex) { cleanupLogger.logError("Error when changing ownership during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // 16) Delete Hive database - will automatically cleanup all the Hive's metadata try { hiveController.dropDatabase(project, dfso, true); cleanupLogger.logSuccess("Removed Hive db"); } catch (Exception ex) { cleanupLogger.logError("Error when removing hive db during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Delete elasticsearch template for this project try { removeElasticsearch(project); cleanupLogger.logSuccess("Removed ElasticSearch"); } 
catch (Exception ex) { cleanupLogger.logError("Error when removing elastic during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // delete project group and users try { removeGroupAndUsers(groupsToClean, usersToClean); cleanupLogger.logSuccess("Removed HDFS Groups and Users"); } catch (Exception ex) { cleanupLogger.logError("Error when removing HDFS groups/users during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // remove anaconda repos try { removeJupyter(project); cleanupLogger.logSuccess("Removed Jupyter"); } catch (Exception ex) { cleanupLogger.logError("Error when removing Anaconda during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // remove running tensorboards repos try { removeTensorBoard(project); cleanupLogger.logSuccess("Removed local TensorBoards"); } catch (Exception ex) { cleanupLogger.logError("Error when removing running TensorBoards during project cleanup"); } try { tfServingController.deleteTfServings(project); cleanupLogger.logSuccess("Removed Tf Servings"); } catch (Exception ex) { cleanupLogger.logError("Error when removing Tf Serving instances"); cleanupLogger.logError(ex.getMessage()); } // remove dumy Inode try { dfso.rm(dummy, true); cleanupLogger.logSuccess("Removed dummy Inode"); } catch (Exception ex) { cleanupLogger.logError("Error when removing dummy Inode during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // remove folder try { removeProjectFolder(project.getName(), dfso); cleanupLogger.logSuccess("Removed root Project folder"); } catch (Exception ex) { cleanupLogger.logError("Error when removing root Project dir during project cleanup"); cleanupLogger.logError(ex.getMessage()); } // Run custom handler for project deletion for (ProjectHandler projectHandler : projectHandlers) { try { projectHandler.postDelete(project); cleanupLogger.logSuccess("Handler " + projectHandler.getClassName() + " successfully run"); } catch (Exception e) { cleanupLogger.logError("Error 
running handler: " + projectHandler.getClassName() + " during project cleanup"); cleanupLogger.logError(e.getMessage()); } } } else { // Create /tmp/Project and add to database so we lock in case someone tries to create a Project // with the same name at the same time cleanupLogger.logSuccess("Project is *NOT* in the database, going to remove as much as possible"); Date now = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant()); Users user = userFacade.findByEmail(userEmail); Project toDeleteProject = new Project(projectName, user, now, PaymentType.PREPAID); toDeleteProject.setKafkaMaxNumTopics(settings.getKafkaMaxNumTopics()); Path tmpInodePath = new Path(File.separator + "tmp" + File.separator + projectName); try { if (!dfso.exists(tmpInodePath.toString())) { dfso.touchz(tmpInodePath); } Inode tmpInode = inodes.getInodeAtPath(tmpInodePath.toString()); if (tmpInode != null) { toDeleteProject.setInode(tmpInode); projectFacade.persistProject(toDeleteProject); projectFacade.flushEm(); cleanupLogger.logSuccess("Created dummy Inode"); } } catch (IOException ex) { cleanupLogger.logError("Could not create dummy Inode, moving on unsafe"); } // Kill jobs List<HdfsUsers> projectHdfsUsers = hdfsUsersController.getAllProjectHdfsUsers(projectName); try { Set<String> hdfsUsersStr = new HashSet<>(); for (HdfsUsers hdfsUser : projectHdfsUsers) { hdfsUsersStr.add(hdfsUser.getName()); } hdfsUsersStr.add(projectName + "__" + Settings.PROJECT_GENERIC_USER_SUFFIX); List<ApplicationReport> projectApps = getYarnApplications(hdfsUsersStr, yarnClientWrapper.getYarnClient()); waitForJobLogs(projectApps, yarnClientWrapper.getYarnClient()); cleanupLogger.logSuccess("Killed all Yarn Applications"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Cleanup Jupyter project try { jupyterProcessFacade.stopProject(toDeleteProject); cleanupLogger.logSuccess("Cleaned Jupyter environment"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } 
// Remove project related files try { removeProjectRelatedFiles(projectHdfsUsers, dfso); cleanupLogger.logSuccess("Removed project related files from HDFS"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove Hive database try { hiveController.dropDatabase(toDeleteProject, dfso, true); cleanupLogger.logSuccess("Dropped Hive database"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove ElasticSearch index try { removeElasticsearch(project); cleanupLogger.logSuccess("Removed ElasticSearch"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Remove HDFS Groups and Users try { List<HdfsGroups> projectHdfsGroups = hdfsUsersController.getAllProjectHdfsGroups(projectName); removeGroupAndUsers(projectHdfsGroups, projectHdfsUsers); cleanupLogger.logSuccess("Removed HDFS Groups and Users"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove Yarn project quota try { removeQuotas(toDeleteProject); cleanupLogger.logSuccess("Removed project quota"); } catch (Exception ex) { cleanupLogger.logError(ex.getMessage()); } // Remove Certificates try { opensslOperations.deleteProjectCertificate(projectName); userCertsFacade.removeAllCertsOfAProject(projectName); cleanupLogger.logSuccess("Deleted certificates"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove root project directory try { removeProjectFolder(projectName, dfso); cleanupLogger.logSuccess("Removed root project directory"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } // Remove /tmp/project try { dfso.rm(new Path(File.separator + "tmp" + File.separator + projectName), true); cleanupLogger.logSuccess("Removed /tmp"); } catch (IOException ex) { cleanupLogger.logError(ex.getMessage()); } } } finally { dfs.closeDfsClient(dfso); ycs.closeYarnClient(yarnClientWrapper); LOGGER.log(Level.INFO, cleanupLogger.getSuccessLog().toString()); LOGGER.log(Level.SEVERE, 
cleanupLogger.getErrorLog().toString()); sendInbox(cleanupLogger.getSuccessLog().append("\n").append(cleanupLogger.getErrorLog()).append("\n") .toString(), userEmail); } String[] logs = new String[2]; logs[0] = cleanupLogger.getSuccessLog().toString(); logs[1] = cleanupLogger.getErrorLog().toString(); return logs; }
From source file:org.apache.james.jmap.methods.integration.GetMessageListMethodTest.java
/**
 * Converts a {@link LocalDate} into a legacy {@link Date} at the start of that day
 * in the test's fixed {@code ZONE_ID}.
 *
 * @param localDate the day to convert
 * @return midnight of {@code localDate} in {@code ZONE_ID} as a {@link Date}
 */
private Date convertToDate(LocalDate localDate) {
    Instant startOfDay = localDate.atStartOfDay(ZONE_ID).toInstant();
    return Date.from(startOfDay);
}