List of usage examples for java.util.Objects.isNull
public static boolean isNull(Object obj)
From source file:de.hybris.platform.mpintgomsbackoffice.actions.sync.SyncToMarketplaceAction.java
@Override public boolean canPerform(final ActionContext<ConsignmentModel> actionContext) { final Object data = actionContext.getData(); ConsignmentModel consignment = null; if (data instanceof ConsignmentModel) { consignment = (ConsignmentModel) data; }/*from w w w . j av a 2s. c o m*/ // A consignment is not shippable when it is a pickup order or when there is no quantity pending if (Objects.isNull(consignment) || Objects.isNull(consignment.getConsignmentEntries()) || consignment.getConsignmentEntries().stream() .filter(entry -> Objects.nonNull(entry.getQuantityPending())) .mapToLong(ConsignmentEntryModel::getQuantityPending).sum() == 0) { return false; } if (!checkflag(consignment)) { return false; } return true; }
From source file:org.kitodo.production.security.password.SecurityPasswordEncoder.java
/** * Encrypt a given string./*www .j a va 2 s . com*/ * * @param messageToEncrypt * String to encrypt * @return encrypted string or null on error */ public String encrypt(String messageToEncrypt) { if (Objects.isNull(messageToEncrypt)) { messageToEncrypt = ""; } try { byte[] utfEight = messageToEncrypt.getBytes(StandardCharsets.UTF_8); byte[] enc = encryptionCipher.doFinal(utfEight); return new String(Base64.encodeBase64(enc), StandardCharsets.UTF_8); } catch (BadPaddingException e) { logger.warn("Catched BadPaddingException with message: " + e.getMessage()); } catch (IllegalBlockSizeException e) { logger.warn("Catched IllegalBlockSizeException with message: " + e.getMessage()); } return null; }
From source file:org.openecomp.sdc.translator.services.heattotosca.helper.VolumeTranslationHelper.java
/**
 * Searches the given HEAT files for an output named after {@code resourceId} that is a
 * get_resource reference to a Cinder volume, and returns the matching file data together
 * with the output's entity id and translated id.
 *
 * @param resourceId  name of the output to look up in each file's outputs section
 * @param translateTo translation context providing file contents
 * @param fileDatas   candidate HEAT files to scan, in order
 * @return the first match wrapped in an {@code Optional}, or empty when no file qualifies
 */
private Optional<ResourceFileDataAndIDs> getResourceFileDataAndIDsForVolumeConnection(String resourceId,
        TranslateTo translateTo, List<FileData> fileDatas) {
    for (FileData data : fileDatas) {
        HeatOrchestrationTemplate heatOrchestrationTemplate = new YamlUtil().yamlToObject(
                translateTo.getContext().getFiles().getFileContent(data.getFile()),
                HeatOrchestrationTemplate.class);
        Map<String, Output> outputs = heatOrchestrationTemplate.getOutputs();
        // A template without an outputs section cannot expose the volume; skip it.
        if (Objects.isNull(outputs)) {
            continue;
        }
        Output output = outputs.get(resourceId);
        if (Objects.nonNull(output)) {
            Optional<AttachedResourceId> attachedOutputId = HeatToToscaUtil.extractAttachedResourceId(
                    data.getFile(), heatOrchestrationTemplate, translateTo.getContext(),
                    output.getValue());
            if (attachedOutputId.isPresent()) {
                AttachedResourceId attachedResourceId = attachedOutputId.get();
                // Only outputs that are get_resource references count as volume connections.
                if (!isOutputIsGetResource(resourceId, data, attachedResourceId)) {
                    continue;
                }
                String translatedId = (String) attachedResourceId.getTranslatedId();
                if (isOutputOfTypeCinderVolume(translateTo, data, heatOrchestrationTemplate,
                        translatedId)) {
                    ResourceFileDataAndIDs fileDataAndIDs = new ResourceFileDataAndIDs(
                            (String) attachedResourceId.getEntityId(), translatedId, data);
                    return Optional.of(fileDataAndIDs);
                } else {
                    // Output resolved, but its target is not a Cinder volume — warn and keep scanning.
                    logger.warn("output: '" + resourceId + "' in file '" + data.getFile()
                            + "' is not of type '"
                            + HeatResourcesTypes.CINDER_VOLUME_RESOURCE_TYPE.getHeatResource() + "'");
                }
            }
        } else {
            logger.warn("output: '" + resourceId + "' in file '" + data.getFile() + "' is not found");
        }
    }
    return Optional.empty();
}
From source file:org.openecomp.sdc.validation.impl.util.ResourceValidationHeatValidator.java
/**
 * Validates the resources of a HEAT template: checks depends-on references, security
 * groups coming from the base file's outputs, and dispatches per-resource-type checks
 * (Nova servers and server groups, resource groups, Neutron ports, Contrail attach
 * policies, and nested templates). Findings are reported through the global context.
 *
 * @param fileName the file name
 * @param baseFileName the base file name (may be null when there is no base file)
 * @param securityGroupsNamesFromBaseFileOutputs the security groups names from base file outputs
 * @param heatOrchestrationTemplate the heat orchestration template
 * @param globalContext the global context collecting validation messages
 */
public static void validateResourceType(String fileName, String baseFileName,
        Set<String> securityGroupsNamesFromBaseFileOutputs,
        HeatOrchestrationTemplate heatOrchestrationTemplate, GlobalValidationContext globalContext) {
    // Treat a missing resources section as an empty map so the loops below are safe.
    Map<String, Resource> resourceMap = heatOrchestrationTemplate.getResources() == null ? new HashMap<>()
            : heatOrchestrationTemplate.getResources();
    // Tracks how often each port is referenced; consumed by handleOrphanPorts at the end.
    Map<String, Integer> numberOfVisitsInPort = new HashMap<>();
    Set<String> resourcesNames = resourceMap.keySet();
    Set<String> sharedResourcesFromOutputMap = getSharedResourcesNamesFromOutputs(fileName,
            heatOrchestrationTemplate.getOutputs(), globalContext);
    boolean isBaseFile = baseFileName != null && fileName.equals(baseFileName);
    // Pre-collect the names of server groups, security groups and network rules so the
    // per-type validators can cross-reference them.
    Map<HeatResourcesTypes, List<String>> resourceTypeToNamesListMap = HeatResourcesTypes
            .getListForResourceType(HeatResourcesTypes.NOVA_SERVER_GROUP_RESOURCE_TYPE,
                    HeatResourcesTypes.NEUTRON_SECURITY_GROUP_RESOURCE_TYPE,
                    HeatResourcesTypes.CONTRAIL_NETWORK_RULE_RESOURCE_TYPE);
    initResourceTypeListWithItsResourcesNames(fileName, resourceTypeToNamesListMap, resourceMap,
            sharedResourcesFromOutputMap, globalContext);
    initVisitedPortsMap(fileName, resourceMap, numberOfVisitsInPort, globalContext);
    for (Map.Entry<String, Resource> resourceEntry : resourceMap.entrySet()) {
        String resourceType = resourceEntry.getValue().getType();
        validateSecurityGroupsFromBaseOutput(fileName, resourceEntry, isBaseFile,
                securityGroupsNamesFromBaseFileOutputs, globalContext);
        checkResourceDependsOn(fileName, resourceEntry.getValue(), resourcesNames, globalContext);
        if (Objects.isNull(resourceType)) {
            // A resource with no declared type is reported but does not abort validation.
            globalContext.addMessage(fileName, ErrorLevel.WARNING,
                    ErrorMessagesFormatBuilder.getErrorWithParameters(
                            Messages.INVALID_RESOURCE_TYPE.getErrorMessage(), "null",
                            resourceEntry.getKey()));
        } else {
            HeatResourcesTypes heatResourceType = HeatResourcesTypes.findByHeatResource(resourceType);
            if (heatResourceType != null) {
                // Known HEAT resource type: dispatch to the dedicated validator.
                switch (heatResourceType) {
                case NOVA_SERVER_RESOURCE_TYPE:
                    validateNovaServerResourceType(fileName, resourceEntry, numberOfVisitsInPort,
                            resourceTypeToNamesListMap
                                    .get(HeatResourcesTypes.NOVA_SERVER_GROUP_RESOURCE_TYPE),
                            heatOrchestrationTemplate, globalContext);
                    break;
                case NOVA_SERVER_GROUP_RESOURCE_TYPE:
                    validateNovaServerGroupPolicy(fileName, resourceEntry, globalContext);
                    break;
                case RESOURCE_GROUP_RESOURCE_TYPE:
                    validateResourceGroupType(fileName, resourceEntry, globalContext);
                    break;
                case NEUTRON_PORT_RESOURCE_TYPE:
                    validateNeutronPortType(fileName, resourceEntry, resourceTypeToNamesListMap
                            .get(HeatResourcesTypes.NEUTRON_SECURITY_GROUP_RESOURCE_TYPE),
                            globalContext);
                    break;
                case CONTRAIL_NETWORK_ATTACH_RULE_RESOURCE_TYPE:
                    validateContrailAttachPolicyType(resourceEntry, resourceTypeToNamesListMap
                            .get(HeatResourcesTypes.CONTRAIL_NETWORK_RULE_RESOURCE_TYPE));
                    break;
                default:
                    // Other known types have no dedicated validation here.
                }
            } else {
                // Unknown to the enum: may still be a nested template reference.
                if (HeatValidationService.isNestedResource(resourceType)) {
                    handleNestedResourceType(fileName, resourceEntry.getKey(), resourceEntry.getValue(),
                            globalContext);
                }
            }
        }
    }
    checkForEmptyResourceNamesInMap(fileName,
            CollectionUtils.isEmpty(securityGroupsNamesFromBaseFileOutputs), resourceTypeToNamesListMap,
            globalContext);
    handleOrphanPorts(fileName, numberOfVisitsInPort, globalContext);
}
From source file:de.hybris.platform.mpintgomsbackoffice.renderer.GetVerifyCodeRenderer.java
@Override public void render(final Component parent, final AbstractPanel panel, final TmallRefundRequestModel data, final DataType type, final WidgetInstanceManager widgetInstanceManager) { this.widgetInstanceManager = widgetInstanceManager; final Button loadBtn = new Button(Labels.getLabel("mpintgomsbackoffice.refund.button.getverifycode")); if (Objects.isNull(data) || Boolean.TRUE.equals(data.getWaitMarketPlaceResponse())) { loadBtn.setDisabled(true);/*from ww w. j a v a2 s . c o m*/ } else { loadBtn.setDisabled(true); //refund only at the first step if (RefundType.REFUND_ONLY.equals(data.getRefundType()) && RefundAction.AWAITING_APPROVAL.equals(data.getRefundAction())) { loadBtn.setDisabled(false); } //return and refund at the second step(refund step) if (RefundType.RETURN_REFUND.equals(data.getRefundType()) && RefundAction.AWAITING_RETURN_CONFIRMATION.equals(data.getRefundAction())) { loadBtn.setDisabled(false); } } loadBtn.setSclass("initial-load-btn"); loadBtn.addEventListener(Events.ON_CLICK, event -> { // trigger initial order load getVerifyCode(data); }); parent.appendChild(loadBtn); }
From source file:com.mac.holdempoker.app.impl.SimplePlayOrder.java
/**
 * Advances the deal order to the next player, wrapping back to index 0 after
 * the last player.
 *
 * @return the updated 0-based deal-order index
 * @throws NullPointerException if the ordered player list has not been initialised
 */
private int getDealOrder() {
    if (Objects.isNull(orderedPlayers)) {
        throw new NullPointerException("orderedPlayers is null");
    }
    // Fixed: the original `return dealOrder = (dealOrder + 1) < size ? ++dealOrder : 0;`
    // assigned dealOrder twice inside one expression (via ++ and =). Same result,
    // written explicitly.
    dealOrder = (dealOrder + 1) < orderedPlayers.size() ? dealOrder + 1 : 0;
    return dealOrder;
}
From source file:org.dbflute.intro.app.web.welcome.WelcomeAction.java
@NotAvailableDecommentServer @Execute//from ww w . jav a 2 s.c o m public JsonResponse<Void> create(WelcomeCreateBody welcomeCreateBody) { validate(welcomeCreateBody, messages -> { ClientPart client = welcomeCreateBody.client; String projectName = client.projectName; if (clientInfoLogic.getProjectList().contains(projectName)) { messages.addErrorsWelcomeClientAlreadyExists("projectName", projectName); } // done hakiba JDBC Driver's required check depending on database type by jflute (2017/04/13) // done hakiba needs to check jar existence by jflute (2017/04/06) TargetDatabase databaseCd = client.databaseCode; if (!databaseInfoLogic.isEmbeddedJar(databaseCd) && Objects.isNull(client.jdbcDriver)) { messages.addErrorsDatabaseNeedsJar("database", databaseCd.alias()); } // done hakiba add extension check by jflute (2017/04/06) Optional.ofNullable(client.jdbcDriver).map(driverPart -> driverPart.fileName) .filter(s -> StringUtils.isNotEmpty(s) && !s.endsWith(".jar")) .ifPresent(fileName -> messages.addErrorsDatabaseNeedsJar("jdbcDriver", fileName)); }); // check latest version of DBflute and download engine if need String latestVersion; try { latestVersion = publicPropertiesLogic.findProperties(welcomeCreateBody.useSystemProxies) .getDBFluteLatestReleaseVersion(); if (!engineInstallLogic.isDownloaded(latestVersion)) { engineInstallLogic.downloadUnzipping(latestVersion, welcomeCreateBody.useSystemProxies); } } catch (EngineDownloadErrorException e) { throw new NetworkErrorException(e.getMessage()); } // create client (replace client file, copy jar file ...) ClientModel clientModel = mappingToClientModel(welcomeCreateBody.client); clientUpdateLogic.createClient(clientModel); // connect test if need if (welcomeCreateBody.testConnection) { testConnectionIfPossible(clientModel); } return JsonResponse.asEmptyBody(); }
From source file:it.greenvulcano.gvesb.virtual.utils.MimeMessageHelper.java
public static Body getMessageBody(MimeMessage message, String mimeType) { try {// w w w . j a v a 2 s. c o m if (message.getContent() instanceof Multipart) { Multipart multipartMessage = (Multipart) message.getContent(); for (int i = 0; i < multipartMessage.getCount(); i++) { BodyPart bodyPart = multipartMessage.getBodyPart(i); if (bodyPart.isMimeType(mimeType) && (Part.INLINE.equalsIgnoreCase(bodyPart.getDisposition()) || Objects.isNull(bodyPart.getDisposition()))) { return new Body(bodyPart.getContentType(), bodyPart.getContent().toString()); } } } else { return new Body(message.getContentType(), message.getContent().toString()); } } catch (Exception e) { // do nothing } return null; }
From source file:org.freeeed.main.DocumentParser.java
/**
 * Parses a discovery file into the given metadata, dispatching on the file's
 * real extension: EML and NSFE emails get field extraction, PDFs go through
 * image/text parsing, and everything else is handed to Tika. Any parsing
 * failure is recorded in the metadata instead of being rethrown.
 *
 * @param discoveryFile the file to parse (path plus original file name)
 * @param metadata      accumulator for extracted text, content type and fields
 */
public void parse(DiscoveryFile discoveryFile, DocumentMetadata metadata) {
    LOGGER.debug("Parsing file: {}, original file name: {}", discoveryFile.getPath().getPath(),
            discoveryFile.getRealFileName());
    TikaInputStream inputStream = null;
    try {
        // Dispatch on the original file name's extension, not the on-disk path.
        String extension = Util.getExtension(discoveryFile.getRealFileName());
        LOGGER.debug("Detected extension: {}", extension);
        if ("eml".equalsIgnoreCase(extension)) {
            EmlParser emlParser = new EmlParser(discoveryFile.getPath());
            extractEmlFields(metadata, emlParser);
            inputStream = TikaInputStream.get(discoveryFile.getPath());
            String text = tika.parseToString(inputStream, metadata);
            metadata.set(DocumentMetadataKeys.DOCUMENT_TEXT, text);
            metadata.setContentType("message/rfc822");
            parseDateTimeReceivedFields(metadata);
            parseDateTimeSentFields(metadata, emlParser.getSentDate());
        } else if ("nsfe".equalsIgnoreCase(extension)) {
            NSFXDataParser emlParser = new NSFXDataParser(discoveryFile.getPath());
            extractEmlFields(metadata, emlParser);
            metadata.setContentType("application/vnd.lotus-notes");
            //        } else if ("jl".equalsIgnoreCase(extension)) {
            //            extractJlFields(discoveryFile.getPath().getPath(), metadata);
        } else if ("pdf".equalsIgnoreCase(extension)) {
            // PDFs may be image-only, so use the OCR-capable parser instead of Tika.
            metadata.setDocumentText(ImageTextParser.parseContent(discoveryFile.getPath().getPath()));
        } else {
            inputStream = TikaInputStream.get(discoveryFile.getPath());
            if (inputStream.available() > 0)
                metadata.setDocumentText(tika.parseToString(inputStream, metadata));
        }
        // Fall back to the raw extension when no content type was detected.
        if (Objects.isNull(metadata.getContentType())) {
            metadata.setContentType(extension);
        }
        // Images keep their content type; everything else is mapped to a file-type label.
        if (!metadata.getContentType().equals("image/jpeg") && !metadata.getContentType().equals("tiff")) {
            String fileType = CONTENT_TYPE_MAPPING.getFileType(metadata.getContentType());
            metadata.setFiletype(fileType);
        }
    } catch (Exception e) {
        // the show must still go on
        metadata.set(DocumentMetadataKeys.PROCESSING_EXCEPTION, e.getMessage());
        LOGGER.error("Problem parsing file", e);
    }
}
From source file:de.speexx.jira.jan.command.issuequery.CsvCreator.java
void printIssueData(final IssueData issueData, final List<FieldNamePath> currentFieldNames, final List<FieldName> historyFieldNames, final TemporalChangeOutput temporalOutput) { assert !Objects.isNull(issueData); assert !Objects.isNull(currentFieldNames); assert !Objects.isNull(historyFieldNames); assert !Objects.isNull(temporalOutput); final List<String> currentFieldEntries = fetchCurrentFieldEntries(issueData, currentFieldNames); try {//from w w w . j a v a 2s .c o m final CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8), RFC4180); if (issueData.getHistoricalCount() == 0) { final int fieldsPerChangeEntry = calculateHistoricalFieldSize(temporalOutput); final int max = historyFieldNames.size() * fieldsPerChangeEntry; final List<String> out = new ArrayList(currentFieldEntries); addEmptyChangeData(out, max); csvPrinter.printRecord(out); } else { final int fieldsPerChangeEntry = calculateHistoricalFieldSize(temporalOutput); final int historyFieldNamesSize = historyFieldNames.size(); for (int idx = 0; idx < historyFieldNamesSize; idx++) { final FieldName fieldName = historyFieldNames.get(idx); final List<HistoricalDataEntry> historicalData = issueData.getHistoricalIssueData(fieldName); LocalDateTime lastChangeDate = issueData.getCreatedDate() .orElseThrow(() -> new IllegalStateException("No createdDate available")); for (final HistoricalDataEntry entry : historicalData) { final List<String> out = new ArrayList(); for (int i = 0; i < historyFieldNamesSize; i++) { if (i != idx) { addEmptyChangeData(out, fieldsPerChangeEntry); } else { lastChangeDate = addChangeData(out, entry, temporalOutput, lastChangeDate); } } final List<String> outList = new ArrayList<>(currentFieldEntries); outList.addAll(out); csvPrinter.printRecord(outList.toArray()); } } } csvPrinter.flush(); } catch (final IOException e) { throw new JiraAnalyzeException(e); } }