List of usage examples for org.apache.commons.lang3 StringUtils isNoneBlank
public static boolean isNoneBlank(final CharSequence... css)
Checks if none of the CharSequences are empty (""), null or whitespace only.
StringUtils.isNoneBlank((String) null)  = false
StringUtils.isNoneBlank(null, "foo")    = false
StringUtils.isNoneBlank(null, null)     = false
StringUtils.isNoneBlank("", "bar")      = false
StringUtils.isNoneBlank("bob", "")      = false
StringUtils.isNoneBlank(" bob ", null)  = false
StringUtils.isNoneBlank(" ", "bar")     = false
StringUtils.isNoneBlank("foo", "bar")   = true
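The calls below are a minimal, self-contained sketch (class and variable names are illustrative, not taken from the source files that follow) showing isNoneBlank validating several values at once, and how the single-argument case lines up with isNotBlank:

import org.apache.commons.lang3.StringUtils;

public class IsNoneBlankExample {
    public static void main(String[] args) {
        String firstName = "Ada";
        String lastName = "   "; // whitespace only, therefore blank

        // false: at least one of the supplied values is blank
        System.out.println(StringUtils.isNoneBlank(firstName, lastName));

        // true: every value contains at least one non-whitespace character
        System.out.println(StringUtils.isNoneBlank("foo", " bar ", "42"));

        // With a single argument, isNoneBlank(s) gives the same result as isNotBlank(s)
        System.out.println(StringUtils.isNoneBlank(firstName)); // true
        System.out.println(StringUtils.isNotBlank(firstName));  // true
    }
}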
From source file:org.alfresco.rm.rest.api.unfiledrecordfolders.UnfiledRecordFolderChildrenRelation.java
@Override
@WebApiDescription(title = "Create one (or more) nodes as children of a unfiled record folder identified by 'unfiledRecordFolderId'")
public List<UnfiledRecordFolderChild> create(String unfiledRecordFolderId,
        final List<UnfiledRecordFolderChild> nodeInfos, Parameters parameters) {
    checkNotBlank("unfiledRecordFolderId", unfiledRecordFolderId);
    mandatory("nodeInfos", nodeInfos);
    mandatory("parameters", parameters);

    NodeRef parentNodeRef = apiUtils.lookupAndValidateNodeType(unfiledRecordFolderId,
            RecordsManagementModel.TYPE_UNFILED_RECORD_FOLDER);

    // Create the children
    RetryingTransactionCallback<List<NodeRef>> callback = new RetryingTransactionCallback<List<NodeRef>>() {
        public List<NodeRef> execute() {
            List<NodeRef> createdNodes = new LinkedList<>();
            for (UnfiledRecordFolderChild nodeInfo : nodeInfos) {
                NodeRef nodeParent;
                if (StringUtils.isNoneBlank(nodeInfo.getRelativePath())) {
                    nodeParent = apiUtils.lookupAndValidateRelativePath(parentNodeRef,
                            nodeInfo.getRelativePath(), RecordsManagementModel.TYPE_UNFILED_RECORD_FOLDER);
                } else {
                    nodeParent = parentNodeRef;
                }

                NodeRef newNodeRef = apiUtils.createRMNode(nodeParent, nodeInfo.getName(),
                        nodeInfo.getNodeType(), nodeInfo.getProperties(), nodeInfo.getAspectNames());
                createdNodes.add(newNodeRef);
            }
            return createdNodes;
        }
    };
    List<NodeRef> createdNodes = transactionService.getRetryingTransactionHelper().doInTransaction(callback);

    // Get the nodes info
    List<UnfiledRecordFolderChild> result = new LinkedList<>();
    Map<String, UserInfo> mapUserInfo = new HashMap<>();
    for (NodeRef newNodeRef : createdNodes) {
        FileInfo info = fileFolderService.getFileInfo(newNodeRef);
        apiUtils.postActivity(info, parentNodeRef, ActivityType.FILE_ADDED);
        result.add(nodesModelFactory.createUnfiledRecordFolderChild(info, parameters, mapUserInfo, false));
    }
    return result;
}
From source file:org.aliuge.crawler.fetcher.FetchWorker.java
/**
 * Applies the configured extract filters to a URL and, when a filter defines a
 * replacement rule, rewrites the URL accordingly.
 *
 * @param url the URL to check
 * @return the (possibly rewritten) URL if a filter matches, otherwise null
 */
public String extractFilterAndChangeUrl(String url) {
    // No extract filters configured: accept the URL unchanged
    if (null == extractFilters || extractFilters.size() == 0) {
        return url;
    }
    for (KeyValue<Pattern, String> p : extractFilters) {
        if (p.getKey().matcher(url).matches()) {
            if (StringUtils.isNoneBlank(p.getValue())) {
                // The filter value is a "search,replacement" pair applied to the URL
                String[] pp = p.getValue().split(",");
                if (pp.length == 2)
                    return url.replace(pp[0], pp[1]);
            }
            return url;
        }
    }
    // No filter matched: reject the URL
    return null;
}
From source file:org.aliuge.crawler.fetcher.FetchWorker.java
/**
 * Fetches the page for the given URL, parses it, and queues both the page itself
 * and any newly discovered outgoing links that pass the configured filters.
 *
 * @param url the URL to fetch
 */
public void fetchPage(WebURL url) {
    PageFetchResult result = null;
    try {
        if (null != url && StringUtils.isNotBlank(url.getUrl())) {
            result = fetcher.fetch(url, true);
            int statusCode = result.getStatusCode();
            if (statusCode == CustomFetchStatus.PageTooBig) {
                onIgnored(url);
                return;
            }
            if (statusCode != HttpStatus.SC_OK) {
                onFailed(url);
            } else {
                Page page = new Page(url);
                onSuccessed();
                if (!result.fetchContent(page)) {
                    onFailed(url);
                    return;
                }
                if (!parser.parse(page, url.getUrl())) {
                    onFailed(url);
                    return;
                }
                // Apply the extract filters; a non-blank result means the page is queued for extraction
                String e_url = extractFilterAndChangeUrl(url.getUrl());
                if (StringUtils.isNoneBlank(e_url)) {
                    url.setUrl(e_url);
                    page.setWebURL(url);
                    pendingPages.addElement(page);
                    return;
                }
                // Stop following links once the maximum crawl depth is reached
                if (url.getDepth() > config.getMaxDepthOfCrawling() && config.getMaxDepthOfCrawling() != -1) {
                    return;
                }
                // Extract outgoing links and queue those that pass the filters and are not yet seen
                Document doc = Jsoup.parse(new String(page.getContentData(), page.getContentCharset()),
                        urlUtils.getBaseUrl(page.getWebURL().getUrl()));
                Elements links = doc.getElementsByTag("a");
                if (!links.isEmpty()) {
                    for (Element link : links) {
                        String linkHref = link.absUrl("href");
                        if ((fetchFilter(linkHref) || extractFilter(linkHref))
                                && !bloomfilterHelper.exist(linkHref)) {
                            WebURL purl = new WebURL();
                            purl.setName(link.text());
                            purl.setUrl(linkHref);
                            purl.setDepth((short) (url.getDepth() + 1));
                            if (purl.getDepth() > config.getMaxDepthOfCrawling()
                                    && config.getMaxDepthOfCrawling() != -1)
                                return;
                            try {
                                if (!pendingUrls.addElement(purl)) {
                                    FileUtils.writeStringToFile(new File("status/_urls.good"),
                                            url.getUrl() + "\n", true);
                                }
                            } catch (QueueException e) {
                                log.error(e.getMessage());
                            }
                        }
                    }
                }
            }
        }
    } catch (QueueException e) {
        onFailed(url);
    } catch (Exception e) {
        e.printStackTrace();
        onFailed(url);
    } finally {
        if (null != result)
            result.discardContentIfNotConsumed();
    }
}
From source file:org.aliuge.crawler.jobconf.FetchConfig.java
/**
 * Loads the fetch configuration from the job configuration document.
 *
 * @param confDoc the parsed job configuration document
 * @return this FetchConfig, populated from the document
 */
@SuppressWarnings("unchecked")
public FetchConfig loadConfig(Document confDoc) throws ConfigurationException {
    try {
        Document doc = confDoc;
        super.setJobName(doc.select("job").attr("name"));
        super.setIndexName(doc.select("job").attr("indexName"));
        Elements e = doc.select("fetch");
        this.type = e.select("type").text();
        this.agent = e.select("agent").text();
        String temp = e.select("threadNum").text();
        if (StringUtils.isNotBlank(temp)) {
            this.threadNum = Integer.parseInt(temp);
        }
        temp = e.select("delayBetweenRequests").text();
        if (StringUtils.isNotBlank(temp)) {
            this.delayBetweenRequests = Integer.parseInt(temp);
        }
        temp = e.select("maxDepthOfCrawling").text();
        if (StringUtils.isNotBlank(temp)) {
            this.maxDepthOfCrawling = Integer.parseInt(temp);
        }
        temp = e.select("fetchBinaryContent").text();
        if (StringUtils.isNotBlank(temp)) {
            this.fetchBinaryContent = Boolean.parseBoolean(temp);
        }
        if (StringUtils.isNotBlank(e.select("maxOutgoingLinksToFollow").text())) {
            this.maxOutgoingLinksToFollow = Integer.parseInt(e.select("maxOutgoingLinksToFollow").text());
        }
        temp = e.select("fileSuffix").text();
        if (StringUtils.isNotBlank(temp)) {
            this.fileSuffix = temp;
        }
        temp = e.select("maxDownloadSizePerPage").text();
        if (StringUtils.isNotBlank(temp)) {
            this.maxDownloadSizePerPage = Integer.parseInt(temp);
        }
        temp = e.select("https").text();
        if (StringUtils.isNotBlank(temp)) {
            this.https = Boolean.parseBoolean(temp);
        }
        temp = e.select("onlyDomain").text();
        if (StringUtils.isNotBlank(temp)) {
            this.onlyDomain = Boolean.parseBoolean(temp);
        }
        temp = e.select("socketTimeoutMilliseconds").text();
        if (StringUtils.isNotBlank(temp)) {
            this.socketTimeoutMilliseconds = Integer.parseInt(temp);
        }
        temp = e.select("connectionTimeout").text();
        if (StringUtils.isNotBlank(temp)) {
            this.connectionTimeout = Integer.parseInt(temp);
        }
        temp = e.select("maxTotalConnections").text();
        if (StringUtils.isNotBlank(temp)) {
            this.maxTotalConnections = Integer.parseInt(temp);
        }
        temp = e.select("maxConnectionsPerHost").text();
        if (StringUtils.isNotBlank(temp)) {
            this.maxConnectionsPerHost = Integer.parseInt(temp);
        }
        temp = e.select("proxy").text();
        if (StringUtils.isNotBlank(temp)) {
            Properties p = PropertyConfigurationHelper.getProperties(temp);
            this.proxyIps = Lists.newLinkedList();
            for (Object o : p.keySet()) {
                proxyIps.add((String) p.get(o));
            }
        }
        // Seed URLs
        Elements seeds = doc.select("fetch seeds seed");
        for (Element element : seeds) {
            // WebURL url = new WebURL();
            String url = element.text();
            if (StringUtils.isBlank(url)) {
                continue;
            }
            url = url.trim();
            String area = element.attr("area");
            this.seeds.add(url);
            WebURL areaUrl = new WebURL(area, url);
            try {
                PendingManager.getPendingArea(super.getJobName()).addElement(areaUrl);
            } catch (QueueException e1) {
                log.error("", e1);
                e1.printStackTrace();
            }
            // BloomfilterHelper.getInstance().add(url.getURL());
        }
        // Filters applied to URLs before they are fetched
        Elements fetchUrlFilters = doc.select("fetchUrlFilters filter");
        for (Element element : fetchUrlFilters) {
            String tmp = element.text();
            if (StringUtils.isNoneBlank(tmp))
                this.fetchUrlFilters.add(element.text());
        }
        // Filters (with optional replacement rules) applied to extracted URLs
        Elements extractUrlfilters = doc.select("extractUrlfilters filter");
        for (Element element : extractUrlfilters) {
            String tmp = element.text();
            String tmp_rep = element.attr("replace");
            if (StringUtils.isNoneBlank(tmp))
                this.extractUrlfilters.add(new KeyValue(tmp, tmp_rep));
        }
    } catch (NumberFormatException e) {
        throw new ConfigurationException("Invalid numeric value in fetch configuration: " + e.getMessage());
    }
    // super.setFetchConfig(this);
    return this;
}
From source file:org.aliuge.crawler.url.WebURL.java
public void setName(String name) {
    if (StringUtils.isNoneBlank(name))
        this.name = CharMatcher.INVISIBLE.trimFrom(name);
}
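Like WebURL.setName above, several of the examples on this page pass a single value to the varargs method. A small sketch (the candidate values are made up for illustration) confirms that, for one argument, isNoneBlank and isNotBlank agree:

import org.apache.commons.lang3.StringUtils;

public class SingleArgumentComparison {
    public static void main(String[] args) {
        for (String candidate : new String[] { null, "", "   ", "node-1" }) {
            // For a single CharSequence the two checks return the same result
            System.out.println(StringUtils.isNoneBlank(candidate) == StringUtils.isNotBlank(candidate));
        }
    }
}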
From source file:org.apache.bcel.generic.JDKGenericDumpTestCase.java
private static Set<String> findJavaHomesOnWindows(final String[] keys) {
    final Set<String> javaHomes = new HashSet<>(keys.length);
    for (final String key : keys) {
        if (Advapi32Util.registryKeyExists(HKEY_LOCAL_MACHINE, KEY_JRE + "\\" + key)) {
            final String javaHome = Advapi32Util.registryGetStringValue(HKEY_LOCAL_MACHINE,
                    KEY_JRE + "\\" + key, "JavaHome");
            if (StringUtils.isNoneBlank(javaHome)) {
                if (new File(javaHome).exists()) {
                    javaHomes.add(javaHome);
                }
            }
        }
    }
    return javaHomes;
}
From source file:org.apache.falcon.resource.AbstractExtensionManager.java
public ExtensionJobList getExtensionJobs(String extensionName, String sortOrder, String doAsUser) {
    TreeMap<String, String> jobAndExtensionNames = new TreeMap<>();
    List<ExtensionJobsBean> extensionJobs = null;

    if (StringUtils.isNoneBlank(extensionName)) {
        extensionJobs = ExtensionStore.getMetaStore().getJobsForAnExtension(extensionName);
    } else {
        extensionJobs = ExtensionStore.getMetaStore().getAllExtensionJobs();
    }
    for (ExtensionJobsBean job : extensionJobs) {
        jobAndExtensionNames.put(job.getJobName(), job.getExtensionName());
    }

    sortOrder = (sortOrder == null) ? ASCENDING_SORT_ORDER : sortOrder;
    switch (sortOrder.toLowerCase()) {
    case DESCENDING_SORT_ORDER:
        return new ExtensionJobList(extensionJobs.size(), jobAndExtensionNames.descendingMap());
    default:
        return new ExtensionJobList(extensionJobs.size(), jobAndExtensionNames);
    }
}
From source file:org.apache.fineract.infrastructure.jobs.service.JobRegisterServiceImpl.java
/**
 * Checks if the "scheduler.enabled" property in the "/var/lib/tomcat7/conf/quartz.properties" file is set to true.
 * N/B - This is a temporary fix for the duplicate job scheduling issue resulting from running on a clustered
 * environment
 *
 * @return true if the property value is true, else false
 **/
private boolean isSchedulerEnabledInQuartzPropertiesFile() {
    // scheduler is disabled by default
    boolean isEnabled = false;
    Properties quartzProperties = new Properties();
    InputStream quartzPropertiesInputStream = null;
    File catalinaBaseConfDirectory = null;
    File quartzPropertiesFile = null;
    String scheduleDotEnablePropertyValue = null;

    try {
        // create a new File instance for the catalina base conf directory
        catalinaBaseConfDirectory = new File(System.getProperty("catalina.base"), "conf");

        // create a new File instance for the quartz properties file
        quartzPropertiesFile = new File(catalinaBaseConfDirectory, "quartz.properties");

        // create file inputstream to the quartz properties file
        quartzPropertiesInputStream = new FileInputStream(quartzPropertiesFile);

        // read property list from input stream
        quartzProperties.load(quartzPropertiesInputStream);

        scheduleDotEnablePropertyValue = quartzProperties.getProperty("scheduler.enabled");

        // make sure it isn't blank, before trying to parse the string as boolean
        if (StringUtils.isNoneBlank(scheduleDotEnablePropertyValue)) {
            isEnabled = Boolean.parseBoolean(scheduleDotEnablePropertyValue);
        }
    } catch (FileNotFoundException ex) {
        // no quartz.properties file: fall back to enabling the scheduler
        isEnabled = true;
    } catch (IOException ex) {
        logger.error(ex.getMessage(), ex);
    } finally {
        if (quartzPropertiesInputStream != null) {
            try {
                quartzPropertiesInputStream.close();
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
            }
        }
    }

    return isEnabled;
}
From source file:org.apache.fineract.infrastructure.sms.scheduler.SmsMessageScheduledJobServiceImpl.java
/**
 * Checks if the SMS module (sending SMS and retrieving delivery reports) is enabled in the sms properties file.
 *
 * @return true if the SMS module is enabled, else false
 */
private Boolean isSmsEnabledInSmsPropertiesFile() {
    Boolean isEnabled = true;
    Properties smsProperties = new Properties();
    InputStream smsPropertiesInputStream = null;
    File catalinaBaseConfDirectory = null;
    File smsPropertiesFile = null;
    String smsDotEnablePropertyValue = null;

    try {
        // create a new File instance for the catalina base conf directory
        catalinaBaseConfDirectory = new File(System.getProperty("catalina.base"), "conf");

        // create a new File instance for the sms properties file
        smsPropertiesFile = new File(catalinaBaseConfDirectory, "sms.properties");

        // create file inputstream to the sms properties file
        smsPropertiesInputStream = new FileInputStream(smsPropertiesFile);

        // read property list from input stream
        smsProperties.load(smsPropertiesInputStream);

        smsDotEnablePropertyValue = smsProperties.getProperty("sms.enabled");

        // make sure it isn't blank, before trying to parse the string as boolean
        if (StringUtils.isNoneBlank(smsDotEnablePropertyValue)) {
            isEnabled = Boolean.parseBoolean(smsDotEnablePropertyValue);
        }
    } catch (FileNotFoundException ex) {
        // no sms.properties file: keep the default (enabled)
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    } finally {
        if (smsPropertiesInputStream != null) {
            try {
                smsPropertiesInputStream.close();
            } catch (Exception ex) {
                logger.error(ex.getMessage(), ex);
            }
        }
    }

    return isEnabled;
}
From source file:org.apache.nifi.processors.azure.storage.queue.AbstractAzureQueueStorage.java
protected final CloudQueueClient createCloudQueueClient(final ProcessContext context, final FlowFile flowFile) {
    final String storageAccountName;
    final String storageAccountKey;
    final String sasToken;
    final String connectionString;

    if (flowFile == null) {
        storageAccountName = context.getProperty(AzureStorageUtils.ACCOUNT_NAME).evaluateAttributeExpressions()
                .getValue();
        storageAccountKey = context.getProperty(AzureStorageUtils.ACCOUNT_KEY).evaluateAttributeExpressions()
                .getValue();
        sasToken = context.getProperty(AzureStorageUtils.PROP_SAS_TOKEN).evaluateAttributeExpressions()
                .getValue();
    } else {
        storageAccountName = context.getProperty(AzureStorageUtils.ACCOUNT_NAME)
                .evaluateAttributeExpressions(flowFile).getValue();
        storageAccountKey = context.getProperty(AzureStorageUtils.ACCOUNT_KEY)
                .evaluateAttributeExpressions(flowFile).getValue();
        sasToken = context.getProperty(AzureStorageUtils.PROP_SAS_TOKEN).evaluateAttributeExpressions(flowFile)
                .getValue();
    }

    CloudQueueClient cloudQueueClient;
    try {
        if (StringUtils.isNoneBlank(sasToken)) {
            connectionString = String.format(FORMAT_QUEUE_BASE_URI, storageAccountName);
            StorageCredentials storageCredentials = new StorageCredentialsSharedAccessSignature(sasToken);
            cloudQueueClient = new CloudQueueClient(new URI(connectionString), storageCredentials);
        } else {
            connectionString = String.format(FORMAT_QUEUE_CONNECTION_STRING, storageAccountName,
                    storageAccountKey);
            CloudStorageAccount storageAccount = CloudStorageAccount.parse(connectionString);
            cloudQueueClient = storageAccount.createCloudQueueClient();
        }
    } catch (IllegalArgumentException | URISyntaxException e) {
        getLogger().error("Invalid connection string URI for '{}'", new Object[] { context.getName() }, e);
        throw new IllegalArgumentException(e);
    } catch (InvalidKeyException e) {
        getLogger().error("Invalid connection credentials for '{}'", new Object[] { context.getName() }, e);
        throw new IllegalArgumentException(e);
    }

    return cloudQueueClient;
}