List of usage examples for java.util.ArrayList.isEmpty()
public boolean isEmpty()
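Per the List contract, isEmpty() returns true if the list contains no elements. The examples that follow are taken from open-source projects; as a starting point, here is a minimal, self-contained sketch of the most common idiom, guarding work behind an isEmpty() check. The class name and list contents in this sketch are hypothetical and chosen only for illustration.

import java.util.ArrayList;
import java.util.List;

public class IsEmptyExample {
    public static void main(String[] args) {
        List<String> errors = new ArrayList<>();

        // isEmpty() is true for a freshly created list
        System.out.println(errors.isEmpty()); // prints: true

        errors.add("missing required parameter");

        // after adding an element, isEmpty() is false,
        // so the guarded block runs
        if (!errors.isEmpty()) {
            System.out.println("Validation failed: " + errors);
        }
    }
}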
From source file: edu.isi.misd.scanner.network.registry.web.controller.ToolLibraryController.java

@RequestMapping(value = BASE_PATH, method = { RequestMethod.GET, RequestMethod.HEAD },
                produces = HEADER_JSON_MEDIA_TYPE)
public @ResponseBody List<ToolLibrary> getToolLibraries(@RequestParam Map<String, String> paramMap) {
    Map<String, String> params = validateParameterMap(paramMap, REQUEST_PARAM_USER_NAME,
        REQUEST_PARAM_STUDY_ID, REQUEST_PARAM_DATASET_ID);
    if (!params.isEmpty()) {
        ArrayList<String> missingParams = new ArrayList<String>();
        String userName = params.get(REQUEST_PARAM_USER_NAME);
        if (userName == null) {
            missingParams.add(REQUEST_PARAM_USER_NAME);
        }
        String studyId = params.get(REQUEST_PARAM_STUDY_ID);
        if (studyId == null) {
            missingParams.add(REQUEST_PARAM_STUDY_ID);
        }
        String dataSetId = params.get(REQUEST_PARAM_DATASET_ID);
        if (dataSetId == null) {
            missingParams.add(REQUEST_PARAM_DATASET_ID);
        }
        if (!missingParams.isEmpty()) {
            throw new BadRequestException("Required parameter(s) missing: " + missingParams);
        }
        return toolLibraryRepository.findToolLibraryByStudyPolicyStatement(userName,
            validateIntegerParameter(REQUEST_PARAM_STUDY_ID, studyId),
            validateIntegerParameter(REQUEST_PARAM_DATASET_ID, dataSetId));
    }
    List<ToolLibrary> toolLibraries = new ArrayList<ToolLibrary>();
    Iterator iter = toolLibraryRepository.findAll().iterator();
    CollectionUtils.addAll(toolLibraries, iter);
    return toolLibraries;
}
From source file: gr.open.loglevelsmanager.loglevel.LogLevelsManager.java

@ProcessAction(name = "addLogLevel")
public void addLogLevel(ActionRequest request, ActionResponse response) throws Exception {
    LogLevel logLevel = getLogLevelFromRequest(request);
    logLevel.setActivateOnStartup(true);
    ThemeDisplay themeDisplay = (ThemeDisplay) request.getAttribute(WebKeys.THEME_DISPLAY);
    PermissionChecker permissionChecker = themeDisplay.getPermissionChecker();
    if (!LogLevelPermission.contains(permissionChecker, themeDisplay.getScopeGroupId(), "ADD_LOGLEVEL")) {
        LogLevelUtil.addParametersForDefaultView(response);
        SessionErrors.add(request, "permission-error");
        return;
    }
    ArrayList<String> errors = LogLevelValidator.validateLogLevel(logLevel, request);
    if (errors.isEmpty()) {
        try {
            ServiceContext serviceContext = ServiceContextFactory.getInstance(LogLevel.class.getName(), request);
            LogLevelLocalServiceUtil.addLogLevel(logLevel, serviceContext);
            LogLevelUtil.addParametersForDefaultView(response);
            SessionMessages.add(request, "loglevel-added-successfully");
        } catch (Exception cvex) {
            SessionErrors.add(request, "please-enter-a-unique-code");
            PortalUtil.copyRequestParameters(request, response);
            LogLevelUtil.addParametersForAddWithErrors(response);
        }
    } else {
        for (String error : errors) {
            SessionErrors.add(request, error);
        }
        PortalUtil.copyRequestParameters(request, response);
        LogLevelUtil.addParametersForAddWithErrors(response);
    }
}
From source file: gr.open.loglevelsmanager.loglevel.LogLevelsManager.java

@ProcessAction(name = "updateLogLevel")
public void updateLogLevel(ActionRequest request, ActionResponse response) throws Exception {
    LogLevel logLevel = getLogLevelFromRequest(request);
    ThemeDisplay themeDisplay = (ThemeDisplay) request.getAttribute(WebKeys.THEME_DISPLAY);
    PermissionChecker permissionChecker = themeDisplay.getPermissionChecker();
    if (!LogLevelEntryPermission.contains(permissionChecker, logLevel, ActionKeys.UPDATE)) {
        LogLevelUtil.addParametersForDefaultView(response);
        SessionErrors.add(request, "permission-error");
        return;
    }
    ArrayList<String> errors = LogLevelValidator.validateLogLevel(logLevel, request);
    if (errors.isEmpty()) {
        try {
            ServiceContext serviceContext = ServiceContextFactory.getInstance(LogLevel.class.getName(), request);
            LogLevelLocalServiceUtil.updateLogLevel(logLevel, serviceContext);
            LogLevelUtil.addParametersForDefaultView(response);
            SessionMessages.add(request, "loglevel-updated-successfully");
        } catch (Exception cvex) {
            SessionErrors.add(request, "please-enter-a-unique-code");
            LogLevelUtil.addParametersForEdit(response, null);
            request.setAttribute("logLevel", logLevel);
        }
    } else {
        for (String error : errors) {
            SessionErrors.add(request, error);
        }
        LogLevelUtil.addParametersForEdit(response, Long.toString(logLevel.getPrimaryKey()));
        request.setAttribute("logLevel", logLevel);
    }
}
From source file: at.treedb.util.SevenZip.java

/**
 * Extracts some data (files and directories) from the archive without
 * extracting the whole archive.
 *
 * @param archive
 *            7-zip archive
 * @param fileList
 *            file extraction list, a path with an ending '/' denotes a
 *            directory
 * @return file list as a map file name/file data
 * @throws IOException
 */
public static HashMap<String, byte[]> exctact(File archive, String... fileList) throws IOException {
    HashSet<String> fileSet = new HashSet<String>();
    ArrayList<String> dirList = new ArrayList<String>();
    for (String f : fileList) {
        if (!f.endsWith("/")) {
            fileSet.add(f);
        } else {
            dirList.add(f);
        }
    }
    HashMap<String, byte[]> resultMap = new HashMap<String, byte[]>();
    SevenZFile sevenZFile = new SevenZFile(archive);
    do {
        SevenZArchiveEntry entry = sevenZFile.getNextEntry();
        if (entry == null) {
            break;
        }
        // convert window path to unix style
        String name = entry.getName().replace('\\', '/');
        if (!entry.isDirectory()) {
            boolean storeFile = false;
            if (fileSet.contains(name)) {
                storeFile = true;
            } else {
                // search directories
                for (String s : dirList) {
                    if (name.startsWith(s)) {
                        storeFile = true;
                        break;
                    }
                }
            }
            // store the file
            if (storeFile) {
                int size = (int) entry.getSize();
                byte[] data = new byte[size];
                sevenZFile.read(data, 0, size);
                resultMap.put(name, data);
                // in this case we can finish the extraction loop
                if (dirList.isEmpty() && resultMap.size() == fileSet.size()) {
                    break;
                }
            }
        }
    } while (true);
    sevenZFile.close();
    return resultMap;
}
From source file: com.cloudera.impala.planner.PlannerTestBase.java

private void checkColumnLineage(TestCase testCase, TExecRequest execRequest, StringBuilder errorLog,
        StringBuilder actualOutput) {
    String query = testCase.getQuery();
    ArrayList<String> expectedLineage = testCase.getSectionContents(Section.LINEAGE);
    if (expectedLineage == null || expectedLineage.isEmpty())
        return;
    TLineageGraph lineageGraph = null;
    if (execRequest != null && execRequest.isSetQuery_exec_request()) {
        lineageGraph = execRequest.query_exec_request.lineage_graph;
    } else if (execRequest.isSetCatalog_op_request()) {
        lineageGraph = execRequest.catalog_op_request.lineage_graph;
    }
    ArrayList<String> expected = testCase.getSectionContents(Section.LINEAGE);
    if (expected.size() > 0 && lineageGraph != null) {
        String serializedGraph = Joiner.on("\n").join(expected);
        ColumnLineageGraph expectedGraph = ColumnLineageGraph.createFromJSON(serializedGraph);
        ColumnLineageGraph outputGraph = ColumnLineageGraph.fromThrift(lineageGraph);
        if (expectedGraph == null || outputGraph == null || !outputGraph.equals(expectedGraph)) {
            StringBuilder lineageError = new StringBuilder();
            lineageError.append("section " + Section.LINEAGE + " of query:\n" + query + "\n");
            lineageError.append("Output:\n");
            lineageError.append(outputGraph.toJson() + "\n");
            lineageError.append("Expected:\n");
            lineageError.append(serializedGraph + "\n");
            errorLog.append(lineageError.toString());
        }
        actualOutput.append(Section.LINEAGE.getHeader());
        actualOutput.append(TestUtils.prettyPrintJson(outputGraph.toJson()));
        actualOutput.append("\n");
    }
}
From source file: io.microprofile.showcase.speaker.persistence.SpeakerDAO.java

/**
 * Try and fuzzy find the specified Speaker
 *
 * @param speaker Speaker to find - may contain partial details
 * @return Optional matching speakers
 */
public Set<Speaker> find(final Speaker speaker) {
    final ArrayList<Speaker> speakers = new ArrayList<>();
    speakers.addAll(this.speakers.values());
    CollectionUtils.filter(speakers, object -> {
        final Speaker find = Speaker.class.cast(object);
        return (isMatch(find.getNameFirst(), speaker.getNameFirst())
                || isMatch(find.getNameLast(), speaker.getNameLast())
                || isMatch(find.getOrganization(), speaker.getOrganization())
                || isMatch(find.getTwitterHandle(), speaker.getTwitterHandle()));
    });
    if (!speakers.isEmpty()) {
        return new HashSet<>(speakers);
    }
    return Collections.emptySet();
}
From source file: net.opentsdb.meta.UIDMeta.java

/**
 * Attempts a CompareAndSet storage call, loading the object from storage,
 * synchronizing changes, and attempting a put.
 * <b>Note:</b> If the local object didn't have any fields set by the caller
 * then the data will not be written.
 * @param tsdb The TSDB to use for storage access
 * @param overwrite When the RPC method is PUT, will overwrite all user
 * accessible fields
 * @return True if the storage call was successful, false if the object was
 * modified in storage during the CAS call. If false, retry the call. Other
 * failures will result in an exception being thrown.
 * @throws HBaseException if there was an issue fetching
 * @throws IllegalArgumentException if parsing failed
 * @throws NoSuchUniqueId If the UID does not exist
 * @throws IllegalStateException if the data hasn't changed. This is OK!
 * @throws JSONException if the object could not be serialized
 */
public Deferred<Boolean> syncToStorage(final TSDB tsdb, final boolean overwrite) {
    if (uid == null || uid.isEmpty()) {
        throw new IllegalArgumentException("Missing UID");
    }
    if (type == null) {
        throw new IllegalArgumentException("Missing type");
    }

    boolean has_changes = false;
    for (Map.Entry<String, Boolean> entry : changed.entrySet()) {
        if (entry.getValue()) {
            has_changes = true;
            break;
        }
    }
    if (!has_changes) {
        LOG.debug(this + " does not have changes, skipping sync to storage");
        throw new IllegalStateException("No changes detected in UID meta data");
    }

    /**
     * Callback used to verify that the UID to name mapping exists. Uses the TSD
     * for verification so the name may be cached. If the name does not exist
     * it will throw a NoSuchUniqueId and the meta data will not be saved to
     * storage
     */
    final class NameCB implements Callback<Deferred<Boolean>, String> {
        private final UIDMeta local_meta;

        public NameCB(final UIDMeta meta) {
            local_meta = meta;
        }

        /**
         * Nested callback used to merge and store the meta data after verifying
         * that the UID mapping exists. It has to access the {@code local_meta}
         * object so that's why it's nested within the NameCB class
         */
        final class StoreUIDMeta implements Callback<Deferred<Boolean>, ArrayList<KeyValue>> {

            /**
             * Executes the CompareAndSet after merging changes
             * @return True if the CAS was successful, false if the stored data
             * was modified during flight.
             */
            @Override
            public Deferred<Boolean> call(final ArrayList<KeyValue> row) throws Exception {
                final UIDMeta stored_meta;
                if (row == null || row.isEmpty()) {
                    stored_meta = null;
                } else {
                    stored_meta = JSON.parseToObject(row.get(0).value(), UIDMeta.class);
                    stored_meta.initializeChangedMap();
                }
                final byte[] original_meta = row == null || row.isEmpty() ?
                        new byte[0] : row.get(0).value();
                if (stored_meta != null) {
                    local_meta.syncMeta(stored_meta, overwrite);
                }

                // verify the name is set locally just to be safe
                if (name == null || name.isEmpty()) {
                    local_meta.name = name;
                }

                final PutRequest put = new PutRequest(tsdb.uidTable(), UniqueId.stringToUid(uid), FAMILY,
                        (type.toString().toLowerCase() + "_meta").getBytes(CHARSET),
                        local_meta.getStorageJSON());
                return tsdb.getClient().compareAndSet(put, original_meta);
            }
        }

        /**
         * NameCB method that fetches the object from storage for merging and
         * use in the CAS call
         * @return The results of the {@link #StoreUIDMeta} callback
         */
        @Override
        public Deferred<Boolean> call(final String name) throws Exception {
            final GetRequest get = new GetRequest(tsdb.uidTable(), UniqueId.stringToUid(uid));
            get.family(FAMILY);
            get.qualifier((type.toString().toLowerCase() + "_meta").getBytes(CHARSET));

            // #2 deferred
            return tsdb.getClient().get(get).addCallbackDeferring(new StoreUIDMeta());
        }
    }

    // start the callback chain by veryfing that the UID name mapping exists
    return tsdb.getUidName(type, UniqueId.stringToUid(uid)).addCallbackDeferring(new NameCB(this));
}
From source file: com.ngdata.hbaseindexer.indexer.FusionPipelineClient.java

protected ArrayList<String> getAvailableEndpoints() throws Exception {
    ArrayList<String> mutable = null;
    synchronized (this) {
        mutable = new ArrayList<String>(sessions.keySet());
    }
    if (mutable.isEmpty()) {
        // completely hosed ... try to re-establish all sessions
        synchronized (this) {
            try {
                Thread.sleep(2000);
            } catch (InterruptedException ie) {
                Thread.interrupted();
            }
            sessions = establishSessions(originalEndpoints, fusionUser, fusionPass, fusionRealm);
            mutable = new ArrayList<String>(sessions.keySet());
        }
        if (mutable.isEmpty())
            throw new IllegalStateException("No available endpoints! "
                    + "Check log for previous errors as to why there are no more endpoints available. This is a fatal error.");
    }
    return mutable;
}
From source file: com.nridge.ds.solr.SolrSchema.java

/**
 * Updates the Solr schema configuration for the search cluster with a single batch
 * operation. This method should be used to update the field types associated with
 * a schema. You should ensure that this method is invoked prior to schema fields
 * referencing a new field type.
 *
 * Field type in a <i>Document</i> are modeled in a hierarchy consisting of a
 * common set of fields (name, class, etc.) and relationships representing
 * Analyzers, Index Analyzers and Query Analyzers. Contained within each Analyzer
 * relationship document are tokenizers and filters (which are tables with columns
 * that will depend on the class selected).
 *
 * @see <a href="http://lucene.apache.org/solr/guide/7_6/schema-api.html">Solr Schema API</a>
 *
 * @param aDocSolrFieldTypes List of Documents of type RESPONSE_SCHEMA_FIELD_TYPE
 *
 * @throws DSException Data source exception.
 */
public void update(ArrayList<Document> aDocSolrFieldTypes) throws DSException {
    Logger appLogger = mAppMgr.getLogger(this, "update");

    if (!aDocSolrFieldTypes.isEmpty()) {
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append(String.format("{%n"));
        for (Document docSolrFieldType : aDocSolrFieldTypes)
            update(stringBuilder, docSolrFieldType);
        stringBuilder.append(String.format("%n}%n"));
        String jsonPayload = stringBuilder.toString();

        // Construct our query URI string
        String baseSolrURL = mSolrDS.getBaseURL(true);
        String solrURI = baseSolrURL + "/schema";

        // Next, we will execute the HTTP POST request with the Solr schema API service.
        CloseableHttpResponse httpResponse = null;
        CloseableHttpClient httpClient = HttpClients.createDefault();
        HttpPost httpPost = new HttpPost(solrURI);
        httpPost.addHeader("Content-type", CONTENT_TYPE_JSON);
        try {
            HttpEntity httpEntity = new ByteArrayEntity(jsonPayload.getBytes(StandardCharsets.UTF_8));
            httpPost.setEntity(httpEntity);
            httpResponse = httpClient.execute(httpPost);
            StatusLine statusLine = httpResponse.getStatusLine();
            int statusCode = statusLine.getStatusCode();
            String msgStr = String.format("%s [%d]: %s", solrURI, statusCode, statusLine);
            appLogger.debug(msgStr);
            if (statusCode == HttpStatus.SC_OK) {
                httpEntity = httpResponse.getEntity();
                EntityUtils.consume(httpEntity);
            } else {
                msgStr = String.format("%s [%d]: %s", solrURI, statusCode, statusLine);
                appLogger.error(msgStr);
                appLogger.debug(jsonPayload);
                throw new DSException(msgStr);
            }
        } catch (IOException e) {
            String msgStr = String.format("%s (Document List): %s", solrURI, e.getMessage());
            appLogger.error(msgStr, e);
            throw new DSException(msgStr);
        } finally {
            if (httpResponse != null)
                IO.closeQuietly(httpResponse);
        }
    }

    appLogger.trace(mAppMgr.LOGMSG_TRACE_DEPART);
}
From source file: com.zimbra.common.util.QuotedTextUtil.java

/**
 * Join the blocks to form a string
 *
 * @param block
 * @return string created from the list of blocks
 */
private String getTextFromBlock(ArrayList<String> block) {
    if (block == null || block.isEmpty()) {
        return "";
    }
    String originalText = StringUtils.join(block, "\n") + "\n";
    // Note: String.replaceAll returns a new string; the result of this call is discarded,
    // so the statement has no effect on originalText.
    originalText.replaceAll("/\\s+$/", "\n");
    return originalText.trim().isEmpty() ? null : originalText;
}