List of usage examples for java.util LinkedList addAll
public boolean addAll(Collection<? extends E> c)
From source file:org.dozer.util.MappingUtils.java
@SuppressWarnings("unchecked") public static List<Class<?>> getInterfaceHierarchy(Class<?> srcClass) { final List<Class<?>> result = new LinkedList<Class<?>>(); Class<?> realClass = getRealClass(srcClass); final LinkedList<Class> interfacesToProcess = new LinkedList<Class>(); Class[] interfaces = realClass.getInterfaces(); interfacesToProcess.addAll(Arrays.asList(interfaces)); while (!interfacesToProcess.isEmpty()) { Class<?> iface = interfacesToProcess.remove(); if (!result.contains(iface)) { result.add(iface);// w ww.j a va2 s . com for (Class subiface : iface.getInterfaces()) { // if we haven't processed this interface yet then add it to be processed if (!result.contains(subiface)) { interfacesToProcess.add(subiface); } } } } return result; }
From source file:org.apache.hadoop.hdfs.server.datanode.TestDirectoryScannerDelta.java
/**
 * Gathers every {@link DatanodeBlockInfo} stored for the given namespace by
 * concatenating the contents of all of its buckets.
 *
 * @param fds  dataset whose volume map is consulted
 * @param nsid namespace id used to look up the {@code NamespaceMap}
 * @return all block infos across every bucket, in bucket order
 */
static LinkedList<DatanodeBlockInfo> getBlockInfos(FSDataset fds, int nsid) {
    NamespaceMap namespaceMap = fds.volumeMap.getNamespaceMap(nsid);
    LinkedList<DatanodeBlockInfo> collected = new LinkedList<DatanodeBlockInfo>();
    int bucketCount = namespaceMap.getNumBucket();
    for (int bucket = 0; bucket < bucketCount; bucket++) {
        collected.addAll(namespaceMap.getBucket(bucket).getBlockInfosForTesting());
    }
    return collected;
}
From source file:org.epics.archiverappliance.utils.ui.GetUrlContent.java
/**
 * A static utility method to combine JSON objects.
 *
 * @param dest the list that entries from {@code additionalDetails} are appended to
 * @param additionalDetails source JSONArray; ignored when {@code null}
 */
@SuppressWarnings("unchecked")
public static void combineJSONArrays(LinkedList<Map<String, String>> dest, JSONArray additionalDetails) {
    if (additionalDetails == null) {
        return;
    }
    // JSONArray is a raw java.util.List, hence the unchecked suppression above.
    dest.addAll(additionalDetails);
}
From source file:com.fengduo.bee.commons.core.lang.BeanUtils.java
public static LinkedList<Field> _getAllFields(LinkedList<Field> fields, Class<?> type) { if (Argument.isEmpty(fields)) { fields = new LinkedList<Field>(); }//from ww w . j av a 2s .co m for (Field field : type.getDeclaredFields()) { fields.add(field); } if (type.getSuperclass() != null) { fields.addAll(_getAllFields(fields, type.getSuperclass())); } return fields; }
From source file:org.elasticwarehouse.core.parsers.FileTools.java
public static LinkedList<FileDef> scanFolder(String path, List<String> excluded_extenstions, boolean isrecurrence, Date newerthan) throws java.security.AccessControlException { if (isrecurrence) { File folder = new File(path); String[] directories = folder.list(new FilenameFilter() { public boolean accept(File current, String name) { File f = new File(current, name); return f.isDirectory() && name.startsWith(".") == false; }//from w w w . j av a2 s .c om }); if (directories.length == 0) { return scanFolder(path, excluded_extenstions, newerthan); } else { LinkedList<FileDef> fulllist = new LinkedList<FileDef>(); for (int i = 0; i < directories.length; i++) { LinkedList<FileDef> ret = scanFolder(path + "/" + directories[i], excluded_extenstions, isrecurrence, newerthan); fulllist.addAll(ret); } LinkedList<FileDef> ret2 = scanFolder(path, excluded_extenstions, newerthan); fulllist.addAll(ret2); return fulllist; } } else { return scanFolder(path, excluded_extenstions, newerthan); } }
From source file:org.elasticwarehouse.core.parsers.FileTools.java
public static LinkedList<FileDef> scanFolder(String path, List<String> excluded_extenstions, Date newerthan) throws java.security.AccessControlException { LinkedList<FileDef> ret = new LinkedList<FileDef>(); File folder = new File(path); LinkedList<File> listOfFiles = new LinkedList<File>(); if (newerthan != null) { Iterator<File> newFiles = FileUtils.iterateFiles(folder, new AgeFileFilter(newerthan, false), null);//org.apache.commons.io.filefilter.TrueFileFilter.TRUE); while (newFiles.hasNext()) { listOfFiles.add(newFiles.next()); }/* www . j av a 2s .c o m*/ } else { listOfFiles.addAll(Arrays.asList(folder.listFiles())); } if (listOfFiles.size() == 0) return ret; //return listOfFiles; for (File file : listOfFiles) { if (file.isFile()) { String fname = file.getName(); boolean exclude = false; for (String excludeext : excluded_extenstions) { if (fname.endsWith("." + excludeext)) { exclude = true; break; } } if (!exclude) ret.add(new FileDef(file.getName(), file.getParent(), file.lastModified())); } } return ret; }
From source file:edu.tum.cs.vis.model.util.algorithm.ACCUM.java
/**
 * Diffuse a vector field at 1 vertex, weighted by a Gaussian of width
 * 1/sqrt(invsigma2). Ported from trimesh2 (2.12).
 *
 * Flood-fills outward from vertex {@code v} over its neighborhood (LIFO
 * boundary, so the traversal is depth-first), accumulating each visited
 * vertex's field contribution into {@code flt} via the {@code accum}
 * strategy, then normalizes by the total weight.
 *
 * NOTE(review): the "// flt.set(0, 0, 0);" lines below are commented out in
 * the original — callers presumably pass flt pre-zeroed, or accumulation on
 * top of its current value is intended. TODO confirm against call sites.
 */
@SuppressWarnings("javadoc")
private static void diffuse_vert_field(final Model m, HashMap<Vertex, Curvature> curvatures,
        Map<Vertex, Long> flags, AtomicLong flag_curr, final ACCUM accum, int v, float invsigma2,
        Vertex flt) {
    Vertex vert = m.getVertices().get(v);
    if (vert.getNeighbors().size() == 0) {
        // Isolated vertex: only its own contribution, with unit weight.
        // flt.set(0, 0, 0);
        accum.a(m, curvatures, vert, flt, 1.0f, vert);
        return;
    }
    // Seed the accumulator with the center vertex, weighted by its point area.
    // flt.set(0, 0, 0);
    accum.a(m, curvatures, vert, flt, vert.getPointarea(), vert);
    float sum_w = vert.getPointarea();
    final Vector3f nv = vert.getNormalVector();

    // A fresh flag value per invocation marks vertices as visited without
    // having to clear the shared `flags` map each time.
    long flag_curr_val = flag_curr.incrementAndGet();
    flags.put(vert, flag_curr_val);
    LinkedList<Vertex> boundary = new LinkedList<Vertex>();
    boundary.addAll(vert.getNeighbors());
    while (boundary.size() > 0) {
        Vertex n = boundary.pop();
        // Skip vertices already visited in this invocation.
        if (flags.get(n) != null && flags.get(n) == flag_curr_val)
            continue;
        flags.put(n, flag_curr_val);
        // Ignore (and do not expand past) back-facing neighbors.
        if (nv.dot(n.getNormalVector()) <= 0.0f)
            continue;
        // Gaussian weight
        float w = wt(n, vert, invsigma2);
        // Zero weight: outside the Gaussian support, stop expanding here.
        if (w == 0.0f)
            continue;
        // Downweight things pointing in different directions
        w *= nv.dot(n.getNormalVector());
        // Surface area "belonging" to each point
        w *= n.getPointarea();
        // Accumulate weight times field at neighbor
        accum.a(m, curvatures, vert, flt, w, n);
        sum_w += w;
        for (Vertex nn : n.getNeighbors()) {
            if (flags.get(nn) != null && flags.get(nn) == flag_curr_val)
                continue;
            boundary.push(nn);
        }
    }
    // Normalize the accumulated field by the total weight.
    flt.scale(1 / sum_w);
}
From source file:controllers.SnLocationsController.java
/**
 * Play controller action: discovers POIs via Foursquare, then fetches the
 * "here now" data for those POIs, and renders the combined result as JSON.
 * Reads appid/limit/ids from the request parameters; on any failure responds
 * with an internal-error meta via gotError.
 */
public static void discoverHereNow() {
    // These reads hit the controller's request-scoped `params` field; the
    // identically named local HashMap declared further down does not shadow
    // them because a local's scope only starts at its declaration.
    String appid = params._contains(PARAM_APPID) ? params.get(PARAM_APPID) : "";
    String limit = params._contains(PARAM_LIMIT) ? params.get(PARAM_LIMIT) : "";
    limit = verifyRecordLimit(limit);
    String ids = params._contains(PARAM_IDS) ? params.get(PARAM_IDS) : "";
    //Logger.info("appid, limit, ids : %s, %s, %s \n", appid, limit, ids);
    Logger.info("PARAMS -> appid:%s ; limit:%s ; ids:%s", appid, limit, ids);
    // using Async jobs
    ResponseModel responseModel = new ResponseModel();
    ResponseMeta responseMeta = new ResponseMeta();
    LinkedList<Object> dataList = new LinkedList<Object>();
    // Raw HashMap reused as the per-job request-parameter bag (cleared between jobs).
    HashMap params = new HashMap();
    try {
        // POI discovery job. NOTE(review): the limit parameter is deliberately
        // disabled ("//-") for this first job in the original source — TODO
        // confirm whether the POI job should also honor PARAM_LIMIT.
        params.clear();
        //-if (!StringUtils.isEmpty(limit)) params.put(PARAM_LIMIT, limit);
        FoursquareDiscoverPoiJob mFoursquarePoiJob = new FoursquareDiscoverPoiJob();
        mFoursquarePoiJob.setIds(ids);
        mFoursquarePoiJob.setReqParams(params);
        dataList.addAll((LinkedList<Object>) mFoursquarePoiJob.doJobWithResult());
        // HereNow part
        params.clear();
        if (!StringUtils.isEmpty(limit))
            params.put(PARAM_LIMIT, limit);
        FoursquareDiscoverHereNowJob mFoursquareDiscoverHereNowJob = new FoursquareDiscoverHereNowJob();
        mFoursquareDiscoverHereNowJob.setReqParams(params);
        // Feed the discovered POIs into the here-now job, then replace
        // dataList with that job's result.
        mFoursquareDiscoverHereNowJob.setPoiList(dataList);
        dataList = new LinkedList<Object>();//dataList.clear();
        dataList.addAll((LinkedList<Object>) mFoursquareDiscoverHereNowJob.doJobWithResult());
        response.status = Http.StatusCode.OK;
        responseMeta.code = response.status;
        responseModel.meta = responseMeta;
        responseModel.data = dataList;
        renderJSON(LocoUtils.getGson().toJson(responseModel));
    } catch (Exception ex) {
        responseMeta.code = Http.StatusCode.INTERNAL_ERROR;
        gotError(responseMeta, ex);
        //renderJSON(responseModel);
    }
}
From source file:org.apache.ofbiz.solr.SolrProductSearch.java
/**
 * Return a map of the side deep categories.
 *
 * For each element of the current category trail, queries Solr for the
 * categories available at that level and groups them into the result under
 * "menu-&lt;level&gt;" keys.
 *
 * NOTE(review): if context carries no "productCategoryId", the split("/")
 * below throws a NullPointerException — it is swallowed by the broad catch
 * and surfaces as a service error. TODO confirm callers always set it.
 */
public static Map<String, Object> getSideDeepCategories(DispatchContext dctx, Map<String, Object> context) {
    Map<String, Object> result;
    String solrIndexName = (String) context.get("indexName");
    try {
        String catalogId = null;
        if (UtilValidate.isNotEmpty(context.get("catalogId")))
            catalogId = (String) context.get("catalogId");
        // Expand the category id into its full trail form ("level/cat1/cat2/...").
        String productCategoryId = (String) context.get("productCategoryId") != null
                ? CategoryUtil.getCategoryNameWithTrail((String) context.get("productCategoryId"), dctx)
                : null;
        result = ServiceUtil.returnSuccess();
        Map<String, List<Map<String, Object>>> catLevel = new HashMap<String, List<Map<String, Object>>>();
        Debug.logInfo("productCategoryId: " + productCategoryId, module);
        //Add toplevel categories
        String[] trailElements = productCategoryId.split("/");
        //iterate over actual results
        for (String elements : trailElements) {
            //catIds must be greater than 3 chars
            if (elements.length() > 3) {
                Debug.logInfo("elements: " + elements, module);
                String categoryPath = CategoryUtil.getCategoryNameWithTrail(elements, dctx);
                String[] categoryPathArray = categoryPath.split("/");
                // First trail segment encodes the depth of this category.
                int level = Integer.parseInt(categoryPathArray[0]);
                String facetQuery = CategoryUtil.getFacetFilterForCategory(categoryPath, dctx);
                // Faceted Solr query for the categories available under this path.
                Map<String, Object> query = SolrUtil.categoriesAvailable(catalogId, categoryPath, null,
                        facetQuery, false, 0, 0, solrIndexName);
                QueryResponse cat = (QueryResponse) query.get("rows");
                List<Map<String, Object>> categories = new ArrayList<Map<String, Object>>();
                List<FacetField> catList = (List<FacetField>) cat.getFacetFields();
                for (Iterator<FacetField> catIterator = catList.iterator(); catIterator.hasNext();) {
                    FacetField field = (FacetField) catIterator.next();
                    List<Count> catL = (List<Count>) field.getValues();
                    if (catL != null) {
                        for (Iterator<Count> catIter = catL.iterator(); catIter.hasNext();) {
                            FacetField.Count f = (FacetField.Count) catIter.next();
                            if (f.getCount() > 0) {
                                Map<String, Object> catMap = new HashMap<String, Object>();
                                // Split the facet value "trail/cat1/.../catN" apart:
                                // last segment is the category id, the rest its parent path.
                                LinkedList<String> iName = new LinkedList<String>();
                                iName.addAll(Arrays.asList(f.getName().split("/")));
                                // int l = Integer.parseInt((String) iName.getFirst());
                                catMap.put("catId", iName.getLast());
                                iName.removeFirst();
                                String path = f.getName();
                                catMap.put("path", path);
                                if (level > 0) {
                                    iName.removeLast();
                                    catMap.put("parentCategory", StringUtils.join(iName, "/"));
                                } else {
                                    catMap.put("parentCategory", null);
                                }
                                catMap.put("count", Long.toString(f.getCount()));
                                categories.add(catMap);
                            }
                        }
                    }
                }
                catLevel.put("menu-" + level, categories);
            }
        }
        result.put("categories", catLevel);
        result.put("numFound", (long) 0);
    } catch (Exception e) {
        result = ServiceUtil.returnError(e.toString());
        result.put("numFound", (long) 0);
    }
    return result;
}
From source file:org.cocos2dx.lib.Cocos2dxBitmap.java
private static String[] splitString(String content, int maxHeight, int maxWidth, Paint paint) { String[] lines = content.split("\\n"); String[] ret = null;// www . j a v a2 s . com FontMetricsInt fm = paint.getFontMetricsInt(); int heightPerLine = (int) Math.ceil(fm.bottom - fm.top); int maxLines = maxHeight / heightPerLine; if (maxWidth != 0) { LinkedList<String> strList = new LinkedList<String>(); for (String line : lines) { /* * The width of line is exceed maxWidth, should divide it into * two or more lines. */ int lineWidth = (int) Math.ceil(paint.measureText(line)); if (lineWidth > maxWidth) { strList.addAll(divideStringWithMaxWidth(paint, line, maxWidth)); } else { strList.add(line); } /* * Should not exceed the max height; */ if (maxLines > 0 && strList.size() >= maxLines) { break; } } /* * Remove exceeding lines */ if (maxLines > 0 && strList.size() > maxLines) { while (strList.size() > maxLines) { strList.removeLast(); } } ret = new String[strList.size()]; strList.toArray(ret); } else if (maxHeight != 0 && lines.length > maxLines) { /* * Remove exceeding lines */ LinkedList<String> strList = new LinkedList<String>(); for (int i = 0; i < maxLines; i++) { strList.add(lines[i]); } ret = new String[strList.size()]; strList.toArray(ret); } else { ret = lines; } return ret; }