List of usage examples for java.util.TreeSet TreeSet()
public TreeSet()
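TreeSet() constructs a new, empty set sorted according to the natural ordering of its elements, so every element added must implement Comparable (otherwise a ClassCastException is thrown at insertion time). A minimal, self-contained sketch of that behavior; the class and values here are illustrative and not taken from the examples below:

import java.util.Set;
import java.util.TreeSet;

public class TreeSetExample {
    public static void main(String[] args) {
        // Natural (lexicographic) ordering for Strings; duplicates are ignored.
        Set<String> names = new TreeSet<>();
        names.add("charlie");
        names.add("alpha");
        names.add("bravo");
        names.add("alpha"); // duplicate, not added again
        System.out.println(names); // [alpha, bravo, charlie]
    }
}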
From source file:com.sirius.hadoop.test.onlinetime.MRTest.java
@Before
public void init() {
    onlineTimeMRDriver = MapReduceDriver.newMapReduceDriver(new StatusMapper(), new StatusReducer());
    onlineTimeMRDriver.setKeyGroupingComparator(new StatusKeyGroupComparator());

    String[] userIds = new String[10];
    for (int i = 0; i < userIds.length; i++) {
        userIds[i] = UUID.randomUUID().toString();
    }

    Set<String> values = new TreeSet<>();
    for (String userId : userIds) {
        long start = System.currentTimeMillis();
        for (int i = 0; i < 1000; i++) {
            start += RandomUtils.nextLong(1000, 1000 * 60 * 5);
            values.add(String.format("{'u':'%s','t':'%s','ct':%s}", userId, "online", start));
            start += RandomUtils.nextLong(1000, 1000 * 60 * 5);
            values.add(String.format("{'u':'%s','t':'%s','ct':%s}", userId, "offline", start));
        }
    }

    for (String value : values) {
        onlineTimeMRDriver.addInput(new Text(UUID.randomUUID().toString()), new Text(value));
    }
}
From source file:com.nearinfinity.mele.zookeeper.ZookeeperIndexDeletionPolicy.java
@Override
public void onCommit(List<? extends IndexCommit> commits) throws IOException {
    List<String> filesCurrentlyBeingReferenced = getListOfReferencedFiles(zk, indexRefPath);
    int size = commits.size();
    Collection<String> previouslyReferencedFiles = new TreeSet<String>();
    OUTER: for (int i = size - 2; i >= 0; i--) {
        IndexCommit indexCommit = commits.get(i);
        LOG.info("Processing index commit generation " + indexCommit.getGeneration());
        Collection<String> fileNames = new TreeSet<String>(indexCommit.getFileNames());
        // Remove all file names that were referenced in newer index commits, so that
        // older index commits can be released without the fear of broken references.
        fileNames.removeAll(previouslyReferencedFiles);
        for (String fileName : fileNames) {
            if (filesCurrentlyBeingReferenced.contains(fileName)) {
                previouslyReferencedFiles.addAll(fileNames);
                continue OUTER;
            }
        }
        LOG.info("Index Commit " + indexCommit.getGeneration() + " no longer needed, releasing " + fileNames);
        indexCommit.delete();
    }
}
From source file:com.inmobi.databus.readers.TestHadoopStreamReader.java
@BeforeTest
public void setup() throws Exception {
    consumerNumber = 1;
    files = new String[] { HadoopUtil.files[1], HadoopUtil.files[3], HadoopUtil.files[5] };
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    streamDir = new Path(new Path(TestUtil.getConfiguredRootDir(), this.getClass().getSimpleName()), testStream)
            .makeQualified(fs);
    // initialize config
    HadoopUtil.setupHadoopCluster(conf, files, null, finalFiles, streamDir, false);
    inputFormatClass = SequenceFileInputFormat.class.getCanonicalName();
    encoded = false;
    partitionMinList = new TreeSet<Integer>();
    for (int i = 0; i < 60; i++) {
        partitionMinList.add(i);
    }
    chkPoints = new TreeMap<Integer, PartitionCheckpoint>();
    partitionCheckpointList = new PartitionCheckpointList(chkPoints);
}
From source file:com.shawn.dubbo.controller.serMgr.ApplicationController.java
@RequestMapping(value = "/findAllApplication", method = RequestMethod.GET)
@ResponseBody
public JsonResult<Map<String, Object>> findAllApplication(String service, String application) {
    JsonResult<Map<String, Object>> mapJsonResult = new JsonResult<Map<String, Object>>();
    Map<String, Object> resultMap = new HashMap<String, Object>();

    if (!StringUtils.isBlank(service)) {
        Set<String> applications = new TreeSet<String>();
        List<String> providerApplications = providerService.findApplicationsByServiceName(service);
        if (providerApplications != null && providerApplications.size() > 0) {
            applications.addAll(providerApplications);
        }
        List<String> consumerApplications = consumerService.findApplicationsByServiceName(service);
        if (consumerApplications != null && consumerApplications.size() > 0) {
            applications.addAll(consumerApplications);
        }
        resultMap.put("applications", applications);
        resultMap.put("providerApplications", providerApplications);
        resultMap.put("consumerApplications", consumerApplications);

        if (service != null && service.length() > 0) {
            List<Override> overrides = overrideService.findByService(service);
            Map<String, List<Override>> application2Overrides = new HashMap<String, List<Override>>();
            if (overrides != null && overrides.size() > 0 && applications != null && applications.size() > 0) {
                for (String a : applications) {
                    if (overrides != null && overrides.size() > 0) {
                        List<Override> appOverrides = new ArrayList<Override>();
                        for (Override override : overrides) {
                            if (override.isMatch(service, null, a)) {
                                appOverrides.add(override);
                            }
                        }
                        Collections.sort(appOverrides, OverrideUtils.OVERRIDE_COMPARATOR);
                        application2Overrides.put(a, appOverrides);
                    }
                }
            }
            resultMap.put("overrides", application2Overrides);
        }
        mapJsonResult = JsonResultUtils.getJsonResult(resultMap, SystemConstants.RESPONSE_STATUS_SUCCESS, null,
                SystemConstants.RESPONSE_MESSAGE_SUCCESS);
        return mapJsonResult;
    }

    Set<String> applications = new TreeSet<String>();
    List<String> providerApplications = providerService.findApplications();
    if (providerApplications != null && providerApplications.size() > 0) {
        applications.addAll(providerApplications);
    }
    List<String> consumerApplications = consumerService.findApplications();
    if (consumerApplications != null && consumerApplications.size() > 0) {
        applications.addAll(consumerApplications);
    }

    Set<String> newList = new HashSet<String>();
    Set<String> newProviders = new HashSet<String>();
    Set<String> newConsumers = new HashSet<String>();
    resultMap.put("applications", applications);
    resultMap.put("providerApplications", providerApplications);
    resultMap.put("consumerApplications", consumerApplications);

    if (StringUtils.isNotEmpty(application)) {
        application = application.toLowerCase();
        for (String o : applications) {
            if (o.toLowerCase().indexOf(application) != -1) {
                newList.add(o);
            }
        }
        for (String o : providerApplications) {
            if (o.toLowerCase().indexOf(application) != -1) {
                newProviders.add(o);
            }
        }
        for (String o : consumerApplications) {
            if (o.toLowerCase().indexOf(application) != -1) {
                newConsumers.add(o);
            }
        }
        resultMap.put("applications", newList);
        resultMap.put("providerApplications", newProviders);
        resultMap.put("consumerApplications", newConsumers);
    }

    mapJsonResult = JsonResultUtils.getJsonResult(resultMap, SystemConstants.RESPONSE_STATUS_SUCCESS, null,
            SystemConstants.RESPONSE_MESSAGE_SUCCESS);
    return mapJsonResult;
}
From source file:br.edu.ifrn.pdscfyp.Controller.ProfissionalController.java
@RequestMapping("/ranking") public String RankingProfissionais(HttpSession session, Model model) { Usuario u = (Usuario) session.getAttribute("usuarioLogado"); model.addAttribute("usuarioLogado", u); Map<Profissional, Integer> indexes = new HashMap(); int indice = 0; Set<Profissional> profissionais = Profissional.getProfissionais(); Set<Profissional> profissionaisOrdenados = new TreeSet(); for (Profissional p : profissionais) { profissionaisOrdenados.add(p);/*from w ww .ja v a 2 s .c o m*/ } for (Profissional p : profissionaisOrdenados) { indexes.put(p, ++indice); } model.addAttribute("profissionaisOrdenados", profissionaisOrdenados); model.addAttribute("indexes", indexes); return "ranking_pro"; }
From source file:org.jmxtrans.embedded.samples.cocktail.cocktail.CocktailRepository.java
/**
 * @param ingredientName an ingredient name fragment to match, or null
 * @param cocktailName a cocktail name fragment to match, or null
 * @return the cocktails matching the given ingredient and/or cocktail name, sorted
 */
public Collection<Cocktail> find(@Nullable String ingredientName, @Nullable String cocktailName) {
    SortedSet<Cocktail> result = new TreeSet<Cocktail>();
    for (Cocktail cocktail : cocktails.values()) {
        if (cocktailName == null && ingredientName == null) {
            result.add(cocktail);
        }
        if (StringUtils.hasLength(cocktailName)) {
            if (cocktail.getName().toLowerCase().contains(cocktailName.toLowerCase())) {
                result.add(cocktail);
                break;
            }
        }
        if (ingredientName != null) {
            for (String cocktailIngredient : cocktail.getIngredientNames()) {
                if (cocktailIngredient.toLowerCase().contains(ingredientName.toLowerCase())) {
                    result.add(cocktail);
                    break;
                }
            }
        }
    }
    return result;
}
From source file:com.googlecode.android_scripting.facade.ui.AlertDialogTask.java
public AlertDialogTask(String title, String message) {
    mTitle = title;
    mMessage = message;
    mInputType = InputType.DEFAULT;
    mItems = new ArrayList<String>();
    mSelectedItems = new TreeSet<Integer>();
    mResultMap = new HashMap<String, Object>();
}
From source file:com.clothcat.hpoolauto.model.TransactionList.java
public TransactionList(JSONObject jo) {
    txids = new TreeSet<>();
    try {
        if (jo.has("txids")) {
            JSONArray transactions = jo.getJSONArray("txids");
            for (int i = 0; i < transactions.length(); i++) {
                txids.add(transactions.getString(i));
            }
        }
    } catch (JSONException ex) {
        Logger.getLogger(TransactionList.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:com.cloudera.fts.avro.Ascii2AvroItemFn.java
@Override
public void process(Pair<String, Pair<Collection<String>, Collection<String>>> input, Emitter<AvroItem> emitter) {
    // The first collection holds events, the second attributes; the attributes need to be sorted on seq.
    if (input.second().first().size() < 1) {
        return;
    }
    String eventString = Iterables.getOnlyElement(input.second().first());
    splitFactory.setType(SplitFactory.Type.EVENT);
    AbstractAttribsRecord eventRecord = splitFactory.create(eventString);
    LOG.debug(input.first() + " event: " + eventRecord.toString());
    assert (eventRecord.getSerialNum().equals(input.first()));

    if (input.second().second().size() < 1) {
        return;
    }
    Set<AbstractAttribsRecord> attribs = new TreeSet<AbstractAttribsRecord>();
    splitFactory.setType(SplitFactory.Type.ATTRS);
    Map<CharSequence, CharSequence> map = Maps.newHashMap();
    for (String attrString : input.second().second()) {
        AbstractAttribsRecord attribsRecord = splitFactory.create(attrString);
        LOG.debug(input.first() + " attribs: " + attribsRecord.toString());
        assert (attribsRecord.getSerialNum().equals(input.first()));
        if (attribsRecord.getSeq() <= eventRecord.getSeq()) {
            attribs.add(attribsRecord);
        }
    }
    for (AbstractAttribsRecord attribsRecord : attribs) {
        map.put(attribsRecord.getAttrName(), attribsRecord.getAttrValue());
    }
    if (LOG.isDebugEnabled()) {
        for (Map.Entry<CharSequence, CharSequence> entry : map.entrySet()) {
            LOG.debug(input.first() + " map: " + entry.getKey() + "#" + entry.getValue());
        }
    }
    emitter.emit(AvroItem.newBuilder().setSerialNum(input.first()).setStatus(eventRecord.getStatus())
            .setSeq(eventRecord.getSeq()).setAttributes(map).build());
}
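Since attribs is a TreeSet created with the no-argument constructor, AbstractAttribsRecord must implement Comparable (by seq, going by the comment above), and any two records whose compareTo returns 0 are treated as duplicates, so only the first one added is kept. A minimal sketch of that ordering contract, using an illustrative record class that is not part of the original project:

import java.util.Set;
import java.util.TreeSet;

public class SeqOrderingSketch {

    // Illustrative stand-in for a record ordered by its sequence number.
    static class AttribRecord implements Comparable<AttribRecord> {
        final long seq;
        final String name;

        AttribRecord(long seq, String name) {
            this.seq = seq;
            this.name = name;
        }

        @Override
        public int compareTo(AttribRecord other) {
            // Ordering by seq only: two records with the same seq compare as equal,
            // and TreeSet keeps just the first one that was added.
            return Long.compare(this.seq, other.seq);
        }
    }

    public static void main(String[] args) {
        Set<AttribRecord> attribs = new TreeSet<>();
        attribs.add(new AttribRecord(3, "color"));
        attribs.add(new AttribRecord(1, "size"));
        attribs.add(new AttribRecord(3, "weight")); // same seq as "color": silently dropped
        attribs.forEach(r -> System.out.println(r.seq + " " + r.name)); // 1 size, 3 color
    }
}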
From source file:com.opengamma.core.marketdatasnapshot.VolatilitySurfaceData.java
public VolatilitySurfaceData(final String definitionName, final String specificationName,
        final UniqueIdentifiable target, final X[] xs, final Y[] ys, final Map<Pair<X, Y>, Double> values) {
    Validate.notNull(definitionName, "Definition Name");
    Validate.notNull(specificationName, "Specification Name");
    Validate.notNull(target, "Target");
    Validate.notNull(ys, "Y axis values");
    Validate.notNull(xs, "X axis values");
    Validate.notNull(values, "Volatility Values Map");
    _definitionName = definitionName;
    _specificationName = specificationName;
    _target = target;
    _values = new HashMap<Pair<X, Y>, Double>(values);
    _xs = xs;
    _ys = ys;
    _uniqueXs = new TreeSet<X>();
    _strips = new HashMap<X, List<ObjectsPair<Y, Double>>>();
    for (Map.Entry<Pair<X, Y>, Double> entries : values.entrySet()) {
        if (_strips.containsKey(entries.getKey().getFirst())) {
            _strips.get(entries.getKey().getFirst())
                    .add(Pair.of(entries.getKey().getSecond(), entries.getValue()));
        } else {
            _uniqueXs.add(entries.getKey().getFirst());
            final List<ObjectsPair<Y, Double>> list = new ArrayList<ObjectsPair<Y, Double>>();
            list.add(Pair.of(entries.getKey().getSecond(), entries.getValue()));
            _strips.put(entries.getKey().getFirst(), list);
        }
    }
}