List of usage examples for java.util LinkedList pollFirst
public E pollFirst()
From source file:Main.java
public static void main(String[] args) { // create a LinkedList LinkedList<String> list = new LinkedList<String>(); // add some elements list.add("Hello"); list.add("from java2s.com"); list.add("10"); // print the list System.out.println("LinkedList:" + list); // retrieve and remove the first element of the list System.out.println("First element of the list:" + list.pollFirst()); // print the list System.out.println("LinkedList:" + list); }
From source file:sanghoon.flickr.photos.Dump.java
/**
 * Command-line entry point: downloads metadata for all photos matching the
 * given criteria via the flickr.photos.search API, restricted to a bounding
 * box. Boxes containing more photos than Flickr's 4,000-result cap are
 * recursively split; results are flushed to numbered JSON files.
 *
 * Required option: -b/--bounding-box "minLon,minLat,maxLon,maxLat".
 */
public static void main(String[] args) throws FileNotFoundException {
    // Example bounding box argument kept for reference:
    // args = new String[] { "-b", "-122.373971375,47.55575575,-122.3455185,47.585350625000004" };
    if (args.length < 1) {
        // No arguments at all: print usage and exit.
        String header = "\n" + "Download all photos matching some criteria, using flickr.photos.search API."
                + "\n\n";
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java " + Dump.class.getName() + " [OPTIONS]...", header, createOptions(), "");
        return;
    }

    CommandLineParser parser = new DefaultParser();
    CommandLine line;
    try {
        line = parser.parse(createOptions(), args);
    } catch (ParseException exp) {
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        return;
    }

    // Optional search filters; null means "not specified".
    String tags = line.hasOption("tags") ? line.getOptionValue("tags") : null;
    String minTakenDate = line.hasOption("min-taken-date") ? line.getOptionValue("min-taken-date") : null;
    String maxTakenDate = line.hasOption("max-taken-date") ? line.getOptionValue("max-taken-date") : null;
    String bbox = line.hasOption("bounding-box") ? line.getOptionValue("bounding-box") : null;

    if (bbox == null) {
        System.err.println("bounding box parameter is required");
        return;
    }

    // TODO: places.find places.getInfo ? id bounding box
    // String woeID = line.hasOption("woe-id") ? line.getOptionValue("woe-id") : null;
    // String placeID = line.hasOption("place-id") ? line.getOptionValue("place-id") : null;

    String outputFilePrefix = line.hasOption("output-file-prefix") ?
            line.getOptionValue("output-file-prefix") : "output";

    // API credentials are loaded from a properties file on the classpath.
    ApiKey key = ApiKey.load(ClassLoader.getSystemResourceAsStream("flickr_api_key.properties"));

    // Accumulates photo-id -> JSON entries until flushed to a file.
    Map<String, String> outputs = new HashMap<>();

    // Bounding box is given as "minLon,minLat,maxLon,maxLat".
    String[] coords = bbox.split(",");
    double minimum_longitude = Double.parseDouble(coords[0]);
    double minimum_latitude = Double.parseDouble(coords[1]);
    double maximum_longitude = Double.parseDouble(coords[2]);
    double maximum_latitude = Double.parseDouble(coords[3]);

    // Work queue of boxes still to be crawled; boxes are re-queued on failure
    // and split-and-requeued when they contain too many photos.
    LinkedList<BoundingBox> bboxes = new LinkedList<>();
    bboxes.add(new BoundingBox(minimum_longitude, minimum_latitude, maximum_longitude, maximum_latitude));

    int index = 0;
    while (bboxes.isEmpty() == false) {
        BoundingBox box = bboxes.pollFirst();
        int page = 1;
        System.out.print("searching for " + box.toString() + " .");
        Integer total = search(key, box, tags, minTakenDate, maxTakenDate, page, outputs);
        if (total == null) {
            // Search failed (null result): push the box back and retry later.
            bboxes.addLast(box);
            System.out.println(" failed, retry later");
            continue;
        } else if (MAX_PHOTOS_IN_A_BBOX < total) {
            // Please note that Flickr will return at most the first 4,000 results for any given search query.
            // If this is an issue, we recommend trying a more specific query.
            // So: split the box into smaller ones and queue them instead.
            List<BoundingBox> splitBoxes = box.split();
            for (BoundingBox splitBox : splitBoxes)
                bboxes.addLast(splitBox);
            System.out.print(" " + total + " photos (>" + MAX_PHOTOS_IN_A_BBOX + "), split ");
            System.out.print("{");
            for (int i = 0; i < splitBoxes.size(); i++) {
                if (0 < i)
                    System.out.print(",");
                System.out.print("[");
                System.out.print(splitBoxes.get(i).toString());
                System.out.print("]");
            }
            System.out.print("}");
            System.out.println();
            continue;
        } else if (PER_PAGE < total) {
            // More results than fit on a single page: fetch the remaining
            // pages for this box. (Original comment here was non-ASCII and
            // was garbled during extraction.)
            while (page * PER_PAGE < total) {
                page++;
                search(key, box, tags, minTakenDate, maxTakenDate, page, outputs);
                System.out.print(".");
            }
            System.out.println(" " + total + " photos in " + page + " pages");
        }

        if (MAX_PHOTOS_IN_AN_OUTPUT < outputs.size()) {
            // Buffer is full: flush accumulated results to the next numbered file.
            String filename = outputFilePrefix + "_" + index++ + ".json";
            write(outputs, filename);
            outputs.clear();
        }
    }

    // Flush whatever remains in the buffer.
    String filename = outputFilePrefix + "_" + index++ + ".json";
    write(outputs, filename);
    System.out.println("finished");
}
From source file:it.unipd.dei.ims.falcon.CmdLine.java
public static void main(String[] args) { // last argument is always index path Options options = new Options(); // one of these actions has to be specified OptionGroup actionGroup = new OptionGroup(); actionGroup.addOption(new Option("i", true, "perform indexing")); // if dir, all files, else only one file actionGroup.addOption(new Option("q", true, "perform a single query")); actionGroup.addOption(new Option("b", false, "perform a query batch (read from stdin)")); actionGroup.setRequired(true);/*from w w w. j a v a2 s . c o m*/ options.addOptionGroup(actionGroup); // other options options.addOption(new Option("l", "segment-length", true, "length of a segment (# of chroma vectors)")); options.addOption( new Option("o", "segment-overlap", true, "overlap portion of a segment (# of chroma vectors)")); options.addOption(new Option("Q", "quantization-level", true, "quantization level for chroma vectors")); options.addOption(new Option("k", "min-kurtosis", true, "minimum kurtosis for indexing chroma vectors")); options.addOption(new Option("s", "sub-sampling", true, "sub-sampling of chroma features")); options.addOption(new Option("v", "verbose", false, "verbose output (including timing info)")); options.addOption(new Option("T", "transposition-estimator-strategy", true, "parametrization for the transposition estimator strategy")); options.addOption(new Option("t", "n-transp", true, "number of transposition; if not specified, no transposition is performed")); options.addOption(new Option("f", "force-transp", true, "force transposition by an amount of semitones")); options.addOption(new Option("p", "pruning", false, "enable query pruning; if -P is unspecified, use default strategy")); options.addOption(new Option("P", "pruning-custom", true, "custom query pruning strategy")); // parse HelpFormatter formatter = new HelpFormatter(); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); if (cmd.getArgs().length != 1) 
throw new ParseException("no index path was specified"); } catch (ParseException ex) { System.err.println("ERROR - parsing command line:"); System.err.println(ex.getMessage()); formatter.printHelp("falcon -{i,q,b} [options] index_path", options); return; } // default values final float[] DEFAULT_TRANSPOSITION_ESTIMATOR_STRATEGY = new float[] { 0.65192807f, 0.0f, 0.0f, 0.0f, 0.3532628f, 0.4997167f, 0.0f, 0.41703504f, 0.0f, 0.16297342f, 0.0f, 0.0f }; final String DEFAULT_QUERY_PRUNING_STRATEGY = "ntf:0.340765*[0.001694,0.995720];ndf:0.344143*[0.007224,0.997113];" + "ncf:0.338766*[0.001601,0.995038];nmf:0.331577*[0.002352,0.997884];"; // TODO not the final one int hashes_per_segment = Integer.parseInt(cmd.getOptionValue("l", "150")); int overlap_per_segment = Integer.parseInt(cmd.getOptionValue("o", "50")); int nranks = Integer.parseInt(cmd.getOptionValue("Q", "3")); int subsampling = Integer.parseInt(cmd.getOptionValue("s", "1")); double minkurtosis = Float.parseFloat(cmd.getOptionValue("k", "-100.")); boolean verbose = cmd.hasOption("v"); int ntransp = Integer.parseInt(cmd.getOptionValue("t", "1")); TranspositionEstimator tpe = null; if (cmd.hasOption("t")) { if (cmd.hasOption("T")) { // TODO this if branch is yet to test Pattern p = Pattern.compile("\\d\\.\\d*"); LinkedList<Double> tokens = new LinkedList<Double>(); Matcher m = p.matcher(cmd.getOptionValue("T")); while (m.find()) tokens.addLast(new Double(cmd.getOptionValue("T").substring(m.start(), m.end()))); float[] strategy = new float[tokens.size()]; if (strategy.length != 12) { System.err.println("invalid transposition estimator strategy"); System.exit(1); } for (int i = 0; i < strategy.length; i++) strategy[i] = new Float(tokens.pollFirst()); } else { tpe = new TranspositionEstimator(DEFAULT_TRANSPOSITION_ESTIMATOR_STRATEGY); } } else if (cmd.hasOption("f")) { int[] transps = parseIntArray(cmd.getOptionValue("f")); tpe = new ForcedTranspositionEstimator(transps); ntransp = transps.length; } 
QueryPruningStrategy qpe = null; if (cmd.hasOption("p")) { if (cmd.hasOption("P")) { qpe = new StaticQueryPruningStrategy(cmd.getOptionValue("P")); } else { qpe = new StaticQueryPruningStrategy(DEFAULT_QUERY_PRUNING_STRATEGY); } } // action if (cmd.hasOption("i")) { try { Indexing.index(new File(cmd.getOptionValue("i")), new File(cmd.getArgs()[0]), hashes_per_segment, overlap_per_segment, subsampling, nranks, minkurtosis, tpe, verbose); } catch (IndexingException ex) { Logger.getLogger(CmdLine.class.getName()).log(Level.SEVERE, null, ex); } catch (IOException ex) { Logger.getLogger(CmdLine.class.getName()).log(Level.SEVERE, null, ex); } } if (cmd.hasOption("q")) { String queryfilepath = cmd.getOptionValue("q"); doQuery(cmd, queryfilepath, hashes_per_segment, overlap_per_segment, nranks, subsampling, tpe, ntransp, minkurtosis, qpe, verbose); } if (cmd.hasOption("b")) { try { long starttime = System.currentTimeMillis(); BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); String line = null; while ((line = in.readLine()) != null && !line.trim().isEmpty()) doQuery(cmd, line, hashes_per_segment, overlap_per_segment, nranks, subsampling, tpe, ntransp, minkurtosis, qpe, verbose); in.close(); long endtime = System.currentTimeMillis(); System.out.println(String.format("total time: %ds", (endtime - starttime) / 1000)); } catch (IOException ex) { Logger.getLogger(CmdLine.class.getName()).log(Level.SEVERE, null, ex); } } }
From source file:eu.smartfp7.foursquare.AttendanceCrawler.java
/**
 * The main takes an undefined number of cities as arguments, then initializes
 * the specific crawling of all the trending venues of these cities.
 * The trending venues must have been previously identified using the `DownloadPages`
 * program.
 *
 * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco.
 */
public static void main(String[] args) throws Exception {
    Settings settings = Settings.getInstance();
    String folder = settings.getFolder();

    // We keep info and error logs, so that we know what happened in case
    // of incoherence in the time series.
    Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>();
    Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>();

    // For each city we monitor, we store the venue IDs that we got from
    // a previous crawl.
    Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>();

    // Contains the epoch time when the last API call has been made for each
    // venue. Ensures that we get data only once each hour.
    Map<String, Long> venue_last_call = new HashMap<String, Long>();

    // Contains the epoch time when we last checked if time series were broken
    // for each city.
    // We do these checks once every day before the batch forecasting begins.
    Map<String, Long> sanity_checks = new HashMap<String, Long>();

    // We also keep in memory the number of checkins for the last hour for
    // each venue.
    Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>();

    // Number of API calls made during each (hour-truncated) epoch timestamp.
    Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>();
    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    int total_venues = 0;
    long total_calls = 0;
    long time_spent_on_API = 0;

    // Per-city initialization: load venues, open logs, and recover in-memory
    // state from the last line of each existing time-series file.
    for (String c : args) {
        settings.checkFileHierarchy(c);

        city_venues.put(c, loadVenues(c));
        total_venues += city_venues.get(c).size();

        info_logs.put(c, new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true));
        error_logs.put(c, new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true));

        Calendar cal = Calendar.getInstance();

        info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". "
                + city_venues.get(c).size() + " venues loaded.\n");
        info_logs.get(c).flush();

        // If we interrupted the program for some reason, we can get back
        // the in-memory data.
        // Important: the program must not be interrupted for more than one
        // hour, or we will lose time series data.
        for (String venue_id : city_venues.get(c)) {
            String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id + ".ts";
            if (new File(ts_file).exists()) {
                BufferedReader buffer = new BufferedReader(new FileReader(ts_file));
                String mem = null, line = null;
                // Scan to the last line of the time-series file; `mem` ends up
                // holding the final non-null line.
                for (; (line = buffer.readLine()) != null; mem = line)
                    ;
                buffer.close();
                if (mem == null)
                    continue;
                // Time-series line format: date,here_now,hour_checkins,total_checkins
                String[] tmp = mem.split(",");
                venue_last_call.put(venue_id, df.parse(tmp[0]).getTime());
                venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3]));
                VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file));
            } // if
        } // for
        sanity_checks.put(c, cal.getTimeInMillis());
    } // for

    if (total_venues > 5000) {
        System.out.println(
                "Too much venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use these credentials.\nExiting now.");
        return;
    }

    // Main crawl loop: runs forever, polling each venue at most once per hour.
    while (true) {
        for (String c : args) {
            // We create a FIFO queue and pop venue IDs one at a time.
            LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c));
            String venue_id = null;

            // Artificial wait to avoid processors looping at 100% of their capacity
            // when there is no more venues to crawl for the current hour.
            Thread.sleep(3000);

            while ((venue_id = city_venues_buffer.pollFirst()) != null) {
                // We get the current time according to the city's time zone
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.MILLISECOND,
                        TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime())
                                - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime()));
                //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime()));

                long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime();

                // We query Foursquare only once per hour per venue.
                if (venue_last_call.get(venue_id) != null
                        && current_time < venue_last_call.get(venue_id) + 3600000)
                    continue;

                // Throttle based on the average time an API call has taken so far.
                intelligentWait(total_venues, cal.getTime().getTime(),
                        (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls)));

                Venue venue = null;
                try {
                    long beforeCall = System.currentTimeMillis();
                    venue = new Venue(getFoursquareVenueById(venue_id, c));

                    // If there is no last call, this is the beginning of the time series
                    // for this venue. We get the number of people "here now" to initialize
                    // the series.
                    if (venue_last_call.get(venue_id) == null) {
                        /** TODO: by doing this, we keep a representation of the venue dating from the beginning
                         *        of the specific crawl. we might want to change this and update this file once
                         *        in a while.
                         */
                        FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues"
                                + File.separator + venue_id + ".info");
                        info.write(venue.getFoursquareJson());
                        info.close();

                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts");
                        out.write("Date,here_now,hour_checkins,total_checkins\n");
                        out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow()
                                + "," + venue.getCheckincount() + "\n");
                        out.close();
                    } else {
                        // Append one hourly sample; hour_checkins is the delta of the
                        // venue's total check-in count since the previous sample.
                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts", true);
                        int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id);
                        out.write(df.format(current_time) + "," + venue.getHereNow() + ","
                                + Integer.toString(checks) + "," + venue.getCheckincount() + "\n");
                        out.close();
                    }

                    // Bump the per-hour API call counter.
                    if (APICallsCount.get(current_time) == null)
                        APICallsCount.put(current_time, 1);
                    else
                        APICallsCount.put(current_time, APICallsCount.get(current_time) + 1);

                    total_calls++;

                    venue_last_call.put(venue_id, current_time);
                    venue_last_checkin.put(venue_id, venue.getCheckincount());

                    time_spent_on_API += System.currentTimeMillis() - beforeCall;
                } catch (Exception e) {
                    // If something bad happens (crawler not available, IO error, ...), we put the
                    // venue_id in the FIFO queue so that it gets reevaluated later.
                    //e.printStackTrace();
                    error_logs.get(c)
                            .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id
                                    + " (" + e.getMessage() + "). " + APICallsCount.get(current_time)
                                    + " API calls so far this hour, " + city_venues_buffer.size()
                                    + " venues remaining in the buffer.\n");
                    error_logs.get(c).flush();

                    System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- "
                            + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size()
                            + " venues remaining " + " (" + e.getMessage() + ")");

                    // A deleted venue is removed permanently; any other error
                    // re-queues the venue for a later retry.
                    if (e instanceof FoursquareAPIException)
                        if (((FoursquareAPIException) e).getHttp_code().equals("400")
                                && ((FoursquareAPIException) e).getError_detail()
                                        .equals("Venue " + venue_id + " has been deleted")) {
                            city_venues.get(c).remove(venue_id);
                            removeVenue(venue_id, c);
                        } else
                            city_venues_buffer.add(venue_id);
                    continue;
                }
            } // while

            // Every day between 0am and 2am, we repair all the broken time series (if there
            // is something to repair).
            Calendar cal = Calendar.getInstance();
            if (city_venues_buffer.peekFirst() == null
                    && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000
                    && cal.get(Calendar.HOUR_OF_DAY) < 2) {
                VenueUtil.fixBrokenTimeSeriesCity(c, folder);
                sanity_checks.put(c, cal.getTimeInMillis());
                info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n");
                info_logs.get(c).flush();
            }
        } // for
    } // while
}
From source file:it.cnr.istc.iloc.gui.StateVariableVisualizer.java
/**
 * Renders an atom as "TypeName(field = value, ...)", collecting fields from
 * the atom's type and all of its superclasses; synthetic fields and the
 * scope field are skipped.
 */
private static String toString(Atom atom) {
    StringBuilder out = new StringBuilder();
    out.append(atom.type.name).append("(");

    // Walk the type hierarchy breadth-first, starting from the atom's own type.
    LinkedList<Type> pending = new LinkedList<>();
    pending.add(atom.type);
    while (!pending.isEmpty()) {
        Type current = pending.pollFirst();
        pending.addAll(current.getSuperclasses());
        for (Field field : current.getFields()) {
            if (field.synthetic || field.name.equals(SCOPE)) {
                continue;
            }
            IItem value = atom.get(field.name);
            out.append(", ").append(field.name);
            switch (field.type.name) {
            case BOOL:
                out.append(" = ").append(((IBoolItem) value).getBoolVar().evaluate());
                break;
            case REAL:
                out.append(" = ").append(atom.core.evaluate(((IArithItem) value).getArithVar()));
                break;
            case STRING:
                out.append(" = ").append(((IStringItem) value).getValue());
                break;
            }
        }
    }
    out.append(")");

    // Drop the spurious ", " that directly follows the opening parenthesis.
    return out.toString().replace("(, ", "(");
}
From source file:org.matsim.contrib.drt.analysis.DynModeTripsAnalyser.java
public static Map<Double, List<DynModeTrip>> splitTripsIntoBins(Collection<DynModeTrip> trips, int startTime, int endTime, int binSize_s) { LinkedList<DynModeTrip> alltrips = new LinkedList<>(); alltrips.addAll(trips);//from w ww .j a va 2 s . c o m Collections.sort(alltrips); DynModeTrip currentTrip = alltrips.pollFirst(); if (currentTrip.getDepartureTime() > endTime) { Logger.getLogger(DynModeTripsAnalyser.class).error("wrong end / start Times for analysis"); } Map<Double, List<DynModeTrip>> splitTrips = new TreeMap<>(); for (int time = startTime; time < endTime; time = time + binSize_s) { List<DynModeTrip> currentList = new ArrayList<>(); splitTrips.put(Double.valueOf(time), currentList); while (currentTrip.getDepartureTime() < time + binSize_s) { currentList.add(currentTrip); currentTrip = alltrips.pollFirst(); if (currentTrip == null) { return splitTrips; } } } return splitTrips; }
From source file:it.cnr.istc.iloc.gui.TimelinesChart.java
@Override public void currentNode(Solver.Node n) { final CombinedDomainXYPlot combined_plot = new CombinedDomainXYPlot(new DateAxis("Time")); combined_plot.setGap(3.0);//from www.j a va 2 s . co m combined_plot.setOrientation(PlotOrientation.VERTICAL); Set<Type> c_types = new HashSet<>(); LinkedList<Type> queue = new LinkedList<>(); queue.addAll(solver.getTypes()); while (!queue.isEmpty()) { Type c_type = queue.pollFirst(); if (!c_types.contains(c_type)) { c_types.add(c_type); queue.addAll(c_type.getTypes()); } } for (Type type : c_types) { if (visualizers.containsKey(type.getClass())) { for (XYPlot plot : visualizers.get(type.getClass()).getPlots(type)) { TextTitle title = new TextTitle(type.name, new Font("SansSerif", Font.PLAIN, 11), Color.BLACK, RectangleEdge.TOP, HorizontalAlignment.CENTER, VerticalAlignment.BOTTOM, new RectangleInsets(4, 4, 4, 4)); XYTitleAnnotation titleAnn = new XYTitleAnnotation(0.01, 1, title, RectangleAnchor.TOP_LEFT); plot.addAnnotation(titleAnn); combined_plot.add(plot, 1); } } } setChart(new JFreeChart("", new Font("SansSerif", Font.BOLD, 14), combined_plot, false)); setBorder(BorderFactory.createEtchedBorder()); }
From source file:org.apache.tajo.scheduler.FairScheduler.java
/**
 * Selects the next queries to start, at most one per queue. A queue may
 * schedule another query when it has no capacity cap (maxCapacity == -1) or
 * when admitting one more query keeps minCapacity * (running + 1) below its
 * maximum capacity. Scheduled queries are registered in {@code runningQueries}.
 *
 * @return the queries to start now (possibly empty, never null)
 */
@Override
protected QuerySchedulingInfo[] getScheduledQueries() {
    synchronized (queues) {
        List<QuerySchedulingInfo> queries = new ArrayList<QuerySchedulingInfo>();

        for (String eachQueueName : queues.keySet()) {
            int runningSize = getRunningQueries(eachQueueName);
            QueueProperty property = queueProperties.get(eachQueueName);

            boolean hasCapacity = property.getMaxCapacity() == -1
                    || property.getMinCapacity() * (runningSize + 1) < property.getMaxCapacity();
            if (!hasCapacity) {
                continue;
            }

            // Previously this poll-and-register logic was duplicated in both
            // capacity branches; it is now factored into a single helper.
            QuerySchedulingInfo scheduled = pollNextQuery(eachQueueName);
            if (scheduled != null) {
                queries.add(scheduled);
            }
        }

        return queries.toArray(new QuerySchedulingInfo[] {});
    }
}

/**
 * Removes and returns the first waiting query of the given queue and records
 * it as running; returns null when the queue is missing or empty.
 * Must be called while holding the monitor of {@code queues}.
 */
private QuerySchedulingInfo pollNextQuery(String queueName) {
    LinkedList<QuerySchedulingInfo> queue = queues.get(queueName);
    if (queue == null || queue.isEmpty()) {
        return null;
    }
    QuerySchedulingInfo querySchedulingInfo = queue.pollFirst();
    runningQueries.put(querySchedulingInfo.getQueryId(), querySchedulingInfo);
    return querySchedulingInfo;
}
From source file:org.batoo.jpa.core.pool.GenericKeyedPool.java
/**
 * {@inheritDoc}
 *
 * Borrows an object from the per-key pool, topping the pool up to just above
 * MIN_SIZE via the factory before lending one out.
 */
@Override
public V borrowObject(K key) throws Exception, NoSuchElementException, IllegalStateException {
    final LinkedList<V> pool = this.getPool(key);
    // Refill before lending: keep more than MIN_SIZE spare objects available.
    if (pool.size() <= GenericKeyedPool.MIN_SIZE) {
        synchronized (this) {
            // Re-check under the lock; another thread may already have refilled.
            while (pool.size() <= GenericKeyedPool.MIN_SIZE) {
                pool.addLast(this.factory.makeObject(key));
            }
        }
    }
    // NOTE(review): the size check above and this pollFirst() are not atomic —
    // under contention pollFirst() could observe a drained pool and return
    // null. Verify whether getPool() returns a thread-safe list and whether
    // callers tolerate a null result.
    return pool.pollFirst();
}
From source file:pl.openrnd.managers.fragmentsswapper.SingleContainerFragmentSwapper.java
/**
 * Drains the queue of deferred operations, executing each one through
 * performOperationIfAllowed(). Works on a snapshot of the queue so that any
 * operation re-queued during the drain is handled on a later pass.
 */
private void handlePendingOperations() {
    Log.v(TAG, String.format("handlePendingOperations(): size[%d]", mPendingOperations.size()));

    // Snapshot and reset the shared queue before processing.
    LinkedList<Runnable> snapshot = new LinkedList<Runnable>(mPendingOperations);
    mPendingOperations.clear();

    // pollFirst() returns null once the snapshot is exhausted.
    for (Runnable operation; (operation = snapshot.pollFirst()) != null;) {
        performOperationIfAllowed(operation);
    }
}