List of usage examples for java.util.Vector.isEmpty()
public synchronized boolean isEmpty()
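isEmpty() returns true exactly when the vector contains no elements, i.e. when size() == 0. Before the project-level examples below, here is a minimal, self-contained sketch (class and variable names are illustrative only, not taken from any of the projects listed afterwards):

import java.util.Vector;

public class VectorIsEmptyDemo {
    public static void main(String[] args) {
        Vector<String> hosts = new Vector<String>();

        // A freshly constructed Vector has no elements
        System.out.println(hosts.isEmpty()); // true

        hosts.add("node-1.example.org");
        System.out.println(hosts.isEmpty()); // false

        // isEmpty() is equivalent to size() == 0, but states the intent directly
        hosts.clear();
        System.out.println(hosts.isEmpty()); // true again after clear()
    }
}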
From source file:com.ricemap.spateDB.operations.RangeQuery.java
public static void main(String[] args) throws IOException {
    CommandLineArguments cla = new CommandLineArguments(args);
    final QueryInput query = cla.getQuery();
    final Path[] paths = cla.getPaths();
    if (paths.length == 0 || (cla.getPrism() == null && cla.getSelectionRatio() < 0.0f)) {
        printUsage();
        throw new RuntimeException("Illegal parameters");
    }
    JobConf conf = new JobConf(FileMBR.class);
    final Path inputFile = paths[0];
    final FileSystem fs = inputFile.getFileSystem(conf);
    if (!fs.exists(inputFile)) {
        printUsage();
        throw new RuntimeException("Input file does not exist");
    }
    final Path outputPath = paths.length > 1 ? paths[1] : null;
    final Prism[] queryRanges = cla.getPrisms();
    int concurrency = cla.getConcurrency();
    final Shape stockShape = cla.getShape(true);
    final boolean overwrite = cla.isOverwrite();

    final long[] results = new long[queryRanges.length];
    final Vector<Thread> threads = new Vector<Thread>();

    final BooleanWritable exceptionHappened = new BooleanWritable();

    Thread.UncaughtExceptionHandler h = new Thread.UncaughtExceptionHandler() {
        public void uncaughtException(Thread th, Throwable ex) {
            ex.printStackTrace();
            exceptionHappened.set(true);
        }
    };

    for (int i = 0; i < queryRanges.length; i++) {
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    int thread_i = threads.indexOf(this);
                    long result_count = rangeQueryMapReduce(fs, inputFile, outputPath, queryRanges[thread_i],
                            stockShape, overwrite, false, query);
                    results[thread_i] = result_count;
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
        t.setUncaughtExceptionHandler(h);
        threads.add(t);
    }

    long t1 = System.currentTimeMillis();
    do {
        // Ensure that there is at least MaxConcurrentThreads running
        int i = 0;
        while (i < concurrency && i < threads.size()) {
            Thread.State state = threads.elementAt(i).getState();
            if (state == Thread.State.TERMINATED) {
                // Thread already terminated, remove from the queue
                threads.remove(i);
            } else if (state == Thread.State.NEW) {
                // Start the thread and move to next one
                threads.elementAt(i++).start();
            } else {
                // Thread is still running, skip over it
                i++;
            }
        }
        if (!threads.isEmpty()) {
            try {
                // Sleep for 10 seconds or until the first thread terminates
                threads.firstElement().join(10000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    } while (!threads.isEmpty());
    long t2 = System.currentTimeMillis();
    if (exceptionHappened.get())
        throw new RuntimeException("Not all jobs finished correctly");
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.print("Result size: [");
    for (long result : results) {
        System.out.print(result + ", ");
    }
    System.out.println("]");
}
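The example above uses the Vector<Thread> itself as the work queue and drives the outer do/while loop with isEmpty(): threads are started in batches, removed once terminated, and the loop ends when the queue drains. A stripped-down sketch of just that throttling pattern, with placeholder thread bodies and a made-up concurrency limit instead of the SpateDB query code, could look like this:

import java.util.Vector;

public class ThrottledThreads {
    public static void main(String[] args) throws InterruptedException {
        int concurrency = 2; // at most this many threads run at once (placeholder value)
        final Vector<Thread> threads = new Vector<Thread>();
        for (int job = 0; job < 5; job++) {
            final int id = job;
            threads.add(new Thread() {
                @Override
                public void run() {
                    System.out.println("job " + id + " running");
                }
            });
        }

        do {
            int i = 0;
            while (i < concurrency && i < threads.size()) {
                Thread.State state = threads.elementAt(i).getState();
                if (state == Thread.State.TERMINATED) {
                    threads.remove(i);              // finished: drop it from the queue
                } else if (state == Thread.State.NEW) {
                    threads.elementAt(i++).start(); // not started yet: start it
                } else {
                    i++;                            // still running: skip over it
                }
            }
            if (!threads.isEmpty()) {
                threads.firstElement().join(100);   // wait briefly for the head of the queue
            }
        } while (!threads.isEmpty());               // done once every thread has been removed
    }
}

Because completed threads are removed from the Vector, isEmpty() doubles as the termination test for the whole batch.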
From source file:edu.umn.cs.sthadoop.operations.HSPKNNQ.java
public static void main(String[] args) throws IOException {
    //./hadoop jar /export/scratch/louai/idea-stHadoop/st-hadoop-uber.jar pknn /mntgIndex/yyyy-MM-dd/2017-08-03 /pknn k:2 point:-78.9659,35.7998 shape:edu.umn.cs.sthadoop.mntg.STPointMntg -overwrite
    // args = new String[8];
    // args[0] = "/export/scratch/mntgData/mntgIndex";
    // args[1] = "/export/scratch/mntgData/pknn";
    // args[2] = "-overwrite";
    // args[3] = "k:10";
    // args[4] = "point:-78.9659063204100,35.7903907684998";
    // args[5] = "shape:edu.umn.cs.sthadoop.trajectory.STPointTrajectory";
    // args[6] = "interval:2017-08-03,2017-08-04";
    // args[7] = "-overwrite";

    final OperationsParams params = new OperationsParams(new GenericOptionsParser(args));
    Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length > 1 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }
    if (params.get("interval") == null) {
        System.err.println("Temporal range missing");
        printUsage();
        System.exit(1);
    }
    TextSerializable inObj = params.getShape("shape");
    if (!(inObj instanceof STPoint)) {
        if (!(inObj instanceof STRectangle)) {
            LOG.error("Shape is not instance of STPoint or instance of STRectangle");
            printUsage();
            System.exit(1);
        }
    }
    // path to the spatio-temporal index.
    List<Path> STPaths = new ArrayList<Path>();
    try {
        STPaths = STRangeQuery.getIndexedSlices(params);
    } catch (Exception e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }

    for (Path input : STPaths) {
        final Path inputFile = input;
        int count = params.getInt("count", 1);
        double closeness = params.getFloat("closeness", -1.0f);
        final Point[] queryPoints = closeness < 0 ? params.getShapes("point", new Point()) : new Point[count];
        final FileSystem fs = inputFile.getFileSystem(params);
        final int k = params.getInt("k", 1);
        int concurrency = params.getInt("concurrency", 100);
        if (k == 0) {
            LOG.warn("k = 0");
        }
        if (queryPoints.length == 0) {
            printUsage();
            throw new RuntimeException("Illegal arguments");
        }
        final Path outputPath = paths.length > 1 ? new Path(paths[1].toUri() + "-" + input.getName()) : null;

        if (closeness >= 0) {
            // Get query points according to its closeness to grid intersections
            GlobalIndex<Partition> gindex = SpatialSite.getGlobalIndex(fs, inputFile);
            long seed = params.getLong("seed", System.currentTimeMillis());
            Random random = new Random(seed);
            for (int i = 0; i < count; i++) {
                int i_block = random.nextInt(gindex.size());
                int direction = random.nextInt(4);
                // Generate a point in the given direction
                // Get center point (x, y)
                Iterator<Partition> iterator = gindex.iterator();
                while (i_block-- >= 0)
                    iterator.next();
                Partition partition = iterator.next();
                double cx = (partition.x1 + partition.x2) / 2;
                double cy = (partition.y1 + partition.y2) / 2;
                double cw = partition.x2 - partition.x1;
                double ch = partition.y2 - partition.y1;
                int signx = ((direction & 1) == 0) ? 1 : -1;
                int signy = ((direction & 2) == 1) ? 1 : -1;
                double x = cx + cw * closeness / 2 * signx;
                double y = cy + ch * closeness / 2 * signy;
                queryPoints[i] = new Point(x, y);
            }
        }

        final BooleanWritable exceptionHappened = new BooleanWritable();

        Thread.UncaughtExceptionHandler h = new Thread.UncaughtExceptionHandler() {
            public void uncaughtException(Thread th, Throwable ex) {
                ex.printStackTrace();
                exceptionHappened.set(true);
            }
        };

        // Run each query in a separate thread
        final Vector<Thread> threads = new Vector<Thread>();
        for (int i = 0; i < queryPoints.length; i++) {
            Thread thread = new Thread() {
                @Override
                public void run() {
                    try {
                        Point query_point = queryPoints[threads.indexOf(this)];
                        OperationsParams newParams = new OperationsParams(params);
                        OperationsParams.setShape(newParams, "point", query_point);
                        Job job = knn(inputFile, outputPath, params);
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    } catch (ClassNotFoundException e) {
                        e.printStackTrace();
                    }
                }
            };
            thread.setUncaughtExceptionHandler(h);
            threads.add(thread);
        }

        long t1 = System.currentTimeMillis();
        do {
            // Ensure that there is at least MaxConcurrentThreads running
            int i = 0;
            while (i < concurrency && i < threads.size()) {
                Thread.State state = threads.elementAt(i).getState();
                if (state == Thread.State.TERMINATED) {
                    // Thread already terminated, remove from the queue
                    threads.remove(i);
                } else if (state == Thread.State.NEW) {
                    // Start the thread and move to next one
                    threads.elementAt(i++).start();
                } else {
                    // Thread is still running, skip over it
                    i++;
                }
            }
            if (!threads.isEmpty()) {
                try {
                    // Sleep for 10 seconds or until the first thread terminates
                    threads.firstElement().join(10000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        } while (!threads.isEmpty());
        long t2 = System.currentTimeMillis();
        if (exceptionHappened.get())
            throw new RuntimeException("Not all jobs finished correctly");
        System.out.println("Time for " + queryPoints.length + " jobs is " + (t2 - t1) + " millis");
        System.out.println("Total iterations: " + TotalIterations);
    }
}
From source file:edu.umn.cs.spatialHadoop.operations.RangeQuery.java
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
    final OperationsParams params = new OperationsParams(new GenericOptionsParser(args));
    final Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length >= 2 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }
    if (params.get("rect") == null) {
        System.err.println("You must provide a query range");
        printUsage();
        System.exit(1);
    }
    final Path inPath = params.getInputPath();
    final Path outPath = params.getOutputPath();
    final Rectangle[] queryRanges = params.getShapes("rect", new Rectangle());

    // All running jobs
    final Vector<Long> resultsCounts = new Vector<Long>();
    Vector<Job> jobs = new Vector<Job>();
    Vector<Thread> threads = new Vector<Thread>();

    long t1 = System.currentTimeMillis();
    for (int i = 0; i < queryRanges.length; i++) {
        final OperationsParams queryParams = new OperationsParams(params);
        OperationsParams.setShape(queryParams, "rect", queryRanges[i]);
        if (OperationsParams.isLocal(new JobConf(queryParams), inPath)) {
            // Run in local mode
            final Rectangle queryRange = queryRanges[i];
            final Shape shape = queryParams.getShape("shape");
            final Path output = outPath == null ? null
                    : (queryRanges.length == 1 ? outPath : new Path(outPath, String.format("%05d", i)));
            Thread thread = new Thread() {
                @Override
                public void run() {
                    FSDataOutputStream outFile = null;
                    final byte[] newLine = System.getProperty("line.separator", "\n").getBytes();
                    try {
                        ResultCollector<Shape> collector = null;
                        if (output != null) {
                            FileSystem outFS = output.getFileSystem(queryParams);
                            final FSDataOutputStream foutFile = outFile = outFS.create(output);
                            collector = new ResultCollector<Shape>() {
                                final Text tempText = new Text2();

                                @Override
                                public synchronized void collect(Shape r) {
                                    try {
                                        tempText.clear();
                                        r.toText(tempText);
                                        foutFile.write(tempText.getBytes(), 0, tempText.getLength());
                                        foutFile.write(newLine);
                                    } catch (IOException e) {
                                        e.printStackTrace();
                                    }
                                }
                            };
                        } else {
                            outFile = null;
                        }
                        long resultCount = rangeQueryLocal(inPath, queryRange, shape, queryParams, collector);
                        resultsCounts.add(resultCount);
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    } finally {
                        try {
                            if (outFile != null)
                                outFile.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
            };
            thread.start();
            threads.add(thread);
        } else {
            // Run in MapReduce mode
            queryParams.setBoolean("background", true);
            Job job = rangeQueryMapReduce(inPath, outPath, queryParams);
            jobs.add(job);
        }
    }

    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob);
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            System.exit(1);
        }
        Counters counters = firstJob.getCounters();
        Counter outputRecordCounter = counters.findCounter(Task.Counter.MAP_OUTPUT_RECORDS);
        resultsCounts.add(outputRecordCounter.getValue());
        jobs.remove(0);
    }
    while (!threads.isEmpty()) {
        try {
            Thread thread = threads.firstElement();
            thread.join();
            threads.remove(0);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long t2 = System.currentTimeMillis();
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.println("Results counts: " + resultsCounts);
}
From source file:edu.umn.cs.sthadoop.trajectory.TrajectoryOverlap.java
public static void main(String[] args) throws Exception {
    // args = new String[8];
    // args[0] = "/export/scratch/mntgData/geolifeGPS/geolife_Trajectories_1.3/HDFS/index_geolife";
    // args[1] = "/export/scratch/mntgData/geolifeGPS/geolife_Trajectories_1.3/HDFS/knn-dis-result";
    // args[2] = "shape:edu.umn.cs.sthadoop.trajectory.GeolifeTrajectory";
    // args[3] = "interval:2008-05-01,2008-05-30";
    // args[4] = "time:month";
    // args[5] = "traj:39.9119983,116.606835;39.9119783,116.6065483;39.9119599,116.6062649;39.9119416,116.6059899;39.9119233,116.6057282;39.9118999,116.6054783;39.9118849,116.6052366;39.9118666,116.6050099;39.91185,116.604775;39.9118299,116.604525;39.9118049,116.6042649;39.91177,116.6040166;39.9117516,116.6037583;39.9117349,116.6035066;39.9117199,116.6032666;39.9117083,116.6030232;39.9117,116.6027566;39.91128,116.5969383;39.9112583,116.5966766;39.9112383,116.5964232;39.9112149,116.5961699;39.9111933,116.5959249;39.9111716,116.5956883";
    // args[6] = "-overwrite";
    // args[7] = "-local";//"-no-local";

    final OperationsParams params = new OperationsParams(new GenericOptionsParser(args));
    final Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length >= 2 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }
    if (params.get("traj") == null) {
        System.err.println("Trajectory query is missing");
        printUsage();
        System.exit(1);
    }
    // Invoke method to compute the trajectory MBR.
    String rectangle = getTrajectoryRectangle(params.get("traj"));
    params.set("rect", rectangle);
    if (params.get("rect") == null) {
        System.err.println("You must provide a Trajectory Query");
        printUsage();
        System.exit(1);
    }
    if (params.get("interval") == null) {
        System.err.println("Temporal range missing");
        printUsage();
        System.exit(1);
    }
    TextSerializable inObj = params.getShape("shape");
    if (!(inObj instanceof STPoint)) {
        LOG.error("Shape is not instance of STPoint");
        printUsage();
        System.exit(1);
    }
    // Get spatio-temporal slices.
    List<Path> STPaths = getIndexedSlices(params);
    final Path outPath = params.getOutputPath();
    final Rectangle[] queryRanges = params.getShapes("rect", new Rectangle());

    // All running jobs
    final Vector<Long> resultsCounts = new Vector<Long>();
    Vector<Job> jobs = new Vector<Job>();
    Vector<Thread> threads = new Vector<Thread>();

    long t1 = System.currentTimeMillis();
    for (Path stPath : STPaths) {
        final Path inPath = stPath;
        for (int i = 0; i < queryRanges.length; i++) {
            final OperationsParams queryParams = new OperationsParams(params);
            OperationsParams.setShape(queryParams, "rect", queryRanges[i]);
            if (OperationsParams.isLocal(new JobConf(queryParams), inPath)) {
                // Run in local mode
                final Rectangle queryRange = queryRanges[i];
                final Shape shape = queryParams.getShape("shape");
                final Path output = outPath == null ? null
                        : (queryRanges.length == 1 ? outPath : new Path(outPath, String.format("%05d", i)));
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        FSDataOutputStream outFile = null;
                        final byte[] newLine = System.getProperty("line.separator", "\n").getBytes();
                        try {
                            ResultCollector<Shape> collector = null;
                            if (output != null) {
                                FileSystem outFS = output.getFileSystem(queryParams);
                                final FSDataOutputStream foutFile = outFile = outFS.create(output);
                                collector = new ResultCollector<Shape>() {
                                    final Text tempText = new Text2();

                                    @Override
                                    public synchronized void collect(Shape r) {
                                        try {
                                            tempText.clear();
                                            r.toText(tempText);
                                            foutFile.write(tempText.getBytes(), 0, tempText.getLength());
                                            foutFile.write(newLine);
                                        } catch (IOException e) {
                                            e.printStackTrace();
                                        }
                                    }
                                };
                            } else {
                                outFile = null;
                            }
                            long resultCount = rangeQueryLocal(inPath, queryRange, shape, queryParams, collector);
                            resultsCounts.add(resultCount);
                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } finally {
                            try {
                                if (outFile != null)
                                    outFile.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                };
                thread.start();
                threads.add(thread);
            } else {
                // Run in MapReduce mode
                Path outTempPath = outPath == null ? null
                        : new Path(outPath, String.format("%05d", i) + "-" + inPath.getName());
                queryParams.setBoolean("background", true);
                Job job = rangeQueryMapReduce(inPath, outTempPath, queryParams);
                jobs.add(job);
            }
        }
    }

    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob);
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            System.exit(1);
        }
        Counters counters = firstJob.getCounters();
        Counter outputRecordCounter = counters.findCounter(Task.Counter.MAP_OUTPUT_RECORDS);
        resultsCounts.add(outputRecordCounter.getValue());
        jobs.remove(0);
    }
    while (!threads.isEmpty()) {
        try {
            Thread thread = threads.firstElement();
            thread.join();
            threads.remove(0);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long t2 = System.currentTimeMillis();
    System.out.println("QueryPlan:");
    for (Path stPath : STPaths) {
        System.out.println(stPath.getName());
    }
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.println("Results counts: " + resultsCounts);
}
From source file:edu.umn.cs.sthadoop.operations.STRangeQuery.java
public static void main(String[] args) throws Exception {
    // args = new String[7];
    // args[0] = "/home/louai/nyc-taxi/yellowIndex";
    // args[1] = "/home/louai/nyc-taxi/resultSTRQ";
    // args[2] = "shape:edu.umn.cs.sthadoop.core.STPoint";
    // args[3] = "rect:-74.98451232910156,35.04014587402344,-73.97936248779295,41.49399566650391";
    // args[4] = "interval:2015-01-01,2015-01-02";
    // args[5] = "-overwrite";
    // args[6] = "-no-local";

    // Query for test with output
    // args = new String[6];
    // args[0] = "/home/louai/nyc-taxi/yellowIndex";
    // args[1] = "shape:edu.umn.cs.sthadoop.core.STPoint";
    // args[2] = "rect:-74.98451232910156,35.04014587402344,-73.97936248779295,41.49399566650391";
    // args[3] = "interval:2015-01-01,2015-01-03";
    // args[4] = "-overwrite";
    // args[5] = "-no-local";

    final OperationsParams params = new OperationsParams(new GenericOptionsParser(args));
    final Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length >= 2 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }
    if (params.get("rect") == null) {
        String x1 = "-" + Double.toString(Double.MAX_VALUE);
        String y1 = "-" + Double.toString(Double.MAX_VALUE);
        String x2 = Double.toString(Double.MAX_VALUE);
        String y2 = Double.toString(Double.MAX_VALUE);
        System.out.println(x1 + "," + y1 + "," + x2 + "," + y2);
        params.set("rect", x1 + "," + y1 + "," + x2 + "," + y2);
        // System.err.println("You must provide a query range");
        // printUsage();
        // System.exit(1);
    }
    if (params.get("interval") == null) {
        System.err.println("Temporal range missing");
        printUsage();
        System.exit(1);
    }
    TextSerializable inObj = params.getShape("shape");
    if (!(inObj instanceof STPoint) && !(inObj instanceof STRectangle)) {
        LOG.error("Shape is not instance of STPoint or STRectangle");
        printUsage();
        System.exit(1);
    }
    // Get spatio-temporal slices.
    List<Path> STPaths = getIndexedSlices(params);
    final Path outPath = params.getOutputPath();
    final Rectangle[] queryRanges = params.getShapes("rect", new Rectangle());

    // All running jobs
    final Vector<Long> resultsCounts = new Vector<Long>();
    Vector<Job> jobs = new Vector<Job>();
    Vector<Thread> threads = new Vector<Thread>();

    long t1 = System.currentTimeMillis();
    for (Path stPath : STPaths) {
        final Path inPath = stPath;
        for (int i = 0; i < queryRanges.length; i++) {
            final OperationsParams queryParams = new OperationsParams(params);
            OperationsParams.setShape(queryParams, "rect", queryRanges[i]);
            if (OperationsParams.isLocal(new JobConf(queryParams), inPath)) {
                // Run in local mode
                final Rectangle queryRange = queryRanges[i];
                final Shape shape = queryParams.getShape("shape");
                final Path output = outPath == null ? null
                        : (queryRanges.length == 1 ? outPath : new Path(outPath, String.format("%05d", i)));
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        FSDataOutputStream outFile = null;
                        final byte[] newLine = System.getProperty("line.separator", "\n").getBytes();
                        try {
                            ResultCollector<Shape> collector = null;
                            if (output != null) {
                                FileSystem outFS = output.getFileSystem(queryParams);
                                final FSDataOutputStream foutFile = outFile = outFS.create(output);
                                collector = new ResultCollector<Shape>() {
                                    final Text tempText = new Text2();

                                    @Override
                                    public synchronized void collect(Shape r) {
                                        try {
                                            tempText.clear();
                                            r.toText(tempText);
                                            foutFile.write(tempText.getBytes(), 0, tempText.getLength());
                                            foutFile.write(newLine);
                                        } catch (IOException e) {
                                            e.printStackTrace();
                                        }
                                    }
                                };
                            } else {
                                outFile = null;
                            }
                            long resultCount = rangeQueryLocal(inPath, queryRange, shape, queryParams, collector);
                            resultsCounts.add(resultCount);
                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } finally {
                            try {
                                if (outFile != null)
                                    outFile.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                };
                thread.start();
                threads.add(thread);
            } else {
                // Run in MapReduce mode
                Path outTempPath = outPath == null ? null
                        : new Path(outPath, String.format("%05d", i) + "-" + inPath.getName());
                queryParams.setBoolean("background", true);
                Job job = rangeQueryMapReduce(inPath, outTempPath, queryParams);
                jobs.add(job);
            }
        }
    }

    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob);
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            System.exit(1);
        }
        Counters counters = firstJob.getCounters();
        Counter outputRecordCounter = counters.findCounter(Task.Counter.MAP_OUTPUT_RECORDS);
        resultsCounts.add(outputRecordCounter.getValue());
        jobs.remove(0);
    }
    while (!threads.isEmpty()) {
        try {
            Thread thread = threads.firstElement();
            thread.join();
            threads.remove(0);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long t2 = System.currentTimeMillis();
    System.out.println("QueryPlan:");
    for (Path stPath : STPaths) {
        System.out.println(stPath.getName());
    }
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.println("Results counts: " + resultsCounts);
}
From source file:org.apache.cactus.internal.util.CookieUtil.java
/**
 * @return the cookie string which will be added as a HTTP "Cookie" header
 *         or null if no cookie has been set
 * @param theRequest the request containing all data to pass to the server
 *        redirector.
 * @param theUrl the URL to connect to
 * @throws ClientException if an error occurred when creating the cookie
 *         string
 */
public static String getCookieString(WebRequest theRequest, URL theUrl) throws ClientException {
    // If no Cookies, then exit
    Vector cookies = theRequest.getCookies();
    if (!cookies.isEmpty()) {
        // transform the Cactus cookies into HttpClient cookies
        org.apache.commons.httpclient.Cookie[] httpclientCookies = CookieUtil
                .createHttpClientCookies(theRequest, theUrl);

        // and create the cookie header to send
        Header cookieHeader = createCookieHeader(CookieUtil.getCookieDomain(theRequest, theUrl.getHost()),
                CookieUtil.getCookiePath(theRequest, theUrl.getFile()), httpclientCookies);

        return cookieHeader.getValue();
    }
    return null;
}
From source file:com.buaa.cfs.net.DNS.java
/**
 * Returns all the host names associated by the provided nameserver with the address bound
 * to the specified network interface
 *
 * @param strInterface The name of the network interface or subinterface to query (e.g. eth0 or eth0:0)
 * @param nameserver The DNS host name
 *
 * @return A string vector of all host names associated with the IPs tied to the specified interface
 *
 * @throws UnknownHostException if the given interface is invalid
 */
public static String[] getHosts(String strInterface, String nameserver) throws UnknownHostException {
    String[] ips = getIPs(strInterface);
    Vector<String> hosts = new Vector<String>();
    for (int ctr = 0; ctr < ips.length; ctr++) {
        try {
            hosts.add(reverseDns(InetAddress.getByName(ips[ctr]), nameserver));
        } catch (UnknownHostException ignored) {
        } catch (NamingException ignored) {
        }
    }

    if (hosts.isEmpty()) {
        LOG.warn("Unable to determine hostname for interface " + strInterface);
        return new String[] { cachedHostname };
    } else {
        return hosts.toArray(new String[hosts.size()]);
    }
}
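In this example isEmpty() decides between returning a fallback hostname and converting the collected results with toArray(). A generic sketch of that collect-then-check idiom, using a hypothetical lookup function and fallback value in place of the reverse-DNS call, might look like this:

import java.util.Vector;

public class CollectOrFallback {
    // Hypothetical lookup that only resolves some inputs
    private static String lookup(String address) {
        return address.startsWith("10.") ? "host-" + address : null;
    }

    public static String[] resolveAll(String[] addresses) {
        Vector<String> names = new Vector<String>();
        for (String address : addresses) {
            String name = lookup(address);
            if (name != null) {
                names.add(name); // keep only successful lookups
            }
        }
        // Nothing resolved: return a single fallback entry instead of an empty array
        if (names.isEmpty()) {
            return new String[] { "localhost" };
        }
        return names.toArray(new String[names.size()]);
    }

    public static void main(String[] args) {
        System.out.println(String.join(", ", resolveAll(new String[] { "10.0.0.1", "192.168.0.1" })));
        System.out.println(String.join(", ", resolveAll(new String[] { "192.168.0.1" })));
    }
}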
From source file:org.sandrob.android.net.http.HttpsConnection.java
/**
 * Gets the common name from the given X509Name.
 *
 * @param name the X.509 name
 * @return the common name, null if not found
 */
private static String getCommonName(X509Name name) {
    if (name == null) {
        return null;
    }
    Vector<?> values = name.getValues(X509Name.CN);
    if (values == null || values.isEmpty()) {
        return null;
    }
    return values.get(0).toString();
}
From source file:edu.umn.cs.spatialHadoop.nasa.SpatioTemporalAggregateQuery.java
/**
 * Performs a spatio-temporal aggregate query on an indexed directory
 * @param inFile
 * @param params
 * @throws ParseException
 * @throws IOException
 */
public static AggregateQuadTree.Node aggregateQuery(Path inFile, OperationsParams params)
        throws ParseException, IOException {
    // 1- Run a temporal filter step to find all matching temporal partitions
    Vector<Path> matchingPartitions = new Vector<Path>();
    // List of time ranges to check. Initially it contains one range as
    // specified by the user. Eventually, it can be split into at most two
    // partitions if partially matched by a partition.
    Vector<TimeRange> temporalRanges = new Vector<TimeRange>();
    temporalRanges.add(new TimeRange(params.get("time")));
    Path[] temporalIndexes = new Path[] { new Path(inFile, "yearly"), new Path(inFile, "monthly"),
            new Path(inFile, "daily") };
    int index = 0;
    final FileSystem fs = inFile.getFileSystem(params);
    while (index < temporalIndexes.length && !temporalRanges.isEmpty()) {
        Path indexDir = temporalIndexes[index];
        LOG.info("Checking index dir " + indexDir);
        TemporalIndex temporalIndex = new TemporalIndex(fs, indexDir);
        for (int iRange = 0; iRange < temporalRanges.size(); iRange++) {
            TimeRange range = temporalRanges.get(iRange);
            TemporalPartition[] matches = temporalIndex.selectContained(range.start, range.end);
            if (matches != null) {
                LOG.info("Matched " + matches.length + " partitions in " + indexDir);
                for (TemporalPartition match : matches) {
                    LOG.info("Matched temporal partition: " + match.dirName);
                    matchingPartitions.add(new Path(indexDir, match.dirName));
                }
                // Update range to remove matching part
                TemporalPartition firstMatch = matches[0];
                TemporalPartition lastMatch = matches[matches.length - 1];
                if (range.start < firstMatch.start && range.end > lastMatch.end) {
                    // Need to split the range into two
                    temporalRanges.setElementAt(new TimeRange(range.start, firstMatch.start), iRange);
                    temporalRanges.insertElementAt(new TimeRange(lastMatch.end, range.end), iRange);
                } else if (range.start < firstMatch.start) {
                    // Update range in-place
                    range.end = firstMatch.start;
                } else if (range.end > lastMatch.end) {
                    // Update range in-place
                    range.start = lastMatch.end;
                } else {
                    // Current range was completely covered. Remove it
                    temporalRanges.remove(iRange);
                }
            }
        }
        index++;
    }

    numOfTemporalPartitionsInLastQuery = matchingPartitions.size();

    // 2- Find all matching files (AggregateQuadTrees) in matching partitions
    final Rectangle spatialRange = params.getShape("rect", new Rectangle()).getMBR();
    // Convert spatialRange from lat/lng space to Sinusoidal space
    double cosPhiRad = Math.cos(spatialRange.y1 * Math.PI / 180);
    double southWest = spatialRange.x1 * cosPhiRad;
    double southEast = spatialRange.x2 * cosPhiRad;
    cosPhiRad = Math.cos(spatialRange.y2 * Math.PI / 180);
    double northWest = spatialRange.x1 * cosPhiRad;
    double northEast = spatialRange.x2 * cosPhiRad;
    spatialRange.x1 = Math.min(northWest, southWest);
    spatialRange.x2 = Math.max(northEast, southEast);
    // Convert to the h v space used by MODIS
    spatialRange.x1 = (spatialRange.x1 + 180.0) / 10.0;
    spatialRange.x2 = (spatialRange.x2 + 180.0) / 10.0;
    spatialRange.y2 = (90.0 - spatialRange.y2) / 10.0;
    spatialRange.y1 = (90.0 - spatialRange.y1) / 10.0;
    // Vertically flip because the Sinusoidal space increases to the south
    double tmp = spatialRange.y2;
    spatialRange.y2 = spatialRange.y1;
    spatialRange.y1 = tmp;
    // Find the range of cells in MODIS Sinusoidal grid overlapping the range
    final int h1 = (int) Math.floor(spatialRange.x1);
    final int h2 = (int) Math.ceil(spatialRange.x2);
    final int v1 = (int) Math.floor(spatialRange.y1);
    final int v2 = (int) Math.ceil(spatialRange.y2);
    PathFilter rangeFilter = new PathFilter() {
        @Override
        public boolean accept(Path p) {
            Matcher matcher = MODISTileID.matcher(p.getName());
            if (!matcher.matches())
                return false;
            int h = Integer.parseInt(matcher.group(1));
            int v = Integer.parseInt(matcher.group(2));
            return h >= h1 && h < h2 && v >= v1 && v < v2;
        }
    };

    final Vector<Path> allMatchingFiles = new Vector<Path>();

    for (Path matchingPartition : matchingPartitions) {
        // Select all matching files
        FileStatus[] matchingFiles = fs.listStatus(matchingPartition, rangeFilter);
        for (FileStatus matchingFile : matchingFiles) {
            allMatchingFiles.add(matchingFile.getPath());
        }
    }

    // 3- Query all matching files in parallel
    Vector<Node> threadsResults = Parallel.forEach(allMatchingFiles.size(),
            new RunnableRange<AggregateQuadTree.Node>() {
                @Override
                public Node run(int i1, int i2) {
                    Node threadResult = new AggregateQuadTree.Node();
                    for (int i_file = i1; i_file < i2; i_file++) {
                        try {
                            Path matchingFile = allMatchingFiles.get(i_file);
                            Matcher matcher = MODISTileID.matcher(matchingFile.getName());
                            matcher.matches(); // It has to match
                            int h = Integer.parseInt(matcher.group(1));
                            int v = Integer.parseInt(matcher.group(2));
                            // Clip the query region and normalize in this tile
                            Rectangle translated = spatialRange.translate(-h, -v);
                            int x1 = (int) (Math.max(translated.x1, 0) * 1200);
                            int y1 = (int) (Math.max(translated.y1, 0) * 1200);
                            int x2 = (int) (Math.min(translated.x2, 1.0) * 1200);
                            int y2 = (int) (Math.min(translated.y2, 1.0) * 1200);
                            AggregateQuadTree.Node fileResult = AggregateQuadTree.aggregateQuery(fs, matchingFile,
                                    new java.awt.Rectangle(x1, y1, (x2 - x1), (y2 - y1)));
                            threadResult.accumulate(fileResult);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                    return threadResult;
                }
            });
    AggregateQuadTree.Node finalResult = new AggregateQuadTree.Node();
    for (Node threadResult : threadsResults)
        finalResult.accumulate(threadResult);
    numOfTreesTouchesInLastRequest = allMatchingFiles.size();
    return finalResult;
}
From source file:edu.umn.cs.spatialHadoop.nasa.SpatioAggregateQueries.java
/**
 * Return all matching partitions according to a time range
 * @param inFile
 * @param params
 * @return
 * @throws ParseException
 * @throws IOException
 */
private static Vector<Path> selectTemporalPartitions(Path inFile, OperationsParams params)
        throws ParseException, IOException {
    // 1- Run a temporal filter step to find all matching temporal partitions
    Vector<Path> matchingPartitions = new Vector<Path>();
    // List of time ranges to check. Initially it contains one range as
    // specified by the user. Eventually, it can be split into at most two
    // partitions if partially matched by a partition.
    Vector<TimeRange> temporalRanges = new Vector<TimeRange>();
    System.out.println(new TimeRange(params.get("time")));
    temporalRanges.add(new TimeRange(params.get("time")));
    Path[] temporalIndexes = new Path[] { new Path(inFile, "yearly"), new Path(inFile, "monthly"),
            new Path(inFile, "daily") };
    int index = 0;
    final FileSystem fs = inFile.getFileSystem(params);
    while (index < temporalIndexes.length && !temporalRanges.isEmpty()) {
        Path indexDir = temporalIndexes[index];
        LOG.info("Checking index dir " + indexDir);
        TemporalIndex temporalIndex = new TemporalIndex(fs, indexDir);
        for (int iRange = 0; iRange < temporalRanges.size(); iRange++) {
            TimeRange range = temporalRanges.get(iRange);
            TemporalPartition[] matches = temporalIndex.selectContained(range.start, range.end);
            if (matches != null) {
                LOG.info("Matched " + matches.length + " partitions in " + indexDir);
                for (TemporalPartition match : matches) {
                    matchingPartitions.add(new Path(indexDir, match.dirName));
                }
                // Update range to remove matching part
                TemporalPartition firstMatch = matches[0];
                TemporalPartition lastMatch = matches[matches.length - 1];
                if (range.start < firstMatch.start && range.end > lastMatch.end) {
                    // Need to split the range into two
                    temporalRanges.setElementAt(new TimeRange(range.start, firstMatch.start), iRange);
                    temporalRanges.insertElementAt(new TimeRange(lastMatch.end, range.end), iRange);
                } else if (range.start < firstMatch.start) {
                    // Update range in-place
                    range.end = firstMatch.start;
                } else if (range.end > lastMatch.end) {
                    // Update range in-place
                    range.start = lastMatch.end;
                } else {
                    // Current range was completely covered. Remove it
                    temporalRanges.remove(iRange);
                }
            }
        }
        index++;
    }
    numOfTemporalPartitionsInLastQuery = matchingPartitions.size();
    return matchingPartitions;
}