List of usage examples for java.util Queue size
int size();
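As a quick orientation before the real-world examples, here is a minimal, self-contained sketch of how size() behaves (all names are illustrative):

import java.util.LinkedList;
import java.util.Queue;

public class QueueSizeDemo {
    public static void main(String[] args) {
        Queue<String> queue = new LinkedList<>();
        System.out.println(queue.size()); // 0 - a freshly created queue is empty

        queue.add("a");
        queue.add("b");
        System.out.println(queue.size()); // 2 - one entry per successful add()

        queue.poll(); // removes the head ("a")
        System.out.println(queue.size()); // 1 - poll() shrinks the queue
    }
}

Note that size() is inherited from java.util.Collection: it never blocks, and for collections with more than Integer.MAX_VALUE elements it returns Integer.MAX_VALUE.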
From source file: ch.entwine.weblounge.maven.S3DeployMojo.java
/**
 * {@inheritDoc}
 *
 * @see org.apache.maven.plugin.Mojo#execute()
 */
public void execute() throws MojoExecutionException, MojoFailureException {
    // Setup AWS S3 client
    AWSCredentials credentials = new BasicAWSCredentials(awsAccessKey, awsSecretKey);
    AmazonS3Client uploadClient = new AmazonS3Client(credentials);
    TransferManager transfers = new TransferManager(credentials);

    // Make sure key prefix does not start with a slash but has one at the end
    if (keyPrefix.startsWith("/"))
        keyPrefix = keyPrefix.substring(1);
    if (!keyPrefix.endsWith("/"))
        keyPrefix = keyPrefix + "/";

    // Keep track of how much data has been transferred
    long totalBytesTransferred = 0L;
    int items = 0;
    Queue<Upload> uploads = new LinkedBlockingQueue<Upload>();

    try {
        // Check if S3 bucket exists
        getLog().debug("Checking whether bucket " + bucket + " exists");
        if (!uploadClient.doesBucketExist(bucket)) {
            getLog().error("Desired bucket '" + bucket + "' does not exist!");
            return;
        }

        getLog().debug("Collecting files to transfer from " + resources.getDirectory());
        List<File> res = getResources();
        for (File file : res) {
            // Make path of resource relative to resources directory
            String filename = file.getName();
            String extension = FilenameUtils.getExtension(filename);
            String path = file.getPath().substring(resources.getDirectory().length());
            String key = concat("/", keyPrefix, path).substring(1);

            // Delete old file version in bucket
            getLog().debug("Removing existing object at " + key);
            uploadClient.deleteObject(bucket, key);

            // Setup meta data
            ObjectMetadata meta = new ObjectMetadata();
            meta.setCacheControl("public, max-age=" + String.valueOf(valid * 3600));

            FileInputStream fis = null;
            GZIPOutputStream gzipos = null;
            final File fileToUpload;

            if (gzip && ("js".equals(extension) || "css".equals(extension))) {
                try {
                    fis = new FileInputStream(file);
                    File gzFile = File.createTempFile(file.getName(), null);
                    gzipos = new GZIPOutputStream(new FileOutputStream(gzFile));
                    IOUtils.copy(fis, gzipos);
                    fileToUpload = gzFile;
                    meta.setContentEncoding("gzip");
                    if ("js".equals(extension))
                        meta.setContentType("text/javascript");
                    if ("css".equals(extension))
                        meta.setContentType("text/css");
                } catch (FileNotFoundException e) {
                    getLog().error(e);
                    continue;
                } catch (IOException e) {
                    getLog().error(e);
                    continue;
                } finally {
                    IOUtils.closeQuietly(fis);
                    IOUtils.closeQuietly(gzipos);
                }
            } else {
                fileToUpload = file;
            }

            // Do a random check for existing errors before starting the next upload
            if (erroneousUpload != null)
                break;

            // Create put object request
            long bytesToTransfer = fileToUpload.length();
            totalBytesTransferred += bytesToTransfer;
            PutObjectRequest request = new PutObjectRequest(bucket, key, fileToUpload);
            request.setProgressListener(new UploadListener(credentials, bucket, key, bytesToTransfer));
            request.setMetadata(meta);

            // Schedule put object request
            getLog().info("Uploading " + key + " ("
                    + FileUtils.byteCountToDisplaySize((int) bytesToTransfer) + ")");
            Upload upload = transfers.upload(request);
            uploads.add(upload);
            items++;
        }
    } catch (AmazonServiceException e) {
        getLog().error("Uploading resources failed: " + e.getMessage());
    } catch (AmazonClientException e) {
        getLog().error("Uploading resources failed: " + e.getMessage());
    }

    // Wait for uploads to be finished
    String currentUpload = null;
    try {
        Thread.sleep(1000);
        getLog().info("Waiting for " + uploads.size() + " uploads to finish...");
        while (!uploads.isEmpty()) {
            Upload upload = uploads.poll();
            currentUpload = upload.getDescription().substring("Uploading to ".length());
            if (TransferState.InProgress.equals(upload.getState()))
                getLog().debug("Waiting for upload " + currentUpload + " to finish");
            upload.waitForUploadResult();
        }
    } catch (AmazonServiceException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload);
    } catch (AmazonClientException e) {
        throw new MojoExecutionException("Error while uploading " + currentUpload);
    } catch (InterruptedException e) {
        getLog().debug("Interrupted while waiting for upload to finish");
    }

    // Check for errors that happened outside of the actual uploading
    if (erroneousUpload != null) {
        throw new MojoExecutionException("Error while uploading " + erroneousUpload);
    }

    getLog().info("Deployed " + items + " files ("
            + FileUtils.byteCountToDisplaySize((int) totalBytesTransferred) + ") to s3://" + bucket);
}
From source file: io.seldon.spark.actions.GroupActionsJob.java
public static void run(CmdLineArgs cmdLineArgs) {
    long unixDays = 0;
    try {
        unixDays = JobUtils.dateToUnixDays(cmdLineArgs.input_date_string);
    } catch (ParseException e) {
        unixDays = 0;
    }
    System.out.println(String.format("--- started GroupActionsJob date[%s] unixDays[%s] ---",
            cmdLineArgs.input_date_string, unixDays));

    System.out.println("Env: " + System.getenv());
    System.out.println("Properties: " + System.getProperties());

    SparkConf sparkConf = new SparkConf().setAppName("GroupActionsJob");
    if (cmdLineArgs.debug_use_local_master) {
        System.out.println("Using 'local' master");
        sparkConf.setMaster("local");
    }
    Tuple2<String, String>[] sparkConfPairs = sparkConf.getAll();
    System.out.println("--- sparkConf ---");
    for (int i = 0; i < sparkConfPairs.length; i++) {
        Tuple2<String, String> kvPair = sparkConfPairs[i];
        System.out.println(String.format("%s:%s", kvPair._1, kvPair._2));
    }
    System.out.println("-----------------");

    JavaSparkContext jsc = new JavaSparkContext(sparkConf);
    { // setup aws access
        Configuration hadoopConf = jsc.hadoopConfiguration();
        hadoopConf.set("fs.s3.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem");
        if (cmdLineArgs.aws_access_key_id != null && !"".equals(cmdLineArgs.aws_access_key_id)) {
            hadoopConf.set("fs.s3n.awsAccessKeyId", cmdLineArgs.aws_access_key_id);
            hadoopConf.set("fs.s3n.awsSecretAccessKey", cmdLineArgs.aws_secret_access_key);
        }
    }

    // String output_path_dir = "./out/" + input_date_string + "-" + UUID.randomUUID();

    JavaRDD<String> dataSet = jsc.textFile(
            JobUtils.getSourceDirFromDate(cmdLineArgs.input_path_pattern, cmdLineArgs.input_date_string))
            .repartition(4);

    final ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    final String single_client = cmdLineArgs.single_client;

    if (single_client != null) {
        Function<String, Boolean> clientFilter = new Function<String, Boolean>() {
            @Override
            public Boolean call(String t) throws Exception {
                ActionData actionData = JobUtils.getActionDataFromActionLogLine(objectMapper, t);
                return ((actionData.client != null) && (actionData.client.equals(single_client)));
            }
        };
        dataSet = dataSet.filter(clientFilter);
    }

    JavaPairRDD<String, ActionData> pairs = dataSet.mapToPair(new PairFunction<String, String, ActionData>() {
        @Override
        public Tuple2<String, ActionData> call(String t) throws Exception {
            ActionData actionData = JobUtils.getActionDataFromActionLogLine(objectMapper, t);
            // String key = (actionData.userid == 0) ? "__no_userid__" : actionData.client;
            String key = actionData.client;
            return new Tuple2<String, ActionData>(key, actionData);
        }
    }).persist(StorageLevel.MEMORY_AND_DISK());

    List<String> clientList = pairs.keys().distinct().collect();

    Queue<ClientDetail> clientDetailQueue = new PriorityQueue<ClientDetail>(30, new Comparator<ClientDetail>() {
        @Override
        public int compare(ClientDetail o1, ClientDetail o2) {
            if (o1.itemCount > o2.itemCount) {
                return -1;
            } else if (o1.itemCount < o2.itemCount) {
                return 1;
            }
            return 0;
        }
    });
    Queue<ClientDetail> clientDetailZeroQueue = new PriorityQueue<ClientDetail>(30,
            new Comparator<ClientDetail>() {
                @Override
                public int compare(ClientDetail o1, ClientDetail o2) {
                    if (o1.itemCount > o2.itemCount) {
                        return -1;
                    } else if (o1.itemCount < o2.itemCount) {
                        return 1;
                    }
                    return 0;
                }
            });

    System.out.println("Client list " + clientList.toString());
    for (String client : clientList) {
        if (client != null) {
            System.out.println("looking at client " + client);
            final String currentClient = client;
            JavaPairRDD<String, ActionData> filtered_by_client = pairs
                    .filter(new Function<Tuple2<String, ActionData>, Boolean>() {
                        @Override
                        public Boolean call(Tuple2<String, ActionData> v1) throws Exception {
                            if (currentClient.equalsIgnoreCase(v1._1)) {
                                return Boolean.TRUE;
                            } else {
                                return Boolean.FALSE;
                            }
                        }
                    });
            JavaPairRDD<String, ActionData> nonZeroUserIds = filtered_by_client
                    .filter(new Function<Tuple2<String, ActionData>, Boolean>() {
                        @Override
                        public Boolean call(Tuple2<String, ActionData> v1) throws Exception {
                            if (v1._2.userid == 0) {
                                return Boolean.FALSE;
                            } else {
                                return Boolean.TRUE;
                            }
                        }
                    });
            JavaPairRDD<String, Integer> userIdLookupRDD = nonZeroUserIds
                    .mapToPair(new PairFunction<Tuple2<String, ActionData>, String, Integer>() {
                        @Override
                        public Tuple2<String, Integer> call(Tuple2<String, ActionData> t) throws Exception {
                            String key = currentClient + "_" + t._2.client_userid;
                            return new Tuple2<String, Integer>(key, t._2.userid);
                        }
                    });
            Map<String, Integer> userIdLookupMap = userIdLookupRDD.collectAsMap();
            Map<String, Integer> userIdLookupMap_wrapped = new HashMap<String, Integer>(userIdLookupMap);
            final Broadcast<Map<String, Integer>> broadcastVar = jsc.broadcast(userIdLookupMap_wrapped);

            JavaRDD<String> json_only_with_zeros = filtered_by_client
                    .map(new Function<Tuple2<String, ActionData>, String>() {
                        @Override
                        public String call(Tuple2<String, ActionData> v1) throws Exception {
                            Map<String, Integer> m = broadcastVar.getValue();
                            ActionData actionData = v1._2;
                            if (actionData.userid == 0) {
                                String key = currentClient + "_" + actionData.client_userid;
                                if (m.containsKey(key)) {
                                    actionData.userid = m.get(key);
                                } else {
                                    return "";
                                }
                            }
                            String json = JobUtils.getJsonFromActionData(actionData);
                            return json;
                        }
                    });

            JavaRDD<String> json_only = json_only_with_zeros.filter(new Function<String, Boolean>() {
                @Override
                public Boolean call(String v1) throws Exception {
                    return (v1.length() == 0) ? Boolean.FALSE : Boolean.TRUE;
                }
            });

            String outputPath = getOutputPath(cmdLineArgs.output_path_dir, unixDays, client);
            if (cmdLineArgs.gzip_output) {
                json_only.saveAsTextFile(outputPath, org.apache.hadoop.io.compress.GzipCodec.class);
            } else {
                json_only.saveAsTextFile(outputPath);
            }
            long json_only_count = json_only.count();
            clientDetailZeroQueue
                    .add(new ClientDetail(currentClient, json_only_with_zeros.count() - json_only_count));
            clientDetailQueue.add(new ClientDetail(currentClient, json_only_count));
        } else
            System.out.println("Found null client!");
    }

    System.out.println("- Client Action (Zero Userid) Count -");
    while (clientDetailZeroQueue.size() != 0) {
        GroupActionsJob.ClientDetail clientDetail = clientDetailZeroQueue.remove();
        System.out.println(String.format("%s: %d", clientDetail.client, clientDetail.itemCount));
    }

    System.out.println("- Client Action Count -");
    while (clientDetailQueue.size() != 0) {
        GroupActionsJob.ClientDetail clientDetail = clientDetailQueue.remove();
        System.out.println(String.format("%s: %d", clientDetail.client, clientDetail.itemCount));
    }

    jsc.stop();
    System.out.println(String.format("--- finished GroupActionsJob date[%s] unixDays[%s] ---",
            cmdLineArgs.input_date_string, unixDays));
}
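The two reporting loops at the end of this job drain a PriorityQueue in priority order, using size() != 0 as the loop guard. A minimal sketch of that pattern (ClientDetail is replaced by a plain long[] to keep it self-contained):

import java.util.PriorityQueue;
import java.util.Queue;

public class RankedDrainDemo {
    public static void main(String[] args) {
        // Mirrors the descending comparator above: larger counts rank first
        Queue<long[]> counts = new PriorityQueue<>(30, (a, b) -> Long.compare(b[0], a[0]));
        counts.add(new long[] { 5 });
        counts.add(new long[] { 42 });
        counts.add(new long[] { 7 });

        // size() != 0 is equivalent to !isEmpty(); each remove() takes
        // the current head, i.e. the largest remaining count
        while (counts.size() != 0) {
            System.out.println(counts.remove()[0]); // prints 42, 7, 5
        }
    }
}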
From source file: at.alladin.rmbt.statisticServer.OpenTestSearchResource.java
@Get("json") public String request(final String entity) throws JSONException { addAllowOrigin();//w ww. j a va 2s .co m //this are all allowed fields in the query //for the conversion query-fieldname to db-fieldname //please take a look at formatWhereClause(); Map<String, FieldType> allowedFields = new HashMap<>(); allowedFields.put("download_kbit", FieldType.LONG); allowedFields.put("download_kbit[]", FieldType.LONG); allowedFields.put("upload_kbit", FieldType.LONG); allowedFields.put("upload_kbit[]", FieldType.LONG); allowedFields.put("ping_ms", FieldType.DOUBLE); allowedFields.put("ping_ms[]", FieldType.DOUBLE); allowedFields.put("time", FieldType.DATE); allowedFields.put("time[]", FieldType.DATE); allowedFields.put("zip_code", FieldType.LONG); allowedFields.put("zip_code[]", FieldType.LONG); allowedFields.put("cat_technology", FieldType.STRING); allowedFields.put("cat_technology[]", FieldType.STRING); allowedFields.put("client_version", FieldType.STRING); allowedFields.put("client_version[]", FieldType.STRING); allowedFields.put("model", FieldType.STRING); allowedFields.put("model[]", FieldType.STRING); allowedFields.put("network_name", FieldType.STRING); allowedFields.put("network_name[]", FieldType.STRING); allowedFields.put("network_type", FieldType.STRING); allowedFields.put("network_type[]", FieldType.STRING); allowedFields.put("platform", FieldType.STRING); allowedFields.put("platform[]", FieldType.STRING); allowedFields.put("signal_strength", FieldType.LONG); allowedFields.put("signal_strength[]", FieldType.LONG); allowedFields.put("open_uuid", FieldType.UUID); allowedFields.put("long", FieldType.DOUBLE); allowedFields.put("long[]", FieldType.DOUBLE); allowedFields.put("lat", FieldType.DOUBLE); allowedFields.put("lat[]", FieldType.DOUBLE); allowedFields.put("mobile_provider_name", FieldType.STRING); allowedFields.put("mobile_provider_name[]", FieldType.STRING); allowedFields.put("provider_name", FieldType.STRING); allowedFields.put("provider_name[]", FieldType.STRING); allowedFields.put("sim_mcc_mnc", FieldType.STRING); allowedFields.put("sim_mcc_mnc[]", FieldType.STRING); allowedFields.put("sim_country", FieldType.STRING); allowedFields.put("sim_country[]", FieldType.STRING); allowedFields.put("asn", FieldType.LONG); allowedFields.put("asn[]", FieldType.LONG); allowedFields.put("network_country", FieldType.STRING); allowedFields.put("network_country[]", FieldType.STRING); allowedFields.put("country_geoip", FieldType.STRING); allowedFields.put("country_geoip[]", FieldType.STRING); allowedFields.put("loc_accuracy", FieldType.LONG); allowedFields.put("loc_accuracy[]", FieldType.LONG); allowedFields.put("public_ip_as_name", FieldType.STRING); allowedFields.put("timestamp", FieldType.IGNORE); //for forcing no-cache allowedFields.put("_", FieldType.IGNORE); //jQuery no-cache standard allowedFields.put("sender", FieldType.IGNORE); //allowedFields.put("ip_anonym", FieldType.STRING); //allowedFields.put("ip_anonym[]", FieldType.STRING); allowedFields.put("implausible", FieldType.BOOLEAN); allowedFields.put("sort_by", FieldType.SORTBY); allowedFields.put("sort_order", FieldType.SORTORDER); allowedFields.put("cursor", FieldType.LONG); allowedFields.put("max_results", FieldType.LONG); allowedFields.put("ip_version", FieldType.LONG); //Values for the database Queue<Map.Entry<String, FieldType>> searchValues = new LinkedList<>(); String where_query = ""; String orderClause = ""; final JSONArray invalidElements = new JSONArray(); final JSONObject response = new JSONObject(); final Form 
getParameters = getRequest().getResourceRef().getQueryAsForm(); String sortBy = ""; String sortOrder = ""; boolean hasRestrictedField = false; for (String attr : getParameters.getNames()) { //check if attribute is allowed if (!allowedFields.containsKey(attr)) { invalidElements.put(attr); continue; } if (hiddenFields.contains(attr)) { hasRestrictedField = true; } //check if value for the attribute is correct //first, check if the attribute is an array String[] values = getParameters.getValuesArray(attr); for (String value : values) { boolean negate = false; if (value.startsWith("!") && value.length() > 0) { negate = true; value = value.substring(1); } FieldType type = allowedFields.get(attr); //do some basic sanity checks for the given parameters switch (type) { case STRING: if (value.isEmpty()) { invalidElements.put(attr); continue; } //allow using wildcard '*' instead of sql '%' value = value.replace('*', '%'); //allow using wildcard '?' instead of sql '_' value = value.replace('?', '_'); where_query += formatWhereClause(attr, value, negate, type, searchValues); break; case DATE: String comperatorDate = "="; if (value.startsWith(">") || value.startsWith("<")) { comperatorDate = value.substring(0, 1); value = value.substring(1); } if (value.isEmpty() || !isDouble(value)) { //try parsing the date long v = parseDate(value); if (v == -1) { invalidElements.put(attr); continue; } //date can be parsed => assign new value value = Long.toString(v); } long v = Long.parseLong(value); value = Long.toString(v); where_query += formatWhereClause(attr, value, comperatorDate, negate, type, searchValues); break; case UUID: if (value.isEmpty()) { invalidElements.put(attr); continue; } value = value.substring(1); //cut prefix try { UUID.fromString(value); } catch (IllegalArgumentException e) { invalidElements.put(attr); continue; } where_query += formatWhereClause(attr, value, "=", negate, type, searchValues); break; case BOOLEAN: if (value.isEmpty() || (!value.toLowerCase().equals("false") && !value.toLowerCase().equals("true"))) { invalidElements.put(attr); continue; } where_query += formatWhereClause(attr, value, "=", negate, type, searchValues); break; case DOUBLE: case LONG: String comperator = "="; if (value.startsWith(">") || value.startsWith("<")) { comperator = value.substring(0, 1); comperator += "="; value = value.substring(1); } if (value.isEmpty() || !isDouble(value)) { invalidElements.put(attr); continue; } where_query += formatWhereClause(attr, value, comperator, negate, type, searchValues); break; case IGNORE: break; //do nothing case SORTBY: if (value.isEmpty() || !openDataFieldsSortable.contains(value)) { invalidElements.put(attr); continue; } sortBy = value; break; case SORTORDER: //only "ASC", "DESC" are allowed //and the attribute is only allowed, if sort_by is also given if (value.isEmpty() || (!value.toUpperCase().equals("ASC") && !value.toUpperCase().equals("DESC")) || !getParameters.getNames().contains("sort_by")) { invalidElements.put(attr); continue; } sortOrder = value; break; } } } orderClause = formatOrderClause(sortBy, sortOrder); //calculate offset long offset = -1; if (getParameters.getNames().contains("cursor")) { //is always a valid LONG because it is checked with all other //parameters above offset = Long.parseLong(getParameters.getFirstValue("cursor")); } //get maximal results-parameter long maxrows = DEFAULTROWS; if (getParameters.getNames().contains("max_results")) { //is always a valid LONG because it is checked with all other //parameters above maxrows = 
Long.parseLong(getParameters.getFirstValue("max_results")); } //if there have been errors => inform the user if (invalidElements.length() > 0) { setStatus(Status.CLIENT_ERROR_BAD_REQUEST); response.put("invalid_fields", invalidElements); return response.toString(); } //if there are too many query elements (DoS-Attack?), don't let it //get to the database if (searchValues.size() > MAXQUERYFIELDS) { setStatus(Status.CLIENT_ERROR_BAD_REQUEST); response.put("invalid_fields", "field limit exceeded"); return response.toString(); } if (hasRestrictedField) { where_query += " AND publish_public_data = TRUE"; } //differentiate between histogram and search query //not a very good way... if (getRequest().getAttributes().containsKey("histogram")) { return this.getHistogram(where_query, searchValues); } else return getSearchResult(where_query, searchValues, orderClause, offset, maxrows); }
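Here size() acts as a request-validation guard: once more search values have been collected than MAXQUERYFIELDS allows, the request is rejected before it reaches the database. A minimal sketch of that guard (MAX_QUERY_FIELDS is an illustrative stand-in for the original constant):

import java.util.LinkedList;
import java.util.Queue;

public class QueryLimitDemo {
    private static final int MAX_QUERY_FIELDS = 3; // illustrative cap

    public static void main(String[] args) {
        Queue<String> searchValues = new LinkedList<>();
        for (String param : new String[] { "a=1", "b=2", "c=3", "d=4" }) {
            searchValues.add(param);
        }

        // Reject the request outright once more values were collected
        // than the configured limit, before touching the database
        if (searchValues.size() > MAX_QUERY_FIELDS) {
            System.out.println("invalid_fields: field limit exceeded");
            return;
        }
        System.out.println("query ok with " + searchValues.size() + " values");
    }
}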