List of usage examples for java.lang.Integer.decode(String)
public static Integer decode(String nm) throws NumberFormatException
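Integer.decode parses a signed decimal, hexadecimal (0x, 0X or # prefix) or octal (leading zero) string and returns the corresponding Integer; anything else throws NumberFormatException. A minimal standalone sketch of the accepted forms (not taken from the projects below):

public class IntegerDecodeDemo {
    public static void main(String[] args) {
        System.out.println(Integer.decode("42"));     // 42  (decimal)
        System.out.println(Integer.decode("0x2A"));   // 42  (hexadecimal, 0x prefix)
        System.out.println(Integer.decode("#2A"));    // 42  (hexadecimal, # prefix)
        System.out.println(Integer.decode("052"));    // 42  (octal, leading zero)
        System.out.println(Integer.decode("-0x10"));  // -16 (the sign precedes the prefix)
        // Integer.decode("2A") throws NumberFormatException: no radix prefix and 'A' is not a decimal digit
    }
}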
From source file:org.trustedanalytics.servicebroker.hive.plans.binding.HiveBindingClient.java
@Autowired
public HiveBindingClient(ExternalConfiguration configuration) throws IOException {
    ImmutableMap.Builder<String, Object> credentialsBuilder = new ImmutableMap.Builder<String, Object>()
            .putAll(configuration.hiveConfigAsMap());
    this.credentials = new Credentials(credentialsBuilder.build());
    this.hiveServerHost = configuration.getHiveServerHost();
    this.hiveServerPort = Integer.decode(configuration.getHiveServerPort());
    this.hiveConfig = configuration.hiveConfigAsHadoopConfig();
}
From source file:com.wandisco.s3hdfs.rewrite.redirect.CopyFileRedirect.java
/**
 * Sends a GET (WebHDFS OPEN) request to read the source object out of HDFS and
 * wraps the response stream so the copy can proceed.
 * It uses the URL from the original request to locate the source.
 *
 * @param nameNodeHttpHost host:port of the NameNode HTTP endpoint
 * @param userName         name of the requesting user
 * @param srcBucket        source bucket name
 * @param srcObject        source object key
 * @throws IOException
 * @throws ServletException
 */
public void sendCopy(String nameNodeHttpHost, String userName, String srcBucket, String srcObject)
        throws IOException, ServletException {
    // Set up HttpGet and get original file
    String uri = replaceSrcs(request.getRequestURI(), srcBucket, srcObject);
    String[] nnHost = nameNodeHttpHost.split(":");
    GetMethod httpGet = (GetMethod) getHttpMethod(request.getScheme(), nnHost[0],
            Integer.decode(nnHost[1]), "OPEN", userName, uri, GET);

    // Retry up to 5 times while the NameNode answers 403
    httpClient.executeMethod(httpGet);
    LOG.debug("1st response: " + httpGet.getStatusLine().toString());
    for (int i = 0; i < 5 && httpGet.getStatusCode() == 403; i++) {
        httpGet.releaseConnection();
        httpClient.executeMethod(httpGet);
        LOG.debug("Next response: " + httpGet.getStatusLine().toString());
    }
    assert httpGet.getStatusCode() == 200;
    assert request instanceof S3HdfsRequestWrapper;

    ((S3HdfsRequestWrapper) request)
            .setInputStream(new RequestStreamWrapper(httpGet.getResponseBodyAsStream()));
}
From source file:com.callidusrobotics.object.ItemData.java
Color getForeground() {
    return new TrueColor(Integer.decode(color));
}
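TrueColor is that project's own class; decode is used here because it accepts colour-style strings such as "#FF8800" or "0xFF8800" directly. A standalone sketch of the same idea using java.awt.Color instead (an assumption, purely for illustration):

import java.awt.Color;

public class ColorDecodeDemo {
    public static void main(String[] args) {
        int rgb = Integer.decode("#FF8800");  // 16746496 -- '#' is treated as a hex prefix
        Color foreground = new Color(rgb);    // java.awt.Color stands in for TrueColor here
        System.out.println(foreground);       // java.awt.Color[r=255,g=136,b=0]
    }
}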
From source file:com.mtag.traffic.util.JsonTools.java
public static Object requestPath(JSONObject root, String path) {
    String[] ids = path.split("\\.");
    Object obj = root;
    for (String id : ids) {
        if (id.contains(":")) {
            String[] idIndex = id.split("\\:");
            if (obj != null && obj instanceof JSONObject && ((JSONObject) obj).containsKey(idIndex[0])) {
                obj = ((JSONObject) obj).get(idIndex[0]);
                if (obj != null && obj instanceof JSONArray) {
                    obj = ((JSONArray) obj).get(Integer.decode(idIndex[1]));
                } else {
                    return null;
                }
            } else {
                return null;
            }
        } else {
            if (obj != null && obj instanceof JSONObject && ((JSONObject) obj).containsKey(id)) {
                obj = ((JSONObject) obj).get(id);
            } else {
                return null;
            }
        }
    }
    return obj;
}
From source file:net.zyuiop.fastsurvival.updater.Updater.java
private void checkForUpdates() {
    Bukkit.getLogger().info("[Updater] Checking FastSurvival updates.");
    String version = FastSurvival.instance.getDescription().getVersion();
    String[] parts = StringUtils.split(version, ".");
    int major, minor, build = -1;
    try {
        major = Integer.decode(parts[0]);
        minor = Integer.decode(parts[1]);
        build = Integer.decode(parts[2]);
    } catch (Exception e) {
        Bukkit.getLogger().severe("Failed to check for FastSurvival updates : malformed version.");
        return;
    }
    Bukkit.getLogger()
            .info("Current FastSurvival version is : MAJOR " + major + " MINOR " + minor + " BUILD " + build);
    try {
        InputStream stream = versionUrl.openStream();
        BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
        String upstreamVersion = reader.readLine();
        parts = StringUtils.split(upstreamVersion, ".");
        int _major, _minor, _build = -1;
        try {
            _major = Integer.decode(parts[0]);
            _minor = Integer.decode(parts[1]);
            _build = Integer.decode(parts[2]);
        } catch (Exception e) {
            Bukkit.getLogger().severe("Failed to check for FastSurvival updates : malformed upstream version.");
            return;
        }
        Bukkit.getLogger().info(
                "Upstream FastSurvival version is : MAJOR " + _major + " MINOR " + _minor + " BUILD " + _build);
        if (_build > build) {
            Bukkit.getLogger().info("Update available ! Run /update to update the plugin.");
            this.targetVersion = upstreamVersion;
            updates = true;
            downloadUrl = new URL("http://archive.zyuiop.net/FastSurvival/fastsurvival-" + _build + ".jar");
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
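One caveat when decoding version components as this updater does: a leading zero makes decode interpret the token as octal, so "010" yields 8 and "09" throws NumberFormatException, whereas Integer.parseInt always assumes base 10. A minimal standalone sketch of the difference (not part of the plugin):

public class VersionDecodePitfall {
    public static void main(String[] args) {
        System.out.println(Integer.decode("010"));    // 8, not 10 -- the leading zero means octal
        System.out.println(Integer.parseInt("010"));  // 10 -- parseInt ignores the leading zero
        try {
            Integer.decode("09");                     // '9' is not a valid octal digit
        } catch (NumberFormatException e) {
            System.out.println("decode(\"09\") failed: " + e.getMessage());
        }
    }
}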
From source file:com.milaboratory.mitcr.cli.Main.java
public static void main(String[] args) {
    int o = 0;
    BuildInformation buildInformation = BuildInformationProvider.get();
    final boolean isProduction = "default".equals(buildInformation.scmBranch);
    // buildInformation.version != null && buildInformation.version.lastIndexOf("SNAPSHOT") < 0;

    orderingMap.put(PARAMETERS_SET_OPTION, o++);
    orderingMap.put(SPECIES_OPTION, o++);
    orderingMap.put(GENE_OPTION, o++);
    orderingMap.put(ERROR_CORECTION_LEVEL_OPTION, o++);
    orderingMap.put(QUALITY_THRESHOLD_OPTION, o++);
    orderingMap.put(AVERAGE_QUALITY_OPTION, o++);
    orderingMap.put(LQ_OPTION, o++);
    orderingMap.put(CLUSTERIZATION_OPTION, o++);
    orderingMap.put(INCLUDE_CYS_PHE_OPTION, o++);
    orderingMap.put(LIMIT_OPTION, o++);
    orderingMap.put(EXPORT_OPTION, o++);
    orderingMap.put(REPORT_OPTION, o++);
    orderingMap.put(REPORTING_LEVEL_OPTION, o++);
    orderingMap.put(PHRED33_OPTION, o++);
    orderingMap.put(PHRED64_OPTION, o++);
    orderingMap.put(THREADS_OPTION, o++);
    orderingMap.put(COMPRESSED_OPTION, o++);
    orderingMap.put(PRINT_HELP_OPTION, o++);
    orderingMap.put(PRINT_VERSION_OPTION, o++);
    orderingMap.put(PRINT_DEBUG_OPTION, o++);

    options.addOption(OptionBuilder.withArgName("preset name").hasArg()
            .withDescription("preset of pipeline parameters to use").create(PARAMETERS_SET_OPTION));
    options.addOption(OptionBuilder.withArgName("species").hasArg()
            .withDescription("overrides species ['hs' for Homo sapiens, 'mm' for us Mus musculus] "
                    + "(default for built-in presets is 'hs')")
            .create(SPECIES_OPTION));
    options.addOption(OptionBuilder.withArgName("gene").hasArg()
            .withDescription("overrides gene: TRB or TRA (default value for built-in parameter sets is TRB)")
            .create(GENE_OPTION));
    options.addOption(OptionBuilder.withArgName("0|1|2").hasArg()
            .withDescription(
                    "overrides error correction level (0 = don't correct errors, 1 = correct sequenecing "
                            + "errors only (see -" + QUALITY_THRESHOLD_OPTION + " and -" + LQ_OPTION
                            + " options for details), " + "2 = also correct PCR errors (see -"
                            + CLUSTERIZATION_OPTION + " option)")
            .create(ERROR_CORECTION_LEVEL_OPTION));
    options.addOption(OptionBuilder.withArgName("value").hasArg().withDescription(
            "overrides quality threshold value for segment alignment and bad quality sequences "
                    + "correction algorithms. 0 tells the program not to process quality information. (default is 25)")
            .create(QUALITY_THRESHOLD_OPTION));
    if (!isProduction)
        options.addOption(OptionBuilder.hasArg(false)
                .withDescription("use this option to output average instead of "
                        + "maximal, quality for CDR3 nucleotide sequences. (Experimental option, use with caution.)")
                .create(AVERAGE_QUALITY_OPTION));
    options.addOption(OptionBuilder.withArgName("map | drop").hasArg()
            .withDescription("overrides low quality CDR3s processing strategy (drop = filter off, "
                    + "map = map onto clonotypes created from the high quality CDR3s). This option makes no difference if "
                    + "quality threshold (-" + QUALITY_THRESHOLD_OPTION + " option) is set to 0, or error correction "
                    + "level (-" + ERROR_CORECTION_LEVEL_OPTION + ") is 0.")
            .create(LQ_OPTION));
    options.addOption(OptionBuilder.withArgName("smd | ete").hasArg()
            .withDescription("overrides the PCR error correction algorithm: smd = \"save my diversity\", "
                    + "ete = \"eliminate these errors\". Default value for built-in parameters is ete.")
            .create(CLUSTERIZATION_OPTION));
    options.addOption(OptionBuilder.withArgName("0|1").hasArg()
            .withDescription("overrides weather include bounding Cys & Phe into CDR3 sequence")
            .create(INCLUDE_CYS_PHE_OPTION));
    options.addOption(OptionBuilder.withArgName("# of reads").hasArg()
            .withDescription("limits the number of input sequencing reads, use this parameter to "
                    + "normalize several datasets or to have a glance at the data")
            .create(LIMIT_OPTION));
    options.addOption(OptionBuilder.withArgName("new name").hasArg()
            .withDescription("use this option to export presets to a local xml files").create(EXPORT_OPTION));
    options.addOption(OptionBuilder.withArgName("file name").hasArg()
            .withDescription("use this option to write analysis report (summary) to file")
            .create(REPORT_OPTION));
    options.addOption(OptionBuilder.withArgName("1|2|3").hasArg(true)
            .withDescription("output detalization level (1 = simple, 2 = medium, 3 = full, this format "
                    + "could be deserialized using mitcr API). Affects only tab-delimited output. Default value is 3.")
            .create(REPORTING_LEVEL_OPTION));
    options.addOption(OptionBuilder.hasArg(false).withDescription(
            "add this option if input file is in old illumina format with 64 byte offset for quality "
                    + "string (MiTCR will try to automatically detect file format if one of the \"-phredXX\" options is not provided)")
            .create(PHRED64_OPTION));
    options.addOption(OptionBuilder.hasArg(false)
            .withDescription("add this option if input file is in Phred+33 format for quality values "
                    + "(MiTCR will try to automatically detect file format if one of the \"-phredXX\" options is not provided)")
            .create(PHRED33_OPTION));
    options.addOption(OptionBuilder.withArgName("threads").hasArg()
            .withDescription(
                    "specifies the number of CDR3 extraction threads (default = number of available CPU cores)")
            .create(THREADS_OPTION));
    if (!isProduction)
        options.addOption(OptionBuilder.hasArg(false)
                .withDescription("use compressed data structures for storing individual "
                        + "clone segments statistics (from which arises the clone segment information). This option reduces required "
                        + "amount of memory, but introduces small stochastic errors into the algorithm which determines clone "
                        + "segments. (Experimental option, use with caution.)")
                .create(COMPRESSED_OPTION));
    options.addOption(
            OptionBuilder.hasArg(false).withDescription("print this message").create(PRINT_HELP_OPTION));
    options.addOption(OptionBuilder.hasArg(false).withDescription("print version information")
            .create(PRINT_VERSION_OPTION));
    options.addOption(OptionBuilder.hasArg(false)
            .withDescription("print additional information about analysis process").create(PRINT_DEBUG_OPTION));

    PosixParser parser = new PosixParser();
    try {
        long input_limit = -1;
        int threads = Runtime.getRuntime().availableProcessors();
        int reporting_level = 3;
        int ec_level = 2;

        CommandLine cl = parser.parse(options, args, true);

        if (cl.hasOption(PRINT_HELP_OPTION)) {
            printHelp();
            return;
        }

        boolean averageQuality = cl.hasOption(AVERAGE_QUALITY_OPTION),
                compressedAggregators = cl.hasOption(COMPRESSED_OPTION);

        if (cl.hasOption(PRINT_VERSION_OPTION)) {
            System.out.println("MiTCR by MiLaboratory, version: " + buildInformation.version);
            System.out.println("Branch: " + buildInformation.scmBranch);
            System.out.println("Built: " + buildInformation.buildDate + ", " + buildInformation.jdk + " JDK, "
                    + "build machine: " + buildInformation.builtBy);
            System.out.println("SCM changeset: " + buildInformation.scmChangeset + " ("
                    + buildInformation.scmDate.replace("\"", "") + ")");
            return;
        }

        // Normal execution
        String paramName = cl.getOptionValue(PARAMETERS_SET_OPTION);
        if (paramName == null) {
            err.println("No parameters set is specified.");
            return;
        }
        Parameters params = ParametersIO.getParameters(paramName);
        if (params == null) {
            err.println("No parameters set found with name '" + paramName + "'.");
            return;
        }

        String value;
        if ((value = cl.getOptionValue(THREADS_OPTION)) != null)
            threads = Integer.decode(value);
        if ((value = cl.getOptionValue(REPORTING_LEVEL_OPTION)) != null)
            reporting_level = Integer.decode(value);
        if ((value = cl.getOptionValue(LIMIT_OPTION)) != null)
            input_limit = Long.decode(value);
        if ((value = cl.getOptionValue(GENE_OPTION)) != null)
            params.setGene(Gene.fromXML(value));
        if ((value = cl.getOptionValue(SPECIES_OPTION)) != null)
            params.setSpecies(Species.getFromShortName(value));

        if ((value = cl.getOptionValue(INCLUDE_CYS_PHE_OPTION)) != null) {
            if (value.equals("1"))
                params.getCDR3ExtractorParameters().setIncludeCysPhe(true);
            else if (value.equals("0"))
                params.getCDR3ExtractorParameters().setIncludeCysPhe(false);
            else {
                err.println("Illegal value for -" + INCLUDE_CYS_PHE_OPTION + " parameter.");
                return;
            }
        }

        if ((value = cl.getOptionValue(ERROR_CORECTION_LEVEL_OPTION)) != null) {
            int v = Integer.decode(value);
            ec_level = v;
            if (v == 0) {
                params.setCloneGeneratorParameters(new BasicCloneGeneratorParameters());
                params.setClusterizationType(CloneClusterizationType.None);
            } else if (v == 1) {
                params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters());
                params.setClusterizationType(CloneClusterizationType.None);
            } else if (v == 2) {
                params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters());
                params.setClusterizationType(CloneClusterizationType.OneMismatch, .1f);
            } else
                throw new RuntimeException("This (" + v + ") error correction level is not supported.");
        }

        if ((value = cl.getOptionValue(QUALITY_THRESHOLD_OPTION)) != null) {
            int v = Integer.decode(value);
            if (v == 0)
                params.setQualityInterpretationStrategy(new DummyQualityInterpretationStrategy());
            else
                params.setQualityInterpretationStrategy(new IlluminaQualityInterpretationStrategy((byte) v));
        }

        if ((value = cl.getOptionValue(LQ_OPTION)) != null)
            if (ec_level > 0)
                switch (value) {
                case "map":
                    params.setCloneGeneratorParameters(new LQMappingCloneGeneratorParameters(
                            ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters())
                                    .getSegmentInformationAggregationFactor(),
                            3, true));
                    break;
                case "drop":
                    params.setCloneGeneratorParameters(new LQFilteringOffCloneGeneratorParameters(
                            ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters())
                                    .getSegmentInformationAggregationFactor()));
                    break;
                default:
                    throw new RuntimeException("Wrong value for -" + LQ_OPTION + " option.");
                }

        if ((value = cl.getOptionValue(CLUSTERIZATION_OPTION)) != null)
            if (ec_level > 1) // == 2
                switch (value) {
                case "smd":
                    params.setClusterizationType(CloneClusterizationType.V2D1J2T3Explicit);
                    break;
                case "ete":
                    params.setClusterizationType(CloneClusterizationType.OneMismatch);
                    break;
                default:
                    throw new RuntimeException("Wrong value for -" + CLUSTERIZATION_OPTION + " option.");
                }

        ((BasicCloneGeneratorParameters) params.getCloneGeneratorParameters())
                .setAccumulatorType(AccumulatorType.get(compressedAggregators, averageQuality));

        if ((value = cl.getOptionValue(EXPORT_OPTION)) != null) {
            // Exporting parameters
            ParametersIO.exportParameters(params, value);
            return;
        }

        String[] offArgs = cl.getArgs();
        if (offArgs.length == 0) {
            err.println("Input file not specified.");
            return;
        } else if (offArgs.length == 1) {
            err.println("Output file not specified.");
            return;
        } else if (offArgs.length > 2) {
            err.println("Unrecognized argument.");
            return;
        }

        String inputFileName = offArgs[0];
        String outputFileName = offArgs[1];

        File input = new File(inputFileName);
        if (!input.exists()) {
            err.println("Input file not found.");
            return;
        }

        // TODO This also done inside SFastqReader constructor
        CompressionType compressionType = CompressionType.None;
        if (inputFileName.endsWith(".gz"))
            compressionType = CompressionType.GZIP;

        QualityFormat format = null; // If variable remains null file format will be detected automatically
        if (cl.hasOption(PHRED33_OPTION))
            format = QualityFormat.Phred33;
        if (cl.hasOption(PHRED64_OPTION))
            if (format == null)
                format = QualityFormat.Phred64;
            else {
                err.println("Options: -" + PHRED33_OPTION + " and -" + PHRED64_OPTION + " are mutually exclusive");
                return;
            }

        SFastqReader reads = format == null ? new SFastqReader(input, compressionType)
                : new SFastqReader(input, format, compressionType);

        OutputPort<SSequencingRead> inputToPipeline = reads;
        if (input_limit >= 0)
            inputToPipeline = new CountLimitingOutputPort<>(inputToPipeline, input_limit);

        SegmentLibrary library = DefaultSegmentLibrary.load();

        AnalysisStatisticsAggregator statisticsAggregator = new AnalysisStatisticsAggregator();

        FullPipeline pipeline = new FullPipeline(inputToPipeline, params, false, library);
        pipeline.setThreads(threads);
        pipeline.setAnalysisListener(statisticsAggregator);

        new Thread(new SmartProgressReporter(pipeline, err)).start(); // Printing status to the standard error stream

        pipeline.run();

        if (cl.hasOption(PRINT_DEBUG_OPTION)) {
            err.println("Memory = " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()));
            err.println("Clusterization: " + pipeline.getQC().getReadsClusterized() + "% of reads, "
                    + pipeline.getQC().getClonesClusterized() + " % clones");
        }

        CloneSetClustered cloneSet = pipeline.getResult();

        if ((value = cl.getOptionValue(REPORT_OPTION)) != null) {
            File file = new File(value);
            TablePrintStreamAdapter table;
            if (file.exists())
                table = new TablePrintStreamAdapter(new FileOutputStream(file, true));
            else {
                table = new TablePrintStreamAdapter(file);
                ReportExporter.printHeader(table);
            }
            // CloneSetQualityControl qc = new CloneSetQualityControl(library, params.getSpecies(), params.getGene(), cloneSet);
            ReportExporter.printRow(table, inputFileName, outputFileName, pipeline.getQC(), statisticsAggregator);
            table.close();
        }

        if (outputFileName.endsWith(".cls"))
            ClsExporter.export(pipeline, outputFileName.replace(".cls", "") + " " + new Date().toString(),
                    input.getName(), outputFileName);
        else {
            // Dry run
            if (outputFileName.startsWith("-"))
                return;

            ExportDetalizationLevel detalization = ExportDetalizationLevel.fromLevel(reporting_level);

            CompressionType compressionType1 = CompressionType.None;
            if (outputFileName.endsWith(".gz"))
                compressionType1 = CompressionType.GZIP;
            CloneSetIO.exportCloneSet(outputFileName, cloneSet, detalization, params, input.getAbsolutePath(),
                    compressionType1);
        }
    } catch (ParseException | RuntimeException | IOException e) {
        err.println("Error occurred in the analysis pipeline.");
        err.println();
        e.printStackTrace();
        // printHelp();
    }
}
From source file:com.l2jfree.gameserver.model.skills.conditions.ConditionParser.java
public final Condition parseConditionWithMessage(Node n, Object template) {
    Condition cond = parseExistingCondition(n.getFirstChild(), template);
    Node msg = n.getAttributes().getNamedItem("msg");
    if (msg != null)
        cond.setMessage(msg.getNodeValue());
    Node msgId = n.getAttributes().getNamedItem("msgId");
    if (msgId != null)
        cond.setMessageId(Integer.decode(msgId.getNodeValue()));
    return cond;
}
From source file:org.ohmage.prompt.multichoice.MultiChoicePrompt.java
@Override
protected Object getTypeSpecificResponseObject() {
    JSONArray jsonArray = new JSONArray();
    for (int index : mSelectedIndexes) {
        if (index >= 0 && index < mChoices.size())
            jsonArray.put(Integer.decode(mChoices.get(index).key));
    }
    return jsonArray;
}
From source file:com.comcast.oscar.dictionary.DictionaryTLV.java
/**
 * @param sTlvDotNotation Example 24.1.3
 * @param dsq DictionarySQLQueries
 * @return ArrayDeque<String> of TLV Names found in Dictionary
 */
public static ArrayDeque<String> getTypeHierarchyStack(String sTlvDotNotation, DictionarySQLQueries dsq) {
    boolean localDebug = Boolean.FALSE;

    ArrayDeque<String> adTypeHierarchyStack = new ArrayDeque<String>();

    List<String> lsTlvDotNotation = new ArrayList<String>();
    lsTlvDotNotation = Arrays.asList(sTlvDotNotation.split("\\."));

    if (debug | localDebug)
        System.out.println("ConfigrationFileExport.getTlvDefintion(): " + lsTlvDotNotation.toString());

    // Get TLV Dictionary for the Top Level
    JSONObject joTlvDictionary = dsq.getTlvDefinition(Integer.decode(lsTlvDotNotation.get(0)));

    // Search for TLV Definition
    if (lsTlvDotNotation.size() == 1) {
        try {
            adTypeHierarchyStack.addFirst(joTlvDictionary.getString(Dictionary.TLV_NAME));
        } catch (JSONException e) {
            e.printStackTrace();
        }
    } else if (lsTlvDotNotation.size() >= 1) {
        try {
            adTypeHierarchyStack.addFirst(joTlvDictionary.getString(Dictionary.TLV_NAME));
        } catch (JSONException e) {
            e.printStackTrace();
        }

        int iRecursiveSearch = 0;
        while (iRecursiveSearch < lsTlvDotNotation.size()) {

            if (debug | localDebug)
                System.out.println("ConfigrationFileExport.getTlvDefintion(): WHILE-LOOP");

            try {
                if (joTlvDictionary.getString(Dictionary.TYPE).equals(lsTlvDotNotation.get(iRecursiveSearch))) {
                    if (joTlvDictionary.getBoolean(Dictionary.ARE_SUBTYPES)) {
                        try {
                            JSONArray jaTlvDictionary = joTlvDictionary.getJSONArray(Dictionary.SUBTYPE_ARRAY);
                            for (int iIndex = 0; iIndex < jaTlvDictionary.length(); iIndex++) {

                                if (debug | localDebug)
                                    System.out.println("ConfigrationFileExport.getTlvDefintion(): FOR-LOOP");

                                JSONObject joTlvDictionaryTemp = jaTlvDictionary.getJSONObject(iIndex);
                                if (joTlvDictionaryTemp.getString(Dictionary.TYPE)
                                        .equals(lsTlvDotNotation.get(iRecursiveSearch + 1))) {
                                    joTlvDictionary = joTlvDictionaryTemp;
                                    iRecursiveSearch++;
                                    try {
                                        adTypeHierarchyStack
                                                .addFirst(joTlvDictionary.getString(Dictionary.TLV_NAME));
                                    } catch (JSONException e) {
                                        e.printStackTrace();
                                    }
                                    break;
                                }
                            }
                        } catch (JSONException e) {
                            e.printStackTrace();
                        }
                    } else {
                        iRecursiveSearch++;
                    }
                }
            } catch (JSONException e1) {
                e1.printStackTrace();
            }
        }
    }
    return adTypeHierarchyStack;
}
From source file:admincommands.Raw.java
@Override
public void execute(Player admin, String... params) {
    if (params.length != 1) {
        PacketSendUtility.sendMessage(admin, "Usage: //raw [name]");
        return;
    }

    File file = new File(ROOT, params[0] + ".txt");
    if (!file.exists() || !file.canRead()) {
        PacketSendUtility.sendMessage(admin, "Wrong file selected.");
        return;
    }

    try {
        List<String> lines = FileUtils.readLines(file);
        SM_CUSTOM_PACKET packet = null;
        PacketSendUtility.sendMessage(admin, "lines " + lines.size());
        boolean init = false;
        for (int r = 0; r < lines.size(); r++) {
            String row = lines.get(r);
            String[] tokens = row.substring(0, 48).trim().split(" ");
            int len = tokens.length;
            for (int i = 0; i < len; i++) {
                if (!init) {
                    if (i == 1) {
                        packet = new SM_CUSTOM_PACKET(Integer.decode("0x" + tokens[i] + tokens[i - 1]));
                        init = true;
                    }
                } else if (r > 0 || i > 4) {
                    packet.addElement(PacketElementType.C, "0x" + tokens[i]);
                }
            }
        }
        if (packet != null) {
            PacketSendUtility.sendMessage(admin, "Packet send..");
            PacketSendUtility.sendPacket(admin, packet);
        }
    } catch (Exception e) {
        PacketSendUtility.sendMessage(admin, "An error has occurred.");
        logger.warn("IO Error.", e);
    }
}
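The opcode line above relies on decode's hex support: the two byte tokens from the dump are swapped (presumably because the dump stores the low byte first), prefixed with "0x" and decoded in one call. A standalone sketch of just that step, with hypothetical token values:

public class OpcodeDecodeDemo {
    public static void main(String[] args) {
        String[] tokens = {"0A", "2B"};  // hypothetical hex byte tokens, low byte first
        // Same trick as Raw.execute(): second token + first token concatenated, then decoded as hex
        int opcode = Integer.decode("0x" + tokens[1] + tokens[0]);
        System.out.println(Integer.toHexString(opcode));  // 2b0a
        System.out.println(opcode);                       // 11018
    }
}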