List of usage examples for java.math.BigInteger.ZERO

BigInteger.ZERO is the BigInteger constant zero, a public static field on java.math.BigInteger. The examples below, collected from open-source projects, show the roles it typically plays: the seed for a running sum, a comparison sentinel for signs and row counts, and a default or identity value.
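Before the project examples, here is a minimal standalone sketch (not taken from any source file on this page; all names and values are illustrative) of the recurring idioms: seeding an accumulator, using ZERO as the identity of a reduction, and testing a value's sign.

import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;

public class ZeroIdioms {
    // Seed a running sum with the ZERO constant; BigInteger is immutable,
    // so add() returns a new value that must be kept.
    static BigInteger sum(List<BigInteger> values) {
        BigInteger total = BigInteger.ZERO;
        for (BigInteger v : values) {
            total = total.add(v);
        }
        return total;
    }

    public static void main(String[] args) {
        List<BigInteger> values = Arrays.asList(BigInteger.ONE, BigInteger.TEN);
        System.out.println(sum(values)); // 11

        // The same sum as a stream reduction: ZERO is the identity element of add.
        System.out.println(values.stream().reduce(BigInteger.ZERO, BigInteger::add)); // 11

        // Sign tests against ZERO: compareTo, or equivalently signum().
        BigInteger v = BigInteger.valueOf(-5);
        System.out.println(v.compareTo(BigInteger.ZERO) < 0); // true
        System.out.println(v.signum() < 0);                   // true
    }
}

The loop and the reduce call are equivalent; reduce simply makes the role of ZERO as the identity element explicit.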
From source file:libra.preprocess.common.kmerhistogram.KmerRangePartitioner.java
public KmerRangePartition[] getEqualAreaPartitions() {
    KmerRangePartition[] partitions = new KmerRangePartition[this.numPartitions];

    // calc 4^kmerSize
    BigInteger kmerend = BigInteger.valueOf(4).pow(this.kmerSize);
    BigDecimal bdkmerend = new BigDecimal(kmerend);

    // x moves in (0~1), y in (0~1)
    // total area under the curve is 0.5
    double kmerArea = 0.5;
    double sliceArea = kmerArea / this.numPartitions;

    // treat the triangle as horizontally flipped so the calculation gets easier
    double x1 = 0;
    List<BigInteger> widths = new ArrayList<BigInteger>();
    BigInteger widthSum = BigInteger.ZERO;
    for (int i = 0; i < this.numPartitions; i++) {
        // x2*x2 = 2*sliceArea + x1*x1
        double temp = (2 * sliceArea) + (x1 * x1);
        double x2 = Math.sqrt(temp);

        BigDecimal bdx1 = BigDecimal.valueOf(x1);
        BigDecimal bdx2 = BigDecimal.valueOf(x2);

        // as i increases, bdw decreases
        BigDecimal bdw = bdx2.subtract(bdx1);
        BigInteger bw = bdw.multiply(bdkmerend).toBigInteger();
        if (bw.compareTo(BigInteger.ZERO) <= 0) {
            bw = BigInteger.ONE;
        }

        if (widthSum.add(bw).compareTo(kmerend) > 0) {
            bw = kmerend.subtract(widthSum);
        }

        if (i == this.numPartitions - 1) {
            // last case
            if (widthSum.add(bw).compareTo(kmerend) < 0) {
                bw = kmerend.subtract(widthSum);
            }
        }

        // save it
        widths.add(bw);
        widthSum = widthSum.add(bw);
        x1 = x2;
    }

    BigInteger cur_begin = BigInteger.ZERO;
    for (int i = 0; i < this.numPartitions; i++) {
        BigInteger slice_width = widths.get(this.numPartitions - 1 - i);
        BigInteger slice_begin = cur_begin;
        if (slice_begin.add(slice_width).compareTo(kmerend) > 0) {
            slice_width = kmerend.subtract(slice_begin);
        }

        BigInteger slice_end = cur_begin.add(slice_width).subtract(BigInteger.ONE);

        KmerRangePartition slice = new KmerRangePartition(this.kmerSize, this.numPartitions, i,
                slice_width, slice_begin, slice_end);
        partitions[i] = slice;

        cur_begin = cur_begin.add(slice_width);
    }

    return partitions;
}
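The loop's recurrence comes from equal-area slicing of the triangle under y = x on [0, 1] (total area 0.5, matching kmerArea): the strip between x1 and x2 contributes (x2^2 - x1^2)/2, and setting that equal to sliceArea gives x2 = sqrt(2*sliceArea + x1^2), which is exactly the temp computation above. A standalone check of just that step, with hypothetical values (not part of the source file):

public class EqualAreaCheck {
    public static void main(String[] args) {
        int numPartitions = 4;
        double sliceArea = 0.5 / numPartitions; // total triangle area is 0.5
        double x1 = 0;
        for (int i = 0; i < numPartitions; i++) {
            double x2 = Math.sqrt(2 * sliceArea + x1 * x1);
            // every strip should report the same area, 0.125 here
            System.out.printf("strip %d: [%f, %f], area = %f%n", i, x1, x2, (x2 * x2 - x1 * x1) / 2);
            x1 = x2;
        }
    }
}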
From source file:com.stratio.ingestion.sink.cassandra.EventParserTest.java
@Test
public void shouldParsePrimitiveTypes() throws Exception {
    Object integer = EventParser.parseValue("1", DataType.Name.INT);
    assertThat(integer).isInstanceOf(Integer.class).isEqualTo(1);
    integer = EventParser.parseValue(Integer.toString(Integer.MAX_VALUE), DataType.Name.INT);
    assertThat(integer).isInstanceOf(Integer.class).isEqualTo(Integer.MAX_VALUE);
    integer = EventParser.parseValue(Integer.toString(Integer.MIN_VALUE), DataType.Name.INT);
    assertThat(integer).isInstanceOf(Integer.class).isEqualTo(Integer.MIN_VALUE);
    integer = EventParser.parseValue(" 1 2 ", DataType.Name.INT);
    assertThat(integer).isInstanceOf(Integer.class).isEqualTo(12);

    Object counter = EventParser.parseValue("1", DataType.Name.COUNTER);
    assertThat(counter).isEqualTo(1L);
    counter = EventParser.parseValue(Long.toString(Long.MAX_VALUE), DataType.Name.COUNTER);
    assertThat(counter).isEqualTo(Long.MAX_VALUE);
    counter = EventParser.parseValue(Long.toString(Long.MIN_VALUE), DataType.Name.COUNTER);
    assertThat(counter).isEqualTo(Long.MIN_VALUE);
    counter = EventParser.parseValue(" 1 2 ", DataType.Name.COUNTER);
    assertThat(counter).isEqualTo(12L);

    Object _float = EventParser.parseValue("1", DataType.Name.FLOAT);
    assertThat(_float).isInstanceOf(Float.class).isEqualTo(1f);
    _float = EventParser.parseValue("1.0", DataType.Name.FLOAT);
    assertThat(_float).isInstanceOf(Float.class).isEqualTo(1f);
    _float = EventParser.parseValue(Float.toString(Float.MAX_VALUE), DataType.Name.FLOAT);
    assertThat(_float).isInstanceOf(Float.class).isEqualTo(Float.MAX_VALUE);
    _float = EventParser.parseValue(Float.toString(Float.MIN_VALUE), DataType.Name.FLOAT);
    assertThat(_float).isInstanceOf(Float.class).isEqualTo(Float.MIN_VALUE);
    _float = EventParser.parseValue(" 1 . 0 ", DataType.Name.FLOAT);
    assertThat(_float).isInstanceOf(Float.class).isEqualTo(1f);

    Object _double = EventParser.parseValue("1", DataType.Name.DOUBLE);
    assertThat(_double).isInstanceOf(Double.class).isEqualTo(1.0);
    _double = EventParser.parseValue("0", DataType.Name.DOUBLE);
    assertThat(_double).isInstanceOf(Double.class).isEqualTo(0.0);
    _double = EventParser.parseValue(Double.toString(Double.MAX_VALUE), DataType.Name.DOUBLE);
    assertThat(_double).isInstanceOf(Double.class).isEqualTo(Double.MAX_VALUE);
    _double = EventParser.parseValue(Double.toString(Double.MIN_VALUE), DataType.Name.DOUBLE);
    assertThat(_double).isInstanceOf(Double.class).isEqualTo(Double.MIN_VALUE);
    _double = EventParser.parseValue(" 1 . 0 ", DataType.Name.DOUBLE);
    assertThat(_double).isInstanceOf(Double.class).isEqualTo(1.0);

    for (DataType.Name type : Arrays.asList(DataType.Name.BIGINT)) {
        Object bigInteger = EventParser.parseValue("1", type);
        assertThat(bigInteger).isInstanceOf(Long.class).isEqualTo(1L);
        bigInteger = EventParser.parseValue("0", type);
        assertThat(bigInteger).isInstanceOf(Long.class).isEqualTo(0L);
        bigInteger = EventParser.parseValue(Long.toString(Long.MAX_VALUE), type);
        assertThat(bigInteger).isInstanceOf(Long.class).isEqualTo(Long.MAX_VALUE);
        bigInteger = EventParser.parseValue(Long.toString(Long.MIN_VALUE), type);
        assertThat(bigInteger).isInstanceOf(Long.class).isEqualTo(Long.MIN_VALUE);
    }

    for (DataType.Name type : Arrays.asList(DataType.Name.VARINT)) {
        Object bigInteger = EventParser.parseValue("1", type);
        assertThat(bigInteger).isInstanceOf(BigInteger.class).isEqualTo(BigInteger.ONE);
        bigInteger = EventParser.parseValue("0", type);
        assertThat(bigInteger).isInstanceOf(BigInteger.class).isEqualTo(BigInteger.ZERO);
        bigInteger = EventParser.parseValue(
                BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2L)).toString(), type);
        assertThat(bigInteger).isInstanceOf(BigInteger.class)
                .isEqualTo(BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2L)));
        bigInteger = EventParser.parseValue(
                BigInteger.valueOf(Long.MIN_VALUE).multiply(BigInteger.valueOf(2L)).toString(), type);
        assertThat(bigInteger).isInstanceOf(BigInteger.class)
                .isEqualTo(BigInteger.valueOf(Long.MIN_VALUE).multiply(BigInteger.valueOf(2L)));
    }

    Object bigDecimal = EventParser.parseValue("1", DataType.Name.DECIMAL);
    assertThat(bigDecimal).isInstanceOf(BigDecimal.class).isEqualTo(BigDecimal.valueOf(1));
    bigDecimal = EventParser.parseValue("0", DataType.Name.DECIMAL);
    assertThat(bigDecimal).isInstanceOf(BigDecimal.class).isEqualTo(BigDecimal.valueOf(0));
    bigDecimal = EventParser.parseValue(
            BigDecimal.valueOf(Double.MAX_VALUE).multiply(BigDecimal.valueOf(2)).toString(),
            DataType.Name.DECIMAL);
    assertThat(bigDecimal).isInstanceOf(BigDecimal.class)
            .isEqualTo(BigDecimal.valueOf(Double.MAX_VALUE).multiply(BigDecimal.valueOf(2)));
    bigDecimal = EventParser.parseValue(
            BigDecimal.valueOf(Double.MIN_VALUE).multiply(BigDecimal.valueOf(2)).toString(),
            DataType.Name.DECIMAL);
    assertThat(bigDecimal).isInstanceOf(BigDecimal.class)
            .isEqualTo(BigDecimal.valueOf(Double.MIN_VALUE).multiply(BigDecimal.valueOf(2)));
    bigDecimal = EventParser.parseValue(" 1 2 ", DataType.Name.DECIMAL);
    assertThat(bigDecimal).isInstanceOf(BigDecimal.class).isEqualTo(BigDecimal.valueOf(12));

    Object string = EventParser.parseValue("string", DataType.Name.TEXT);
    assertThat(string).isInstanceOf(String.class).isEqualTo("string");

    Object bool = EventParser.parseValue("true", DataType.Name.BOOLEAN);
    assertThat(bool).isInstanceOf(Boolean.class).isEqualTo(true);

    Object addr = EventParser.parseValue("192.168.1.1", DataType.Name.INET);
    assertThat(addr).isInstanceOf(InetAddress.class).isEqualTo(InetAddress.getByName("192.168.1.1"));

    UUID randomUUID = UUID.randomUUID();
    Object uuid = EventParser.parseValue(randomUUID.toString(), DataType.Name.UUID);
    assertThat(uuid).isInstanceOf(UUID.class).isEqualTo(randomUUID);
}
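The VARINT assertions above work because BigInteger's equals is value-based: a freshly parsed zero compares equal to the shared BigInteger.ZERO constant even though it is a different object. A standalone illustration (not from the test above):

import java.math.BigInteger;

public class ZeroEquality {
    public static void main(String[] args) {
        BigInteger parsed = new BigInteger("0");
        System.out.println(parsed.equals(BigInteger.ZERO));    // true: equals compares the numeric value
        System.out.println(parsed.compareTo(BigInteger.ZERO)); // 0
        System.out.println(parsed == BigInteger.ZERO);         // false: new always allocates a new object
    }
}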
From source file:org.jembi.rhea.transformers.XDSRepositoryRetrieveDocumentSetResponse.java
protected String generateATNAMessage(String request, String patientId, boolean outcome,
        List<DocumentInfo> documentsInfo) throws JAXBException {
    AuditMessage res = new AuditMessage();

    EventIdentificationType eid = new EventIdentificationType();
    eid.setEventID(ATNAUtil.buildCodedValueType("DCM", "110107", "Import"));
    eid.setEventActionCode("C");
    eid.setEventDateTime(ATNAUtil.newXMLGregorianCalendar());
    eid.getEventTypeCode()
            .add(ATNAUtil.buildCodedValueType("IHE Transactions", "ITI-43", "Retrieve Document Set"));
    eid.setEventOutcomeIndicator(outcome ? BigInteger.ZERO : new BigInteger("4"));
    res.setEventIdentification(eid);

    res.getActiveParticipant().add(ATNAUtil.buildActiveParticipant(buildRepositoryPath(), xdsRepositoryHost,
            false, xdsRepositoryHost, (short) 1, "DCM", "110153", "Source"));
    res.getActiveParticipant().add(ATNAUtil.buildActiveParticipant(ATNAUtil.WSA_REPLYTO_ANON,
            ATNAUtil.getProcessID(), true, ATNAUtil.getHostIP(), (short) 2, "DCM", "110152", "Destination"));

    res.getAuditSourceIdentification().add(ATNAUtil.buildAuditSource("openhie-repository"));

    res.getParticipantObjectIdentification()
            .add(ATNAUtil.buildParticipantObjectIdentificationType(
                    String.format("%s^^^&%s&ISO", patientId, requestedAssigningAuthority), (short) 1,
                    (short) 1, "RFC-3881", "2", "PatientNumber", null));

    for (DocumentInfo docInfo : documentsInfo) {
        List<ParticipantObjectDetail> pod = new ArrayList<ParticipantObjectDetail>();
        if (docInfo._reposUniqueId != null)
            pod.add(new ParticipantObjectDetail("Repository Unique Id", docInfo._reposUniqueId.getBytes()));
        if (docInfo._homeCommunityId != null)
            pod.add(new ParticipantObjectDetail("ihe:homeCommunityID", docInfo._homeCommunityId.getBytes()));

        res.getParticipantObjectIdentification().add(ATNAUtil.buildParticipantObjectIdentificationType(
                docInfo._docUniqueId, (short) 2, (short) 3, "RFC-3881", "9", "Report Number", request, pod));
    }

    return ATNAUtil.marshallATNAObject(res);
}
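Here BigInteger.ZERO encodes the "success" outcome and a freshly constructed BigInteger the failure code 4. As an aside on that pattern: BigInteger.valueOf is generally preferred over the String constructor for fixed small codes because it can reuse cached instances (in practice, e.g. in OpenJDK, valueOf(0) returns the ZERO constant itself). A minimal sketch with a hypothetical outcomeIndicator helper mirroring the logic above:

import java.math.BigInteger;

public class OutcomeCodes {
    // Hypothetical encoding of success/failure, mirroring the ATNA pattern above.
    static BigInteger outcomeIndicator(boolean success) {
        // valueOf(4) avoids parsing a string and may reuse a cached instance.
        return success ? BigInteger.ZERO : BigInteger.valueOf(4);
    }

    public static void main(String[] args) {
        System.out.println(outcomeIndicator(true));  // 0
        System.out.println(outcomeIndicator(false)); // 4
    }
}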
From source file:uk.co.petertribble.jangle.SnmpChart.java
private void initialize(List<String> oids, List<String> alloids) {
    allnames = alloids;
    tsmap = new HashMap<String, TimeSeries>();
    valueMap = new HashMap<String, BigInteger>();
    dataset = new TimeSeriesCollection();
    lastsnap = 0;
    for (String oid : oids) {
        TimeSeries ts = new TimeSeries(smm.prettifyOID(oid));
        ts.setMaximumItemAge(maxage);
        dataset.addSeries(ts);
        tsmap.put(oid, ts);
        valueMap.put(oid, BigInteger.ZERO);
    }
    updateAccessory();
    String ylabel = showdelta ? SnmpResources.getString("CHART.RATE") : SnmpResources.getString("CHART.VALUE");
    chart = ChartFactory.createTimeSeriesChart(charttitle, SnmpResources.getString("CHART.TIME"), ylabel,
            dataset, true, true, false);
    setAxes();
    startLoop();
}
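valueMap seeds every OID with BigInteger.ZERO so the first rate computation has a well-defined previous value. A minimal sketch of that delta pattern, with hypothetical names (CounterDeltas and delta are illustrative, not from the SnmpChart source):

import java.math.BigInteger;
import java.util.HashMap;
import java.util.Map;

public class CounterDeltas {
    private final Map<String, BigInteger> lastValues = new HashMap<>();

    // Returns how much the counter for this key grew since the previous sample.
    BigInteger delta(String key, BigInteger current) {
        // Absent keys default to ZERO, so the first delta is simply the current value.
        BigInteger previous = lastValues.getOrDefault(key, BigInteger.ZERO);
        lastValues.put(key, current);
        return current.subtract(previous);
    }

    public static void main(String[] args) {
        CounterDeltas d = new CounterDeltas();
        System.out.println(d.delta("ifInOctets", BigInteger.valueOf(100))); // 100
        System.out.println(d.delta("ifInOctets", BigInteger.valueOf(250))); // 150
    }
}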
From source file:piuk.blockchain.android.ui.dialogs.TransactionSummaryDialog.java
@Override
public Dialog onCreateDialog(final Bundle savedInstanceState) {
    super.onCreateDialog(savedInstanceState);

    final FragmentActivity activity = getActivity();
    final LayoutInflater inflater = LayoutInflater.from(activity);

    final Builder dialog = new AlertDialog.Builder(new ContextThemeWrapper(activity, R.style.Theme_Dialog))
            .setTitle(R.string.transaction_summary_title);

    final LinearLayout view = (LinearLayout) inflater.inflate(R.layout.transaction_summary_fragment, null);
    dialog.setView(view);

    try {
        final MyRemoteWallet wallet = application.getRemoteWallet();

        BigInteger totalOutputValue = BigInteger.ZERO;
        for (TransactionOutput output : tx.getOutputs()) {
            totalOutputValue = totalOutputValue.add(output.getValue());
        }

        final TextView resultDescriptionView = (TextView) view.findViewById(R.id.result_description);
        final TextView toView = (TextView) view.findViewById(R.id.transaction_to);
        final TextView toViewLabel = (TextView) view.findViewById(R.id.transaction_to_label);
        final View toViewContainer = (View) view.findViewById(R.id.transaction_to_container);
        final TextView hashView = (TextView) view.findViewById(R.id.transaction_hash);
        final TextView transactionTimeView = (TextView) view.findViewById(R.id.transaction_date);
        final TextView confirmationsView = (TextView) view.findViewById(R.id.transaction_confirmations);
        final TextView noteView = (TextView) view.findViewById(R.id.transaction_note);
        final Button addNoteButton = (Button) view.findViewById(R.id.add_note_button);
        final TextView feeView = (TextView) view.findViewById(R.id.transaction_fee);
        final View feeViewContainer = view.findViewById(R.id.transaction_fee_container);
        final TextView valueNowView = (TextView) view.findViewById(R.id.transaction_value);
        final View valueNowContainerView = view.findViewById(R.id.transaction_value_container);

        String to = null;
        for (TransactionOutput output : tx.getOutputs()) {
            try {
                String toAddress = output.getScriptPubKey().getToAddress().toString();
                if (!wallet.isAddressMine(toAddress)) {
                    to = toAddress;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        String from = null;
        for (TransactionInput input : tx.getInputs()) {
            try {
                String fromAddress = input.getFromAddress().toString();
                if (!wallet.isAddressMine(fromAddress)) {
                    from = fromAddress;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        long realResult = 0;
        int confirmations = 0;

        if (tx instanceof MyTransaction) {
            MyTransaction myTx = (MyTransaction) tx;
            realResult = myTx.getResult().longValue();
            if (wallet.getLatestBlock() != null) {
                confirmations = wallet.getLatestBlock().getHeight() - myTx.getHeight() + 1;
            }
        } else if (application.isInP2PFallbackMode()) {
            realResult = tx.getValue(application.bitcoinjWallet).longValue();
            if (tx.getConfidence().getConfidenceType() == ConfidenceType.BUILDING)
                confirmations = tx.getConfidence().getDepthInBlocks();
        }

        final long finalResult = realResult;

        if (realResult <= 0) {
            toViewLabel.setText(R.string.transaction_fragment_to);
            if (to == null) {
                ((LinearLayout) toViewContainer.getParent()).removeView(toViewContainer);
            } else {
                toView.setText(to);
            }
        } else {
            toViewLabel.setText(R.string.transaction_fragment_from);
            if (from == null) {
                ((LinearLayout) toViewContainer.getParent()).removeView(toViewContainer);
            } else {
                toView.setText(from);
            }
        }

        // confirmations view
        if (confirmations > 0) {
            confirmationsView.setText("" + confirmations);
        } else {
            confirmationsView.setText("Unconfirmed");
        }

        // hash string view
        final String hashString = new String(Hex.encode(tx.getHash().getBytes()), "UTF-8");

        hashView.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // note: "https://" (the original had a single slash, which produces a malformed URL)
                Intent browserIntent = new Intent(Intent.ACTION_VIEW,
                        Uri.parse("https://" + Constants.BLOCKCHAIN_DOMAIN + "/tx/" + hashString));
                startActivity(browserIntent);
            }
        });

        // notes view
        String note = wallet.getTxNotes().get(hashString);
        if (note == null) {
            addNoteButton.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    dismiss();
                    AddNoteDialog.showDialog(getFragmentManager(), hashString);
                }
            });
            view.removeView(noteView);
        } else {
            view.removeView(addNoteButton);
            noteView.setText(note);
            noteView.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    dismiss();
                    AddNoteDialog.showDialog(getFragmentManager(), hashString);
                }
            });
        }

        addNoteButton.setEnabled(!application.isInP2PFallbackMode());

        SpannableString content = new SpannableString(hashString);
        content.setSpan(new UnderlineSpan(), 0, content.length(), 0);
        hashView.setText(content);

        if (realResult > 0 && from != null)
            resultDescriptionView.setText(this.getString(R.string.transaction_fragment_amount_you_received,
                    WalletUtils.formatValue(BigInteger.valueOf(realResult))));
        else if (realResult < 0 && to != null)
            resultDescriptionView.setText(this.getString(R.string.transaction_fragment_amount_you_sent,
                    WalletUtils.formatValue(BigInteger.valueOf(realResult))));
        else
            resultDescriptionView.setText(this.getString(R.string.transaction_fragment_amount_you_moved,
                    WalletUtils.formatValue(totalOutputValue)));

        final Date time = tx.getUpdateTime();
        transactionTimeView.setText(dateFormat.format(time));

        // These will be made visible again later, once information is fetched from the server
        feeViewContainer.setVisibility(View.GONE);
        valueNowContainerView.setVisibility(View.GONE);

        if (tx instanceof MyTransaction) {
            MyTransaction myTx = (MyTransaction) tx;

            final long txIndex = myTx.getTxIndex();

            final Handler handler = new Handler();

            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        final JSONObject obj = getTransactionSummary(txIndex, wallet.getGUID(), finalResult);

                        handler.post(new Runnable() {
                            @Override
                            public void run() {
                                try {
                                    if (obj.get("fee") != null) {
                                        feeViewContainer.setVisibility(View.VISIBLE);
                                        feeView.setText(WalletUtils.formatValue(
                                                BigInteger.valueOf(Long.valueOf(obj.get("fee").toString())))
                                                + " BTC");
                                    }

                                    if (obj.get("confirmations") != null) {
                                        int confirmations = ((Number) obj.get("confirmations")).intValue();
                                        confirmationsView.setText("" + confirmations);
                                    }

                                    String result_local = (String) obj.get("result_local");
                                    String result_local_historical = (String) obj
                                            .get("result_local_historical");

                                    if (result_local != null && result_local.length() > 0) {
                                        valueNowContainerView.setVisibility(View.VISIBLE);

                                        if (result_local_historical == null
                                                || result_local_historical.length() == 0
                                                || result_local_historical.equals(result_local)) {
                                            valueNowView.setText(result_local);
                                        } else {
                                            valueNowView.setText(getString(R.string.value_now_ten,
                                                    result_local, result_local_historical));
                                        }
                                    }
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                            }
                        });
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }).start();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    Dialog d = dialog.create();

    WindowManager.LayoutParams lp = new WindowManager.LayoutParams();
    lp.dimAmount = 0;
    lp.width = WindowManager.LayoutParams.FILL_PARENT;
    lp.height = WindowManager.LayoutParams.WRAP_CONTENT;

    d.show();

    d.getWindow().setAttributes(lp);
    d.getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));

    return d;
}
From source file:io.ecarf.core.cloud.task.processor.reason.phase0.DoReasonTask3.java
@Override
public void run() throws IOException {

    GoogleCloudService cloud = (GoogleCloudService) this.getCloudService();
    //String table = metadata.getValue(EcarfMetaData.ECARF_TABLE);
    //Set<String> terms = metadata.getTerms();
    //String schemaFile = metadata.getValue(EcarfMetaData.ECARF_SCHEMA);
    //String bucket = metadata.getBucket();

    Set<String> termsSet;

    if (terms == null) {
        // too large, probably saved as a file
        //String termsFile = metadata.getValue(EcarfMetaData.ECARF_TERMS_FILE);
        log.info("Using json file for terms: " + termsFile);
        Validate.notNull(termsFile);

        String localTermsFile = Utils.TEMP_FOLDER + termsFile;
        cloud.downloadObjectFromCloudStorage(termsFile, localTermsFile, bucket);

        // convert from JSON
        termsSet = FileUtils.jsonFileToSet(localTermsFile);
    } else {
        termsSet = ObjectUtils.csvToSet(terms);
    }

    String localSchemaFile = Utils.TEMP_FOLDER + schemaFile;
    // download the file from the cloud storage
    cloud.downloadObjectFromCloudStorage(schemaFile, localSchemaFile, bucket);

    // uncompress if compressed
    if (GzipUtils.isCompressedFilename(schemaFile)) {
        localSchemaFile = GzipUtils.getUncompressedFilename(localSchemaFile);
    }

    Map<String, Set<Triple>> allSchemaTriples = TripleUtils.getRelevantSchemaNTriples(localSchemaFile,
            TermUtils.RDFS_TBOX);

    // get all the triples we care about
    Map<Term, Set<Triple>> schemaTerms = new HashMap<>();

    for (String term : termsSet) {
        if (allSchemaTriples.containsKey(term)) {
            schemaTerms.put(new Term(term), allSchemaTriples.get(term));
        }
    }

    String decoratedTable = table;
    int emptyRetries = 0;
    int totalInferredTriples = 0;
    int maxRetries = Config.getIntegerProperty(Constants.REASON_RETRY_KEY, 6);

    // timestamp loop
    do {

        //List<String> inferredFiles = new ArrayList<>();

        // First of all run all the queries asynchronously and remember the jobId and filename for each term
        for (Entry<Term, Set<Triple>> entry : schemaTerms.entrySet()) {

            Term term = entry.getKey();

            // add table decoration to table name
            String query = GenericRule.getQuery(entry.getValue(), decoratedTable);

            log.info("\nQuery: " + query);

            String jobId = cloud.startBigDataQuery(query);
            String encodedTerm = FileUtils.encodeFilename(term.getTerm());
            String filename = Utils.TEMP_FOLDER + encodedTerm + Constants.DOT_TERMS;

            // remember the filename and the jobId for this query
            term.setFilename(filename).setJobId(jobId).setEncodedTerm(encodedTerm);
        }

        long start = System.currentTimeMillis();

        String inferredTriplesFile = Utils.TEMP_FOLDER + start + Constants.DOT_INF;

        List<String> productiveTerms = new ArrayList<>();

        int interimInferredTriples = 0;

        try (PrintWriter writer = new PrintWriter(
                new GZIPOutputStream(new FileOutputStream(inferredTriplesFile), Constants.GZIP_BUF_SIZE))) {

            // now loop through the queries
            for (Entry<Term, Set<Triple>> entry : schemaTerms.entrySet()) {

                Term term = entry.getKey();
                log.info("Reasoning for Term: " + term);

                Set<Triple> schemaTriples = entry.getValue();
                log.info("Schema Triples: " + Joiner.on('\n').join(schemaTriples));

                List<String> select = GenericRule.getSelect(schemaTriples);

                // block and wait for each job to complete, then save results to a file
                BigInteger rows = BigInteger.ZERO;

                try {
                    rows = cloud.saveBigQueryResultsToFile(term.getJobId(), term.getFilename()).getTotalRows();
                } catch (IOException ioe) {
                    // transient backend errors
                    log.warn("failed to save query results to file, jobId: " + term.getJobId());
                }

                log.info("Query found " + rows + " rows");

                // only process if triples are found matching this term
                if (!BigInteger.ZERO.equals(rows)) {

                    int inferredTriplesCount = this.inferAndSaveTriplesToFile(term, select, schemaTriples,
                            rows, decoratedTable, writer);

                    productiveTerms.add(term.getTerm());

                    interimInferredTriples += inferredTriplesCount;
                }
            }
        }

        totalInferredTriples += interimInferredTriples;

        if (interimInferredTriples > 0) {
            // TODO stream smaller numbers of inferred triples
            // TODO try uploading from cloud storage
            int streamingThreshold = Config.getIntegerProperty("ecarf.io.reasoning.streaming.threshold",
                    100000);

            log.info("Inserting " + interimInferredTriples + " inferred triples into Big Data table for "
                    + productiveTerms.size() + " productive terms. Filename: " + inferredTriplesFile);

            if (interimInferredTriples <= streamingThreshold) {
                // stream the data
                Set<Triple> inferredTriples = TripleUtils.loadCompressedCSVTriples(inferredTriplesFile, false);
                log.info("Total triples to stream into Big Data: " + inferredTriples.size());
                cloud.streamObjectsIntoBigData(inferredTriples, TableUtils.getBigQueryTripleTable(table));
                log.info("All inferred triples are streamed into Big Data table");
            } else {
                // directly upload the data
                List<String> jobIds = cloud.loadLocalFilesIntoBigData(Lists.newArrayList(inferredTriplesFile),
                        TableUtils.getBigQueryTripleTable(table), false);
                log.info("All inferred triples are directly loaded into Big Data table, completed jobIds: "
                        + jobIds);
            }

            // reset empty retries
            emptyRetries = 0;
        } else {
            log.info("No new inferred triples");
            // increment empty retries
            emptyRetries++;
        }

        log.info("Total inferred triples so far = " + totalInferredTriples + ", current retry count: "
                + emptyRetries);

        ApiUtils.block(Config.getIntegerProperty(Constants.REASON_SLEEP_KEY, 20));

        // FIXME move into the particular cloud implementation service
        long elapsed = System.currentTimeMillis() - start;
        decoratedTable = "[" + table + "@-" + elapsed + "-]";

        log.info("Using table decorator: " + decoratedTable + ". Empty retries count: " + emptyRetries);

    } while (!(emptyRetries == maxRetries)); // end timestamp loop

    log.info("Finished reasoning, total inferred triples = " + totalInferredTriples);
}
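Note the constant-first comparison above: writing BigInteger.ZERO.equals(rows) instead of rows.equals(BigInteger.ZERO) is a defensive idiom, since calling equals on the constant can never throw even if the variable happened to be null (here rows is pre-seeded with ZERO, so the default also means "no rows"). A standalone illustration of the failure mode, not from the task source:

import java.math.BigInteger;

public class NullSafeZeroCheck {
    public static void main(String[] args) {
        BigInteger rows = null; // e.g. a result that was never populated
        System.out.println(BigInteger.ZERO.equals(rows)); // false, no exception
        try {
            System.out.println(rows.equals(BigInteger.ZERO));
        } catch (NullPointerException expected) {
            System.out.println("NPE when the null variable is the receiver");
        }
    }
}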
From source file:org.energy_home.jemma.javagal.layers.PropertiesManager.java
/**
 * Initialize a local StartupAttributeInfo object with predefined convenient
 * fixed values.
 */
private void initDefaultStartupAttributeInfo() {
    sai = new StartupAttributeInfo();
    sai.setShortAddress(0xFFFF); // 2 bytes
    sai.setDeviceType(LogicalType.ROUTER);
    sai.setExtendedPANId(BigInteger.ZERO); // 8 bytes
    // APS use extended Pan Id miss on Startup Attribute Info 8 bytes
    sai.setPANId(0xFFFF); // 2 bytes
    sai.setChannelMask((long) 0x00); // 4 bytes
    sai.setProtocolVersion((short) 0x02); // 1 byte
    sai.setStackProfile((short) 0x02); // 1 byte
    sai.setStartupControl((short) 0x00); // 1 byte
    sai.setStartupAttributeSetIndex((short) 0x00); // 1 byte
    sai.setTrustCenterAddress(new BigInteger("00000000000000000000000000000000", 16)); // 16 bytes
    sai.setTrustCenterMasterKey(new byte[] { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00 }); // 16 bytes
    sai.setNetworkKey(new byte[] { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00 }); // 16 bytes
    sai.setUseInsecureJoin(true); // 1 byte
    sai.setPreconfiguredLinkKey(new byte[] { (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00,
            (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00 }); // 16 bytes
    sai.setNetworkKeySeqNum((short) 0x00); // 1 byte
    sai.setNetworkKeyType(KeyType.HIGH_SECURITY); // 1 byte
    sai.setNetworkManagerAddress(0x0000); // 2 bytes
    sai.setScanAttempts((short) 0x03); // 1 byte
    sai.setTimeBetweenScans(0x6400); // 2 bytes
    sai.setRejoinInterval(0x0002); // 2 bytes
    sai.setMaxRejoinInterval(0x0E10); // 2 bytes
    sai.setIndirectPollRate(0x1C84); // 2 bytes
    sai.setParentRetryThreshold((short) 0x02); // 1 byte
    sai.setConcentratorFlag(false); // 1 byte
    sai.setConcentratorRadius((short) 0x05); // 1 byte
    sai.setConcentratorDiscoveryTime((short) 0x00); // 1 byte
}
From source file:com.google.uzaygezen.core.hbase.HBaseQueryTest.java
public Map<Pow2LengthBitSetRange, NodeValue<BigIntegerContent>> createRolledupCache(MockHTable table,
        MultiDimensionalSpec spec, SpaceFillingCurve sfc, int cacheSize) throws IOException {
    int[] elementLengths = Ints.toArray(new HilbertIndexMasks(sfc.getSpec()).cardinalities());
    BitVector[] path = new BitVector[elementLengths.length];
    for (int i = 0; i < path.length; ++i) {
        path[i] = BitVectorFactories.OPTIMAL.apply(elementLengths[path.length - i - 1]);
    }
    StreamingRollup<BitVector, BigIntegerContent> rollup = BoundedRollup
            .create(new BigIntegerContent(BigInteger.ZERO), cacheSize);
    Scan fullScan = new Scan();
    ResultScanner scanner = table.getScanner(fullScan);
    BitVector hilbertIndex = BitVectorFactories.OPTIMAL.apply(spec.sumBitsPerDimension());
    for (Result row : scanner) {
        hilbertIndex.copyFromBigEndian(row.getRow());
        for (int i = 0; i < path.length; ++i) {
            path[i] = path[i].clone();
        }
        BitVectorMath.split(hilbertIndex, path);
        // We should say the exact number of times. Saying one is correct, but suboptimal.
        BigIntegerContent v = new BigIntegerContent(BigInteger.ONE);
        rollup.feedRow(Iterators.<BitVector>forArray(path), v);
    }
    MapNode<BitVector, BigIntegerContent> rolledupTree = rollup.finish();
    Pow2LengthBitSetRangeFactory<BigIntegerContent> factory = Pow2LengthBitSetRangeFactory
            .create(Ints.asList(elementLengths));
    Map<Pow2LengthBitSetRange, NodeValue<BigIntegerContent>> rolledupMap = factory.apply(rolledupTree);
    return rolledupMap;
}
From source file:com.alertlogic.aws.analytics.poc.StreamUtils.java
/**
 * Split a shard by dividing the hash key space in half.
 *
 * @param streamName Name of the stream that contains the shard to split.
 * @param shardId The id of the shard to split.
 *
 * @throws IllegalArgumentException When either streamName or shardId is null or empty.
 * @throws LimitExceededException Shard limit for the account has been reached.
 * @throws ResourceNotFoundException The stream or shard cannot be found.
 * @throws InvalidArgumentException If the shard is closed and not eligible for splitting.
 * @throws AmazonClientException Error communicating with Amazon Kinesis.
 */
public void splitShardEvenly(String streamName, String shardId)
        throws LimitExceededException, ResourceNotFoundException, AmazonClientException,
        InvalidArgumentException, IllegalArgumentException {
    if (streamName == null || streamName.isEmpty()) {
        throw new IllegalArgumentException("stream name is required");
    }
    if (shardId == null || shardId.isEmpty()) {
        throw new IllegalArgumentException("shard id is required");
    }

    DescribeStreamResult result = kinesis.describeStream(streamName);
    StreamDescription description = result.getStreamDescription();

    // Find the shard we want to split
    Shard shardToSplit = null;
    for (Shard shard : description.getShards()) {
        if (shardId.equals(shard.getShardId())) {
            shardToSplit = shard;
            break;
        }
    }

    if (shardToSplit == null) {
        throw new ResourceNotFoundException(
                "Could not find shard with id '" + shardId + "' in stream '" + streamName + "'");
    }

    // Check if the shard is still open. Open shards do not have an ending sequence number.
    if (shardToSplit.getSequenceNumberRange().getEndingSequenceNumber() != null) {
        throw new InvalidArgumentException("Shard is CLOSED and is not eligible for splitting");
    }

    // Calculate the median hash key to use as the new starting hash key for the shard.
    BigInteger startingHashKey = new BigInteger(shardToSplit.getHashKeyRange().getStartingHashKey());
    BigInteger endingHashKey = new BigInteger(shardToSplit.getHashKeyRange().getEndingHashKey());

    BigInteger[] medianHashKey = startingHashKey.add(endingHashKey).divideAndRemainder(new BigInteger("2"));
    BigInteger newStartingHashKey = medianHashKey[0];
    if (!BigInteger.ZERO.equals(medianHashKey[1])) {
        // In order to more evenly distribute the new hash key ranges across the new shards, we
        // "round up" to the next integer when the current hash key range is not evenly divisible by 2.
        newStartingHashKey = newStartingHashKey.add(BigInteger.ONE);
    }

    // Submit the split shard request
    kinesis.splitShard(streamName, shardId, newStartingHashKey.toString());
}
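The midpoint logic above is self-contained enough to exercise on its own: divideAndRemainder returns the quotient and remainder in one call, and comparing the remainder against BigInteger.ZERO decides whether to round up. A standalone sketch with a hypothetical hash key range mirroring Kinesis' 128-bit key space (MedianHashKey and median are illustrative names):

import java.math.BigInteger;

public class MedianHashKey {
    static BigInteger median(BigInteger start, BigInteger end) {
        BigInteger[] qr = start.add(end).divideAndRemainder(BigInteger.valueOf(2));
        // Round up when the sum is odd, i.e. the remainder is not ZERO.
        return BigInteger.ZERO.equals(qr[1]) ? qr[0] : qr[0].add(BigInteger.ONE);
    }

    public static void main(String[] args) {
        BigInteger start = BigInteger.ZERO;
        BigInteger end = BigInteger.valueOf(2).pow(128).subtract(BigInteger.ONE);
        System.out.println(median(start, end)); // 2^127, the even split point
    }
}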
From source file:io.ecarf.core.cloud.task.processor.reason.phase0.DoReasonTask4.java
@Override
public void run() throws IOException {

    GoogleCloudService cloud = (GoogleCloudService) this.getCloudService();
    //String table = metadata.getValue(EcarfMetaData.ECARF_TABLE);
    //Set<String> terms = metadata.getTerms();
    //String schemaFile = metadata.getValue(EcarfMetaData.ECARF_SCHEMA);
    //String bucket = metadata.getBucket();

    Set<String> termsSet;

    if (terms == null) {
        // too large, probably saved as a file
        //String termsFile = metadata.getValue(EcarfMetaData.ECARF_TERMS_FILE);
        log.info("Using json file for terms: " + termsFile);
        Validate.notNull(termsFile);

        String localTermsFile = Utils.TEMP_FOLDER + termsFile;
        cloud.downloadObjectFromCloudStorage(termsFile, localTermsFile, bucket);

        // convert from JSON
        termsSet = FileUtils.jsonFileToSet(localTermsFile);
    } else {
        termsSet = ObjectUtils.csvToSet(terms);
    }

    String localSchemaFile = Utils.TEMP_FOLDER + schemaFile;
    // download the file from the cloud storage
    cloud.downloadObjectFromCloudStorage(schemaFile, localSchemaFile, bucket);

    // uncompress if compressed
    if (GzipUtils.isCompressedFilename(schemaFile)) {
        localSchemaFile = GzipUtils.getUncompressedFilename(localSchemaFile);
    }

    Map<String, Set<Triple>> allSchemaTriples = TripleUtils.getRelevantSchemaNTriples(localSchemaFile,
            TermUtils.RDFS_TBOX);

    // get all the triples we care about
    Map<Term, Set<Triple>> schemaTerms = new HashMap<>();

    for (String term : termsSet) {
        if (allSchemaTriples.containsKey(term)) {
            schemaTerms.put(new Term(term), allSchemaTriples.get(term));
        }
    }

    String decoratedTable = table;
    int emptyRetries = 0;
    int totalInferredTriples = 0;
    int maxRetries = Config.getIntegerProperty(Constants.REASON_RETRY_KEY, 6);

    String instanceId = cloud.getInstanceId();

    // timestamp loop
    do {

        //List<String> inferredFiles = new ArrayList<>();

        // First of all run all the queries asynchronously and remember the jobId and filename for each term
        for (Entry<Term, Set<Triple>> entry : schemaTerms.entrySet()) {

            Term term = entry.getKey();

            // add table decoration to table name
            String query = GenericRule.getQuery(entry.getValue(), decoratedTable);

            log.info("\nQuery: " + query);

            String jobId = cloud.startBigDataQuery(query);
            String encodedTerm = FileUtils.encodeFilename(term.getTerm());
            String filename = Utils.TEMP_FOLDER + encodedTerm + Constants.DOT_TERMS;

            // remember the filename and the jobId for this query
            term.setFilename(filename).setJobId(jobId).setEncodedTerm(encodedTerm);
        }

        long start = System.currentTimeMillis();

        String inferredTriplesFile = Utils.TEMP_FOLDER + instanceId + '_' + start + Constants.DOT_INF;

        List<String> productiveTerms = new ArrayList<>();

        int interimInferredTriples = 0;

        try (PrintWriter writer = new PrintWriter(
                new GZIPOutputStream(new FileOutputStream(inferredTriplesFile), Constants.GZIP_BUF_SIZE))) {

            // now loop through the queries
            for (Entry<Term, Set<Triple>> entry : schemaTerms.entrySet()) {

                Term term = entry.getKey();
                log.info("Reasoning for Term: " + term);

                Set<Triple> schemaTriples = entry.getValue();
                log.info("Schema Triples: " + Joiner.on('\n').join(schemaTriples));

                List<String> select = GenericRule.getSelect(schemaTriples);

                // block and wait for each job to complete, then save results to a file
                BigInteger rows = BigInteger.ZERO;

                try {
                    rows = cloud.saveBigQueryResultsToFile(term.getJobId(), term.getFilename()).getTotalRows();
                } catch (IOException ioe) {
                    // transient backend errors
                    log.warn("failed to save query results to file, jobId: " + term.getJobId());
                }

                log.info("Query found " + rows + " rows");

                // only process if triples are found matching this term
                if (!BigInteger.ZERO.equals(rows)) {

                    int inferredTriplesCount = this.inferAndSaveTriplesToFile(term, select, schemaTriples,
                            rows, decoratedTable, writer);

                    productiveTerms.add(term.getTerm());

                    interimInferredTriples += inferredTriplesCount;

                    this.totalRows = this.totalRows.add(rows);
                }
            }
        }

        totalInferredTriples += interimInferredTriples;

        if (interimInferredTriples > 0) {
            // TODO stream smaller numbers of inferred triples
            // TODO try uploading from cloud storage
            int streamingThreshold = Config.getIntegerProperty("ecarf.io.reasoning.streaming.threshold",
                    100000);

            log.info("Inserting " + interimInferredTriples + " inferred triples into Big Data table for "
                    + productiveTerms.size() + " productive terms. Filename: " + inferredTriplesFile);

            if (interimInferredTriples <= streamingThreshold) {
                // stream the data
                Set<Triple> inferredTriples = TripleUtils.loadCompressedCSVTriples(inferredTriplesFile, false);
                log.info("Total triples to stream into Big Data: " + inferredTriples.size());
                cloud.streamObjectsIntoBigData(inferredTriples, TableUtils.getBigQueryTripleTable(table));
                log.info("All inferred triples are streamed into Big Data table");
            } else {
                // load the data through cloud storage:
                // upload the file to cloud storage
                log.info("Uploading inferred triples file into cloud storage: " + inferredTriplesFile);
                StorageObject file = cloud.uploadFileToCloudStorage(inferredTriplesFile, bucket);
                log.info("File " + file + " uploaded successfully. Now loading it into big data.");

                String jobId = cloud.loadCloudStorageFilesIntoBigData(Lists.newArrayList(file.getUri()),
                        TableUtils.getBigQueryTripleTable(table), false);
                log.info("All inferred triples are loaded into Big Data table through cloud storage, "
                        + "completed jobId: " + jobId);
            }

            // reset empty retries
            emptyRetries = 0;
        } else {
            log.info("No new inferred triples");
            // increment empty retries
            emptyRetries++;
        }

        log.info("Total inferred triples so far = " + totalInferredTriples + ", current retry count: "
                + emptyRetries);

        ApiUtils.block(Config.getIntegerProperty(Constants.REASON_SLEEP_KEY, 20));

        // FIXME move into the particular cloud implementation service
        long elapsed = System.currentTimeMillis() - start;
        decoratedTable = "[" + table + "@-" + elapsed + "-]";

        log.info("Using table decorator: " + decoratedTable + ". Empty retries count: " + emptyRetries);

    } while (!(emptyRetries == maxRetries)); // end timestamp loop

    log.info("Finished reasoning, total inferred triples = " + totalInferredTriples);
    log.info("Number of avoided duplicate terms = " + this.duplicates);
    log.info("Total rows retrieved from big data = " + this.totalRows);
}
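One detail worth calling out in this variant: this.totalRows = this.totalRows.add(rows) reassigns the field because BigInteger is immutable; add returns a new object and leaves the receiver untouched. A minimal standalone illustration of the pitfall:

import java.math.BigInteger;

public class ImmutableAccumulation {
    public static void main(String[] args) {
        BigInteger total = BigInteger.ZERO;
        total.add(BigInteger.TEN);         // result discarded: total is still 0
        System.out.println(total);         // 0
        total = total.add(BigInteger.TEN); // correct: keep the returned value
        System.out.println(total);         // 10
    }
}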