List of usage examples for java.math.BigInteger.ZERO
BigInteger.ZERO is the BigInteger constant zero. The examples below use it as an accumulator seed, as the right-hand operand of compareTo for sign and zero tests, and as a null-safe or empty-result default.
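A minimal, self-contained sketch of the most common BigInteger.ZERO idioms that recur in the examples below: seeding an accumulator, testing the sign of a value with compareTo, and falling back to zero for missing input.

import java.math.BigInteger;

public class BigIntegerZeroBasics {
    public static void main(String[] args) {
        // Accumulator seed: ZERO is the additive identity.
        BigInteger sum = BigInteger.ZERO;
        for (long v : new long[] { 3, 4, 5 }) {
            sum = sum.add(BigInteger.valueOf(v));
        }
        System.out.println(sum); // 12

        // Sign test: compareTo(BigInteger.ZERO) is negative, zero, or positive.
        System.out.println(sum.compareTo(BigInteger.ZERO) > 0); // true

        // Null-safe default for a missing value.
        String raw = null;
        BigInteger parsed = (raw == null) ? BigInteger.ZERO : new BigInteger(raw);
        System.out.println(parsed); // 0
    }
}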
From source file: org.nd4j.linalg.util.BigDecimalMath.java

/**
 * Multiply and round.
 *
 * @param x The left factor.
 * @param f The right factor.
 * @return The product x*f.
 */
static public BigDecimal multiplyRound(final BigDecimal x, final Rational f) {
    if (f.compareTo(BigInteger.ZERO) == 0) {
        return BigDecimal.ZERO;
    } else {
        /* Convert the rational value with two digits of extra precision */
        MathContext mc = new MathContext(2 + x.precision());
        BigDecimal fbd = f.BigDecimalValue(mc);
        /* and the precision of the product is then dominated by the precision in x */
        return multiplyRound(x, fbd);
    }
}
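The zero check above short-circuits the multiplication when the rational factor compares equal to BigInteger.ZERO. A standalone sketch of the same pattern using only JDK types; the Rational class in the example belongs to the surrounding library, so a plain numerator/denominator pair stands in for it here.

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;

public class ZeroShortCircuit {
    // Multiply x by the fraction num/den, returning BigDecimal.ZERO immediately
    // when the numerator compares equal to BigInteger.ZERO.
    static BigDecimal multiplyRound(BigDecimal x, BigInteger num, BigInteger den) {
        if (num.compareTo(BigInteger.ZERO) == 0) {
            return BigDecimal.ZERO;
        }
        // Convert the fraction with two extra digits of precision, as in the example above.
        MathContext mc = new MathContext(2 + x.precision());
        BigDecimal f = new BigDecimal(num).divide(new BigDecimal(den), mc);
        return x.multiply(f).round(new MathContext(x.precision()));
    }

    public static void main(String[] args) {
        System.out.println(multiplyRound(new BigDecimal("3.1416"), BigInteger.valueOf(2), BigInteger.valueOf(3)));
        System.out.println(multiplyRound(new BigDecimal("3.1416"), BigInteger.ZERO, BigInteger.valueOf(3)));
    }
}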
From source file: org.alfresco.opencmis.CMISTest.java

/**
 * ACE-33
 *
 * Cmis Item support
 */
@Test
public void testItems() {
    withCmisService(new CmisServiceCallback<String>() {
        @Override
        public String execute(CmisService cmisService) {
            List<RepositoryInfo> repositories = cmisService.getRepositoryInfos(null);
            assertTrue(repositories.size() > 0);
            RepositoryInfo repo = repositories.get(0);
            String repositoryId = repo.getId();

            TypeDefinition def = cmisService.getTypeDefinition(repositoryId, "cmis:item", null);
            assertNotNull("the cmis:item type is not defined", def);

            @SuppressWarnings("unused")
            TypeDefinition p = cmisService.getTypeDefinition(repositoryId, "I:cm:person", null);
            assertNotNull("the I:cm:person type is not defined", def);

            ObjectList result = cmisService.query(repositoryId, "select * from cm:person", Boolean.FALSE,
                    Boolean.TRUE, IncludeRelationships.NONE, "", BigInteger.TEN, BigInteger.ZERO, null);
            assertTrue("", result.getNumItems().intValue() > 0);

            return "";
        };
    }, CmisVersion.CMIS_1_1);
}
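In the query call above, the two BigInteger arguments are the paging limits: BigInteger.TEN for maxItems and BigInteger.ZERO for skipCount, i.e. the first page of at most ten rows. A small sketch of that convention with a hypothetical page(...) helper rather than the real CmisService API.

import java.math.BigInteger;
import java.util.List;

public class PagingSketch {
    // Return at most maxItems elements of source, starting at skipCount.
    // BigInteger.ZERO as skipCount means "start from the first element".
    static <T> List<T> page(List<T> source, BigInteger maxItems, BigInteger skipCount) {
        int from = skipCount.min(BigInteger.valueOf(source.size())).intValueExact();
        int to = Math.min(source.size(), from + maxItems.intValueExact());
        return source.subList(from, to);
    }

    public static void main(String[] args) {
        List<String> rows = List.of("a", "b", "c");
        // First page of at most ten rows, mirroring BigInteger.TEN / BigInteger.ZERO above.
        System.out.println(page(rows, BigInteger.TEN, BigInteger.ZERO)); // [a, b, c]
    }
}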
From source file: org.nd4j.linalg.util.BigDecimalMath.java

/**
 * Multiply and round.
 *
 * @param x The left factor.
 * @param n The right factor.
 * @return the product x*n
 */
static public BigDecimal multiplyRound(final BigDecimal x, final BigInteger n) {
    BigDecimal resul = x.multiply(new BigDecimal(n));
    /* The estimation of the absolute error in the result is |n*err(x)| */
    MathContext mc = new MathContext(n.compareTo(BigInteger.ZERO) != 0 ? x.precision() : 0);
    return resul.round(mc);
}
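Here BigInteger.ZERO selects the rounding precision: a non-zero factor keeps the precision of x, while a zero factor falls back to precision 0, which in a MathContext means unlimited precision, so the exact zero product is returned unchanged. A minimal check of that behavior:

import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;

public class ZeroPrecisionCheck {
    public static void main(String[] args) {
        BigDecimal x = new BigDecimal("123.456");
        BigInteger n = BigInteger.ZERO;

        BigDecimal product = x.multiply(new BigDecimal(n)); // exactly zero, scale 3
        // Precision 0 means "unlimited": round(mc) leaves the value untouched.
        MathContext mc = new MathContext(n.compareTo(BigInteger.ZERO) != 0 ? x.precision() : 0);
        System.out.println(product.round(mc)); // 0.000
    }
}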
From source file: com.flexive.core.storage.genericSQL.GenericTreeStorage.java

protected BigInteger getNodeBounds(ResultSet rs, int index) throws SQLException {
    final String value = rs.getString(index);
    return value == null ? BigInteger.ZERO : new BigInteger(value);
}
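BigInteger.ZERO serves as a null-safe default when a nullable column is read back as a string. A standalone sketch of the same idiom, with a plain String standing in for the ResultSet column value:

import java.math.BigInteger;

public class NullSafeDefault {
    // Parse a possibly-null decimal string, falling back to BigInteger.ZERO.
    static BigInteger parseOrZero(String value) {
        return value == null ? BigInteger.ZERO : new BigInteger(value);
    }

    public static void main(String[] args) {
        System.out.println(parseOrZero("9007199254740993")); // 9007199254740993
        System.out.println(parseOrZero(null));               // 0
    }
}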
From source file: org.sparkbit.jsonrpc.SparkBitJSONRPCServiceImpl.java
private synchronized String sendassetusing_impl(String walletID, String txid, Long vout, String address, String assetRef, Double quantity, Boolean senderPays, String message, Double btcAmount) throws com.bitmechanic.barrister.RpcException { String sendTxHash = null;/*from w w w . j ava 2s. c om*/ boolean sendValidated = false; boolean sendSuccessful = false; Wallet w = getWalletForWalletName(walletID); if (w == null) { JSONRPCError.WALLET_NOT_FOUND.raiseRpcException(); } // Check send with txid and vout Sha256Hash sendWithTxidHash = null; boolean canSpendSendWithTxOut = false; if (txid != null) { try { sendWithTxidHash = new Sha256Hash(txid); } catch (IllegalArgumentException e) { // Not a valid tx hash string JSONRPCError.INVALID_TXID_HASH.raiseRpcException(); } canSpendSendWithTxOut = isTxOutSpendable(w, sendWithTxidHash, vout.intValue()); } if (quantity <= 0.0) { JSONRPCError.SEND_ASSET_AMOUNT_TOO_LOW.raiseRpcException(); } // BTC send amount, if null, use default amount of 10,000 satoshis. String sendAmount; if (btcAmount == null) { sendAmount = Utils.bitcoinValueToPlainString(BitcoinModel.COINSPARK_SEND_MINIMUM_AMOUNT); } else { double d = btcAmount.doubleValue(); if (d <= 0.0) { JSONRPCError.SEND_BITCOIN_AMOUNT_TOO_LOW.raiseRpcException(); } sendAmount = btcAmount.toString(); } BigInteger bitcoinAmountSatoshis = Utils.toNanoCoins(sendAmount); // Is the BTC amount more than what is in the wallet? BigInteger totalSpend = bitcoinAmountSatoshis.add(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE); BigInteger availableBalance = w.getBalance(Wallet.BalanceType.AVAILABLE); if (totalSpend.compareTo(availableBalance) > 0) { JSONRPCError.SEND_BITCOIN_INSUFFICIENT_MONEY.raiseRpcException(); } // Does the BTC amount respect the migration fees of any assets? boolean migrationSafe = CSMiscUtils.canSafelySpendWhileRespectingMigrationFee(this.controller, w, bitcoinAmountSatoshis); if (!migrationSafe) { BigInteger migrationFee = CSMiscUtils.calcMigrationFeeSatoshis(controller, w); JSONRPCError.SEND_INSUFFICIENT_MONEY_MIGRATION.raiseRpcException( "Need to keep at least " + Utils.bitcoinValueToFriendlyString(migrationFee) + " BTC."); } CoinSparkPaymentRef paymentRef = null; String bitcoinAddress = address; if (!address.startsWith("s")) { JSONRPCError.ADDRESS_NOT_COINSPARK_ADDRESS.raiseRpcException(); } else { bitcoinAddress = CSMiscUtils.getBitcoinAddressFromCoinSparkAddress(address); if (bitcoinAddress == null) { JSONRPCError.COINSPARK_ADDRESS_INVALID.raiseRpcException(); } CoinSparkAddress csa = CSMiscUtils.decodeCoinSparkAddress(address); if (!CSMiscUtils.canSendAssetsToCoinSparkAddress(csa)) { JSONRPCError.COINSPARK_ADDRESS_MISSING_ASSET_FLAG.raiseRpcException(); } if (message != null && !CSMiscUtils.canSendTextMessageToCoinSparkAddress(csa)) { JSONRPCError.COINSPARK_ADDRESS_MISSING_TEXT_MESSAGE_FLAG.raiseRpcException(); } // payment ref? 
int flags = csa.getAddressFlags(); if ((flags & CoinSparkAddress.COINSPARK_ADDRESS_FLAG_PAYMENT_REFS) > 0) { paymentRef = csa.getPaymentRef(); // log.debug(">>>> CoinSpark address has payment refs flag set: " + paymentRef.toString()); } } boolean isValid = CSMiscUtils.validateBitcoinAddress(bitcoinAddress, controller); if (!isValid) { JSONRPCError.BITCOIN_ADDRESS_INVALID.raiseRpcException(); } String filename = getFullPathForWalletName(walletID); final WalletData wd = this.controller.getModel().getPerWalletModelDataByWalletFilename(filename); if (wd.isBusy()) { JSONRPCError.WALLEY_IS_BUSY.raiseRpcException(); } else { wd.setBusy(true); wd.setBusyTaskKey("jsonrpc.busy.sendasset"); this.controller.fireWalletBusyChange(true); } Transaction sendTransaction = null; try { // -- boilerplate ends here.... CSAsset asset = getAssetForAssetRefString(w, assetRef); if (asset == null) { if (isAssetRefValid(assetRef)) { JSONRPCError.ASSETREF_NOT_FOUND.raiseRpcException(); } else { JSONRPCError.ASSETREF_INVALID.raiseRpcException(); } } if (asset.getAssetState() != CSAsset.CSAssetState.VALID) { if (!CSMiscUtils.canSendInvalidAsset(controller)) { JSONRPCError.ASSET_STATE_INVALID.raiseRpcException(); } } // Check number of confirms int lastHeight = w.getLastBlockSeenHeight(); CoinSparkAssetRef assetReference = asset.getAssetReference(); if (assetReference != null) { final int blockIndex = (int) assetReference.getBlockNum(); final int numConfirmations = lastHeight - blockIndex + 1; // 0 means no confirmation, 1 is yes for sa int threshold = NUMBER_OF_CONFIRMATIONS_TO_SEND_ASSET_THRESHOLD; // FIXME: REMOVE/COMMENT OUT BEFORE RELEASE? String sendAssetWithJustOneConfirmation = controller.getModel() .getUserPreference("sendAssetWithJustOneConfirmation"); if (Boolean.TRUE.toString().equals(sendAssetWithJustOneConfirmation)) { threshold = 1; } //System.out.println(">>>> " + CSMiscUtils.getHumanReadableAssetRef(asset) + " num confirmations " + numConfirmations + ", threshold = " + threshold); if (numConfirmations < threshold) { JSONRPCError.ASSET_NOT_CONFIRMED.raiseRpcException(); } } String displayQtyString = new BigDecimal(quantity).toPlainString(); BigInteger assetAmountRawUnits = CSMiscUtils.getRawUnitsFromDisplayString(asset, displayQtyString); int assetID = asset.getAssetID(); BigInteger spendableAmount = w.CS.getAssetBalance(assetID).spendable; log.info("Want to send: " + assetAmountRawUnits + " , AssetID=" + assetID + ", total=" + w.CS.getAssetBalance(assetID).total + ", spendable=" + w.CS.getAssetBalance(assetID).spendable); // String sendAmount = Utils.bitcoinValueToPlainString(BitcoinModel.COINSPARK_SEND_MINIMUM_AMOUNT); CoinSparkGenesis genesis = asset.getGenesis(); long desiredRawUnits = assetAmountRawUnits.longValue(); short chargeBasisPoints = genesis.getChargeBasisPoints(); long rawFlatChargeAmount = genesis.getChargeFlat(); boolean chargeExists = (rawFlatChargeAmount > 0 || chargeBasisPoints > 0); if (chargeExists) { if (senderPays) { long x = genesis.calcGross(desiredRawUnits); assetAmountRawUnits = new BigInteger(String.valueOf(x)); } else { // We don't have to do anything if recipient pays, just send gross amount. // calcNet() returns what the recipient will receive, but it's not what we send. } } if (assetAmountRawUnits.compareTo(spendableAmount) > 0) { JSONRPCError.ASSET_INSUFFICIENT_BALANCE.raiseRpcException(); } // Create a SendRequest. 
Address sendAddressObject; String sendAddress = bitcoinAddress; sendAddressObject = new Address(controller.getModel().getNetworkParameters(), sendAddress); //SendRequest sendRequest = SendRequest.to(sendAddressObject, Utils.toNanoCoins(sendAmount)); //public static SendRequest to(Address destination,BigInteger value,int assetID, BigInteger assetValue,int split) { //BigInteger assetAmountRawUnits = new BigInteger(assetAmount); // BigInteger bitcoinAmountSatoshis = Utils.toNanoCoins(sendAmount); Wallet.SendRequest sendRequest = Wallet.SendRequest.to(sendAddressObject, bitcoinAmountSatoshis, assetID, assetAmountRawUnits, 1); sendRequest.ensureMinRequiredFee = true; sendRequest.fee = BigInteger.ZERO; sendRequest.feePerKb = BitcoinModel.SEND_FEE_PER_KB_DEFAULT; // Note - Request is populated with the AES key in the SendBitcoinNowAction after the user has entered it on the SendBitcoinConfirm form. // Send with txout vout if (canSpendSendWithTxOut) { boolean addedInput = sendRequest.addInput(w, new CSTransactionOutput(sendWithTxidHash, vout.intValue())); if (!addedInput) { // Failed to add input, so throw exception JSONRPCError.SEND_WITH_TXID_VOUT_FAILED.raiseRpcException(); } } // Send with payment ref - if it exists and is not 0 which SparkBit treats semantically as null if (paymentRef != null && paymentRef.getRef() != 0) { sendRequest.setPaymentRef(paymentRef); } // Set up message if one exists boolean isEmptyMessage = false; if (message == null || message.trim().isEmpty()) { isEmptyMessage = true; } if (!isEmptyMessage) { CoinSparkMessagePart[] parts = { CSMiscUtils.createPlainTextCoinSparkMessagePart(message) }; String[] serverURLs = CSMiscUtils.getMessageDeliveryServersArray(this.controller); sendRequest.setMessage(parts, serverURLs); } // Complete it (which works out the fee) but do not sign it yet. log.info("Just about to complete the tx (and calculate the fee)..."); // there is enough money, so let's do it for real now w.completeTx(sendRequest, false); sendValidated = true; log.info("The fee after completing the transaction was " + sendRequest.fee); // Let's do it for real now. sendTransaction = this.controller.getMultiBitService().sendCoins(wd, sendRequest, null); if (sendTransaction == null) { // a null transaction returned indicates there was not // enough money (in spite of our validation) JSONRPCError.ASSET_INSUFFICIENT_BALANCE.raiseRpcException(); } else { sendSuccessful = true; sendTxHash = sendTransaction.getHashAsString(); } if (sendSuccessful) { // There is enough money. /* If sending assets or BTC to a coinspark address, record transaction id --> coinspark address, into hashmap so we can use when displaying transactions */ if (address.startsWith("s")) { SparkBitMapDB.INSTANCE.putSendCoinSparkAddressForTxid(sendTxHash, address); } } else { // There is not enough money } //--- bolilerplate begins... } catch (InsufficientMoneyException ime) { JSONRPCError.ASSET_INSUFFICIENT_BALANCE.raiseRpcException(); } catch (com.bitmechanic.barrister.RpcException e) { throw (e); } catch (CSExceptions.CannotEncode e) { JSONRPCError.SEND_MESSAGE_CANNOT_ENCODE.raiseRpcException(e.getMessage()); } catch (Exception e) { JSONRPCError.throwAsRpcException("Could not send asset due to error: ", e); } finally { // Save the wallet. try { this.controller.getFileHandler().savePerWalletModelData(wd, false); } catch (WalletSaveException e) { // log.error(e.getMessage(), e); } if (sendSuccessful) { // This returns immediately if rpcsendassettimeout is 0. 
JSONRPCController.INSTANCE.waitForTxSelectable(sendTransaction); // JSONRPCController.INSTANCE.waitForTxBroadcast(sendTxHash); } // Declare that wallet is no longer busy with the task. wd.setBusyTaskKey(null); wd.setBusy(false); this.controller.fireWalletBusyChange(false); } if (sendSuccessful) { controller.fireRecreateAllViews(false); } return sendTxHash; }
From source file: org.alfresco.opencmis.AlfrescoCmisServiceImpl.java

@Override
public ObjectList getObjectRelationships(String repositoryId, String objectId,
        Boolean includeSubRelationshipTypes, RelationshipDirection relationshipDirection, String typeId,
        String filter, Boolean includeAllowableActions, BigInteger maxItems, BigInteger skipCount,
        ExtensionsData extension) {
    checkRepositoryId(repositoryId);

    // what kind of object is it?
    CMISNodeInfo info = getOrCreateNodeInfo(objectId, "Object");

    if (info.isVariant(CMISObjectVariant.ASSOC)) {
        throw new CmisInvalidArgumentException("Object is a relationship!");
    }

    if (info.isVariant(CMISObjectVariant.VERSION)) {
        throw new CmisInvalidArgumentException("Object is a document version!");
    }

    // check if the relationship base type is requested
    if (BaseTypeId.CMIS_RELATIONSHIP.value().equals(typeId)) {
        boolean isrt = (includeSubRelationshipTypes == null ? false : includeSubRelationshipTypes.booleanValue());
        if (isrt) {
            // all relationships are a direct subtype of the base type in
            // Alfresco -> remove filter
            typeId = null;
        } else {
            // there are no relationships of the base type in Alfresco ->
            // return empty list
            ObjectListImpl result = new ObjectListImpl();
            result.setHasMoreItems(false);
            result.setNumItems(BigInteger.ZERO);
            result.setObjects(new ArrayList<ObjectData>());
            return result;
        }
    }

    return connector.getObjectRelationships(info.getNodeRef(), relationshipDirection, typeId, filter,
            includeAllowableActions, maxItems, skipCount);
}
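The empty-list branch above reports an empty page by setting the item count to BigInteger.ZERO. A self-contained sketch of that pattern, with a hypothetical Page record standing in for the OpenCMIS ObjectListImpl:

import java.math.BigInteger;
import java.util.Collections;
import java.util.List;

public class EmptyPageSketch {
    // Hypothetical stand-in for an ObjectList-style result: items plus a total count.
    record Page<T>(List<T> objects, BigInteger numItems, boolean hasMoreItems) {}

    static <T> Page<T> emptyPage() {
        return new Page<>(Collections.emptyList(), BigInteger.ZERO, false);
    }

    public static void main(String[] args) {
        Page<String> page = emptyPage();
        System.out.println(page.numItems()); // 0
    }
}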
From source file: com.example.util.FileUtils.java

/**
 * Counts the size of a directory recursively (sum of the length of all files).
 *
 * @param directory
 *            directory to inspect, must not be {@code null}
 * @return size of directory in bytes, 0 if directory is security restricted.
 * @throws NullPointerException
 *             if the directory is {@code null}
 * @since 2.4
 */
public static BigInteger sizeOfDirectoryAsBigInteger(File directory) {
    checkDirectory(directory);

    final File[] files = directory.listFiles();
    if (files == null) { // null if security restricted
        return BigInteger.ZERO;
    }
    BigInteger size = BigInteger.ZERO;

    for (final File file : files) {
        try {
            if (!isSymlink(file)) {
                size = size.add(BigInteger.valueOf(sizeOf(file)));
            }
        } catch (IOException ioe) {
            // Ignore exceptions caught when asking if a File is a symlink.
        }
    }

    return size;
}
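The method above uses BigInteger.ZERO twice: as the return value for an unreadable directory and as the seed of the running total, so arbitrarily large directory sizes cannot overflow a long. A minimal, non-recursive sketch of the same accumulator pattern using only JDK classes:

import java.io.File;
import java.math.BigInteger;

public class DirectorySizeSketch {
    // Sum the lengths of the regular files directly inside a directory.
    static BigInteger sizeOfDirectory(File directory) {
        File[] files = directory.listFiles();
        if (files == null) { // null if not a directory or security restricted
            return BigInteger.ZERO;
        }
        BigInteger size = BigInteger.ZERO; // seed the accumulator with zero
        for (File file : files) {
            if (file.isFile()) {
                size = size.add(BigInteger.valueOf(file.length()));
            }
        }
        return size;
    }

    public static void main(String[] args) {
        System.out.println(sizeOfDirectory(new File(".")));
    }
}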
From source file: org.proteomecommons.tranche.cacheupdater.CacheUpdater.java
private void updateTagsDatabase() { log.println("Starting to update the tags db"); log.println("Deleting entries with no associated tags."); try {/*from ww w .j av a 2 s . c o m*/ // check every entry List<Entry> entries = Database.getEntries(); for (Entry entry : entries) { try { int size = Database.getTags(entry.getId()).size(); if (size == 0) { Database.deleteEntry(entry.getId()); log.println("Deleted entry #" + entry.getId()); } } catch (Exception e) { log.println("ERROR: " + e.getMessage()); err.println(e.getMessage()); } } } catch (Exception e) { log.println("ERROR: " + e.getMessage()); err.println(e.getMessage()); } log.println("Correcting tag names in all entries."); try { // check for and update tag names according to the TagNames list for (String name : TagNames.changedNames.keySet()) { for (Tag tag : Database.getTags(name)) { removedTag(tag.getEntryId(), name, tag.getValue().trim()); addedTag(tag.getEntryId(), TagNames.changedNames.get(name), tag.getValue().trim()); Database.updateTag(tag.getId(), TagNames.changedNames.get(name), tag.getValue().trim()); } for (Tag tag : Database.getTags(name + "%")) { // make sure there was not another tag name that starts the same ("Tranche:Link" and "Tranche:Link Name") String following = ""; { // if there is more to the tag name than the one we are looking for if (tag.getName().length() > name.length()) { // skip this tag if what is left is not a number try { Long.valueOf(tag.getName().substring(name.length()).trim()); following = tag.getName().substring(name.length()); } catch (Exception e) { break; } } } // get the new name String newName = TagNames.changedNames.get(name) + following; removedTag(tag.getEntryId(), name, tag.getValue().trim()); addedTag(tag.getEntryId(), newName, tag.getValue().trim()); Database.updateTag(tag.getId(), newName, tag.getValue().trim()); } } } catch (Exception e) { log.println("ERROR: An unknown problem occurred revising tag names."); err.println(e.getMessage()); } log.println("Making sure all tags that start with \"Tranche:\" are accompanied by a number."); try { // check every entry List<Entry> entries = Database.getEntries(); for (Entry entry : entries) { // create a data structure for the set of tags Map<String, List<String>> tagMap = makeTagMap(entry.getId()); // for all other links that start with "Tranche:", if it does not end in a number, then move it to the first available spot List<String> toRemove = new ArrayList<String>(); Map<String, String> toAdd = new HashMap<String, String>(); for (String name : tagMap.keySet()) { if (name.startsWith("Tranche:")) { // stop here if the last part of this tag is a number try { Long.valueOf(name.split(" ")[name.split(" ").length - 1].trim()); // go to the next tag continue; } catch (Exception e) { } int index = 1; for (String value : tagMap.get(name)) { String newName = name + " " + index; // find an empty number while (tagMap.get(newName) != null) { index++; newName = name + " " + index; } removedTag(entry.getId(), name, value.trim()); toAdd.put(newName, value.trim()); addedTag(entry.getId(), newName, value.trim()); } toRemove.add(name); } } // implement the removal the tags for (String name : toRemove) { tagMap.remove(name); } // implement the addition of tags for (String name : toAdd.keySet()) { if (tagMap.get(name) == null) { tagMap.put(name, new ArrayList<String>()); } tagMap.get(name).add(toAdd.get(name)); } // update the database try { // delete all the old tags if (makeChanges) { Database.deleteTags(entry.getId()); } // add all the tags for (String tagName : 
tagMap.keySet()) { for (String tagValue : tagMap.get(tagName)) { if (makeChanges) { Database.addTag(entry.getId(), tagName, tagValue.trim()); } } } } catch (Exception e) { log.println("ERROR: There was a problem changing the database."); err.println(e.getMessage()); } } } catch (Exception e) { log.println("ERROR: An unknown problem occurred switching bad Tranche tag names."); err.println(e.getMessage()); } log.println("Updating tags for all existing entries."); try { // for all of the entries that have a tag with the name that starts with TagNames.TRANCHE_LINK for (Entry entry : Database.getEntries()) { log.println("Updating the tags for entry " + entry.getId()); // create a data structure for the set of tags Map<String, List<String>> tagMap = makeTagMap(entry.getId()); // get the number of links in this entry int links = getNumberOfLinks(tagMap); log.println(links + " Tranche Links found in the entry."); // for all of the tranche links for (int linkNum = 1; linkNum <= links; linkNum++) { try { // try to make the hash for this tranche link BigHash hash = null; try { hash = BigHash .createHashFromString(tagMap.get(TagNames.TRANCHE_LINK + " " + linkNum).get(0)); // remember that this hash is in the tags db hashesInTags.add(hash); } catch (Exception e) { // bad hash - remove it for (String value : tagMap.get(TagNames.TRANCHE_LINK + " " + linkNum)) { removedTag(entry.getId(), TagNames.TRANCHE_LINK + " " + linkNum, value); } tagMap.remove(TagNames.TRANCHE_LINK + " " + linkNum); editedEntries.add(entry.getId()); // update the database saveToDatabase(entry.getId(), tagMap); continue; } log.println("Trying hash: " + hash.toString()); // set up for the update MetaData md = null; ProjectFile pf = null; GetFileTool gft = null; // need to know if the meta data has changed boolean metaDataChanged = false; try { // set up the getfiletool gft = new GetFileTool(); gft.setValidate(validate); gft.setHash(hash); // increment the chunk meta count by the meta data chunkAndMetaCount++; // get the meta data if (md == null) { try { md = gft.getMetaData(); } catch (CantVerifySignatureException e) { addInvalid(entry.getId(), hash.toString(), tagMap, linkNum, e); log.println("ERROR: Downloaded meta data is invalid."); } catch (Exception e) { if (e.getMessage() != null && e.getMessage().toLowerCase().contains("can't find metadata")) { addMissing(entry.getId(), hash.toString(), tagMap, linkNum); log.println("ERROR: Could not get meta data."); } else { addInvalid(entry.getId(), hash.toString(), tagMap, linkNum, e); log.println("ERROR: Downloaded meta data is invalid."); } } } } catch (Exception e) { err.println(e.getMessage()); } // tags that require the meta data to check or add if (md != null) { // make sure there only valid share meta data if encrypted annotations for (MetaDataAnnotation mda : md.getAnnotations()) { // remove if this is not a valid share md if encrypted annotation if (!mda.getName().equals( MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION.getName()) && mda.getValue().toLowerCase() .equals(MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION .getValue().toLowerCase())) { removedTag(entry.getId(), hash, MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION.getName(), MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } } // sift through all of the meta data annotations for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals( 
MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION.getName())) { if (tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum, new ArrayList()); } // should only be one entry if (tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).size() > 1) { // remove all but the first entry for this tag name for (int i = tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum) .size() - 1; i > 0; i--) { String removedValue = tagMap .get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).remove(i); removedTag(entry.getId(), hash, TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum, removedValue, false); } } // in case there are no entries, just add the tag if (tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).size() == 0) { // add the tag tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum) .add(mda.getValue().trim()); addedTag(entry.getId(), hash, TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum, mda.getValue(), false); } else if (tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum) .size() == 1) { if (!tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).get(0) .equals(mda.getValue().trim())) { // edit the tag editedTag(entry.getId(), hash, TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum, tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum) .get(0), mda.getValue(), false); tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum) .add(mda.getValue().trim()); } } } else if (mda.getName().equals(MetaDataAnnotation.PROP_DELETE_NEW_VERSION)) { if (tagMap.get(TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum, new ArrayList()); } if (!tagMap.get(TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum) .contains(mda.getValue())) { try { BigHash deleteNewVersionHash = BigHash .createHashFromString(mda.getValue()); tagMap.get(TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum) .add(deleteNewVersionHash.toString()); addedTag(entry.getId(), hash, TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum, deleteNewVersionHash.toString(), false); if (!metaDataContainsAnnotation(deleteNewVersionHash, MetaDataAnnotation.PROP_DELETE_OLD_VERSION, hash.toString())) { addMetaDataAnnotationNow(deleteNewVersionHash, MetaDataAnnotation.PROP_DELETE_OLD_VERSION, hash.toString()); } } catch (Exception e) { // the hash is bad - remove the annotation removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_DELETE_NEW_VERSION, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } } } else if (mda.getName().equals(MetaDataAnnotation.PROP_DELETE_OLD_VERSION)) { if (tagMap.get(TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum, new ArrayList()); } if (!tagMap.get(TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum) .contains(mda.getValue())) { try { BigHash deleteOldVersionHash = BigHash .createHashFromString(mda.getValue()); tagMap.get(TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum) .add(deleteOldVersionHash.toString()); addedTag(entry.getId(), hash, TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum, deleteOldVersionHash.toString(), false); if (!metaDataContainsAnnotation(deleteOldVersionHash, MetaDataAnnotation.PROP_DELETE_NEW_VERSION, hash.toString())) { addMetaDataAnnotationNow(deleteOldVersionHash, MetaDataAnnotation.PROP_DELETE_NEW_VERSION, hash.toString()); } } catch (Exception e) { // the 
hash is bad - remove the annotation removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_DELETE_OLD_VERSION, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } } } else if (mda.getName().equals(MetaDataAnnotation.PROP_UNDELETED)) { if (tagMap.get(TagNames.TRANCHE_UNDELETED + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_UNDELETED + " " + linkNum, new ArrayList()); } if (!tagMap.get(TagNames.TRANCHE_UNDELETED + " " + linkNum) .contains(mda.getValue())) { try { Long undeletedTimestamp = Long.valueOf(mda.getValue()); tagMap.get(TagNames.TRANCHE_UNDELETED + " " + linkNum) .add(String.valueOf(undeletedTimestamp)); addedTag(entry.getId(), hash, TagNames.TRANCHE_UNDELETED + " " + linkNum, String.valueOf(undeletedTimestamp), false); } catch (Exception e) { // the value is not a timestamp - remove it removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_UNDELETED, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } } } else if (mda.getName().equals(MetaDataAnnotation.PROP_NEW_VERSION)) { if (tagMap.get(TagNames.TRANCHE_NEW_LINK + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_NEW_LINK + " " + linkNum, new ArrayList()); } if (!tagMap.get(TagNames.TRANCHE_NEW_LINK + " " + linkNum) .contains(mda.getValue().trim())) { try { BigHash newVersionHash = BigHash.createHashFromString(mda.getValue()); tagMap.get(TagNames.TRANCHE_NEW_LINK + " " + linkNum) .add(newVersionHash.toString()); addedTag(entry.getId(), hash, TagNames.TRANCHE_NEW_LINK + " " + linkNum, newVersionHash.toString(), false); if (!metaDataContainsAnnotation(newVersionHash, MetaDataAnnotation.PROP_OLD_VERSION, hash.toString())) { addMetaDataAnnotationNow(newVersionHash, MetaDataAnnotation.PROP_OLD_VERSION, hash.toString()); } } catch (Exception e) { // the hash is bad - remove the annotation removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_NEW_VERSION, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } } } else if (mda.getName().equals(MetaDataAnnotation.PROP_OLD_VERSION)) { if (tagMap.get(TagNames.TRANCHE_OLD_LINK + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_OLD_LINK + " " + linkNum, new ArrayList()); } if (!tagMap.get(TagNames.TRANCHE_OLD_LINK + " " + linkNum) .contains(mda.getValue().trim())) { try { BigHash oldVersionHash = BigHash.createHashFromString(mda.getValue()); tagMap.get(TagNames.TRANCHE_OLD_LINK + " " + linkNum) .add(oldVersionHash.toString()); addedTag(entry.getId(), hash, TagNames.TRANCHE_OLD_LINK + " " + linkNum, oldVersionHash.toString(), false); if (!metaDataContainsAnnotation(oldVersionHash, MetaDataAnnotation.PROP_NEW_VERSION, hash.toString())) { addMetaDataAnnotationNow(oldVersionHash, MetaDataAnnotation.PROP_NEW_VERSION, hash.toString()); } } catch (Exception e) { // the hash is bad - remove the annotation removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_OLD_VERSION, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } } } } // resolve conflicts between delete/undelete tags if (tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum) != null && tagMap.get(TagNames.TRANCHE_UNDELETED + " " + linkNum) != null) { // remember the action that last occurred String latestTagName = ""; long latestActionTaken = 0; for (String value : tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum)) { try { if (Long.valueOf(value) > latestActionTaken) { latestTagName = TagNames.TRANCHE_DELETED + " " + linkNum; latestActionTaken = Long.valueOf(value); } } catch (Exception e) { } } for 
(String value : tagMap.get(TagNames.TRANCHE_UNDELETED + " " + linkNum)) { try { if (Long.valueOf(value) > latestActionTaken) { latestTagName = TagNames.TRANCHE_UNDELETED + " " + linkNum; latestActionTaken = Long.valueOf(value); } } catch (Exception e) { } } for (String value : tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum)) { removedTag(entry.getId(), hash, TagNames.TRANCHE_DELETED + " " + linkNum, value, false); } for (String value : tagMap.get(TagNames.TRANCHE_UNDELETED + " " + linkNum)) { removedTag(entry.getId(), hash, TagNames.TRANCHE_UNDELETED + " " + linkNum, value, false); } tagMap.remove(TagNames.TRANCHE_DELETED + " " + linkNum); tagMap.remove(TagNames.TRANCHE_UNDELETED + " " + linkNum); // only put it back if it's a deleted tag if (latestTagName.equals(TagNames.TRANCHE_DELETED + " " + linkNum)) { tagMap.put(latestTagName, new ArrayList<String>()); tagMap.get(latestTagName).add(String.valueOf(latestActionTaken)); addedTag(entry.getId(), hash, latestTagName, String.valueOf(latestActionTaken), false); boolean found = false; // make sure the meta data annotations have no undeleted annotations for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(mda.PROP_UNDELETED)) { removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_UNDELETED, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } else if (mda.getName().equals(mda.PROP_DELETED) && !mda.getValue().equals(String.valueOf(latestActionTaken))) { removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_DELETED, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } else if (mda.getName().equals(mda.PROP_DELETED) && mda.getValue().equals(String.valueOf(latestActionTaken))) { found = true; } } if (!found) { md.addAnnotation(new MetaDataAnnotation(MetaDataAnnotation.PROP_DELETED, String.valueOf(latestActionTaken))); metaDataChanged = true; addedTag(entry.getId(), hash, MetaDataAnnotation.PROP_DELETED, String.valueOf(latestActionTaken), true); } } else if (latestTagName.equals(TagNames.TRANCHE_UNDELETED + " " + linkNum)) { // make sure the meta data annotations have no undeleted annotations for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(mda.PROP_UNDELETED)) { removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_UNDELETED, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } else if (mda.getName().equals(mda.PROP_DELETED)) { removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_DELETED, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); } } } } // resolve conflicts between new link/delete new link tags if (tagMap.get(TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum) != null) { for (String deleteValue : tagMap .get(TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum)) { // remove new links with the same value if (tagMap.get(TagNames.TRANCHE_NEW_LINK + " " + linkNum) != null) { if (tagMap.get(TagNames.TRANCHE_NEW_LINK + " " + linkNum) .remove(deleteValue.trim())) { removedTag(entry.getId(), hash, TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum, deleteValue, false); } } removedTag(entry.getId(), hash, TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum, deleteValue.trim(), false); } tagMap.remove(TagNames.TRANCHE_DELETE_NEW_LINK + " " + linkNum); } // resolve conflicts between old link/delete old link tags if (tagMap.get(TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum) != null) { for (String deleteValue : tagMap .get(TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum)) { 
// remove old links with the same value if (tagMap.get(TagNames.TRANCHE_OLD_LINK + " " + linkNum) != null) { if (tagMap.get(TagNames.TRANCHE_OLD_LINK + " " + linkNum) .remove(deleteValue.trim())) { removedTag(entry.getId(), hash, TagNames.TRANCHE_OLD_LINK + " " + linkNum, deleteValue.trim(), false); } } removedTag(entry.getId(), hash, TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum, deleteValue.trim(), false); } tagMap.remove(TagNames.TRANCHE_DELETE_OLD_LINK + " " + linkNum); } // make sure the meta data has all the old/new links from tags if (tagMap.get(TagNames.TRANCHE_NEW_LINK + " " + linkNum) != null) { for (String newLink : tagMap.get(TagNames.TRANCHE_NEW_LINK + " " + linkNum)) { boolean foundInMetaData = false; for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_NEW_VERSION)) { if (mda.getValue().equals(newLink)) { foundInMetaData = true; break; } } } if (!foundInMetaData) { md.addAnnotation(new MetaDataAnnotation(MetaDataAnnotation.PROP_NEW_VERSION, newLink)); metaDataChanged = true; addedTag(entry.getId(), hash, MetaDataAnnotation.PROP_NEW_VERSION, newLink, true); } // make sure the new hash's meta data has this as it's old version try { BigHash newHash = BigHash.createHashFromString(newLink); if (!metaDataContainsAnnotation(newHash, MetaDataAnnotation.PROP_OLD_VERSION, hash.toString())) { addMetaDataAnnotationNow(newHash, MetaDataAnnotation.PROP_OLD_VERSION, hash.toString()); } } catch (Exception e) { } } } if (tagMap.get(TagNames.TRANCHE_OLD_LINK + " " + linkNum) != null) { for (String oldLink : tagMap.get(TagNames.TRANCHE_OLD_LINK + " " + linkNum)) { boolean foundInMetaData = false; for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_OLD_VERSION)) { if (mda.getValue().equals(oldLink)) { foundInMetaData = true; break; } } } if (!foundInMetaData) { md.addAnnotation(new MetaDataAnnotation(MetaDataAnnotation.PROP_OLD_VERSION, oldLink)); metaDataChanged = true; addedTag(entry.getId(), hash, MetaDataAnnotation.PROP_OLD_VERSION, oldLink, true); } // make sure the old hash's meta data has this as it's new version try { BigHash oldHash = BigHash.createHashFromString(oldLink); if (!metaDataContainsAnnotation(oldHash, MetaDataAnnotation.PROP_NEW_VERSION, hash.toString())) { addMetaDataAnnotationNow(oldHash, MetaDataAnnotation.PROP_NEW_VERSION, hash.toString()); } } catch (Exception e) { } } } // make sure the meta data annotation for showing meta info is there if it is in the tags if (tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum) != null) { // should only be one entry if (tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).size() > 1) { for (int i = tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).size() - 1; i > 0; i--) { String removedValue = tagMap .get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).remove(i); removedTag(entry.getId(), hash, TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum, removedValue, false); } } // check if it needs to be added to the meta data if (tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).size() == 1 && tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).get(0) .toLowerCase() .equals(MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION .getValue().toLowerCase())) { if (!tagMap.get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).get(0) .toLowerCase() .equals(MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION .getValue().toLowerCase())) { // remove the tag - it's meaningless String removedValue 
= tagMap .get(TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum).remove(0); removedTag(entry.getId(), hash, TagNames.TRANCHE_SHOW_MD_IF_ENC + " " + linkNum, removedValue, false); } else { // make sure the value is in the meta data boolean inMetaData = false; for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals( MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION .getName()) && mda.getValue().toLowerCase().equals( MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION .getValue().toLowerCase())) { inMetaData = true; } } // add to the meta data? if (!inMetaData) { md.addAnnotation( MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION); metaDataChanged = true; addedTag(entry.getId(), hash, MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION .getName(), MetaDataAnnotation.SHARE_META_DATA_IF_ENCRYPTED_ANNOTATION .getValue(), true); } } } } // set the signatures { String signatures = ""; for (Signature signature : md.getSignatures()) { String signatureStr = signature.getCert().getSubjectDN().getName() .split("CN=")[1].split(",")[0]; // skip adding if already exists if (tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum) != null) { if (tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum).get(0) .contains(signatureStr)) { continue; } } signatures = signatures + signatureStr + ", "; } if (signatures.length() != 0) { signatures = signatures.substring(0, signatures.length() - 2); if (tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_SIGNATURES + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum).add(signatures); addedTag(entry.getId(), hash, TagNames.TRANCHE_SIGNATURES + " " + linkNum, signatures, false); } else if (!tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum).get(0) .equals(signatures)) { editedTag(entry.getId(), hash, TagNames.TRANCHE_SIGNATURES + " " + linkNum, tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum).get(0), signatures, false); tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_SIGNATURES + " " + linkNum).add(signatures); } } } // set the timestamps if (tagMap.get(TagNames.TIMESTAMP) == null) { tagMap.put(TagNames.TIMESTAMP, new ArrayList<String>()); tagMap.get(TagNames.TIMESTAMP).add(String.valueOf(md.getTimestamp())); addedTag(entry.getId(), hash, TagNames.TIMESTAMP, String.valueOf(md.getTimestamp()), false); } if (tagMap.get(TagNames.TRANCHE_TIMESTAMP + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_TIMESTAMP + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_TIMESTAMP + " " + linkNum) .add(String.valueOf(md.getTimestamp())); addedTag(entry.getId(), hash, TagNames.TRANCHE_TIMESTAMP + " " + linkNum, String.valueOf(md.getTimestamp()), false); } else if (!tagMap.get(TagNames.TRANCHE_TIMESTAMP + " " + linkNum).get(0) .equals(String.valueOf(md.getTimestamp()))) { editedTag(entry.getId(), hash, TagNames.TRANCHE_TIMESTAMP + " " + linkNum, tagMap.get(TagNames.TRANCHE_TIMESTAMP + " " + linkNum).get(0), String.valueOf(md.getTimestamp()), false); tagMap.get(TagNames.TRANCHE_TIMESTAMP + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_TIMESTAMP + " " + linkNum) .add(String.valueOf(md.getTimestamp())); } // date uploaded String dateUploaded = makeDate(md.getTimestamp()); // if there were no tags, add this date uploaded if (tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum) == null || tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum).size() == 0) { 
tagMap.put(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum).add(dateUploaded); addedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum, dateUploaded, false); } // if the first date uploaded did not match, set this one as the only one else if (!tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum).get(0) .equals(dateUploaded)) { editedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum, tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum).get(0), dateUploaded, false); tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum).add(dateUploaded); } // if there are more than one date uploaded tags, delete all but the first while (tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum).size() > 1) { String toDelete = tagMap.get(TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum) .remove(1); removedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_UPLOADED + " " + linkNum, toDelete, false); } if (md.isDeleted()) { MetaDataAnnotation deletedAnnotation = null; for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_DELETED)) { try { if (deletedAnnotation == null || Long.valueOf(deletedAnnotation.getValue()) < Long .valueOf(mda.getValue())) { deletedAnnotation = mda; } } catch (Exception e) { } } } if (deletedAnnotation != null) { if (tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_DELETED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum) .add(deletedAnnotation.getValue()); addedTag(entry.getId(), hash, TagNames.TRANCHE_DELETED + " " + linkNum, deletedAnnotation.getValue(), false); } else { String highestValue = null; for (String value : tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum)) { try { if (highestValue == null || Long.valueOf(value) > Long.valueOf(highestValue)) { highestValue = value; } } catch (Exception e) { } } if (highestValue != null && Long.valueOf(highestValue) < Long .valueOf(deletedAnnotation.getValue())) { tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum) .add(deletedAnnotation.getValue()); addedTag(entry.getId(), hash, TagNames.TRANCHE_DELETED + " " + linkNum, deletedAnnotation.getValue(), false); } // date deleted String dateDeleted = makeDate(Long.valueOf(highestValue)); if (tagMap.get(TagNames.TRANCHE_DATE_DELETED + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_DATE_DELETED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_DATE_DELETED + " " + linkNum) .add(dateDeleted); addedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_DELETED + " " + linkNum, dateDeleted, false); } else if (!tagMap.get(TagNames.TRANCHE_DATE_DELETED + " " + linkNum).get(0) .equals(dateDeleted)) { editedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_DELETED + " " + linkNum, tagMap.get(TagNames.TRANCHE_DATE_DELETED + " " + linkNum) .get(0), dateDeleted, false); tagMap.get(TagNames.TRANCHE_DATE_DELETED + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_DATE_DELETED + " " + linkNum) .add(dateDeleted); } } } } else { if (tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum) != null) { String highestValue = "0"; for (String value : tagMap.get(TagNames.TRANCHE_DELETED + " " + linkNum)) { if (highestValue == null || Long.valueOf(value) > Long.valueOf(highestValue)) { highestValue = value; } } if (!highestValue.equals("0")) { MetaDataAnnotation 
mda = new MetaDataAnnotation( MetaDataAnnotation.PROP_DELETED, highestValue); md.addAnnotation(mda); metaDataChanged = true; addedTag(entry.getId(), hash, MetaDataAnnotation.PROP_DELETED, highestValue, true); } } } if (md.isEncrypted()) { // if there are no encrypted tags, add "True" if (tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum) == null || tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).size() == 0) { tagMap.put(TagNames.TRANCHE_ENCRYPTED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).add("True"); addedTag(entry.getId(), hash, TagNames.TRANCHE_ENCRYPTED + " " + linkNum, "True", false); } // otherwise if the first is not "True", change it else if (!tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).get(0) .equals("True")) { editedTag(entry.getId(), hash, TagNames.TRANCHE_ENCRYPTED + " " + linkNum, tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).get(0), "True", false); tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).add("True"); } // if there's no passphrase tag yet, check the meta data if (tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum) == null || tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).size() == 0) { // is there a public passphrase? String publicPassphrase = md.getPublicPassphrase(); // no public passphrase in this meta data? if (publicPassphrase == null) { // check for meta data on every server to see if there is a public passphrase set there // possible a lost meta data was recovered that was not available during publishing for (String url : servers) { TrancheServer ts = null; try { ts = IOUtil.connect(url); // get the meta data from the server MetaData mdForPassphrase = MetaDataUtil .read(new ByteArrayInputStream(ts.getMetaData(hash))); // does this md have a passphrase? String passphraseInMD = mdForPassphrase.getPublicPassphrase(); // got one! 
break the loop if (passphraseInMD != null) { publicPassphrase = passphraseInMD; break; } } catch (Exception e) { e.printStackTrace(); } finally { IOUtil.safeClose(ts); } } } // if still nothing, try to get the passphrase from the passphrases database if (publicPassphrase == null) { // need to go to the private passphrases to download the project file if (Passphrases.getPassphrase(hash) != null) { gft.setPassphrase(Passphrases.getPassphrase(hash)); log.println("Set passphrase from private passphrase db"); } else { log.println( "No public or private passphrase found - cannot get project information."); } } else { log.println("Public passphrase set from the meta data."); tagMap.put(TagNames.TRANCHE_PASSPHRASE + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum) .add(publicPassphrase); addedTag(entry.getId(), hash, TagNames.TRANCHE_PASSPHRASE + " " + linkNum, publicPassphrase, false); } } // the passphrase could have been set if it was found in the meta data if (tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum) != null && !tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).isEmpty()) { // remove all but the one of the passphrases - should never be more than one if (tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).size() > 1) { log.println( "More than one passphrase tag found - removing all but the first."); // delete all but the first while (tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).size() > 1) { String toDelete = tagMap .get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).remove(1); removedTag(entry.getId(), hash, TagNames.TRANCHE_PASSPHRASE + " " + linkNum, toDelete, false); } } // determine which public passphrase to use String publicPassphrase = null; // if the published passphrase and the tags passphrase are different, use the most recent if (md.isPublicPassphraseSet() && !tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).get(0) .equals(md.getPublicPassphrase())) { // get the date from the tags String dateTags = null; if (tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) != null && !tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .isEmpty()) { dateTags = tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .get(0); } // get the date from the meta data String dateMetaData = null; for (MetaDataAnnotation mda : md.getAnnotations()) { try { if (mda.getName().equals(mda.PROP_PUBLISHED_TIMESTAMP)) { dateMetaData = makeDate(Long.valueOf(mda.getValue())); } } catch (Exception e) { } } boolean useTags = true; if (dateMetaData != null && dateTags == null) { useTags = false; } else if (dateMetaData != null && dateTags != null) { if (dateMetaData.compareTo(dateTags) >= 0) { useTags = false; } } if (useTags) { publicPassphrase = tagMap .get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).get(0); } else { // change the tags to be the same as the md public passphrase String toDelete = tagMap .get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum).remove(0); removedTag(entry.getId(), hash, TagNames.TRANCHE_PASSPHRASE + " " + linkNum, toDelete, false); tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum) .add(md.getPublicPassphrase()); addedTag(entry.getId(), hash, TagNames.TRANCHE_PASSPHRASE + " " + linkNum, md.getPublicPassphrase(), false); publicPassphrase = md.getPublicPassphrase(); } } else { publicPassphrase = tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum) .get(0); } if (publicPassphrase != null) { // just go ahead and publish the passphras to all meta data no matter what // in the 
future, need to check all meta data to see if it's necessary to republish the passphrase // set the gft passphrase gft.setPassphrase(publicPassphrase); // set the passphrase in the meta data md.setPublicPassphrase(publicPassphrase); metaDataChanged = true; } } // how many published passphrase annotations are there? int publishedMetaDataAnnotationCount = 0; for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP)) { publishedMetaDataAnnotationCount++; } } // if need to determine date published if (md.getPublicPassphrase() == null) { // make sure there are no published meta annotations while (publishedMetaDataAnnotationCount > 0) { for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP)) { removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); publishedMetaDataAnnotationCount--; break; } } } // set the published tag to "Unknown" if (tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) == null || tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .size() == 0) { tagMap.put(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).add("Unknown"); addedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, "Unknown", false); } else if (!tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).get(0) .equals("Unknown")) { editedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).get(0), "Unknown", false); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).add("Unknown"); } } else { // are there no published annotations in the md? 
very odd if (publishedMetaDataAnnotationCount == 0) { // add a published meta data annotation with the current timestamp String timestampStr = String.valueOf(System.currentTimeMillis()); addedTag(entry.getId(), hash, MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP, timestampStr, true); metaDataChanged = true; md.getAnnotations().add(new MetaDataAnnotation( MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP, timestampStr)); publishedMetaDataAnnotationCount++; } else { // remove all but one of the published timestamps while (publishedMetaDataAnnotationCount > 1) { for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName() .equals(MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP)) { removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); publishedMetaDataAnnotationCount--; break; } } } // get the date published String datePublished = null; for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP)) { // exception thrown if value is not a timestamp try { datePublished = makeDate(Long.valueOf(mda.getValue())); } catch (Exception e) { // delete this annotation removedTag(entry.getId(), hash, MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP, mda.getValue(), true); metaDataChanged = true; md.getAnnotations().remove(mda); publishedMetaDataAnnotationCount--; } finally { break; } } } // if the date published was bad - had to remove it, so put in a new one if (datePublished == null) { // set the new date published to now long timestamp = System.currentTimeMillis(); String timestampStr = String.valueOf(timestamp); datePublished = makeDate(timestamp); // add the date published annotation to the meta data if (publishedMetaDataAnnotationCount == 0) { // add a meta data annotation addedTag(entry.getId(), hash, MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP, datePublished, true); metaDataChanged = true; md.getAnnotations() .add(new MetaDataAnnotation( MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP, datePublished)); publishedMetaDataAnnotationCount++; } } // if no date published tag, add it if (tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) == null || tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .size() == 0) { tagMap.put(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .add(datePublished); addedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, datePublished, false); } // if our date published annotation is wrong, reset it else if (!tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).get(0) .equals(datePublished)) { editedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .get(0), datePublished, false); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .add(datePublished); } } } } else { // if there are no encrypted tags, add "False" if (tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum) == null || tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).size() == 0) { tagMap.put(TagNames.TRANCHE_ENCRYPTED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).add("False"); addedTag(entry.getId(), hash, TagNames.TRANCHE_ENCRYPTED + " " + linkNum, "False", false); } // otherwise if the first is not "False", change 
it else if (!tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).get(0) .equals("False")) { editedTag(entry.getId(), hash, TagNames.TRANCHE_ENCRYPTED + " " + linkNum, tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).get(0), "False", false); tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).add("False"); } // remove any passphrase tags if they exist if (tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum) != null) { for (String toDelete : tagMap .get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum)) { if (tagMap.get(TagNames.TRANCHE_PASSPHRASE + " " + linkNum) .remove(toDelete)) { removedTag(entry.getId(), hash, TagNames.TRANCHE_PASSPHRASE + " " + linkNum, toDelete, false); } } // remove the arraylist so there is no confusion tagMap.remove(TagNames.TRANCHE_PASSPHRASE + " " + linkNum); } // the date published equals to the meta data timestamp // if there were no tags, add this date uploaded as the date published if (tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) == null || tagMap .get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).size() == 0) { tagMap.put(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).add(dateUploaded); addedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, dateUploaded, false); } // if the first date published did not match the date uploaded, set this one as the only one else if (!tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).get(0) .equals(dateUploaded)) { editedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).get(0), dateUploaded, false); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).add(dateUploaded); } } // if there are more than one encrypted tags, delete all but the first if (tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum) != null) { while (tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum).size() > 1) { String toDelete = tagMap.get(TagNames.TRANCHE_ENCRYPTED + " " + linkNum) .remove(1); removedTag(entry.getId(), hash, TagNames.TRANCHE_ENCRYPTED + " " + linkNum, toDelete, false); } } // if there are more than one date published tags, delete all but the first if (tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) != null) { while (tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum).size() > 1) { String toDelete = tagMap.get(TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum) .remove(1); removedTag(entry.getId(), hash, TagNames.TRANCHE_DATE_PUBLISHED + " " + linkNum, toDelete, false); } } // get the project file if (pf == null && md.isProjectFile()) { try { if (md != null && md.isProjectFile()) { // increment the chunk meta count by the size of the project file chunkAndMetaCount += md.getParts().size(); File tempFile = TempFileUtil.createTemporaryFile(); try { // catch invalid downloads, throw otherwise try { gft.getFile(tempFile); } catch (CantVerifySignatureException e) { addInvalid(entry.getId(), hash.toString(), tagMap, linkNum, e); log.println("ERROR: Downloaded project file is invalid."); } catch (Exception e) { if (e.getMessage() != null && (e.getMessage().toLowerCase() .contains("invalid") || e.getMessage().toLowerCase().contains("validate") || e.getMessage().toLowerCase().contains( "Decoded file does not match the expected file!"))) { 
addInvalid(entry.getId(), hash.toString(), tagMap, linkNum, e); log.println("ERROR: Project file invalid."); } else { err.println(e.getMessage()); throw e; } } // treat it as if it is a project file FileInputStream fis = null; BufferedInputStream bis = null; try { fis = new FileInputStream(tempFile); bis = new BufferedInputStream(fis); pf = ProjectFileUtil.read(bis); } catch (Exception e) { log.println("ERROR: Project file invalid."); addInvalid(entry.getId(), hash.toString(), tagMap, linkNum, e); bis.close(); fis.close(); } } finally { try { tempFile.delete(); } catch (Exception e) { err.println(e.getMessage()); } } } } catch (Exception e) { log.println("ERROR: Could not get project file"); err.println(e.getMessage()); } } if (pf != null && md.isProjectFile()) { // go through all of the files getting the file type and size try { for (ProjectFilePart pfp : pf.getParts()) { try { // update the number of meta data and chunks count // meta data chunkAndMetaCount++; // # chunks = ceiling of the size of the file divided by one MB chunkAndMetaCount += Math .ceil(Double.valueOf(pfp.getPaddingAdjustedLength()) / Double.valueOf(DataBlockUtil.ONE_MB)); // read the name and the size String name = pfp.getRelativeName().trim().toLowerCase(); if (name.contains("/")) { name = name.substring(name.lastIndexOf('/') + 1); } if (!name.contains(".")) { continue; } long size = pfp.getPaddingAdjustedLength(); // parse the file type(s) //while (name.contains(".")) { name = name.substring(name.lastIndexOf(".") + 1); // create the keys if there are none if (!numFilesFileTypeMap.containsKey(name)) { numFilesFileTypeMap.put(name, BigInteger.ZERO); } if (!sizeFileTypeMap.containsKey(name)) { sizeFileTypeMap.put(name, BigInteger.ZERO); } // increment the values appropriately numFilesFileTypeMap.put(name, numFilesFileTypeMap.get(name).add(BigInteger.ONE)); sizeFileTypeMap.put(name, sizeFileTypeMap.get(name).add(BigInteger.valueOf(size))); // } } catch (Exception e) { log.println("ERROR: Problem reading a file's information."); err.println(e.getMessage()); } } printFileTypeLog(); } catch (Exception e) { log.println("ERROR: Problem reading file type information."); err.println(e.getMessage()); } // set the files no matter what if (tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_FILES + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum) .add(String.valueOf(pf.getParts().size())); addedTag(entry.getId(), hash, TagNames.TRANCHE_FILES + " " + linkNum, String.valueOf(pf.getParts().size()), false); } else if (!tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).get(0) .equals(String.valueOf(pf.getParts().size()))) { editedTag(entry.getId(), hash, TagNames.TRANCHE_FILES + " " + linkNum, tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).get(0), String.valueOf(pf.getParts().size()), false); tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum) .add(String.valueOf(pf.getParts().size())); } // set the size no matter what if (tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_SIZE + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).add(pf.getSize().toString()); addedTag(entry.getId(), hash, TagNames.TRANCHE_SIZE + " " + linkNum, pf.getSize().toString(), false); } else if (!tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).get(0) .equals(pf.getSize().toString())) { editedTag(entry.getId(), hash, TagNames.TRANCHE_SIZE + 
" " + linkNum, tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).get(0), pf.getSize().toString(), false); tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).add(pf.getSize().toString()); } // title if (tagMap.get(TagNames.TITLE) == null && pf.getName() != null) { tagMap.put(TagNames.TITLE, new ArrayList<String>()); tagMap.get(TagNames.TITLE).add(pf.getName()); addedTag(entry.getId(), hash, TagNames.TITLE, pf.getName(), false); } // tranche link name // only have a tranche link name if it is different than the title if (tagMap.get(TagNames.TRANCHE_LINK_NAME + " " + linkNum) != null && pf.getName() != null && tagMap.get(TagNames.TRANCHE_LINK_NAME + " " + linkNum).get(0) .equals(tagMap.get(TagNames.TITLE).get(0))) { for (String value : tagMap.get(TagNames.TRANCHE_LINK_NAME + " " + linkNum)) { removedTag(entry.getId(), hash, TagNames.TRANCHE_LINK_NAME + " " + linkNum, value, false); } tagMap.remove(TagNames.TRANCHE_LINK_NAME + " " + linkNum); } // description if (tagMap.get(TagNames.DESCRIPTION) == null && pf.getDescription() != null) { tagMap.put(TagNames.DESCRIPTION, new ArrayList<String>()); tagMap.get(TagNames.DESCRIPTION).add(pf.getDescription()); addedTag(entry.getId(), hash, TagNames.DESCRIPTION, pf.getDescription(), false); } // tranche description // only have a tranche link if it is different from the entry description if (tagMap.get(TagNames.TRANCHE_DESCRIPTION + " " + linkNum) != null && pf.getDescription() != null && tagMap.get(TagNames.TRANCHE_DESCRIPTION + " " + linkNum).get(0) .equals(tagMap.get(TagNames.DESCRIPTION).get(0))) { for (String value : tagMap.get(TagNames.TRANCHE_DESCRIPTION + " " + linkNum)) { removedTag(entry.getId(), hash, TagNames.TRANCHE_DESCRIPTION + " " + linkNum, value, false); } tagMap.remove(TagNames.TRANCHE_DESCRIPTION + " " + linkNum); } } if (!md.isProjectFile()) { // # chunks = ceiling of the size of the file divided by one MB chunkAndMetaCount += md.getParts().size(); // set the files no matter what if (tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_FILES + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).add("1"); addedTag(entry.getId(), hash, TagNames.TRANCHE_FILES + " " + linkNum, "1", false); } else if (!tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).get(0).equals("1")) { editedTag(entry.getId(), hash, TagNames.TRANCHE_FILES + " " + linkNum, tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).get(0), "1", false); tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_FILES + " " + linkNum).add("1"); } // set the size no matter what if (tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum) == null) { tagMap.put(TagNames.TRANCHE_SIZE + " " + linkNum, new ArrayList<String>()); tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum) .add(String.valueOf(hash.getLength())); addedTag(entry.getId(), hash, TagNames.TRANCHE_SIZE + " " + linkNum, String.valueOf(hash.getLength()), false); } else if (!tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).get(0) .equals(String.valueOf(hash.getLength()))) { editedTag(entry.getId(), hash, TagNames.TRANCHE_SIZE + " " + linkNum, tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).get(0), String.valueOf(hash.getLength()), false); tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum).clear(); tagMap.get(TagNames.TRANCHE_SIZE + " " + linkNum) .add(String.valueOf(hash.getLength())); } // add the title if (md.getName() != null) { if (tagMap.get(TagNames.TITLE) == 
null) { tagMap.put(TagNames.TITLE, new ArrayList<String>()); tagMap.get(TagNames.TITLE).add(md.getName()); addedTag(entry.getId(), hash, TagNames.TITLE, md.getName(), false); } // check if the tranche link name is different from the title if (tagMap.get(TagNames.TRANCHE_LINK_NAME + " " + linkNum) != null && tagMap.get(TagNames.TRANCHE_LINK_NAME + " " + linkNum).get(0) .equals(tagMap.get(TagNames.TITLE).get(0))) { for (String value : tagMap .get(TagNames.TRANCHE_LINK_NAME + " " + linkNum)) { removedTag(entry.getId(), hash, TagNames.TRANCHE_LINK_NAME + " " + linkNum, value, false); } tagMap.remove(TagNames.TRANCHE_LINK_NAME + " " + linkNum); } } } } // just go ahead and possibly overwrite all older values of TagNames.HAS_DATA if (tagMap.get(TagNames.HAS_DATA) == null) { tagMap.put(TagNames.HAS_DATA, new ArrayList<String>()); tagMap.get(TagNames.HAS_DATA).add("Yes"); addedTag(entry.getId(), hash, TagNames.HAS_DATA, "Yes", false); } else if (!tagMap.get(TagNames.HAS_DATA).get(0).equals("Yes")) { editedTag(entry.getId(), hash, TagNames.HAS_DATA, tagMap.get(TagNames.HAS_DATA).get(0), "Yes", false); tagMap.get(TagNames.HAS_DATA).clear(); tagMap.get(TagNames.HAS_DATA).add("Yes"); } if (tagMap.get(TagNames.TYPE) == null) { tagMap.put(TagNames.TYPE, new ArrayList<String>()); tagMap.get(TagNames.TYPE).add("Data"); addedTag(entry.getId(), hash, TagNames.TYPE, "Data", false); } if (metaDataChanged && makeChanges) { log.println("Publishing changes to the meta data."); // create the bytestream ByteArrayOutputStream baos = new ByteArrayOutputStream(); // turn the metaData into a byte stream MetaDataUtil.write(md, baos); for (String url : servers) { try { // connect TrancheServer ts = IOUtil.connect(url); try { if (ts.hasMetaData(hash)) { // upload the changes - try up to 3 times Exception ex = null; for (int i = 0; i < 3; i++) { try { IOUtil.setMetaData(ts, user.getCertificate(), user.getPrivateKey(), hash, baos.toByteArray()); log.println("Set meta data to " + url); ex = null; break; } catch (Exception e) { ex = e; continue; } } if (ex != null) { throw ex; } } } finally { IOUtil.safeClose(ts); } } catch (Exception e) { err.println(e.getMessage()); log.println("ERROR: Could not set meta data to " + url); } } log.println("Done publishing meta data"); } } catch (Exception e) { log.println("ERROR: A problem occurred while editing the entry"); err.println(e.getMessage()); } // update the database saveToDatabase(entry.getId(), tagMap); } } } catch (Exception e) { err.println(e.getMessage()); } log.println("Checking if there is any new data on the network."); try { for (BigHash hash : hashesOnNetwork) { // do not add this data to the tags if it already exists in there if (hashesInTags.contains(hash)) { continue; } // add this data to the tags db try { // reset the meta data and the project file MetaData md = null; ProjectFile pf = null; // set up the getfiletool GetFileTool gft = new GetFileTool(); gft.setValidate(validate); gft.setHash(hash); // get the meta data try { md = gft.getMetaData(); } catch (CantVerifySignatureException e) { addInvalid(-1, hash.toString(), "", "", "", "", e); log.println("ERROR: Downloaded meta data is invalid."); } catch (Exception e) { if (e.getMessage() != null && e.getMessage().toLowerCase().contains("can't find metadata")) { // note that this meta data is missing from the network addMissing(-1, hash.toString(), "", "", "", ""); log.println("ERROR: Could not get meta data."); continue; } else { addInvalid(-1, hash.toString(), "", "", "", "", e); log.println("ERROR: Downloaded 
meta data is invalid."); continue; } } // if the tag entry doesnt exists, create a new one long entryId = -1; if (makeChanges) { entryId = Database.createEntry(); } // add all of the info as tags if (makeChanges) { Database.addTag(entryId, TagNames.HAS_DATA, "Yes"); } addedTag(entryId, hash, TagNames.HAS_DATA, "Yes", false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TYPE, "Data"); } addedTag(entryId, hash, TagNames.TYPE, "Data", false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_LINK + " 1", hash.toString()); } addedTag(entryId, hash, TagNames.TRANCHE_LINK + " 1", hash.toString(), false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TIMESTAMP, String.valueOf(md.getTimestamp())); } addedTag(entryId, hash, TagNames.TIMESTAMP, String.valueOf(md.getTimestamp()), false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_TIMESTAMP + " 1", String.valueOf(md.getTimestamp())); } addedTag(entryId, hash, TagNames.TRANCHE_TIMESTAMP + " 1", String.valueOf(md.getTimestamp()), false, false); String datePublished = null; if (md != null) { // set the new/old version for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_DELETE_NEW_VERSION)) { try { BigHash newVersionHash = BigHash.createHashFromString(mda.getValue()); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DELETE_NEW_LINK + " 1", newVersionHash.toString()); } addedTag(entryId, hash, TagNames.TRANCHE_DELETE_NEW_LINK + " 1", newVersionHash.toString(), false, false); } catch (Exception e) { } } else if (mda.getName().equals(MetaDataAnnotation.PROP_DELETE_OLD_VERSION)) { try { BigHash oldVersionHash = BigHash.createHashFromString(mda.getValue()); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DELETE_OLD_LINK + " 1", oldVersionHash.toString()); } addedTag(entryId, hash, TagNames.TRANCHE_DELETE_OLD_LINK + " 1", oldVersionHash.toString(), false, false); } catch (Exception e) { } } else if (mda.getName().equals(MetaDataAnnotation.PROP_UNDELETED)) { if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_UNDELETED + " 1", mda.getValue()); } addedTag(entryId, hash, TagNames.TRANCHE_UNDELETED + " 1", mda.getValue(), false, false); } else if (mda.getName().equals(MetaDataAnnotation.PROP_NEW_VERSION)) { try { BigHash newVersionHash = BigHash.createHashFromString(mda.getValue()); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_NEW_LINK + " 1", newVersionHash.toString()); } addedTag(entryId, hash, TagNames.TRANCHE_NEW_LINK + " 1", newVersionHash.toString(), false, false); } catch (Exception e) { } } else if (mda.getName().equals(MetaDataAnnotation.PROP_OLD_VERSION)) { try { BigHash oldVersionHash = BigHash.createHashFromString(mda.getValue()); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_OLD_LINK + " 1", oldVersionHash.toString()); } addedTag(entryId, hash, TagNames.TRANCHE_OLD_LINK + " 1", oldVersionHash.toString(), false, false); } catch (Exception e) { } } else if (mda.getName().equals(MetaDataAnnotation.PROP_PUBLISHED_TIMESTAMP)) { try { // date published datePublished = makeDate(Long.valueOf(mda.getValue())); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DATE_PUBLISHED + " 1", datePublished); } addedTag(entryId, hash, TagNames.TRANCHE_DATE_PUBLISHED + " 1", datePublished, false, false); } catch (Exception e) { } } } String dateUploaded = makeDate(md.getTimestamp()); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DATE_UPLOADED + " 1", dateUploaded); 
} addedTag(entryId, hash, TagNames.TRANCHE_DATE_UPLOADED + " 1", dateUploaded, false, false); if (md.isEncrypted()) { if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_ENCRYPTED + " 1", "True"); } addedTag(entryId, hash, TagNames.TRANCHE_ENCRYPTED + " 1", "True", false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DATE_PUBLISHED + " 1", "Unknown"); } addedTag(entryId, hash, TagNames.TRANCHE_DATE_PUBLISHED + " 1", "Unknown", false, false); try { String passphrase = Passphrases.getPassphrase(hash); if (passphrase != null) { gft.setPassphrase(passphrase); log.println("Set the passphrase from the private passphrase db."); } } catch (Exception e) { } } else { if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_ENCRYPTED + " 1", "False"); } addedTag(entryId, hash, TagNames.TRANCHE_ENCRYPTED + " 1", "False", false, false); if (datePublished == null) { if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DATE_PUBLISHED + " 1", dateUploaded); } addedTag(entryId, hash, TagNames.TRANCHE_DATE_PUBLISHED + " 1", dateUploaded, false, false); } } if (md.isDeleted()) { MetaDataAnnotation deletedAnnotation = null; for (MetaDataAnnotation mda : md.getAnnotations()) { if (mda.getName().equals(MetaDataAnnotation.PROP_DELETED)) { try { if (deletedAnnotation == null || Long.valueOf(deletedAnnotation.getValue()) < Long .valueOf(mda.getValue())) { deletedAnnotation = mda; } } catch (Exception e) { } } } if (deletedAnnotation != null) { if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DELETED + " 1", deletedAnnotation.getValue()); } addedTag(entryId, hash, TagNames.TRANCHE_DELETED + " 1", deletedAnnotation.getValue(), false, false); // date deleted try { String dateDeleted = makeDate(Long.valueOf(deletedAnnotation.getValue())); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_DATE_DELETED + " 1", dateDeleted); } addedTag(entryId, hash, TagNames.TRANCHE_DATE_DELETED + " 1", dateDeleted, false, false); } catch (Exception e) { } } } String signatures = ""; for (Signature signature : md.getSignatures()) { String signatureStr = signature.getCert().getSubjectDN().getName().split("CN=")[1] .split(",")[0]; signatures = signatures + signatureStr + ", "; } if (signatures.length() != 0) { signatures = signatures.substring(0, signatures.length() - 2); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_SIGNATURES + " 1", signatures); } addedTag(entryId, hash, TagNames.TRANCHE_SIGNATURES + " 1", signatures, false, false); } if (md.isProjectFile()) { try { if (md != null && md.isProjectFile()) { File tempFile = TempFileUtil.createTemporaryFile(); try { // catch invalid downloads, throw otherwise try { gft.getFile(tempFile); } catch (CantVerifySignatureException e) { addInvalid(entryId, hash.toString(), "", "", "", "", e); log.println("ERROR: Downloaded project file is invalid."); } catch (Exception e) { if (e.getMessage() != null && (e.getMessage().toLowerCase() .contains("validate") || e.getMessage().toLowerCase().contains( "Decoded file does not match the expected file!"))) { addInvalid(entryId, hash.toString(), "", "", "", "", e); log.println("ERROR: Project file invalid."); } else { throw e; } } // treat it as if it is a project file FileInputStream fis = null; BufferedInputStream bis = null; try { fis = new FileInputStream(tempFile); bis = new BufferedInputStream(fis); pf = ProjectFileUtil.read(bis); } catch (Exception e) { log.println("ERROR: Project file invalid."); addInvalid(entryId, hash.toString(), "", "", "", "", e); bis.close(); 
fis.close(); } } finally { try { tempFile.delete(); } catch (Exception e) { err.println(e.getMessage()); } } } if (pf != null) { if (pf.getName() != null) { if (makeChanges) { Database.addTag(entryId, TagNames.TITLE, pf.getName()); } addedTag(entryId, hash, TagNames.TITLE, pf.getName(), false, false); } if (pf.getDescription() != null) { if (makeChanges) { Database.addTag(entryId, TagNames.DESCRIPTION, pf.getDescription()); } addedTag(entryId, hash, TagNames.DESCRIPTION, pf.getDescription(), false, false); } if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_SIZE + " 1", pf.getSize().toString()); } addedTag(entryId, hash, TagNames.TRANCHE_SIZE + " 1", pf.getSize().toString(), false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_FILES + " 1", String.valueOf(pf.getParts().size())); } addedTag(entryId, hash, TagNames.TRANCHE_FILES + " 1", String.valueOf(pf.getParts().size()), false, false); } } catch (Exception e) { log.println("ERROR: Could not load the project file."); err.println(e.getMessage()); } } else { if (makeChanges) { Database.addTag(entryId, TagNames.TITLE, md.getName()); } addedTag(entryId, hash, TagNames.TITLE, md.getName(), false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_FILES + " 1", "1"); } addedTag(entryId, hash, TagNames.TRANCHE_FILES + " 1", "1", false, false); if (makeChanges) { Database.addTag(entryId, TagNames.TRANCHE_SIZE + " 1", String.valueOf(hash.getLength())); } addedTag(entryId, hash, TagNames.TRANCHE_SIZE + " 1", String.valueOf(hash.getLength()), false, false); } } addedEntries.add(entryId); hashesInTags.add(hash); } catch (Exception e) { log.println("ERROR: There was a problem adding the new entry"); err.println(e.getMessage()); } } } catch (Exception e) { log.println("ERROR: There was a problem checking for new data on the network."); err.println(e.getMessage()); } log.println("Finished updating the tags database"); }
From source file:com.amazonaws.mobileconnectors.cognitoidentityprovider.CognitoUser.java
/**
 * Creates response for the second step of the SRP authentication.
 *
 * @param challenge REQUIRED: {@link InitiateAuthResult} contains next challenge.
 * @param authenticationDetails REQUIRED: {@link AuthenticationDetails} user authentication details.
 * @param authenticationHelper REQUIRED: Internal helper class for SRP calculations.
 * @return {@link RespondToAuthChallengeRequest}.
 */
private RespondToAuthChallengeRequest userSrpAuthRequest(InitiateAuthResult challenge,
        AuthenticationDetails authenticationDetails, AuthenticationHelper authenticationHelper) {
    final String userIdForSRP = challenge.getChallengeParameters()
            .get(CognitoServiceConstants.CHLG_PARAM_USER_ID_FOR_SRP);
    this.usernameInternal = challenge.getChallengeParameters().get(CognitoServiceConstants.CHLG_PARAM_USERNAME);
    this.deviceKey = CognitoDeviceHelper.getDeviceKey(usernameInternal, pool.getUserPoolId(), context);
    secretHash = CognitoSecretHash.getSecretHash(usernameInternal, clientId, clientSecret);
    final BigInteger srpB = new BigInteger(challenge.getChallengeParameters().get("SRP_B"), 16);
    if (srpB.mod(AuthenticationHelper.N).equals(BigInteger.ZERO)) {
        throw new CognitoInternalErrorException("SRP error, B cannot be zero");
    }
    final BigInteger salt = new BigInteger(challenge.getChallengeParameters().get("SALT"), 16);
    final byte[] key = authenticationHelper.getPasswordAuthenticationKey(userIdForSRP,
            authenticationDetails.getPassword(), srpB, salt);
    final Date timestamp = new Date();
    byte[] hmac;
    String dateString;
    try {
        final Mac mac = Mac.getInstance("HmacSHA256");
        final SecretKeySpec keySpec = new SecretKeySpec(key, "HmacSHA256");
        mac.init(keySpec);
        mac.update(pool.getUserPoolId().split("_", 2)[1].getBytes(StringUtils.UTF8));
        mac.update(userIdForSRP.getBytes(StringUtils.UTF8));
        final byte[] secretBlock = Base64.decode(challenge.getChallengeParameters().get("SECRET_BLOCK"));
        mac.update(secretBlock);
        final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("EEE MMM d HH:mm:ss z yyyy", Locale.US);
        simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        dateString = simpleDateFormat.format(timestamp);
        final byte[] dateBytes = dateString.getBytes(StringUtils.UTF8);
        hmac = mac.doFinal(dateBytes);
    } catch (final Exception e) {
        throw new CognitoInternalErrorException("SRP error", e);
    }
    final Map<String, String> srpAuthResponses = new HashMap<String, String>();
    srpAuthResponses.put(CognitoServiceConstants.CHLG_RESP_PASSWORD_CLAIM_SECRET_BLOCK,
            challenge.getChallengeParameters().get(CognitoServiceConstants.CHLG_PARAM_SECRET_BLOCK));
    srpAuthResponses.put(CognitoServiceConstants.CHLG_RESP_PASSWORD_CLAIM_SIGNATURE,
            new String(Base64.encode(hmac), StringUtils.UTF8));
    srpAuthResponses.put(CognitoServiceConstants.CHLG_RESP_TIMESTAMP, dateString);
    srpAuthResponses.put(CognitoServiceConstants.CHLG_RESP_USERNAME, usernameInternal);
    srpAuthResponses.put(CognitoServiceConstants.CHLG_RESP_DEVICE_KEY, deviceKey);
    srpAuthResponses.put(CognitoServiceConstants.CHLG_RESP_SECRET_HASH, secretHash);
    final RespondToAuthChallengeRequest authChallengeRequest = new RespondToAuthChallengeRequest();
    authChallengeRequest.setChallengeName(challenge.getChallengeName());
    authChallengeRequest.setClientId(clientId);
    authChallengeRequest.setSession(challenge.getSession());
    authChallengeRequest.setChallengeResponses(srpAuthResponses);
    return authChallengeRequest;
}
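The BigInteger.ZERO comparison above is the standard SRP guard against a degenerate server ephemeral: if B is congruent to 0 modulo N, the derived session key becomes predictable, so the handshake must be aborted. Below is a minimal sketch of that guard in isolation; it passes the group modulus in as a parameter rather than reading AuthenticationHelper.N, and the class and method names are illustrative only.

import java.math.BigInteger;

public final class SrpEphemeralCheck {

    // srpBHex is the hex-encoded SRP_B challenge parameter; groupN stands in
    // for the pool's SRP group prime (AuthenticationHelper.N in the SDK).
    public static BigInteger requireNonZeroEphemeral(String srpBHex, BigInteger groupN) {
        final BigInteger srpB = new BigInteger(srpBHex, 16);
        if (srpB.mod(groupN).equals(BigInteger.ZERO)) {
            // A zero ephemeral would make the shared secret predictable, so abort.
            throw new IllegalStateException("SRP error, B cannot be zero");
        }
        return srpB;
    }
}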
From source file:com.ephesoft.dcma.batch.service.BatchSchemaServiceImpl.java
/**
 * Method extracted to be reused for Ephesoft Web Services.
 *
 * @param actualFolderLocation
 * @param outputFilePath
 * @param pageID
 * @param pathOfHOCRFile
 * @param hocrPage
 * @return FileInputStream
 * @throws IOException
 * @throws TransformerException
 * @throws XPathExpressionException
 * @throws SAXException
 * @throws ParserConfigurationException
 */
private FileInputStream hocrGenerationInternal(final String actualFolderLocation, final String outputFilePath,
        final String pageID, final String pathOfHOCRFile, final HocrPage hocrPage)
        throws XPathExpressionException, TransformerException, IOException, ParserConfigurationException,
        SAXException {
    XMLUtil.htmlOutputStream(pathOfHOCRFile, outputFilePath);
    OCREngineUtil.formatHOCRForTesseract(outputFilePath, actualFolderLocation, pageID);
    final FileInputStream inputStream = new FileInputStream(outputFilePath);
    final org.w3c.dom.Document doc = XMLUtil.createDocumentFrom(inputStream);
    final NodeList titleNodeList = doc.getElementsByTagName(BatchConstants.TITLE);
    if (null != titleNodeList) {
        for (int index = 0; index < titleNodeList.getLength(); index++) {
            final Node node = titleNodeList.item(index);
            final NodeList childNodeList = node.getChildNodes();
            final Node nodeChild = childNodeList.item(BatchConstants.ZERO);
            if (null != nodeChild) {
                final String value = nodeChild.getNodeValue();
                if (value != null) {
                    hocrPage.setTitle(value);
                    break;
                }
            }
        }
    }
    final NodeList spanNodeList = doc.getElementsByTagName("span");
    final Spans spans = new Spans();
    hocrPage.setSpans(spans);
    final List<Span> spanList = spans.getSpan();
    if (null != spanNodeList) {
        final StringBuilder hocrContent = new StringBuilder();
        for (int index = BatchConstants.ZERO; index < spanNodeList.getLength(); index++) {
            final Node node = spanNodeList.item(index);
            final NodeList childNodeList = node.getChildNodes();
            final Node nodeChild = childNodeList.item(BatchConstants.ZERO);
            final Span span = new Span();
            if (null != nodeChild) {
                final String value = nodeChild.getNodeValue();
                span.setValue(value);
                hocrContent.append(value);
                hocrContent.append(BatchConstants.SPACE);
            }
            spanList.add(span);
            final NamedNodeMap map = node.getAttributes();
            final Node nMap = map.getNamedItem(BatchConstants.TITLE);
            Coordinates hocrCoordinates = null;
            hocrCoordinates = getHOCRCoordinates(nMap, hocrCoordinates);
            if (null == hocrCoordinates) {
                hocrCoordinates = new Coordinates();
                hocrCoordinates.setX0(BigInteger.ZERO);
                hocrCoordinates.setX1(BigInteger.ZERO);
                hocrCoordinates.setY0(BigInteger.ZERO);
                hocrCoordinates.setY1(BigInteger.ZERO);
            }
            span.setCoordinates(hocrCoordinates);
        }
        hocrPage.setHocrContent(hocrContent.toString());
    }
    return inputStream;
}
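When a span's title attribute yields no parsable bounding box, the method above falls back to a Coordinates object whose four corners are all BigInteger.ZERO, so downstream consumers always see well-formed coordinates. A small sketch of that fallback pattern follows; Box is a hypothetical stand-in for the JAXB-generated Coordinates type, introduced only to keep the example self-contained.

import java.math.BigInteger;

// "Box" is a hypothetical substitute for the generated Coordinates class used
// above; only the zero-fallback pattern is illustrated here.
public final class Box {
    public BigInteger x0 = BigInteger.ZERO;
    public BigInteger y0 = BigInteger.ZERO;
    public BigInteger x1 = BigInteger.ZERO;
    public BigInteger y1 = BigInteger.ZERO;

    // Mirrors the null check in hocrGenerationInternal: a span with no
    // recognisable bounding box still gets well-defined (0,0)-(0,0) coordinates.
    public static Box orZero(Box parsed) {
        return parsed != null ? parsed : new Box();
    }
}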