List of usage examples for com.google.common.collect.Maps#newTreeMap()
public static <K extends Comparable, V> TreeMap<K, V> newTreeMap()
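Before the project examples, a minimal self-contained sketch of the method itself: Maps.newTreeMap() returns an empty, mutable TreeMap ordered by the keys' natural ordering (the factory predates Java 7 diamond inference). The class name and map contents below are illustrative only and are not taken from any of the source files listed.

import com.google.common.collect.Maps;
import java.util.TreeMap;

public class NewTreeMapExample {
    public static void main(String[] args) {
        // Maps.newTreeMap() returns an empty TreeMap sorted by natural key order.
        TreeMap<String, Integer> counts = Maps.newTreeMap();
        counts.put("banana", 2);
        counts.put("apple", 5);
        counts.put("cherry", 1);
        // Keys iterate in sorted order: apple, banana, cherry.
        System.out.println(counts.firstKey()); // apple
        System.out.println(counts);            // {apple=5, banana=2, cherry=1}
    }
}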
From source file:com.github.fge.jsonschema.processors.build.ValidatorBuilder.java
/**
 * Process the input
 *
 * @param report the report to use while processing
 * @param input the input for this processor
 * @return the output
 * @throws ProcessingException processing failed
 */
@Override
public ValidatorList process(final ProcessingReport report, final SchemaDigest input)
        throws ProcessingException {
    final SortedMap<String, KeywordValidator> map = Maps.newTreeMap();

    String keyword;
    JsonNode digest;
    KeywordValidator validator;
    Constructor<? extends KeywordValidator> constructor;

    for (final Map.Entry<String, JsonNode> entry : input.getDigests().entrySet()) {
        keyword = entry.getKey();
        digest = entry.getValue();
        constructor = constructors.get(keyword);
        validator = buildKeyword(constructor, digest);
        map.put(keyword, validator);
    }

    return new ValidatorList(input.getContext(), map.values());
}
From source file:org.eclipse.xtext.parser.antlr.UnorderedGroupHelper.java
@Inject
public UnorderedGroupHelper(Collector collector) {
    groupToState = Maps.newHashMap();
    backtrackingSnapShot = Maps.newTreeMap();
    for (UnorderedGroup group : collector.getGroups())
        configure(group);
    allGroups = collector.getGroups().toArray(new UnorderedGroup[collector.getGroups().size()]);
}
From source file:co.mitro.core.servlets.GetPublicKeyForIdentity.java
@Override
protected MitroRPC processCommand(MitroRequestContext context)
        throws SQLException, MitroServletException {
    RPC.GetPublicKeysForIdentityRequest in = gson.fromJson(context.jsonRequest,
            RPC.GetPublicKeysForIdentityRequest.class);
    RPC.GetPublicKeyForIdentityResponse out = new RPC.GetPublicKeyForIdentityResponse();
    out.userIdToPublicKey = Maps.newTreeMap();
    out.missingUsers = Lists.newLinkedList();

    Set<String> userIdSet = Sets.newHashSet(in.userIds);
    for (DBIdentity i : DBIdentity.getUsersFromNames(context.manager, in.userIds)) {
        Set<String> aliases = DBUserName.getAliasesForIdentity(context.manager, i);
        Set<String> matchingAliasesForThisUser = Sets.intersection(userIdSet, aliases).immutableCopy();
        if (matchingAliasesForThisUser.size() > 1) {
            throw new UserVisibleException("The following emails are aliases and cannot"
                    + " be included simultaneously: " + Joiner.on(",").join(matchingAliasesForThisUser));
        }
        for (String email : matchingAliasesForThisUser) {
            out.userIdToPublicKey.put(email, i.getPublicKeyString());
            userIdSet.remove(email);
        }
    }

    if (!userIdSet.isEmpty()) {
        assert in.addMissingUsers : "addMissingUsers must be true.";
        if (!context.isGroupSyncRequest()
                && !isPermittedToGetMissingUsers(context.requestor.getName(), userIdSet.size())) {
            throw new TooManyAccountsException(Integer.toString(in.userIds.size()));
        }
        try {
            for (String newUser : userIdSet) {
                DBIdentity identity = InviteNewUser.inviteNewUser(keyFactory, context.manager,
                        context.requestor, newUser);
                out.userIdToPublicKey.put(newUser, identity.getPublicKeyString());
            }
        } catch (CryptoError | CyclicGroupError e) {
            throw new MitroServletException(e);
        }
    }

    @SuppressWarnings("deprecation")
    AuthenticatedDB udb = AuthenticatedDB.deprecatedNew(context.manager, context.requestor);
    if (null != in.groupIds && !in.groupIds.isEmpty()) {
        for (Integer gid : in.groupIds) {
            DBGroup group = udb.getGroupForAddSecret(gid);
            assert (group != null) : "Invalid permissions";
            out.groupIdToPublicKey.put(group.getId(), group.getPublicKeyString());
        }
    }
    return out;
}
From source file:org.kiji.scoring.statistics.RunningLogarithmicBin.java
/**
 * Get the entire bin structure. Modifications made to the return value will not be reflected in
 * the underlying Map.
 *
 * @return a copy of the entire bin structure.
 */
public NavigableMap<Long, Long> getBins() {
    final Map<Long, AtomicLong> binsCopy = Maps.newHashMap(mBins);
    final NavigableMap<Long, Long> output = Maps.newTreeMap();
    for (Map.Entry<Long, AtomicLong> entry : binsCopy.entrySet()) {
        output.put(entry.getKey(), entry.getValue().get());
    }
    return output;
}
From source file:org.cinchapi.concourse.server.storage.db.PrimaryRecord.java
/**
 * Return a log of revisions to the entire Record.
 *
 * @return the revision log
 */
public Map<Long, String> audit() {
    read.lock();
    try {
        Map<Long, String> audit = Maps.newTreeMap();
        for (Text key : present.keySet()) { /* Authorized */
            audit.putAll(audit(key));
        }
        return audit;
    } finally {
        read.unlock();
    }
}
From source file:eu.interedition.text.query.QueryCriterion.java
public void listen(Session session, final Text text, final int pageSize,
        final AnnotationListener listener) throws IOException {
    final long contentLength = text.getLength();
    Reader contentStream = null;
    try {
        contentStream = text.read().getInput();
        final SortedMap<Long, Set<Annotation>> starts = Maps.newTreeMap();
        final SortedMap<Long, Set<Annotation>> ends = Maps.newTreeMap();

        long offset = 0;
        long next = 0;
        long pageEnd = 0;

        listener.start(contentLength);

        final Set<Annotation> annotationData = Sets.newHashSet();
        while (true) {
            if ((offset % pageSize) == 0) {
                pageEnd = Math.min(offset + pageSize, contentLength);
                final TextRange pageRange = new TextRange(offset, pageEnd);
                final Iterable<Annotation> pageAnnotations =
                        and(this, text(text), rangeOverlap(pageRange)).iterate(session);
                for (Annotation a : pageAnnotations) {
                    for (TextTarget target : a.getTargets()) {
                        if (!text.equals(target.getText())) {
                            continue;
                        }
                        final long start = target.getStart();
                        final long end = target.getEnd();
                        if (start >= offset) {
                            Set<Annotation> starting = starts.get(start);
                            if (starting == null) {
                                starts.put(start, starting = Sets.newHashSet());
                            }
                            starting.add(a);
                            annotationData.add(a);
                        }
                        if (end <= pageEnd) {
                            Set<Annotation> ending = ends.get(end);
                            if (ending == null) {
                                ends.put(end, ending = Sets.newHashSet());
                            }
                            ending.add(a);
                            annotationData.add(a);
                        }
                    }
                }
                next = Math.min(starts.isEmpty() ? contentLength : starts.firstKey(),
                        ends.isEmpty() ? contentLength : ends.firstKey());
            }
            if (offset == next) {
                final Set<Annotation> startEvents = (!starts.isEmpty() && offset == starts.firstKey()
                        ? starts.remove(starts.firstKey())
                        : Sets.<Annotation>newHashSet());
                final Set<Annotation> endEvents = (!ends.isEmpty() && offset == ends.firstKey()
                        ? ends.remove(ends.firstKey())
                        : Sets.<Annotation>newHashSet());

                final Set<Annotation> emptyEvents = Sets.newHashSet(Sets.filter(endEvents, emptyIn(text)));
                endEvents.removeAll(emptyEvents);

                if (!endEvents.isEmpty())
                    listener.end(offset, filter(annotationData, endEvents, true));
                if (!startEvents.isEmpty())
                    listener.start(offset, filter(annotationData, startEvents, false));
                if (!emptyEvents.isEmpty())
                    listener.end(offset, filter(annotationData, emptyEvents, true));

                next = Math.min(starts.isEmpty() ? contentLength : starts.firstKey(),
                        ends.isEmpty() ? contentLength : ends.firstKey());
            }
            if (offset == contentLength) {
                break;
            }

            final long readTo = Math.min(pageEnd, next);
            if (offset < readTo) {
                final char[] currentText = new char[(int) (readTo - offset)];
                int read = contentStream.read(currentText);
                if (read > 0) {
                    listener.text(new TextRange(offset, offset + read), new String(currentText, 0, read));
                    offset += read;
                }
            }
        }
        listener.end();
    } finally {
        Closeables.close(contentStream, false);
    }
}
From source file:org.apache.tephra.snapshot.SnapshotCodecV4.java
@Override
protected NavigableMap<Long, TransactionManager.InProgressTx> decodeInProgress(BinaryDecoder decoder)
        throws IOException {
    int size = decoder.readInt();
    NavigableMap<Long, TransactionManager.InProgressTx> inProgress = Maps.newTreeMap();
    while (size != 0) { // zero denotes end of list as per AVRO spec
        for (int remaining = size; remaining > 0; --remaining) {
            long txId = decoder.readLong();
            long expiration = decoder.readLong();
            long visibilityUpperBound = decoder.readLong();
            int txTypeIdx = decoder.readInt();
            TransactionManager.InProgressType txType;
            try {
                txType = TransactionManager.InProgressType.values()[txTypeIdx];
            } catch (ArrayIndexOutOfBoundsException e) {
                throw new IOException("Type enum ordinal value is out of range: " + txTypeIdx);
            }
            // read checkpoint tx IDs
            int checkpointPointerSize = decoder.readInt();
            LongArrayList checkpointPointers = new LongArrayList(checkpointPointerSize);
            while (checkpointPointerSize != 0) {
                for (int checkpointRemaining = checkpointPointerSize; checkpointRemaining > 0;
                        --checkpointRemaining) {
                    checkpointPointers.add(decoder.readLong());
                }
                checkpointPointerSize = decoder.readInt();
            }
            inProgress.put(txId,
                    new TransactionManager.InProgressTx(visibilityUpperBound, expiration, txType,
                            checkpointPointers));
        }
        size = decoder.readInt();
    }
    return inProgress;
}
From source file:org.eclipse.wb.internal.swing.model.property.editor.models.tree.TreeModelEvaluator.java
public Object evaluate(EvaluationContext context, Expression expression, ITypeBinding typeBinding,
        String typeQualifiedName) throws Exception {
    AnonymousClassDeclaration rootDeclaration = findRootNodeDeclaration(expression);
    if (rootDeclaration != null) {
        // create root node
        final DefaultMutableTreeNode rootNode;
        {
            ClassInstanceCreation rootNodeCreation = (ClassInstanceCreation) rootDeclaration.getParent();
            StringLiteral rootTextLiteral = (StringLiteral) rootNodeCreation.arguments().get(0);
            rootNode = new DefaultMutableTreeNode(rootTextLiteral.getLiteralValue());
        }
        // create nodes
        final Map<String, DefaultMutableTreeNode> nameToNode = Maps.newTreeMap();
        rootDeclaration.accept(new ASTVisitor() {
            private DefaultMutableTreeNode m_lastNode;

            @Override
            public void endVisit(ClassInstanceCreation creation) {
                if (AstNodeUtils.getFullyQualifiedName(creation, false)
                        .equals("javax.swing.tree.DefaultMutableTreeNode")
                        && creation.arguments().size() == 1
                        && creation.arguments().get(0) instanceof StringLiteral) {
                    StringLiteral stringLiteral = (StringLiteral) creation.arguments().get(0);
                    DefaultMutableTreeNode node = new DefaultMutableTreeNode(stringLiteral.getLiteralValue());
                    if (creation.getLocationInParent() == VariableDeclarationFragment.INITIALIZER_PROPERTY) {
                        String name = ((VariableDeclarationFragment) creation.getParent()).getName()
                                .getIdentifier();
                        nameToNode.put(name, node);
                    } else if (creation.getLocationInParent() == Assignment.RIGHT_HAND_SIDE_PROPERTY
                            && ((Assignment) creation.getParent()).getLeftHandSide() instanceof SimpleName) {
                        Assignment assignment = (Assignment) creation.getParent();
                        SimpleName variable = (SimpleName) assignment.getLeftHandSide();
                        String name = variable.getIdentifier();
                        nameToNode.put(name, node);
                    } else {
                        m_lastNode = node;
                    }
                }
            }

            @Override
            public void endVisit(MethodInvocation invocation) {
                if (AstNodeUtils.getMethodSignature(invocation)
                        .equals("add(javax.swing.tree.MutableTreeNode)")) {
                    // prepare node
                    DefaultMutableTreeNode node = null;
                    {
                        Object argument = invocation.arguments().get(0);
                        if (argument instanceof SimpleName) {
                            SimpleName variable = (SimpleName) argument;
                            node = nameToNode.get(variable.getIdentifier());
                        } else if (argument instanceof ClassInstanceCreation) {
                            node = m_lastNode;
                        }
                    }
                    // prepare parent
                    DefaultMutableTreeNode parentNode = null;
                    if (invocation.getExpression() instanceof SimpleName) {
                        SimpleName variable = (SimpleName) invocation.getExpression();
                        parentNode = nameToNode.get(variable.getIdentifier());
                    } else if (invocation.getExpression() == null) {
                        parentNode = rootNode;
                    }
                    // add node to parent
                    if (parentNode != null && node != null) {
                        parentNode.add(node);
                    }
                    // clear last node
                    m_lastNode = null;
                }
            }
        });
        // OK, return model
        return new DefaultTreeModel(rootNode);
    }
    // we don't understand given expression
    return AstEvaluationEngine.UNKNOWN;
}
From source file:uk.ac.ebi.mdk.io.text.mnxref.MnxRefReactionInput.java
private Map<String, MnxRefReaction> load(Reader propertyReader, Reader referenceReader) throws IOException {
    Map<String, MnxRefReaction> reactions = Maps.newTreeMap();
    {
        CSVReader tsv = new CSVReader(propertyReader, SEPARATOR, QUOTE);
        try {
            String[] line;
            while ((line = tsv.readNext()) != null) {
                if (line[0].charAt(0) != COMMENT) {
                    reactions.put(line[0], MnxRefReaction.parse(line));
                }
            }
        } finally {
            tsv.close();
        }
    }
    {
        CSVReader tsv = new CSVReader(referenceReader, SEPARATOR, QUOTE);
        try {
            String[] line;
            while ((line = tsv.readNext()) != null) {
                if (line[0].charAt(0) != COMMENT) {
                    String mnxId = line[1];
                    MnxRefReaction c = reactions.get(mnxId);
                    if (c == null) {
                        Logger logger = Logger.getLogger(getClass());
                        logger.warn("could not find MNX id " + line[1]);
                    } else {
                        c.add(line[1]);
                    }
                }
            }
        } finally {
            tsv.close();
        }
    }
    return reactions;
}
From source file:com.orange.clara.cloud.servicedbdumper.filer.s3uploader.UploadS3StreamImpl.java
@Override
public String upload(InputStream content, Blob blob) throws IOException {
    String key = blob.getMetadata().getName();
    String bucketName = this.blobStoreContext.getBucketName();
    ContentMetadata metadata = blob.getMetadata().getContentMetadata();
    ObjectMetadataBuilder builder = ObjectMetadataBuilder.create().key(key)
            .contentType(MediaType.OCTET_STREAM.toString()).contentDisposition(key)
            .contentEncoding(metadata.getContentEncoding()).contentLanguage(metadata.getContentLanguage())
            .userMetadata(blob.getMetadata().getUserMetadata());
    String uploadId = this.s3Client.initiateMultipartUpload(bucketName, builder.build());
    Integer partNum = 1;
    Payload part = null;
    int bytesRead = 0;
    byte[] chunk = null;
    boolean shouldContinue = true;
    SortedMap<Integer, String> etags = Maps.newTreeMap();
    try {
        while (shouldContinue) {
            chunk = new byte[chunkSize];
            bytesRead = ByteStreams.read(content, chunk, 0, chunk.length);
            if (bytesRead != chunk.length) {
                shouldContinue = false;
                chunk = Arrays.copyOf(chunk, bytesRead);
                if (chunk.length == 0) {
                    // something from jvm causing memory leak, we try to help jvm which seems working.
                    // but PLEASE DON'T REPLICATE AT HOME !
                    chunk = null;
                    part = null;
                    System.gc();
                    break;
                }
            }
            part = new ByteArrayPayload(chunk);
            prepareUploadPart(bucketName, key, uploadId, partNum, part, etags);
            partNum++;
            // something from jvm causing memory leak, we try to help jvm which seems working.
            // but PLEASE DON'T REPLICATE AT HOME !
            chunk = null;
            part = null;
            System.gc();
        }
        return this.completeMultipartUpload(bucketName, key, uploadId, etags);
    } catch (RuntimeException ex) {
        this.s3Client.abortMultipartUpload(bucketName, key, uploadId);
        throw ex;
    }
}