List of usage examples for com.google.common.collect Multimap removeAll
Collection<V> removeAll(@Nullable Object key);
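Before the real-world examples below, here is a minimal, self-contained sketch of the removeAll contract (keys and values are made up for illustration): the call removes every value mapped to the key and returns them; the returned collection is detached from the multimap, and an absent key yields an empty collection, never null.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class MultimapRemoveAllDemo {
    public static void main(String[] args) {
        Multimap<String, String> headers = HashMultimap.create();
        headers.put("Accept", "application/json");
        headers.put("Accept", "text/plain");
        headers.put("X-Trace", "abc123");

        // removeAll drops every value for the key and hands them back in one call
        Collection<String> accepts = headers.removeAll("Accept");
        System.out.println(accepts.size());               // 2
        System.out.println(headers.containsKey("Accept")); // false

        // an absent key returns an empty collection, never null
        System.out.println(headers.removeAll("no-such-key").isEmpty()); // true
    }
}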
From source file: org.killbill.billing.client.KillBillHttpClient.java

@Deprecated
private <T> T doPrepareRequestAndMaybeFollowLocation(final String verb, final String uri, final Object body,
        final Multimap<String, String> optionsRo, final Multimap<String, String> optionsForFollow,
        final int timeoutSec, final Class<T> clazz, final boolean followLocation) throws KillBillClientException {
    final Multimap<String, String> options = HashMultimap.<String, String>create(optionsRo);

    final String createdBy = getUniqueValue(options, AUDIT_OPTION_CREATED_BY);
    final String reason = getUniqueValue(options, AUDIT_OPTION_REASON);
    final String comment = getUniqueValue(options, AUDIT_OPTION_COMMENT);

    String apiKey = getUniqueValue(options, TENANT_OPTION_API_KEY);
    if (apiKey == null) {
        apiKey = this.apiKey;
    }
    String apiSecret = getUniqueValue(options, TENANT_OPTION_API_SECRET);
    if (apiSecret == null) {
        apiSecret = this.apiSecret;
    }
    String username = getUniqueValue(options, RBAC_OPTION_USERNAME);
    if (username == null) {
        username = this.username;
    }
    String password = getUniqueValue(options, RBAC_OPTION_PASSWORD);
    if (password == null) {
        password = this.password;
    }

    options.removeAll(AUDIT_OPTION_CREATED_BY);
    options.removeAll(AUDIT_OPTION_REASON);
    options.removeAll(AUDIT_OPTION_COMMENT);
    options.removeAll(TENANT_OPTION_API_KEY);
    options.removeAll(TENANT_OPTION_API_SECRET);
    options.removeAll(RBAC_OPTION_USERNAME);
    options.removeAll(RBAC_OPTION_PASSWORD);

    final BoundRequestBuilder builder = getBuilderWithHeaderAndQuery(verb, getKBServerUrl(uri), username,
            password, options);

    // Multi-Tenancy headers
    if (apiKey != null) {
        builder.addHeader(JaxrsResource.HDR_API_KEY, apiKey);
    }
    if (apiSecret != null) {
        builder.addHeader(JaxrsResource.HDR_API_SECRET, apiSecret);
    }
    // Metadata Additional headers
    if (createdBy != null) {
        builder.addHeader(JaxrsResource.HDR_CREATED_BY, createdBy);
    }
    if (reason != null) {
        builder.addHeader(JaxrsResource.HDR_REASON, reason);
    }
    if (comment != null) {
        builder.addHeader(JaxrsResource.HDR_COMMENT, comment);
    }

    if (!"GET".equals(verb) && !"HEAD".equals(verb)) {
        if (body != null) {
            if (body instanceof String) {
                builder.setBody((String) body);
            } else {
                try {
                    builder.setBody(mapper.writeValueAsString(body));
                } catch (final JsonProcessingException e) {
                    throw new KillBillClientException(e);
                }
            }
        } else {
            builder.setBody("{}");
        }
    }

    final Response response = doRequest(builder, timeoutSec);
    if (response.getStatusCode() == 404 || response.getStatusCode() == 204) {
        return createEmptyResult(clazz);
    }

    if (followLocation) {
        if (response.getHeader("Location") != null) {
            final String location = response.getHeader("Location");
            return doGetWithUrl(location, optionsForFollow, timeoutSec, clazz);
        }
        throwExceptionOnResponseError(response);
        return Response.class.isAssignableFrom(clazz) ? clazz.cast(response) : null;
    }

    throwExceptionOnResponseError(response);
    return deserializeResponse(response, clazz);
}
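The example above follows a consume-and-forward pattern: copy the caller's multimap, read the well-known option keys, strip them with removeAll, and forward whatever remains downstream as query parameters. A minimal sketch of that pattern, with a hypothetical consumeOption helper standing in for KillBill's getUniqueValue (whose exact behavior is not shown here):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.Iterator;

public class ConsumeAndForward {
    // hypothetical helper: returns one value for the key (if any) and removes all of them
    static String consumeOption(Multimap<String, String> options, String key) {
        Collection<String> values = options.removeAll(key);
        Iterator<String> it = values.iterator();
        return it.hasNext() ? it.next() : null;
    }

    public static void main(String[] args) {
        Multimap<String, String> callerOptions = HashMultimap.create();
        callerOptions.put("apiKey", "k-123");
        callerOptions.put("pageSize", "50");

        // work on a copy so the caller's multimap is untouched
        Multimap<String, String> options = HashMultimap.create(callerOptions);
        String apiKey = consumeOption(options, "apiKey"); // becomes a header
        System.out.println("header apiKey=" + apiKey);

        // everything left over is forwarded, e.g. as query parameters
        options.entries().forEach(e -> System.out.println("query " + e.getKey() + "=" + e.getValue()));
    }
}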
From source file: org.killbill.billing.client.KillBillHttpClient.java

@Deprecated
private BoundRequestBuilder getBuilderWithHeaderAndQuery(final String verb, final String url,
        @Nullable final String username, @Nullable final String password,
        final Multimap<String, String> options) {
    final BoundRequestBuilder builder;
    if ("GET".equals(verb)) {
        builder = httpClient.prepareGet(url);
    } else if ("POST".equals(verb)) {
        builder = httpClient.preparePost(url);
    } else if ("PUT".equals(verb)) {
        builder = httpClient.preparePut(url);
    } else if ("DELETE".equals(verb)) {
        builder = httpClient.prepareDelete(url);
    } else if ("HEAD".equals(verb)) {
        builder = httpClient.prepareHead(url);
    } else if ("OPTIONS".equals(verb)) {
        builder = httpClient.prepareOptions(url);
    } else {
        throw new IllegalArgumentException("Unrecognized verb: " + verb);
    }

    if (username != null && password != null) {
        final Realm realm = new RealmBuilder().setPrincipal(username).setPassword(password)
                .setScheme(Realm.AuthScheme.BASIC).setUsePreemptiveAuth(true).build();
        builder.setRealm(realm);
    }

    final Collection<String> acceptHeaders = options.removeAll(HTTP_HEADER_ACCEPT);
    final String acceptHeader;
    if (!acceptHeaders.isEmpty()) {
        acceptHeader = CSV_JOINER.join(acceptHeaders);
    } else {
        acceptHeader = ACCEPT_JSON;
    }
    builder.addHeader(HTTP_HEADER_ACCEPT, acceptHeader);

    String contentTypeHeader = getUniqueValue(options, HTTP_HEADER_CONTENT_TYPE);
    if (contentTypeHeader == null) {
        contentTypeHeader = CONTENT_TYPE_JSON;
    } else {
        options.removeAll(HTTP_HEADER_CONTENT_TYPE);
    }
    builder.addHeader(HTTP_HEADER_CONTENT_TYPE, contentTypeHeader);

    builder.setBodyEncoding("UTF-8");

    for (final String key : options.keySet()) {
        if (options.get(key) != null) {
            for (final String value : options.get(key)) {
                builder.addQueryParam(key, value);
            }
        }
    }

    return builder;
}
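Two properties of removeAll, both part of the Multimap contract, do the heavy lifting here: the returned collection is detached from the multimap, so CSV_JOINER can safely consume the Accept values even as options keeps being modified, and a missing key yields an empty collection rather than null, which is why the code checks isEmpty() instead of null. The options.get(key) != null guard in the final loop is likewise purely defensive; Multimap.get never returns null either.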
From source file: org.sonar.plugins.core.issue.IssueTracking.java

private void mapNewissues(String referenceSource, Collection<DefaultIssue> newIssues, String source,
        IssueTrackingResult result) {
    HashedSequence<StringText> hashedReference = HashedSequence.wrap(new StringText(referenceSource),
            StringTextComparator.IGNORE_WHITESPACE);
    HashedSequence<StringText> hashedSource = HashedSequence.wrap(new StringText(source),
            StringTextComparator.IGNORE_WHITESPACE);
    HashedSequenceComparator<StringText> hashedComparator = new HashedSequenceComparator<StringText>(
            StringTextComparator.IGNORE_WHITESPACE);

    ViolationTrackingBlocksRecognizer rec = new ViolationTrackingBlocksRecognizer(hashedReference, hashedSource,
            hashedComparator);

    Multimap<Integer, DefaultIssue> newIssuesByLines = newIssuesByLines(newIssues, rec, result);
    Multimap<Integer, IssueDto> lastIssuesByLines = lastIssuesByLines(result.unmatched(), rec);

    RollingHashSequence<HashedSequence<StringText>> a = RollingHashSequence.wrap(hashedReference,
            hashedComparator, 5);
    RollingHashSequence<HashedSequence<StringText>> b = RollingHashSequence.wrap(hashedSource,
            hashedComparator, 5);
    RollingHashSequenceComparator<HashedSequence<StringText>> cmp = new RollingHashSequenceComparator<HashedSequence<StringText>>(
            hashedComparator);

    Map<Integer, HashOccurrence> map = Maps.newHashMap();

    for (Integer line : lastIssuesByLines.keySet()) {
        int hash = cmp.hash(a, line - 1);
        HashOccurrence hashOccurrence = map.get(hash);
        if (hashOccurrence == null) {
            // first occurrence in A
            hashOccurrence = new HashOccurrence();
            hashOccurrence.lineA = line;
            hashOccurrence.countA = 1;
            map.put(hash, hashOccurrence);
        } else {
            hashOccurrence.countA++;
        }
    }

    for (Integer line : newIssuesByLines.keySet()) {
        int hash = cmp.hash(b, line - 1);
        HashOccurrence hashOccurrence = map.get(hash);
        if (hashOccurrence != null) {
            hashOccurrence.lineB = line;
            hashOccurrence.countB++;
        }
    }

    for (HashOccurrence hashOccurrence : map.values()) {
        if (hashOccurrence.countA == 1 && hashOccurrence.countB == 1) {
            // Guaranteed that lineA has been moved to lineB, so we can map all issues on lineA to all issues on lineB
            map(newIssuesByLines.get(hashOccurrence.lineB), lastIssuesByLines.get(hashOccurrence.lineA), result);
            lastIssuesByLines.removeAll(hashOccurrence.lineA);
            newIssuesByLines.removeAll(hashOccurrence.lineB);
        }
    }

    // Check if remaining number of lines exceeds threshold
    if (lastIssuesByLines.keySet().size() * newIssuesByLines.keySet().size() < 250000) {
        List<LinePair> possibleLinePairs = Lists.newArrayList();
        for (Integer oldLine : lastIssuesByLines.keySet()) {
            for (Integer newLine : newIssuesByLines.keySet()) {
                int weight = rec.computeLengthOfMaximalBlock(oldLine - 1, newLine - 1);
                possibleLinePairs.add(new LinePair(oldLine, newLine, weight));
            }
        }
        Collections.sort(possibleLinePairs, LINE_PAIR_COMPARATOR);
        for (LinePair linePair : possibleLinePairs) {
            // High probability that lineA has been moved to lineB, so we can map all issues on lineA to all issues on lineB
            map(newIssuesByLines.get(linePair.lineB), lastIssuesByLines.get(linePair.lineA), result);
        }
    }
}
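The tracker above uses removeAll to retire whole line buckets as soon as they are matched, so the later, more expensive pairwise pass only iterates over leftovers. A stripped-down sketch of that bucket-consuming idea, with the issue types replaced by plain strings for illustration (real tracking decides matches via block hashes, as shown above):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class BucketMatching {
    public static void main(String[] args) {
        Multimap<Integer, String> oldByLine = ArrayListMultimap.create();
        Multimap<Integer, String> newByLine = ArrayListMultimap.create();
        oldByLine.put(10, "old issue A");
        newByLine.put(12, "new issue A");
        newByLine.put(30, "new issue B");

        // suppose hashing showed line 10 (old) moved to line 12 (new):
        // map the buckets to each other, then retire both with removeAll
        System.out.println("matched: " + oldByLine.removeAll(10) + " -> " + newByLine.removeAll(12));

        // the fallback pass now only sees what is still unmatched
        System.out.println("left for fuzzy matching: " + newByLine.keySet()); // [30]
    }
}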
From source file: com.github.pms1.tppt.p2.FeatureXmlComparator.java

private void comparePlugins2(Map<String, Multimap<String, Element>> baseline,
        Map<String, Multimap<String, Element>> current, ElementDeltaReporter elementDeltaReporter,
        DeltaReporter deltaReporter) {
    for (String id : Sets.union(baseline.keySet(), current.keySet())) {
        Multimap<String, Element> b = baseline.get(id);
        if (b == null)
            b = HashMultimap.create();
        else
            b = HashMultimap.create(b);

        Multimap<String, Element> c = current.get(id);
        if (c == null)
            c = HashMultimap.create();
        else
            c = HashMultimap.create(c);

        AttributesDeltaReporter r = new AttributesDeltaReporter() {

            @Override
            public void removed(String key) {
                deltaReporter.fileDelta("Plugin {0} attribute {1} removed", id, key);
            }

            @Override
            public void changed(String key, String left, String right) {
                if (key.equals("version")) {
                    deltaReporter.pluginVersionDelta(id, left, right);
                } else {
                    deltaReporter.fileDelta("Plugin {0} attribute {1} changed {2} -> {3}", id, key, left, right);
                }
            }

            @Override
            public void added(String key, String value) {
                deltaReporter.fileDelta("Plugin {0} attribute {1} / {2} added", id, key, value);
            }
        };

        Set<String> intersection = new HashSet<>(b.keys());
        intersection.retainAll(c.keys());

        for (String v : intersection) {
            Collection<Element> be = b.get(v);
            Collection<Element> ce = c.get(v);

            if (be.size() == 1 && ce.size() == 1) {
                compareAttributes(Iterables.getOnlyElement(be), Iterables.getOnlyElement(ce), r);
                b.removeAll(v);
                c.removeAll(v);
            }
        }

        if (b.size() == 1 && c.size() == 1) {
            compareAttributes(Iterables.getOnlyElement(b.values()), Iterables.getOnlyElement(c.values()), r);
        } else {
            for (Element e : b.values())
                deltaReporter.fileDelta("Plugin removed: {0}", domRenderer.render(e));
            for (Element e : c.values())
                deltaReporter.fileDelta("Plugin added: {0}", domRenderer.render(e));
        }
    }
}
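A design note on the example above: b and c are defensive copies (HashMultimap.create(b)), so the removeAll calls that retire 1:1-matched version buckets mutate only local state, never the caller's baseline/current maps. Whatever survives the loop in either copy is exactly what gets reported as a removed or added plugin.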
From source file: com.b2international.snowowl.core.validation.ValidateRequest.java

private ValidationResult doValidate(BranchContext context, Writer index) throws IOException {
    final String branchPath = context.branchPath();
    ValidationRuleSearchRequestBuilder req = ValidationRequests.rules().prepareSearch();

    if (!CompareUtils.isEmpty(ruleIds)) {
        req.filterByIds(ruleIds);
    }

    final ValidationRules rules = req.all().build().execute(context);

    final ValidationThreadPool pool = context.service(ValidationThreadPool.class);
    final BlockingQueue<IssuesToPersist> issuesToPersistQueue = Queues.newLinkedBlockingDeque();
    final List<Promise<Object>> validationPromises = Lists.newArrayList();
    // evaluate selected rules
    for (ValidationRule rule : rules) {
        checkArgument(rule.getCheckType() != null, "CheckType is missing for rule " + rule.getId());
        final ValidationRuleEvaluator evaluator = ValidationRuleEvaluator.Registry.get(rule.getType());
        if (evaluator != null) {
            validationPromises.add(pool.submit(rule.getCheckType(), () -> {
                Stopwatch w = Stopwatch.createStarted();
                try {
                    LOG.info("Executing rule '{}'...", rule.getId());
                    final List<ComponentIdentifier> componentIdentifiers = evaluator.eval(context, rule,
                            ruleParameters);
                    issuesToPersistQueue.offer(new IssuesToPersist(rule.getId(), componentIdentifiers));
                    LOG.info("Execution of rule '{}' successfully completed in '{}'.", rule.getId(), w);
                    // TODO report successfully executed validation rule
                } catch (Exception e) {
                    // TODO report failed validation rule
                    LOG.info("Execution of rule '{}' failed after '{}'.", rule.getId(), w, e);
                }
            }));
        }
    }

    final Set<String> ruleIds = rules.stream().map(ValidationRule::getId).collect(Collectors.toSet());
    final Multimap<String, ComponentIdentifier> whiteListedEntries = fetchWhiteListEntries(context, ruleIds);

    final Promise<List<Object>> promise = Promise.all(validationPromises);

    while (!promise.isDone() || !issuesToPersistQueue.isEmpty()) {
        if (!issuesToPersistQueue.isEmpty()) {
            final Collection<IssuesToPersist> issuesToPersist = newArrayList();
            issuesToPersistQueue.drainTo(issuesToPersist);
            if (!issuesToPersist.isEmpty()) {
                final List<String> rulesToPersist = issuesToPersist.stream().map(itp -> itp.ruleId)
                        .collect(Collectors.toList());
                LOG.info("Persisting issues generated by rules '{}'...", rulesToPersist);
                // persist new issues generated by rules so far, extending them using the Issue Extension API
                int persistedIssues = 0;

                final Multimap<String, ValidationIssue> issuesToExtendWithDetailsByToolingId = HashMultimap
                        .create();
                for (IssuesToPersist ruleIssues : Iterables.consumingIterable(issuesToPersist)) {
                    final String ruleId = ruleIssues.ruleId;
                    final List<ValidationIssue> existingRuleIssues = ValidationRequests.issues().prepareSearch()
                            .all().filterByBranchPath(branchPath).filterByRule(ruleId).build().execute(context)
                            .getItems();

                    final Set<String> issueIdsToDelete = Sets.newHashSet();

                    final Map<ComponentIdentifier, ValidationIssue> existingIsssuesByComponentIdentifier = new HashMap<>();

                    for (ValidationIssue issue : existingRuleIssues) {
                        if (existingIsssuesByComponentIdentifier.containsKey(issue.getAffectedComponent())) {
                            issueIdsToDelete.add(issue.getId());
                        } else {
                            existingIsssuesByComponentIdentifier.put(issue.getAffectedComponent(), issue);
                        }
                    }

                    // remove all processed whitelist entries
                    final Collection<ComponentIdentifier> ruleWhiteListEntries = whiteListedEntries
                            .removeAll(ruleId);
                    final String toolingId = rules.stream().filter(rule -> ruleId.equals(rule.getId()))
                            .findFirst().get().getToolingId();
                    for (ComponentIdentifier componentIdentifier : ruleIssues.affectedComponentIds) {
                        if (!existingIsssuesByComponentIdentifier.containsKey(componentIdentifier)) {
                            final ValidationIssue validationIssue = new ValidationIssue(
                                    UUID.randomUUID().toString(), ruleId, branchPath, componentIdentifier,
                                    ruleWhiteListEntries.contains(componentIdentifier));
                            issuesToExtendWithDetailsByToolingId.put(toolingId, validationIssue);
                            persistedIssues++;
                        } else {
                            final ValidationIssue issueToCopy = existingIsssuesByComponentIdentifier
                                    .get(componentIdentifier);
                            final ValidationIssue validationIssue = new ValidationIssue(issueToCopy.getId(),
                                    issueToCopy.getRuleId(), issueToCopy.getBranchPath(),
                                    issueToCopy.getAffectedComponent(),
                                    ruleWhiteListEntries.contains(issueToCopy.getAffectedComponent()));
                            validationIssue.setDetails(Maps.newHashMap());
                            issuesToExtendWithDetailsByToolingId.put(toolingId, validationIssue);
                            persistedIssues++;
                            existingIsssuesByComponentIdentifier.remove(componentIdentifier);
                        }
                    }

                    existingRuleIssues.stream()
                            .filter(issue -> existingIsssuesByComponentIdentifier
                                    .containsKey(issue.getAffectedComponent()))
                            .forEach(issue -> issueIdsToDelete.add(issue.getId()));

                    if (!issueIdsToDelete.isEmpty()) {
                        index.removeAll(Collections.singletonMap(ValidationIssue.class, issueIdsToDelete));
                    }
                }

                for (String toolingId : issuesToExtendWithDetailsByToolingId.keySet()) {
                    final ValidationIssueDetailExtension extensions = ValidationIssueDetailExtensionProvider.INSTANCE
                            .getExtensions(toolingId);
                    final Collection<ValidationIssue> issues = issuesToExtendWithDetailsByToolingId
                            .removeAll(toolingId);
                    extensions.extendIssues(context, issues);
                    for (ValidationIssue issue : issues) {
                        index.put(issue.getId(), issue);
                    }
                }

                index.commit();
                LOG.info("Persisted '{}' issues generated by rules '{}'.", persistedIssues, rulesToPersist);
            }
        }

        try {
            Thread.sleep(1000L);
        } catch (InterruptedException e) {
            throw new SnowowlRuntimeException(e);
        }
    }

    // TODO return ValidationResult object with status and new issue IDs as set
    return new ValidationResult(context.id(), context.branchPath());
}
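Two removeAll usages above are worth separating. Draining whiteListedEntries.removeAll(ruleId) per rule is safe, since the rule id comes from outside the multimap being iterated. The final loop, however, calls removeAll(toolingId) while iterating issuesToExtendWithDetailsByToolingId.keySet(), and a HashMultimap key-set view is fail-fast, so that style risks a ConcurrentModificationException once more than one tooling id is in flight. A safer drain idiom, sketched with the same names as above:

// iterate a copy of the key set, then consume each bucket with removeAll
for (String toolingId : ImmutableSet.copyOf(issuesToExtendWithDetailsByToolingId.keySet())) {
    Collection<ValidationIssue> issues = issuesToExtendWithDetailsByToolingId.removeAll(toolingId);
    // ... extend and index the issues ...
}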
From source file: org.sosy_lab.cpachecker.cpa.predicate.BlockFormulaSlicer.java

private Collection<String> sliceBlock(ARGState start, ARGState end, Set<ARGState> block,
        Collection<String> importantVars) {

    // this map contains all done states with their vars (if not removed through cleanup)
    final Map<ARGState, Collection<String>> s2v = Maps.newHashMapWithExpectedSize(block.size());

    // this map contains all done states with their last important state
    // a state is important, if any outgoing edge is important
    final Multimap<ARGState, ARGState> s2s = HashMultimap.create(block.size(), 1);

    // bfs for parents, visit each state once.
    // we use a list for the next states,
    // but we also remove states from waitlist, when they are done,
    // so we need fast access to the states
    final Set<ARGState> waitlist = new LinkedHashSet<>();

    // special handling of first state
    s2v.put(end, importantVars);
    s2s.put(end, end);
    for (final ARGState parent : end.getParents()) {
        if (block.contains(parent)) {
            waitlist.add(parent);
        }
    }

    while (!waitlist.isEmpty()) {
        final ARGState current = Iterables.getFirst(waitlist, null);
        waitlist.remove(current);

        // already handled
        assert !s2v.keySet().contains(current);

        // we have to wait for all children completed,
        // because we want to join the branches
        if (!isAllChildrenDone(current, s2v.keySet(), block)) {
            waitlist.add(current); // re-add current state, at last position
            continue;
        }

        // collect new states, ignore unreachable states
        for (final ARGState parent : current.getParents()) {
            if (block.contains(parent)) {
                waitlist.add(parent);
            }
        }

        // handle state
        final Collection<String> vars = handleEdgesForState(current, s2v, s2s, block);
        s2v.put(current, vars);

        // cleanup, remove states, that will not be used in future
        for (final ARGState child : current.getChildren()) {
            if (block.contains(child) && isAllParentsDone(child, s2v.keySet(), block)) {
                s2v.remove(child);
                s2s.removeAll(child);
            }
        }
    }

    return s2v.get(start);
}
From source file: org.eclipse.jdt.ls.core.internal.highlighting.SemanticHighlightingDiffCalculator.java

public List<SemanticHighlightingInformation> getDiffInfos(HighlightedPositionDiffContext context)
        throws BadLocationException {

    IDocument newState = context.newState;
    IDocument oldState = context.oldState;

    // Can be negative or zero too.
    int lineShiftCount = this.getLineShift(oldState, context.event);

    int eventOffset = context.event.getOffset();
    int eventOldLength = context.event.getLength();
    int eventEnd = eventOffset + eventOldLength;

    Map<Integer, SemanticHighlightingInformation> infosPerLine = Maps.newHashMap();
    Multimap<Integer, SemanticHighlightingTokens.Token> tokensPerLine = HashMultimap.create();
    Multimap<Integer, HighlightedPositionCore> pendingPositions = HashMultimap.create();
    Map<LookupKey, HighlightedPositionCore> newPositions = Maps.newHashMap();
    for (HighlightedPositionCore newPosition : context.newPositions) {
        LookupKey key = createKey(newState, newPosition);
        newPositions.put(key, newPosition);
        pendingPositions.put(key.line, newPosition);
    }

    for (HighlightedPositionCore oldPosition : context.oldPositions) {
        int[] oldLineAndColumn = getLineAndColumn(oldState, oldPosition);
        int originalOldLine = oldLineAndColumn[0];
        int oldColumn = oldLineAndColumn[1];
        int oldOffset = oldPosition.getOffset();
        int oldLength = oldPosition.getLength();
        int oldEnd = oldOffset + oldLength;
        // If the position is before the change (event), no need to shift the line. Otherwise we consider the line shift.
        int adjustedOldLine = oldEnd < eventEnd ? originalOldLine : originalOldLine + lineShiftCount;
        @SuppressWarnings("unchecked")
        int scope = SemanticHighlightingService.getIndex((List<String>) oldPosition.getHighlighting());
        LookupKey key = createKey(adjustedOldLine, oldColumn, getTextAt(oldState, oldPosition), scope);
        HighlightedPositionCore newPosition = newPositions.remove(key);
        if (newPosition == null && !infosPerLine.containsKey(originalOldLine)) {
            infosPerLine.put(originalOldLine, new SemanticHighlightingInformation(originalOldLine, null));
        }
    }

    for (Entry<LookupKey, HighlightedPositionCore> entries : newPositions.entrySet()) {
        LookupKey lookupKey = entries.getKey();
        int line = lookupKey.line;
        int length = lookupKey.text.length();
        int character = lookupKey.column;
        int scope = lookupKey.scope;
        SemanticHighlightingInformation info = infosPerLine.get(line);
        if (info == null) {
            info = new SemanticHighlightingInformation(line, null);
            infosPerLine.put(line, info);
        }
        tokensPerLine.put(line, new SemanticHighlightingTokens.Token(character, length, scope));
        // If a line contains at least one change, we need to invalidate the entire line by consuming all pending positions.
        Collection<HighlightedPositionCore> pendings = pendingPositions.removeAll(line);
        if (pendings != null) {
            for (HighlightedPositionCore pendingPosition : pendings) {
                if (pendingPosition != entries.getValue()) {
                    int[] lineAndColumn = getLineAndColumn(newState, pendingPosition);
                    int pendingCharacter = lineAndColumn[1];
                    int pendingLength = pendingPosition.length;
                    @SuppressWarnings("unchecked")
                    int pendingScope = SemanticHighlightingService
                            .getIndex((List<String>) pendingPosition.getHighlighting());
                    tokensPerLine.put(line,
                            new SemanticHighlightingTokens.Token(pendingCharacter, pendingLength, pendingScope));
                }
            }
        }
    }

    for (Entry<Integer, Collection<SemanticHighlightingTokens.Token>> entry : tokensPerLine.asMap().entrySet()) {
        List<SemanticHighlightingTokens.Token> tokens = newArrayList(entry.getValue());
        Collections.sort(tokens);
        infosPerLine.get(entry.getKey()).setTokens(SemanticHighlightingTokens.encode(tokens));
    }

    return FluentIterable.from(infosPerLine.values()).toSortedList(HighlightingInformationComparator.INSTANCE);
}
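Small contract note on the example above: pendingPositions.removeAll(line) never returns null; by the Multimap contract an unmapped key produces an empty collection. The if (pendings != null) guard is therefore always true, and the loop body simply does nothing when the line had no pending positions.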
From source file: edu.buaa.satla.analysis.core.predicate.BlockFormulaSlicer.java

private Collection<String> sliceBlock(ARGState start, ARGState end, Set<ARGState> block,
        Collection<String> importantVars) {

    // this map contains all done states with their vars (if not removed through cleanup)
    final Map<ARGState, Collection<String>> s2v = Maps.newHashMapWithExpectedSize(block.size());

    // this map contains all done states with their last important state
    // a state is important, if any outgoing edge is important
    final Multimap<ARGState, ARGState> s2s = HashMultimap.create(block.size(), 1);

    // bfs for parents, visit each state once.
    // we use a list for the next states,
    // but we also remove states from waitlist, when they are done,
    // so we need fast access to the states
    final Set<ARGState> waitlist = new LinkedHashSet<>();

    // special handling of first state
    s2v.put(end, importantVars);
    s2s.put(end, end);
    for (final ARGState parent : end.getParents()) {
        if (block.contains(parent)) {
            waitlist.add(parent);
        }
    }

    while (!waitlist.isEmpty()) {
        final ARGState current = Iterables.getFirst(waitlist, null);
        waitlist.remove(current);

        // already handled
        assert !s2v.keySet().contains(current);

        // we have to wait for all children completed,
        // because we want to join the branches
        if (!isAllChildrenDone(current, s2v.keySet(), block)) {
            waitlist.add(current); // re-add current state, at last position
            continue;
        }

        // collect new states, ignore unreachable states
        for (final ARGState parent : current.getParents()) {
            if (block.contains(parent)) {
                waitlist.add(parent);
            }
        }

        // handle state
        final Collection<String> vars = handleEdgesForState(current, s2v, s2s, block);
        s2v.put(current, vars);

        // cleanup, remove states, that will not be used in future
        for (final ARGState child : current.getChildren()) {
            if (block.contains(child) && isAllParentsDone(child, s2v.keySet(), block)) {
                s2v.remove(child);
                s2s.removeAll(child);
            }
        }
    }

    // logging
    // System.out.println("START:: " + (start == null ? null : start.getStateId()));
    // System.out.println("END:: " + end.getStateId());
    // System.out.print("BLOCK:: ");
    // for (ARGState current : block) {
    //     System.out.print(current.getStateId() + ", ");
    // }
    // System.out.println();
    // System.out.print("VISITED:: ");
    // for (ARGState current : s2v.keySet()) {
    //     System.out.print(current.getStateId() + ", ");
    // }
    // System.out.println("\n\n");

    return s2v.get(start);
}
From source file: jef.tools.IOUtils.java

/**
 * Writes a Map in properties-file format, optionally grouped into [section] blocks.
 * (Javadoc reconstructed from the code; the original comment was mis-encoded.)
 *
 * @param writer         the target writer
 * @param map            the data to store
 * @param closeWriter    if true, the writer is closed when done; if false, it is left open
 * @param sectionSupport whether keys of the form "section|key" are grouped into [section]
 *                       blocks; null means auto-detect from the first few keys
 * @param saveConvert    escaping mode: 1 escapes both key and value, 0 escapes the key only,
 *                       -1 escapes neither
 */
public static void storeProperties(Writer writer, Map<String, String> map, boolean closeWriter,
        Boolean sectionSupport, int saveConvert) {
    if (sectionSupport == null) {
        int limit = 3;
        sectionSupport = true;
        for (Entry<String, String> entry : map.entrySet()) {
            limit--;
            String key = entry.getKey();
            if (key.indexOf('|') == -1) {
                sectionSupport = false;
                break;
            }
            if (limit < 0) {
                break;
            }
        }
    }
    try {
        if (sectionSupport) {
            Multimap<String, Map.Entry<String, String>> sections = CollectionUtils.group(map.entrySet(),
                    new Function<Map.Entry<String, String>, String>() {
                        public String apply(Entry<String, String> input) {
                            int sectionLen = input.getKey().indexOf('|');
                            return sectionLen == -1 ? "" : input.getKey().substring(0, sectionLen);
                        }
                    });
            boolean hasNoSecLine = false;
            for (Map.Entry<String, String> entry : sections.removeAll("")) {
                writer.write(saveConvert(entry.getKey(), true, saveConvert));
                writer.write('=');
                writer.write(saveConvert(entry.getValue(), false, saveConvert));
                writer.write(StringUtils.CRLF_STR);
                hasNoSecLine = true;
            }
            if (!sections.isEmpty()) {
                if (hasNoSecLine)
                    writer.write(StringUtils.CRLF_STR);
                for (String section : sections.keySet()) {
                    writer.write("[" + section + "]\r\n");
                    for (Map.Entry<String, String> entry : sections.get(section)) {
                        writer.write(saveConvert(entry.getKey().substring(section.length() + 1), true,
                                saveConvert));
                        writer.write('=');
                        writer.write(saveConvert(entry.getValue(), false, saveConvert));
                        writer.write(StringUtils.CRLF_STR);
                    }
                    writer.write(StringUtils.CRLF_STR);
                }
            }
        } else {
            for (Map.Entry<String, String> entry : map.entrySet()) {
                writer.write(saveConvert(entry.getKey(), true, saveConvert));
                writer.write('=');
                writer.write(saveConvert(entry.getValue(), false, saveConvert));
                writer.write(StringUtils.CRLF_STR);
            }
        }
        writer.flush();
    } catch (IOException e1) {
        LogUtil.exception(e1);
    } finally {
        if (closeWriter)
            closeQuietly(writer);
    }
}
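The sections.removeAll("") call above does double duty: it returns the section-less entries (keys without a '|') so they can be written at the top of the file, and it simultaneously deletes that bucket, so the subsequent sections.isEmpty() check and key-set loop see only the named [section] groups.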
From source file: org.sonar.plugins.core.timemachine.ViolationTrackingDecorator.java

@VisibleForTesting
Map<Violation, RuleFailureModel> mapViolations(List<Violation> newViolations,
        @Nullable List<RuleFailureModel> lastViolations, @Nullable String source, @Nullable Resource resource) {
    boolean hasLastScan = false;
    Multimap<Integer, RuleFailureModel> lastViolationsByRule = LinkedHashMultimap.create();

    if (lastViolations != null) {
        hasLastScan = true;
        unmappedLastViolations.addAll(lastViolations);

        for (RuleFailureModel lastViolation : lastViolations) {
            lastViolationsByRule.put(lastViolation.getRuleId(), lastViolation);
        }

        // Match the permanent id of the violation. This id is for example set explicitly when injecting manual violations
        for (Violation newViolation : newViolations) {
            mapViolation(newViolation,
                    findLastViolationWithSamePermanentId(newViolation,
                            lastViolationsByRule.get(newViolation.getRule().getId())),
                    lastViolationsByRule, referenceViolationsMap);
        }

        // Try first to match violations on same rule with same line and with same checksum (but not necessarily with same message)
        for (Violation newViolation : newViolations) {
            if (isNotAlreadyMapped(newViolation)) {
                mapViolation(newViolation,
                        findLastViolationWithSameLineAndChecksum(newViolation,
                                lastViolationsByRule.get(newViolation.getRule().getId())),
                        lastViolationsByRule, referenceViolationsMap);
            }
        }
    }

    // If each new violation matches an old one we can stop the matching mechanism
    if (referenceViolationsMap.size() != newViolations.size()) {
        if (source != null && resource != null && hasLastScan) {
            String referenceSource = lastSnapshots.getSource(resource);
            if (referenceSource != null) {
                HashedSequence<StringText> hashedReference = HashedSequence.wrap(new StringText(referenceSource),
                        StringTextComparator.IGNORE_WHITESPACE);
                HashedSequence<StringText> hashedSource = HashedSequence.wrap(new StringText(source),
                        StringTextComparator.IGNORE_WHITESPACE);
                HashedSequenceComparator<StringText> hashedComparator = new HashedSequenceComparator<StringText>(
                        StringTextComparator.IGNORE_WHITESPACE);

                ViolationTrackingBlocksRecognizer rec = new ViolationTrackingBlocksRecognizer(hashedReference,
                        hashedSource, hashedComparator);

                Multimap<Integer, Violation> newViolationsByLines = newViolationsByLines(newViolations, rec);
                Multimap<Integer, RuleFailureModel> lastViolationsByLines = lastViolationsByLines(
                        unmappedLastViolations, rec);

                RollingHashSequence<HashedSequence<StringText>> a = RollingHashSequence.wrap(hashedReference,
                        hashedComparator, 5);
                RollingHashSequence<HashedSequence<StringText>> b = RollingHashSequence.wrap(hashedSource,
                        hashedComparator, 5);
                RollingHashSequenceComparator<HashedSequence<StringText>> cmp = new RollingHashSequenceComparator<HashedSequence<StringText>>(
                        hashedComparator);

                Map<Integer, HashOccurrence> map = Maps.newHashMap();

                for (Integer line : lastViolationsByLines.keySet()) {
                    int hash = cmp.hash(a, line - 1);
                    HashOccurrence hashOccurrence = map.get(hash);
                    if (hashOccurrence == null) {
                        // first occurrence in A
                        hashOccurrence = new HashOccurrence();
                        hashOccurrence.lineA = line;
                        hashOccurrence.countA = 1;
                        map.put(hash, hashOccurrence);
                    } else {
                        hashOccurrence.countA++;
                    }
                }

                for (Integer line : newViolationsByLines.keySet()) {
                    int hash = cmp.hash(b, line - 1);
                    HashOccurrence hashOccurrence = map.get(hash);
                    if (hashOccurrence != null) {
                        hashOccurrence.lineB = line;
                        hashOccurrence.countB++;
                    }
                }

                for (HashOccurrence hashOccurrence : map.values()) {
                    if (hashOccurrence.countA == 1 && hashOccurrence.countB == 1) {
                        // Guaranteed that lineA has been moved to lineB, so we can map all violations on lineA to all violations on lineB
                        map(newViolationsByLines.get(hashOccurrence.lineB),
                                lastViolationsByLines.get(hashOccurrence.lineA), lastViolationsByRule);
                        lastViolationsByLines.removeAll(hashOccurrence.lineA);
                        newViolationsByLines.removeAll(hashOccurrence.lineB);
                    }
                }

                // Check if remaining number of lines exceeds threshold
                if (lastViolationsByLines.keySet().size() * newViolationsByLines.keySet().size() < 250000) {
                    List<LinePair> possibleLinePairs = Lists.newArrayList();
                    for (Integer oldLine : lastViolationsByLines.keySet()) {
                        for (Integer newLine : newViolationsByLines.keySet()) {
                            int weight = rec.computeLengthOfMaximalBlock(oldLine - 1, newLine - 1);
                            possibleLinePairs.add(new LinePair(oldLine, newLine, weight));
                        }
                    }
                    Collections.sort(possibleLinePairs, LINE_PAIR_COMPARATOR);
                    for (LinePair linePair : possibleLinePairs) {
                        // High probability that lineA has been moved to lineB, so we can map all violations on lineA to all violations on lineB
                        map(newViolationsByLines.get(linePair.lineB), lastViolationsByLines.get(linePair.lineA),
                                lastViolationsByRule);
                    }
                }
            }
        }

        // Try then to match violations on same rule with same message and with same checksum
        for (Violation newViolation : newViolations) {
            if (isNotAlreadyMapped(newViolation)) {
                mapViolation(newViolation,
                        findLastViolationWithSameChecksumAndMessage(newViolation,
                                lastViolationsByRule.get(newViolation.getRule().getId())),
                        lastViolationsByRule, referenceViolationsMap);
            }
        }

        // Try then to match violations on same rule with same line and with same message
        for (Violation newViolation : newViolations) {
            if (isNotAlreadyMapped(newViolation)) {
                mapViolation(newViolation,
                        findLastViolationWithSameLineAndMessage(newViolation,
                                lastViolationsByRule.get(newViolation.getRule().getId())),
                        lastViolationsByRule, referenceViolationsMap);
            }
        }

        // Last check: match violation if same rule and same checksum but different line and different message
        // See SONAR-2812
        for (Violation newViolation : newViolations) {
            if (isNotAlreadyMapped(newViolation)) {
                mapViolation(newViolation,
                        findLastViolationWithSameChecksum(newViolation,
                                lastViolationsByRule.get(newViolation.getRule().getId())),
                        lastViolationsByRule, referenceViolationsMap);
            }
        }
    }

    unmappedLastViolations.clear();
    return referenceViolationsMap;
}