List of usage examples for java.util.Set.isEmpty()
boolean isEmpty();
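Before the project-level examples, a minimal standalone sketch of the contract: a Set reports isEmpty() == true when it holds no elements, false once an element is added, and true again after clear().

import java.util.HashSet;
import java.util.Set;

public class IsEmptyBasics {
    public static void main(String[] args) {
        Set<String> names = new HashSet<>();
        System.out.println(names.isEmpty()); // true: a freshly created set has no elements

        names.add("alice");
        System.out.println(names.isEmpty()); // false: the set now contains one element

        names.clear();
        System.out.println(names.isEmpty()); // true again after removing everything
    }
}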
From source file:msi.gama.util.GAML.java
public static String getDocumentationOn2(final String query) {
    final String keyword = StringUtils.removeEnd(StringUtils.removeStart(query.trim(), "#"), ":");
    final THashMap<String, String> results = new THashMap<>();
    // Statements
    final SymbolProto p = DescriptionFactory.getStatementProto(keyword);
    if (p != null) {
        results.put("Statement", p.getDocumentation());
    }
    DescriptionFactory.visitStatementProtos((name, proto) -> {
        if (proto.getFacet(keyword) != null) {
            results.put("Facet of statement " + name, proto.getFacet(keyword).getDocumentation());
        }
    });
    final Set<String> types = new HashSet<>();
    final String[] facetDoc = { "" };
    DescriptionFactory.visitVarProtos((name, proto) -> {
        if (proto.getFacet(keyword) != null && types.size() < 4) {
            if (!Types.get(name).isAgentType() || name.equals(IKeyword.AGENT)) {
                types.add(name);
            }
            facetDoc[0] = proto.getFacet(keyword).getDocumentation();
        }
    });
    if (!types.isEmpty()) {
        results.put("Facet of attribute declarations with types " + types + (types.size() == 4 ? " ..." : ""),
                facetDoc[0]);
    }
    // Operators
    final THashMap<Signature, OperatorProto> ops = IExpressionCompiler.OPERATORS.get(keyword);
    if (ops != null) {
        ops.forEachEntry((sig, proto) -> {
            results.put("Operator on " + sig.toString(), proto.getDocumentation());
            return true;
        });
    }
    // Built-in skills
    final SkillDescription sd = GamaSkillRegistry.INSTANCE.get(keyword);
    if (sd != null) {
        results.put("Skill", sd.getDocumentation());
    }
    GamaSkillRegistry.INSTANCE.visitSkills(desc -> {
        final SkillDescription sd1 = (SkillDescription) desc;
        final VariableDescription var = sd1.getAttribute(keyword);
        if (var != null) {
            results.put("Attribute of skill " + desc.getName(), var.getDocumentation());
        }
        final ActionDescription action = sd1.getAction(keyword);
        if (action != null) {
            results.put("Primitive of skill " + desc.getName(),
                    action.getDocumentation().isEmpty() ? "" : ":" + action.getDocumentation());
        }
        return true;
    });
    // Types
    final IType<?> t = Types.builtInTypes.containsType(keyword) ? Types.get(keyword) : null;
    if (t != null) {
        String tt = t.getDocumentation();
        if (tt == null) {
            tt = "type " + keyword;
        }
        results.put("Type", tt);
    }
    // Built-in species
    for (final TypeDescription td : Types.getBuiltInSpecies()) {
        if (td.getName().equals(keyword)) {
            results.put("Built-in species", ((SpeciesDescription) td).getDocumentationWithoutMeta());
        }
        final IDescription var = td.getOwnAttribute(keyword);
        if (var != null) {
            results.put("Attribute of built-in species " + td.getName(), var.getDocumentation());
        }
        final ActionDescription action = td.getOwnAction(keyword);
        if (action != null) {
            results.put("Primitive of built-in species " + td.getName(),
                    action.getDocumentation().isEmpty() ? "" : ":" + action.getDocumentation());
        }
    }
    // Constants
    final UnitConstantExpression exp = IUnits.UNITS_EXPR.get(keyword);
    if (exp != null) {
        results.put("Constant", exp.getDocumentation());
    }
    if (results.isEmpty()) {
        return "No result found";
    }
    final StringBuilder sb = new StringBuilder();
    final int max = results.keySet().stream().mapToInt(each -> each.length()).max().getAsInt();
    final String separator = StringUtils.repeat("", max + 6).concat(Strings.LN);
    results.forEachEntry((sig, doc) -> {
        sb.append("").append(separator).append("|| ");
        sb.append(StringUtils.rightPad(sig, max));
        sb.append(" ||").append(Strings.LN).append(separator);
        sb.append(toText(doc)).append(Strings.LN);
        return true;
    });
    return sb.toString();
}
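The method above collects documentation entries from several registries and falls back to a fixed message when nothing matched. A minimal standalone sketch of that lookup-with-fallback pattern, with illustrative names (not GAMA's API):

import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

public class LookupWithFallback {
    // Collect every category that documents the keyword; an empty set means "no result".
    static String describe(String keyword, Map<String, Set<String>> registry) {
        Set<String> matches = new LinkedHashSet<>();
        registry.forEach((category, keywords) -> {
            if (keywords.contains(keyword)) {
                matches.add(category);
            }
        });
        if (matches.isEmpty()) {
            return "No result found";
        }
        return keyword + " is documented as: " + matches;
    }

    public static void main(String[] args) {
        Map<String, Set<String>> registry = Map.of(
                "Statement", Set.of("ask", "loop"),
                "Operator", Set.of("union", "ask"));
        System.out.println(describe("ask", registry));     // matches two categories
        System.out.println(describe("unknown", registry)); // falls back to "No result found"
    }
}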
From source file:hudson.plugins.emailext.plugins.recipients.FailingTestSuspectsRecipientProviderTest.java
private static void checkRecipients(final Build build, final String... inAuthors) throws AddressException {
    ExtendedEmailPublisherContext context = new ExtendedEmailPublisherContext(null, build,
            new Launcher.LocalLauncher(StreamTaskListener.fromStdout()),
            new StreamBuildListener(System.out, Charset.defaultCharset()));
    EnvVars envVars = new EnvVars();
    Set<InternetAddress> to = new HashSet<InternetAddress>();
    Set<InternetAddress> cc = new HashSet<InternetAddress>();
    Set<InternetAddress> bcc = new HashSet<InternetAddress>();
    FailingTestSuspectsRecipientProvider provider = new FailingTestSuspectsRecipientProvider();
    provider.addRecipients(context, envVars, to, cc, bcc);
    final List<InternetAddress> authors = new ArrayList<InternetAddress>();
    for (final String author : inAuthors) {
        authors.add(new InternetAddress(author + AT_DOMAIN));
    }
    // All of the authors should have received an email, so the list should be empty.
    authors.removeAll(to);
    assertTrue("Authors not receiving mail: " + authors.toString(), authors.isEmpty());
    assertTrue(cc.isEmpty());
    assertTrue(bcc.isEmpty());
}
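The assertion above relies on a common test idiom: remove everything that was actually delivered from the expected collection, then check isEmpty() so the failure message can list exactly what is missing. A self-contained sketch with illustrative addresses:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SubsetCheck {
    public static void main(String[] args) {
        Set<String> expected = new HashSet<>(Arrays.asList("alice@example.com", "bob@example.com"));
        Set<String> delivered = new HashSet<>(Arrays.asList("alice@example.com", "bob@example.com", "ops@example.com"));

        expected.removeAll(delivered); // strip everyone who did get the mail
        if (!expected.isEmpty()) {     // whatever is left was never delivered
            throw new AssertionError("Authors not receiving mail: " + expected);
        }
        System.out.println("All expected recipients were mailed");
    }
}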
From source file:com.netflix.genie.web.data.repositories.jpa.specifications.JpaClusterSpecs.java
/**
 * Generate a specification given the parameters.
 *
 * @param name          The name of the cluster to find
 * @param statuses      The statuses of the clusters to find
 * @param tags          The tags of the clusters to find
 * @param minUpdateTime The minimum updated time of the clusters to find
 * @param maxUpdateTime The maximum updated time of the clusters to find
 * @return The specification
 */
public static Specification<ClusterEntity> find(@Nullable final String name,
        @Nullable final Set<ClusterStatus> statuses, @Nullable final Set<TagEntity> tags,
        @Nullable final Instant minUpdateTime, @Nullable final Instant maxUpdateTime) {
    return (final Root<ClusterEntity> root, final CriteriaQuery<?> cq, final CriteriaBuilder cb) -> {
        final List<Predicate> predicates = new ArrayList<>();
        if (StringUtils.isNotBlank(name)) {
            predicates.add(JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb, root.get(ClusterEntity_.name), name));
        }
        if (minUpdateTime != null) {
            predicates.add(cb.greaterThanOrEqualTo(root.get(ClusterEntity_.updated), minUpdateTime));
        }
        if (maxUpdateTime != null) {
            predicates.add(cb.lessThan(root.get(ClusterEntity_.updated), maxUpdateTime));
        }
        if (tags != null && !tags.isEmpty()) {
            final Join<ClusterEntity, TagEntity> tagEntityJoin = root.join(ClusterEntity_.tags);
            predicates.add(tagEntityJoin.in(tags));
            cq.groupBy(root.get(ClusterEntity_.id));
            cq.having(cb.equal(cb.count(root.get(ClusterEntity_.id)), tags.size()));
        }
        if (statuses != null && !statuses.isEmpty()) {
            // Could optimize this as we know size could use native array
            predicates.add(cb.or(statuses.stream()
                    .map(status -> cb.equal(root.get(ClusterEntity_.status), status))
                    .toArray(Predicate[]::new)));
        }
        return cb.and(predicates.toArray(new Predicate[predicates.size()]));
    };
}
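The tags and statuses checks above use the usual guard for optional collection parameters: only add the extra predicate when the set is both non-null and non-empty. A minimal sketch of the same guard outside JPA, with illustrative names:

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class OptionalSetFilter {
    // Apply the tag filter only when the caller actually supplied tags.
    static List<String> filterByTags(List<String> items, Set<String> tags) {
        if (tags == null || tags.isEmpty()) {
            return items; // no filter requested: keep everything
        }
        return items.stream().filter(tags::contains).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> items = List.of("prod", "test", "dev");
        System.out.println(filterByTags(items, Set.of("prod", "dev"))); // [prod, dev]
        System.out.println(filterByTags(items, Set.of()));              // [prod, test, dev]
        System.out.println(filterByTags(items, null));                  // [prod, test, dev]
    }
}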
From source file:com.taobao.android.builder.tools.proguard.AtlasProguardHelper.java
private static void addParentKeeps(BundleItem bundleItem, Input input, AwbTransform awbTransform,
        Map<BundleInfo, AwbTransform> bundleInfoAwbTransformMap, AppVariantContext appVariantContext)
        throws IOException {
    Set<AwbBundle> parentBundles = new HashSet<>();
    for (BundleItem parent : bundleItem.parents) {
        parentBundles.add(bundleInfoAwbTransformMap.get(parent.bundleInfo).getAwbBundle());
        for (BundleInfo sub : parent.circles) {
            parentBundles.add(bundleInfoAwbTransformMap.get(sub).getAwbBundle());
        }
    }
    if (!parentBundles.isEmpty()) {
        List<AwbBundle> bundles = new ArrayList<>(parentBundles);
        Collections.sort(bundles, new Comparator<AwbBundle>() {
            @Override
            public int compare(AwbBundle o1, AwbBundle o2) {
                return o1.getName().compareTo(o2.getName());
            }
        });
        File keepFile = generateKeepFile(bundles, appVariantContext.getAwbProguardDir(awbTransform.getAwbBundle()));
        input.getParentKeeps().add(keepFile);
    }
}
From source file:de.zib.scalaris.examples.wikipedia.data.xml.Main.java
/**
 * Imports all pages in the Wikipedia XML dump from the given file to Scalaris.
 *
 * @param filename
 * @param args
 * @param type
 *
 * @throws RuntimeException
 * @throws IOException
 * @throws SAXException
 * @throws FileNotFoundException
 */
private static void doImport(String filename, String[] args, ImportType type)
        throws RuntimeException, IOException, SAXException, FileNotFoundException {
    int i = 0;
    if (type == ImportType.IMPORT_DB) {
        int numberOfImporters = 1;
        if (args.length > i) {
            try {
                numberOfImporters = Integer.parseInt(args[i]);
            } catch (NumberFormatException e) {
                System.err.println("no number: " + args[i]);
                System.exit(-1);
            }
        }
        ++i;
        int myNumber = 1;
        if (args.length > i) {
            try {
                myNumber = Integer.parseInt(args[i]);
            } catch (NumberFormatException e) {
                System.err.println("no number: " + args[i]);
                System.exit(-1);
            }
        }
        ++i;
        Options options = null;
        if (args.length > i) {
            if (args[i].length() > 0) {
                options = Options.getInstance();
                Options.parseOptions(options, args[i]);
            }
        }
        ++i;
        if (filename.endsWith(".db") && numberOfImporters > 0 && myNumber > 0) {
            WikiDumpHandler.println(System.out, "wiki import from " + filename);
            WikiDumpHandler.println(System.out, " importers : " + numberOfImporters);
            WikiDumpHandler.println(System.out, " my import nr: " + myNumber);
            WikiDumpPreparedSQLiteToScalaris handler = new WikiDumpPreparedSQLiteToScalaris(filename, options,
                    numberOfImporters, myNumber);
            handler.setUp();
            WikiDumpPreparedSQLiteToScalaris.ReportAtShutDown shutdownHook = handler.new ReportAtShutDown();
            Runtime.getRuntime().addShutdownHook(shutdownHook);
            handler.writeToScalaris();
            handler.tearDown();
            shutdownHook.reportAtEnd();
            Runtime.getRuntime().removeShutdownHook(shutdownHook);
            exitCheckHandler(handler);
        } else {
            System.err.println("incorrect command line parameters");
            System.exit(-1);
        }
        return;
    }
    int maxRevisions = -1;
    if (args.length > i) {
        try {
            maxRevisions = Integer.parseInt(args[i]);
        } catch (NumberFormatException e) {
            System.err.println("no number: " + args[i]);
            System.exit(-1);
        }
    }
    ++i;
    // a timestamp in ISO8601 format
    Calendar minTime = null;
    if (args.length > i && !args[i].isEmpty()) {
        try {
            minTime = Revision.stringToCalendar(args[i]);
        } catch (IllegalArgumentException e) {
            System.err.println("no date in ISO8601: " + args[i]);
            System.exit(-1);
        }
    }
    ++i;
    // a timestamp in ISO8601 format
    Calendar maxTime = null;
    if (args.length > i && !args[i].isEmpty()) {
        try {
            maxTime = Revision.stringToCalendar(args[i]);
        } catch (IllegalArgumentException e) {
            System.err.println("no date in ISO8601: " + args[i]);
            System.exit(-1);
        }
    }
    ++i;
    Set<String> whitelist = null;
    String whitelistFile = "";
    if (args.length > i && !args[i].isEmpty()) {
        whitelistFile = args[i];
        whitelist = new HashSet<String>();
        addFromFile(whitelist, whitelistFile);
        if (whitelist.isEmpty()) {
            whitelist = null;
        }
    }
    ++i;
    if (type == ImportType.PREPARE_DB || type == ImportType.XML_2_DB) {
        // only prepare the import to Scalaris, i.e. pre-process K/V pairs?
        String dbFileName = "";
        if (args.length > i && !args[i].isEmpty()) {
            dbFileName = args[i];
        } else {
            System.err.println("need a DB file name for prepare; arguments given: " + Arrays.toString(args));
            System.exit(-1);
        }
        ++i;
        WikiDumpHandler.println(System.out, "wiki prepare file " + dbFileName);
        WikiDumpHandler.println(System.out, " wiki dump : " + filename);
        WikiDumpHandler.println(System.out, " white list : " + whitelistFile);
        WikiDumpHandler.println(System.out, " max revisions : " + maxRevisions);
        WikiDumpHandler.println(System.out,
                " min time : " + (minTime == null ? "null" : Revision.calendarToString(minTime)));
        WikiDumpHandler.println(System.out,
                " max time : " + (maxTime == null ? "null" : Revision.calendarToString(maxTime)));
        WikiDumpHandler handler = null;
        switch (type) {
        case PREPARE_DB:
            handler = new WikiDumpPrepareSQLiteForScalarisHandler(blacklist, whitelist, maxRevisions, minTime,
                    maxTime, dbFileName);
            break;
        case XML_2_DB:
            handler = new WikiDumpXml2SQLite(blacklist, whitelist, maxRevisions, minTime, maxTime, dbFileName);
            break;
        default:
            throw new RuntimeException();
        }
        runXmlHandler(handler, getFileReader(filename));
    } else if (type == ImportType.IMPORT_XML) {
        WikiDumpHandler.println(System.out, "wiki import from " + filename);
        WikiDumpHandler.println(System.out, " white list : " + whitelistFile);
        WikiDumpHandler.println(System.out, " max revisions : " + maxRevisions);
        WikiDumpHandler.println(System.out,
                " min time : " + (minTime == null ? "null" : Revision.calendarToString(minTime)));
        WikiDumpHandler.println(System.out,
                " max time : " + (maxTime == null ? "null" : Revision.calendarToString(maxTime)));
        WikiDumpHandler handler = new WikiDumpToScalarisHandler(blacklist, whitelist, maxRevisions, minTime,
                maxTime);
        runXmlHandler(handler, getFileReader(filename));
    }
}
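The whitelist handling above turns an empty set back into null so that later code can treat "no whitelist file" and "empty whitelist file" the same way. A small standalone sketch of that normalization (illustrative names, not the importer's API):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class WhitelistLoader {
    // Returns null when the file yields no usable entries, so callers only need one null check.
    static Set<String> loadWhitelist(Path file) throws IOException {
        Set<String> whitelist = new HashSet<>(Files.readAllLines(file));
        whitelist.remove("");                          // drop blank lines
        return whitelist.isEmpty() ? null : whitelist; // empty file behaves like "no whitelist"
    }

    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempFile("whitelist", ".txt");
        Files.write(tmp, List.of("Main Page", ""));
        System.out.println(loadWhitelist(tmp)); // [Main Page]
        Files.write(tmp, List.of());
        System.out.println(loadWhitelist(tmp)); // null
    }
}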
From source file:com.moviejukebox.plugin.trailer.AppleTrailersPlugin.java
private static void selectBestTrailer(Set<String> trailersUrl, Set<String> bestTrailersUrl) {
    boolean startSearch = false;
    for (String resolution : RESOLUTION_ARRAY) {
        if (CONFIG_RESOLUTION.equals(resolution)) {
            startSearch = true;
        }
        if (startSearch) {
            for (String curURL : trailersUrl) {
                // Search for a specific resolution
                if (curURL.contains(resolution)) {
                    addTailerRealUrl(bestTrailersUrl, curURL);
                }
            }
        }
        if (!bestTrailersUrl.isEmpty()) {
            break;
        }
    }
}
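selectBestTrailer walks the resolutions in priority order and stops as soon as the result set is non-empty. The same "first non-empty match wins" loop in isolation, with illustrative data:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class FirstNonEmptyMatch {
    public static void main(String[] args) {
        List<String> preferredResolutions = List.of("1080p", "720p", "480p");
        Set<String> trailerUrls = Set.of("movie_720p.mov", "movie_480p.mov");

        Set<String> best = new HashSet<>();
        for (String resolution : preferredResolutions) {
            for (String url : trailerUrls) {
                if (url.contains(resolution)) {
                    best.add(url);
                }
            }
            if (!best.isEmpty()) {
                break; // stop at the highest resolution that produced any match
            }
        }
        System.out.println(best); // [movie_720p.mov]
    }
}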
From source file:com.citytechinc.cq.component.maven.util.ComponentMojoUtil.java
/**
 * Retrieves a List of all classes which are annotated as Components and are
 * within the scope of the provided Reflections purview.
 *
 * @param classPool
 * @param reflections
 * @return A List of classes annotated as Components
 * @throws ClassNotFoundException
 * @throws NotFoundException
 * @throws MalformedURLException
 */
public static List<CtClass> getAllComponentAnnotations(ClassPool classPool, Reflections reflections,
        Set<String> excludedClasses) throws ClassNotFoundException, NotFoundException, MalformedURLException {
    getLog().debug("Scanning for Components");
    List<CtClass> classes = new ArrayList<CtClass>();
    Set<Class<?>> annotatedClasses = reflections.getTypesAnnotatedWith(Component.class);
    if (excludedClasses != null && !excludedClasses.isEmpty()) {
        for (Class<?> c : annotatedClasses) {
            if (!excludedClasses.contains(c.getName())) {
                classes.add(classPool.getCtClass(c.getName()));
            }
        }
    } else {
        for (Class<?> c : annotatedClasses) {
            classes.add(classPool.getCtClass(c.getName()));
        }
    }
    return classes;
}
From source file:fr.inria.oak.paxquery.algebra.optimizer.rules.PushProjections.java
private static Set<ProjectColumn> deriveRequiredInputColumns(BaseLogicalOperator op,
        Set<ProjectColumn> columnsRequiredAbove) {
    final Set<ProjectColumn> requiredColumns = Sets.newTreeSet();
    if (op instanceof XMLConstruct) {
        assert columnsRequiredAbove.isEmpty();
        XMLConstruct xmlConstruct = (XMLConstruct) op;
        requiredColumns.addAll(deriveRequiredInputColumns(xmlConstruct.getApply(), xmlConstruct.getNRSMD()));
    } else if (op instanceof XMLTreeConstruct) {
        assert columnsRequiredAbove.isEmpty();
        XMLTreeConstruct xmlConstruct = (XMLTreeConstruct) op;
        requiredColumns.addAll(deriveRequiredInputColumns(xmlConstruct.getConstructionTreePattern()));
    } else if (op instanceof LeftOuterNestedJoinWithAggregation) {
        // Nested outer join with aggregation
        LeftOuterNestedJoinWithAggregation lonja = (LeftOuterNestedJoinWithAggregation) op;
        Set<Integer> cols = PushdownUtility.getPredicateColumns(lonja.getPred());
        for (int col : cols) {
            requiredColumns.add(new ProjectColumn(col));
        }
        if (lonja.getDocumentIDColumn() != -1) {
            requiredColumns.add(new ProjectColumn(lonja.getDocumentIDColumn()));
        }
        for (int col : lonja.getNodeIDColumns()) {
            requiredColumns.add(new ProjectColumn(col));
        }
        requiredColumns.add(new ProjectColumn(lonja.getAggregationColumn()));
        final int leftNumberColumns = lonja.getNRSMD().getColNo() - 2;
        final int nestedField = leftNumberColumns;
        for (ProjectColumn column : columnsRequiredAbove) {
            if (column.pos < leftNumberColumns) {
                requiredColumns.add(column.copy());
            } else if (column.pos == nestedField) {
                for (ProjectColumn nestedColumn : column.nestedColumns) {
                    requiredColumns.add(nestedColumn.copy(leftNumberColumns + nestedColumn.pos));
                }
            }
        }
    } else if (op instanceof LeftOuterNestedJoin) {
        // Nested outer join
        LeftOuterNestedJoin lonj = (LeftOuterNestedJoin) op;
        Set<Integer> cols = PushdownUtility.getPredicateColumns(lonj.getPred());
        for (int col : cols) {
            requiredColumns.add(new ProjectColumn(col));
        }
        if (lonj.getDocumentIDColumn() != -1) {
            requiredColumns.add(new ProjectColumn(lonj.getDocumentIDColumn()));
        }
        for (int col : lonj.getNodeIDColumns()) {
            requiredColumns.add(new ProjectColumn(col));
        }
        final int leftNumberColumns = lonj.getNRSMD().getColNo() - 1;
        final int nestedField = leftNumberColumns;
        for (ProjectColumn column : columnsRequiredAbove) {
            if (column.pos < leftNumberColumns) {
                requiredColumns.add(column.copy());
            } else if (column.pos == nestedField) {
                for (ProjectColumn nestedColumn : column.nestedColumns) {
                    requiredColumns.add(nestedColumn.copy(leftNumberColumns + nestedColumn.pos));
                }
            }
        }
    } else if (op instanceof BaseJoinOperator) {
        // Any other kind of join
        Set<Integer> cols = PushdownUtility.getPredicateColumns(((BaseJoinOperator) op).getPred());
        for (int col : cols) {
            requiredColumns.add(new ProjectColumn(col));
        }
        for (ProjectColumn column : columnsRequiredAbove) {
            requiredColumns.add(column.copy());
        }
    } else if (op instanceof CartesianProduct) {
        // Cartesian product
        for (ProjectColumn column : columnsRequiredAbove) {
            requiredColumns.add(column.copy());
        }
    } else if (op instanceof Aggregation) {
        // Aggregation
        Aggregation agg = (Aggregation) op;
        if (agg.getDocumentIDColumn() != -1) {
            requiredColumns.add(new ProjectColumn(agg.getDocumentIDColumn()));
        }
        requiredColumns.add(new ProjectColumn(agg.getAggregationPath()[0]));
        int limit;
        if (agg.getAggregationPath().length == 2) {
            limit = agg.getNRSMD().getColNo() - 1;
        } else if (agg.getAggregationPath().length == 1) {
            limit = 0;
        } else {
            limit = agg.getNRSMD().getColNo();
        }
        for (ProjectColumn column : columnsRequiredAbove) {
            if (column.pos < limit) {
                requiredColumns.add(column.copy());
            }
        }
    } else if (op instanceof GroupBy) {
        // Grouping
        GroupBy gb = (GroupBy) op;
        final boolean aggregate = gb instanceof GroupByWithAggregation;
        for (int pos : gb.getGroupByColumns()) {
            requiredColumns.add(new ProjectColumn(pos));
        }
        for (int pos : gb.getReduceByColumns()) {
            requiredColumns.add(new ProjectColumn(pos));
        }
        final int nestedColumnPos = aggregate ? op.getNRSMD().getColNo() - 2 : op.getNRSMD().getColNo() - 1;
        for (ProjectColumn column : columnsRequiredAbove) {
            if (column.pos == nestedColumnPos) {
                for (int i = 0; i < gb.getNestColumns().length; i++) {
                    boolean added = false;
                    for (ProjectColumn nested : column.nestedColumns) {
                        if (nested.pos == i) {
                            requiredColumns.add(nested.copy(gb.getNestColumns()[i]));
                            added = true;
                            break;
                        }
                    }
                    if (!added) {
                        requiredColumns.add(new ProjectColumn(i));
                    }
                }
            }
        }
        if (aggregate) {
            requiredColumns.add(new ProjectColumn(((GroupByWithAggregation) gb).getAggregationColumn()));
        }
    } else if (op instanceof Selection) {
        // Selection
        for (ProjectColumn column : columnsRequiredAbove) {
            requiredColumns.add(column.copy());
        }
        Set<Integer> cols = PushdownUtility.getPredicateColumns(((Selection) op).getPred());
        for (int col : cols) {
            requiredColumns.add(new ProjectColumn(col));
        }
    } else if (op instanceof DuplicateElimination) {
        // DuplicateElimination
        for (ProjectColumn column : columnsRequiredAbove) {
            requiredColumns.add(column.copy());
        }
        for (int col : ((DuplicateElimination) op).getColumns()) {
            requiredColumns.add(new ProjectColumn(col));
        }
    } else if (op instanceof Navigation) {
        // Navigation
        Navigation pnop = (Navigation) op;
        // We add the column that we need for the navigation
        requiredColumns.add(new ProjectColumn(pnop.pos));
        for (ProjectColumn column : columnsRequiredAbove) {
            if (column.pos < pnop.getChild().getNRSMD().getColNo() && column.pos != pnop.pos) {
                requiredColumns.add(column.copy());
            }
        }
    }
    return requiredColumns;
}
From source file:eu.stratosphere.nephele.discovery.DiscoveryService.java
/**
 * Attempts to retrieve the job manager's address in the network through an
 * IP broadcast. This method should be called by the task manager.
 *
 * @return the socket address of the job manager in the network
 * @throws DiscoveryException
 *         thrown if the job manager's socket address could not be
 *         discovered
 */
public static InetSocketAddress getJobManagerAddress() throws DiscoveryException {
    final int magicNumber = GlobalConfiguration.getInteger(MAGICNUMBER_KEY, DEFAULT_MAGICNUMBER);
    final int discoveryPort = GlobalConfiguration.getInteger(DISCOVERYPORT_KEY, DEFAULT_DISCOVERYPORT);
    InetSocketAddress jobManagerAddress = null;
    DatagramSocket socket = null;
    try {
        final Set<InetAddress> targetAddresses = getBroadcastAddresses();
        if (targetAddresses.isEmpty()) {
            throw new DiscoveryException("Could not find any broadcast addresses available to this host");
        }
        socket = new DatagramSocket();
        LOG.debug("Setting socket timeout to " + CLIENTSOCKETTIMEOUT);
        socket.setSoTimeout(CLIENTSOCKETTIMEOUT);
        final DatagramPacket responsePacket = new DatagramPacket(new byte[RESPONSE_PACKET_SIZE],
                RESPONSE_PACKET_SIZE);
        for (int retries = 0; retries < DISCOVERFAILURERETRIES; retries++) {
            final DatagramPacket lookupRequest = createJobManagerLookupRequestPacket(magicNumber);
            for (InetAddress broadcast : targetAddresses) {
                lookupRequest.setAddress(broadcast);
                lookupRequest.setPort(discoveryPort);
                LOG.debug("Sending discovery request to " + lookupRequest.getSocketAddress());
                socket.send(lookupRequest);
            }
            try {
                socket.receive(responsePacket);
            } catch (SocketTimeoutException ste) {
                LOG.debug("Timeout waiting for discovery reply. Retrying...");
                continue;
            }
            if (!isPacketForUs(responsePacket, magicNumber)) {
                LOG.debug("Received packet which is not destined to this Nephele setup");
                continue;
            }
            final int packetTypeID = getPacketTypeID(responsePacket);
            if (packetTypeID != JM_LOOKUP_REPLY_ID) {
                LOG.debug("Received unexpected packet type " + packetTypeID + ", discarding... ");
                continue;
            }
            final int ipcPort = extractIpcPort(responsePacket);
            // Replace port from discovery service with the actual RPC port
            // of the job manager
            if (USE_IPV6) {
                // TODO: No connection possible unless we remove the scope identifier
                if (responsePacket.getAddress() instanceof Inet6Address) {
                    try {
                        jobManagerAddress = new InetSocketAddress(
                                InetAddress.getByAddress(responsePacket.getAddress().getAddress()), ipcPort);
                    } catch (UnknownHostException e) {
                        throw new DiscoveryException(StringUtils.stringifyException(e));
                    }
                } else {
                    throw new DiscoveryException(responsePacket.getAddress() + " is not a valid IPv6 address");
                }
            } else {
                jobManagerAddress = new InetSocketAddress(responsePacket.getAddress(), ipcPort);
            }
            LOG.debug("Discovered job manager at " + jobManagerAddress);
            break;
        }
    } catch (IOException ioe) {
        throw new DiscoveryException(ioe.toString());
    } finally {
        if (socket != null) {
            socket.close();
        }
    }
    if (jobManagerAddress == null) {
        LOG.debug("Unable to discover Jobmanager via IP broadcast");
        throw new DiscoveryException("Unable to discover JobManager via IP broadcast!");
    }
    return jobManagerAddress;
}
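Before broadcasting, the method above fails fast when getBroadcastAddresses() returns an empty set, since looping over nothing and retrying would be pointless. A minimal sketch of that precondition check (illustrative method and exception names):

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Set;

public class RequireNonEmpty {
    // Throws immediately when the required set of targets is empty instead of silently doing nothing.
    static void sendDiscovery(Set<InetAddress> targets) {
        if (targets.isEmpty()) {
            throw new IllegalStateException("Could not find any broadcast addresses available to this host");
        }
        for (InetAddress address : targets) {
            System.out.println("would send discovery request to " + address);
        }
    }

    public static void main(String[] args) throws UnknownHostException {
        sendDiscovery(Set.of(InetAddress.getByName("255.255.255.255")));
        sendDiscovery(Set.of()); // throws IllegalStateException
    }
}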
From source file:org.waarp.gateway.kernel.rest.RestArgument.java
/**
 * @param hmacSha256
 *            SHA-256 key to create the signature
 * @param extraKey
 *            might be null
 * @param treeMap
 * @param argPath
 * @throws HttpInvalidAuthenticationException
 */
protected static String computeKey(HmacSha256 hmacSha256, String extraKey, TreeMap<String, String> treeMap,
        String argPath) throws HttpInvalidAuthenticationException {
    Set<String> keys = treeMap.keySet();
    StringBuilder builder = new StringBuilder(argPath);
    if (!keys.isEmpty() || extraKey != null) {
        builder.append('?');
    }
    boolean first = true;
    for (String keylower : keys) {
        if (first) {
            first = false;
        } else {
            builder.append('&');
        }
        builder.append(keylower).append('=').append(treeMap.get(keylower));
    }
    if (extraKey != null) {
        if (!keys.isEmpty()) {
            builder.append("&");
        }
        builder.append(REST_ROOT_FIELD.ARG_X_AUTH_INTERNALKEY.field).append("=").append(extraKey);
    }
    try {
        return hmacSha256.cryptToHex(builder.toString());
    } catch (Exception e) {
        throw new HttpInvalidAuthenticationException(e);
    }
}
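computeKey uses keys.isEmpty() twice: once to decide whether a '?' is needed at all, and once to decide whether the extra key needs a leading '&'. A standalone sketch of that separator logic with a plain TreeMap (the "X-Auth-InternalKey" field name is illustrative, not Waarp's constant):

import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

public class QueryStringBuilder {
    static String build(String path, TreeMap<String, String> params, String extraKey) {
        Set<String> keys = params.keySet();
        StringBuilder builder = new StringBuilder(path);
        if (!keys.isEmpty() || extraKey != null) {
            builder.append('?');     // only start a query string when there is something to append
        }
        boolean first = true;
        for (String key : keys) {
            if (first) {
                first = false;
            } else {
                builder.append('&');
            }
            builder.append(key).append('=').append(params.get(key));
        }
        if (extraKey != null) {
            if (!keys.isEmpty()) {
                builder.append('&'); // separate the extra key from the regular parameters
            }
            builder.append("X-Auth-InternalKey").append('=').append(extraKey);
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        TreeMap<String, String> params = new TreeMap<>(Map.of("user", "alice", "mode", "get"));
        System.out.println(build("/files", params, "secret"));      // /files?mode=get&user=alice&X-Auth-InternalKey=secret
        System.out.println(build("/files", new TreeMap<>(), null)); // /files
    }
}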