List of usage examples for java.util Set toString
public String toString()
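For reference, the common Set implementations (HashSet, LinkedHashSet, TreeSet) inherit toString() from AbstractCollection: the result is each element's string form, joined by ", " and wrapped in square brackets. A minimal sketch of that baseline behavior (class name and contents are illustrative):

import java.util.LinkedHashSet;
import java.util.Set;

public class SetToStringDemo {
    public static void main(String[] args) {
        Set<String> symbols = new LinkedHashSet<>(); // LinkedHashSet keeps insertion order
        symbols.add("MSFT");
        symbols.add("GOOG");
        // Prints "[MSFT, GOOG]": elements joined by ", " inside square brackets.
        System.out.println(symbols.toString());
    }
}

The examples below all build on this format, whether for logging, error messages, or string munging.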
From source file: org.marketcetera.marketdata.MarketDataRequestBean.java

@Override
public String toString() {
    StringBuilder output = new StringBuilder();
    boolean delimiterNeeded = false;
    Set<String> symbols = getSymbols();
    if (symbols != null && !symbols.isEmpty()) {
        boolean symbolListDelimiterNeeded = false;
        output.append(SYMBOLS_KEY).append(KEY_VALUE_SEPARATOR);
        for (String symbol : symbols) {
            if (symbolListDelimiterNeeded) {
                output.append(SYMBOL_DELIMITER);
            }
            output.append(symbol.trim());
            symbolListDelimiterNeeded = true;
        }
        delimiterNeeded = true;
    }
    Set<String> underlyingSymbols = getUnderlyingSymbols();
    if (underlyingSymbols != null && !underlyingSymbols.isEmpty()) {
        boolean symbolListDelimiterNeeded = false;
        output.append(UNDERLYINGSYMBOLS_KEY).append(KEY_VALUE_SEPARATOR);
        for (String underlyingSymbol : underlyingSymbols) {
            if (symbolListDelimiterNeeded) {
                output.append(SYMBOL_DELIMITER);
            }
            output.append(underlyingSymbol.trim());
            symbolListDelimiterNeeded = true;
        }
        delimiterNeeded = true;
    }
    String provider = getProvider();
    if (provider != null) {
        if (delimiterNeeded) {
            output.append(KEY_VALUE_DELIMITER);
        }
        output.append(PROVIDER_KEY).append(KEY_VALUE_SEPARATOR).append(String.valueOf(provider));
        delimiterNeeded = true;
    }
    Set<Content> content = getContent();
    if (!content.isEmpty()) {
        if (delimiterNeeded) {
            output.append(KEY_VALUE_DELIMITER);
        }
        output.append(CONTENT_KEY).append(KEY_VALUE_SEPARATOR).append(content.toString().replaceAll("[\\[\\] ]", //$NON-NLS-1$
                "")); //$NON-NLS-1$
        delimiterNeeded = true;
    }
    String exchange = getExchange();
    if (exchange != null) {
        if (delimiterNeeded) {
            output.append(KEY_VALUE_DELIMITER);
        }
        output.append(EXCHANGE_KEY).append(KEY_VALUE_SEPARATOR).append(String.valueOf(exchange));
        delimiterNeeded = true;
    }
    AssetClass assetClass = getAssetClass();
    if (assetClass != null) {
        if (delimiterNeeded) {
            output.append(KEY_VALUE_DELIMITER);
        }
        output.append(ASSETCLASS_KEY).append(KEY_VALUE_SEPARATOR).append(String.valueOf(assetClass));
        delimiterNeeded = true;
    }
    Map<String, String> parameters = getParameters();
    if (!parameters.isEmpty()) {
        if (delimiterNeeded) {
            output.append(KEY_VALUE_DELIMITER);
        }
        output.append(PARAMETERS_KEY).append(KEY_VALUE_SEPARATOR);
        boolean parameterDelimiterNeeded = false;
        for (Map.Entry<String, String> entry : parameters.entrySet()) {
            if (parameterDelimiterNeeded) {
                output.append(ESCAPED_KEY_VALUE_DELIMITER);
            }
            output.append(entry.getKey()).append(ESCAPED_KEY_VALUE_SEPARATOR).append(entry.getValue());
            parameterDelimiterNeeded = true;
        }
        delimiterNeeded = true;
    }
    return output.toString();
}
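The content.toString().replaceAll("[\\[\\] ]", "") call above exploits the bracketed Set.toString() format to produce a compact comma-separated list. The idiom in isolation, as a minimal sketch (the element values are invented for illustration):

import java.util.LinkedHashSet;
import java.util.Set;

public class BracketStripDemo {
    public static void main(String[] args) {
        Set<String> content = new LinkedHashSet<>();
        content.add("TOP_OF_BOOK");
        content.add("LATEST_TICK");
        // Set.toString() gives "[TOP_OF_BOOK, LATEST_TICK]"; the regex deletes
        // every '[', ']' and space, leaving "TOP_OF_BOOK,LATEST_TICK".
        String csv = content.toString().replaceAll("[\\[\\] ]", "");
        System.out.println(csv);
    }
}

Note the regex also removes spaces inside element names, so the trick is only safe for space-free tokens such as enum constants.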
From source file: de.zib.vold.client.VolDClient.java

/**
 * Query a set of keys.
 *
 * @param keys The set of keys to query.
 * @return The set of found keys with their values.
 */
@Override
public Map<Key, Set<String>> lookup(Set<Key> keys) {
    // TODO: lookup has to adjust scope appropriately and eventually merge different requests

    // guard: check the argument before logging it, since keys.toString() dereferences it
    {
        if (null == keys) {
            throw new IllegalArgumentException("null is no valid argument!");
        }
        log.trace("Lookup: " + keys.toString());
        checkState();
    }

    // build variable map
    final String uri = buildURL("", keys);
    log.debug("URI: " + uri);

    // get responseEntity from server
    ResponseEntity<Map<Key, Set<String>>> response;
    {
        final Object obj = rest.getForEntity(uri, Map.class, new HashMap<String, String>());
        if (obj instanceof ResponseEntity<?>) {
            // unchecked: the REST template only hands back a raw Map
            response = (ResponseEntity<Map<Key, Set<String>>>) obj;
        } else {
            throw new RuntimeException("THIS SHOULD NEVER HAPPEN!");
        }
        if (response.getStatusCode() != HttpStatus.OK) {
            if (response.hasBody()) {
                throw new RuntimeException("Something went wrong on server (" + baseURL + ")... Got body: "
                        + response.getBody());
            } else {
                throw new RuntimeException("Something went wrong on remote server (" + baseURL + ")...");
            }
        }
    }

    // process and return results
    if (response.hasBody()) {
        return response.getBody();
    } else {
        return null;
    }
}
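A detail worth calling out in the guard block: the explicit keys.toString() must run after the null check, because toString() dereferences the argument. A tiny sketch of the ordering (method name is illustrative):

import java.util.Set;

public class GuardBeforeLog {
    static void lookup(Set<String> keys) {
        // Null check first: keys.toString() below would otherwise throw a
        // NullPointerException instead of the intended IllegalArgumentException.
        if (keys == null) {
            throw new IllegalArgumentException("null is no valid argument!");
        }
        System.out.println("Lookup: " + keys.toString());
    }

    public static void main(String[] args) {
        lookup(Set.of("key1")); // prints "Lookup: [key1]"
    }
}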
From source file: io.cloudslang.lang.compiler.modeller.transformers.SeqStepsTransformer.java

private void validateNotEmptyValues(Map<String, String> tMap, Set<String> mandatoryKeySet,
        Set<String> optionalKeySet) {
    Validate.notNull(tMap);
    Validate.notNull(mandatoryKeySet);
    Validate.notNull(optionalKeySet);
    Set<String> missingKeys = new HashSet<>();
    Set<String> emptyValuesKeys = new HashSet<>();
    for (String reqKey : mandatoryKeySet) {
        String reqValue = tMap.get(reqKey);
        if (reqValue == null) {
            missingKeys.add(reqKey);
        } else if (reqValue.equals("")) {
            emptyValuesKeys.add(reqKey);
        }
    }
    if (isNotEmpty(missingKeys)) {
        throw new RuntimeException(SEQ_OPERATION_HAS_MISSING_TAGS + missingKeys.toString());
    }
    if (isNotEmpty(emptyValuesKeys)) {
        throw new RuntimeException(SEQ_OPERATION_HAS_EMPTY_TAGS + emptyValuesKeys.toString());
    }
}
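The pattern here recurs across these examples: accumulate all offending keys in a Set and append it to the exception message, so the caller sees every problem in one pass instead of one at a time. A self-contained sketch under invented names (the message prefix stands in for SEQ_OPERATION_HAS_MISSING_TAGS):

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class MandatoryKeyCheck {
    // Hypothetical prefix; the real constant lives in the transformer above.
    private static final String MISSING_TAGS = "Sequential operation has missing tags: ";

    static void requireKeys(Map<String, String> tMap, Set<String> mandatory) {
        Set<String> missing = new HashSet<>(mandatory);
        missing.removeAll(tMap.keySet());
        if (!missing.isEmpty()) {
            // String concatenation calls Set.toString(), e.g. "[class_name]".
            throw new RuntimeException(MISSING_TAGS + missing);
        }
    }

    public static void main(String[] args) {
        try {
            requireKeys(Map.of("gav", "g:a:v"), Set.of("gav", "class_name"));
        } catch (RuntimeException e) {
            System.out.println(e.getMessage()); // Sequential operation has missing tags: [class_name]
        }
    }
}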
From source file: org.deri.iris.queryrewriting.QueryRewritingTest.java

public void testFORewriting() throws Exception {
    // Configuration.
    final DecompositionStrategy decomposition = DecompositionStrategy.DECOMPOSE;
    final RewritingLanguage rewLang = RewritingLanguage.UCQ;
    final SubCheckStrategy subchkStrategy = SubCheckStrategy.INTRADEC;
    final NCCheck ncCheckStrategy = NCCheck.NONE;

    LOGGER.info("Decomposition: " + decomposition.name());
    LOGGER.info("Rewriting Language: " + rewLang.name());
    LOGGER.info("Subsumption Check Strategy: " + subchkStrategy.name());
    LOGGER.info("Negative Constraints Check Strategy: " + ncCheckStrategy.name());

    // Read the test-cases file.
    final File testSuiteFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), "test-cases.txt");
    final List<String> tests = IOUtils.readLines(new FileReader(testSuiteFile));
    final String creationDate = dateFormat.format(new Date());

    // Summary reporting.
    final String summaryPrefix = StringUtils.join(creationDate, "-", decomposition.name(), "-",
            rewLang.name(), "-", subchkStrategy.name(), "-", ncCheckStrategy.name());

    final File sizeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "size-summary.csv"));
    final CSVWriter sizeSummaryWriter = new CSVWriter(new FileWriter(sizeSummaryFile), ',');

    final File timeSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "time-summary.csv"));
    final CSVWriter timeSummaryWriter = new CSVWriter(new FileWriter(timeSummaryFile), ',');

    final File cacheSummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "cache-summary.csv"));
    final CSVWriter cacheSummaryWriter = new CSVWriter(new FileWriter(cacheSummaryFile), ',');

    final File memorySummaryFile = FileUtils.getFile(_WORKING_DIR,
            FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH),
            FilenameUtils.separatorsToSystem(_DEFAULT_SUMMARY_DIR),
            StringUtils.join(summaryPrefix, "-", "memory-summary.csv"));
    final CSVWriter memorySummaryWriter = new CSVWriter(new FileWriter(memorySummaryFile), ',');

    sizeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingSizeReportHeader());
    timeSummaryWriter.writeNext(ReportingUtils.getSummaryRewritingTimeReportHeader());
    cacheSummaryWriter.writeNext(ReportingUtils.getSummaryCachingReportHeader());
    memorySummaryWriter.writeNext(ReportingUtils.getSummaryMemoryReportHeader());

    // Compute the rewriting for each test ontology.
    for (final String testName : tests) {
        // Read the next test case on the list.
        final File testFile = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_INPUT_PATH), testName + ".dtg");

        // Create the directory where to store the test results.
        final File outTestDir = FileUtils.getFile(_WORKING_DIR,
                FilenameUtils.separatorsToSystem(_DEFAULT_OUTPUT_PATH), testName);
        if (!outTestDir.exists()) {
            if (outTestDir.mkdir()) {
                LOGGER.info("Created output directory: " + testName);
            } else {
                LOGGER.fatal("Error creating output directory");
            }
        }
        LOGGER.info("Processing file: " + testName);

        // Read the content of the current program.
        final FileReader fr = new FileReader(testFile);
        final StringBuilder sb = new StringBuilder();
        int ch = -1;
        while ((ch = fr.read()) >= 0) {
            sb.append((char) ch);
        }
        final String program = sb.toString();
        fr.close();

        // Parse the program.
        final Parser parser = new Parser();
        parser.parse(program);

        // Get the rules.
        final List<IRule> rules = parser.getRules();

        // Get the queries.
        final List<IQuery> queryHeads = parser.getQueries();

        // Get the TGDs from the set of rules.
        final List<IRule> tgds = RewritingUtils.getTGDs(rules, queryHeads);

        // Convert the query bodies into rules.
        final List<IRule> bodies = new LinkedList<IRule>(rules);
        bodies.removeAll(tgds);
        final List<IRule> queries = RewritingUtils.getQueries(bodies, queryHeads);

        // Get the constraints from the set of rules.
        final Set<IRule> constraints = RewritingUtils.getConstraints(rules, queryHeads);

        final Set<Expressivity> exprs = RewritingUtils.getExpressivity(tgds);
        LOGGER.info("Expressivity: " + exprs.toString());
        if (!exprs.contains(Expressivity.LINEAR) && !exprs.contains(Expressivity.STICKY)) {
            throw new EvaluationException("Only Linear and Sticky TGDs are supported for rewriting.");
        }

        // Compute the dependency graph.
        LOGGER.debug("Computing position dependencies.");
        // long depGraphMem = MonitoringUtils.getHeapUsage();
        long posDepTime = System.currentTimeMillis();
        Map<Pair<IPosition, IPosition>, Set<List<IRule>>> deps =
                DepGraphUtils.computePositionDependencyGraph(tgds);
        posDepTime = System.currentTimeMillis() - posDepTime;

        // Set up caching.
        CacheManager.setupCaching();

        // If linear TGDs, compute the atom coverage graph.
        LOGGER.debug("Computing atom coverage graph.");
        long atomCoverGraphTime = System.currentTimeMillis();
        if (exprs.contains(Expressivity.LINEAR)) {
            deps = DepGraphUtils.computeAtomCoverageGraph(deps);
        }
        atomCoverGraphTime = System.currentTimeMillis() - atomCoverGraphTime;
        // depGraphMem = MonitoringUtils.getHeapUsage() - depGraphMem;

        // Rewrite the constraints.
        // long ncRewMem = MonitoringUtils.getHeapUsage();
        final ParallelRewriter cnsRewriter = new ParallelRewriter(DecompositionStrategy.MONOLITIC,
                RewritingLanguage.UCQ, SubCheckStrategy.NONE, NCCheck.NONE);
        long ncRewTime = System.currentTimeMillis();
        final Set<IRule> rewrittenConstraints = Sets.newHashSet();
        if (!ncCheckStrategy.equals(NCCheck.NONE)) {
            for (final IRule c : constraints) {
                rewrittenConstraints.addAll(cnsRewriter.getRewriting(c, tgds, new HashSet<IRule>(), deps, exprs));
            }
        }
        ncRewTime = System.currentTimeMillis() - ncRewTime;
        // ncRewMem = ncRewMem - MonitoringUtils.getHeapUsage();
        LOGGER.debug("Finished rewriting constraints.");

        // Dump the rewritten constraints.
        File outFile = FileUtils.getFile(outTestDir, testName.concat("_cns.dtg"));
        final FileWriter cnsFW = new FileWriter(outFile);
        IOUtils.writeLines(rewrittenConstraints, IOUtils.LINE_SEPARATOR, cnsFW);
        cnsFW.close();

        // Compute the rewriting.
        final ParallelRewriter rewriter = new ParallelRewriter(decomposition, rewLang, subchkStrategy,
                ncCheckStrategy);
        for (final IRule q : queries) {
            // Set up caching.
            CacheManager.setupCaching();

            final String queryPredicate = q.getHead().iterator().next().getAtom().getPredicate()
                    .getPredicateSymbol();

            // Set up reporting.
            final Reporter rep = Reporter.getInstance(true);
            Reporter.setupReporting();
            Reporter.setQuery(queryPredicate);
            Reporter.setOntology(testName);
            rep.setValue(RewMetric.DEPGRAPH_TIME, posDepTime);

            LOGGER.info("Processing query: ".concat(q.toString()));
            // final long rewMem = MonitoringUtils.getHeapUsage();
            final long overallTime = System.currentTimeMillis();
            final Set<IRule> rewriting = rewriter.getRewriting(q, tgds, rewrittenConstraints, deps, exprs);
            rep.setValue(RewMetric.OVERALL_TIME, System.currentTimeMillis() - overallTime);
            // rep.setValue(RewMetric.REW_MEM, MonitoringUtils.getHeapUsage() - rewMem);
            // rep.setValue(RewMetric.DEPGRAPH_MEM, depGraphMem);
            rep.setValue(RewMetric.REW_SIZE, (long) rewriting.size());
            rep.setValue(RewMetric.JOIN_COUNT, RewritingUtils.joinCount(rewriting));
            rep.setValue(RewMetric.ATOM_COUNT, RewritingUtils.atomsCount(rewriting));
            rep.setValue(RewMetric.REW_CNS_COUNT, (long) rewrittenConstraints.size());
            rep.setValue(RewMetric.REW_CNS_TIME, ncRewTime);
            // rep.setValue(RewMetric.REW_CNS_MEM, ncRewMem);

            // Other metrics.
            rep.setValue(RewMetric.OVERHEAD_TIME,
                    rep.getValue(RewMetric.OVERALL_TIME) - rep.getValue(RewMetric.REW_TIME));

            // Caching size metrics.
            rep.setValue(RewMetric.MAX_COVERING_CACHE_SIZE, CoveringCache.getCache().size(CacheType.COVERING));
            rep.setValue(RewMetric.MAX_NON_COVERING_CACHE_SIZE,
                    CoveringCache.getCache().size(CacheType.NOT_COVERING));
            rep.setValue(RewMetric.MAX_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.MAPSTO));
            rep.setValue(RewMetric.MAX_NOT_MAPSTO_CACHE_SIZE, MapsToCache.size(MapsToCache.CacheType.NOT_MAPSTO));
            rep.setValue(RewMetric.MAX_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_NON_FACTOR_CACHE_SIZE, (long) 0);
            rep.setValue(RewMetric.MAX_RENAMING_CACHE_SIZE, RenamingCache.size());
            rep.setValue(RewMetric.MAX_MGU_CACHE_SIZE, MGUCache.size());

            // Create a file to store the rewriting results.
            outFile = FileUtils.getFile(outTestDir, queryPredicate.concat("_rew.dtg"));
            final FileWriter rewFW = new FileWriter(outFile);

            rewFW.write("/// Query: " + q + " ///\n");
            rewFW.write("/// Ontology: " + testName + " ///\n");
            rewFW.write("/// Created on: " + creationDate + " ///\n");
            rewFW.write("/// Rules in the program: " + rules.size() + " ///\n");
            rewFW.write("/// TGDs in the program: " + tgds.size() + " ///\n");
            rewFW.write("/// Constraints in the program: " + constraints.size() + " ///\n");
            rewFW.write("/// Theory expressivity: " + exprs.toString() + " ///\n");
            rewFW.write("/// Decomposition: " + decomposition.name() + " ///\n");
            rewFW.write("/// Subsumption Check Strategy: " + subchkStrategy.name() + " ///\n");
            rewFW.write("/// Negative Constraints Check Strategy: " + ncCheckStrategy.name() + " ///\n");
            rewFW.write(IOUtils.LINE_SEPARATOR);
            LOGGER.info("Writing the output at: " + outFile.getAbsolutePath());

            // Dump metrics for individual queries.
            rewFW.write(rep.getReport());
            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write(IOUtils.LINE_SEPARATOR);
            rewFW.write("/// Rewritten Program ///\n");
            final Set<ILiteral> newHeads = new HashSet<ILiteral>();
            for (final IRule qr : rewriting) {
                newHeads.add(qr.getHead().iterator().next());
                rewFW.write(qr + "\n");
            }
            rewFW.write("\n");
            for (final ILiteral h : newHeads) {
                rewFW.write("?- " + h + ".\n");
            }
            rewFW.write("\n");
            rewFW.flush();
            rewFW.close();

            // Dump summary metrics.
            sizeSummaryWriter.writeNext(rep.getSummarySizeMetrics());
            timeSummaryWriter.writeNext(rep.getSummaryTimeMetrics());
            cacheSummaryWriter.writeNext(rep.getSummaryCacheMetrics());
            memorySummaryWriter.writeNext(rep.getSummaryMemoryMetrics());
            sizeSummaryWriter.flush();
            timeSummaryWriter.flush();
            cacheSummaryWriter.flush();
            memorySummaryWriter.flush();
        }
    }

    sizeSummaryWriter.close();
    timeSummaryWriter.close();
    cacheSummaryWriter.close();
    memorySummaryWriter.close();
}
From source file: org.xwalk.runtime.extension.api.contacts.ContactSaver.java

public JSONObject save(String saveString) {
    mOps = new ArrayList<ContentProviderOperation>();
    try {
        mContact = new JSONObject(saveString);
    } catch (JSONException e) {
        Log.e(TAG, "Failed to parse json data: " + e.toString());
        return new JSONObject();
    }
    mJson = new ContactJson(mContact);

    Builder builder = null;
    mId = mJson.getString("id");
    mIsUpdate = mUtils.hasID(mId);

    Set<String> oldRawIds = null;
    if (!mIsUpdate) {
        // Create a null record for inserting later.
        oldRawIds = mUtils.getCurrentRawIds();
        mId = null;
        builder = ContentProviderOperation.newInsert(ContactsContract.RawContacts.CONTENT_URI);
        builder.withValue(ContactsContract.RawContacts.ACCOUNT_TYPE, null);
        builder.withValue(ContactsContract.RawContacts.ACCOUNT_NAME, null);
        mOps.add(builder.build());
    }

    // W3C                  Android
    // -------------------------------------------------
    // displayName          StructuredName.display_name
    // honorificPrefixes    StructuredName.prefix
    // givenNames           StructuredName.given_name
    // additionalNames      StructuredName.middle_name
    // familyNames          StructuredName.family_name
    // honorificSuffixes    StructuredName.suffix
    // nicknames            Nickname.name
    if (mContact.has("name")) {
        final JSONObject name = mJson.getObject("name");
        final ContactJson nameJson = new ContactJson(name);
        builder = newBuilder(StructuredName.CONTENT_ITEM_TYPE);
        builder.withValue(StructuredName.DISPLAY_NAME, nameJson.getString("displayName"));
        // FIXME(hdq): should read all names
        builder.withValue(StructuredName.FAMILY_NAME, nameJson.getFirstValue("familyNames"));
        builder.withValue(StructuredName.GIVEN_NAME, nameJson.getFirstValue("givenNames"));
        builder.withValue(StructuredName.MIDDLE_NAME, nameJson.getFirstValue("additionalNames"));
        builder.withValue(StructuredName.PREFIX, nameJson.getFirstValue("honorificPrefixes"));
        builder.withValue(StructuredName.SUFFIX, nameJson.getFirstValue("honorificSuffixes"));
        mOps.add(builder.build());
        // Nickname belongs to another mimetype, so we need another builder for it.
        if (name.has("nicknames")) {
            builder = newBuilder(Nickname.CONTENT_ITEM_TYPE);
            builder.withValue(Nickname.NAME, nameJson.getFirstValue("nicknames"));
            mOps.add(builder.build());
        }
    }

    if (mContact.has("categories")) {
        List<String> groupIds = new ArrayList<String>();
        for (String groupTitle : mJson.getStringArray("categories")) {
            groupIds.add(mUtils.getEnsuredGroupId(groupTitle));
        }
        buildByArray(GroupMembership.CONTENT_ITEM_TYPE, GroupMembership.GROUP_ROW_ID, groupIds);
    }

    if (mContact.has("gender")) {
        final String gender = mJson.getString("gender");
        if (Arrays.asList("male", "female", "other", "none", "unknown").contains(gender)) {
            builder = newBuilder(ContactConstants.CUSTOM_MIMETYPE_GENDER);
            builder.withValue(Data.DATA1, gender);
            mOps.add(builder.build());
        }
    }

    buildByDate("lastUpdated", ContactConstants.CUSTOM_MIMETYPE_LASTUPDATED, Data.DATA1);
    buildByEvent("birthday", Event.TYPE_BIRTHDAY);
    buildByEvent("anniversary", Event.TYPE_ANNIVERSARY);
    buildByContactMapList();

    // Perform the operation batch.
    try {
        mUtils.mResolver.applyBatch(ContactsContract.AUTHORITY, mOps);
    } catch (Exception e) {
        if (e instanceof RemoteException || e instanceof OperationApplicationException
                || e instanceof SecurityException) {
            Log.e(TAG, "Failed to apply batch: " + e.toString());
            return new JSONObject();
        } else {
            throw new RuntimeException(e);
        }
    }

    // If it is a new contact, need to get and return its auto-generated id.
    if (!mIsUpdate) {
        Set<String> newRawIds = mUtils.getCurrentRawIds();
        if (newRawIds == null)
            return new JSONObject();
        newRawIds.removeAll(oldRawIds);
        if (newRawIds.size() != 1) {
            Log.e(TAG, "Something wrong after batch applied, " + "new raw ids are: " + newRawIds.toString());
            return mContact;
        }
        String id = mUtils.getId(newRawIds.iterator().next());
        PutToContact(id);
    }
    return mContact;
}
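The id-diffing at the end (snapshot the ids, removeAll after the batch, log the leftover set) is a reusable way to discover what a batch insert created. A standalone sketch with made-up id values:

import java.util.HashSet;
import java.util.Set;

public class NewIdDiff {
    public static void main(String[] args) {
        Set<String> oldRawIds = new HashSet<>(Set.of("1", "2", "3"));
        Set<String> newRawIds = new HashSet<>(Set.of("1", "2", "3", "4"));
        // Set difference: ids present now but not before the batch.
        newRawIds.removeAll(oldRawIds);
        if (newRawIds.size() != 1) {
            // Set.toString() keeps the diagnostic readable, e.g. "[4, 7]".
            System.err.println("Something wrong after batch applied, new raw ids are: " + newRawIds);
        } else {
            System.out.println("New id: " + newRawIds.iterator().next());
        }
    }
}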
From source file: at.rocworks.oa4j.logger.dbs.NoSQLMongoDB.java

@Override
public boolean dpGetPeriod(Date t1, Date t2, Dp dp, Set<String> configs, DpGetPeriodResult result) {
    // db.events.find({tag: "System1:Test_1_1.Value",
    //                 ts: {$gt: ISODate("2016-07-28T09:00:00.000Z"), $lt: ISODate("2016-07-28T10:00:00.000Z")}},
    //                {_id:0, tag:1, ts:1});
    JDebug.out.log(Level.INFO, "dpGetPeriod {0}-{1} dp={2} configs={3}",
            new Object[] { t1, t2, dp, configs.toString() });

    final SimpleDateFormat fmt = new SimpleDateFormat(TimeVar.FMT_DATE_JS_MS);
    fmt.setTimeZone(TimeZone.getTimeZone("UTC"));

    // columns
    final ArrayList<Dp> dps = createDpConfigAttrList(dp, configs);
    if (dps.isEmpty()) {
        JDebug.out.warning("dpGetPeriod without any valid config.");
        return false;
    }
    Document columns = new Document();
    columns.append("_id", 0);
    columns.append("ts", 1);
    dps.forEach((Dp x) -> {
        String c = attrMap.get(x.getAttribute());
        if (c != null)
            columns.append(c, 1);
    });

    // filter
    Document query = new Document();
    query.append("tag", getTagOfDp(dp));
    query.append("ts", new Document("$gte", t1).append("$lte", t2));

    // query
    FindIterable<Document> find = evcoll.find(query);
    find.projection(columns);
    find.forEach((Block<Document>) document -> {
        // { "ts" : { "$date" : 1469696660635 }, "value" : { "number" : 3.0 },
        //   "status" : { "$numberLong" : "-9007199254738370303" }, "user" : 0 }
        //JDebug.out.info(document.toJson());
        Date ts = document.getDate("ts");
        Object value;
        for (int i = 0; i < dps.size(); i++) {
            try {
                final Dp attr = dps.get(i);
                switch (attr.getAttribute()) {
                case Value: // value_number
                    value = document.get("value");
                    if (value instanceof Document) {
                        Document dval = (Document) value;
                        dval.keySet().forEach(type -> result.addValue(attr, ts, dval.get(type)));
                    }
                    break;
                case Status:
                    value = document.get("status");
                    result.addVariable(attr, ts, new Bit32Var(value));
                    break;
                case Status64:
                    value = document.get("status");
                    result.addVariable(attr, ts, new Bit64Var(value));
                    break;
                case Manager:
                    value = document.get("manager");
                    result.addVariable(attr, ts, Variable.newVariable(value));
                    break;
                case User:
                    value = document.get("user");
                    result.addVariable(attr, ts, Variable.newVariable(value));
                    break;
                case Stime:
                    value = ts;
                    result.addVariable(attr, ts, Variable.newVariable(value));
                    break;
                default:
                    JDebug.out.log(Level.SEVERE, "unhandled config {0}", attr.getAttribute());
                }
            } catch (Exception ex) {
                JDebug.StackTrace(Level.SEVERE, ex);
            }
        }
    });
    return true;
}
From source file: com.google.enterprise.connector.sharepoint.spiimpl.SharepointAuthenticationManager.java

/**
 * This method makes a call to {@link LdapService} to get all AD groups and SP
 * groups of which the search user is a direct or indirect member, and returns
 * an {@link AuthenticationResponse}.
 *
 * @param searchUser the user for whom to fetch all SharePoint groups and
 *            Directory groups of which he/she is a direct or indirect member
 * @return {@link AuthenticationResponse}
 * @throws SharepointException
 */
@VisibleForTesting
AuthenticationResponse getAllGroupsForTheUser(String searchUser) throws SharepointException {
    LOGGER.info("Attempting group resolution for user : " + searchUser);
    Set<Principal> allSearchUserGroups =
            ldapService.getAllGroupsForSearchUser(sharepointClientContext, searchUser);
    if (null != allSearchUserGroups && allSearchUserGroups.size() > 0) {
        // Should return true if there is at least one group returned by
        // the LDAP service.
        LOGGER.log(Level.INFO, "Group resolution returned following groups "
                + "for the search user: {0}\n{1}",
                new Object[] { searchUser, allSearchUserGroups.toString() });
        return new AuthenticationResponse(true, "", allSearchUserGroups);
    } else {
        LOGGER.info("Group resolution returned no groups for the search user: " + searchUser);
        // Should return true with null groups.
        return new AuthenticationResponse(true, "", null);
    }
}
From source file: de.tuberlin.uebb.jdae.llmsl.ExecutableDAE.java

public int lastBlock(final Collection<GlobalVariable> vars) {
    final Set<GlobalVariable> left = Sets.newTreeSet(vars);
    left.removeAll(states);

    int i = 0;
    while (!left.isEmpty() && i < blocks.length) {
        for (GlobalVariable gv : blocks[i].variables())
            left.remove(gv);
        i++;
    }

    if (!left.isEmpty())
        throw new RuntimeException("The following variables are not computed: " + left.toString());

    return i;
}
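Since Sets.newTreeSet produces a sorted set, the left.toString() in the exception above lists the missing variables in natural order, which keeps the error message stable across runs. The same effect with plain JDK types (the variable names are invented):

import java.util.Set;
import java.util.TreeSet;

public class SortedDiagnostics {
    public static void main(String[] args) {
        Set<String> left = new TreeSet<>(Set.of("z", "a", "m"));
        // TreeSet iterates in natural order, so the message is deterministic:
        // "The following variables are not computed: [a, m, z]"
        if (!left.isEmpty()) {
            System.out.println("The following variables are not computed: " + left);
        }
    }
}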
From source file: io.seldon.recommendation.RecentItemsRecommender.java

@Override
public ItemRecommendationResultSet recommend(String client, Long user, Set<Integer> dimensions,
        int maxRecsCount, RecommendationContext ctxt, List<Long> recentItemInteractions) {
    HashMap<Long, Double> recommendations = new HashMap<>();
    Set<Long> exclusions;
    if (ctxt.getMode() == RecommendationContext.MODE.INCLUSION) {
        logger.warn("Can't run RecentItemsRecommender in inclusion context mode");
        return new ItemRecommendationResultSet(name);
    } else {
        exclusions = ctxt.getContextItems();
    }
    if (logger.isDebugEnabled())
        logger.debug("Running with dimension " + dimensions.toString());

    Collection<Long> recList = itemStorage
            .retrieveRecentlyAddedItems(client, maxRecsCount + exclusions.size(), dimensions).getItems();
    if (recList.size() > 0) {
        double scoreIncr = 1.0 / (double) recList.size();
        int count = 0;
        for (Long item : recList) {
            if (count >= maxRecsCount)
                break;
            else if (!exclusions.contains(item))
                recommendations.put(item, 1.0 - (count++ * scoreIncr));
        }
        List<ItemRecommendationResultSet.ItemRecommendationResult> results = new ArrayList<>();
        for (Map.Entry<Long, Double> entry : recommendations.entrySet()) {
            results.add(new ItemRecommendationResultSet.ItemRecommendationResult(entry.getKey(),
                    entry.getValue().floatValue()));
        }
        if (logger.isDebugEnabled())
            logger.debug("Recent items algorithm returned " + recommendations.size() + " items");
        return new ItemRecommendationResultSet(results, name);
    } else {
        logger.warn("No items returned for recent items of dimension " + StringUtils.join(dimensions, ",")
                + " for " + client);
    }
    return new ItemRecommendationResultSet(Collections.EMPTY_LIST, name);
}
From source file: org.apache.hadoop.mapreduce.lib.input.TestFileInputFormat.java

private void verifySplits(List<String> expected, List<InputSplit> splits) {
    Iterable<String> pathsFromSplits = Iterables.transform(splits, new Function<InputSplit, String>() {
        @Override
        public String apply(@Nullable InputSplit input) {
            return ((FileSplit) input).getPath().toString();
        }
    });

    Set<String> expectedSet = Sets.newHashSet(expected);
    for (String splitPathString : pathsFromSplits) {
        if (!expectedSet.remove(splitPathString)) {
            Assert.fail("Found extra split: " + splitPathString);
        }
    }
    Assert.assertEquals("Not all expectedPaths matched: " + expectedSet.toString(), 0, expectedSet.size());
}
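The test above shows a useful JUnit idiom: drain a HashSet of expected values as results arrive, then embed the leftover set's toString() in the final assertion message, so a failure names exactly which items went unmatched. A self-contained sketch with made-up test data:

import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;

public class LeftoverSetTest {
    @Test
    public void allExpectedItemsAreSeen() {
        Set<String> expectedSet = new HashSet<>(List.of("a.txt", "b.txt"));
        for (String seen : List.of("a.txt", "b.txt")) {
            // remove() returns false for anything we did not expect.
            Assert.assertTrue("Found extra item: " + seen, expectedSet.remove(seen));
        }
        // On failure the message embeds Set.toString(), e.g. "[b.txt]".
        Assert.assertEquals("Not all expected items matched: " + expectedSet, 0, expectedSet.size());
    }
}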