List of usage examples for java.util List toString
public String toString()
From source file:com.norconex.committer.cloudsearch.CloudSearchCommitter.java
private void uploadBatchToCloudSearch(List<JSONObject> documentBatch) { // Convert the JSON list to String and read it as a stream from memory // (for increased performance), for it to be usable by the AWS // CloudSearch UploadRequest. If memory becomes a concern, consider // streaming to file. // ArrayList.toString() joins the elements in a JSON-compliant way. byte[] bytes; try {//from w w w .j a v a 2 s . co m bytes = documentBatch.toString().getBytes(CharEncoding.UTF_8); } catch (UnsupportedEncodingException e) { throw new CommitterException("UTF-8 not supported by OS.", e); } try (ByteArrayInputStream is = new ByteArrayInputStream(bytes)) { UploadDocumentsRequest uploadRequest = new UploadDocumentsRequest(); uploadRequest.setContentType("application/json"); uploadRequest.setDocuments(is); uploadRequest.setContentLength((long) bytes.length); ensureAWSClient(); UploadDocumentsResult result = awsClient.uploadDocuments(uploadRequest); LOG.info(result.getAdds() + " Add requests and " + result.getDeletes() + " Delete requests " + "sent to the AWS CloudSearch domain."); } catch (IOException e) { throw new CommitterException("Could not upload request to CloudSearch.", e); } }
From source file:io.seldon.mf.RecentMfRecommender.java
@Override public ItemRecommendationResultSet recommend(String client, Long user, Set<Integer> dimensions, int maxRecsCount, RecommendationContext ctxt, List<Long> recentItemInteractions) { RecommendationContext.OptionsHolder opts = ctxt.getOptsHolder(); int numRecentActionsToUse = opts.getIntegerOption(RECENT_ACTIONS_PROPERTY_NAME); MfFeaturesManager.ClientMfFeaturesStore clientStore = this.store.getClientStore(client, ctxt.getOptsHolder());// w w w.j a va2s . co m if (clientStore == null) { logger.debug("Couldn't find a matrix factorization store for this client"); return new ItemRecommendationResultSet(Collections.<ItemRecommendationResult>emptyList(), name); } List<Long> itemsToScore; if (recentItemInteractions.size() > numRecentActionsToUse) { if (logger.isDebugEnabled()) logger.debug("Limiting recent items for score to size " + numRecentActionsToUse + " from present " + recentItemInteractions.size()); itemsToScore = recentItemInteractions.subList(0, numRecentActionsToUse); } else itemsToScore = new ArrayList<>(recentItemInteractions); if (logger.isDebugEnabled()) logger.debug("Recent items of size " + itemsToScore.size() + " -> " + itemsToScore.toString()); double[] userVector; if (clientStore.productFeaturesInverse != null) { //fold in user data from their recent history of item interactions logger.debug("Creating user vector by folding in features"); userVector = foldInUser(itemsToScore, clientStore.productFeaturesInverse, clientStore.idMap); } else { logger.debug("Creating user vector by averaging features"); userVector = createAvgProductVector(itemsToScore, clientStore.productFeatures); } Set<ItemRecommendationResult> recs = new HashSet<>(); if (ctxt.getMode() == RecommendationContext.MODE.INCLUSION) { // special case for INCLUSION as it's easier on the cpu. 
for (Long item : ctxt.getContextItems()) { if (!recentItemInteractions.contains(item)) { float[] features = clientStore.productFeatures.get(item); if (features != null) recs.add(new ItemRecommendationResult(item, dot(features, userVector))); } } } else { for (Map.Entry<Long, float[]> productFeatures : clientStore.productFeatures.entrySet()) { Long item = productFeatures.getKey().longValue(); if (!recentItemInteractions.contains(item)) { recs.add(new ItemRecommendationResult(item, dot(productFeatures.getValue(), userVector))); } } } List<ItemRecommendationResult> recsList = Ordering.natural().greatestOf(recs, maxRecsCount); if (logger.isDebugEnabled()) logger.debug("Created " + recsList.size() + " recs"); return new ItemRecommendationResultSet(recsList, name); }
From source file:fr.recolnat.database.ExportsDatabase.java
public List<String[]> listUserExports(String user) { if (log.isDebugEnabled()) { log.debug("Entering listUserExports(user=" + user + ")"); }//from w w w .j av a 2 s . com if (user == null) { throw new NullPointerException("User is null"); } while (this.database.isClosed()) { log.warn("Exports database is closed, waiting 500ms"); try { Thread.sleep(500); } catch (InterruptedException ex) { log.info("Received SIGINT."); return null; } } if (log.isDebugEnabled()) { log.debug("Opening user exports"); } Map<String, String> userFiles = this.database.treeMap(user, Serializer.STRING, Serializer.STRING) .createOrOpen(); if (log.isDebugEnabled()) { log.debug("Building exports list"); } List<String[]> ret = new LinkedList<>(); Iterator<String> itFileNames = userFiles.keySet().iterator(); if (log.isDebugEnabled()) { log.debug("Got key set iterator"); } while (itFileNames.hasNext()) { String fileName = itFileNames.next(); String fileUrl = (String) userFiles.get(fileName); if (log.isDebugEnabled()) { log.debug("Adding " + fileName + " " + fileUrl); } ret.add(new String[] { fileName, fileUrl }); } if (log.isDebugEnabled()) { log.debug("Returning " + ret.toString()); } // This commit is called because createOrOpen is used earlier. this.database.commit(); return ret; }
From source file:org.constretto.spring.EnvironmentAnnotationConfigurer.java
@SuppressWarnings("unchecked") private void removeNonAnnotatedBeansFromAutowireForType(Class lookupClass, ConfigurableListableBeanFactory configurableListableBeanFactory) throws ClassNotFoundException { List<String> beanNames = new ArrayList<String>(); Class[] interfaces = lookupClass.getInterfaces(); for (Class anInterface : interfaces) { beanNames.addAll(asList(BeanFactoryUtils .beanNamesForTypeIncludingAncestors(configurableListableBeanFactory, anInterface))); }/*w ww. j a v a2 s . c o m*/ List<BeanDefinition> potentialMatches = new ArrayList<BeanDefinition>(); for (String beanName : beanNames) { BeanDefinition beanDefinition = configurableListableBeanFactory.getBeanDefinition(beanName); Class beanClass = Class.forName(beanDefinition.getBeanClassName()); beanDefinition.setAttribute(INCLUDE_IN_COLLECTIONS, beanClass.getInterfaces()); Environment environmentAnnotation = findEnvironmentAnnotation(beanClass); if (environmentAnnotation == null) { beanDefinition.setAutowireCandidate(false); } else { potentialMatches.add(beanDefinition); } } if (potentialMatches.size() == 1) { potentialMatches.get(0).setAutowireCandidate(true); } else { List<BeanDefinition> highestPriorityBeans = new ArrayList<BeanDefinition>(); for (BeanDefinition potentialMatch : potentialMatches) { if (potentialMatch.isAutowireCandidate()) { potentialMatch.setAutowireCandidate(false); highestPriorityBeans = prioritizeBeans(potentialMatch, highestPriorityBeans); } } if (highestPriorityBeans.size() == 1) { highestPriorityBeans.get(0).setAutowireCandidate(true); } else { List<String> equalPriorityBeans = new ArrayList<String>(); for (BeanDefinition highestPriorityBean : highestPriorityBeans) { equalPriorityBeans.add(highestPriorityBean.getBeanClassName()); } throw new ConstrettoException("More than one bean with the class or interface + [" + lookupClass.getSimpleName() + "] registered with same tag. Could not resolve priority. 
To fix this, remove one of the following beans " + equalPriorityBeans.toString()); } } }
From source file:eu.europa.esig.dss.validation.policy.EtsiValidationPolicy.java
/**
 * Builds the signing-certificate key-usage constraint for the given context.
 * <p>
 * Reads the constraint level from the validation policy; when a level is
 * defined, the expected key-usage identifiers are collected from the policy
 * and attached to the returned constraint.
 *
 * @param context the policy context (e.g. signature type) substituted into the XPath
 * @return the configured constraint, or {@code null} when no level is set
 */
@Override
public Constraint getSigningCertificateKeyUsageConstraint(final String context) {
    final String level = getValue("/ConstraintsParameters/%s/SigningCertificate/KeyUsage/@Level", context);
    if (StringUtils.isBlank(level)) {
        // No level configured: this check is not performed.
        return null;
    }
    final Constraint constraint = new Constraint(level);
    final List<XmlDom> keyUsageElements = getElements(
            "/ConstraintsParameters/%s/SigningCertificate/KeyUsage/Identifier", context);
    final List<String> identifiers = XmlDom.convertToStringList(keyUsageElements);
    constraint.setExpectedValue(identifiers.toString());
    constraint.setIdentifiers(identifiers);
    return constraint;
}
From source file:com.oneops.crawler.CMSCrawler.java
/**
 * Builds a cache of all organizations from the CMS database.
 * <p>
 * Joins account.Organization CIs with their attribute rows and folds each
 * attribute row into an {@code Organization} keyed by CI name. Attribute ids
 * for description/full_name/owner/tags are resolved from
 * {@code baseOrganizationMDClassAttributes_NameIdMapCache}, which must
 * already be populated before this method is called.
 *
 * @param conn open JDBC connection to the CMS Postgres database
 * @return map of organization name to populated {@code Organization}
 */
public Map<String, Organization> populateOrganizations(Connection conn) {
    log.info("Populating organizations cache");
    DSLContext create = DSL.using(conn, SQLDialect.POSTGRES);
    Map<String, Organization> organizationsMap = new HashMap<>();
    // One row per (organization CI, attribute); an org therefore appears
    // once per attribute in this result set.
    Result<Record4<Long, String, Integer, String>> OrganizationsWithAttributesRecords = create
            .select(CM_CI.CI_ID, CM_CI.CI_NAME, CM_CI_ATTRIBUTES.ATTRIBUTE_ID,
                    CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE)
            .from(CM_CI).join(CM_CI_ATTRIBUTES).on(CM_CI.CI_ID.eq(CM_CI_ATTRIBUTES.CI_ID))
            .where(CM_CI.CLASS_ID.in(create.select(MD_CLASSES.CLASS_ID).from(MD_CLASSES)
                    .where(MD_CLASSES.CLASS_NAME.eq("account.Organization"))))
            .fetch();
    // Debug-only bookkeeping: log raw vs. distinct counts of ids/names.
    List<Long> OrganizationIds = OrganizationsWithAttributesRecords.getValues(CM_CI.CI_ID);
    log.debug("OrganizationIds: " + OrganizationIds.toString());
    Set<Long> setOfOrganizationIds = new HashSet<Long>(OrganizationIds);
    log.debug("setOfOrganizationIds <" + setOfOrganizationIds.size() + "> " + setOfOrganizationIds);
    List<String> OrganizationNames = OrganizationsWithAttributesRecords.getValues(CM_CI.CI_NAME);
    log.debug("OrganizationNames: " + OrganizationNames.toString());
    Set<String> setOfOrganizationNames = new HashSet<String>(OrganizationNames);
    log.debug("setOfOrganizationNames: <" + setOfOrganizationNames.size() + "> " + setOfOrganizationNames);
    // Attribute ids of the organization MD-class fields we care about;
    // assumes the cache already contains all four keys — TODO confirm.
    int description_AttribID = this.baseOrganizationMDClassAttributes_NameIdMapCache.get("description");
    int full_name_AttribID = this.baseOrganizationMDClassAttributes_NameIdMapCache.get("full_name");
    int owner_AttribID = this.baseOrganizationMDClassAttributes_NameIdMapCache.get("owner");
    int tags_AttribID = this.baseOrganizationMDClassAttributes_NameIdMapCache.get("tags");
    for (Record4<Long, String, Integer, String> OrganizationsWithAttributesRecord : OrganizationsWithAttributesRecords) {
        long organizationId = OrganizationsWithAttributesRecord.getValue(CM_CI.CI_ID);
        String organizationName = OrganizationsWithAttributesRecord.getValue(CM_CI.CI_NAME);
        // Get-or-create: rows for the same org accumulate into one object.
        Organization organization = organizationsMap.get(organizationName);
        log.debug("organizationId: " + organizationId);
        if (organization == null) {
            organization = new Organization();
            organizationsMap.put(organizationName, organization);
        }
        // Dispatch the attribute value to the matching Organization field.
        int attributeID = OrganizationsWithAttributesRecord.getValue(CM_CI_ATTRIBUTES.ATTRIBUTE_ID);
        if (attributeID == description_AttribID) {
            organization.setDescription(
                    OrganizationsWithAttributesRecord.getValue(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE));
            continue;
        } else if (attributeID == full_name_AttribID) {
            organization.setFull_name(
                    OrganizationsWithAttributesRecord.getValue(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE));
            continue;
        } else if (attributeID == owner_AttribID) {
            organization
                    .setOwner(OrganizationsWithAttributesRecord.getValue(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE));
            continue;
        } else if (attributeID == tags_AttribID) {
            // Tags are stored as a JSON object string; deserialize to a map.
            @SuppressWarnings("unchecked")
            Map<String, String> tags = gson.fromJson(
                    OrganizationsWithAttributesRecord.getValue(CM_CI_ATTRIBUTES.DF_ATTRIBUTE_VALUE),
                    Map.class);
            organization.setTags(tags);
            continue;
        }
    }
    log.info("Caching for Org Data Complete");
    return organizationsMap;
}
From source file:eu.europa.esig.dss.validation.policy.EtsiValidationPolicy.java
/**
 * Builds the claimed-roles constraint from the validation policy.
 * <p>
 * Reads the constraint level for the mandated signed qualifying property
 * {@code ClaimedRoles}; when a level is defined, the expected role values
 * are collected from the policy and attached to the returned constraint.
 *
 * @return the configured constraint, or {@code null} when no level is set
 */
@Override
public Constraint getClaimedRoleConstraint() {
    final String level = getValue(
            "/ConstraintsParameters/MainSignature/MandatedSignedQProperties/ClaimedRoles/@Level");
    if (StringUtils.isBlank(level)) {
        // No level configured: this check is not performed.
        return null;
    }
    final Constraint constraint = new Constraint(level);
    final List<XmlDom> roleElements = getElements(
            "/ConstraintsParameters/MainSignature/MandatedSignedQProperties/ClaimedRoles/Role");
    final List<String> roles = XmlDom.convertToStringList(roleElements);
    constraint.setExpectedValue(roles.toString());
    constraint.setIdentifiers(roles);
    return constraint;
}
From source file:emea.summit.architects.HackathlonAPIResource.java
@POST @Path("/service/validate") @Consumes("application/json") @ApiOperation("Sends the expected payload of your service and it will validate construct and ordering") public String validate(List<RequestPayload> request) { if (request != null) { System.out.println("Request Object ---->" + request.toString()); boolean ordered = inOrder(request.iterator(), null); if (!ordered) { return INVALID_RESPONSE + request.toString(); }//from w w w . j a va 2s . c om } return VALID_RESPONSE; }
From source file:com.ngdata.hbaseindexer.indexer.FusionDocumentWriter.java
/**
 * Deletes the given document ids from Solr, including derived child documents.
 * <p>
 * First issues a bulk delete-by-id, then a delete-by-query for child documents
 * whose ids start with a parent id plus {@code deleteByQueryAppendString}. If
 * either step throws, falls back to deleting each id individually.
 *
 * @param shard       shard number (currently unused in this method body)
 * @param idsToDelete HBase row ids to remove from the index
 * @throws SolrServerException declared for the Solr client calls
 * @throws IOException         declared for the Solr client calls
 */
public void deleteById(int shard, List<String> idsToDelete) throws SolrServerException, IOException {
    // Truncate very long id lists in the log line to the first 15 entries.
    int len = 15;
    String listLogInfo = (idsToDelete.size() > len)
            ? (idsToDelete.subList(0, len).toString() + " + " + (idsToDelete.size() - len) + " more ...")
            : idsToDelete.toString();
    log.info("Sending a deleteById '" + idsToDelete + "' to Solr(s) at: " + solrProxies);
    // Tracks which phase failed: false => deleteById itself failed,
    // true => deleteById succeeded and the later deleteByQuery failed.
    boolean deleteByIdsSucceeded = false;
    try {
        solrProxy.deleteById(idsToDelete, 500);
        indexDeleteMeter.mark(idsToDelete.size());
        deleteByIdsSucceeded = true;
        // This statement was inserted for Zendesk ticket 4186. If the delete by IDs succeeds above, we also need to ensure
        // that all children documents that have the id (HBase row ID) in their id followed immediately by the
        // deleteByQueryAppendString followed by any additional characters are also deleted from the index.
        deleteByQuery(idsToDelete, "id", deleteByQueryAppendString);
    } catch (Exception e) {
        // The "query"/"id" wording is driven by deleteByIdsSucceeded: if the
        // id delete already succeeded, the failure came from deleteByQuery.
        log.error("Delete docs by " + (deleteByIdsSucceeded ? "query" : "id") + " failed due to: " + e
                + "; ids: " + idsToDelete
                + (deleteByIdsSucceeded ? " appended with '" + deleteByQueryAppendString : "")
                + ". Retry deleting individually by id.");
        retryDeletesIndividually(idsToDelete, deleteByIdsSucceeded);
    }
}
From source file:web.EventLogController.java
/** * Main method for paginating the list of events * @param pageid//w ww .j a v a2 s .com * @param request * @return */ @RequestMapping("/eventlog/vieweventlog/{pageid}") public ModelAndView showEventLogPager(@PathVariable int pageid, HttpServletRequest request) { int total = 25; int start = 1; //displays page if user isn't on first page if (pageid != 1) { start = (pageid - 1) + total + 1; } //totals up results of pagination method in DAO to provide page numbers List<EventLog> eventLogs = dao.getEventsByPage(start, total); HashMap<String, Object> context = new HashMap<String, Object>(); context.put("eventlog", eventLogs); int count = dao.getEventsCount(); context.put("pages", Math.ceil((float) count / (float) total)); context.put("page", pageid); Messages msg = (Messages) request.getSession().getAttribute("message"); if (msg != null) { context.put("message", msg); request.getSession().removeAttribute("message"); } logger.info(eventLogs.toString()); return new ModelAndView("vieweventlog", context); }