List of usage examples for java.lang.Long.intValue()
public int intValue()
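Long.intValue() returns the value of this Long as an int after a narrowing primitive conversion, so values outside the int range silently lose their high-order bits. A minimal sketch of the behavior (the values are illustrative):

    Long rowCount = 42L;
    int small = rowCount.intValue();            // 42 -- fits in an int

    Long huge = 5_000_000_000L;                 // larger than Integer.MAX_VALUE
    int truncated = huge.intValue();            // 705032704 -- high bits dropped, no exception

    int checked = Math.toIntExact(rowCount);    // throws ArithmeticException instead of truncating

Most of the examples below follow the same pattern: an API hands back a Long (a count, a size, an id), and the caller needs a plain int for an array size, a page offset, or a setter that only accepts int.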
From source file:br.org.indt.ndg.server.survey.SurveyHandlerBean.java
private Integer getQtImeisBySurvey(String idSurvey, String status) {
    String sQuery = "select count(*) ";
    sQuery += "from Transactionlog where transactionType = ";
    sQuery += "\'";
    sQuery += TransactionLogVO.TYPE_SEND_SURVEY;
    sQuery += "\'";
    sQuery += " and survey.idSurvey = :surveyId";
    /* PENDING = all not equal SUCCESS */
    if ((status != null) && status.equals(TransactionLogVO.STATUS_PENDING)) {
        sQuery += " and NOT(transactionStatus = " + "\'" + TransactionLogVO.STATUS_SUCCESS + "\')";
    } else {
        sQuery += " and transactionStatus = :status";
    }
    Query q = manager.createQuery(sQuery);
    q.setParameter("surveyId", idSurvey);
    if ((status != null) && !status.equals(TransactionLogVO.STATUS_PENDING)) {
        q.setParameter("status", status);
    }
    Long qtImeis = (Long) q.getSingleResult();
    return qtImeis.intValue();
}
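The essence of this example is that a JPQL count query returns a Long from getSingleResult(), which is then narrowed to fit the Integer return type. A minimal sketch of the same count-and-narrow pattern with a typed query (the Transactionlog entity name is taken from the example above; the EntityManager field and the simplified WHERE clause are illustrative):

    TypedQuery<Long> q = manager.createQuery(
            "select count(t) from Transactionlog t where t.survey.idSurvey = :surveyId", Long.class);
    q.setParameter("surveyId", idSurvey);
    int count = q.getSingleResult().intValue();   // a row count is very unlikely to exceed Integer.MAX_VALUE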
From source file:com.hybris.mobile.lib.commerce.sync.CatalogSyncAdapter.java
/**
 * Loop and save the products and sub-categories of a specific category.
 *
 * @param categoryHierarchy Category to be searched
 * @param currentPage       Page number to request in the response
 * @param loopSubCategories True to also loop over the subcategories
 * @param loadVariants      True to load product variants, false otherwise
 */
private void loopCategory(final CategoryHierarchy categoryHierarchy, final int currentPage,
        final boolean loopSubCategories, final boolean loadVariants) {
    Log.i(TAG, "Syncing the products of the category " + categoryHierarchy.getId() + " for page "
            + currentPage + " to " + Constants.CATALOG_MAX_PAGE_SIZE);

    final QueryProducts queryProducts = new QueryProducts();
    queryProducts.setIdCategory(categoryHierarchy.getId());
    queryProducts.setCurrentPage(currentPage);
    queryProducts.setPageSize(Constants.CATALOG_MAX_PAGE_SIZE);

    getProducts(queryProducts, false, mOnRequestListenerEndOfSync, new Callback() {
        @Override
        public void onProductsLoadedSuccess(List<DataSync> products, Long total) {
            Log.i(TAG, "Response received after syncing the products of the category "
                    + categoryHierarchy.getId());

            // Construct the list of the parent category ids
            List<String> parentCategoriesId = getParentsCategoriesIds(categoryHierarchy, new ArrayList<String>());

            // Loading the products
            if (products != null) {
                for (DataSync product : products) {
                    loadProduct(product.getId(), categoryHierarchy.getId(), parentCategoriesId, true, loadVariants);
                }
            }

            // Loading the next pages of products and subcategories
            if (currentPage == 0) {
                int nbPages = (int) Math.ceil(total.intValue() / queryProducts.getPageSize());

                // Get the next pages of the category
                for (int i = 1; i < nbPages; i++) {
                    loopCategory(categoryHierarchy, i, loopSubCategories, loadVariants);
                }

                // Get the subcategories
                if (loopSubCategories && categoryHierarchy.getSubcategories() != null) {
                    for (CategoryHierarchy subCategoryHierarchy : categoryHierarchy.getSubcategories()) {
                        loopCategory(subCategoryHierarchy, 0, loopSubCategories, loadVariants);
                    }
                }
            }
        }

        @Override
        public void onProductsLoadedError() {
        }
    });
}
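One subtlety in the paging arithmetic above: total.intValue() / queryProducts.getPageSize() appears to be an integer division, so the fractional part is discarded before Math.ceil ever sees it, and a trailing partial page could be skipped. A small hedged sketch of a ceiling division that avoids the issue (the variable names are illustrative):

    // e.g. total = 101 items, pageSize = 20 -> 6 pages, not 5
    int pageSize = queryProducts.getPageSize();
    int nbPages = (int) ((total + pageSize - 1) / pageSize);          // ceiling division on long values
    // or: int nbPages = (int) Math.ceil((double) total / pageSize);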
From source file:de.bps.webservices.clients.onyxreporter.OnyxReporterConnector.java
/**
 * For every result xml file found in the survey folder a dummy student is created.
 *
 * @param nodeId
 * @return
 */
private ArrayList<ResultsForStudent> getAnonymizedStudentsWithResultsForSurvey(String nodeId) {
    ArrayList<ResultsForStudent> serviceStudents = new ArrayList<ResultsForStudent>();
    File directory = new File(this.surveyFolderPath);
    Long fileLength;
    File resultFile;
    if (directory.exists()) {
        String[] allXmls = directory.list(new OnyxReporterConnectorFileNameFilter(nodeId));
        if (allXmls != null && allXmls.length > 0) {
            int id = 0;
            for (String xmlFileName : allXmls) {
                ResultsForStudent serviceStudent = new ResultsForStudent();
                serviceStudent.setFirstname("");
                serviceStudent.setLastname("");
                serviceStudent.setGroupname("");
                serviceStudent.setTutorname("");
                serviceStudent.setStudentId("st" + id);
                resultFile = new File(this.surveyFolderPath + xmlFileName);
                fileLength = resultFile.length();
                byte[] resultFileStream = new byte[fileLength.intValue()];
                java.io.FileInputStream inp;
                try {
                    inp = new java.io.FileInputStream(resultFile);
                    inp.read(resultFileStream);
                    serviceStudent.setResultsFile(resultFileStream);
                    serviceStudents.add(serviceStudent);
                    id++;
                } catch (FileNotFoundException e) {
                    log.error("Missing file: " + resultFile.getAbsolutePath(), e);
                } catch (IOException e) {
                    log.error("Error copying file: " + resultFile.getAbsolutePath(), e);
                }
            }
        }
    }
    return serviceStudents;
}
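Here File.length() returns a long that is boxed and narrowed with intValue() to size the byte array; for a file of 2 GB or more the array would be sized incorrectly, and a single read() call is not guaranteed to fill the buffer. A hedged sketch of a simpler alternative on Java 7+ (same intent, not the project's actual code):

    // Reads the whole file and handles short reads internally;
    // for files that do not fit in a byte array it fails fast instead of silently truncating.
    byte[] resultFileStream = java.nio.file.Files.readAllBytes(resultFile.toPath());
    serviceStudent.setResultsFile(resultFileStream);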
From source file:org.asqatasun.service.command.AuditCommandImpl.java
@Override
public void analyse() {
    audit = auditDataService.read(audit.getId());
    if (!audit.getStatus().equals(AuditStatus.ANALYSIS)) {
        LOGGER.warn(new StringBuilder(AUDIT_STATUS_IS_LOGGER_STR).append(audit.getStatus())
                .append(WHILE_LOGGER_STR).append(AuditStatus.ANALYSIS).append(WAS_REQUIRED_LOGGER_STR).toString());
        return;
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Analysing " + audit.getSubject().getURL());
    }
    // debug tools
    Date beginProcessDate = null;
    Date endProcessDate = null;
    Date endPersistDate;
    Long persistenceDuration = (long) 0;
    WebResource parentWebResource = audit.getSubject();
    if (parentWebResource instanceof Page) {
        analyserService.analyse(parentWebResource, audit);
        webResourceDataService.saveOrUpdate(parentWebResource);
    } else if (parentWebResource instanceof Site) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Analysing results of scope site");
            beginProcessDate = Calendar.getInstance().getTime();
        }
        analyserService.analyse(parentWebResource, audit);
        if (LOGGER.isDebugEnabled()) {
            endProcessDate = Calendar.getInstance().getTime();
            LOGGER.debug(new StringBuilder("Analysing results of scope site took ")
                    .append(endProcessDate.getTime() - beginProcessDate.getTime()).append(MS_LOGGER_STR)
                    .toString());
        }
        webResourceDataService.saveOrUpdate(parentWebResource);
        if (LOGGER.isDebugEnabled()) {
            endPersistDate = Calendar.getInstance().getTime();
            LOGGER.debug(new StringBuilder("Persisting Analysis results of scope site ")
                    .append(endPersistDate.getTime() - endProcessDate.getTime()).append(MS_LOGGER_STR)
                    .toString());
            persistenceDuration = persistenceDuration + (endPersistDate.getTime() - endProcessDate.getTime());
        }
        Long nbOfContent = webResourceDataService.getNumberOfChildWebResource(parentWebResource);
        Long i = (long) 0;
        List<WebResource> webResourceList;
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug(new StringBuilder("Analysing ").append(nbOfContent).append(" elements ").toString());
        }
        while (i.compareTo(nbOfContent) < 0) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug(new StringBuilder("Analysing results of scope page from ").append(i)
                        .append(TO_LOGGER_STR).append(i + analyseTreatmentWindow).toString());
                beginProcessDate = Calendar.getInstance().getTime();
            }
            webResourceList = webResourceDataService.getWebResourceFromItsParent(parentWebResource,
                    i.intValue(), analyseTreatmentWindow);
            for (WebResource webResource : webResourceList) {
                if (LOGGER.isDebugEnabled()) {
                    endProcessDate = Calendar.getInstance().getTime();
                    LOGGER.debug(new StringBuilder("Analysing results for page ").append(webResource.getURL())
                            .append(" took ").append(endProcessDate.getTime() - beginProcessDate.getTime())
                            .append(MS_LOGGER_STR).toString());
                }
                analyserService.analyse(webResource, audit);
                if (LOGGER.isDebugEnabled()) {
                    endPersistDate = Calendar.getInstance().getTime();
                    LOGGER.debug(new StringBuilder("Persisting Analysis results for page ")
                            .append(webResource.getURL()).append(" took ")
                            .append(endPersistDate.getTime() - endProcessDate.getTime()).append(MS_LOGGER_STR)
                            .toString());
                    persistenceDuration = persistenceDuration
                            + (endPersistDate.getTime() - endProcessDate.getTime());
                }
            }
            i = i + analyseTreatmentWindow;
        }
    }
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug(new StringBuilder("Application spent ").append(persistenceDuration)
                .append(" ms to write in Disk while analysing").toString());
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info(audit.getSubject().getURL() + " has been analysed");
    }
    setStatusToAudit(AuditStatus.COMPLETED);
    if (cleanUpRelatedContent) {
        cleanUpTestData(audit);
    }
}
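The loop above walks the child resources in fixed-size windows, keeping a Long counter and passing counter.intValue() as the offset to the data service. A stripped-down sketch of that windowed-iteration pattern (the helper names and the Item type are placeholders, not the Asqatasun API):

    long total = countChildren(parent);          // e.g. from a count query that returns a Long
    int window = 50;                             // treatment window size
    for (long offset = 0; offset < total; offset += window) {
        List<Item> batch = fetchChildren(parent, (int) offset, window);   // DAO offset parameter is an int
        for (Item item : batch) {
            process(item);
        }
    }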
From source file:edu.harvard.iq.dvn.core.index.IndexServiceBean.java
public void updateStudiesInCollections() {
    long ioProblemCount = 0;
    boolean ioProblem = false;
    Indexer indexer = Indexer.getInstance();
    String dvnIndexLocation = System.getProperty("dvn.index.location");
    String lockFileName = dvnIndexLocation + "/IndexAll.lock";
    File indexAllLockFile = new File(lockFileName);
    // Before we do anything else, check if the index directory is
    // locked for IndexAll:
    if (indexAllLockFile.exists()) {
        logger.info("Detected IndexAll in progress; skipping reindexing ofn collection-linked studies.");
        return;
    }
    logger.info("Starting batch reindex of collection-linked studies.");
    lockFileName = dvnIndexLocation + "/collReindex.lock";
    File collReindexLockFile = new File(lockFileName);
    try {
        // Check for an existing lock file:
        if (collReindexLockFile.exists()) {
            String errorMessage = "Cannot reindex: collection reindexing already in progress;";
            errorMessage += ("lock file " + lockFileName + ", created on "
                    + (new Date(collReindexLockFile.lastModified())).toString() + ".");
            throw new IOException(errorMessage);
        }
        // Create a lock file:
        try {
            collReindexLockFile.createNewFile();
        } catch (IOException ex) {
            String errorMessage = "Error: could not create lock file (";
            errorMessage += (lockFileName + ")");
            throw new IOException(errorMessage);
        }
        List<Long> vdcIdList = vdcService.findAllIds();
        logger.fine("Found " + vdcIdList.size() + " dataverses.");
        Long maxStudyId = studyService.getMaxStudyTableId();
        if (maxStudyId == null) {
            logger.fine("The database appears to be empty. Exiting.");
            return;
        }
        if (maxStudyId.intValue() != maxStudyId.longValue()) {
            logger.severe("There appears to be more than 2^^31 objects in the study table; the subnetwork cross-indexing hack isn't going to work.");
            throw new IOException("There appears to be more than 2^^31 objects in the study table; the subnetwork cross-indexing hack isn't going to work.");
            /* This is quite unlikely to happen, but still... */
        }
        ArrayList<VDCNetwork> subNetworks = getSubNetworksAsArray(); // vdcNetworkService.getVDCSubNetworks();
        // This is an array of [sub]networks organized by *network id*;
        // i.e., if there are subnetworks with the ids 0, 2 and 5 the array
        // will contain {0, NULL, network_2, NULL, NULL, network_5}
        if (subNetworks == null || (subNetworks.size() < 1)) {
            // No subnetworks in this DV Network; nothing to do.
            logger.fine("There's only one network in the DVN; nothing to do. Exiting");
            return;
        }
        int maxSubnetworkId = subNetworks.size() - 1;
        if (maxSubnetworkId > 63) {
            logger.severe("There are more than 63 VDC (sub)networks. The subnetwork cross-indexing hack isn't going to work."
                    + "(we are using longs as bitstrings to store network cross-linked status of a study)");
            throw new IOException("There are more than 63 VDC (sub)networks. The subnetwork cross-indexing hack isn't going to work."
                    + "(we are using longs as bitstrings to store network cross-linked status of a study)");
            /* Not very likely to happen either... */
        }
        long linkedVdcNetworkMap[] = new long[maxStudyId.intValue() + 1];
        Long vdcId = null;
        VDC vdc = null;
        List<Long> linkedStudyIds = null;
        Long vdcNetworkId = null;
        Long studyNetworkId = null;
        Study linkedStudy = null;
        for (Iterator it = vdcIdList.iterator(); it.hasNext();) {
            vdcId = (Long) it.next();
            vdc = vdcService.findById(vdcId);
            if (vdc != null && vdc.getVdcNetwork() != null) {
                vdcNetworkId = vdc.getVdcNetwork().getId();
                if (vdcNetworkId.longValue() > 0) {
                    // We are not interested in the VDCs in the top-level
                    // network (network id 0); because the top-level network
                    // already contains all the studies in it. Whatever
                    // studies the dynamic collections may be linking, they
                    // are still in the same DVN.
                    linkedStudyIds = indexer.findStudiesInCollections(vdc);
                    if (linkedStudyIds != null) {
                        logger.fine("Found " + linkedStudyIds.size() + " linked studies in VDC " + vdc.getId()
                                + ", subnetwork " + vdcNetworkId.toString());
                        for (Long studyId : linkedStudyIds) {
                            if (studyId.longValue() <= maxStudyId.longValue()) {
                                // otherwise this is a new study, created since we
                                // have started this process; we'll be skipping it,
                                // this time around.
                                try {
                                    linkedStudy = studyService.getStudy(studyId);
                                } catch (Exception ex) {
                                    linkedStudy = null;
                                }
                                if (linkedStudy != null) {
                                    studyNetworkId = linkedStudy.getOwner().getVdcNetwork().getId();
                                    if ((studyNetworkId != null) && (vdcNetworkId.compareTo(studyNetworkId) != 0)) {
                                        // this study is cross-linked from another VDC network!
                                        logger.fine("Study " + linkedStudy.getId() + " from subnetwork "
                                                + studyNetworkId + " is linked to this VDC (" + vdc.getId() + ").");
                                        linkedVdcNetworkMap[linkedStudy.getId().intValue()] |= (1 << vdcNetworkId.intValue());
                                    }
                                }
                                linkedStudy = null;
                                studyNetworkId = null;
                            }
                        }
                    }
                    linkedStudyIds = null;
                    vdcNetworkId = null;
                }
            }
            vdcId = null;
            vdc = null;
        }
        // Now go through the list of studies and reindex those for which
        // the cross-linked status has changed:
        logger.fine("Checking the cross-linking status and reindexing the studies for which it has changed:");
        List<Long> linkedToNetworkIds = null;
        boolean reindexNecessary = false;
        // Check for the studies that are no longer linked to any foreign
        // subnetworks:
        List<Long> existingLinkedStudies = studyService.getAllLinkedStudyIds();
        Long sid = null;
        for (Iterator it = existingLinkedStudies.iterator(); it.hasNext();) {
            sid = (Long) it.next();
            if (linkedVdcNetworkMap[sid.intValue()] == 0) {
                // study no longer linked to any subnetworks
                linkedVdcNetworkMap[sid.intValue()] = -1;
            }
        }
        // TODO: would be faster still to retrieve the entire map of crosslinks
        // from the db in a single query here, cook another array of bitstrings
        // and then just go and compare the 2, without making any further
        // queries... --L.A.
        List<VDCNetwork> currentCrossLinks = null;
        for (int i = 0; i < maxStudyId.intValue() + 1; i++) {
            if (linkedVdcNetworkMap[i] != 0) {
                logger.fine("study " + i + ": cross-linked outside of its network; (still need to check if we need to reindex it)");
                try {
                    linkedStudy = studyService.getStudy(new Long(i));
                } catch (Exception ex) {
                    linkedStudy = null;
                }
                reindexNecessary = false;
                if (linkedStudy != null) {
                    // Only released studies get indexed.
                    // (but studies that are no longer released may
                    // need to be dropped from the crosslinking map, and
                    // from the index)
                    currentCrossLinks = linkedStudy.getLinkedToNetworks();
                    if (linkedVdcNetworkMap[i] == -1) {
                        // If it's an "unlinked" study,
                        // remove the existing links in the database:
                        logger.fine("study " + i + " no longer cross-linked to any subnetworks.");
                        // linkedStudy.setLinkedToNetworks(null);
                        linkedStudy = studyService.setLinkedToNetworks(linkedStudy.getId(), null);
                        reindexNecessary = true;
                    } else if (linkedStudy.isReleased()) {
                        // else find what subnetworks this study is already linked
                        // to in the database:
                        linkedToNetworkIds = linkedStudy.getLinkedToNetworkIds();
                        long linkedNetworkBitString = produceLinkedNetworksBitstring(linkedToNetworkIds);
                        if (linkedNetworkBitString != linkedVdcNetworkMap[i]) {
                            // This means the cross-linking status of the study has changed!
                            logger.fine("study " + i + ": cross-linked status has changed; updating");
                            // Update it in the database:
                            // linkedStudy.setLinkedToNetworks(newLinkedToNetworks(subNetworks, linkedVdcNetworkMap[i]));
                            linkedStudy = studyService.setLinkedToNetworks(linkedStudy.getId(),
                                    newLinkedToNetworks(subNetworks, linkedVdcNetworkMap[i]));
                            // studyService.updateStudy(linkedStudy);
                            reindexNecessary = true;
                        }
                    }
                    if (reindexNecessary) {
                        // Re-index the study:
                        indexer = Indexer.getInstance();
                        boolean indexSuccess = true;
                        try {
                            indexer.deleteDocumentCarefully(linkedStudy.getId());
                        } catch (IOException ioe) {
                            indexSuccess = false;
                        }
                        if (indexSuccess) {
                            try {
                                // indexer.addDocument(linkedStudy);
                                addDocument(linkedStudy);
                            } catch (Exception ex) {
                                ioProblem = true;
                                ioProblemCount++;
                                logger.severe("Caught exception attempting to re-index re-linked study "
                                        + linkedStudy.getId() + "; " + ex.getMessage());
                                ex.printStackTrace();
                                indexSuccess = false;
                            }
                        } else {
                            logger.fine("Could not delete study " + linkedStudy.getId()
                                    + " from index; skipping reindexing.");
                        }
                        if (!indexSuccess) {
                            // Make sure we leave the db linking status entry
                            // in the same shape it was before the reindexing
                            // attempt; so that it'll hopefully get caught
                            // by the next reindexing now.
                            // linkedStudy.setLinkedToNetworks(currentCrossLinks);
                            linkedStudy = studyService.setLinkedToNetworks(linkedStudy.getId(), currentCrossLinks);
                        }
                    }
                }
            }
        }
        logger.info("Done reindexing collection-linked studies.");
    } catch (Exception ex) {
        ioProblem = true;
        ioProblemCount++;
        logger.severe("Caught exception while trying to update studies in collections: " + ex.getMessage());
        ex.printStackTrace();
    } finally {
        // delete the lock file:
        if (collReindexLockFile.exists()) {
            collReindexLockFile.delete();
        }
    }
    handleIOProblems(ioProblem, ioProblemCount);
}
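The reindexing code above uses Long.intValue() twice in one expression: once as an array index (the study id) and once as a shift amount (the subnetwork id), packing up to 63 subnetwork flags into a single long per study. A minimal sketch of setting and testing such a flag (the values are illustrative):

    Long maxStudyId = 10_000L;                       // illustrative upper bound
    long[] linkedMap = new long[maxStudyId.intValue() + 1];
    Long studyId = 1234L;
    Long networkId = 5L;

    linkedMap[studyId.intValue()] |= (1L << networkId.intValue());     // mark study 1234 as linked to subnetwork 5
    boolean linked = (linkedMap[studyId.intValue()] & (1L << networkId.intValue())) != 0;

One thing worth noting: the original shifts the int literal 1, and int shift distances are taken modulo 32, so subnetwork ids of 31 and above would not be recorded correctly even though the guard allows ids up to 63; shifting 1L uses the full width of the long bitstring.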
From source file:com.cisco.dvbu.ps.deploytool.services.ResourceCacheManagerImpl.java
private void resourceCacheAction(String actionName, String serverId, String resourceIds,
        String pathToResourceCacheXML, String pathToServersXML) throws CompositeException {

    // Validate whether the files exist or not
    if (!CommonUtils.fileExists(pathToResourceCacheXML)) {
        throw new CompositeException("File [" + pathToResourceCacheXML + "] does not exist.");
    }
    if (!CommonUtils.fileExists(pathToServersXML)) {
        throw new CompositeException("File [" + pathToServersXML + "] does not exist.");
    }

    String prefix = "dataSourceAction";
    String processedIds = null;

    // Get the configuration property file set in the environment with a default of deploy.properties
    String propertyFile = CommonUtils.getFileOrSystemPropertyValue(CommonConstants.propertyFile,
            "CONFIG_PROPERTY_FILE");

    // Extract variables for the resourceIds
    resourceIds = CommonUtils.extractVariable(prefix, resourceIds, propertyFile, true);

    // Set the Module Action Objective
    String s1 = (resourceIds == null) ? "no_resourceIds" : "Ids=" + resourceIds;
    System.setProperty("MODULE_ACTION_OBJECTIVE", actionName + " : " + s1);

    try {
        List<ResourceCacheType> resourceCacheModuleList = getResourceCache(serverId, resourceIds,
                pathToResourceCacheXML, pathToServersXML);

        if (resourceCacheModuleList != null && resourceCacheModuleList.size() > 0) {

            // Loop over the list of resource cache entries and apply their configurations to the target CIS instance.
            for (ResourceCacheType resourceCacheModule : resourceCacheModuleList) {

                // Get the identifier and convert any $VARIABLES
                String identifier = CommonUtils.extractVariable(prefix, resourceCacheModule.getId(),
                        propertyFile, true);

                /*
                 * Possible values for resource cache:
                 * 1. csv string like rc1,rc2 (we process only resource names which are passed in)
                 * 2. '*' or whatever is configured to indicate all resources (we process all resources in this case)
                 * 3. csv string with '-' or whatever is configured to indicate exclude resources as prefix,
                 *    like -rc1,rc2 (we ignore the passed-in resources and process the rest of the input xml)
                 */
                if (DeployUtil.canProcessResource(resourceIds, identifier)) {

                    // Add to the list of processed ids
                    if (processedIds == null)
                        processedIds = "";
                    else
                        processedIds = processedIds + ",";
                    processedIds = processedIds + identifier;

                    CacheConfig cacheConfig = new CacheConfig();
                    String resourceCachePath = CommonUtils.extractVariable(prefix,
                            resourceCacheModule.getResourcePath(), propertyFile, true);
                    String resourceCacheType = CommonUtils.extractVariable(prefix,
                            resourceCacheModule.getResourceType().toString(), propertyFile, false);

                    // Set the Module Action Objective
                    s1 = identifier + "=" + ((resourceCachePath == null) ? "no_resourceCachePath" : resourceCachePath);
                    System.setProperty("MODULE_ACTION_OBJECTIVE", actionName + " : " + s1);

                    if (logger.isInfoEnabled()) {
                        logger.info("processing action " + actionName + " on resource cache " + resourceCachePath);
                    }

                    if (actionName.equals(ResourceCacheDAO.action.ENABLE_DISABLE.name().toString())) {
                        if (resourceCacheModule.getCacheConfig() != null) {
                            // Set enabled flag
                            Boolean enabled = null;
                            if (resourceCacheModule.getCacheConfig().isEnabled() != null) {
                                enabled = resourceCacheModule.getCacheConfig().isEnabled();
                            }
                            // Set the Module Action Objective
                            s1 = identifier + "="
                                    + ((resourceCachePath == null) ? "no_resourceCachePath" : resourceCachePath);
                            String enable_disabled_action = (enabled == true) ? "ENABLE" : "DISABLE";
                            System.setProperty("MODULE_ACTION_OBJECTIVE", enable_disabled_action + " : " + s1);

                            updateResourceCacheEnabledAll(serverId, resourceCachePath, resourceCacheType,
                                    pathToServersXML, enabled);
                        }
                    } else {
                        if (resourceCacheModule.getCacheConfig() != null) {

                            // Set configured if it exists
                            if (resourceCacheModule.getCacheConfig().isConfigured() != null) {
                                cacheConfig.setConfigured(resourceCacheModule.getCacheConfig().isConfigured());
                            }
                            // Set enabled if it exists
                            if (resourceCacheModule.getCacheConfig().isEnabled() != null) {
                                cacheConfig.setEnabled(resourceCacheModule.getCacheConfig().isEnabled());
                            }

                            // Set the storage if it exists
                            if (resourceCacheModule.getCacheConfig().getStorage() != null) {
                                Storage storage = new Storage();

                                if (resourceCacheModule.getCacheConfig().getStorage().getMode() != null) {
                                    storage.setMode(StorageMode.valueOf(
                                            resourceCacheModule.getCacheConfig().getStorage().getMode()));
                                }
                                if (resourceCacheModule.getCacheConfig().getStorage().getStorageDataSourcePath() != null) {
                                    storage.setStorageDataSourcePath(CommonUtils.extractVariable(prefix,
                                            resourceCacheModule.getCacheConfig().getStorage().getStorageDataSourcePath(),
                                            propertyFile, true));
                                }
                                if (resourceCacheModule.getCacheConfig().getStorage().getStorageTargets() != null) {
                                    // Define the Target Storage List
                                    TargetPathTypePairList entry = new TargetPathTypePairList();

                                    for (ResourceCacheStorageTargetsType storageTarget : resourceCacheModule
                                            .getCacheConfig().getStorage().getStorageTargets()) {
                                        // Define the Target Storage Entry
                                        TargetPathTypePair targetPair = new TargetPathTypePair();

                                        // Set the target pair entry
                                        targetPair.setPath(CommonUtils.extractVariable(prefix,
                                                storageTarget.getPath(), propertyFile, true));
                                        targetPair.setTargetName(CommonUtils.extractVariable(prefix,
                                                storageTarget.getTargetName(), propertyFile, false));
                                        targetPair.setType(ResourceType.valueOf(storageTarget.getType().toUpperCase()));

                                        // Add the target pair entry to the list
                                        entry.getEntry().add(targetPair);
                                    }
                                    storage.setStorageTargets(entry);
                                }
                                cacheConfig.setStorage(storage);
                            } // end::if (resourceCacheModule.getCacheConfig().getStorage() != null)

                            // Set the refresh if it exists
                            if (resourceCacheModule.getCacheConfig().getRefresh() != null) {
                                Refresh refresh = new Refresh();
                                String refreshMode = resourceCacheModule.getCacheConfig().getRefresh().getMode()
                                        .toUpperCase();
                                refresh.setMode(RefreshMode.valueOf(refreshMode));

                                if (resourceCacheModule.getCacheConfig().getRefresh().getSchedule() != null) {
                                    if (refreshMode.equalsIgnoreCase("SCHEDULED")) {
                                        Schedule schedule = new Schedule();

                                        if (resourceCacheModule.getCacheConfig().getRefresh().getSchedule()
                                                .getStartTime() != null) {
                                            schedule.setStartTime(resourceCacheModule.getCacheConfig().getRefresh()
                                                    .getSchedule().getStartTime());
                                        }
                                        if (resourceCacheModule.getCacheConfig().getRefresh().getSchedule()
                                                .getRefreshPeriod().getPeriod() != null) {
                                            String period = resourceCacheModule.getCacheConfig().getRefresh()
                                                    .getSchedule().getRefreshPeriod().getPeriod().toUpperCase();

                                            // Set the mode to INTERVAL
                                            if (period.equalsIgnoreCase("SECOND") || period.equalsIgnoreCase("MINUTE")) {
                                                schedule.setMode(ScheduleMode.valueOf("INTERVAL"));
                                                Long interval = convertPeriodCount(period,
                                                        resourceCacheModule.getCacheConfig().getRefresh()
                                                                .getSchedule().getRefreshPeriod().getCount(),
                                                        "seconds");
                                                schedule.setInterval(interval.intValue());
                                            } else {
                                                schedule.setMode(ScheduleMode.valueOf("CALENDAR"));
                                                schedule.setPeriod(CalendarPeriod.valueOf(resourceCacheModule
                                                        .getCacheConfig().getRefresh().getSchedule()
                                                        .getRefreshPeriod().getPeriod().toUpperCase()));
                                                Integer count = (int) resourceCacheModule.getCacheConfig()
                                                        .getRefresh().getSchedule().getRefreshPeriod().getCount();
                                                schedule.setCount(count);
                                            }
                                        }
                                        refresh.setSchedule(schedule);
                                    }
                                }
                                cacheConfig.setRefresh(refresh);
                            } // end::if (resourceCacheModule.getCacheConfig().getRefresh() != null)

                            // Set the Expiration if it exists
                            if (resourceCacheModule.getCacheConfig().getExpirationPeriod() != null) {
                                Long milliCount = convertPeriodCount(
                                        resourceCacheModule.getCacheConfig().getExpirationPeriod().getPeriod(),
                                        resourceCacheModule.getCacheConfig().getExpirationPeriod().getCount(),
                                        "milliseconds");
                                cacheConfig.setExpirationPeriod(milliCount);
                            }

                            // Set the clear rule if it exists
                            if (resourceCacheModule.getCacheConfig().getClearRule() != null) {
                                cacheConfig.setClearRule(ClearRule.valueOf(
                                        resourceCacheModule.getCacheConfig().getClearRule().toUpperCase()));
                            }
                        } // end::if (resourceCacheModule.getCacheConfig() != null)

                        // Validate that the resource exists before acting on it.
                        Boolean validateResourceExists = true;

                        // Execute takeResourceCacheAction()
                        getResourceCacheDAO().takeResourceCacheAction(actionName, resourceCachePath,
                                resourceCacheType, cacheConfig, serverId, pathToServersXML, validateResourceExists);
                    } // end::if (actionName.equals(ResourceCacheDAO.action.ENABLE_DISABLE.name().toString()))
                } // end::if (DeployUtil.canProcessResource(resourceIds, identifier))
            } // end::for (ResourceCacheType resourceCacheModule : resourceCacheModuleList)

            // Determine if any resourceIds were not processed and report on this
            if (processedIds != null) {
                if (logger.isInfoEnabled()) {
                    logger.info("ResourceCache entries processed=" + processedIds);
                }
            } else {
                if (logger.isInfoEnabled()) {
                    String msg = "Warning: No resource cache entries were processed for the input list. resourceIds="
                            + resourceIds;
                    logger.info(msg);
                    System.setProperty("MODULE_ACTION_MESSAGE", msg);
                }
            }
        } else {
            if (logger.isInfoEnabled()) {
                String msg = "Warning: No resource cache entries found for ResourceCache Module XML at path="
                        + pathToResourceCacheXML;
                logger.info(msg);
                System.setProperty("MODULE_ACTION_MESSAGE", msg);
            }
        }
    } catch (CompositeException e) {
        logger.error("Error on resource cache action (" + actionName + "): ", e);
        throw new ApplicationContextException(e.getMessage(), e);
    }
}
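In the scheduling branch above, a helper converts the configured period count into seconds and returns a Long, which is narrowed with intValue() because the schedule's interval setter only takes an int. A hedged, simplified sketch of that kind of conversion (convertPeriodCount's real signature belongs to the deploy tool; this fragment is illustrative):

    String period = "MINUTE";
    long count = 15;
    // the deploy tool's convertPeriodCount(...) helper is assumed to perform this kind of conversion
    Long interval = period.equalsIgnoreCase("MINUTE") ? count * 60L : count;   // 900 seconds
    int intervalSeconds = interval.intValue();                                 // the schedule setter only accepts int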
From source file:com.ikanow.aleph2.shared.crud.mongodb.services.MongoDbCrudService.java
@Override
public CompletableFuture<Long> deleteObjectsBySpec(final QueryComponent<O> spec) {
    try {
        final Tuple2<DBObject, DBObject> query_and_meta = MongoDbUtils.convertToMongoQuery(spec);
        final Long limit = (Long) query_and_meta._2().get("$limit");
        final DBObject sort = (DBObject) query_and_meta._2().get("$sort");
        if ((null == limit) && (null == sort)) {
            // Simple case, just delete as many docs as possible
            final WriteResult<O, K> wr = _state.coll.remove(query_and_meta._1());
            return CompletableFuture.completedFuture((Long) (long) wr.getN());
        } else {
            final com.mongodb.DBCursor cursor = Optional
                    .of(_state.orig_coll.find(query_and_meta._1(), new BasicDBObject(_ID, 1)))
                    // (now we're processing on a cursor "c")
                    .map(c -> {
                        return (null != sort) ? c.sort(sort) : c;
                    }).map(c -> {
                        return (null != limit) ? c.limit(limit.intValue()) : c;
                    }).get();
            final List<Object> ids = StreamSupport.stream(cursor.spliterator(), false).map(o -> o.get(_ID))
                    .collect(Collectors.toList());
            return deleteObjectsBySpec(emptyQuery(_state.bean_clazz).withAny(_ID, ids));
        }
    } catch (Exception e) {
        return FutureUtils.<Long>returnError(e);
    }
}
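In this MongoDB example the optional $limit pulled from the query metadata is a Long, but the legacy driver's DBCursor.limit(...) takes an int, hence limit.intValue(). A minimal sketch of the same narrowing on a plain cursor (the collection, a DBCollection, and the query object are assumed to exist):

    DBCursor cursor = collection.find(query, new BasicDBObject("_id", 1));
    Long limit = 500L;                             // e.g. pulled from query metadata
    if (limit != null) {
        cursor = cursor.limit(limit.intValue());   // the driver API only accepts an int
    }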
From source file:com.romeikat.datamessie.core.base.dao.impl.SourceDao.java
public List<SourceOverviewDto> getAsOverviewDtos(final SharedSessionContract ssc, final Long userId,
        final Long projectId, final Boolean visible, final Long first, final Long count) {
    if (projectId == null) {
        return Collections.emptyList();
    }

    // Restrict to user
    final Collection<Long> projectIdsForUser = projectDao.getIdsForUser(ssc, userId);
    if (projectIdsForUser.isEmpty()) {
        return Collections.emptyList();
    }

    // Query: Project2Source
    final EntityQuery<Project2Source> project2SourceQuery = new EntityQuery<>(Project2Source.class);
    project2SourceQuery.addRestriction(Restrictions.eq("projectId", projectId));
    project2SourceQuery.addRestriction(Restrictions.in("projectId", projectIdsForUser));
    final Collection<Long> sourceIds = project2SourceQuery.listIdsForProperty(ssc, "sourceId");
    if (sourceIds.isEmpty()) {
        return Collections.emptyList();
    }

    // Query: Source
    final EntityWithIdQuery<Source> sourceQuery = new EntityWithIdQuery<>(Source.class);
    sourceQuery.addRestriction(Restrictions.in("id", sourceIds));
    if (visible != null) {
        sourceQuery.addRestriction(Restrictions.eq("visible", visible));
    }
    sourceQuery.setFirstResult(first == null ? null : first.intValue());
    sourceQuery.setMaxResults(count == null ? null : count.intValue());
    sourceQuery.addOrder(Order.asc("name"));

    // Done
    final List<Source> sources = sourceQuery.listObjects(ssc);

    // Transform
    final List<SourceOverviewDto> dtos = Lists.transform(sources, s -> sourceToOverviewDto(ssc, s));
    return Lists.newArrayList(dtos);
}
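Here the optional Long paging parameters are narrowed because the underlying Hibernate setFirstResult(int) and setMaxResults(int) only accept int; the project's own query wrapper accepts nullable Integers, which is why the null checks appear inline. A small sketch of the same guard-then-narrow pattern on a plain Hibernate 5 query (the session and the HQL string are illustrative):

    Query<Source> query = session.createQuery("from Source s order by s.name", Source.class);
    Long first = 20L;
    Long count = 10L;
    if (first != null) {
        query.setFirstResult(first.intValue());   // offset must be an int
    }
    if (count != null) {
        query.setMaxResults(count.intValue());    // page size must be an int
    }
    List<Source> page = query.list();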
From source file:org.geoserver.geofence.gui.server.service.impl.InstancesManagerServiceImpl.java
public PagingLoadResult<GSInstanceModel> getInstances(int offset, int limit, boolean full)
        throws ApplicationException {
    int start = offset;
    List<GSInstanceModel> instancesListDTO = new ArrayList<GSInstanceModel>();

    if (full) {
        GSInstanceModel all = new GSInstanceModel();
        all.setId(-1);
        all.setName("*");
        all.setBaseURL("*");
        instancesListDTO.add(all);
    }

    long instancesCount = geofenceRemoteService.getInstanceAdminService().getCount(null) + 1;
    Long t = new Long(instancesCount);

    int page = (start == 0) ? start : (start / limit);
    List<ShortInstance> instancesList = geofenceRemoteService.getInstanceAdminService().getList(null, page, limit);

    if (instancesList == null) {
        if (logger.isErrorEnabled()) {
            logger.error("No server instace found on server");
        }
        throw new ApplicationException("No server instance found on server");
    }

    Iterator<ShortInstance> it = instancesList.iterator();
    while (it.hasNext()) {
        long id = it.next().getId();
        org.geoserver.geofence.core.model.GSInstance remote = geofenceRemoteService.getInstanceAdminService().get(id);

        GSInstanceModel local = new GSInstanceModel();
        local.setId(remote.getId());
        local.setName(remote.getName());
        local.setDescription(remote.getDescription());
        local.setDateCreation(remote.getDateCreation());
        local.setBaseURL(remote.getBaseURL());
        local.setUsername(remote.getUsername());
        local.setPassword(remote.getPassword());

        instancesListDTO.add(local);
    }

    return new RpcPageLoadResult<GSInstanceModel>(instancesListDTO, offset, t.intValue());
}
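The paging result above needs the total row count as an int, so the long count from the admin service is boxed and narrowed with t.intValue(). If there were any chance of the count exceeding Integer.MAX_VALUE, clamping would be safer than silently wrapping; a one-line hedged alternative:

    int total = (int) Math.min(instancesCount, Integer.MAX_VALUE);   // clamp instead of truncating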