List of usage examples for java.util.HashSet addAll
boolean addAll(Collection<? extends E> c);
Adds all of the elements in the specified collection to this set if they're not already present, and returns true if the set changed as a result of the call.
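Before the project examples, a minimal standalone sketch of the call itself (the class name and values are invented for illustration): addAll copies every element of the argument collection into the set, silently dropping duplicates, and the boolean result reports whether the set actually changed.

import java.util.Arrays;
import java.util.HashSet;

public class AddAllDemo {
    public static void main(String[] args) {
        HashSet<String> tags = new HashSet<String>(Arrays.asList("red", "green"));

        // "green" is already present, so only "blue" is inserted.
        boolean changed = tags.addAll(Arrays.asList("green", "blue"));

        System.out.println(changed);                           // true: "blue" was new
        System.out.println(tags.size());                       // 3
        System.out.println(tags.addAll(Arrays.asList("red"))); // false: nothing new
    }
}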
From source file:dao.CarryonDaoDb.java
public HashSet getUniqueTags(List photos) {
    HashSet allSet = new HashSet();
    if (photos != null && photos.size() > 0) {
        for (int i = 0; i < photos.size(); i++) {
            if ((Photo) photos.get(i) != null) {
                String usertags = ((Photo) photos.get(i)).getValue(DbConstants.USER_TAGS);
                String[] yourkeys = usertags.split(",");
                if (i == 0) {
                    // First photo: seed the result set with its non-empty tags.
                    for (int k = 0; k < yourkeys.length; k++) {
                        if (!RegexStrUtil.isNull(yourkeys[k])) {
                            allSet.add(yourkeys[k]);
                        }
                    }
                } else {
                    // Later photos: collect their tags into a temporary set, then merge.
                    HashSet h1 = new HashSet();
                    for (int k = 0; k < yourkeys.length; k++) {
                        if (!RegexStrUtil.isNull(yourkeys[k])) {
                            h1.add(yourkeys[k]);
                        }
                    }
                    // removeAll() followed by addAll() produces the same result as
                    // addAll() alone: a HashSet never stores duplicates, so this pair
                    // simply computes the union of allSet and h1.
                    allSet.removeAll(h1);
                    //logger.info("removeAll h1 = " + h1.toString());
                    //logger.info("removeAll allSet = " + allSet.toString());
                    allSet.addAll(h1);
                    //logger.info("addAll h1 = " + h1.toString());
                    //logger.info("addAll allSet = " + allSet.toString());
                }
            }
        }
    }
    return allSet;
}
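Since the removeAll/addAll pair above is just a set union, the whole method reduces to collecting distinct tags. A minimal sketch of that idea, with plain strings standing in for the project's Photo and RegexStrUtil types (both of which are specific to that codebase):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class UniqueTagsDemo {
    // Collect the distinct, non-empty comma-separated tags across all entries.
    static Set<String> uniqueTags(List<String> taggedEntries) {
        Set<String> all = new HashSet<String>();
        for (String entry : taggedEntries) {
            for (String tag : entry.split(",")) {
                if (!tag.trim().isEmpty()) {
                    all.add(tag.trim()); // addAll of a per-entry temporary set would behave identically
                }
            }
        }
        return all;
    }

    public static void main(String[] args) {
        System.out.println(uniqueTags(List.of("sun,beach", "beach,rock,")));
        // prints the three distinct tags in some order, e.g. [rock, sun, beach]
    }
}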
From source file:net.cbtltd.rest.nextpax.A_Handler.java
private void updateInactiveProducts(HashSet<String> productsProceeded) {
    LOG.debug("Starting update for inactive products");
    String partyId = getAltpartyid();
    final SqlSession sqlSession = RazorServer.openSession();
    HashSet<String> activeProducts = new HashSet<String>();
    activeProducts.addAll(
            sqlSession.getMapper(ProductMapper.class).activeProductAltIdListBySupplier(partyId));
    try {
        // Whatever is marked active in the database but was not processed in this
        // run is no longer active at the supplier: compute the set difference.
        activeProducts.removeAll(productsProceeded);
        for (String altId : activeProducts) {
            Product product = sqlSession.getMapper(ProductMapper.class).altread(new NameId(partyId, altId));
            product.setState(Product.FINAL);
            sqlSession.getMapper(ProductMapper.class).update(product);
            LOG.debug("Product " + product.getName() + ", " + product.getId()
                    + " inactive. Moving to Final state.");
        }
        LOG.debug("Update for inactive products finished, no errors");
    } catch (Throwable e) {
        LOG.error("Could not finish update for inactive products, reason: " + e.getMessage());
    }
}
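The addAll/removeAll combination in this example is the standard idiom for a set difference: copy one collection into a HashSet, then subtract another. The same computation in isolation (the product ids are invented):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SetDifferenceDemo {
    public static void main(String[] args) {
        List<String> activeInDb = List.of("P1", "P2", "P3"); // currently active per the database
        List<String> proceeded = List.of("P2", "P3");        // touched by this synchronization run

        Set<String> inactive = new HashSet<String>();
        inactive.addAll(activeInDb);   // start from everything active
        inactive.removeAll(proceeded); // subtract what was just processed

        System.out.println(inactive);  // [P1] -- the candidates for the Final state
    }
}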
From source file:org.apache.hadoop.hive.ql.optimizer.correlation.RelaxCorrelationOptimizer.java
/**
 * Detect correlations and transform the query tree.
 *
 * @param pctx
 *          current parse context
 * @throws SemanticException
 */
public ParseContext transform(ParseContext pctx) throws SemanticException {
    pCtx = pctx;
    if (HiveConf.getBoolVar(pCtx.getConf(), HiveConf.ConfVars.HIVECONVERTJOIN)) {
        findPossibleAutoConvertedJoinOperators();
    }
    LOG.info("TC_PRE: trace back column names and version ID in RSs");
    OrigColumnNodeProcCtx origCorrCtx = new OrigColumnNodeProcCtx(pCtx);
    Map<Rule, NodeProcessor> origOpRules = new LinkedHashMap<Rule, NodeProcessor>();
    origOpRules.put(new RuleRegExp("R1", ReduceSinkOperator.getOperatorName() + "%"),
            new OrigColumnNodeProc());
    Dispatcher disp = new DefaultRuleDispatcher(getDefaultOrigProc(), origOpRules, origCorrCtx);
    GraphWalker ogw = new TopoGraphWalker(disp);

    // start walking from TS lists
    List<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pCtx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    origCorrCtx.removeWalkedAll();

    abort = origCorrCtx.isAbort();
    if (abort) {
        LOG.info("Abort. Reasons are ...");
        for (String reason : origCorrCtx.getAbortReasons()) {
            LOG.info("-- " + reason);
        }
        return pCtx;
    }
    LOG.info("WL:TC :Flow Analysis version ID dump ");
    //dumpversioninfoofALLRS(origCorrCtx);
    dumpversioninfoofALLRSInfo(origCorrCtx);

    // start detecting TC
    // TODO: change the order of TC detection, take advantage of flow analysis results
    LOG.info("TC: start detection");
    Map<Rule, NodeProcessor> opRules_TC = new LinkedHashMap<Rule, NodeProcessor>();
    opRules_TC.put(new RuleRegExp("R1", JoinOperator.getOperatorName() + "%"), new TCNodeProc());
    opRules_TC.put(new RuleRegExp("R2", FakeOperator.getOperatorName() + "%"), new TCNodeProc());
    // find RS and use the methods of class TCNodeProc
    disp = new DefaultRuleDispatcher(getDefaultOrigProc(), opRules_TC, origCorrCtx);
    ogw = new DefaultGraphWalker(disp);

    topNodes = new ArrayList<Node>();
    HashSet<Node> tempTopNodes = new HashSet<Node>();
    HashSet<Integer> shouldremoveQIDlist = new HashSet<Integer>();
    boolean inlineOptimizer = true;
    if (inlineOptimizer) {
        // remove the topop with dependence flow version
        InterQueryFlowCtx Ictx = ((MultiParseContext) pCtx).getQueryFlowCtx();
        // getmultipctx
        HashMap<Integer, ParseContext> multipctx = ((MultiParseContext) pCtx).getmultipctx();
        for (int gidi = 0; gidi < Ictx.getTabVersion().size(); gidi++) {
            FileSinkOperator fs = Ictx.getWrOp(gidi);
            List<TableScanOperator> tslist = new ArrayList<TableScanOperator>();
            if (Ictx.getRdOpList(gidi) == null) {
                continue;
            }
            tslist.addAll(Ictx.getRdOpList(gidi));
            if (fs != null) {
                // getListoftopop();
                tslist.add((TableScanOperator) (MultiDriver.getOneTSbyFS(fs)));
                for (TableScanOperator ts : tslist) {
                    for (int i = 0; i < multipctx.size(); i++) {
                        if (multipctx.get(i).getTopOps().values().contains(ts)) {
                            tempTopNodes.addAll(multipctx.get(i).getTopOps().values());
                            shouldremoveQIDlist.add(i);
                        }
                    }
                }
            }
        }
        for (Node node : pCtx.getTopOps().values()) {
            if (!tempTopNodes.contains(node)) {
                topNodes.add(node);
            }
        }
    } else {
        topNodes.addAll(pCtx.getTopOps().values());
    }
    MultiDriver.addbottomfakeoperator((MultiParseContext) pCtx, shouldremoveQIDlist);
    ogw.startWalking(topNodes, null);
    deletefakeoperator(((MultiParseContext) pCtx).getFakeOperator());

    // We have finished tree walking (correlation detection).
    // We will first see if we need to abort (the operator tree has not been changed).
    // If not, we will start to transform the operator tree.
    abort = origCorrCtx.isAbort();
    if (abort) {
        LOG.info("Abort. Reasons are ...");
        for (String reason : origCorrCtx.getAbortReasons()) {
            LOG.info("-- " + reason);
        }
    } else {
        // transform the operator tree
        LOG.info("Begin query plan transformation based on intra-query correlations. "
                + origCorrCtx.getCorrelations().size() + " correlation(s) to be applied");
        long start = System.currentTimeMillis();
        /*//////////////////////////////////////////
        // common sub tree test
        LOG.info("======Print correlation(s)' common sub tree =======");
        List<Operator<?>> lastOp = new ArrayList<Operator<?>>();
        for (IntraQueryCorrelation correlation : origCorrCtx.getCorrelations()) {
            Iterator<ReduceSinkOperator> it = correlation.getTcReduceSinkOperators().iterator();
            // ReduceSinkOperator lastrs;
            while (it.hasNext()) {
                lastOp.add(it.next());
            }
            if (lastOp.size() >= 2) {
                for (int i = 0; i < lastOp.size(); i++) {
                    for (int j = i + 1; j < lastOp.size(); j++) {
                        LOG.info("The correlate RS of two optree is : " + lastOp.get(i).toString()
                                + " " + lastOp.get(j).toString());
                        CommonSubtreeDetect cstd = new CommonSubtreeDetect(lastOp.get(i), lastOp.get(j));
                        LOG.info(cstd.toString());
                    }
                }
            }
            lastOp.clear();
        }
        long end = System.currentTimeMillis();
        double timeTaken = (end - start) / 1000.0;
        LOG.info("Run Common subtree detect time taken: " + timeTaken + " seconds");
        // CommonSubtree Detect
        */
        int count = 1;
        for (IntraQueryCorrelation correlation : origCorrCtx.getCorrelations()) {
            LOG.info(count + "th correlation is applied");
            QueryPlanTreeTransformation_TC.applyCorrelation(pCtx, origCorrCtx, correlation);
            // LOG.info(correlation.getTcReduceSinkOperators());
            count++;
        }
    }
    return pCtx;
}
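Both branches above seed the walker's topNodes worklist from pCtx.getTopOps().values(); the inline-optimizer branch additionally skips nodes that were collected into the tempTopNodes HashSet. A reduced sketch of that seed-then-exclude pattern, with strings standing in for operator nodes (all values invented):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class WorklistSeedDemo {
    public static void main(String[] args) {
        Map<String, String> topOps = Map.of("q1", "TS[0]", "q2", "TS[1]", "q3", "TS[2]");

        Set<String> excluded = new HashSet<String>();
        excluded.addAll(List.of("TS[1]")); // nodes already handled elsewhere

        List<String> topNodes = new ArrayList<String>();
        for (String node : topOps.values()) {
            if (!excluded.contains(node)) {
                topNodes.add(node);
            }
        }
        System.out.println(topNodes); // TS[0] and TS[2], in some order
    }
}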
From source file:org.freebxml.omar.common.BindingUtility.java
/**
 * Gets the composed RegistryObjects within specified RegistryObject.
 * Based on scanning rim.xsd for </sequence>.
 *
 * @param ro specifies the RegistryObject whose composed objects are being sought.
 * @param depth specifies depth of fetch. -1 implies fetch all levels. 1 implies fetch immediate composed objects.
 */
public Set getComposedRegistryObjects(RegistryObjectType ro, int depth) {
    HashSet composedObjects = new HashSet();

    if (ro != null) {
        List immediateComposedObjects = new ArrayList();
        immediateComposedObjects.addAll(ro.getClassification());
        immediateComposedObjects.addAll(ro.getExternalIdentifier());

        if (ro instanceof ClassificationNodeType) {
            ClassificationNodeType node = (ClassificationNodeType) ro;
            immediateComposedObjects.addAll(node.getClassificationNode());
        } else if (ro instanceof ClassificationSchemeType) {
            ClassificationSchemeType scheme = (ClassificationSchemeType) ro;
            immediateComposedObjects.addAll(scheme.getClassificationNode());
        } else if (ro instanceof ServiceBindingType) {
            ServiceBindingType binding = (ServiceBindingType) ro;
            immediateComposedObjects.addAll(binding.getSpecificationLink());
        } else if (ro instanceof RegistryPackageType) {
            RegistryPackageType pkg = (RegistryPackageType) ro;
            if (pkg.getRegistryObjectList() != null) {
                immediateComposedObjects.addAll(pkg.getRegistryObjectList().getIdentifiable());
            }
        } else if (ro instanceof ServiceType) {
            ServiceType service = (ServiceType) ro;
            immediateComposedObjects.addAll(service.getServiceBinding());
        }

        --depth;

        //Add each immediate composedObject
        Iterator iter = immediateComposedObjects.iterator();
        while (iter.hasNext()) {
            RegistryObjectType composedObject = (RegistryObjectType) iter.next();
            composedObjects.add(composedObject);

            //If depth != 0 then recurse and add descendant composed objects
            if (depth != 0) {
                composedObjects.addAll(getComposedRegistryObjects(composedObject, depth));
            }
        }
    }
    return composedObjects;
}
From source file:it.cnr.icar.eric.common.BindingUtility.java
/**
 * Gets the composed RegistryObjects within specified RegistryObject.
 * Based on scanning rim.xsd for </sequence>.
 *
 * @param ro specifies the RegistryObject whose composed objects are being sought.
 * @param depth specifies depth of fetch. -1 implies fetch all levels. 1 implies fetch immediate composed objects.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public Set<RegistryObjectType> getComposedRegistryObjects(RegistryObjectType ro, int depth) {
    HashSet<RegistryObjectType> composedObjects = new HashSet<RegistryObjectType>();

    if (ro != null) {
        List immediateComposedObjects = new ArrayList();
        immediateComposedObjects.addAll(ro.getClassification());
        immediateComposedObjects.addAll(ro.getExternalIdentifier());

        if (ro instanceof ClassificationNodeType) {
            ClassificationNodeType node = (ClassificationNodeType) ro;
            immediateComposedObjects.addAll(node.getClassificationNode());
        } else if (ro instanceof ClassificationSchemeType) {
            ClassificationSchemeType scheme = (ClassificationSchemeType) ro;
            immediateComposedObjects.addAll(scheme.getClassificationNode());
        } else if (ro instanceof ServiceBindingType) {
            ServiceBindingType binding = (ServiceBindingType) ro;
            immediateComposedObjects.addAll(binding.getSpecificationLink());
        } else if (ro instanceof RegistryPackageType) {
            RegistryPackageType pkg = (RegistryPackageType) ro;
            if (pkg.getRegistryObjectList() != null) {
                immediateComposedObjects.addAll(getIdentifiableTypeList(pkg.getRegistryObjectList()));
            }
        } else if (ro instanceof ServiceType) {
            ServiceType service = (ServiceType) ro;
            immediateComposedObjects.addAll(service.getServiceBinding());
        }

        --depth;

        //Add each immediate composedObject
        Iterator iter = immediateComposedObjects.iterator();
        while (iter.hasNext()) {
            RegistryObjectType composedObject = (RegistryObjectType) iter.next();
            composedObjects.add(composedObject);

            //If depth != 0 then recurse and add descendant composed objects
            if (depth != 0) {
                composedObjects.addAll(getComposedRegistryObjects(composedObject, depth));
            }
        }
    }
    return composedObjects;
}
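Both BindingUtility variants share the same recursion shape: add each immediate child to the result set, then addAll the recursive result while a depth counter runs down (a depth of -1 never reaches 0 after decrementing, so it means "all levels"). A compact sketch of that depth-limited collection over a hypothetical tree node type:

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ComposedCollectDemo {
    // Hypothetical stand-in for a RegistryObject with composed children.
    static class Node {
        final String id;
        final List<Node> children;
        Node(String id, List<Node> children) { this.id = id; this.children = children; }
    }

    static Set<String> collect(Node node, int depth) {
        Set<String> result = new HashSet<String>();
        --depth;
        for (Node child : node.children) {
            result.add(child.id);
            if (depth != 0) { // starting from -1 this never hits 0, i.e. unlimited depth
                result.addAll(collect(child, depth));
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Node leaf = new Node("c", List.of());
        Node mid = new Node("b", List.of(leaf));
        Node root = new Node("a", List.of(mid));
        System.out.println(collect(root, 1));  // [b]    -- immediate children only
        System.out.println(collect(root, -1)); // [b, c] -- all levels
    }
}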
From source file:org.apache.lens.server.query.QueryExecutionServiceImpl.java
@Override
public List<QueryHandle> getAllQueries(LensSessionHandle sessionHandle, String states, String userName,
        String driver, String queryName, String fromDate, String toDate) throws LensException {
    long fromTime = -1;
    long toTime = Long.MAX_VALUE;
    Date now = new Date();
    if (fromDate != null) {
        fromTime = DateUtil.resolveDate(fromDate, now).getTime();
    }
    if (toDate != null) {
        toTime = DateUtil.resolveDate(toDate, now).getTime();
    }
    validateTimeRange(fromTime, toTime);
    try {
        acquire(sessionHandle);
        if (StringUtils.isBlank(userName)) {
            userName = getSession(sessionHandle).getLoggedInUser();
        }
        Set<Status> statuses = getStatuses(states);
        List<QueryHandle> result = getQueriesInMemory(statuses, userName, driver, queryName, fromTime, toTime);
        List<QueryHandle> persistedQueries = getPersistedQueryHandles(userName, driver, queryName, fromTime,
                toTime, statuses);
        // Merge in-memory and persisted handles; the HashSet drops duplicates.
        HashSet<QueryHandle> deduplicatedResults = new HashSet<>(result);
        deduplicatedResults.addAll(persistedQueries);
        return new ArrayList<>(deduplicatedResults);
    } finally {
        release(sessionHandle);
    }
}
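The HashSet here merges the in-memory and persisted handle lists while dropping handles that appear in both (assuming QueryHandle implements equals/hashCode consistently, which the deduplication relies on). If ordering mattered, a LinkedHashSet would deduplicate the same way while preserving insertion order. The merge in isolation, with strings standing in for query handles:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

public class MergeDedupDemo {
    public static void main(String[] args) {
        List<String> inMemory = List.of("h1", "h2");
        List<String> persisted = List.of("h2", "h3");

        HashSet<String> merged = new HashSet<>(inMemory); // seed with one list
        merged.addAll(persisted);                         // union in the other; "h2" is kept once

        List<String> result = new ArrayList<>(merged);
        System.out.println(result.size()); // 3
    }
}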
From source file:com.emc.storageos.volumecontroller.impl.plugins.IsilonCommunicationInterface.java
private void discoverUmanagedFileSystems(AccessProfile profile) throws BaseCollectionException {
    _log.debug("Access Profile Details : IpAddress : PortNumber : {}, namespace : {}",
            profile.getIpAddress() + profile.getPortNumber(), profile.getnamespace());
    URI storageSystemId = profile.getSystemId();
    StorageSystem storageSystem = _dbClient.queryObject(StorageSystem.class, storageSystemId);
    if (null == storageSystem) {
        return;
    }
    List<UnManagedFileSystem> unManagedFileSystems = new ArrayList<UnManagedFileSystem>();
    List<UnManagedFileSystem> existingUnManagedFileSystems = new ArrayList<UnManagedFileSystem>();
    Set<URI> allDiscoveredUnManagedFileSystems = new HashSet<URI>();
    String detailedStatusMessage = "Discovery of Isilon Unmanaged FileSystem started";
    long unmanagedFsCount = 0;
    try {
        IsilonApi isilonApi = getIsilonDevice(storageSystem);
        URIQueryResultList storagePoolURIs = new URIQueryResultList();
        _dbClient.queryByConstraint(
                ContainmentConstraint.Factory.getStorageDeviceStoragePoolConstraint(storageSystem.getId()),
                storagePoolURIs);
        ArrayList<StoragePool> pools = new ArrayList();
        Iterator<URI> poolsItr = storagePoolURIs.iterator();
        while (poolsItr.hasNext()) {
            URI storagePoolURI = poolsItr.next();
            StoragePool storagePool = _dbClient.queryObject(StoragePool.class, storagePoolURI);
            if (storagePool != null && !storagePool.getInactive()) {
                pools.add(storagePool);
            }
        }
        StoragePool storagePool = null;
        if (pools != null && !pools.isEmpty()) {
            storagePool = pools.get(0);
        }
        StoragePort storagePort = getStoragePortPool(storageSystem);
        String resumeToken = null;
        int totalIsilonFSDiscovered = 0;
        // get the associated storage port for vnas Server
        List<IsilonAccessZone> isilonAccessZones = isilonApi.getAccessZones(null);
        Map<String, NASServer> nasServers = getNASServer(storageSystem, isilonAccessZones);
        setDiscPathForAccess(nasServers);
        // Get All FileShare
        HashMap<String, HashSet<String>> allSMBShares = discoverAllSMBShares(storageSystem, isilonAccessZones);
        List<UnManagedCifsShareACL> unManagedCifsShareACLList = new ArrayList<UnManagedCifsShareACL>();
        List<UnManagedCifsShareACL> oldunManagedCifsShareACLList = new ArrayList<UnManagedCifsShareACL>();
        HashMap<String, HashSet<Integer>> expMap = discoverAllExports(storageSystem, isilonAccessZones);
        List<UnManagedNFSShareACL> unManagedNfsShareACLList = new ArrayList<UnManagedNFSShareACL>();
        List<UnManagedNFSShareACL> oldunManagedNfsShareACLList = new ArrayList<UnManagedNFSShareACL>();
        List<UnManagedFileExportRule> newUnManagedExportRules = new ArrayList<UnManagedFileExportRule>();
        List<FileShare> discoveredFS = new ArrayList<FileShare>();
        do {
            IsilonApi.IsilonList<FileShare> discoveredIsilonFS = discoverAllFileSystem(storageSystem, resumeToken);
            resumeToken = discoveredIsilonFS.getToken();
            discoveredFS = discoveredIsilonFS.getList();
            totalIsilonFSDiscovered += discoveredFS.size();
            unManagedFileSystems = new ArrayList<UnManagedFileSystem>();
            existingUnManagedFileSystems = new ArrayList<UnManagedFileSystem>();
            int newFileSystemsCount = 0;
            int existingFileSystemsCount = 0;
            HashMap<String, HashMap<String, HashSet<Integer>>> exportMapTree = getExportsWithSubDirForFS(
                    discoveredFS, expMap);
            for (FileShare fs : discoveredFS) {
                if (!checkStorageFileSystemExistsInDB(fs.getNativeGuid())) {
                    // Create UnManaged FS
                    String fsUnManagedFsNativeGuid = NativeGUIDGenerator
                            .generateNativeGuidForPreExistingFileSystem(storageSystem.getSystemType(),
                                    storageSystem.getSerialNumber(), fs.getNativeId());
                    String fsPathName = fs.getPath();
                    UnManagedFileSystem unManagedFs = checkUnManagedFileSystemExistsInDB(fsUnManagedFsNativeGuid);
                    // get the matched vNAS Server
                    NASServer nasServer = getMatchedNASServer(nasServers, fsPathName);
                    if (nasServer != null) {
                        _log.info("fs path {} and nas server details {}", fs.getPath(), nasServer.toString());
                        if (nasServer.getStoragePorts() != null && !nasServer.getStoragePorts().isEmpty()) {
                            storagePort = _dbClient.queryObject(StoragePort.class,
                                    URI.create(nasServer.getStoragePorts().iterator().next()));
                        }
                    } else {
                        _log.info("fs path {} and vnas server not found", fs.getPath());
                        continue; // Skip further ingestion steps on this file share & move to next file share
                    }
                    boolean alreadyExist = unManagedFs == null ? false : true;
                    unManagedFs = createUnManagedFileSystem(unManagedFs, fsUnManagedFsNativeGuid, storageSystem,
                            storagePool, nasServer, fs);
                    /*
                     * Get all file exports with given file system
                     */
                    HashSet<String> fsExportPaths = new HashSet<String>();
                    for (Entry<String, HashSet<Integer>> entry : expMap.entrySet()) {
                        if (entry.getKey().equalsIgnoreCase(fsPathName)
                                || entry.getKey().startsWith(fsPathName + "/")) {
                            _log.info("filesystem path : {} and export path: {}", fs.getPath(), entry.getKey());
                            fsExportPaths.add(entry.getKey());
                        }
                    }
                    List<UnManagedNFSShareACL> tempUnManagedNfsShareACL = new ArrayList<UnManagedNFSShareACL>();
                    UnManagedNFSShareACL existingNfsACL = null;
                    getUnmanagedNfsShareACL(unManagedFs, tempUnManagedNfsShareACL, storagePort, fs, isilonApi,
                            fsExportPaths);
                    if (tempUnManagedNfsShareACL != null && !tempUnManagedNfsShareACL.isEmpty()) {
                        unManagedFs.setHasNFSAcl(true);
                    }
                    for (UnManagedNFSShareACL unManagedNFSACL : tempUnManagedNfsShareACL) {
                        _log.info("Unmanaged File share acls : {}", unManagedNFSACL);
                        String fsShareNativeId = unManagedNFSACL.getFileSystemNfsACLIndex();
                        _log.info("UMFS Share ACL index {}", fsShareNativeId);
                        String fsUnManagedFileShareNativeGuid = NativeGUIDGenerator
                                .generateNativeGuidForPreExistingFileShare(storageSystem, fsShareNativeId);
                        _log.info("Native GUID {}", fsUnManagedFileShareNativeGuid);
                        // set native guid, so each entry unique
                        unManagedNFSACL.setNativeGuid(fsUnManagedFileShareNativeGuid);
                        // Check whether the NFS share ACL was present in ViPR DB.
                        existingNfsACL = checkUnManagedFsNfssACLExistsInDB(_dbClient, unManagedNFSACL.getNativeGuid());
                        if (existingNfsACL == null) {
                            unManagedNfsShareACLList.add(unManagedNFSACL);
                        } else {
                            unManagedNfsShareACLList.add(unManagedNFSACL);
                            // delete the existing acl
                            existingNfsACL.setInactive(true);
                            oldunManagedNfsShareACLList.add(existingNfsACL);
                        }
                    }
                    // get all shares for given file system path
                    HashSet<String> smbShareHashSet = new HashSet<String>();
                    for (Entry<String, HashSet<String>> entry : allSMBShares.entrySet()) {
                        if (entry.getKey().equalsIgnoreCase(fsPathName)
                                || entry.getKey().startsWith(fsPathName + "/")) {
                            _log.info("filesystem path : {} and share path: {}", fs.getPath(), entry.getKey());
                            smbShareHashSet.addAll(entry.getValue());
                        }
                    }
                    _log.info("File System {} has shares and their size is {}", unManagedFs.getId(),
                            smbShareHashSet.size());
                    if (!smbShareHashSet.isEmpty()) {
                        List<UnManagedCifsShareACL> umfsCifsShareACL = new ArrayList<UnManagedCifsShareACL>();
                        // Set UnManaged ACL and also set the shares in fs object
                        setUnmanagedCifsShareACL(unManagedFs, smbShareHashSet, umfsCifsShareACL, storagePort,
                                fs.getName(), nasServer.getNasName(), isilonApi);
                        if (!umfsCifsShareACL.isEmpty()) {
                            for (UnManagedCifsShareACL unManagedCifsShareACL : umfsCifsShareACL) {
                                _log.info("Unmanaged File share acl : {}", unManagedCifsShareACL);
                                String fsShareNativeId = unManagedCifsShareACL.getFileSystemShareACLIndex();
                                _log.info("UMFS Share ACL index {}", fsShareNativeId);
                                String fsUnManagedFileShareNativeGuid = NativeGUIDGenerator
                                        .generateNativeGuidForPreExistingFileShare(storageSystem, fsShareNativeId);
                                _log.info("Native GUID {}", fsUnManagedFileShareNativeGuid);
                                // set native guid, so each entry unique
                                unManagedCifsShareACL.setNativeGuid(fsUnManagedFileShareNativeGuid);
                                // Check whether the CIFS share ACL was present in ViPR DB.
                                UnManagedCifsShareACL existingCifsShareACL = checkUnManagedFsCifsACLExistsInDB(
                                        _dbClient, unManagedCifsShareACL.getNativeGuid());
                                if (existingCifsShareACL == null) {
                                    unManagedCifsShareACLList.add(unManagedCifsShareACL);
                                } else {
                                    unManagedCifsShareACLList.add(unManagedCifsShareACL);
                                    // delete the existing acl
                                    existingCifsShareACL.setInactive(true);
                                    oldunManagedCifsShareACLList.add(existingCifsShareACL);
                                }
                            }
                            _log.info("UMFS ID {} - Size of ACL of all CIFS shares is {}", unManagedFs.getId(),
                                    umfsCifsShareACL.size());
                        }
                    }
                    // Get Export info
                    _log.info("Getting export for {}", fs.getPath());
                    HashMap<String, HashSet<Integer>> expIdMap = exportMapTree.get(fs.getPath());
                    if (expIdMap == null) {
                        expIdMap = new HashMap<>();
                    }
                    List<UnManagedFileExportRule> unManagedExportRules = new ArrayList<UnManagedFileExportRule>();
                    if (!expIdMap.keySet().isEmpty()) {
                        boolean validExportsFound = getUnManagedFSExportMap(unManagedFs, expIdMap, storagePort,
                                fs.getPath(), nasServer.getNasName(), isilonApi);
                        if (!validExportsFound) {
                            // Invalid exports so ignore the FS
                            String invalidExports = "";
                            for (String path : expIdMap.keySet()) {
                                invalidExports += expIdMap.get(path);
                            }
                            _log.info("FS {} is ignored because it has conflicting exports {}", fs.getPath(),
                                    invalidExports);
                            unManagedFs.setInactive(true);
                            // Persists the inactive state before picking next UMFS!!!
                            _dbClient.persistObject(unManagedFs);
                            continue;
                        }
                        List<UnManagedFileExportRule> validExportRules = getUnManagedFSExportRules(unManagedFs,
                                expIdMap, storagePort, fs.getPath(), nasServer.getNasName(), isilonApi);
                        _log.info("Number of exports discovered for file system {} is {}", unManagedFs.getId(),
                                validExportRules.size());
                        UnManagedFileExportRule existingRule = null;
                        for (UnManagedFileExportRule dbExportRule : validExportRules) {
                            _log.info("Un Managed File Export Rule : {}", dbExportRule);
                            String fsExportRulenativeId = dbExportRule.getFsExportIndex();
                            _log.info("Native Id using to build Native Guid {}", fsExportRulenativeId);
                            String fsUnManagedFileExportRuleNativeGuid = NativeGUIDGenerator
                                    .generateNativeGuidForPreExistingFileExportRule(storageSystem,
                                            fsExportRulenativeId);
                            _log.info("Native GUID {}", fsUnManagedFileExportRuleNativeGuid);
                            dbExportRule.setNativeGuid(fsUnManagedFileExportRuleNativeGuid);
                            dbExportRule.setFileSystemId(unManagedFs.getId());
                            dbExportRule.setId(URIUtil.createId(UnManagedFileExportRule.class));
                            existingRule = checkUnManagedFsExportRuleExistsInDB(_dbClient,
                                    dbExportRule.getNativeGuid());
                            if (null == existingRule) {
                                unManagedExportRules.add(dbExportRule);
                            } else {
                                existingRule.setInactive(true);
                                _dbClient.persistObject(existingRule);
                                unManagedExportRules.add(dbExportRule);
                            }
                        }
                        // Validate Rules Compatible with ViPR - Same rules should
                        // apply as per API SVC Validations.
                        if (!unManagedExportRules.isEmpty()) {
                            _log.info("Validating rules success for export {}", fs.getName());
                            newUnManagedExportRules.addAll(unManagedExportRules);
                            unManagedFs.setHasExports(true);
                            _log.info("File System {} has Exports and their size is {}", unManagedFs.getId(),
                                    newUnManagedExportRules.size());
                        }
                    }
                    if (unManagedFs.getHasExports() || unManagedFs.getHasShares()) {
                        _log.info("FS {} is having exports/shares", fs.getPath());
                        unManagedFs.putFileSystemCharacterstics(
                                UnManagedFileSystem.SupportedFileSystemCharacterstics.IS_FILESYSTEM_EXPORTED
                                        .toString(), TRUE);
                    } else {
                        // NO exports found
                        _log.info("FS {} is ignored because it doesnt have exports and shares", fs.getPath());
                    }
                    if (alreadyExist) {
                        existingUnManagedFileSystems.add(unManagedFs);
                        existingFileSystemsCount++;
                    } else {
                        unManagedFileSystems.add(unManagedFs);
                        newFileSystemsCount++;
                    }
                    if (!newUnManagedExportRules.isEmpty()) {
                        _log.info("Saving Number of UnManagedFileExportRule(s) {}", newUnManagedExportRules.size());
                        _partitionManager.updateInBatches(newUnManagedExportRules, Constants.DEFAULT_PARTITION_SIZE,
                                _dbClient, UNMANAGED_EXPORT_RULE);
                        newUnManagedExportRules.clear();
                    }
                    // save ACLs in db
                    if (!unManagedCifsShareACLList.isEmpty()
                            && unManagedCifsShareACLList.size() >= MAX_UMFS_RECORD_SIZE) {
                        _log.info("Saving Number of UnManagedCifsShareACL(s) {}", unManagedCifsShareACLList.size());
                        _dbClient.createObject(unManagedCifsShareACLList);
                        unManagedCifsShareACLList.clear();
                    }
                    // save old acls
                    if (!oldunManagedCifsShareACLList.isEmpty()
                            && oldunManagedCifsShareACLList.size() >= MAX_UMFS_RECORD_SIZE) {
                        _log.info("Saving Number of UnManagedFileExportRule(s) {}",
                                oldunManagedCifsShareACLList.size());
                        _dbClient.persistObject(oldunManagedCifsShareACLList);
                        oldunManagedCifsShareACLList.clear();
                    }
                    allDiscoveredUnManagedFileSystems.add(unManagedFs.getId());
                    /**
                     * Persist 200 objects and clear them to avoid memory issue
                     */
                    validateListSizeLimitAndPersist(unManagedFileSystems, existingUnManagedFileSystems,
                            Constants.DEFAULT_PARTITION_SIZE * 2);
                }
            }
            _log.info("New unmanaged Isilon file systems count: {}", newFileSystemsCount);
            _log.info("Update unmanaged Isilon file systems count: {}", existingFileSystemsCount);
            if (!unManagedFileSystems.isEmpty()) {
                _dbClient.createObject(unManagedFileSystems);
            }
            if (!existingUnManagedFileSystems.isEmpty()) {
                _dbClient.updateAndReindexObject(existingUnManagedFileSystems);
            }
        } while (resumeToken != null);
        // save ACLs in db
        if (!unManagedCifsShareACLList.isEmpty()) {
            _log.info("Saving Number of UnManagedCifsShareACL(s) {}", unManagedCifsShareACLList.size());
            _dbClient.createObject(unManagedCifsShareACLList);
            unManagedCifsShareACLList.clear();
        }
        // save NFS ACLs in db
        if (!unManagedNfsShareACLList.isEmpty()) {
            _log.info("Saving Number of UnManagedNfsShareACL(s) {}", unManagedNfsShareACLList.size());
            _dbClient.createObject(unManagedNfsShareACLList);
            unManagedNfsShareACLList.clear();
        }
        // save old acls
        if (!oldunManagedCifsShareACLList.isEmpty()) {
            _log.info("Saving Number of UnManagedFileExportRule(s) {}", oldunManagedCifsShareACLList.size());
            _dbClient.persistObject(oldunManagedCifsShareACLList);
            oldunManagedCifsShareACLList.clear();
        }
        // save old acls
        if (!oldunManagedNfsShareACLList.isEmpty()) {
            _log.info("Saving Number of NFS UnManagedFileExportRule(s) {}", oldunManagedNfsShareACLList.size());
            _dbClient.updateObject(oldunManagedNfsShareACLList);
            oldunManagedNfsShareACLList.clear();
        }
        _log.info("Discovered {} Isilon file systems.", totalIsilonFSDiscovered);
        // Process those active unmanaged fs objects available in database but not in newly discovered items, to
        // mark them inactive.
        markUnManagedFSObjectsInActive(storageSystem, allDiscoveredUnManagedFileSystems);
        // discovery succeeds
        detailedStatusMessage = String.format(
                "Discovery completed successfully for Isilon: %s; new unmanaged file systems count: %s",
                storageSystemId.toString(), unmanagedFsCount);
        _log.info(detailedStatusMessage);
    } catch (Exception e) {
        if (storageSystem != null) {
            cleanupDiscovery(storageSystem);
        }
        detailedStatusMessage = String.format("Discovery failed for Isilon %s because %s",
                storageSystemId.toString(), e.getLocalizedMessage());
        _log.error(detailedStatusMessage, e);
        throw new IsilonCollectionException(detailedStatusMessage);
    } finally {
        if (storageSystem != null) {
            try {
                // set detailed message
                storageSystem.setLastDiscoveryStatusMessage(detailedStatusMessage);
                _dbClient.persistObject(storageSystem);
            } catch (Exception ex) {
                _log.error("Error while persisting object to DB", ex);
            }
        }
    }
}
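Among the many collections in this discovery loop, the addAll calls on smbShareHashSet and fsExportPaths follow one pattern: gather the values of every map entry whose key is the file system path itself or a subpath of it. Reduced to a sketch (paths and share names invented):

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class PrefixCollectDemo {
    public static void main(String[] args) {
        Map<String, Set<String>> allShares = Map.of(
                "/ifs/fs1", Set.of("share1"),
                "/ifs/fs1/sub", Set.of("share2"),
                "/ifs/fs2", Set.of("share3"));
        String fsPath = "/ifs/fs1";

        Set<String> matched = new HashSet<String>();
        for (Map.Entry<String, Set<String>> entry : allShares.entrySet()) {
            if (entry.getKey().equalsIgnoreCase(fsPath) || entry.getKey().startsWith(fsPath + "/")) {
                matched.addAll(entry.getValue()); // merge this path's shares into the result
            }
        }
        System.out.println(matched); // share1 and share2, in some order; /ifs/fs2 is excluded
    }
}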
From source file:net.cbtltd.rest.streamline.A_Handler.java
@SuppressWarnings("unchecked")
@Override
public void readProducts() {
    String message = "readProducts Streamline (Altpartyid:" + this.getAltpartyid() + ")";
    LOG.debug(message);
    Date version = new Date();
    String responseAllProperties;
    StringBuilder sbNotKnowLocation = new StringBuilder();
    final SqlSession sqlSession = RazorServer.openSession();
    try {
        responseAllProperties = createXMLRequestToStreamline(sqlSession, "GetPropertyList", "");
        SAXBuilder builder = new SAXBuilder();
        Document document = (Document) builder.build(new StringReader(responseAllProperties));
        Element rootNode = document.getRootElement();
        List<Element> properties = rootNode.getChild("data").getChildren("property");
        for (Element property : properties) {
            try {
                String altId = property.getChildText("id");
                // LOG.debug("Current AltId=" + altId);
                String countryISO = property.getChildText("country_name"); // data for finding location
                String state = property.getChildText("state_name");
                String city = property.getChildText("city");
                String latitudeLocationStr = property.getChildText("location_latitude");
                String longitudeLocationStr = property.getChildText("location_longitude");
                // Streamline provides two sets of coordinates: one for the location (city) and one
                // for the property. These coordinates are used for finding locations, so first use
                // the location coordinates; if they do not exist, fall back to the property's.
                if (StringUtils.isEmpty(latitudeLocationStr) && StringUtils.isEmpty(longitudeLocationStr)) {
                    latitudeLocationStr = property.getChildText("latitude");
                    longitudeLocationStr = property.getChildText("longitude");
                }
                Product product = PartnerService.getProduct(sqlSession, getAltpartyid(), altId);
                if (product == null) {
                    continue;
                }
                if (LOG.isDebugEnabled())
                    LOG.debug("Processing property : " + product.getName());
                Integer roomNumber = 0;
                Integer bathroomNumber = 0;
                Integer maxPersonTotalNumber = 0;
                Integer childNumber = 0;
                Integer adultsNumber = 0;
                Double latitude = null;
                Double longitude = null;
                try {
                    roomNumber = Integer.parseInt(property.getChildText("bedrooms_number"));
                } catch (Exception parseExc) {
                    LOG.error("Parse exception: " + parseExc.getMessage());
                }
                try {
                    bathroomNumber = Integer.parseInt(property.getChildText("bathrooms_number"));
                } catch (Exception parseExc) {
                    LOG.error("Parse exception: " + parseExc.getMessage());
                }
                try {
                    maxPersonTotalNumber = Integer.parseInt(property.getChildText("max_occupants"));
                } catch (Exception parseExc) {
                    LOG.error("Parse exception: " + parseExc.getMessage());
                }
                try {
                    adultsNumber = Integer.parseInt(property.getChildText("max_adults"));
                } catch (Exception parseExc) {
                    LOG.error("Parse exception: " + parseExc.getMessage());
                }
                childNumber = maxPersonTotalNumber - adultsNumber;
                if (childNumber < 0) {
                    childNumber = 0;
                }
                try {
                    latitude = Double.valueOf(property.getChildText("latitude"));
                } catch (Exception parseExc) {
                }
                try {
                    longitude = Double.valueOf(property.getChildText("longitude"));
                } catch (Exception parseExc) {
                }
                StringBuilder physicalAddress = new StringBuilder();
                StringBuilder physicalAddressForLocation = new StringBuilder();
                if (StringUtils.isNotEmpty(property.getChildText("address"))) {
                    physicalAddress.append(property.getChildText("address")).append("\n");
                    physicalAddressForLocation.append(property.getChildText("address")).append(", ");
                }
                if (StringUtils.isNotEmpty(property.getChildText("city"))) {
                    physicalAddress.append(property.getChildText("city")).append("\n");
                    physicalAddressForLocation.append(property.getChildText("city")).append(", ");
                }
                if (StringUtils.isNotEmpty(property.getChildText("state_description"))) {
                    physicalAddress.append(property.getChildText("state_description")).append("\n");
                    physicalAddressForLocation.append(property.getChildText("state_description")).append(", ");
                }
                if (StringUtils.isNotEmpty(property.getChildText("country_name"))) {
                    physicalAddress.append(property.getChildText("country_name")).append("\n");
                    physicalAddressForLocation.append(property.getChildText("country_name"));
                }
                // Find latitude and longitude from the physical address via the Google Location
                // service when the API response did not include them, and only when there is a
                // reason to look them up: the address changed, or the product has no coordinates.
                if ((latitude == null || longitude == null)
                        && (!physicalAddress.toString().equalsIgnoreCase(product.getPhysicaladdress())
                                || product.getLatitude() == null || product.getLongitude() == null)
                        && StringUtils.isNotEmpty(physicalAddress.toString())) {
                    Location propertyLocation = GoogleLocationProcessor
                            .getGoogleLocation(physicalAddressForLocation.toString());
                    if (propertyLocation != null) {
                        if (latitude == null) {
                            latitude = propertyLocation.getLatitude();
                        }
                        if (longitude == null) {
                            longitude = propertyLocation.getLongitude();
                        }
                    }
                }
                String propertyName = property.getChildText("name");
                // if (property.getChildText("seo_title") != null && !property.getChildText("seo_title").equalsIgnoreCase("")) {
                //     propertyName = property.getChildText("seo_title");
                // } else {
                if (property.getChildText("home_type") != null
                        && !property.getChildText("home_type").equalsIgnoreCase("")) {
                    propertyName += ", " + property.getChildText("home_type");
                }
                if (city != null && !city.equalsIgnoreCase("")) {
                    propertyName += " at " + city;
                }
                if (property.getChildText("view_name") != null
                        && !property.getChildText("view_name").equalsIgnoreCase("")) {
                    propertyName += ", with " + property.getChildText("view_name");
                }
                // }
                if (propertyName.length() > 99) {
                    propertyName = propertyName.substring(0, 99);
                }
                product.setCurrency(CURRENCY_IN_STREAMLINE);
                // product.setName(property.getChildText("name") + ", " + property.getChildText("seo_title"));
                product.setName(propertyName);
                product.setUnit(Unit.DAY);
                product.setRoom(roomNumber);
                product.setBathroom(bathroomNumber);
                product.setQuantity(1);
                product.setPerson(adultsNumber);
                product.setChild(0);
                // product.setChild(childNumber);
                product.setRank(0.0);
                product.setPhysicaladdress(physicalAddress.toString());
                if (product.getLocationid() == null) {
                    Location location = null;
                    if (locationMap.get(city + state + countryISO) != null) {
                        location = locationMap.get(city + state + countryISO);
                    } else if (StringUtils.isNotEmpty(latitudeLocationStr)
                            && StringUtils.isNotEmpty(longitudeLocationStr)) {
                        location = PartnerService.getLocation(sqlSession, city, state, countryISO,
                                Double.valueOf(latitudeLocationStr), Double.valueOf(longitudeLocationStr));
                        locationMap.put(city + state + countryISO, location);
                    } else {
                        location = PartnerService.getLocation(sqlSession, city, state, countryISO);
                        locationMap.put(city + state + countryISO, location);
                    }
                    if (location != null) {
                        product.setLocationid(location.getId());
                    } else {
                        product.setState(Product.SUSPENDED);
                        sbNotKnowLocation.append("\n").append("Streamline property: " + altId + " country: "
                                + countryISO + " city: " + city);
                    }
                }
                product.setLatitude(latitude);
                product.setLongitude(longitude);
                product.setWebaddress(getWebaddress());
                product.setCommission(getCommission());
                product.setDiscount(getDiscount());
                product.setRating(5);
                product.setAltitude(0.0);
                product.setVersion(version);
                /*
                 * not used: `Options` `Tax` `Code` `Unspsc` `Servicedays` `Toilet` `Infant` `Baby`
                 * `Linenchange` `Refresh` `OwnerDiscount` `DynamicPricingEnabled` `AssignedtoManager`
                 * `CleaningFee` `SecurityDeposit`
                 */
                // description build
                StringBuilder description = new StringBuilder();
                description.append(property.getChildText("seo_description")).append("\n");
                description.append(property.getChildText("short_description")).append("\n");
                description.append(property.getChildText("description")).append("\n");
                ArrayList<String> attributes = new ArrayList<String>();
                addType(attributes, property.getChildText("lodging_type_id"));
                // attributes
                String otherReqParamsAttributes = "<unit_id>" + altId + "</unit_id>";
                String responseAttr = createXMLRequestToStreamline(sqlSession, "GetPropertyAmenities",
                        otherReqParamsAttributes);
                builder = new SAXBuilder();
                Document documentAttr = (Document) builder.build(new StringReader(responseAttr));
                Element rootNodeAttributes = documentAttr.getRootElement();
                List<Element> propertyAttributes = rootNodeAttributes.getChild("data").getChildren("amenity");
                for (Element amenity : propertyAttributes) {
                    addPropertyAttribute(attributes, amenity.getChildText("amenity_name"));
                }
                // removing duplicate values from attributes
                HashSet<String> attributeHashSet = new HashSet<String>();
                attributeHashSet.addAll(attributes);
                attributes.clear();
                attributes.addAll(attributeHashSet);
                sqlSession.getMapper(ProductMapper.class).update(product);
                product.setPublicText(new Text(product.getPublicId(), product.getName(), Text.Type.HTML,
                        new Date(), description.toString(), Language.EN));
                TextService.update(sqlSession, product.getTexts());
                RelationService.replace(sqlSession, Relation.PRODUCT_VALUE, product.getId(), product.getValues());
                RelationService.create(sqlSession, Relation.PRODUCT_ATTRIBUTE, product.getId(), attributes);
                RelationService.removeDeprecatedData(sqlSession, Relation.PRODUCT_ATTRIBUTE, product.getId(),
                        attributes);
                sqlSession.commit();
            } catch (Throwable x) {
                sqlSession.rollback();
                LOG.error(x.getMessage());
                x.printStackTrace();
            }
        }
        // de-duplicate (and optionally print) the attributes that were not found
        HashSet<String> hs = new HashSet<String>();
        hs.addAll(PROPERTY_ATTRIBUTES_NOT_FOUND);
        PROPERTY_ATTRIBUTES_NOT_FOUND.clear();
        PROPERTY_ATTRIBUTES_NOT_FOUND.addAll(hs);
        // LOG.debug("Streamline attributes not found (Altpartyid:" + this.getAltpartyid() + "): ");
        // for (String tempAttr : PROPERTY_ATTRIBUTES_NOT_FOUND) {
        //     System.out.println(":::" + tempAttr + ":::");
        // }
        // cancel products which were not updated in this run
        Product action = new Product();
        action.setAltpartyid(getAltpartyid());
        action.setState(Product.CREATED);
        action.setVersion(version);
        sqlSession.getMapper(ProductMapper.class).cancelversion(action);
        sqlSession.commit();
    } catch (Throwable x) {
        sqlSession.rollback();
        LOG.error(x.getMessage());
        x.printStackTrace();
    } finally {
        sqlSession.close();
    }
    MonitorService.monitor(message, version);
}
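The attributeHashSet.addAll / clear / addAll sequence above is the classic pre-streams idiom for deduplicating a list in place; note that it does not preserve the list's original order (a LinkedHashSet would). The idiom in isolation:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

public class DedupInPlaceDemo {
    public static void main(String[] args) {
        List<String> attributes = new ArrayList<>(List.of("Pool", "WiFi", "Pool", "Parking"));

        HashSet<String> unique = new HashSet<String>();
        unique.addAll(attributes); // duplicates collapse here
        attributes.clear();
        attributes.addAll(unique); // refill the list; order is no longer guaranteed

        System.out.println(attributes.size()); // 3
    }
}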
From source file:com.android.launcher2.Workspace.java
void removeItems(final ArrayList<String> packages) {
    final HashSet<String> packageNames = new HashSet<String>();
    packageNames.addAll(packages);

    ArrayList<CellLayout> cellLayouts = getWorkspaceAndHotseatCellLayouts();
    for (final CellLayout layoutParent : cellLayouts) {
        final ViewGroup layout = layoutParent.getShortcutsAndWidgets();

        // Avoid ANRs by treating each screen separately
        post(new Runnable() {
            public void run() {
                final ArrayList<View> childrenToRemove = new ArrayList<View>();
                childrenToRemove.clear();

                int childCount = layout.getChildCount();
                for (int j = 0; j < childCount; j++) {
                    final View view = layout.getChildAt(j);
                    Object tag = view.getTag();

                    if (tag instanceof ShortcutInfo) {
                        final ShortcutInfo info = (ShortcutInfo) tag;
                        final Intent intent = info.intent;
                        final ComponentName name = intent.getComponent();

                        if (name != null) {
                            if (packageNames.contains(name.getPackageName())) {
                                LauncherModel.deleteItemFromDatabase(mLauncher, info);
                                childrenToRemove.add(view);
                            }
                        }
                    } else if (tag instanceof FolderInfo) {
                        final FolderInfo info = (FolderInfo) tag;
                        final ArrayList<ShortcutInfo> contents = info.contents;
                        final int contentsCount = contents.size();
                        final ArrayList<ShortcutInfo> appsToRemoveFromFolder = new ArrayList<ShortcutInfo>();

                        for (int k = 0; k < contentsCount; k++) {
                            final ShortcutInfo appInfo = contents.get(k);
                            final Intent intent = appInfo.intent;
                            final ComponentName name = intent.getComponent();

                            if (name != null) {
                                if (packageNames.contains(name.getPackageName())) {
                                    appsToRemoveFromFolder.add(appInfo);
                                }
                            }
                        }
                        for (ShortcutInfo item : appsToRemoveFromFolder) {
                            info.remove(item);
                            LauncherModel.deleteItemFromDatabase(mLauncher, item);
                        }
                    } else if (tag instanceof LauncherAppWidgetInfo) {
                        final LauncherAppWidgetInfo info = (LauncherAppWidgetInfo) tag;
                        final ComponentName provider = info.providerName;
                        if (provider != null) {
                            if (packageNames.contains(provider.getPackageName())) {
                                LauncherModel.deleteItemFromDatabase(mLauncher, info);
                                childrenToRemove.add(view);
                            }
                        }
                    }
                }

                childCount = childrenToRemove.size();
                for (int j = 0; j < childCount; j++) {
                    View child = childrenToRemove.get(j);
                    // Note: We can not remove the view directly from CellLayoutChildren as this
                    // does not re-mark the spaces as unoccupied.
                    layoutParent.removeViewInLayout(child);
                    if (child instanceof DropTarget) {
                        mDragController.removeDropTarget((DropTarget) child);
                    }
                }
                if (childCount > 0) {
                    layout.requestLayout();
                    layout.invalidate();
                }
            }
        });
    }

    // Clean up new-apps animation list
    final Context context = getContext();
    post(new Runnable() {
        @Override
        public void run() {
            String spKey = LauncherApplication.getSharedPreferencesKey();
            SharedPreferences sp = context.getSharedPreferences(spKey, Context.MODE_PRIVATE);
            Set<String> newApps = sp.getStringSet(InstallShortcutReceiver.NEW_APPS_LIST_KEY, null);
            // Remove all queued items that match the same package
            if (newApps != null) {
                synchronized (newApps) {
                    Iterator<String> iter = newApps.iterator();
                    while (iter.hasNext()) {
                        try {
                            Intent intent = Intent.parseUri(iter.next(), 0);
                            String pn = ItemInfo.getPackageName(intent);
                            if (packageNames.contains(pn)) {
                                iter.remove();
                            }
                            // It is possible that we've queued an item to be loaded, yet it has
                            // not been added to the workspace, so remove those items as well.
                            ArrayList<ItemInfo> shortcuts;
                            shortcuts = LauncherModel.getWorkspaceShortcutItemInfosWithIntent(intent);
                            for (ItemInfo info : shortcuts) {
                                LauncherModel.deleteItemFromDatabase(context, info);
                            }
                        } catch (URISyntaxException e) {
                        }
                    }
                }
            }
        }
    });
}
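Here packageNames.addAll(packages) converts the incoming ArrayList to a HashSet once up front, so the many contains() checks inside the loops run in expected constant time instead of scanning the list each time. The conversion on its own (package names invented):

import java.util.HashSet;
import java.util.List;

public class MembershipDemo {
    public static void main(String[] args) {
        List<String> packages = List.of("com.example.one", "com.example.two");

        HashSet<String> packageNames = new HashSet<String>();
        packageNames.addAll(packages); // O(1) expected lookups from here on

        System.out.println(packageNames.contains("com.example.two"));   // true
        System.out.println(packageNames.contains("com.example.three")); // false
    }
}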