Usage examples for java.util.Map#equals

Method signature: boolean equals(Object o);
From source file:org.springframework.data.crate.core.convert.MappingCrateConverterTest.java
@Test public void shouldWriteMapOfObject() { CrateDocument languageDocument = new CrateDocument("name", "aLanguage"); CrateArray languagesArray = new CrateArray(languageDocument); CrateDocument countryDocument = new CrateDocument(); countryDocument.put("name", "aCountry"); countryDocument.put("languages", languagesArray); CrateDocument mapDocument = new CrateDocument("country", countryDocument); Map<String, Object> expected = new HashMap<String, Object>(); expected.put(DEFAULT_TYPE_KEY, MapOfObject.class.getName()); expected.put("map", mapDocument); Map<String, Country> map = new HashMap<String, Country>(); map.put("country", new Country("aCountry", asList(new Language("aLanguage")))); MapOfObject entity = new MapOfObject(); entity.map = map;// w ww. j av a 2 s . c om CrateDocument document = new CrateDocument(); converter.write(entity, document); assertThat(expected.equals(document), is(true)); }
From source file:com.leanplum.Leanplum.java
/**
 * Applies the variables, messages, or update rules in a start or getVars response.
 *
 * @param response The response containing content.
 * @param alwaysApply Always apply the content regardless of whether the content changed.
 */
private static void applyContentInResponse(JSONObject response, boolean alwaysApply) {
    Map<String, Object> values = JsonConverter
            .mapFromJsonOrDefault(response.optJSONObject(Constants.Keys.VARS));
    Map<String, Object> messages = JsonConverter
            .mapFromJsonOrDefault(response.optJSONObject(Constants.Keys.MESSAGES));
    List<Map<String, Object>> updateRules = JsonConverter
            .listFromJsonOrDefault(response.optJSONArray(Constants.Keys.UPDATE_RULES));
    List<Map<String, Object>> eventRules = JsonConverter
            .listFromJsonOrDefault(response.optJSONArray(Constants.Keys.EVENT_RULES));
    Map<String, Object> regions = JsonConverter
            .mapFromJsonOrDefault(response.optJSONObject(Constants.Keys.REGIONS));
    List<Map<String, Object>> variants = JsonConverter
            .listFromJsonOrDefault(response.optJSONArray(Constants.Keys.VARIANTS));

    // Unless told to always apply, skip the apply when nothing differs from the
    // cached diffs. (Variants are intentionally not part of the comparison, matching
    // the original condition.)
    if (!alwaysApply) {
        boolean unchanged = values.equals(VarCache.getDiffs())
                && messages.equals(VarCache.getMessageDiffs())
                && updateRules.equals(VarCache.getUpdateRuleDiffs())
                && eventRules.equals(VarCache.getEventRuleDiffs())
                && regions.equals(VarCache.regions());
        if (unchanged) {
            return;
        }
    }
    VarCache.applyVariableDiffs(values, messages, updateRules, eventRules, regions, variants);
}
From source file:org.jahia.services.content.interceptor.URLInterceptor.java
/**
 * Transform user URL with servlet context and links placeholders for storage.
 * <p/>
 * Only URLs starting with /&lt;context&gt;/cms or /&lt;context&gt;/files are recognized.
 * <p/>
 * CMS URLs can use mode and language placeholders : /&lt;context&gt;/cms/render/default/en/sites/ACME/home.html and
 * /&lt;context&gt;/cms/##mode##/##lang##/sites/ACME/home.html are both recognized.
 * <p/>
 * If any link is invalid, a ConstraintViolationException is thrown.
 * <p/>
 * Add jmix:referencesInField mixin type to the parent node and j:referenceInField with the list of references
 * contained in the value.
 *
 * @param node          the node being written to
 * @param name          the property name (suffixed with the language code for i18n properties)
 * @param definition    the property definition
 * @param originalValue original value
 * @return the value to store: a new value with links replaced by placeholders, or the
 *         original value when nothing changed
 * @throws ValueFormatException
 * @throws VersionException
 * @throws LockException
 * @throws ConstraintViolationException
 * @throws RepositoryException
 */
public Value beforeSetValue(final JCRNodeWrapper node, String name, final ExtendedPropertyDefinition definition,
        Value originalValue) throws ValueFormatException, VersionException, LockException,
        ConstraintViolationException, RepositoryException {
    String content = originalValue.getString();

    // if the node is a translated node, then take the parent to have the references
    JCRNodeWrapper nodeWithReferences = node.isNodeType(Constants.JAHIANT_TRANSLATION) ? node.getParent() : node;

    // For i18n properties the reference bookkeeping is keyed by "<name>_<lang>".
    if (definition.isInternationalized()) {
        Locale locale = node.getSession().getLocale();
        if (locale == null) {
            // This might happen under publication
            if (node.isNodeType(Constants.JAHIANT_TRANSLATION)) {
                name += "_" + node.getProperty("jcr:language").getString();
            }
        } else {
            name += "_" + locale;
        }
    }

    // Existing references stored for this field: reference UUID -> index suffix of the
    // j:referenceInField child node name.
    final Map<String, Long> refs = new HashMap<String, Long>();
    if (logger.isDebugEnabled()) {
        logger.debug("Intercept setValue for " + node.getPath() + "/" + name);
    }
    if (nodeWithReferences.isNodeType(JAHIAMIX_REFERENCES_IN_FIELD)) {
        NodeIterator ni = nodeWithReferences.getNodes(JAHIA_REFERENCE_IN_FIELD_PREFIX);
        while (ni.hasNext()) {
            JCRNodeWrapper ref = (JCRNodeWrapper) ni.next();
            if (name.equals(ref.getProperty("j:fieldName").getString()) && ref.hasProperty("j:reference")) {
                refs.put(ref.getProperty("j:reference").getString(),
                        Long.valueOf(StringUtils.substringAfterLast(ref.getName(), "_")));
            }
        }
    }

    // References found while traversing the new content; filled in by the visitor below.
    final Map<String, Long> newRefs = new HashMap<String, Long>();
    String result;
    try {
        result = urlTraverser.traverse(content, new HtmlTagAttributeVisitor() {
            public String visit(String value, RenderContext context, String tagName, String attrName,
                    Resource resource) {
                if (StringUtils.isNotEmpty(value)) {
                    try {
                        value = replaceRefsByPlaceholders(value, newRefs, refs,
                                node.getSession().getWorkspace().getName(), node.getSession().getLocale(), node,
                                definition);
                    } catch (RepositoryException e) {
                        // Tunnel the checked exception out of the visitor callback;
                        // unwrapped again below.
                        throw new RuntimeException(e);
                    }
                }
                return value;
            }
        });
    } catch (RuntimeException e) {
        // Re-throw the original RepositoryException tunneled through the visitor.
        if (e.getCause() instanceof RepositoryException) {
            throw (RepositoryException) e.getCause();
        } else {
            throw e;
        }
    }

    // Reconcile stored j:referenceInField children with the references found in the new value.
    Set<String> refsToRemove = new HashSet<>(refs.size());
    if (!newRefs.equals(refs)) {
        if (!newRefs.isEmpty() && !nodeWithReferences.isNodeType(JAHIAMIX_REFERENCES_IN_FIELD)) {
            nodeWithReferences.addMixin(JAHIAMIX_REFERENCES_IN_FIELD);
        }
        if (logger.isDebugEnabled()) {
            logger.debug("New references : " + newRefs);
        }
        // Collect children of this field whose reference is gone (or missing) for removal.
        NodeIterator ni = nodeWithReferences.getNodes(JAHIA_REFERENCE_IN_FIELD_PREFIX);
        while (ni.hasNext()) {
            JCRNodeWrapper ref = (JCRNodeWrapper) ni.next();
            if (name.equals(ref.getProperty("j:fieldName").getString()) && (!ref.hasProperty("j:reference")
                    || !newRefs.containsKey(ref.getProperty("j:reference").getString()))) {
                refsToRemove.add(ref.getName());
            }
        }
        // Add a child node for each newly-introduced reference.
        for (Map.Entry<String, Long> entry : newRefs.entrySet()) {
            if (!refs.containsKey(entry.getKey())) {
                JCRNodeWrapper ref = nodeWithReferences.addNode(
                        "j:referenceInField_" + Text.escapeIllegalJcrChars(name) + "_" + entry.getValue(),
                        "jnt:referenceInField");
                ref.setProperty("j:fieldName", name);
                ref.setProperty("j:reference", entry.getKey());
            }
        }
    }
    for (String refToRemove : refsToRemove) {
        nodeWithReferences.getNode(refToRemove).remove();
    }

    // Only create a new Value when the traversal actually rewrote something.
    if (!result.equals(content)) {
        return node.getSession().getValueFactory().createValue(result);
    }
    return originalValue;
}
From source file:com.ibm.jaggr.core.impl.layer.LayerImpl.java
/**
 * Returns the layer content for the request, serving it from the layer-build cache when
 * possible and otherwise building it (and caching the result). Handles gzip/non-gzip
 * variants, development-mode cache invalidation, and cache-key-generator updates.
 * NOTE(review): behavior inferred from the visible code only; the enclosing class and
 * its fields (_layerBuilds, _cacheKeyGenerators, ...) are not in view.
 */
@SuppressWarnings("unchecked")
@Override
public InputStream getInputStream(HttpServletRequest request, HttpServletResponse response) throws IOException {
    CacheEntry entry = null;
    String key = null;
    IAggregator aggr = (IAggregator) request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
    // Optional diagnostic trail of cache decisions, reported back via a request attribute.
    List<String> cacheInfoReport = null;
    if (_isReportCacheInfo) {
        cacheInfoReport = (List<String>) request.getAttribute(LAYERCACHEINFO_PROPNAME);
        if (cacheInfoReport != null) {
            cacheInfoReport.clear();
        }
    }
    // Ensure the report exists when FINEST logging is on, so the log statements below
    // can safely call cacheInfoReport.toString().
    if (log.isLoggable(Level.FINEST) && cacheInfoReport == null) {
        cacheInfoReport = new LinkedList<String>();
    }
    try {
        IOptions options = aggr.getOptions();
        ICacheManager mgr = aggr.getCacheManager();
        boolean ignoreCached = RequestUtil.isIgnoreCached(request);
        InputStream result;
        long lastModified = getLastModified(request);
        CacheEntry newEntry = new CacheEntry(_id, _cacheKey, lastModified);
        CacheEntry existingEntry = null;

        if (ignoreCached) {
            request.setAttribute(NOCACHE_RESPONSE_REQATTRNAME, Boolean.TRUE);
        }
        if (options.isDevelopmentMode()) {
            synchronized (this) {
                // See if we need to discard previously built LayerBuilds
                if (lastModified > _lastModified) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_lastmod2"); //$NON-NLS-1$
                    }
                    if (lastModified != Long.MAX_VALUE) {
                        // max value means missing requested source
                        _lastModified = lastModified;
                    }
                    _cacheKeyGenerators = null;
                }
            }
        }
        Map<String, ICacheKeyGenerator> cacheKeyGenerators = _cacheKeyGenerators;

        // Create a cache key (null when the key generators aren't available yet).
        key = generateCacheKey(request, cacheKeyGenerators);

        if (!ignoreCached && key != null) {
            int loopGuard = 5;
            do {
                // Try to retrieve an existing layer build using the blocking putIfAbsent. If the return
                // value is null, then the newEntry was successfully added to the map, otherwise the
                // existing entry is returned in the buildReader and newEntry was not added.
                existingEntry = _layerBuilds.putIfAbsent(key, newEntry, options.isDevelopmentMode());
                if (cacheInfoReport != null) {
                    cacheInfoReport.add(existingEntry != null ? "hit_1" : "added"); //$NON-NLS-1$ //$NON-NLS-2$
                }
                if (existingEntry != null) {
                    if ((result = existingEntry.tryGetInputStream(request)) != null) {
                        setResponseHeaders(request, response, existingEntry.getSize());
                        if (log.isLoggable(Level.FINEST)) {
                            log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                                    "key:" + key + //$NON-NLS-1$
                                    "\n" + existingEntry.toString()); //$NON-NLS-1$
                        }
                        if (_isReportCacheInfo) {
                            request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
                        }
                        return result;
                    } else if (existingEntry.isDeleted()) {
                        if (_layerBuilds.replace(key, existingEntry, newEntry)) {
                            // entry was replaced, use newEntry
                            if (cacheInfoReport != null) {
                                cacheInfoReport.add("replace_1"); //$NON-NLS-1$
                            }
                            existingEntry = null;
                        } else {
                            // Existing entry was removed from the cache by another thread
                            // between the time we retrieved it and the time we tried to
                            // replace it.  Try to add the new entry again.
                            if (cacheInfoReport != null) {
                                cacheInfoReport.add("retry_add"); //$NON-NLS-1$
                            }
                            if (--loopGuard == 0) {
                                // Should never happen, but just in case
                                throw new IllegalStateException();
                            }
                            continue;
                        }
                    }
                }
                break;
            } while (true);
        }
        // putIfAbsent() succeeded and the new entry was added to the cache
        entry = (existingEntry != null) ? existingEntry : newEntry;

        LayerBuilder layerBuilder = null;

        // List of Future<IModule.ModuleReader> objects that will be used to read the module
        // data from
        List<ICacheKeyGenerator> moduleKeyGens = null;

        // Synchronize on the LayerBuild object for the build.  This will prevent multiple
        // threads from building the same output.  If more than one thread requests the same
        // output (same cache key), then the first one to grab the sync object will win and
        // the rest will wait for the first thread to finish building and then just return
        // the output from the first thread when they wake.
        synchronized (entry) {

            // Check to see if data is available one more time in case a different thread finished
            // building the output while we were blocked on the sync object.
            if (!ignoreCached && key != null && (result = entry.tryGetInputStream(request)) != null) {
                if (cacheInfoReport != null) {
                    cacheInfoReport.add("hit_2"); //$NON-NLS-1$
                }
                setResponseHeaders(request, response, entry.getSize());
                if (log.isLoggable(Level.FINEST)) {
                    log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                            "key:" + key + //$NON-NLS-1$
                            "\n" + entry.toString()); //$NON-NLS-1$
                }
                if (_isReportCacheInfo) {
                    request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
                }
                return result;
            }

            boolean isGzip = RequestUtil.isGzipEncoding(request);
            ByteArrayOutputStream bos = new ByteArrayOutputStream();

            // See if we already have a cached response that uses a different gzip
            // encoding option.  If we do, then just zip (or unzip) the cached
            // response
            CacheEntry otherEntry = null;
            if (key != null) {
                StringBuffer sb = new StringBuffer();
                Matcher m = GZIPFLAG_KEY_PATTERN.matcher(key);
                m.find();
                // Flip the gzip flag in the cache key to look up the opposite-encoding entry.
                m.appendReplacement(sb,
                        new StringBuffer(s_layerCacheKeyGenerators.get(0).toString()).append(":") //$NON-NLS-1$
                                .append("1".equals(m.group(1)) ? "0" : "1") //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                                .append(":").toString() //$NON-NLS-1$
                ).appendTail(sb);
                otherEntry = _layerBuilds.get(sb.toString());
            }
            if (otherEntry != null) {
                if (isGzip) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("zip_unzipped"); //$NON-NLS-1$
                    }
                    // We need gzipped and the cached entry is unzipped
                    // Create the compression stream for the output
                    VariableGZIPOutputStream compress = new VariableGZIPOutputStream(bos, 10240); // is 10k too big?
                    compress.setLevel(Deflater.BEST_COMPRESSION);
                    Writer writer = new OutputStreamWriter(compress, "UTF-8"); //$NON-NLS-1$

                    // Copy the data from the input stream to the output, compressing as we go.
                    CopyUtil.copy(otherEntry.getInputStream(request), writer);
                } else {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("unzip_zipped"); //$NON-NLS-1$
                    }
                    // We need unzipped and the cached entry is zipped.  Just unzip it
                    CopyUtil.copy(new GZIPInputStream(otherEntry.getInputStream(request)), bos);
                }
                // Set the buildReader to the LayerBuild and release the lock by exiting the sync block
                entry.setBytes(bos.toByteArray());
                if (!ignoreCached) {
                    _layerBuilds.replace(key, entry, entry); // updates entry weight in map
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_weights_1"); //$NON-NLS-1$
                    }
                    entry.persist(mgr);
                }
            } else {
                moduleKeyGens = new LinkedList<ICacheKeyGenerator>();

                ModuleList moduleList = getModules(request);

                // Remove the module list from the request to safe-guard it now that we don't
                // need it there anymore
                request.removeAttribute(MODULE_FILES_PROPNAME);

                // Create a BuildListReader from the list of Futures.  This reader will obtain a
                // ModuleReader from each of the Futures in the list and read data from each one in
                // succession until all the data has been read, blocking on each Future until the
                // reader becomes available.
                layerBuilder = new LayerBuilder(request, moduleKeyGens, moduleList);
                String layer = layerBuilder.build();

                if (isGzip) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("zip"); //$NON-NLS-1$
                    }
                    VariableGZIPOutputStream compress = new VariableGZIPOutputStream(bos, 10240); // is 10k too big?
                    compress.setLevel(Deflater.BEST_COMPRESSION);
                    Writer writer = new OutputStreamWriter(compress, "UTF-8"); //$NON-NLS-1$

                    // Copy the data from the input stream to the output, compressing as we go.
                    CopyUtil.copy(new StringReader(layer), writer);
                    // Set the buildReader to the LayerBuild and release the lock by exiting the sync block
                    entry.setBytes(bos.toByteArray());
                } else {
                    entry.setBytes(layer.getBytes());
                }
                // entry will be persisted below after we determine if cache key
                // generator needs to be updated
            }
        }

        // if any of the readers included an error response, then don't cache the layer.
        if (layerBuilder != null && layerBuilder.hasErrors()) {
            request.setAttribute(NOCACHE_RESPONSE_REQATTRNAME, Boolean.TRUE);
            if (cacheInfoReport != null) {
                cacheInfoReport.add(key == null ? "error_noaction" : "error_remove"); //$NON-NLS-1$ //$NON-NLS-2$
            }
            if (key != null) {
                _layerBuilds.remove(key, entry);
            }
        } else if (layerBuilder != null) {
            if (!ignoreCached) {
                // See if we need to create or update the cache key generators
                Map<String, ICacheKeyGenerator> newKeyGens = new HashMap<String, ICacheKeyGenerator>();
                Set<String> requiredModuleListDeps = getModules(request).getDependentFeatures();
                addCacheKeyGenerators(newKeyGens, s_layerCacheKeyGenerators);
                addCacheKeyGenerators(newKeyGens, aggr.getTransport().getCacheKeyGenerators());
                addCacheKeyGenerators(newKeyGens, Arrays.asList(new ICacheKeyGenerator[] {
                        new FeatureSetCacheKeyGenerator(requiredModuleListDeps, false) }));
                addCacheKeyGenerators(newKeyGens, moduleKeyGens);

                boolean cacheKeyGeneratorsUpdated = false;
                if (!newKeyGens.equals(cacheKeyGenerators)) {
                    // If we don't yet have a cache key for this layer, then get one
                    // from the cache key generators, and then update the cache key for this
                    // cache entry.
                    synchronized (this) {
                        if (_cacheKeyGenerators != null) {
                            addCacheKeyGenerators(newKeyGens, _cacheKeyGenerators.values());
                        }
                        _cacheKeyGenerators = Collections.unmodifiableMap(newKeyGens);
                    }
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_keygen"); //$NON-NLS-1$
                    }
                    cacheKeyGeneratorsUpdated = true;
                }
                final String originalKey = key;
                if (key == null || cacheKeyGeneratorsUpdated) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_key"); //$NON-NLS-1$
                    }
                    key = generateCacheKey(request, newKeyGens);
                }
                if (originalKey == null || !originalKey.equals(key)) {
                    /*
                     * The cache key has changed from what was originally used to put the
                     * un-built entry into the cache.  Add the LayerBuild to the cache
                     * using the new key.
                     */
                    if (log.isLoggable(Level.FINE)) {
                        log.fine("Key changed!  Adding layer to cache with key: " + key); //$NON-NLS-1$
                    }
                    final CacheEntry originalEntry = entry;
                    CacheEntry updateEntry = (originalKey == null) ? entry : new CacheEntry(entry);
                    CacheEntry previousEntry = _layerBuilds.putIfAbsent(key, updateEntry,
                            options.isDevelopmentMode());
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add(previousEntry == null ? "update_add" : "update_hit"); //$NON-NLS-1$ //$NON-NLS-2$
                    }
                    // Write the file to disk only if the LayerBuild was successfully added to the cache
                    if (previousEntry == null) {
                        // Updated entry was added to the cache.
                        entry = updateEntry;
                        entry.persist(mgr);
                    }
                    // If the key changed, then remove the entry under the old key.  Use a
                    // delay to give other threads a chance to start using the new cache
                    // key generator.  No need to update entry weight in map
                    if (originalKey != null) {
                        aggr.getExecutors().getScheduledExecutor().schedule(new Runnable() {
                            public void run() {
                                _layerBuilds.remove(originalKey, originalEntry);
                            }
                        }, LAYERBUILD_REMOVE_DELAY_SECONDS, TimeUnit.SECONDS);
                    }
                } else {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_weights_2"); //$NON-NLS-1$
                    }
                    _layerBuilds.replace(key, entry, entry); // updates entry weight in map
                    entry.persist(mgr);
                }
            }
        }
        result = entry.getInputStream(request);
        setResponseHeaders(request, response, entry.getSize());

        // return the input stream to the LayerBuild
        if (log.isLoggable(Level.FINEST)) {
            log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                    "key:" + key + //$NON-NLS-1$
                    "\n" + entry.toString()); //$NON-NLS-1$
        }
        if (_isReportCacheInfo) {
            request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
        }
        return result;
    } catch (IOException e) {
        // Drop the (possibly half-built) entry from the cache on failure.
        _layerBuilds.remove(key, entry);
        throw e;
    } catch (RuntimeException e) {
        _layerBuilds.remove(key, entry);
        throw e;
    } finally {
        if (_layerBuilds.isLayerEvicted()) {
            _layerBuilds.removeLayerFromCache(this);
        }
    }
}
From source file:org.jbpm.formbuilder.server.menu.GuvnorMenuServiceTest.java
public void testSaveMenuItemOK() throws Exception { GuvnorMenuService service = new GuvnorMenuService(); FormRepresentationDecoder decoder = FormEncodingFactory.getDecoder(); File dbFile = new File(getClass().getResource("/menuItems.json").getFile()); String jsonInitial = FileUtils.readFileToString(dbFile); Map<String, List<MenuItemDescription>> descsInitial = decoder.decodeMenuItemsMap(jsonInitial); MenuItemDescription desc = new MenuItemDescription(); desc.setClassName(CustomMenuItem.class.getName()); List<FormEffectDescription> effects = new ArrayList<FormEffectDescription>(); FormEffectDescription effDesc1 = new FormEffectDescription(); effDesc1.setClassName(RemoveEffect.class.getName()); effects.add(effDesc1);//from w w w . j av a 2 s .c om FormEffectDescription effDesc2 = new FormEffectDescription(); effDesc2.setClassName(DoneEffect.class.getName()); effects.add(effDesc2); desc.setEffects(effects); File file = new File(getClass().getResource("testSaveMenuItem.json").getFile()); String json = FileUtils.readFileToString(file); FormItemRepresentation itemRepresentation = decoder.decodeItem(json); desc.setName("test component"); desc.setItemRepresentation(itemRepresentation); String groupName = "Test Components"; service.saveMenuItem(groupName, desc); String jsonResult = FileUtils.readFileToString(dbFile); Map<String, List<MenuItemDescription>> descsResult = decoder.decodeMenuItemsMap(jsonResult); assertNotNull("saved menu items shouldn't be null", descsResult); assertNotNull("saved menu items should contain a list of " + groupName, descsResult.get(groupName)); assertFalse(groupName + " list should not be empty", descsResult.get(groupName).isEmpty()); assertFalse("descsInitial and descsResult should not be the same", descsInitial.equals(descsResult)); service.deleteMenuItem(groupName, desc); String jsonFinal = FileUtils.readFileToString(dbFile); Map<String, List<MenuItemDescription>> descsFinal = decoder.decodeMenuItemsMap(jsonFinal); assertEquals("descsInitial 
and descsFinal should be the same", descsInitial.entrySet(), descsFinal.entrySet()); }
From source file:amfservices.actions.PGServicesAction.java
public Map<String, Object> moveEggFromCoteToInventoryAction(String uid, Map<String, Number> eggPacks, long now) throws PGException { User user = User.getUser(uid);/*from w w w. j a v a 2s . c om*/ Cote cote = Cote.getCote(uid, user.getLastCote()); Map<String, Number> validEggs = EggStoreServices.inst().validateEgg(cote.eggStore(), eggPacks); Inventory inventory = Inventory.getInventory(uid); int inventoryAvail = PGConfig.inst().temp().MaxInventory() - inventory.numberItems(); Map<String, Number> successEggs = EggStoreServices.inst().truncateEgg(validEggs, inventoryAvail); int nEggMoved = 0; for (Map.Entry<String, Number> movedEggEntry : successEggs.entrySet()) { nEggMoved += movedEggEntry.getValue().intValue(); } if (nEggMoved > 0) { EggStoreServices.inst().moveEgg(cote.eggStore(), inventory.eggStore(), successEggs); QuestLogger qLogger = QuestServices.inst().getQuestLogger(uid, now); qLogger.log(new CollectEggsRecord(nEggMoved)); } // build response amf Map<String, Object> response = new HashMap(); // build success if (nEggMoved > 0) { response.put(PGMacro.SUCCESS_EGGS, successEggs); } // build full inventory eggs if (!successEggs.equals(validEggs)) { Map<String, Number> fullInvEggs = EggStoreServices.inst().substractEggs(validEggs, successEggs); response.put(PGMacro.FULL_INVENTORY_EGGS, fullInvEggs); } // build failed eggs if (!validEggs.equals(eggPacks)) { Map<String, Number> failedEggs = EggStoreServices.inst().substractEggs(eggPacks, validEggs); response.put(PGMacro.FAILED_EGGS, failedEggs); } return response; }
From source file:org.jahia.modules.wiki.WikiURLInterceptor.java
/**
 * Intercepts a wiki property write: parses the wiki content, rewrites image URLs that
 * point under the app's /files/ path into reference placeholders, and keeps the node's
 * j:referenceInField children in sync with the references used in the content.
 *
 * @param node          the node being written to
 * @param name          the property name
 * @param definition    the property definition
 * @param originalValue the original value
 * @return a new Value with rewritten content, or the original value when nothing changed
 * @throws RepositoryException (and its listed subclasses) on repository access failure
 */
public Value beforeSetValue(JCRNodeWrapper node, String name, ExtendedPropertyDefinition definition,
        Value originalValue) throws ValueFormatException, VersionException, LockException,
        ConstraintViolationException, RepositoryException {
    String content = originalValue.getString();

    // Existing references stored for this field: reference UUID -> index suffix of the
    // j:referenceInField child node name.
    final Map<String, Long> refs = new HashMap<String, Long>();

    if (logger.isDebugEnabled()) {
        logger.debug("Intercept setValue for " + node.getPath() + "/" + name);
    }

    if (node.isNodeType(JAHIAMIX_REFERENCES_IN_FIELD)) {
        NodeIterator ni = node.getNodes(JAHIA_REFERENCE_IN_FIELD_PREFIX);
        while (ni.hasNext()) {
            JCRNodeWrapper ref = (JCRNodeWrapper) ni.next();
            if (name.equals(ref.getProperty("j:fieldName").getString()) && ref.hasProperty("j:reference")) {
                refs.put(ref.getProperty("j:reference").getString(),
                        Long.valueOf(StringUtils.substringAfterLast(ref.getName(), "_")));
            }
        }
    }

    // References found while rewriting the new content.
    Map<String, Long> newRefs = new HashMap<String, Long>();

    String result = content;
    try {
        // Parse the wiki markup, rewrite matching image URLs, then re-render to text.
        ComponentManager componentManager = WikiRenderer.getComponentManager();
        Parser parser = componentManager.lookup(Parser.class, inputSyntax);
        XDOM xdom = parser.parse(new StringReader(content));
        List<ImageBlock> l = xdom.getChildrenByType(ImageBlock.class, true);
        for (ImageBlock imageBlock : l) {
            final String url = imageBlock.getImage().getName();
            // Only URLs under the app's /files/ path are turned into placeholders.
            if (url.startsWith(Jahia.getContextPath() + "/files/")) {
                String newUrl = replaceRefsByPlaceholders(url, newRefs, refs,
                        node.getSession().getWorkspace().getName());
                imageBlock.getParent().replaceChild(new ImageBlock(new URLImage(newUrl),
                        imageBlock.isFreeStandingURI(), imageBlock.getParameters()), imageBlock);
            }
        }
        BlockRenderer br = (BlockRenderer) componentManager.lookup(BlockRenderer.class, inputSyntax);
        DefaultWikiPrinter p = new DefaultWikiPrinter();
        br.render(xdom.getRoot(), p);
        result = p.toString();
    } catch (RuntimeException e) {
        // Best-effort: on any parse/render failure, log and fall back to the
        // unmodified content (result stays == content).
        logger.error("Error before setting value", e);
    } catch (ComponentLookupException e) {
        logger.error("Error before setting value", e);
    } catch (ParseException e) {
        logger.error("Error before setting value", e);
    }

    // Reconcile stored j:referenceInField children with the references now in use.
    if (!newRefs.equals(refs)) {
        if (!newRefs.isEmpty() && !node.isNodeType(JAHIAMIX_REFERENCES_IN_FIELD)) {
            node.addMixin(JAHIAMIX_REFERENCES_IN_FIELD);
        }
        if (logger.isDebugEnabled()) {
            logger.debug("New references : " + newRefs);
        }
        // Remove children of this field whose reference is no longer used.
        NodeIterator ni = node.getNodes(JAHIA_REFERENCE_IN_FIELD_PREFIX);
        while (ni.hasNext()) {
            JCRNodeWrapper ref = (JCRNodeWrapper) ni.next();
            if (name.equals(ref.getProperty("j:fieldName").getString())
                    && !newRefs.containsKey(ref.getProperty("j:reference").getString())) {
                ref.remove();
            }
        }
        // Add a child node for each newly-introduced reference.
        for (Map.Entry<String, Long> entry : newRefs.entrySet()) {
            if (!refs.containsKey(entry.getKey())) {
                JCRNodeWrapper ref = node.addNode("j:referenceInField_" + name + "_" + entry.getValue(),
                        "jnt:referenceInField");
                ref.setProperty("j:fieldName", name);
                ref.setProperty("j:reference", entry.getKey());
            }
        }
    }

    // Only create a new Value when the rewrite actually changed something.
    if (!result.equals(content)) {
        return node.getSession().getValueFactory().createValue(result);
    }
    return originalValue;
}
From source file:se.vgregion.service.innovationsslussen.idea.IdeaServiceImpl.java
/**
 * Compares two beans for equality by their bean properties.
 * <p>
 * Each bean is wrapped in a {@code BeanMap} and copied into a plain {@code HashMap} so
 * that {@code Map.equals} compares property name/value pairs.
 *
 * @param i1 first bean (may be null)
 * @param i2 second bean (may be null)
 * @return true when both are the same instance or have equal bean properties
 */
boolean same(Object i1, Object i2) {
    if (i1 == i2) {
        return true;
    }
    if (i1 == null || i2 == null) {
        return false;
    }
    // FIX: parameterized the previously raw Map/HashMap declarations and collapsed the
    // `if (!equals) return false; return true;` tail into a direct return.
    Map<Object, Object> bm1 = new HashMap<Object, Object>(new BeanMap(i1));
    Map<Object, Object> bm2 = new HashMap<Object, Object>(new BeanMap(i2));
    return bm1.equals(bm2);
}
From source file:org.alfresco.repo.node.NodeServiceTest.java
/** * Check that simple node property modifications advance the node caches correctly *//*w w w . j a v a2 s. c o m*/ @SuppressWarnings("unchecked") @Test public void testCaches_ImmutableNodeCaches() throws Exception { final NodeRef[] nodeRefs = new NodeRef[2]; final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE); buildNodeHierarchy(workspaceRootNodeRef, nodeRefs); final NodeRef nodeRef = nodeRefs[1]; // Get the current node cache key Long nodeId = (Long) findCacheValue(nodesCache, nodeRef); assertNotNull("Node not found in cache", nodeId); Node nodeOne = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeOne); NodeVersionKey nodeKeyOne = nodeOne.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsOne = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyOne); Set<QName> nodeAspectsOne = (Set<QName>) findCacheValue(aspectsCache, nodeKeyOne); // Check the values assertEquals("The node version is incorrect", Long.valueOf(1L), nodeKeyOne.getVersion()); assertNotNull("No cache entry for properties", nodePropsOne); assertNotNull("No cache entry for aspects", nodeAspectsOne); assertEquals("Property count incorrect", 1, nodePropsOne.size()); assertNotNull("Expected a cm:name property", nodePropsOne.get(ContentModel.PROP_NAME)); assertEquals("Aspect count incorrect", 1, nodeAspectsOne.size()); assertTrue("Expected a cm:auditable aspect", nodeAspectsOne.contains(ContentModel.ASPECT_AUDITABLE)); // Add a property nodeService.setProperty(nodeRef, PROP_RESIDUAL, GUID.generate()); // Get the values for the previous version Map<QName, Serializable> nodePropsOneCheck = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyOne); Set<QName> nodeAspectsOneCheck = (Set<QName>) findCacheValue(aspectsCache, nodeKeyOne); assertTrue("Previous cache entries must be left alone", nodePropsOneCheck.equals(nodePropsOne)); assertTrue("Previous cache entries 
must be left alone", nodeAspectsOneCheck.equals(nodeAspectsOne)); // Get the current node cache key Node nodeTwo = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeTwo); NodeVersionKey nodeKeyTwo = nodeTwo.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsTwo = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyTwo); Set<QName> nodeAspectsTwo = (Set<QName>) findCacheValue(aspectsCache, nodeKeyTwo); // Check the values assertEquals("The node version is incorrect", Long.valueOf(2L), nodeKeyTwo.getVersion()); assertNotNull("No cache entry for properties", nodePropsTwo); assertNotNull("No cache entry for aspects", nodeAspectsTwo); assertFalse("Properties must have moved on", nodePropsTwo.equals(nodePropsOne)); assertEquals("Property count incorrect", 2, nodePropsTwo.size()); assertNotNull("Expected a cm:name property", nodePropsTwo.get(ContentModel.PROP_NAME)); assertNotNull("Expected a residual property", nodePropsTwo.get(PROP_RESIDUAL)); assertTrue("Aspects must be carried", nodeAspectsTwo.equals(nodeAspectsOne)); // Remove a property nodeService.removeProperty(nodeRef, PROP_RESIDUAL); // Get the values for the previous version Map<QName, Serializable> nodePropsTwoCheck = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyTwo); Set<QName> nodeAspectsTwoCheck = (Set<QName>) findCacheValue(aspectsCache, nodeKeyTwo); assertTrue("Previous cache entries must be left alone", nodePropsTwoCheck.equals(nodePropsTwo)); assertTrue("Previous cache entries must be left alone", nodeAspectsTwoCheck.equals(nodeAspectsTwo)); // Get the current node cache key Node nodeThree = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeThree); NodeVersionKey nodeKeyThree = nodeThree.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsThree = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyThree); Set<QName> 
nodeAspectsThree = (Set<QName>) findCacheValue(aspectsCache, nodeKeyThree); // Check the values assertEquals("The node version is incorrect", Long.valueOf(3L), nodeKeyThree.getVersion()); assertNotNull("No cache entry for properties", nodePropsThree); assertNotNull("No cache entry for aspects", nodeAspectsThree); assertFalse("Properties must have moved on", nodePropsThree.equals(nodePropsTwo)); assertEquals("Property count incorrect", 1, nodePropsThree.size()); assertNotNull("Expected a cm:name property", nodePropsThree.get(ContentModel.PROP_NAME)); assertNull("Expected no residual property", nodePropsThree.get(PROP_RESIDUAL)); assertTrue("Aspects must be carried", nodeAspectsThree.equals(nodeAspectsTwo)); // Add an aspect nodeService.addAspect(nodeRef, ContentModel.ASPECT_TITLED, null); // Get the values for the previous version Map<QName, Serializable> nodePropsThreeCheck = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyThree); Set<QName> nodeAspectsThreeCheck = (Set<QName>) findCacheValue(aspectsCache, nodeKeyThree); assertTrue("Previous cache entries must be left alone", nodePropsThreeCheck.equals(nodePropsThree)); assertTrue("Previous cache entries must be left alone", nodeAspectsThreeCheck.equals(nodeAspectsThree)); // Get the current node cache key Node nodeFour = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeFour); NodeVersionKey nodeKeyFour = nodeFour.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsFour = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyFour); Set<QName> nodeAspectsFour = (Set<QName>) findCacheValue(aspectsCache, nodeKeyFour); // Check the values assertEquals("The node version is incorrect", Long.valueOf(4L), nodeKeyFour.getVersion()); assertNotNull("No cache entry for properties", nodePropsFour); assertNotNull("No cache entry for aspects", nodeAspectsFour); assertTrue("Properties must be carried", nodePropsFour.equals(nodePropsThree)); 
assertFalse("Aspects must have moved on", nodeAspectsFour.equals(nodeAspectsThree)); assertTrue("Expected cm:titled aspect", nodeAspectsFour.contains(ContentModel.ASPECT_TITLED)); // Remove an aspect nodeService.removeAspect(nodeRef, ContentModel.ASPECT_TITLED); // Get the values for the previous version Map<QName, Serializable> nodePropsFourCheck = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyFour); Set<QName> nodeAspectsFourCheck = (Set<QName>) findCacheValue(aspectsCache, nodeKeyFour); assertTrue("Previous cache entries must be left alone", nodePropsFourCheck.equals(nodePropsFour)); assertTrue("Previous cache entries must be left alone", nodeAspectsFourCheck.equals(nodeAspectsFour)); // Get the current node cache key Node nodeFive = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeFive); NodeVersionKey nodeKeyFive = nodeFive.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsFive = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyFive); Set<QName> nodeAspectsFive = (Set<QName>) findCacheValue(aspectsCache, nodeKeyFive); // Check the values assertEquals("The node version is incorrect", Long.valueOf(5L), nodeKeyFive.getVersion()); assertNotNull("No cache entry for properties", nodePropsFive); assertNotNull("No cache entry for aspects", nodeAspectsFive); assertTrue("Properties must be carried", nodePropsFive.equals(nodePropsFour)); assertFalse("Aspects must have moved on", nodeAspectsFive.equals(nodeAspectsFour)); assertFalse("Expected no cm:titled aspect ", nodeAspectsFive.contains(ContentModel.ASPECT_TITLED)); // Add an aspect, some properties and secondary association RetryingTransactionCallback<Void> nodeSixWork = new RetryingTransactionCallback<Void>() { @Override public Void execute() throws Throwable { Map<QName, Serializable> props = new HashMap<QName, Serializable>(); props.put(ContentModel.PROP_TITLE, "some title"); nodeService.addAspect(nodeRef, 
ContentModel.ASPECT_TITLED, props); nodeService.setProperty(nodeRef, ContentModel.PROP_DESCRIPTION, "Some description"); // Adding a child node now triggers behaviour to update a CRC property // nodeService.addChild( // Collections.singletonList(workspaceRootNodeRef), // nodeRef, // ContentModel.ASSOC_CHILDREN, // QName.createQName(TEST_PREFIX, "secondary")); return null; } }; txnService.getRetryingTransactionHelper().doInTransaction(nodeSixWork); // Get the values for the previous version Map<QName, Serializable> nodePropsFiveCheck = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyFive); Set<QName> nodeAspectsFiveCheck = (Set<QName>) findCacheValue(aspectsCache, nodeKeyFive); assertTrue("Previous cache entries must be left alone", nodePropsFiveCheck.equals(nodePropsFive)); assertTrue("Previous cache entries must be left alone", nodeAspectsFiveCheck.equals(nodeAspectsFive)); // Get the current node cache key Node nodeSix = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeSix); NodeVersionKey nodeKeySix = nodeSix.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsSix = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeySix); Set<QName> nodeAspectsSix = (Set<QName>) findCacheValue(aspectsCache, nodeKeySix); // Check the values assertEquals("The node version is incorrect", Long.valueOf(6L), nodeKeySix.getVersion()); assertNotNull("No cache entry for properties", nodePropsSix); assertNotNull("No cache entry for aspects", nodeAspectsSix); assertFalse("Properties must have moved on", nodePropsSix.equals(nodePropsFive)); assertEquals("Property count incorrect", 3, nodePropsSix.size()); assertNotNull("Expected a cm:name property", nodePropsSix.get(ContentModel.PROP_NAME)); assertNotNull("Expected a cm:title property", nodePropsSix.get(ContentModel.PROP_TITLE)); assertNotNull("Expected a cm:description property", nodePropsSix.get(ContentModel.PROP_DESCRIPTION)); 
assertFalse("Aspects must have moved on", nodeAspectsSix.equals(nodeAspectsFive)); assertTrue("Expected cm:titled aspect ", nodeAspectsSix.contains(ContentModel.ASPECT_TITLED)); // Remove an aspect, some properties and a secondary association RetryingTransactionCallback<Void> nodeSevenWork = new RetryingTransactionCallback<Void>() { @Override public Void execute() throws Throwable { nodeService.removeAspect(nodeRef, ContentModel.ASPECT_TITLED); nodeService.removeChild(workspaceRootNodeRef, nodeRef); return null; } }; txnService.getRetryingTransactionHelper().doInTransaction(nodeSevenWork); // Get the values for the previous version Map<QName, Serializable> nodePropsSixCheck = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeySix); Set<QName> nodeAspectsSixCheck = (Set<QName>) findCacheValue(aspectsCache, nodeKeySix); assertTrue("Previous cache entries must be left alone", nodePropsSixCheck.equals(nodePropsSix)); assertTrue("Previous cache entries must be left alone", nodeAspectsSixCheck.equals(nodeAspectsSix)); // Get the current node cache key Node nodeSeven = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeSeven); NodeVersionKey nodeKeySeven = nodeSeven.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsSeven = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeySeven); Set<QName> nodeAspectsSeven = (Set<QName>) findCacheValue(aspectsCache, nodeKeySeven); // Check the values assertEquals("The node version is incorrect", Long.valueOf(7L), nodeKeySeven.getVersion()); assertNotNull("No cache entry for properties", nodePropsSeven); assertNotNull("No cache entry for aspects", nodeAspectsSeven); assertFalse("Properties must have moved on", nodePropsSeven.equals(nodePropsSix)); assertEquals("Property count incorrect", 1, nodePropsSeven.size()); assertNotNull("Expected a cm:name property", nodePropsSeven.get(ContentModel.PROP_NAME)); assertFalse("Aspects must have moved on", 
nodeAspectsSeven.equals(nodeAspectsSix)); assertFalse("Expected no cm:titled aspect ", nodeAspectsSeven.contains(ContentModel.ASPECT_TITLED)); // Modify cm:auditable RetryingTransactionCallback<Void> nodeEightWork = new RetryingTransactionCallback<Void>() { @Override public Void execute() throws Throwable { BehaviourFilter behaviourFilter = (BehaviourFilter) APP_CONTEXT_INIT.getApplicationContext() .getBean("policyBehaviourFilter"); // Disable behaviour for txn behaviourFilter.disableBehaviour(nodeRef, ContentModel.ASPECT_AUDITABLE); nodeService.setProperty(nodeRef, ContentModel.PROP_MODIFIER, "Fred"); return null; } }; txnService.getRetryingTransactionHelper().doInTransaction(nodeEightWork); // Get the values for the previous version Map<QName, Serializable> nodePropsSevenCheck = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeySeven); Set<QName> nodeAspectsSevenCheck = (Set<QName>) findCacheValue(aspectsCache, nodeKeySeven); assertTrue("Previous cache entries must be left alone", nodePropsSevenCheck.equals(nodePropsSeven)); assertTrue("Previous cache entries must be left alone", nodeAspectsSevenCheck.equals(nodeAspectsSeven)); // Get the current node cache key Node nodeEight = (Node) findCacheValue(nodesCache, nodeId); assertNotNull("Node not found in cache", nodeEight); NodeVersionKey nodeKeyEight = nodeEight.getNodeVersionKey(); // Get the node cached values Map<QName, Serializable> nodePropsEight = (Map<QName, Serializable>) findCacheValue(propsCache, nodeKeyEight); Set<QName> nodeAspectsEight = (Set<QName>) findCacheValue(aspectsCache, nodeKeyEight); // Check the values assertEquals("The node version is incorrect", Long.valueOf(8L), nodeKeyEight.getVersion()); assertNotNull("No cache entry for properties", nodePropsEight); assertNotNull("No cache entry for aspects", nodeAspectsEight); assertEquals("Expected change to cm:modifier", "Fred", nodeEight.getAuditableProperties().getAuditModifier()); assertTrue("Properties must be carried", 
nodePropsEight.equals(nodePropsSeven)); assertTrue("Aspects be carried", nodeAspectsEight.equals(nodeAspectsSeven)); }
From source file: io.warp10.continuum.egress.EgressFetchHandler.java
/**
 * Output a text version of fetched data. Deduplication is done on the fly so we don't decode twice.
 *
 * <p>The class name and label set are printed only when they differ from the previous GTS; subsequent
 * values of the same series are prefixed with {@code =} (non-raw mode). When {@code dedup} is set,
 * consecutive identical (location, elevation, value) tuples are collapsed and the last suppressed
 * value is re-emitted after the loop so the series end is visible.
 *
 * @param pw             destination for the textual output
 * @param iter           iterator over the fetched GTS decoders (project type; consumed fully)
 * @param now            upper bound (inclusive) on emitted timestamps
 * @param timespan       if &gt;= 0, emit only ticks in {@code (now - timespan, now]}; if &lt; 0,
 *                       {@code -timespan} is treated as a maximum datapoint count per metadata
 *                       (see {@code toDecodeCount} below) — NOTE(review): count semantics inferred
 *                       from the {@code Math.max(0, -timespan - currentCount)} line; confirm with callers
 * @param raw            if true, print full raw tick lines and never the {@code =} continuation form
 * @param dedup          if true, suppress consecutive duplicate values on the fly
 * @param signed         if true, the request was signed, so producer/owner 'private' labels may be shown
 * @param showAttributes if true, append the metadata attributes map after the labels
 * @param lastMeta       carries the last-seen Metadata across successive calls (read at entry, stored at exit)
 * @param lastCount      carries the running datapoint count across successive calls (read at entry, stored at exit)
 * @param sortMeta       if true, labels and attributes are emitted in sorted key order (via TreeMap)
 * @throws IOException declared for the PrintWriter-based output path
 */
private static void textDump(PrintWriter pw, GTSDecoderIterator iter, long now, long timespan, boolean raw,
        boolean dedup, boolean signed, boolean showAttributes, AtomicReference<Metadata> lastMeta,
        AtomicLong lastCount, boolean sortMeta) throws IOException {

    String name = null;
    Map<String, String> labels = null;

    // Reused buffer holding the encoded "class{labels}" prefix for the current series.
    StringBuilder sb = new StringBuilder();

    // State carried over from a previous invocation (chunked fetches resume the count here).
    Metadata lastMetadata = lastMeta.get();
    long currentCount = lastCount.get();

    while (iter.hasNext()) {
        GTSDecoder decoder = iter.next();

        // Skip decoders with no values at all.
        if (!decoder.next()) {
            continue;
        }

        long toDecodeCount = Long.MAX_VALUE;

        if (timespan < 0) {
            Metadata meta = decoder.getMetadata();
            // A new metadata resets the per-series count so each series gets its own budget.
            if (!meta.equals(lastMetadata)) {
                lastMetadata = meta;
                currentCount = 0;
            }
            // Remaining datapoint budget: -timespan total, minus what was already emitted.
            toDecodeCount = Math.max(0, -timespan - currentCount);
        }

        //
        // Only display the class + labels if they have changed since the previous GTS
        //

        Map<String, String> lbls = decoder.getLabels();

        //
        // Compute the new name
        //

        boolean displayName = false;

        if (null == name || (!name.equals(decoder.getName()) || !labels.equals(lbls))) {
            displayName = true;
            name = decoder.getName();
            labels = lbls;
            // Rebuild the "class{label=value,...}" prefix in sb.
            sb.setLength(0);
            GTSHelper.encodeName(sb, name);
            sb.append("{");
            boolean first = true;

            if (sortMeta) {
                lbls = new TreeMap<String, String>(lbls);
            }

            for (Entry<String, String> entry : lbls.entrySet()) {
                //
                // Skip owner/producer labels and any other 'private' labels
                //
                if (!signed) {
                    if (Constants.PRODUCER_LABEL.equals(entry.getKey())) {
                        continue;
                    }
                    if (Constants.OWNER_LABEL.equals(entry.getKey())) {
                        continue;
                    }
                }

                if (!first) {
                    sb.append(",");
                }
                GTSHelper.encodeName(sb, entry.getKey());
                sb.append("=");
                GTSHelper.encodeName(sb, entry.getValue());
                first = false;
            }

            sb.append("}");

            if (showAttributes) {
                Metadata meta = decoder.getMetadata();
                if (meta.getAttributesSize() > 0) {
                    if (sortMeta) {
                        // NOTE(review): mutates the decoder's Metadata in place to sort attributes;
                        // presumably safe because the decoder is not reused afterwards — confirm.
                        meta.setAttributes(new TreeMap<String, String>(meta.getAttributes()));
                    }
                    GTSHelper.labelsToString(sb, meta.getAttributes());
                } else {
                    sb.append("{}");
                }
            }
        }

        // Last emitted (or suppressed) tuple; used for dedup comparison and the trailing flush.
        long timestamp = 0L;
        long location = GeoTimeSerie.NO_LOCATION;
        long elevation = GeoTimeSerie.NO_ELEVATION;
        Object value = null;
        boolean dup = true;

        long decoded = 0;

        do {
            // Stop once the per-series datapoint budget is exhausted (timespan < 0 mode).
            if (toDecodeCount == decoded) {
                break;
            }

            // FIXME(hbs): only display the results which match the authorized (according to token) timerange and geo zones

            //
            // Filter out any value not in the time range
            //

            long newTimestamp = decoder.getTimestamp();
            if (newTimestamp > now || (timespan >= 0 && newTimestamp <= (now - timespan))) {
                continue;
            }

            //
            // TODO(hbs): filter out values with no location or outside the selected geozone when a geozone was set
            //

            long newLocation = decoder.getLocation();
            long newElevation = decoder.getElevation();
            Object newValue = decoder.getValue();

            dup = true;

            if (dedup) {
                // A tuple is a duplicate only if location, elevation AND value all match the previous one.
                if (location != newLocation || elevation != newElevation) {
                    dup = false;
                } else {
                    if (null == newValue) {
                        // Consider nulls as duplicates (can't happen!)
                        dup = false;
                    } else if (newValue instanceof Number) {
                        if (!((Number) newValue).equals(value)) {
                            dup = false;
                        }
                    } else if (newValue instanceof String) {
                        if (!((String) newValue).equals(value)) {
                            dup = false;
                        }
                    } else if (newValue instanceof Boolean) {
                        if (!((Boolean) newValue).equals(value)) {
                            dup = false;
                        }
                    }
                }
            }

            decoded++;

            // Remember this tuple for the next dedup comparison / trailing flush.
            location = newLocation;
            elevation = newElevation;
            timestamp = newTimestamp;
            value = newValue;

            if (raw) {
                if (!dedup || !dup) {
                    pw.println(GTSHelper.tickToString(sb, timestamp, location, elevation, value));
                }
            } else {
                // Display the name only if we have at least one value to display
                // We force 'dup' to be false when we must show the name
                if (displayName) {
                    pw.println(GTSHelper.tickToString(sb, decoder.getTimestamp(), decoder.getLocation(),
                            decoder.getElevation(), decoder.getValue()));
                    displayName = false;
                    dup = false;
                } else {
                    if (!dedup || !dup) {
                        // Continuation line: '=' means "same class{labels} as the previous line".
                        pw.print("=");
                        pw.println(GTSHelper.tickToString(timestamp, location, elevation, value));
                    }
                }
            }
        } while (decoder.next());

        // Update the running count (only meaningful in count-limited, i.e. timespan < 0, mode).
        if (timespan < 0) {
            currentCount += decoded;
        }

        // Print any remaining value: when dedup swallowed the tail of a duplicate run,
        // re-emit the last tuple so the end of the series is visible.
        if (dedup && dup) {
            if (raw) {
                pw.println(GTSHelper.tickToString(sb, timestamp, location, elevation, value));
            } else {
                pw.print("=");
                pw.println(GTSHelper.tickToString(timestamp, location, elevation, value));
            }
        }

        //
        // If displayName is still true it means we should have displayed the name but no value matched,
        // so set name to null so we correctly display the name for the next decoder if it has values
        //

        if (displayName) {
            name = null;
        }
    }

    // Persist carry-over state for the caller's next invocation.
    lastMeta.set(lastMetadata);
    lastCount.set(currentCount);
}