List of usage examples for java.lang.Object.hashCode()
// java.lang.Object.hashCode(): native JVM method returning the identity hash code.
// @HotSpotIntrinsicCandidate (JDK 9+) marks it as potentially replaced by a
// HotSpot compiler intrinsic rather than executed via its native implementation.
@HotSpotIntrinsicCandidate public native int hashCode();
From source file:org.hammurapi.HammurapiTask.java
public void execute() throws BuildException { long started = System.currentTimeMillis(); if (!suppressLogo) { log("Hammurapi 3.18.4 Copyright (C) 2004 Hammurapi Group"); }/*from w w w . j a va2 s . c o m*/ File archiveTmpDir = processArchive(); try { Logger logger = new AntLogger(this); final VisitorStack[] visitorStack = { null }; final VisitorStackSource visitorStackSource = new VisitorStackSource() { public VisitorStack getVisitorStack() { return visitorStack[0]; } }; final SessionImpl reviewSession = new SessionImpl(); InspectorSet inspectorSet = new InspectorSet(new InspectorContextFactory() { public InspectorContext newContext(InspectorDescriptor descriptor, Logger logger) { return new InspectorContextImpl(descriptor, logger, visitorStackSource, reviewSession, violationFilters); } }, logger); if (embeddedInspectors) { log("Loading embedded inspectors", Project.MSG_VERBOSE); loadEmbeddedInspectors(inspectorSet); } log("Loading inspectors", Project.MSG_VERBOSE); Iterator it = inspectors.iterator(); while (it.hasNext()) { Object o = it.next(); if (o instanceof InspectorSource) { ((InspectorSource) o).loadInspectors(inspectorSet); } else { InspectorEntry inspectorEntry = (InspectorEntry) o; inspectorSet.addDescriptor(inspectorEntry); inspectorSet.addInspectorSourceInfo( new InspectorSourceInfo("Inline inspector " + inspectorEntry.getName(), "Build file: " + inspectorEntry.getLocation().toString(), "")); } } log("Inspectors loaded: " + inspectorSet.size(), Project.MSG_VERBOSE); log("Loading waivers", Project.MSG_VERBOSE); Date now = new Date(); WaiverSet waiverSet = new WaiverSet(); it = waivers.iterator(); while (it.hasNext()) { ((WaiverSource) it.next()).loadWaivers(waiverSet, now); } log("Waivers loaded: " + waiverSet.size(), Project.MSG_VERBOSE); log("Loading listeners", Project.MSG_VERBOSE); List listeners = new LinkedList(); it = listenerEntries.iterator(); while (it.hasNext()) { listeners.add(((ListenerEntry) it.next()).getObject(null)); } //Outputs 
listeners.addAll(outputs); listeners.add(new ReviewToLogListener(project)); log("Loading source files", Project.MSG_VERBOSE); RepositoryConfig config = new RepositoryConfig(); if (classPath != null) { log("Loading class files to repository", Project.MSG_DEBUG); config.setClassLoader(new AntClassLoader(project, classPath, false)); reviewSession.setClassPath(classPath.list()); } config.setLogger(logger); config.setCalculateDependencies(calculateDependencies); config.setStoreSource(storeSource); it = srcFileSets.iterator(); while (it.hasNext()) { HammurapiFileSet fs = (HammurapiFileSet) it.next(); fs.setDefaultIncludes(); DirectoryScanner scanner = fs.getDirectoryScanner(project); config.addFile(scanner.getBasedir(), scanner.getIncludedFiles()); } /** * For command-line interface */ it = srcFiles.iterator(); while (it.hasNext()) { config.addFile((File) it.next()); } config.setName(title); if (revisionMapper != null) { config.setRevisionMapper((RevisionMapper) revisionMapper.getObject(null)); } ConnectionPerThreadDataSource dataSource = createDataSource(reviewSession); reviewSession.setDatasource(dataSource); final LinkedList repoWarnings = new LinkedList(); config.setWarningSink(new WarningSink() { public void consume(final String source, final String message) { repoWarnings.add(new Violation() { public String getMessage() { return message; } public InspectorDescriptor getDescriptor() { return null; } SourceMarker sm = new SimpleSourceMarker(0, 0, source, null); public SourceMarker getSource() { return sm; } public int compareTo(Object obj) { if (obj instanceof Violation) { Violation v = (Violation) obj; int c = SourceMarkerComparator._compare(getSource(), v.getSource()); return c == 0 ? 
getMessage().compareTo(v.getMessage()) : c; } return hashCode() - obj.hashCode(); } }); } }); config.setDataSource(dataSource); final SQLProcessor sqlProcessor = new SQLProcessor(dataSource, null); sqlProcessor.setTimeIntervalCategory(tic); DbRepositoryImpl repositoryImpl = new DbRepositoryImpl(config); Repository repository = wrap ? (Repository) repositoryImpl.getProxy() : repositoryImpl; //new SimpleResultsFactory(waiverSet).install(); ResultsFactoryConfig rfConfig = new ResultsFactoryConfig(); rfConfig.setInspectorSet(inspectorSet); rfConfig.setName(title); rfConfig.setReportNumber(repository.getScanNumber()); rfConfig.setRepository(repository); rfConfig.setSqlProcessor(sqlProcessor); rfConfig.setHostId(hostId); rfConfig.setBaseLine(baseLine); rfConfig.setDescription(reviewDescription); try { rfConfig.setHostName(InetAddress.getLocalHost().getHostName()); } catch (Exception e) { log("Cannot resolve host name: " + e); } CompositeStorage storage = new CompositeStorage(); storage.addStorage("jdbc", new JdbcStorage(sqlProcessor)); storage.addStorage("file", new FileStorage(new File(System.getProperties().getProperty("java.io.tmpdir")))); storage.addStorage("memory", new MemoryStorage()); rfConfig.setStorage(storage); rfConfig.setWaiverSet(waiverSet); ResultsFactory resultsFactory = new ResultsFactory(rfConfig); resultsFactory.install(); CompositeResults summary = ResultsFactory.getInstance().newCompositeResults(title); ResultsFactory.getInstance().setSummary(summary); ResultsFactory.pushThreadResults(summary); Collection inspectorsPerSe = new LinkedList(inspectorSet.getInspectors()); reviewSession.setInspectors(inspectorSet); Iterator inspectorsIt = inspectorsPerSe.iterator(); log("Inspectors mapping", Project.MSG_VERBOSE); while (inspectorsIt.hasNext()) { Inspector inspector = (Inspector) inspectorsIt.next(); log("\t" + inspector.getContext().getDescriptor().getName() + " -> " + inspector.getClass().getName(), Project.MSG_VERBOSE); } // Initializes listeners it = 
listeners.iterator(); while (it.hasNext()) { ((Listener) it.next()).onBegin(inspectorSet); } Iterator vfit = violationFilters.iterator(); while (vfit.hasNext()) { Object vf = vfit.next(); if (vf instanceof DataAccessObject) { ((DataAccessObject) vf).setSQLProcessor(sqlProcessor); } } ResultsCollector collector = new ResultsCollector(this, inspectorSet, waiverSet, summary, listeners); inspectorsPerSe.add(collector); // Storing repo warnings while (!repoWarnings.isEmpty()) { collector.getSummary().addWarning((Violation) repoWarnings.removeFirst()); } log("Reviewing", Project.MSG_VERBOSE); inspectorsPerSe.add(new ViolationFilterVisitor()); SimpleReviewEngine rengine = new SimpleReviewEngine(inspectorsPerSe, this); reviewSession.setVisitor(rengine.getVisitor()); visitorStack[0] = rengine.getVisitorStack(); rengine.review(repository); writeWaiverStubs(waiverSet.getRejectedRequests()); ResultsFactory.getInstance().commit(System.currentTimeMillis() - started); if (cleanup) { repositoryImpl.cleanupOldScans(); resultsFactory.cleanupOldReports(); } repositoryImpl.shutdown(); reviewSession.shutdown(); resultsFactory.shutdown(); dataSource.shutdown(); //log("SQL metrics:\n"+resultsFactory.getSQLMetrics(),Project.MSG_VERBOSE); if (hadExceptions) { throw new BuildException("There have been exceptions during execution. Check log output."); } } catch (JselException e) { throw new BuildException(e); } catch (HammurapiException e) { throw new BuildException(e); } catch (ConfigurationException e) { throw new BuildException(e); } catch (FileNotFoundException e) { throw new BuildException(e); } catch (ClassNotFoundException e) { throw new BuildException(e); } catch (IOException e) { throw new BuildException(e); } catch (SQLException e) { throw new BuildException(e); } catch (RenderingException e) { throw new BuildException(e); } finally { if (archiveTmpDir != null) { deleteFile(archiveTmpDir); } } }
From source file:ConcurrentWeakHashMap.java
/** * Returns the value to which the specified key is mapped, * or {@code null} if this map contains no mapping for the key. * * <p>More formally, if this map contains a mapping from a key * {@code k} to a value {@code v} such that {@code key.equals(k)}, * then this method returns {@code v}; otherwise it returns * {@code null}. (There can be at most one such mapping.) * * @throws NullPointerException if the specified key is null *///from ww w . j a va 2 s.c o m public V get(Object key) { int hash = hash(key.hashCode()); return segmentFor(hash).get(key, hash); }
From source file:ConcurrentWeakHashMap.java
/**
 * Tests if the specified object is a key in this table.
 *
 * @param key possible key
 * @return <tt>true</tt> if and only if the specified object is a key in
 *         this table, as determined by the <tt>equals</tt> method;
 *         <tt>false</tt> otherwise.
 * @throws NullPointerException if the specified key is null
 */
public boolean containsKey(Object key) {
    final int h = hash(key.hashCode());
    return segmentFor(h).containsKey(key, h);
}
From source file:ConcurrentWeakHashMap.java
/**
 * Removes the key (and its corresponding value) from this map. This method
 * does nothing if the key is not in the map.
 *
 * @param key the key that needs to be removed
 * @return the previous value associated with <tt>key</tt>, or
 *         <tt>null</tt> if there was no mapping for <tt>key</tt>
 * @throws NullPointerException if the specified key is null
 */
public V remove(Object key) {
    final int h = hash(key.hashCode());
    return segmentFor(h).remove(key, h, null, false);
}
From source file:ConcurrentWeakHashMap.java
/** * {@inheritDoc}/*from w ww . j a va 2s. c o m*/ * * @throws NullPointerException if the specified key is null */ public boolean remove(Object key, Object value) { int hash = hash(key.hashCode()); if (value == null) return false; return segmentFor(hash).remove(key, hash, value, false) != null; }
From source file:org.jabsorb.JSONRPCBridge.java
/** * Registers an object to export all instance methods and static methods. <p/> * The JSONBridge will export all instance methods and static methods of the * particular object under the name passed in as a key. <p/> This will make * available all methods of the object as * <code><key>.<methodnames></code> to JSON-RPC clients. <p/> * Calling registerObject for a name that already exists will replace the * existing entry./*from w ww . j a v a 2s . c om*/ * * @param key The named prefix to export the object as * @param o The object instance to be called upon */ public void registerObject(Object key, Object o) { ObjectInstance oi = new ObjectInstance(o); synchronized (objectMap) { objectMap.put(key, oi); } if (log.isDebugEnabled()) { log.debug("registered object " + o.hashCode() + " of class " + o.getClass().getName() + " as " + key); } }
From source file:tools.xor.AbstractBO.java
/**
 * Hash code combining the superclass hash with either the wrapped
 * instance's hash (for data types) or the entity's identifier hash
 * (for entity types, when an identifier value is present).
 */
@Override
public int hashCode() {
    int result = super.hashCode();
    if (getType().isDataType()) {
        // Plain data type: fold in the instance value directly.
        return 31 * result + instance.hashCode();
    }
    // Entity type: fold in the identifier value, if one is available.
    ExtendedProperty idProperty = (ExtendedProperty) ((EntityType) type).getIdentifierProperty();
    Object identifier = (idProperty == null) ? null : idProperty.getValue(this);
    if (identifier != null) {
        result = 31 * result + identifier.hashCode();
    }
    return result;
}
From source file:org.apache.ojb.otm.core.ConcreteEditingContext.java
/** * * Writes all changes in this context into the persistent store. * *//* w w w. j ava2 s . c o m*/ private void checkpointInternal(boolean isCommit) throws TransactionAbortedException { if (_order.size() == 0) { return; } removeCollectionProxyListeners(); ConnectionManagerIF connMan = _pb.serviceConnectionManager(); boolean saveBatchMode = connMan.isBatchMode(); Swizzling swizzlingStrategy = _tx.getKit().getSwizzlingStrategy(); LockManager lockManager = LockManager.getInstance(); Identity[] lockOrder = (Identity[]) _order.toArray(new Identity[_order.size()]); ObjectCache cache = _pb.serviceObjectCache(); boolean isInsertVerified = _tx.getKit().isInsertVerified(); ArrayList changedCollections = new ArrayList(); // sort objects in the order of oid.hashCode to avoid deadlocks Arrays.sort(lockOrder, new Comparator() { public int compare(Object o1, Object o2) { return o1.hashCode() - o2.hashCode(); } public boolean equals(Object obj) { return false; } }); try { // mark dirty objects and lock them for write // also handle dependent objects and if there were inserted once, // repeat this process for their dependants ("cascade create") ArrayList newObjects = new ArrayList(); int countNewObjects; do { newObjects.clear(); countNewObjects = 0; for (int i = 0; i < lockOrder.length; i++) { Identity oid = lockOrder[i]; ContextEntry entry = (ContextEntry) _objects.get(oid); State state = entry.state; if (entry.userObject == null) // invalidated { continue; } if (entry.handler == null) // materialized { if (!state.isDeleted()) { Object[][] origFields = (Object[][]) _checkpointed.get(oid); Object[][] newFields = getFields(entry.userObject, true, !isCommit); if (origFields == null) { entry.needsCacheSwizzle = true; newObjects.addAll(handleDependentReferences(oid, entry.userObject, null, newFields[0], newFields[2])); newObjects.addAll(handleDependentCollections(oid, entry.userObject, null, newFields[1], newFields[3])); } else { if (isModified(origFields[0], newFields[0])) { 
entry.state = state.markDirty(); entry.needsCacheSwizzle = true; lockManager.ensureLock(oid, _tx, LockType.WRITE_LOCK, _pb); newObjects.addAll(handleDependentReferences(oid, entry.userObject, origFields[0], newFields[0], newFields[2])); } if (isModified(origFields[1], newFields[1])) { // there are modified collections, // so we need to lock the object and to swizzle it to cache entry.needsCacheSwizzle = true; lockManager.ensureLock(oid, _tx, LockType.WRITE_LOCK, _pb); newObjects.addAll(handleDependentCollections(oid, entry.userObject, origFields[1], newFields[1], newFields[3])); changedCollections.add(oid); } } } } } countNewObjects = newObjects.size(); if (countNewObjects > 0) { // new objects are not locked, so we don't need to ensure the order lockOrder = (Identity[]) newObjects.toArray(new Identity[countNewObjects]); } } while (countNewObjects > 0); // Swizzle the context objects and the cache objects for (Iterator it = _order.iterator(); it.hasNext();) { Identity oid = (Identity) it.next(); ContextEntry entry = (ContextEntry) _objects.get(oid); if (entry.needsCacheSwizzle) { entry.userObject = swizzlingStrategy.getRealTarget(entry.userObject); entry.cacheObject = swizzlingStrategy.swizzle( // we create the special ObjectCache implememntation // that returns cacheObject, not userObject entry.userObject, entry.cacheObject, _pb, new ObjectCache() { public Object lookup(Identity anOid) { ContextEntry ent = (ContextEntry) _objects.get(anOid); return (ent == null ? 
null : ent.cacheObject); } public boolean contains(Identity oid) { return lookup(oid) != null; } public void cache(Identity anOid, Object obj) { // do nothing } public boolean cacheIfNew(Identity oid, Object obj) { return false; } public void clear() { // do nothing } public void remove(Identity anOid) { // do nothing } }); } } // Cascade delete for dependent objects int countCascadeDeleted; do { countCascadeDeleted = 0; // Use intermediate new ArrayList(_order) because _order // may be changed during cascade delete for (Iterator it = (new ArrayList(_order)).iterator(); it.hasNext();) { Identity oid = (Identity) it.next(); ContextEntry entry = (ContextEntry) _objects.get(oid); if (entry.state.isDeleted()) { countCascadeDeleted += doCascadeDelete(oid, entry.userObject); } } } while (countCascadeDeleted > 0); // perform database operations connMan.setBatchMode(true); try { for (Iterator it = _order.iterator(); it.hasNext();) { Identity oid = (Identity) it.next(); ContextEntry entry = (ContextEntry) _objects.get(oid); State state = entry.state; if (!state.needsInsert() && !state.needsUpdate() && !state.needsDelete()) { if (changedCollections.contains(oid)) { _pb.store(entry.cacheObject, state); } continue; } if (state.needsInsert()) { if (isInsertVerified) { // PB verifies object existence by default _pb.store(entry.cacheObject); } else { // PB migth already created the object by auto-update if (cache.lookup(oid) == null) { _pb.store(entry.cacheObject, state); } } } else if (state.needsUpdate()) { _pb.store(entry.cacheObject, state); } else if (state.needsDelete()) { _pb.delete(entry.cacheObject); } entry.state = state.commit(); } connMan.executeBatch(); } finally { connMan.setBatchMode(saveBatchMode); } } catch (Throwable ex) { ex.printStackTrace(); throw new TransactionAbortedException(ex); } }
From source file:org.jabsorb.JSONRPCBridge.java
/** * Registers an object to export all instance methods defined by * interfaceClass. <p/> The JSONBridge will export all instance methods * defined by interfaceClass of the particular object under the name passed in * as a key. <p/> This will make available these methods of the object as * <code><key>.<methodnames></code> to JSON-RPC clients. * * @param key The named prefix to export the object as * @param o The object instance to be called upon * @param interfaceClass The type that this object should be registered as. * <p/> This can be used to restrict the exported methods to the * methods defined in a specific superclass or interface. *///from w w w.ja va 2 s. c o m public void registerObject(Object key, Object o, Class interfaceClass) { ObjectInstance oi = new ObjectInstance(o, interfaceClass); synchronized (objectMap) { objectMap.put(key, oi); } if (log.isDebugEnabled()) { log.debug("registered object " + o.hashCode() + " of class " + interfaceClass.getName() + " as " + key); } }
From source file:com.qcadoo.model.internal.classconverter.ModelXmlToClassConverterTest.java
/**
 * Verifies that generated entity hash codes reflect the state of related
 * (many-to-many) entities: entities with different related state hash
 * differently, and hashes converge once the related state is made equal.
 *
 * NOTE(review): statement order is significant below — hashCodeFirstB is
 * captured BEFORE thirdEntityB's fieldString is changed, so the second
 * assertFalse checks that mutating a related entity changes the hash.
 */
@Test
public final void shouldReturnDifferentHashCodesIfRelatedEntitiesAreDifferent() throws Exception {
    // given: two "firstEntity" instances wired to two "thirdEntity" instances
    // via mutual many-to-many references
    Object firstEntityA = classes.get(ClassNameUtils.getFullyQualifiedClassName("full", "firstEntity"))
            .newInstance();
    Object firstEntityB = classes.get(ClassNameUtils.getFullyQualifiedClassName("full", "firstEntity"))
            .newInstance();
    Object thirdEntityA = classes.get(ClassNameUtils.getFullyQualifiedClassName("full", "thirdEntity"))
            .newInstance();
    Object thirdEntityB = classes.get(ClassNameUtils.getFullyQualifiedClassName("full", "thirdEntity"))
            .newInstance();

    BeanUtils.setProperty(firstEntityA, "fieldInteger", 13);
    BeanUtils.setProperty(firstEntityA, "fieldString", "Xxx");
    BeanUtils.setProperty(firstEntityA, "fieldManyToMany", Sets.newHashSet(thirdEntityA));
    BeanUtils.setProperty(thirdEntityA, "fieldString", "Aaa");
    BeanUtils.setProperty(thirdEntityA, "fieldManyToMany", Sets.newHashSet(firstEntityA));

    // B mirrors A except for the related entity's fieldString ("Bbb" vs "Aaa")
    BeanUtils.setProperty(firstEntityB, "fieldInteger", 13);
    BeanUtils.setProperty(firstEntityB, "fieldString", "Xxx");
    BeanUtils.setProperty(firstEntityB, "fieldManyToMany", Sets.newHashSet(thirdEntityB));
    BeanUtils.setProperty(thirdEntityB, "fieldString", "Bbb");
    BeanUtils.setProperty(thirdEntityB, "fieldManyToMany", Sets.newHashSet(firstEntityB));

    // when
    int hashCodeFirstB = firstEntityB.hashCode();
    // different related state => different hashes
    assertFalse(firstEntityA.hashCode() == firstEntityB.hashCode());
    BeanUtils.setProperty(thirdEntityB, "fieldString", "Aaa");
    // mutating the related entity changes firstEntityB's hash
    assertFalse(hashCodeFirstB == firstEntityB.hashCode());
    BeanUtils.setProperty(firstEntityB, "fieldManyToMany", Sets.newHashSet(thirdEntityA));
    // identical related state => equal hashes
    assertTrue(firstEntityA.hashCode() == firstEntityB.hashCode());
}