List of usage examples for java.util.IdentityHashMap: the IdentityHashMap() constructor
public IdentityHashMap()
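All of the examples below rely on the defining property of this constructor: the map it creates compares keys (and values) with reference equality (==) rather than equals(). A minimal sketch of the difference, using only plain JDK classes:

    import java.util.HashMap;
    import java.util.IdentityHashMap;
    import java.util.Map;

    public class IdentityVsEqualityDemo {
        public static void main(String[] args) {
            String k1 = new String("key");
            String k2 = new String("key"); // equals(k1) is true, but k1 != k2

            Map<String, Integer> hashMap = new HashMap<>();
            hashMap.put(k1, 1);
            hashMap.put(k2, 2);                      // overwrites: the keys are equal
            System.out.println(hashMap.size());      // 1

            Map<String, Integer> identityMap = new IdentityHashMap<>();
            identityMap.put(k1, 1);
            identityMap.put(k2, 2);                  // kept separate: different references
            System.out.println(identityMap.size());  // 2
        }
    }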
From source file:org.orekit.models.earth.tessellation.EllipsoidTessellator.java
    /** Tessellate a zone of interest into tiles.
     * <p>
     * The created tiles will completely cover the zone of interest.
     * </p>
     * <p>
     * The distance between a vertex at a tile corner and the vertex at the same corner
     * in the next tile is computed by subtracting the overlap width (resp. overlap length)
     * from the full width (resp. full length). If for example the full width is specified to
     * be 55 km and the overlap in width is specified to be +5 km, successive tiles would span
     * as follows:
     * </p>
     * <ul>
     * <li>tile 1 covering from 0 km to 55 km</li>
     * <li>tile 2 covering from 50 km to 105 km</li>
     * <li>tile 3 covering from 100 km to 155 km</li>
     * <li>...</li>
     * </ul>
     * <p>
     * In order to achieve the same 50 km step but using a 5 km gap instead of an overlap, one would
     * need to specify the full width to be 45 km and the overlap to be -5 km. With these settings,
     * successive tiles would span as follows:
     * </p>
     * <ul>
     * <li>tile 1 covering from 0 km to 45 km</li>
     * <li>tile 2 covering from 50 km to 95 km</li>
     * <li>tile 3 covering from 100 km to 145 km</li>
     * <li>...</li>
     * </ul>
     * @param zone zone of interest to tessellate
     * @param fullWidth full tiles width as a distance on surface, including overlap (in meters)
     * @param fullLength full tiles length as a distance on surface, including overlap (in meters)
     * @param widthOverlap overlap between adjacent tiles (in meters), if negative the tiles
     * will have a gap between each other instead of an overlap
     * @param lengthOverlap overlap between adjacent tiles (in meters), if negative the tiles
     * will have a gap between each other instead of an overlap
     * @param truncateLastWidth if true, the first tiles strip will be started as close as
     * possible to the zone of interest, and the last tiles strip will have its width reduced
     * to also remain close to the zone of interest; if false all tile strips will have the
     * same {@code fullWidth} and they will be balanced around the zone of interest
     * @param truncateLastLength if true, the first tile in each strip will be started as close as
     * possible to the zone of interest, and the last tile in each strip will have its length reduced
     * to also remain close to the zone of interest; if false all tiles in each strip will have the
     * same {@code fullLength} and they will be balanced around the zone of interest
     * @return a list of lists of tiles covering the zone of interest,
     * each sub-list corresponding to a part not connected to the other
     * parts (for example for islands)
     * @exception OrekitException if the zone cannot be tessellated
     */
    public List<List<Tile>> tessellate(final SphericalPolygonsSet zone,
                                       final double fullWidth, final double fullLength,
                                       final double widthOverlap, final double lengthOverlap,
                                       final boolean truncateLastWidth, final boolean truncateLastLength)
        throws OrekitException {

        final double splitWidth  = (fullWidth  - widthOverlap)  / quantization;
        final double splitLength = (fullLength - lengthOverlap) / quantization;
        final Map<Mesh, List<Tile>> map = new IdentityHashMap<Mesh, List<Tile>>();
        final RegionFactory<Sphere2D> factory = new RegionFactory<Sphere2D>();
        SphericalPolygonsSet remaining = (SphericalPolygonsSet) zone.copySelf();
        S2Point inside = getInsidePoint(remaining);

        while (inside != null) {

            // find a mesh covering at least one connected part of the zone
            final List<Mesh.Node> mergingSeeds = new ArrayList<Mesh.Node>();
            Mesh mesh = new Mesh(ellipsoid, zone, aiming, splitLength, splitWidth, inside);
            mergingSeeds.add(mesh.getNode(0, 0));
            List<Tile> tiles = null;
            while (!mergingSeeds.isEmpty()) {

                // expand the mesh around the seed
                neighborExpandMesh(mesh, mergingSeeds, zone);

                // extract the tiles from the mesh
                // this further expands the mesh so tiles dimensions are multiples of quantization,
                // hence it must be performed here before checking meshes independence
                tiles = extractTiles(mesh, zone, lengthOverlap, widthOverlap, truncateLastWidth, truncateLastLength);

                // check the mesh is independent from existing meshes
                mergingSeeds.clear();
                for (final Map.Entry<Mesh, List<Tile>> entry : map.entrySet()) {
                    if (!factory.intersection(mesh.getCoverage(), entry.getKey().getCoverage()).isEmpty()) {
                        // the meshes are not independent, they intersect each other!
                        // merge the two meshes together
                        mesh = mergeMeshes(mesh, entry.getKey(), mergingSeeds);
                        map.remove(entry.getKey());
                        break;
                    }
                }

            }

            // remove the part of the zone covered by the mesh
            remaining = (SphericalPolygonsSet) factory.difference(remaining, mesh.getCoverage());
            inside = getInsidePoint(remaining);

            map.put(mesh, tiles);

        }

        // concatenate the lists from the independent meshes
        final List<List<Tile>> tilesLists = new ArrayList<List<Tile>>(map.size());
        for (final Map.Entry<Mesh, List<Tile>> entry : map.entrySet()) {
            tilesLists.add(entry.getValue());
        }

        return tilesLists;

    }
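One property that makes an identity map a comfortable fit above is that it never consults equals() or hashCode(), so a key whose state changes after insertion (the meshes keep growing and merging) can never strand its entry. A minimal sketch of that point with a hypothetical mutable key class, not taken from Orekit:

    import java.util.HashMap;
    import java.util.IdentityHashMap;
    import java.util.Map;

    public class MutableKeyDemo {

        // hypothetical key whose hashCode changes as it is mutated
        static final class GrowingKey {
            int size;
            @Override public int hashCode() { return size; }
            @Override public boolean equals(Object o) {
                return o instanceof GrowingKey && ((GrowingKey) o).size == size;
            }
        }

        public static void main(String[] args) {
            GrowingKey key = new GrowingKey();

            Map<GrowingKey, String> hashMap = new HashMap<>();
            Map<GrowingKey, String> identityMap = new IdentityHashMap<>();
            hashMap.put(key, "tiles");
            identityMap.put(key, "tiles");

            key.size++; // mutate the key after insertion

            // the HashMap looks in the wrong bucket and misses its own entry,
            // while the identity map hashes the reference itself and still finds it
            System.out.println(hashMap.get(key));     // likely null
            System.out.println(identityMap.get(key)); // "tiles"
        }
    }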
From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java
    public void testEntrySet() {
        IdentityHashMap hashMap = new IdentityHashMap();
        checkEmptyHashMapAssumptions(hashMap);

        Set entrySet = hashMap.entrySet();
        assertNotNull(entrySet);

        // Check that the entry set looks right
        hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_1);
        entrySet = hashMap.entrySet();
        assertEquals(entrySet.size(), SIZE_ONE);
        Iterator itSet = entrySet.iterator();
        Map.Entry entry = (Map.Entry) itSet.next();
        assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
        assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_1);

        // Check that entries in the entrySet are updated correctly on overwrites
        hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_2);
        entrySet = hashMap.entrySet();
        assertEquals(entrySet.size(), SIZE_ONE);
        itSet = entrySet.iterator();
        entry = (Map.Entry) itSet.next();
        assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
        assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_2);

        // Check that entries are updated on removes
        hashMap.remove(KEY_TEST_ENTRY_SET);
        checkEmptyHashMapAssumptions(hashMap);
    }
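The GWT test above exercises the fact that entrySet() is a live view backed by the map. A minimal, self-contained sketch of the same behaviour against the JDK IdentityHashMap (the KEY_/VALUE_ constants and assertion helpers above belong to the GWT test harness):

    import java.util.IdentityHashMap;
    import java.util.Map;
    import java.util.Set;

    public class EntrySetViewDemo {
        public static void main(String[] args) {
            Map<String, String> map = new IdentityHashMap<>();
            String key = new String("k"); // a distinct instance, reused throughout

            Set<Map.Entry<String, String>> view = map.entrySet();

            map.put(key, "first");
            System.out.println(view.size());                       // 1 -- the view reflects the put

            map.put(key, "second");                                 // same reference, so it overwrites
            System.out.println(view.iterator().next().getValue());  // second

            map.remove(key);
            System.out.println(view.isEmpty());                     // true -- and the remove
        }
    }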
From source file:com.qrmedia.commons.persistence.hibernate.clone.HibernateEntityGraphClonerTest.java
@SuppressWarnings("unchecked") @Test// w w w . j av a 2 s.co m public void clone_entities() throws IllegalAccessException { final StubHibernateEntity entity1 = new StubHibernateEntity(); String property = "007"; final StubHibernateEntity relatedEntity = new SimplePropertyEqualStubHibernateEntity(property); entity1.setNonSimpleBeanProperty(relatedEntity); Set<StubHibernateEntity> nonSimpleCollectionBeanProperty = new HashSet<StubHibernateEntity>(); // reuse relatedEntity to check if its clone is used in both places nonSimpleCollectionBeanProperty.add(relatedEntity); entity1.setNonSimpleCollectionBeanProperty(nonSimpleCollectionBeanProperty); // the first call to the bean cloner creates a clone, adds a new entity and some commands final GraphWiringCommand graphWiringCommand1 = createMock(GraphWiringCommand.class); final GraphPostProcessingCommand graphPostProcessingCommand = createMock(GraphPostProcessingCommand.class); final StubHibernateEntity clone1 = new StubHibernateEntity(); entityBeanCloner.visitNode(eq(new EntityPreserveIdFlagPair(entity1, false)), same(entityGraphCloner), (IdentityHashMap<Object, Object>) anyObject()); expectLastCall() .andAnswer(new HibernateEntityBeanClonerActions(entity1, clone1, Arrays.asList(relatedEntity), Arrays.asList(graphWiringCommand1), Arrays.asList(graphPostProcessingCommand))); // note that entity2 is equal to (but not identical to) relatedEntity! final GraphWiringCommand graphWiringCommand2 = createMock(GraphWiringCommand.class); final StubHibernateEntity entity2 = new SimplePropertyEqualStubHibernateEntity(property); entity2.setNonSimpleBeanProperty(entity1); final StubHibernateEntity clone2 = new SimplePropertyEqualStubHibernateEntity(property); entityBeanCloner.visitNode(eq(new EntityPreserveIdFlagPair(entity2, false)), same(entityGraphCloner), (IdentityHashMap<Object, Object>) anyObject()); expectLastCall().andAnswer(new HibernateEntityBeanClonerActions(entity2, clone2, null, Arrays.asList(graphWiringCommand2), null)); final StubHibernateEntity relatedEntityClone = new SimplePropertyEqualStubHibernateEntity(property); entityBeanCloner.visitNode(eq(new EntityPreserveIdFlagPair(relatedEntity, false)), same(entityGraphCloner), (IdentityHashMap<Object, Object>) anyObject()); expectLastCall().andAnswer(new HibernateEntityBeanClonerActions(relatedEntity, relatedEntityClone)); // use flags mutable for the mocks to track the order of calls final ThreadLocal<Integer> numGraphWiringCommandExecuted = new ThreadLocal<Integer>(); numGraphWiringCommandExecuted.set(0); // the entity graph cloner should call the commands in the order they were added graphWiringCommand1.forEntities(); expectLastCall().andReturn(Arrays.asList(entity1)); graphWiringCommand1.execute(MapUtils.toMap(new IdentityHashMap<Object, Object>(), entity1, clone1)); expectLastCall().andAnswer(new NumGraphWiringCommandsExecutedVerifier(numGraphWiringCommandExecuted, 0)); graphWiringCommand2.forEntities(); expectLastCall().andReturn(Arrays.asList(relatedEntity)); graphWiringCommand2 .execute(MapUtils.toMap(new IdentityHashMap<Object, Object>(), relatedEntity, relatedEntityClone)); expectLastCall().andAnswer(new NumGraphWiringCommandsExecutedVerifier(numGraphWiringCommandExecuted, 1)); // this *must* be called after all the wiring commands have been completed graphPostProcessingCommand.execute(); expectLastCall().andAnswer(new IAnswer<Object>() { public Object answer() throws Throwable { if (!(numGraphWiringCommandExecuted.get() == 2)) { fail("Graph post-processing command executed before wiring 
was complete."); } return null; } }); replay(entityBeanCloner, graphWiringCommand1, graphWiringCommand2, graphPostProcessingCommand); Map<StubHibernateEntity, StubHibernateEntity> clones = entityGraphCloner .clone(Arrays.asList(entity1, entity2)); assertEquals(MapUtils.<StubHibernateEntity, StubHibernateEntity>toMap(entity1, clone1, entity2, clone2), clones); verify(entityBeanCloner, graphWiringCommand1, graphWiringCommand2, graphPostProcessingCommand); // check that any internal state maintained during the cloning has been cleaned up assertTrue(ReflectionUtils.<List<?>>getValue(entityGraphCloner, "graphWiringCommands").isEmpty()); assertTrue(ReflectionUtils.<List<?>>getValue(entityGraphCloner, "graphPostProcessingCommands").isEmpty()); /* * The actual wiring of the objects is *not* checked because that is the function * of the command objects, *not* the entity graph cloner. * As such, this is not within the scope of a unit test. */ }
From source file:org.logicblaze.lingo.cache.impl.ThreadCache.java
    protected Map createMap() {
        // lets maintain order so changes are made in the correct order
        return new IdentityHashMap();
    }
From source file:cdr.forms.FormController.java
    @RequestMapping(value = "/{formId}.form", method = RequestMethod.POST)
    public String processForm(Model model, @PathVariable(value = "formId") String formId,
            @Valid @ModelAttribute("deposit") Deposit deposit, BindingResult errors, Principal user,
            SessionStatus sessionStatus,
            @RequestParam(value = "deposit", required = false) String submitDepositAction,
            HttpServletRequest request, HttpServletResponse response) throws PermissionDeniedException {

        request.setAttribute("hasSupplementalObjectsStep", formId.equals(SUPPLEMENTAL_OBJECTS_FORM_ID));

        // Check that the form submitted by the user matches the one in the session
        if (!deposit.getFormId().equals(formId))
            throw new Error("Form ID in session doesn't match form ID in path");

        // this.getAuthorizationHandler().checkPermission(formId, deposit.getForm(), request);

        //
        try {
            request.setCharacterEncoding("UTF-8");
        } catch (UnsupportedEncodingException e) {
            LOG.error("Failed to set character encoding", e);
        }

        //
        if (user != null)
            deposit.getForm().setCurrentUser(user.getName());

        // Remove entries set to null, append an entry for elements with append set
        for (DepositElement element : deposit.getElements()) {
            Iterator<DepositEntry> iterator = element.getEntries().iterator();
            while (iterator.hasNext()) {
                if (iterator.next() == null)
                    iterator.remove();
            }
            if (element.getAppend() != null) {
                element.appendEntry();
                element.setAppend(null);
            }
        }

        // Check the deposit's files for virus signatures
        IdentityHashMap<DepositFile, String> signatures = new IdentityHashMap<DepositFile, String>();
        for (DepositFile depositFile : deposit.getAllFiles())
            scanDepositFile(depositFile, signatures);

        // If the "submit deposit" button was pressed, run the validator.
        if (submitDepositAction != null) {
            Validator validator = new DepositValidator();
            validator.validate(deposit, errors);
        }

        // If the deposit has validation errors and no virus signatures were detected, display errors
        if (errors.hasErrors() && signatures.size() == 0) {
            LOG.debug(errors.getErrorCount() + " errors");
            return "form";
        }

        // If the "submit deposit" button was not pressed, render the form again
        if (submitDepositAction == null) {
            return "form";
        }

        // Otherwise, display one of the result pages: if we detected a virus signature, display
        // the virus warning; otherwise, try to submit the deposit and display results. In each
        // case, we want to do the same cleanup.
        String view;

        if (signatures.size() > 0) {
            model.addAttribute("signatures", signatures);
            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
            view = "virus";
        } else {
            // Redirect for supplemental objects special case
            if (formId.equals(SUPPLEMENTAL_OBJECTS_FORM_ID)) {
                return "redirect:/supplemental";
            }

            // We're doing a regular deposit, so call the deposit handler
            DepositResult result = this.getDepositHandler().deposit(deposit);

            if (result.getStatus() == Status.FAILED) {
                LOG.error("deposit failed");
                if (getNotificationHandler() != null)
                    getNotificationHandler().notifyError(deposit, result);
                response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                view = "failed";
            } else {
                if (getNotificationHandler() != null)
                    getNotificationHandler().notifyDeposit(deposit, result);
                view = "success";
            }
        }

        // Clean up
        deposit.deleteAllFiles();
        sessionStatus.setComplete();

        request.setAttribute("formId", formId);
        request.setAttribute("administratorEmail", getAdministratorEmail());

        return view;
    }
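The controller above keys the virus-scan results by DepositFile identity, so every uploaded file object keeps its own entry even if two uploads were to compare equal. A minimal sketch of that pattern, with hypothetical stand-ins for DepositFile and the scanner (the real scanDepositFile and DepositFile belong to cdr.forms):

    import java.util.IdentityHashMap;
    import java.util.List;
    import java.util.Map;

    public class VirusScanDemo {

        // hypothetical stand-in for the real DepositFile class
        static final class DepositFile {
            final String name;
            DepositFile(String name) { this.name = name; }
        }

        // hypothetical scanner: records a signature name for "infected" files only
        static void scan(DepositFile file, Map<DepositFile, String> signatures) {
            if (file.name.endsWith(".exe")) {            // stand-in detection rule
                signatures.put(file, "Eicar-Test-Signature");
            }
        }

        public static void main(String[] args) {
            // two uploads may look alike but are distinct objects
            List<DepositFile> files = List.of(
                    new DepositFile("report.pdf"),
                    new DepositFile("tool.exe"),
                    new DepositFile("tool.exe"));

            Map<DepositFile, String> signatures = new IdentityHashMap<>();
            for (DepositFile f : files) {
                scan(f, signatures);
            }

            // each flagged upload keeps its own entry, keyed by reference
            System.out.println(signatures.size()); // 2
        }
    }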
From source file:com.amazon.carbonado.repo.jdbc.JDBCRepository.java
    /**
     * @param name name to give repository instance
     * @param isMaster when true, storables in this repository must manage
     * version properties and sequence properties
     * @param dataSource provides JDBC database connections
     * @param catalog optional catalog to search for tables -- actual meaning
     * is database independent
     * @param schema optional schema to search for tables -- actual meaning
     * is database independent
     * @param forceStoredSequence tells the repository to use a stored sequence
     * even if the database supports native sequences
     */
    @SuppressWarnings("unchecked")
    JDBCRepository(AtomicReference<Repository> rootRef, String name, boolean isMaster,
            Iterable<TriggerFactory> triggerFactories, DataSource dataSource, boolean dataSourceClose,
            String catalog, String schema, Integer fetchSize,
            Map<String, Boolean> autoVersioningMap, Map<String, Boolean> suppressReloadMap,
            String sequenceSelectStatement, boolean forceStoredSequence,
            boolean primaryKeyCheckDisabled, SchemaResolver resolver) throws RepositoryException {
        super(name);

        if (dataSource == null) {
            throw new IllegalArgumentException("DataSource cannot be null");
        }

        mIsMaster = isMaster;
        mTriggerFactories = triggerFactories;
        mRootRef = rootRef;
        mDataSource = dataSource;
        mDataSourceClose = dataSourceClose;
        mCatalog = catalog;
        mSchema = schema;
        mFetchSize = fetchSize;
        mPrimaryKeyCheckDisabled = primaryKeyCheckDisabled;

        mAutoVersioningMap = autoVersioningMap;
        mSuppressReloadMap = suppressReloadMap;

        mResolver = resolver;

        mOpenConnections = new IdentityHashMap<Connection, Object>();
        mOpenConnectionsLock = new ReentrantLock(true);

        // Temporarily set to generic one, in case there's a problem during initialization.
        mExceptionTransformer = new JDBCExceptionTransformer();

        mTxnMgr = new JDBCTransactionManager(this);

        getLog().info("Opening repository \"" + getName() + '"');

        // Test connectivity and get some info on transaction isolation levels.
        Connection con = getConnection();
        try {
            DatabaseMetaData md = con.getMetaData();
            if (md == null || !md.supportsTransactions()) {
                throw new RepositoryException("Database does not support transactions");
            }

            mDatabaseProductName = md.getDatabaseProductName();

            boolean supportsSavepoints;
            try {
                supportsSavepoints = md.supportsSavepoints();
            } catch (AbstractMethodError e) {
                supportsSavepoints = false;
            }

            if (supportsSavepoints) {
                con.setAutoCommit(false);
                // Some JDBC drivers (HSQLDB) lie about their savepoint support.
                try {
                    con.setSavepoint();
                } catch (SQLException e) {
                    mLog.warn("JDBC driver for " + mDatabaseProductName
                            + " reports supporting savepoints, but it "
                            + "doesn't appear to work: " + e);
                    supportsSavepoints = false;
                } finally {
                    con.rollback();
                    con.setAutoCommit(true);
                }
            }

            mSupportsSavepoints = supportsSavepoints;
            mSupportsSelectForUpdate = md.supportsSelectForUpdate();
            mSupportsScrollInsensitiveReadOnly = md.supportsResultSetConcurrency(
                    ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);

            mJdbcDefaultIsolationLevel = md.getDefaultTransactionIsolation();
            mDefaultIsolationLevel = mapIsolationLevelFromJdbc(mJdbcDefaultIsolationLevel);

            mReadUncommittedLevel = selectIsolationLevel(md, IsolationLevel.READ_UNCOMMITTED);
            mReadCommittedLevel = selectIsolationLevel(md, IsolationLevel.READ_COMMITTED);
            mRepeatableReadLevel = selectIsolationLevel(md, IsolationLevel.REPEATABLE_READ);
            mSerializableLevel = selectIsolationLevel(md, IsolationLevel.SERIALIZABLE);
        } catch (SQLException e) {
            throw toRepositoryException(e);
        } finally {
            try {
                closeConnection(con);
            } catch (SQLException e) {
                // Don't care.
            }
        }

        mSupportStrategy = JDBCSupportStrategy.createStrategy(this);

        if (forceStoredSequence) {
            mSupportStrategy.setSequenceSelectStatement(null);
        } else if (sequenceSelectStatement != null && sequenceSelectStatement.length() > 0) {
            mSupportStrategy.setSequenceSelectStatement(sequenceSelectStatement);
        }
        mSupportStrategy.setForceStoredSequence(forceStoredSequence);

        mExceptionTransformer = mSupportStrategy.createExceptionTransformer();

        getLog().info("Opened repository \"" + getName() + '"');

        setAutoShutdownEnabled(true);
    }
From source file:org.talend.commons.ui.runtime.swt.tableviewer.tableeditor.TableEditorManager.java
    private void handleSwapedEvent(ListenableListEvent event) {
        if (tableViewerCreator.getTable().isDisposed()) {
            return;
        }
        Table table = tableViewerCreator.getTable();
        TableItem[] items = table.getItems();

        // //////////////////////////////////
        // Warning: using identity comparison
        // //////////////////////////////////

        Set dataHash = MapBackedSet.decorate(new IdentityHashMap());
        dataHash.addAll(Arrays.asList(event.swapedObjects));

        for (TableItem tableItem : items) {
            Object data = tableItem.getData();
            if (dataHash.contains(data)) {
                Collection<TableEditor> tableEditorCollection = dataToMultipleDataEditor.getCollection(data);
                for (TableEditor tableEditor : tableEditorCollection) {
                    tableEditor.setItem(tableItem);
                }
            }
        }
    }
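The MapBackedSet.decorate(new IdentityHashMap()) call above uses Apache Commons Collections to turn the identity map into an identity-based Set. On the JDK alone, the same thing can be built with Collections.newSetFromMap; a minimal sketch (not Talend code):

    import java.util.Collections;
    import java.util.IdentityHashMap;
    import java.util.Set;

    public class IdentitySetDemo {
        public static void main(String[] args) {
            // a Set whose membership test uses == rather than equals()
            Set<String> identitySet = Collections.newSetFromMap(new IdentityHashMap<String, Boolean>());

            String a = new String("row");
            String b = new String("row"); // equal to a, but a different instance

            identitySet.add(a);
            System.out.println(identitySet.contains(a)); // true  -- same reference
            System.out.println(identitySet.contains(b)); // false -- equal but not identical
        }
    }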
From source file:org.unitils.mock.report.impl.ObservedInvocationsReport.java
    /**
     * Gets all the field values in the given test object with their corresponding field names.
     *
     * @param testedObject The test object
     * @return The values and names in an identity map, empty if tested object is null
     */
    protected Map<Object, String> getFieldValuesAndNames(Object testedObject) {
        Map<Object, String> result = new IdentityHashMap<Object, String>();
        if (testedObject == null) {
            return result;
        }
        Set<Field> fields = getAllFields(testedObject.getClass());
        for (Field field : fields) {
            Object value = getFieldValue(testedObject, field);
            if (value != null) {
                result.put(value, field.getName());
            }
        }
        return result;
    }
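getFieldValuesAndNames uses the field values themselves as map keys, and the identity map is what keeps two fields holding equal but distinct values from collapsing into one entry, as a regular HashMap would. A minimal sketch with a hypothetical tested object:

    import java.lang.reflect.Field;
    import java.util.IdentityHashMap;
    import java.util.Map;

    public class FieldValueNamesDemo {

        // hypothetical tested object with two equal-but-distinct field values
        static final class Tested {
            final String first = new String("42");
            final String second = new String("42");
        }

        public static void main(String[] args) throws IllegalAccessException {
            Tested tested = new Tested();
            Map<Object, String> valueToName = new IdentityHashMap<>();

            for (Field field : Tested.class.getDeclaredFields()) {
                field.setAccessible(true);
                Object value = field.get(tested);
                if (value != null) {
                    valueToName.put(value, field.getName());
                }
            }

            // both entries survive because the keys are compared by reference
            System.out.println(valueToName.size()); // 2
        }
    }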
From source file:org.apache.solr.request.SimpleFacets.java
    protected DocSet computeDocSet(DocSet baseDocSet, List<String> excludeTagList) throws SyntaxError, IOException {
        Map<?, ?> tagMap = (Map<?, ?>) req.getContext().get("tags");
        // rb can be null if facets are being calculated from a RequestHandler e.g. MoreLikeThisHandler
        if (tagMap == null || rb == null) {
            return baseDocSet;
        }

        IdentityHashMap<Query, Boolean> excludeSet = new IdentityHashMap<>();
        for (String excludeTag : excludeTagList) {
            Object olst = tagMap.get(excludeTag);
            // tagMap has entries of List<String,List<QParser>>, but subject to change in the future
            if (!(olst instanceof Collection))
                continue;
            for (Object o : (Collection<?>) olst) {
                if (!(o instanceof QParser))
                    continue;
                QParser qp = (QParser) o;
                excludeSet.put(qp.getQuery(), Boolean.TRUE);
            }
        }
        if (excludeSet.size() == 0)
            return baseDocSet;

        List<Query> qlist = new ArrayList<>();

        // add the base query
        if (!excludeSet.containsKey(rb.getQuery())) {
            qlist.add(rb.getQuery());
        }

        // add the filters
        if (rb.getFilters() != null) {
            for (Query q : rb.getFilters()) {
                if (!excludeSet.containsKey(q)) {
                    qlist.add(q);
                }
            }
        }

        // get the new base docset for this facet
        DocSet base = searcher.getDocSet(qlist);
        if (rb.grouping() && rb.getGroupingSpec().isTruncateGroups()) {
            Grouping grouping = new Grouping(searcher, null, rb.getQueryCommand(), false, 0, false);
            grouping.setWithinGroupSort(rb.getGroupingSpec().getSortWithinGroup());
            if (rb.getGroupingSpec().getFields().length > 0) {
                grouping.addFieldCommand(rb.getGroupingSpec().getFields()[0], req);
            } else if (rb.getGroupingSpec().getFunctions().length > 0) {
                grouping.addFunctionCommand(rb.getGroupingSpec().getFunctions()[0], req);
            } else {
                return base;
            }
            AllGroupHeadsCollector allGroupHeadsCollector = grouping.getCommands().get(0).createAllGroupCollector();
            searcher.search(base.getTopFilter(), allGroupHeadsCollector);
            return new BitDocSet(allGroupHeadsCollector.retrieveGroupHeads(searcher.maxDoc()));
        } else {
            return base;
        }
    }
From source file:org.openspotlight.bundle.language.java.bundle.JavaBinaryProcessor.java
    private static Map<TypeDefinition, JavaType> createTypes(final List<TypeDefinition> types,
            final JavaGraphNodeSupport helper) throws Exception {
        final IdentityHashMap<TypeDefinition, JavaType> map = new IdentityHashMap<TypeDefinition, JavaType>();
        for (final TypeDefinition definition : types) {
            if (!definition.isPrivate()) {
                final Class<? extends JavaType> nodeType = getNodeType(definition.getType());
                final JavaType newType = helper.addTypeOnCurrentContext(nodeType, definition.getPackageName(),
                        definition.getTypeName(), definition.getAccess());
                map.put(definition, newType);
            }
        }
        return map;
    }