List of usage examples for the java.util.TreeMap no-argument constructor:
public TreeMap()
From source file:com.acme.legacy.app.repository.JsonUserRepository.java
/**
 * Loads the user fixture from {@code users.json} on the classpath and
 * publishes it as an immutable map keyed by e-mail address.
 *
 * @throws Exception if the classpath resource cannot be read or parsed
 */
@PostConstruct
@SuppressWarnings("unchecked")
public void init() throws Exception {
    Resource source = new ClassPathResource("users.json");
    ObjectMapper json = Jackson2ObjectMapperBuilder.json().build();
    List<User> loaded = json.readValue(source.getInputStream(),
            new TypeReference<List<User>>() { });
    Map<String, User> byEmail = new TreeMap<>();
    for (User user : loaded) {
        byEmail.put(user.getEmail(), user);
    }
    this.users = Collections.unmodifiableMap(byEmail);
}
From source file:cat.albirar.framework.dynabean.impl.DefaultDynaBeanFactory.java
/**
 * Creates a factory with an empty, thread-safe descriptor registry
 * (sorted by key) and a fresh type converter.
 */
public DefaultDynaBeanFactory() {
    descriptors = Collections.synchronizedMap(new TreeMap<String, DynaBeanDescriptor<?>>());
    propRegistry = new SimpleTypeConverter();
}
From source file:com.omertron.tvrageapi.model.EpisodeList.java
/** Builds an empty episode list with placeholder show metadata. */
public EpisodeList() {
    episodeList = new TreeMap<EpisodeNumber, Episode>();
    totalSeasons = 0;
    showName = TVRageApi.UNKNOWN;
}
From source file:com.cybernostics.jsp2thymeleaf.api.expressions.function.FunctionConverterSource.java
/**
 * Renders {@code methodFormat} as a template with the single binding
 * {@code method} -> the supplied method name.
 *
 * @param methodFormat template text understood by SimpleStringTemplateProcessor
 * @param method value substituted for the "method" placeholder
 * @return the expanded string
 */
private String format(String methodFormat, String method) {
    Map<String, Object> bindings = new TreeMap<>();
    bindings.put("method", method);
    return SimpleStringTemplateProcessor.generate(methodFormat, bindings);
}
From source file:com.acc.oauth2.controller.OAuth2AccessController.java
/**
 * Renders the OAuth2 access-confirmation page for the given authorization
 * request, exposing the request and its registered client to the view.
 *
 * @param clientAuth the pending authorization request
 * @return the "access_confirmation" view with its model
 * @throws Exception if the client cannot be loaded
 */
@RequestMapping(value = "/oauth/confirm_access", method = RequestMethod.GET)
public ModelAndView getAccessConfirmation(@ModelAttribute final AuthorizationRequest clientAuth)
        throws Exception {
    final ClientDetails client = clientDetailsService.loadClientByClientId(clientAuth.getClientId());
    final TreeMap<String, Object> viewModel = new TreeMap<String, Object>();
    viewModel.put("client", client);
    viewModel.put("auth_request", clientAuth);
    return new ModelAndView("access_confirmation", viewModel);
}
From source file:byps.test.TestSerializeReferences.java
/**
 * Serializes an object whose map contains a reference back to the object
 * itself, and verifies the self-reference survives the round trip.
 *
 * @throws BException on serialization failure
 */
@Test
public void testSerializeSelfRefInMap() throws BException {
    Node root = new Node();
    root.mapOfNodes = new TreeMap<String, Node>();
    root.mapOfNodes.put("node1", root);

    Node roundTripped = internalTestSerializeObject(root);

    // The deserialized map entry must point at the deserialized node itself.
    Assert.assertTrue("node1.mapOfNodes[node1]", roundTripped.mapOfNodes.get("node1") == roundTripped);
}
From source file:cat.albirar.framework.sets.impl.ModelDescriptor.java
/**
 * Creates a descriptor for the given model class and eagerly resolves
 * its properties into a thread-safe, sorted registry.
 *
 * @param model the model class, required
 * @throws IllegalArgumentException if {@code model} is null
 */
public ModelDescriptor(Class<?> model) {
    Assert.notNull(model, "The model is required");
    this.model = model;
    relativePath = "";
    originalPath = "";
    properties = Collections.synchronizedMap(new TreeMap<String, PropertyDescriptor>());
    resolveProperties();
}
From source file:laboGrid.graphs.replication.ReplicationGraphHeuristicGenerator.java
public ReplicationGraph computeReplicationGraph(DAId[] das, GraphMapping cGraph, int backupDegree) { // Creating Peers dynamic structure TreeMap<String, Set<Integer>> dynPeers = new TreeMap<String, Set<Integer>>(); Set<Integer>[] da2Sub = cGraph.getDa2Sub(); for (int i = 0; i < das.length; ++i) { DAId c = das[i];// ww w. j a v a2 s. co m if (da2Sub[i] != null && !da2Sub[i].isEmpty()) { // Da can be taken into account in backup graph // String resourcePeer = c.getPeerId(); String resourcePeer = "peerId"; Set<Integer> resources = dynPeers.get(resourcePeer); if (resources == null) { System.out.println("Detected Peer: " + resourcePeer + "."); resources = new TreeSet<Integer>(); dynPeers.put(resourcePeer, resources); } resources.add(i); } } if (dynPeers.size() == 1) { ReplicationGraphNaiveGenerator naiveGen = new ReplicationGraphNaiveGenerator(); return naiveGen.computeReplicationGraph(das, cGraph, backupDegree); } else { // Convert dynamic structure into a static one Set<Integer>[] peers = new TreeSet[dynPeers.size()]; Iterator<Entry<String, Set<Integer>>> it = dynPeers.entrySet().iterator(); for (int i = 0; i < peers.length; ++i) { Entry<String, Set<Integer>> e = it.next(); peers[i] = e.getValue(); } return new ReplicationGraph(replicationGraph(das.length, backupDegree, peers)); } }
From source file:com.google.sites.liberation.util.XmlElement.java
/**
 * Creates a new {@code XmlElement} with the given tag name and no
 * children or attributes.
 *
 * @param elementType tag name of this element; must not be null
 */
public XmlElement(String elementType) {
    Preconditions.checkNotNull(elementType);
    this.elementType = elementType;
    attributes = new TreeMap<String, String>();
    children = new LinkedList<Pair<Object, ChildType>>();
}
From source file:com.trackplus.ddl.DataReader.java
public static void writeDataToSql(DatabaseInfo databaseInfo, String dirName) throws DDLException { LOGGER.info("Exporting SQL data from \"" + databaseInfo.getUrl() + "\" ..."); Map<String, String> info = new TreeMap<String, String>(); java.util.Date d1 = new java.util.Date(); info.put("start", d1.toString()); info.put("driver", databaseInfo.getDriver()); info.put("url", databaseInfo.getUrl()); info.put("user", databaseInfo.getUser()); info.put("user", databaseInfo.getUser()); info.put("usePassword", Boolean.toString(databaseInfo.getPassword() != null)); String databaseType = MetaDataBL.getDatabaseType(databaseInfo.getUrl()); info.put(DATABASE_TYPE, databaseType); Connection connection = getConnection(databaseInfo); //log the database meta data information's logDatabaseMetaDataInfo(databaseInfo, connection); String[] versions = MetaDataBL.getVersions(connection); info.put(SYSTEM_VERSION, versions[0]); info.put(DB_VERSION, versions[1]);/* w w w.j a v a 2s . c om*/ StringValueConverter stringValueConverter = new GenericStringValueConverter(); BufferedWriter writer = createBufferedWriter(dirName + File.separator + FILE_NAME_DATA); BufferedWriter writerUpdate = createBufferedWriter(dirName + File.separator + FILE_NAME_DATA_UPDATE); BufferedWriter writerClean = createBufferedWriter(dirName + File.separator + FILE_NAME_DATA_CLEAN); BufferedWriter writerUpdateClean = createBufferedWriter( dirName + File.separator + FILE_NAME_DATA_UPDATE_CLEAN); BufferedWriter writerBlob = createBufferedWriter(dirName + File.separator + FILE_NAME_BLOB); int idx = 0; String[] tableNames = MetaDataBL.getTableNames(); for (String tableName : tableNames) { LOGGER.debug("Processing table: " + tableName + "...."); int count = getTableData(writer, writerClean, writerUpdate, writerUpdateClean, connection, tableName, stringValueConverter); info.put("_" + tableName, count + ""); LOGGER.debug("Records exported:" + count + "\n"); idx = idx + count; } LOGGER.debug("Processing blob data ...."); int count 
= getBlobTableData(writerBlob, connection); LOGGER.debug(" Blob record exported:" + count + "\n"); info.put("table_BLOB", count + ""); idx = idx + count; try { char dataSeparator = (char) ASCII_DATA_SEPARATOR; writerBlob.write(dataSeparator); writerBlob.newLine(); writerBlob.newLine(); writerBlob.write("--TMSPROJECTEXCHANGE"); writerBlob.newLine(); } catch (IOException e) { LOGGER.error("Error on close blob stream file :" + e.getMessage()); throw new DDLException(e.getMessage(), e); } LOGGER.debug("Processing clob data ...."); count = getClobTableData(writerBlob, connection); LOGGER.debug(" Clob record exported:" + count + "\n"); info.put("table_TMSPROJECTEXCHANGE", count + ""); idx = idx + count; info.put("allData", idx + ""); try { writer.flush(); writer.close(); writerClean.flush(); writerClean.close(); writerUpdate.flush(); writerUpdate.close(); writerUpdateClean.flush(); writerUpdateClean.close(); writerBlob.flush(); writerBlob.close(); } catch (IOException e) { LOGGER.error("Error on close stream file: " + e.getMessage()); throw new DDLException(e.getMessage(), e); } try { connection.close(); } catch (SQLException e) { throw new DDLException(e.getMessage(), e); } java.util.Date d2 = new java.util.Date(); long timeSpend = d2.getTime() - d1.getTime(); info.put("timeSpend", Long.toString(timeSpend)); writeInfoToFile(info, dirName + File.separator + FILE_NAME_INFO); LOGGER.info("Data generated. All records found: " + idx + ". Time spend: " + timeSpend + " ms!"); }