List of usage examples for java.util.HashSet.contains
public boolean contains(Object o)
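Before the project-level examples below, here is a minimal, self-contained sketch of the method's behaviour (class and variable names are illustrative only, not taken from the examples): contains(Object o) returns true when the set holds an element e with o.equals(e), so lookups hinge on the element type's hashCode() and equals() implementations.

import java.util.HashSet;

public class ContainsDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<String>();
        names.add("alice");
        names.add("bob");

        // Membership test: O(1) on average, driven by hashCode()/equals().
        System.out.println(names.contains("alice")); // true
        System.out.println(names.contains("carol")); // false

        // Elements of user-defined types must override equals() and hashCode()
        // consistently, otherwise contains() falls back to reference equality.
    }
}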
From source file:com.linkedin.databus.core.TestDbusEventBufferPersistence.java
@Test
public void testMetaFileCloseMult() throws Exception {
    int maxEventBufferSize = 1144;
    int maxIndividualBufferSize = 500;
    int bufNum = maxEventBufferSize / maxIndividualBufferSize;
    if (maxEventBufferSize % maxIndividualBufferSize > 0)
        bufNum++;

    DbusEventBuffer.StaticConfig config = getConfig(maxEventBufferSize, maxIndividualBufferSize, 100, 500,
            AllocationPolicy.MMAPPED_MEMORY, _mmapDirStr, true);

    // create buffer mult
    DbusEventBufferMult bufMult = createBufferMult(config);

    // Save all the files and validate the meta files.
    bufMult.close();
    for (DbusEventBuffer dbusBuf : bufMult.bufIterable()) {
        File metaFile = new File(_mmapDir, dbusBuf.metaFileName());
        // check that we don't have the files
        Assert.assertTrue(metaFile.exists());
        validateFiles(metaFile, bufNum);
    }
    File[] entries = _mmapDir.listFiles();

    // When we create a new multi-buffer, we should get renamed files as well as new files.
    bufMult = createBufferMult(config);
    entries = _mmapDir.listFiles();
    // Has session dirs and renamed meta files.

    // Create an info file for one buffer.
    DbusEventBuffer buf = bufMult.bufIterable().iterator().next();
    buf.saveBufferMetaInfo(true);
    File infoFile = new File(_mmapDir, buf.metaFileName() + ".info");
    Assert.assertTrue(infoFile.exists());

    // Create a session directory that has one file in it.
    File badSes1 = new File(_mmapDir, DbusEventBuffer.getSessionPrefix() + "m");
    badSes1.mkdir();
    badSes1.deleteOnExit();
    File junkFile = new File(badSes1.getAbsolutePath() + "/junkFile");
    junkFile.createNewFile();
    junkFile.deleteOnExit();

    // Create a directory that is empty
    File badSes2 = new File(_mmapDir, DbusEventBuffer.getSessionPrefix() + "n");
    badSes2.mkdir();
    badSes2.deleteOnExit();

    // Create a good file under mmap directory that we don't want to see removed.
    final String goodFile = "GoodFile";
    File gf = new File(_mmapDir, goodFile);
    gf.createNewFile();

    // Now close the multibuf, and see that the new files are still there.
    // We should have deleted the unused sessions and info files.
    bufMult.close();
    HashSet<String> validEntries = new HashSet<String>(bufNum);
    for (DbusEventBuffer dbusBuf : bufMult.bufIterable()) {
        File metaFile = new File(_mmapDir, dbusBuf.metaFileName());
        // check that we don't have the files
        Assert.assertTrue(metaFile.exists());
        validateFiles(metaFile, bufNum);
        validEntries.add(metaFile.getName());
        DbusEventBufferMetaInfo mi = new DbusEventBufferMetaInfo(metaFile);
        mi.loadMetaInfo();
        validEntries.add(mi.getSessionId());
    }
    validEntries.add(goodFile);

    // Now we should be left with meta files, and session dirs and nothing else.
    entries = _mmapDir.listFiles();
    for (File f : entries) {
        Assert.assertTrue(validEntries.contains(f.getName()));
        validEntries.remove(f.getName());
    }
    Assert.assertTrue(validEntries.isEmpty());

    // And everything else should have moved to the .BAK directory
    entries = _mmapBakDir.listFiles();
    HashMap<String, File> fileHashMap = new HashMap<String, File>(entries.length);
    for (File f : entries) {
        fileHashMap.put(f.getName(), f);
    }
    Assert.assertTrue(fileHashMap.containsKey(badSes1.getName()));
    Assert.assertTrue(fileHashMap.get(badSes1.getName()).isDirectory());
    Assert.assertEquals(fileHashMap.get(badSes1.getName()).listFiles().length, 1);
    Assert.assertEquals(fileHashMap.get(badSes1.getName()).listFiles()[0].getName(), junkFile.getName());
    fileHashMap.remove(badSes1.getName());
    Assert.assertTrue(fileHashMap.containsKey(badSes2.getName()));
    Assert.assertTrue(fileHashMap.get(badSes2.getName()).isDirectory());
    Assert.assertEquals(fileHashMap.get(badSes2.getName()).listFiles().length, 0);
    fileHashMap.remove(badSes2.getName());

    // We should have the renamed meta files in the hash now.
    for (File f : entries) {
        if (f.getName().startsWith(DbusEventBuffer.getMmapMetaInfoFileNamePrefix())) {
            Assert.assertTrue(fileHashMap.containsKey(f.getName()));
            Assert.assertTrue(f.isFile());
            fileHashMap.remove(f.getName());
        }
    }
    Assert.assertTrue(fileHashMap.isEmpty());

    // One more test to make sure we create the BAK directory dynamically if it does not exist.
    FileUtils.deleteDirectory(_mmapBakDir);
    bufMult = createBufferMult(config);
    entries = _mmapDir.listFiles();

    // Create an info file for one buffer.
    buf = bufMult.bufIterable().iterator().next();
    buf.saveBufferMetaInfo(true);
    infoFile = new File(_mmapDir, buf.metaFileName() + ".info");
    Assert.assertTrue(infoFile.exists());

    bufMult.close();
    entries = _mmapBakDir.listFiles();
    fileHashMap = new HashMap<String, File>(entries.length);
    for (File f : entries) {
        fileHashMap.put(f.getName(), f);
    }
    Assert.assertTrue(fileHashMap.containsKey(infoFile.getName()));
    Assert.assertTrue(fileHashMap.get(infoFile.getName()).isFile());
}
From source file:com.ebay.cloud.cms.metadata.mongo.MongoMetadataServiceImpl.java
private Map<String, MetaClass> prepareMetaClassMap(List<MetaClass> metaClasses) {
    HashMap<String, MetaClass> metas = new HashMap<String, MetaClass>();
    HashSet<String> pluralNames = new HashSet<String>();
    for (MetaClass m : metaClasses) {
        String name = m.getName();
        String pluralName = m.getpluralName();
        if (StringUtils.isNullOrEmpty(name)) {
            throw new IllegalMetaClassException("meta class name can not be empty");
        }
        if (metas.containsKey(name) || metas.containsKey(pluralName)) {
            throw new IllegalMetaClassException("duplicate metaClass name in batchUpsert");
        }
        if (pluralNames.contains(name) || pluralNames.contains(pluralName)) {
            throw new IllegalMetaClassException("duplicate metaClass plural name in batchUpsert");
        }
        if (pluralName != null) {
            pluralNames.add(pluralName);
        }
        metas.put(name, m);
    }
    return metas;
}
From source file:com.android.contacts.common.model.ContactLoader.java
/**
 * Loads groups meta-data for all groups associated with all constituent raw contacts'
 * accounts.
 */
private void loadGroupMetaData(Contact result) {
    StringBuilder selection = new StringBuilder();
    ArrayList<String> selectionArgs = new ArrayList<String>();
    final HashSet<AccountKey> accountsSeen = new HashSet<>();
    for (RawContact rawContact : result.getRawContacts()) {
        final String accountName = rawContact.getAccountName();
        final String accountType = rawContact.getAccountTypeString();
        final String dataSet = rawContact.getDataSet();

        final AccountKey accountKey = new AccountKey(accountName, accountType, dataSet);
        if (accountName != null && accountType != null && !accountsSeen.contains(accountKey)) {
            accountsSeen.add(accountKey);
            if (selection.length() != 0) {
                selection.append(" OR ");
            }
            selection.append("(" + Groups.ACCOUNT_NAME + "=? AND " + Groups.ACCOUNT_TYPE + "=?");
            selectionArgs.add(accountName);
            selectionArgs.add(accountType);

            if (dataSet != null) {
                selection.append(" AND " + Groups.DATA_SET + "=?");
                selectionArgs.add(dataSet);
            } else {
                selection.append(" AND " + Groups.DATA_SET + " IS NULL");
            }
            selection.append(")");
        }
    }
    final ImmutableList.Builder<GroupMetaData> groupListBuilder = new ImmutableList.Builder<GroupMetaData>();
    final Cursor cursor = getContext().getContentResolver().query(Groups.CONTENT_URI, GroupQuery.COLUMNS,
            selection.toString(), selectionArgs.toArray(new String[0]), null);
    if (cursor != null) {
        try {
            while (cursor.moveToNext()) {
                final String accountName = cursor.getString(GroupQuery.ACCOUNT_NAME);
                final String accountType = cursor.getString(GroupQuery.ACCOUNT_TYPE);
                final String dataSet = cursor.getString(GroupQuery.DATA_SET);
                final long groupId = cursor.getLong(GroupQuery.ID);
                final String title = cursor.getString(GroupQuery.TITLE);
                final boolean defaultGroup = cursor.isNull(GroupQuery.AUTO_ADD) ? false
                        : cursor.getInt(GroupQuery.AUTO_ADD) != 0;
                final boolean favorites = cursor.isNull(GroupQuery.FAVORITES) ? false
                        : cursor.getInt(GroupQuery.FAVORITES) != 0;

                groupListBuilder.add(new GroupMetaData(accountName, accountType, dataSet, groupId, title,
                        defaultGroup, favorites));
            }
        } finally {
            cursor.close();
        }
    }
    result.setGroupMetaData(groupListBuilder.build());
}
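A side note on the example above: the accountsSeen.contains(accountKey) check only deduplicates accounts because AccountKey defines value-based equals() and hashCode(). A minimal sketch of that requirement, using a hypothetical SimpleKey class rather than the actual AccountKey source:

import java.util.HashSet;
import java.util.Objects;

// Hypothetical key class: without equals()/hashCode(), two logically equal
// keys would be treated as distinct elements and contains() would return false.
final class SimpleKey {
    private final String name;
    private final String type;

    SimpleKey(String name, String type) {
        this.name = name;
        this.type = type;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof SimpleKey)) return false;
        SimpleKey other = (SimpleKey) o;
        return Objects.equals(name, other.name) && Objects.equals(type, other.type);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name, type);
    }
}

class SimpleKeyDemo {
    public static void main(String[] args) {
        HashSet<SimpleKey> seen = new HashSet<SimpleKey>();
        seen.add(new SimpleKey("alice", "com.example"));
        // true only because SimpleKey defines value equality:
        System.out.println(seen.contains(new SimpleKey("alice", "com.example")));
    }
}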
From source file:com.remobile.file.FileUtils.java
protected void registerExtraFileSystems(String[] filesystems, HashMap<String, String> availableFileSystems) {
    HashSet<String> installedFileSystems = new HashSet<String>();

    /* Register filesystems in order */
    for (String fsName : filesystems) {
        if (!installedFileSystems.contains(fsName)) {
            String fsRoot = availableFileSystems.get(fsName);
            if (fsRoot != null) {
                File newRoot = new File(fsRoot);
                if (newRoot.mkdirs() || newRoot.isDirectory()) {
                    registerFilesystem(
                            new LocalFilesystem(fsName, this.getContext(), this.getResourceApi(), newRoot));
                    installedFileSystems.add(fsName);
                } else {
                    Log.d(LOG_TAG, "Unable to create root dir for filesystem \"" + fsName + "\", skipping");
                }
            } else {
                Log.d(LOG_TAG, "Unrecognized extra filesystem identifier: " + fsName);
            }
        }
    }
}
From source file:ch.randelshofer.cubetwister.HTMLExporter.java
private void init() throws IOException {
    // Initialize the progress observer
    // --------------------------------
    p.setMaximum(countHTMLTemplates(p) + 1);
    p.setIndeterminate(false);

    // Determine which virtual cube kinds need to be exported
    virtualCubeKinds = new HashSet<CubeKind>();
    for (EntityModel node : model.getCubes().getChildren()) {
        CubeModel cm = (CubeModel) node;
        virtualCubeKinds.add(cm.getKind());
    }
    playerCubeKinds = new HashSet<CubeKind>();
    for (EntityModel node : model.getCubes().getChildren()) {
        ScriptModel sm = (ScriptModel) node;
        playerCubeKinds.add(sm.getCubeModel().getKind());
    }

    // Create unique ID's for all objects
    // ----------------------------------
    ids = new HashMap<EntityModel, String>();

    // Holds all used ID's. To prevent name collisions on
    // file systems, which are not case sensitive, we store
    // only lower-case ID's in this set.
    HashSet<String> usedIDs = new HashSet<String>();

    for (EntityModel node : model.getRoot().preorderIterable()) {
        if (node instanceof InfoModel) {
            InfoModel im = (InfoModel) node;
            String baseId = toID(im.getName());
            String id = baseId;
            for (int i = 1; usedIDs.contains(id.toLowerCase()); i++) {
                id = baseId + "_" + i;
            }
            usedIDs.add(id.toLowerCase());
            ids.put(im, id);
        }
    }

    // Create top level of data on placeholder stack
    stack = new Stack<StackEntry>();
    StackEntry entry = new StackEntry();
    stack.push(entry);

    // Put labels
    DataMap data = entry.data;
    data.put("software.title", "CubeTwister");
    data.put("software.copyright", "Copyright by Werner Randelshofer. All Rights Reserved.");
    data.put("software.version", Main.getVersion());
    data.put("cubes.title", "Cubes");
    data.put("notations.title", "Notations");
    data.put("scripts.title", "Scripts");
    data.put("notes.title", "Notes");

    // Put information about the document
    data.put("document.name", documentName);
    data.put("document.copyright", "Copyright by Werner Randelshofer. All Rights Reserved.");
    data.put("cube.count", model.getCubes().getChildCount());
    data.put("notation.count", model.getNotations().getChildCount());
    data.put("script.count", model.getScripts().getChildCount());
    data.put("note.count", model.getTexts().getChildCount());

    // Push default cube and default notation on the placeholder Stack
    putCubeData(model.getDefaultCube(), "");
    putNotationData(model.getDefaultNotation(model.getDefaultCube().getLayerCount()), "");
}
From source file:com.yahoo.dba.perf.myperf.springmvc.VardiffController.java
@Override
protected ModelAndView handleRequestImpl(HttpServletRequest req, HttpServletResponse resp) throws Exception {
    int status = Constants.STATUS_OK;
    String message = "OK";
    logger.info("receive url " + req.getQueryString());

    QueryParameters qps = null;
    DBInstanceInfo dbinfo = null;
    DBInstanceInfo dbinfo2 = null;
    DBConnectionWrapper connWrapper = null;
    DBConnectionWrapper connWrapper2 = null;

    qps = WebAppUtil.parseRequestParameter(req);
    qps.setSql("mysql_global_variables");
    qps.getSqlParams().put("p_1", "");
    String group2 = req.getParameter("p_1");
    String host2 = req.getParameter("p_2");

    // validation input
    String validation = qps.validate();
    if (validation == null || validation.isEmpty()) {
        // do we have such query?
        try {
            QueryInputValidator.validateSql(this.frameworkContext.getSqlManager(), qps);
        } catch (Exception ex) {
            validation = ex.getMessage();
        }
    }
    if (validation != null && !validation.isEmpty())
        return this.respondFailure(validation, req);

    dbinfo = this.frameworkContext.getDbInfoManager().findDB(qps.getGroup(), qps.getHost());
    if (dbinfo == null)
        return this.respondFailure("Cannot find record for DB (" + qps.getGroup() + ", " + qps.getHost() + ")",
                req);
    dbinfo2 = this.frameworkContext.getDbInfoManager().findDB(group2, host2);
    if (dbinfo2 == null)
        return this.respondFailure("Cannot find record for DB (" + group2 + ", " + host2 + ")", req);

    try {
        connWrapper = WebAppUtil.getDBConnection(req, this.frameworkContext, dbinfo);
        if (connWrapper == null) {
            status = Constants.STATUS_BAD;
            message = "failed to connect to target db (" + dbinfo + ")";
        } else {
            connWrapper2 = WebAppUtil.getDBConnection(req, this.frameworkContext, dbinfo2);
            if (connWrapper2 == null) {
                status = Constants.STATUS_BAD;
                message = "failed to connect to target db (" + dbinfo2 + ")";
            }
        }
    } catch (Throwable th) {
        logger.log(Level.SEVERE, "Exception", th);
        status = Constants.STATUS_BAD;
        message = "Failed to get connection to target db (" + dbinfo + "): " + th.getMessage();
    }
    if (status == -1)
        return this.respondFailure(message, req);

    // when we reach here, at least we have valid query and can connect to db
    WebAppUtil.storeLastDbInfoRequest(qps.getGroup(), qps.getHost(), req);

    ModelAndView mv = null;
    ResultList rList = null;
    ResultList rList2 = null;
    try {
        rList = this.frameworkContext.getQueryEngine().executeQueryGeneric(qps, connWrapper, qps.getMaxRows());
        rList2 = this.frameworkContext.getQueryEngine().executeQueryGeneric(qps, connWrapper2, qps.getMaxRows());
        logger.info("Done query " + qps.getSql() + " with " + (rList != null ? rList.getRows().size() : 0)
                + " records, " + (rList2 != null ? rList2.getRows().size() : 0) + " records");
        WebAppUtil.closeDBConnection(req, connWrapper, false);
        WebAppUtil.closeDBConnection(req, connWrapper2, false);
    } catch (Throwable ex) {
        logger.log(Level.SEVERE, "Exception", ex);
        if (ex instanceof SQLException) {
            SQLException sqlEx = SQLException.class.cast(ex);
            String msg = ex.getMessage();
            logger.info(sqlEx.getSQLState() + ", " + sqlEx.getErrorCode() + ", " + msg);
            // check if the connection is still good
            if (!DBUtils.checkConnection(connWrapper.getConnection())) {
                WebAppUtil.closeDBConnection(req, connWrapper, true);
            } else
                WebAppUtil.closeDBConnection(req, connWrapper, true);
            if (!DBUtils.checkConnection(connWrapper2.getConnection())) {
                WebAppUtil.closeDBConnection(req, connWrapper2, true);
            } else
                WebAppUtil.closeDBConnection(req, connWrapper2, true);
        } else {
            WebAppUtil.closeDBConnection(req, connWrapper, false);
            WebAppUtil.closeDBConnection(req, connWrapper2, false);
        }
        status = Constants.STATUS_BAD;
        message = "Exception: " + ex.getMessage();
    }
    if (status == Constants.STATUS_BAD)
        return this.respondFailure(message, req);

    HashMap<String, String> param1 = new HashMap<String, String>(rList.getRows().size());
    HashMap<String, String> param2 = new HashMap<String, String>(rList2.getRows().size());
    for (ResultRow r : rList.getRows()) {
        param1.put(r.getColumns().get(0), r.getColumns().get(1));
    }
    for (ResultRow r : rList2.getRows()) {
        param2.put(r.getColumns().get(0), r.getColumns().get(1));
    }

    ColumnDescriptor desc = new ColumnDescriptor();
    desc.addColumn("VARIABLE_NAME", false, 1);
    desc.addColumn("DB1", false, 2);
    desc.addColumn("DB2", false, 3);

    ResultList fList = new ResultList();
    fList.setColumnDescriptor(desc);
    HashSet<String> diffSet = new HashSet<String>();
    for (Map.Entry<String, String> e : param1.entrySet()) {
        String k = e.getKey();
        String v = e.getValue();
        if (v != null)
            v = v.trim();
        else
            v = "";
        String v2 = null;
        if (param2.containsKey(k))
            v2 = param2.get(k);
        if (v2 != null)
            v2 = v2.trim();
        else
            v2 = "";
        if (!v.equals(v2)) {
            ResultRow row = new ResultRow();
            List<String> cols = new ArrayList<String>();
            cols.add(k);
            cols.add(v);
            cols.add(v2);
            row.setColumns(cols);
            row.setColumnDescriptor(desc);
            fList.addRow(row);
            diffSet.add(k);
        }
    }
    for (Map.Entry<String, String> e : param2.entrySet()) {
        String k = e.getKey();
        String v = e.getValue();
        if (v == null || v.isEmpty())
            continue;
        if (diffSet.contains(k) || param1.containsKey(k))
            continue;
        ResultRow row = new ResultRow();
        List<String> cols = new ArrayList<String>();
        cols.add(k);
        cols.add("");
        cols.add(v);
        row.setColumns(cols);
        row.setColumnDescriptor(desc);
        fList.addRow(row);
    }

    mv = new ModelAndView(this.jsonView);
    if (req.getParameter("callback") != null && req.getParameter("callback").trim().length() > 0)
        mv.addObject("callback", req.getParameter("callback")); // YUI datasource binding
    mv.addObject("json_result", ResultListUtil.toJSONString(fList, qps, status, message));
    return mv;
}
From source file:com.ibm.bi.dml.runtime.controlprogram.parfor.ProgramConverter.java
/**
 *
 * @param pbs
 * @return
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 */
public static String rSerializeFunctionProgramBlocks(HashMap<String, FunctionProgramBlock> pbs,
        HashSet<String> cand) throws DMLRuntimeException, DMLUnsupportedOperationException {
    StringBuilder sb = new StringBuilder();

    int count = 0;
    for (Entry<String, FunctionProgramBlock> pb : pbs.entrySet()) {
        if (!cand.contains(pb.getKey())) // skip function not included in the parfor body
            continue;

        if (count > 0) {
            sb.append(ELEMENT_DELIM);
            sb.append(NEWLINE);
        }
        sb.append(pb.getKey());
        sb.append(KEY_VALUE_DELIM);
        sb.append(rSerializeProgramBlock(pb.getValue()));
        count++;
    }
    sb.append(NEWLINE);
    return sb.toString();
}
From source file:com.ramadda.plugins.investigation.PhoneDbTypeHandler.java
/**
 * _more_
 *
 * @param request _more_
 * @param entry _more_
 * @param valueList _more_
 * @param nodes _more_
 * @param links _more_
 *
 * @throws Exception _more_
 */
private void getNodesAndLinks(Request request, Entry entry, List<Object[]> valueList, List<String> nodes,
        List<String> links) throws Exception {
    GraphOutputHandler goh = getWikiManager().getGraphOutputHandler();
    String iconUrl = getEntryIcon(request, entry);
    HashSet<String> seen = new HashSet<String>();
    Hashtable<String, Integer> count = new Hashtable<String, Integer>();
    for (int i = 0; i < valueList.size(); i++) {
        Object[] values = valueList.get(i);
        String fromNumber = fromNumberColumn.getString(values);
        String toNumber = toNumberColumn.getString(values);
        String entryUrl = request.entryUrl(getRepository().URL_ENTRY_SHOW, entry);
        StringBuilder row = new StringBuilder();
        if (!seen.contains(fromNumber)) {
            String searchUrl = HtmlUtils.url(request.makeUrl(getRepository().URL_ENTRY_SHOW),
                    new String[] { ARG_ENTRYID, entry.getId(), ARG_DB_SEARCH, "true",
                            getSearchUrlArgument(fromNumberColumn), fromNumber });
            nodes.add(Json.map(new String[] { goh.ATTR_NAME, formatNumber(fromNumber), goh.ATTR_LABEL,
                    fromNameColumn.getString(values), goh.ATTR_NODEID, fromNumber, goh.ATTR_URL, searchUrl,
                    goh.ATTR_GRAPHURL, getDataUrl(request, entry, fromNumber), goh.ATTR_ICON, iconUrl }, true));
            seen.add(fromNumber);
        }
        if (!seen.contains(toNumber)) {
            String searchUrl = HtmlUtils.url(request.makeUrl(getRepository().URL_ENTRY_SHOW),
                    new String[] { ARG_ENTRYID, entry.getId(), ARG_DB_SEARCH, "true",
                            getSearchUrlArgument(fromNumberColumn), fromNumber });
            nodes.add(Json.map(new String[] { goh.ATTR_NAME, formatNumber(toNumber), goh.ATTR_LABEL,
                    toNameColumn.getString(values), goh.ATTR_NODEID, toNumber, goh.ATTR_URL, searchUrl,
                    goh.ATTR_GRAPHURL, getDataUrl(request, entry, toNumber), goh.ATTR_ICON, iconUrl }, true));
            seen.add(toNumber);
        }
        links.add(Json.map(new String[] { goh.ATTR_SOURCE_ID, fromNumber, goh.ATTR_TARGET_ID, toNumber,
                goh.ATTR_TITLE, "" }, true));
    }
}
From source file:com.linkedin.databus.core.TestDbusEventBufferMult.java
@Test
public void testMultiPPartionStreamStats() throws Exception {
    createBufMult();

    PhysicalPartition[] p = { _pConfigs[0].getPhysicalPartition(), _pConfigs[1].getPhysicalPartition(),
            _pConfigs[2].getPhysicalPartition() };

    // generate a bunch of windows for 3 partitions
    int windowsNum = 10;
    for (int i = 1; i <= windowsNum; ++i) {
        DbusEventBufferAppendable buf = _eventBufferMult.getDbusEventBufferAppendable(p[0]);
        buf.startEvents();
        byte[] schema = "abcdefghijklmnop".getBytes(Charset.defaultCharset());
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 100, (short) 0,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i, null);

        buf = _eventBufferMult.getDbusEventBufferAppendable(p[1]);
        buf.startEvents();
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(2), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i + 1, null);

        buf = _eventBufferMult.getDbusEventBufferAppendable(p[2]);
        buf.startEvents();
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(2), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(3), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i + 2, null);
    }

    String[] pnames = new String[p.length];
    int count = 0;
    for (PhysicalPartition ip : p) {
        pnames[count++] = ip.toSimpleString();
    }

    StatsCollectors<DbusEventsStatisticsCollector> statsColl = createStats(pnames);

    PhysicalPartitionKey[] pkeys = { new PhysicalPartitionKey(p[0]), new PhysicalPartitionKey(p[1]),
            new PhysicalPartitionKey(p[2]) };

    CheckpointMult cpMult = new CheckpointMult();
    for (int i = 0; i < 3; ++i) {
        Checkpoint cp = new Checkpoint();
        cp.setFlexible();
        cp.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
        cpMult.addCheckpoint(p[i], cp);
    }

    DbusEventBufferBatchReadable reader = _eventBufferMult.getDbusEventBufferBatchReadable(cpMult,
            Arrays.asList(pkeys), statsColl);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    WritableByteChannel writeChannel = Channels.newChannel(baos);
    reader.streamEvents(false, 1000000, writeChannel, Encoding.BINARY, new AllowAllDbusFilter());
    writeChannel.close();
    baos.close();

    // make sure we got the physical partition names right
    List<String> ppartNames = statsColl.getStatsCollectorKeys();
    assertEquals(ppartNames.size(), 3);
    HashSet<String> expectedPPartNames = new HashSet<String>(
            Arrays.asList(p[0].toSimpleString(), p[1].toSimpleString(), p[2].toSimpleString()));
    for (String ppartName : ppartNames) {
        assertTrue(expectedPPartNames.contains(ppartName));
    }

    // verify event counts per partition
    DbusEventsTotalStats[] ppartStats = { statsColl.getStatsCollector(p[0].toSimpleString()).getTotalStats(),
            statsColl.getStatsCollector(p[1].toSimpleString()).getTotalStats(),
            statsColl.getStatsCollector(p[2].toSimpleString()).getTotalStats() };
    assertEquals(ppartStats[0].getNumDataEvents(), windowsNum);
    assertEquals(ppartStats[1].getNumDataEvents(), windowsNum * 2);
    assertEquals(ppartStats[2].getNumDataEvents(), windowsNum * 3);
    assertEquals(ppartStats[0].getNumSysEvents(), windowsNum);
    assertEquals(ppartStats[1].getNumSysEvents(), windowsNum);
    assertEquals(ppartStats[2].getNumSysEvents(), windowsNum);

    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumDataEvents(), windowsNum * (1 + 2 + 3));
    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumSysEvents(), windowsNum * 3);
    assertEquals(statsColl.getStatsCollector().getTotalStats().getMaxTimeLag(), Math.max(
            ppartStats[0].getTimeLag(), Math.max(ppartStats[1].getTimeLag(), ppartStats[2].getTimeLag())));
    assertEquals(statsColl.getStatsCollector().getTotalStats().getMinTimeLag(), Math.min(
            ppartStats[0].getTimeLag(), Math.min(ppartStats[1].getTimeLag(), ppartStats[2].getTimeLag())));
}
From source file:com.globalsight.connector.blaise.BlaiseCreateJobHandler.java
/**
 * Upload the job attachment file to server, and store it in session for
 * later use.
 */
@ActionHandler(action = "checkTargetLocalesUrl", formClass = "")
public void checkTargetLocalesUrl(HttpServletRequest request, HttpServletResponse response, Object form)
        throws Exception {
    List<TranslationInboxEntryVo> selectedEntries = new ArrayList<TranslationInboxEntryVo>();
    String entryIds = request.getParameter("entryIds");
    if (entryIds != null) {
        for (String entryId : entryIds.split(",")) {
            try {
                long id = Long.parseLong(entryId);
                TranslationInboxEntryVo vo = currPageEntryMap.get(id);
                if (vo != null) {
                    selectedEntries.add(vo);
                }
            } catch (Exception e) {
                logger.error(e);
            }
        }
    }

    HashSet<Locale> l10nLocales = new HashSet<Locale>();
    String l10Nid = request.getParameter("l10Nid");
    if (StringUtils.isNotEmpty(l10Nid)) {
        L10nProfile l10nP = ServerProxy.getProjectHandler().getL10nProfile(Long.parseLong(l10Nid));
        for (Object obj : l10nP.getWorkflowTemplateInfos()) {
            WorkflowTemplateInfo wti = (WorkflowTemplateInfo) obj;
            l10nLocales.add(wti.getTargetLocale().getLocale());
        }
    }

    Iterator<TranslationInboxEntryVo> it = selectedEntries.iterator();
    while (it.hasNext()) {
        if (l10nLocales.contains(it.next().getTargetLocale())) {
            it.remove();
        }
    }

    String result = "";
    StringBuilder ids = new StringBuilder();
    if (selectedEntries.size() > 0) {
        for (TranslationInboxEntryVo vo : selectedEntries) {
            ids.append(vo.getId()).append(",");
        }
        result = ids.toString();
        result = result.substring(0, result.length() - 1);
    }

    ServletOutputStream out = response.getOutputStream();
    out.write(result.getBytes("UTF-8"));
    pageReturn();
}