List of usage examples for java.util HashMap size
int size()
Returns the number of key-value mappings in this map.
To view the source code for java.util HashMap size, click the Source Link.
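Before the project examples below, a minimal self-contained demonstration of size() (standard JDK behavior; the class name is illustrative):

import java.util.HashMap;
import java.util.Map;

public class HashMapSizeDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        System.out.println(counts.size()); // 0: a new map is empty

        counts.put("a", 1);
        counts.put("b", 2);
        counts.put("a", 3); // overwrites the existing mapping for "a"
        System.out.println(counts.size()); // 2: re-putting a key does not grow the map

        counts.remove("b");
        System.out.println(counts.size()); // 1
    }
}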
From source file:com.speed.ob.api.ClassStore.java
public void dump(File in, File out, Config config) throws IOException {
    if (in.isDirectory()) {
        for (ClassNode node : nodes()) {
            String[] parts = node.name.split("\\.");
            String dirName = node.name.substring(0, node.name.lastIndexOf("."));
            dirName = dirName.replace(".", "/");
            File dir = new File(out, dirName);
            if (!dir.exists()) {
                if (!dir.mkdirs())
                    throw new IOException("Could not make output dir: " + dir.getAbsolutePath());
            }
            ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS);
            node.accept(writer);
            byte[] data = writer.toByteArray();
            FileOutputStream fOut = new FileOutputStream(
                    new File(dir, node.name.substring(node.name.lastIndexOf(".") + 1)));
            fOut.write(data);
            fOut.flush();
            fOut.close();
        }
    } else if (in.getName().endsWith(".jar")) {
        File output = new File(out, in.getName());
        JarFile jf = new JarFile(in);
        HashMap<JarEntry, Object> existingData = new HashMap<>();
        if (output.exists()) {
            try {
                JarInputStream jarIn = new JarInputStream(new FileInputStream(output));
                JarEntry entry;
                while ((entry = jarIn.getNextJarEntry()) != null) {
                    if (!entry.isDirectory()) {
                        byte[] data = IOUtils.toByteArray(jarIn);
                        existingData.put(entry, data);
                        jarIn.closeEntry();
                    }
                }
                jarIn.close();
            } catch (IOException e) {
                Logger.getLogger(this.getClass().getName()).log(Level.SEVERE,
                        "Could not read existing output file, overwriting", e);
            }
        }
        FileOutputStream fout = new FileOutputStream(output);
        Manifest manifest = null;
        if (jf.getManifest() != null) {
            manifest = jf.getManifest();
            if (!config.getBoolean("ClassNameTransform.keep_packages")
                    && config.getBoolean("ClassNameTransform.exclude_mains")) {
                manifest = new Manifest(manifest);
                if (manifest.getMainAttributes().getValue("Main-Class") != null) {
                    String manifestName = manifest.getMainAttributes().getValue("Main-Class");
                    if (manifestName.contains(".")) {
                        manifestName = manifestName.substring(manifestName.lastIndexOf(".") + 1);
                        manifest.getMainAttributes().putValue("Main-Class", manifestName);
                    }
                }
            }
        }
        jf.close();
        JarOutputStream jarOut = manifest == null ? new JarOutputStream(fout)
                : new JarOutputStream(fout, manifest);
        Logger.getLogger(getClass().getName()).fine("Restoring " + existingData.size() + " existing files");
        if (!existingData.isEmpty()) {
            for (Map.Entry<JarEntry, Object> entry : existingData.entrySet()) {
                Logger.getLogger(getClass().getName()).fine("Restoring " + entry.getKey().getName());
                jarOut.putNextEntry(entry.getKey());
                jarOut.write((byte[]) entry.getValue());
                jarOut.closeEntry();
            }
        }
        for (ClassNode node : nodes()) {
            ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS);
            node.accept(writer);
            byte[] data = writer.toByteArray();
            int index = node.name.lastIndexOf("/");
            String fileName;
            if (index > 0) {
                fileName = node.name.substring(0, index + 1).replace(".", "/");
                fileName += node.name.substring(index + 1).concat(".class");
            } else {
                fileName = node.name.concat(".class");
            }
            JarEntry entry = new JarEntry(fileName);
            jarOut.putNextEntry(entry);
            jarOut.write(data);
            jarOut.closeEntry();
        }
        jarOut.close();
    } else {
        if (nodes().size() == 1) {
            File outputFile = new File(out, in.getName());
            ClassNode node = nodes().iterator().next();
            ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS);
            node.accept(writer); // visit the node so the writer actually holds the class bytes before serializing
            byte[] data = writer.toByteArray();
            FileOutputStream stream = new FileOutputStream(outputFile);
            stream.write(data);
            stream.close();
        }
    }
}
From source file:com.wildex999.schematicbuilder.schematic.SchematicLoader.java
public static Schematic loadSchematic(NBTTagCompound tagCompound, HashMap<Short, MutableInt> blockCount)
        throws ExceptionLoad {
    boolean extraData = false;
    Schematic schematic = loadSchematicMeta(tagCompound);
    int width = schematic.getWidth();
    int height = schematic.getHeight();
    int length = schematic.getLength();

    byte blocks[] = tagCompound.getByteArray(NBT_BLOCKS);
    byte metaData[] = tagCompound.getByteArray(NBT_DATA);
    if (blocks.length < (width * height * length))
        throw new ExceptionInvalid("Not enough blocks provided to cover the defined size! Got " + blocks.length
                + " blocks, with size: " + (width * height * length) + "." + " Width: " + width + " Height: "
                + height + " Length: " + length);

    byte extraBlocks[] = null;
    byte extraBlocksNibble[] = null;
    if (tagCompound.hasKey(NBT_ADD_BLOCKS)) {
        extraData = true;
        extraBlocksNibble = tagCompound.getByteArray(NBT_ADD_BLOCKS);
        if (extraBlocksNibble.length * 2 < (width * height * length))
            throw new ExceptionInvalid("Not enough extra Block data provided to cover the defined size! Got "
                    + extraBlocksNibble.length * 2 + " blocks, with size: " + (width * height * length) + "."
                    + " Width: " + width + " Height: " + height + " Length: " + length);
        // This one has been packed, so we have to spread it to 2 bytes, with 4 bits per byte
        extraBlocks = new byte[extraBlocksNibble.length * 2];
        for (int i = 0; i < extraBlocksNibble.length; i++) {
            extraBlocks[i * 2] = (byte) ((extraBlocksNibble[i] >> 4) & 0xF);
            extraBlocks[i * 2 + 1] = (byte) (extraBlocksNibble[i] & 0xF);
        }
    } else if (tagCompound.hasKey(NBT_ADD_BLOCKS_SCHEMATICA)) {
        extraData = true;
        // This one is already in the format of 4 bits per byte (but uses more space when stored)
        extraBlocks = tagCompound.getByteArray(NBT_ADD_BLOCKS_SCHEMATICA);
    }

    // Read mapping for name to id if included
    HashMap<Integer, String> nameMap = new HashMap<Integer, String>();
    if (tagCompound.hasKey(NBT_MAPPING_SCHEMATICA)) {
        NBTTagCompound mapping = tagCompound.getCompoundTag(NBT_MAPPING_SCHEMATICA);
        Set<String> names = mapping.func_150296_c();
        if (ModSchematicBuilder.debug)
            ModLog.logger.info("Loading name mapping: ");
        for (String name : names) {
            int schematicBlockId = mapping.getInteger(name);
            int serverBlockId = BlockRegistry.getId(name);
            if (ModSchematicBuilder.debug)
                ModLog.logger.info("Name Map(Schematic -> Server): " + name + ": " + schematicBlockId + " -> "
                        + serverBlockId);
            schematic.addSchematicMap((short) schematicBlockId, (byte) 0, name, (short) serverBlockId, (byte) 0);
            nameMap.put(schematicBlockId, name);
        }
    }

    // TODO: Read TileEntities
    // TODO: Read Entities

    // Air counting
    MutableInt airCount = null;
    if (blockCount != null) {
        airCount = blockCount.get((short) 0); // cast required: a plain 0 autoboxes to Integer and never matches a Short key
        if (airCount == null) {
            airCount = new MutableInt(0);
            blockCount.put((short) 0, airCount);
        }
    }

    // Store blocks
    for (int x = 0; x < width; x++) {
        for (int y = 0; y < height; y++) {
            for (int z = 0; z < length; z++) {
                int index = x + (y * length + z) * width;
                int blockID = blocks[index] & 0xFF;
                if (extraData)
                    blockID = blockID | ((extraBlocks[index] & 0xFF) << 8);
                byte meta = (byte) (metaData[index] & 0xF);

                // Create a mapping for this BlockID & meta if it does not exist
                SchematicMap map = schematic.getSchematicMap((short) blockID, meta, false);
                if (map == null) {
                    // Try to get base block id and copy that
                    map = schematic.getSchematicMap((short) blockID, meta, true);
                    if (map == null) {
                        Block block = BlockRegistry.getRaw(blockID); // Raw will return null if not found (instead of default)
                        short serverBlockId;
                        String blockName = null;
                        if (block != null) {
                            serverBlockId = (short) blockID;
                            blockName = BlockRegistry.getNameForObject(block);
                        } else
                            serverBlockId = -1;
                        map = schematic.addSchematicMap((short) blockID, meta,
                                nameMap.size() > 0 ? nameMap.get(blockID) : blockName, serverBlockId, meta);
                    } else
                        map = schematic.addSchematicMap((short) blockID, meta, map.schematicBlockName,
                                map.blockId, meta);
                }

                schematic.setBlock(x, y, z, (short) blockID, meta);

                if (blockCount != null) {
                    if (blockID == 0) { // Skip lookup for air
                        airCount.increment();
                        continue;
                    }
                    if (blockID < 0 || blockID >= SchematicLoader.maxBlockId)
                        throw new ExceptionInvalid("Invalid block ID: " + blockID + ", is above max Block ID: "
                                + SchematicLoader.maxBlockId + ". Schematic might be invalid!");

                    short blockIndex = (short) ((blockID << 4) | meta);
                    MutableInt count = blockCount.get(blockIndex);
                    if (count != null)
                        count.increment();
                    else
                        blockCount.put(blockIndex, new MutableInt(1));
                }
            }
        }
    }
    return schematic;
}
From source file:data.services.ParseBaseService.java
private void updatePropertyNames() throws SQLException, ClassNotFoundException, Exception {
    List<Car> carList = carDao.getAllAsc();
    HashMap<Long, Car> ourOldIdCarMap = new HashMap();
    for (Car car : carList) {
        ourOldIdCarMap.put(car.getCmqId(), car);
    }
    List<CarProperty> fullCPList = carPropertyDao.getAllAsc();
    HashMap<Long, CarProperty> ourOldIdCpMap = new HashMap();
    for (CarProperty cp : fullCPList) {
        ourOldIdCpMap.put(cp.getOldId(), cp);
    }
    HashMap<Long, HashMap<Long, PropertyName>> newInfoCarMap = new HashMap();
    List<PropertyName> pnListForSave = new ArrayList();
    List<PropertyName> pnListForUpdate = new ArrayList();
    List<PropertyName> pnListForDelete = new ArrayList();
    ResultSet resSet = getFromQutoBase(
            "SELECT link.*,cpv.* FROM car_modification_property_value_link link"
                    + " LEFT JOIN car_property_value cpv ON link.car_property_value_id=cpv.id"
                    + " LEFT JOIN car_modification cm ON link.car_modification_id=cm.id"
                    + " WHERE cm.usage='ad_archive_catalog'");
    // HashMap<Long, ArrayList<Feature>> newFeatureInfo = new HashMap();
    while (resSet.next()) {
        Long carOldId = resSet.getLong("car_modification_id");
        HashMap<Long, PropertyName> newInfoPNMap = newInfoCarMap.get(carOldId);
        if (newInfoPNMap == null) {
            newInfoPNMap = new HashMap();
        }
        PropertyName newPn = new PropertyName();
        Long oldcpId = resSet.getLong("car_property_id");
        String strVal = StringAdapter.getString(resSet.getString("value_string")).trim();
        Double numVal = resSet.getDouble("value_number");
        Long oldPnId = resSet.getLong("id");
        String pnVal = strVal;
        if (pnVal.equals("")) {
            pnVal = StringAdapter.getString(numVal).replace(".", ",");
        }
        newPn.setOldValueId(oldPnId);
        newPn.setPropertyNameValue(pnVal);
        newPn.setParamValue(numVal);
        newInfoPNMap.put(oldcpId, newPn);
        newInfoCarMap.put(carOldId, newInfoPNMap);
    }
    for (Long carOldId : ourOldIdCarMap.keySet()) {
        try {
            Car car = ourOldIdCarMap.get(carOldId);
            List<PropertyName> oldPnList = car.getPropertyNames();
            HashMap<Long, PropertyName> oldPnMap = new HashMap();
            if (oldPnList == null) {
                oldPnList = new ArrayList();
            }
            HashMap<Long, PropertyName> newInfoPNMap = newInfoCarMap.get(carOldId);
            if (newInfoPNMap == null) { // guard: a car with no rows in the result set would otherwise NPE below
                newInfoPNMap = new HashMap();
            }
            // int existingPnSize = 0;
            for (PropertyName pn : oldPnList) {
                Long oldcpId = pn.getCarProperty().getOldId();
                oldPnMap.put(oldcpId, pn);
                /*
                PropertyName newPn = newInfoPNMap.get(oldcpId);
                if (newPn != null) {
                    pn.setOldValueId(newPn.getOldValueId());
                    pn.setParamValue(newPn.getParamValue());
                    pn.setPropertyNameValue(newPn.getPropertyNameValue());
                    propertyNameDao.update(pn);
                    existingPnSize++;
                } else {
                    propertyNameDao.delete(pn);
                }
                */
            }
            if (oldPnMap.size() != oldPnList.size()) {
                // throw new Exception("Car " + car.getCarId() + ": duplicate property mappings!");
                addError("Car " + car.getCarId() + ": duplicate property mappings detected!");
            }
            for (Long oldcpId : newInfoPNMap.keySet()) {
                PropertyName newPn = newInfoPNMap.get(oldcpId);
                PropertyName oldPn = oldPnMap.get(oldcpId);
                if (oldPn == null) {
                    oldPn = newPn;
                    oldPn.setCar(car);
                    oldPn.setAudial((long) 0);
                    oldPn.setVisual((long) 0);
                    oldPn.setKinestet((long) 0);
                    oldPn.setCarProperty(ourOldIdCpMap.get(oldcpId));
                    oldPn.setPercentValue((long) 0);
                    if (validate(oldPn, "could not save property name: auto_quto_id=" + car.getCmqId()
                            + ", pnv_quto_id=" + newPn.getOldValueId() + ", cp_quto_id=" + oldcpId + "; ")) {
                        pnListForSave.add(oldPn);
                    }
                } else {
                    oldPn.setParamValue(newPn.getParamValue());
                    oldPn.setPropertyNameValue(newPn.getPropertyNameValue());
                    oldPn.setOldValueId(newPn.getOldValueId());
                    if (validate(oldPn, "could not update property name: auto_quto_id=" + car.getCmqId()
                            + ", pnv_quto_id=" + newPn.getOldValueId() + ", cp_quto_id=" + oldcpId + "; ")) {
                        pnListForUpdate.add(oldPn);
                    }
                }
            }
            for (Long oldcpId : oldPnMap.keySet()) {
                if (newInfoPNMap.get(oldcpId) == null) {
                    pnListForDelete.add(oldPnMap.get(oldcpId));
                }
            }
        } catch (Exception e) {
            throw new Exception("error while updating properties for quto_car_id:" + carOldId + "; " + e);
        }
    }
    int s = 0;
    int u = 0;
    int d = 0;
    for (PropertyName pn : pnListForSave) {
        propertyNameDao.save(pn);
        s++;
    }
    for (PropertyName pn : pnListForUpdate) {
        propertyNameDao.update(pn);
        u++;
    }
    for (PropertyName pn : pnListForDelete) {
        propertyNameDao.delete(pn);
        d++;
    }
    addError("Property names: " + s + " saved, " + u + " updated, " + d + " deleted.");
}
From source file:com.splicemachine.derby.impl.sql.execute.actions.DDLConstantOperation.java
/**
 * Adjust dependencies of a table on ANSI UDTs. We only add one dependency
 * between a table and a UDT. If the table already depends on the UDT, we don't add
 * a redundant dependency.
 */
protected void adjustUDTDependencies(Activation activation, ColumnInfo[] columnInfos, boolean dropWholeTable)
        throws StandardException {
    if ((!dropWholeTable) && (columnInfos == null)) {
        return;
    }

    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    TransactionController tc = lcc.getTransactionExecute();
    DataDictionary dd = lcc.getDataDictionary();
    TableDescriptor td = activation.getDDLTableDescriptor();

    int changedColumnCount = columnInfos == null ? 0 : columnInfos.length;
    HashMap addUdtMap = new HashMap();
    HashMap dropUdtMap = new HashMap();
    HashSet addColumnNames = new HashSet();
    HashSet dropColumnNames = new HashSet();

    // first find all of the new ansi udts which the table must depend on
    // and the old ones which are candidates for removal
    for (int i = 0; i < changedColumnCount; i++) {
        ColumnInfo ci = columnInfos[i];

        // skip this column if it is not a UDT
        AliasDescriptor ad = dd.getAliasDescriptorForUDT(tc, columnInfos[i].dataType);
        if (ad == null) {
            continue;
        }

        String key = ad.getObjectID().toString();

        if (ci.action == ColumnInfo.CREATE) {
            addColumnNames.add(ci.name);

            // no need to add the descriptor if it is already on the list
            if (addUdtMap.get(key) != null) {
                continue;
            }

            addUdtMap.put(key, ad);
        } else if (ci.action == ColumnInfo.DROP) {
            dropColumnNames.add(ci.name);
            dropUdtMap.put(key, ad);
        }
    }

    // nothing to do if there are no changed columns of udt type
    // and this is not a DROP TABLE command
    if ((!dropWholeTable) && (addUdtMap.size() == 0) && (dropUdtMap.size() == 0)) {
        return;
    }

    //
    // Now prune from the add list all udt descriptors for which we already have dependencies.
    // These are the udts for old columns. This supports the ALTER TABLE ADD COLUMN case.
    //
    // Also prune from the drop list all udt descriptors which will still be
    // referenced by the remaining columns.
    //
    ColumnDescriptorList cdl = td.getColumnDescriptorList();
    int totalColumnCount = cdl.size();

    for (int i = 0; i < totalColumnCount; i++) {
        ColumnDescriptor cd = cdl.elementAt(i);

        // skip columns that are being added and dropped. we only want the untouched columns
        if (addColumnNames.contains(cd.getColumnName()) || dropColumnNames.contains(cd.getColumnName())) {
            continue;
        }

        // nothing to do if the old column isn't a UDT
        AliasDescriptor ad = dd.getAliasDescriptorForUDT(tc, cd.getType());
        if (ad == null) {
            continue;
        }

        String key = ad.getObjectID().toString();

        // ha, it is a UDT.
        if (dropWholeTable) {
            dropUdtMap.put(key, ad);
        } else {
            if (addUdtMap.get(key) != null) {
                addUdtMap.remove(key);
            }
            if (dropUdtMap.get(key) != null) {
                dropUdtMap.remove(key);
            }
        }
    }

    adjustUDTDependencies(lcc, dd, td, addUdtMap, dropUdtMap);
}
From source file:com.globalsight.everest.util.ajax.AjaxService.java
public void getRemoteFileProfile() {
    long GSEditionID = Long.parseLong(request.getParameter("id"));
    GSEditionManagerLocal gsEditionManager = new GSEditionManagerLocal();
    GSEdition edition = gsEditionManager.getGSEditionByID(GSEditionID);

    try {
        Ambassador ambassador = WebServiceClientHelper.getClientAmbassador(edition.getHostName(),
                edition.getHostPort(), edition.getUserName(), edition.getPassword(), edition.getEnableHttps());
        String fullAccessToken = ambassador.login(edition.getUserName(), edition.getPassword());
        String realAccessToken = WebServiceClientHelper.getRealAccessToken(fullAccessToken);
        HashMap xliffFP = ambassador.getXliffFileProfile(realAccessToken);

        StringBuilder sb = new StringBuilder();
        sb.append("[");
        Iterator itera = xliffFP.keySet().iterator();
        int i = 0;
        if (itera.hasNext()) {
            while (itera.hasNext()) {
                i++;
                Object key = itera.next();
                String val = (String) xliffFP.get(key);
                sb.append("{");
                sb.append("\"fileprofileID\":").append(key).append(",");
                sb.append("\"fileprofileName\":").append("\"").append(val).append("\"").append("}");
                if (i < xliffFP.size()) {
                    sb.append(",");
                }
            }
        } else {
            sb.append("{");
            sb.append("\"noXliffFile\":").append("\"true").append("\"").append("}");
        }
        sb.append("]");
        writer.write(sb.toString());
        writer.close();
    } catch (Exception e) {
        String msg = e.getMessage();
        String errorInfo = null;
        if (msg != null && msg.indexOf("No such operation") > -1) {
            StringBuilder sb = new StringBuilder();
            sb.append("[");
            sb.append("{");
            sb.append("\"lowVersion\":").append("\"true").append("\"").append("}");
            sb.append("]");
            writer.write(sb.toString());
            writer.close();
        } else {
            if (msg != null && (msg.indexOf("Connection timed out") > -1
                    || msg.indexOf("UnknownHostException") > -1
                    || msg.indexOf("java.net.ConnectException") > -1)) {
                errorInfo = "Can not connect to server. Please check GS Edition configuration.";
            } else if (msg != null && msg.indexOf("Illegal web service access attempt from IP address") > -1) {
                errorInfo = "User name or password of GS Edition is wrong. Or the IP is not allowed to access server.";
            } else if (msg != null && msg.indexOf("The username or password may be incorrect") > -1) {
                errorInfo = "Can not connect to server. Please check GS Edition configuration.";
            } else if (msg != null && msg.indexOf("com.globalsight.webservices.WebServiceException") > -1) {
                errorInfo = "Can not connect to server.";
            } else {
                errorInfo = msg;
            }
            StringBuilder sb = new StringBuilder();
            sb.append("[");
            sb.append("{");
            sb.append("\"errorInfo\":").append("\"").append(errorInfo).append("\"").append("}");
            sb.append("]");
            writer.write(sb.toString());
            writer.close();
        }
        // e.printStackTrace();
    }
}
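The loop above compares the running index against xliffFP.size() to suppress the trailing comma. A sketch of an alternative (illustrative only, not part of GlobalSight) that avoids the index bookkeeping by joining pre-built fragments:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Build one JSON object per entry, then let String.join place the commas.
static String toJson(Map<?, ?> xliffFP) {
    List<String> parts = new ArrayList<>();
    for (Map.Entry<?, ?> e : xliffFP.entrySet()) {
        parts.add("{\"fileprofileID\":" + e.getKey() + ",\"fileprofileName\":\"" + e.getValue() + "\"}");
    }
    return "[" + String.join(",", parts) + "]";
}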
From source file:gov.anl.cue.arcane.engine.matrix.MatrixModel.java
/**
 * Find node bases.
 *
 * @param nodeCounts the node counts
 * @return the hash map
 */
public static HashMap<Integer, Integer> findNodeBases(HashMap<Integer, Integer> nodeCounts) {

    // Create the results holder.
    HashMap<Integer, Integer> nodeBases = new HashMap<Integer, Integer>();

    // Determine the node bases (the loop assumes the map is keyed 0..size()-1).
    int lastNodeBase = 0;
    for (int nodeIndex = 0; nodeIndex < nodeCounts.size(); nodeIndex++) {

        // Store the next node base.
        nodeBases.put(nodeIndex, lastNodeBase);

        // Move on.
        lastNodeBase += nodeCounts.get(nodeIndex);
    }

    // Return the results.
    return nodeBases;
}
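A usage sketch for findNodeBases (the counts below are made up; it assumes, as the loop does, that the map is keyed 0..size()-1):

HashMap<Integer, Integer> nodeCounts = new HashMap<Integer, Integer>();
nodeCounts.put(0, 3);
nodeCounts.put(1, 5);
nodeCounts.put(2, 2);
HashMap<Integer, Integer> nodeBases = MatrixModel.findNodeBases(nodeCounts);
// nodeBases is now {0=0, 1=3, 2=8}: each base is the running total of the preceding counts.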
From source file:com.openerp.orm.ORM.java
/**
 * Gets the result.
 *
 * @param dbHelper the db helper
 * @param fetch_columns
 * @param result the result
 * @return the result
 */
@SuppressWarnings("unchecked")
private List<HashMap<String, Object>> getResult(BaseDBHelper dbHelper, String[] fetch_columns, Cursor result) {
    HashMap<String, Object> m2m = dbHelper.getMany2ManyColumns();
    HashMap<String, Object> m2o = dbHelper.getMany2OneColumns();
    List<HashMap<String, Object>> results = null;
    String[] columns = result.getColumnNames();
    if (result.moveToFirst()) {
        results = new ArrayList<HashMap<String, Object>>();
        HashMap<String, Object> row;
        do {
            row = new HashMap<String, Object>();
            for (String col : columns) {
                String value = result.getString(result.getColumnIndex(col));
                row.put(col, value);
            }
            List<String> user_columns = null;
            if (fetch_columns != null) {
                user_columns = Arrays.asList(fetch_columns);
            }
            // Getting many2many ids for row
            if (m2m.size() > 0) {
                String id = result.getString(result.getColumnIndex("id"));
                for (String key : m2m.keySet()) {
                    if (user_columns != null && user_columns.contains(key)) {
                        Many2Many m2mObj = (Many2Many) m2m.get(key);
                        BaseDBHelper newdb = generateM2MHelper(dbHelper, m2mObj);
                        String col1 = newdb.getColumns().get(0).getName();
                        String col2 = newdb.getColumns().get(1).getName();
                        String col3 = newdb.getColumns().get(2).getName();
                        HashMap<String, Object> rel_row = newdb.search(newdb,
                                new String[] { col1 + " = ?", "AND", col3 + " = ?" },
                                new String[] { id, user_name });
                        int total = Integer.parseInt(rel_row.get("total").toString());
                        if (total > 0) {
                            JSONArray ids_list = new JSONArray();
                            for (int i = 0; i < total; i++) {
                                JSONArray ids = new JSONArray();
                                HashMap<String, Object> rowdata = ((List<HashMap<String, Object>>) rel_row
                                        .get("records")).get(i);
                                BaseDBHelper rel_obj = m2mObj.getM2mObject();
                                HashMap<String, Object> rel_data = rel_obj.search(rel_obj,
                                        new String[] { "id = ? " },
                                        new String[] { rowdata.get(col2).toString() });
                                ids.put(Integer.parseInt(rowdata.get(col2).toString()));
                                if (Integer.parseInt(rel_data.get("total").toString()) > 0) {
                                    ids.put(((List<HashMap<String, Object>>) rel_data.get("records")).get(0)
                                            .get("name").toString());
                                }
                                ids_list.put(ids);
                            }
                            row.put(key, ids_list);
                        }
                    }
                }
            }
            // Getting many2one [id, name]
            if (m2o.size() > 0) {
                for (String key : m2o.keySet()) {
                    if (user_columns != null && user_columns.contains(key)) {
                        JSONArray ids_list = new JSONArray();
                        String ref_id = result.getString(result.getColumnIndex(key));
                        if (!ref_id.equals("false")) {
                            Many2One m2oObj = (Many2One) m2o.get(key);
                            JSONArray ids = new JSONArray();
                            HashMap<String, Object> rel_data = m2oObj.getM2OObject().search(
                                    m2oObj.getM2OObject(), new String[] { "id", "name" },
                                    new String[] { "id = ? " }, new String[] { ref_id });
                            ids.put(ref_id);
                            if (Integer.parseInt(rel_data.get("total").toString()) > 0) {
                                ids.put(((List<HashMap<String, Object>>) rel_data.get("records")).get(0)
                                        .get("name").toString());
                            }
                            ids_list.put(ids);
                        }
                        if (ids_list.length() != 0) {
                            row.put(key, ids_list);
                        } else {
                            row.put(key, false);
                        }
                    }
                }
            }
            results.add(row);
        } while (result.moveToNext());
    }
    result.close();
    return results;
}
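A side note on the guards above: for any java.util.Map, size() > 0 and !isEmpty() are equivalent, so the m2m/m2o checks could equally be written as:

if (!m2m.isEmpty()) { /* many2many handling */ }
if (!m2o.isEmpty()) { /* many2one handling */ }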
From source file:marytts.tools.dbselection.WikipediaMarkupCleaner.java
public void updateWordList(DBHandler wikiToDB, HashMap<String, Integer> wlNew) {
    String w;
    HashMap<String, Integer> wlOld;
    Integer freq;
    Integer i;
    // Checking if the word list exists
    if (wikiToDB.tableExist(locale + "_wordList")) {
        System.out.println("Updating " + locale + "_wordList in DB table....");
        wlOld = wikiToDB.getMostFrequentWords(0, 0);
        // combine the two tables
        Iterator iterator = wlNew.keySet().iterator();
        while (iterator.hasNext()) {
            w = iterator.next().toString();
            freq = wlNew.get(w);
            i = (Integer) wlOld.get(w);
            // if key is not in the map then give it value freq
            // otherwise increment its value by freq
            if (i == null)
                wlOld.put(w, new Integer(freq));
            else
                wlOld.put(w, new Integer(i.intValue() + freq));
        }
        wikiToDB.insertWordList(wlOld);
        System.out.println(
                "Final size of wordList after combining old and new lists: wordList=[" + wlOld.size() + "]");
    } else {
        System.out.println("Saving " + locale + "_wordList table....");
        wikiToDB.insertWordList(wlNew);
    }
}
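The combine step above predates Java 8; a minimal sketch of the same frequency merge using Map.merge (the method name here is illustrative, not from MaryTTS):

import java.util.Map;

// Absent key: insert freq. Present key: add freq. The same two branches the manual loop codes by hand.
static void mergeCounts(Map<String, Integer> wlOld, Map<String, Integer> wlNew) {
    wlNew.forEach((word, freq) -> wlOld.merge(word, freq, Integer::sum));
}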
From source file:com.mysql.stresstool.StressTool.java
/**
 * Print the cursor
 */
private void printProgress() {
    HashMap allTh = new HashMap(0);
    if (StressTool.getThreadInfoMap() != null && StressTool.getThreadInfoMap().size() > 0)
        allTh.putAll(StressTool.getThreadInfoMap());
    if (StressTool.getThreadInfoSelectMap() != null && StressTool.getThreadInfoSelectMap().size() > 0)
        allTh.putAll(StressTool.getThreadInfoSelectMap());
    if (StressTool.getThreadInfoDeleteMap() != null && StressTool.getThreadInfoDeleteMap().size() > 0)
        allTh.putAll(StressTool.getThreadInfoDeleteMap());

    if (allTh != null && allTh.size() > 0) {
        double currentLastLoop = this.getCurrentLastLoop();
        // Get the real status of the execution
        if (allTh.size() > 0) {
            Object[] itAll = allTh.keySet().toArray();
            for (int i = 0; i <= itAll.length - 1; i++) {
                double tmpMin = 0.0;
                if (itAll.length > 1 && i < itAll.length - 1) {
                    int a = ((ThreadInfo) allTh.get(itAll[i])).getExecutedLoops();
                    int b = ((ThreadInfo) allTh.get(itAll[i + 1])).getExecutedLoops();
                    tmpMin = Math.min(a, b);
                } else {
                    int a = ((ThreadInfo) allTh.get(itAll[i])).getExecutedLoops();
                    tmpMin = Math.min(a, a);
                }
                if (i > 0)
                    currentLastLoop = Math.min(currentLastLoop, tmpMin);
                else
                    currentLastLoop = tmpMin;
                // this.setCurrentLastLoop(currentLastLoop);
                // System.out.print("Running min = " + currentLastLoop + "\n");
                // int a = ((ThreadInfo) allTh.get(i)).getExecutedLoops();
                // int b = ((ThreadInfo) allTh.get(i + 1)).getExecutedLoops();
                // currentLastLoop = Math.min(a, b);
            }
        } else {
            Object[] itAll = allTh.keySet().toArray();
            // currentLastLoop = ((ThreadInfo) allTh.get(itAll[0])).getExecutedLoops();
        }

        int perccurrentLastLoop = 1;
        int curPrevLoop = this.getCurrentLastLoop() > 0 ? this.getCurrentLastLoop() : -1;
        perccurrentLastLoop = new Double(Math.ceil((((double) currentLastLoop / repeatNumber) * 100))).intValue();
        // System.out.println(perccurrentLastLoop + " " + this.getCurrentLastLoop() + " " + repeatNumber);
        if (perccurrentLastLoop > curPrevLoop) {
            // int toprint = new Double(perccurrentLastLoop - this.getCurrentLastLoop()).intValue();
            int toprint = (perccurrentLastLoop - curPrevLoop);
            if (this.debug)
                System.out.println(toprint + " " + perccurrentLastLoop + " " + curPrevLoop + " " + repeatNumber);
            for (int ic = 0; ic < toprint; ic++) {
                System.out.print("*");
            }
            this.setCurrentLastLoop(perccurrentLastLoop);
            // this.prevPercentLoop = perccurrentLastLoop;
        }
    }
}
From source file:com.chinamobile.bcbsp.bspcontroller.JobInProgress.java
/**
 * Constructs a JobInProgress from the BSP job, job id, controller, staff count,
 * and staff locations.
 *
 * @param job BSP job
 * @param jobId BSP job id
 * @param controller BSP controller
 * @param staffNum staff num
 * @param locations staff location
 */
public JobInProgress(BSPJob job, BSPJobID jobId, BSPController controller, int staffNum,
        HashMap<Integer, String[]> locations) {
    this.jobId = jobId;
    this.controller = controller;
    this.superStepCounter = -2;
    this.numBSPStaffs = staffNum;
    staffs = new StaffInProgress[locations.size()];
    for (int i = 0; i < this.numBSPStaffs; i++) {
        RawSplit split = new RawSplit();
        split.setLocations(locations.get(i));
        split.setClassName("yes");
        staffs[i] = new StaffInProgress(this.jobId, null, this.controller, null, this, i, split);
    }
    this.job = job;
    loadAggregators();
}