List of usage examples for java.util.HashMap.size()
public int size()
Returns the number of key-value mappings in this map.
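Before the project examples below, a minimal standalone sketch (not taken from any of the listed projects) showing how size() tracks the number of distinct keys currently in the map:

import java.util.HashMap;

public class HashMapSizeDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> ages = new HashMap<>();
        System.out.println(ages.size()); // 0: the map starts empty

        ages.put("alice", 30);
        ages.put("bob", 25);
        System.out.println(ages.size()); // 2: two distinct keys

        ages.put("alice", 31);           // replacing the value for an existing key
        System.out.println(ages.size()); // still 2: no new key was added

        ages.remove("bob");
        System.out.println(ages.size()); // 1
    }
}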
From source file:website.openeng.async.Connection.java
/**
 * Downloads any missing media files according to the mediaURL deckvar.
 *
 * @param data
 * @return The return type contains data.resultType and an array of Integer in data.data. data.data[0] is the number
 *         of total missing media, data.data[1] is the number of downloaded ones.
 */
private Payload doInBackgroundDownloadMissingMedia(Payload data) {
    Timber.i("DownloadMissingMedia");
    HashMap<String, String> missingPaths = new HashMap<String, String>();
    HashMap<String, String> missingSums = new HashMap<String, String>();

    data.result = (Decks) data.data[0]; // pass it to the return object so we close the deck in the deck picker
    String syncName = ""; // deck.getDeckName();
    data.success = false;
    data.data = new Object[] { 0, 0, 0 };
    // if (!deck.hasKey("mediaURL")) {
    //     data.success = true;
    //     return data;
    // }
    String urlbase = ""; // deck.getVar("mediaURL");
    if (urlbase.equals("")) {
        data.success = true;
        return data;
    }

    String mdir = ""; // deck.mediaDir(true);
    int totalMissing = 0;
    int missing = 0;
    int grabbed = 0;

    Cursor cursor = null;
    try {
        cursor = null; // deck.getDB().getDatabase().rawQuery("SELECT filename, originalPath FROM media", null);
        String path = null;
        String f = null;
        while (cursor.moveToNext()) {
            f = cursor.getString(0);
            path = mdir + "/" + f;
            File file = new File(path);
            if (!file.exists()) {
                missingPaths.put(f, path);
                missingSums.put(f, cursor.getString(1));
                Timber.d("Missing file: %s", f);
            }
        }
    } finally {
        if (cursor != null) {
            cursor.close();
        }
    }

    totalMissing = missingPaths.size();
    data.data[0] = totalMissing;
    if (totalMissing == 0) {
        data.success = true;
        return data;
    }
    publishProgress(Boolean.FALSE, totalMissing, 0, syncName);

    URL url = null;
    HttpURLConnection connection = null;
    String path = null;
    String sum = null;
    int readbytes = 0;
    byte[] buf = new byte[4096];
    for (String file : missingPaths.keySet()) {
        try {
            android.net.Uri uri = android.net.Uri.parse(Uri.encode(urlbase, ":/@%") + Uri.encode(file));
            url = new URI(uri.toString()).toURL();
            connection = (HttpURLConnection) url.openConnection();
            connection.connect();
            if (connection.getResponseCode() == 200) {
                path = missingPaths.get(file);
                InputStream is = connection.getInputStream();
                BufferedInputStream bis = new BufferedInputStream(is, 4096);
                FileOutputStream fos = new FileOutputStream(path);
                while ((readbytes = bis.read(buf, 0, 4096)) != -1) {
                    fos.write(buf, 0, readbytes);
                    Timber.d("Downloaded %d file: %s", readbytes, path);
                }
                fos.close();
                // Verify with checksum
                sum = missingSums.get(file);
                if (true) { // sum.equals("") || sum.equals(Utils.fileChecksum(path))) {
                    grabbed++;
                } else {
                    // Download corrupted, delete file
                    Timber.i("Downloaded media file %s failed checksum.", path);
                    File f = new File(path);
                    f.delete();
                    missing++;
                }
            } else {
                Timber.e("Connection error (" + connection.getResponseCode()
                        + ") while retrieving media file " + urlbase + file);
                Timber.e("Connection message: " + connection.getResponseMessage());
                if (missingSums.get(file).equals("")) {
                    // Ignore and keep going
                    missing++;
                } else {
                    data.success = false;
                    data.data = new Object[] { file };
                    return data;
                }
            }
            connection.disconnect();
        } catch (URISyntaxException e) {
            Timber.e(e, "doInBackgroundDownloadMissingMedia URISyntaxException");
        } catch (MalformedURLException e) {
            Timber.e(e, "MalformedURLException while download media file " + path);
            if (missingSums.get(file).equals("")) {
                // Ignore and keep going
                missing++;
            } else {
                data.success = false;
                data.data = new Object[] { file };
                return data;
            }
        } catch (IOException e) {
            Timber.e(e, "IOException while download media file " + path);
            if (missingSums.get(file).equals("")) {
                // Ignore and keep going
                missing++;
            } else {
                data.success = false;
                data.data = new Object[] { file };
                return data;
            }
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
        publishProgress(Boolean.TRUE, totalMissing, grabbed + missing, syncName);
    }
    data.data[1] = grabbed;
    data.data[2] = missing;
    data.success = true;
    return data;
}
From source file:org.openmeetings.axis.services.RoomWebService.java
/**
 * Returns an object of type RoomReturn
 *
 * @param SID The SID of the User. This SID must be marked as Loggedin
 * @param rooms_id
 * @return
 * @throws AxisFault
 */
public RoomReturn getRoomWithClientObjectsById(String SID, long rooms_id) throws AxisFault {
    try {
        Long users_id = sessionManagement.checkSession(SID);
        Long user_level = userManagement.getUserLevelByID(users_id);
        if (authLevelManagement.checkWebServiceLevel(user_level)) {
            Rooms room = roommanagement.getRoomById(user_level, rooms_id);
            RoomReturn roomReturn = new RoomReturn();
            roomReturn.setCreated(room.getStarttime());
            roomReturn.setCreator(null);
            roomReturn.setName(room.getName());
            roomReturn.setRoom_id(room.getRooms_id());
            HashMap<String, RoomClient> map = clientListManager.getClientListByRoom(room.getRooms_id());
            RoomUser[] roomUsers = new RoomUser[map.size()];
            int i = 0;
            for (Iterator<String> iter = map.keySet().iterator(); iter.hasNext();) {
                RoomClient rcl = map.get(iter.next());
                RoomUser roomUser = new RoomUser();
                roomUser.setFirstname(rcl.getFirstname());
                roomUser.setLastname(rcl.getLastname());
                roomUser.setBroadcastId(rcl.getBroadCastID());
                roomUser.setPublicSID(rcl.getPublicSID());
                roomUser.setIsBroadCasting(rcl.getIsBroadcasting());
                roomUser.setAvsettings(rcl.getAvsettings());
                roomUsers[i] = roomUser;
                i++;
            }
            roomReturn.setRoomUser(roomUsers);
            return roomReturn;
        }
        return null;
    } catch (Exception err) {
        log.error("[getRoomWithClientObjectsById]", err);
        throw new AxisFault(err.getMessage());
    }
}
From source file:com.ichi2.async.DeckTask.java
private TaskData doInBackgroundImportReplace(TaskData... params) {
    Timber.d("doInBackgroundImportReplace");
    Collection col = CollectionHelper.getInstance().getCol(mContext);
    String path = params[0].getString();
    Resources res = AnkiDroidApp.getInstance().getBaseContext().getResources();

    // extract the deck from the zip file
    String colPath = col.getPath();
    File dir = new File(new File(colPath).getParentFile(), "tmpzip");
    if (dir.exists()) {
        BackupManager.removeDir(dir);
    }
    publishProgress(new TaskData(res.getString(R.string.import_unpacking)));
    // from anki2.py
    String colFile = new File(dir, "collection.anki2").getAbsolutePath();
    ZipFile zip;
    try {
        zip = new ZipFile(new File(path), ZipFile.OPEN_READ);
    } catch (IOException e) {
        Timber.e(e, "doInBackgroundImportReplace - Error while unzipping");
        AnkiDroidApp.sendExceptionReport(e, "doInBackgroundImportReplace0");
        return new TaskData(false);
    }
    if (!Utils.unzipFiles(zip, dir.getAbsolutePath(), new String[] { "collection.anki2", "media" }, null)
            || !(new File(colFile)).exists()) {
        return new TaskData(-2, null, false);
    }
    Collection tmpCol = null;
    try {
        tmpCol = Storage.Collection(colFile);
        if (!tmpCol.validCollection()) {
            tmpCol.close();
            return new TaskData(-2, null, false);
        }
    } catch (Exception e) {
        Timber.e("Error opening new collection file... probably it's invalid");
        try {
            tmpCol.close();
        } catch (Exception e2) {
            // do nothing
        }
        return new TaskData(-2, null, false);
    } finally {
        if (tmpCol != null) {
            tmpCol.close();
        }
    }
    publishProgress(new TaskData(res.getString(R.string.importing_collection)));
    if (col != null) {
        // unload collection and trigger a backup
        CollectionHelper.getInstance().closeCollection(true);
        CollectionHelper.getInstance().lockCollection();
        BackupManager.performBackupInBackground(colPath, true);
    }
    // overwrite collection
    File f = new File(colFile);
    if (!f.renameTo(new File(colPath))) {
        // Exit early if this didn't work
        return new TaskData(-2, null, false);
    }
    int addedCount = -1;
    try {
        col = CollectionHelper.getInstance().reopenCollection();
        CollectionHelper.getInstance().unlockCollection();

        // because users don't have a backup of media, it's safer to import new
        // data and rely on them running a media db check to get rid of any
        // unwanted media. in the future we might also want to duplicate this step
        // import media
        HashMap<String, String> nameToNum = new HashMap<String, String>();
        HashMap<String, String> numToName = new HashMap<String, String>();
        File mediaMapFile = new File(dir.getAbsolutePath(), "media");
        if (mediaMapFile.exists()) {
            JsonReader jr = new JsonReader(new FileReader(mediaMapFile));
            jr.beginObject();
            String name;
            String num;
            while (jr.hasNext()) {
                num = jr.nextName();
                name = jr.nextString();
                nameToNum.put(name, num);
                numToName.put(num, name);
            }
            jr.endObject();
            jr.close();
        }
        String mediaDir = col.getMedia().dir();
        int total = nameToNum.size();
        int i = 0;
        for (Map.Entry<String, String> entry : nameToNum.entrySet()) {
            String file = entry.getKey();
            String c = entry.getValue();
            File of = new File(mediaDir, file);
            if (!of.exists()) {
                Utils.unzipFiles(zip, mediaDir, new String[] { c }, numToName);
            }
            ++i;
            publishProgress(new TaskData(res.getString(R.string.import_media_count, (i + 1) * 100 / total)));
        }
        zip.close();
        // delete tmp dir
        BackupManager.removeDir(dir);
        return new TaskData(true);
    } catch (RuntimeException e) {
        Timber.e(e, "doInBackgroundImportReplace - RuntimeException");
        AnkiDroidApp.sendExceptionReport(e, "doInBackgroundImportReplace1");
        return new TaskData(false);
    } catch (FileNotFoundException e) {
        Timber.e(e, "doInBackgroundImportReplace - FileNotFoundException");
        AnkiDroidApp.sendExceptionReport(e, "doInBackgroundImportReplace2");
        return new TaskData(false);
    } catch (IOException e) {
        Timber.e(e, "doInBackgroundImportReplace - IOException");
        AnkiDroidApp.sendExceptionReport(e, "doInBackgroundImportReplace3");
        return new TaskData(false);
    }
}
From source file:gov.anl.cue.arcane.engine.matrix.MatrixModel.java
/**
 * Imports a matrix model from a template spreadsheet.
 *
 * @param matrixEngine the matrix engine
 * @param fileName the file name
 * @return the results
 */
public static MatrixModel importTemplate(MatrixEngine matrixEngine, String fileName) {
    // Declare the results storage.
    MatrixModel matrixModel = new MatrixModel(matrixEngine);
    // Try to read the template spreadsheet.
    try {
        // Find the node request counts.
        HashMap<Integer, Integer> nodeCounts = MatrixModel.importTemplateDimensions(fileName);
        // Find the node base index counts.
        HashMap<Integer, Integer> nodeBases = MatrixModel.findNodeBases(nodeCounts);
        // Find the dimensions of the template spreadsheet.
        int nodeRequests = nodeCounts.size();
        int nodeCount = 0;
        for (Integer nodeRequest : nodeCounts.values()) {
            nodeCount += nodeRequest;
        }
        // Attempt to open the template spreadsheet.
        XSSFWorkbook workbook = new XSSFWorkbook(new FileInputStream(new File(fileName)));
        // Extract the fitness function.
        Iterator<XSSFSheet> sheets = MatrixModel.importTemplateExtractFitnessInformation(matrixModel,
                nodeCounts, nodeRequests, workbook);
        // Scan the variables.
        MatrixModel.importTemplateScanVariables(matrixModel, nodeCounts, nodeBases, nodeRequests,
                nodeCount, sheets);
        // Normalize the new model.
        matrixModel.normalize();
    // Catch errors.
    } catch (Exception e) {
        // Note an error.
        matrixModel = null;
    }
    // Return the results.
    return matrixModel;
}
From source file:edu.csupomona.nlp.tool.crawler.Facebook.java
/**
 * Get all the Pages that match the search keyword.
 *
 * @param keyword Keyword for search
 * @param onlyVerified Return only verified pages (currently NA)
 * @return HashMap of Pages
 * @throws JSONException
 */
public HashMap<String, Page> getPages(String keyword, boolean onlyVerified) throws JSONException {
    HashMap<String, Page> fullPages = new HashMap<>();
    int totalLikes = 0;
    try {
        // search pages according to keyword
        ResponseList<Page> pages = fb_.searchPages(keyword);
        System.out.println(pages.size());
        int idx = 0;
        for (Page page : pages) {
            if (onlyVerified) {
                // TOTALLY GAVE UP DUE TO UNKNOWN REASON OF UNABLE TO
                // ACCESS FQL WITH APP ACCESS TOKEN OR USER ACCESS TOKEN
                // is_verified field is only accessible through FQL
                // String query = "select is_verified from page where page_id="
                //         + page.getId();
                // JSONObject json = fb_.executeFQL(query).getJSONObject(0);
                // boolean isVerified = json.getBoolean("is_verified");
                //
                // // reduce speed
                // pause(1);
                //
                // if (!isVerified)
                //     continue;
            }
            // retrieve full information of the page
            Page fullPage = fb_.getPage(page.getId());
            fullPages.put(fullPage.getId(), fullPage);
            // record the number of likes
            totalLikes += fullPage.getLikes();
            // to reduce speed
            // pause(1);
            System.out.println(idx++);
        }
    } catch (FacebookException ex) {
        Logger.getLogger(Facebook.class.getName()).log(Level.SEVERE, null, ex);
    }
    // post-processing: only keep pages whose like count is above average
    // (note: if no pages were found, fullPages.size() is 0 and this division throws)
    int average = totalLikes / fullPages.size();
    System.out.println("Average=" + average);
    List<String> removePageIds = new ArrayList<>();
    for (String pageId : fullPages.keySet())
        if (fullPages.get(pageId).getLikes() < average) {
            System.out.println("RM: " + fullPages.get(pageId).getName() + " [L="
                    + fullPages.get(pageId).getLikes().toString() + "]");
            removePageIds.add(pageId);
        }
    for (String pageId : removePageIds)
        fullPages.remove(pageId);
    return fullPages;
}
From source file:org.taverna.server.master.localworker.SecurityContextDelegate.java
/**
 * Builds and transfers a keystore with suitable credentials to the back-end
 * workflow execution engine.
 *
 * @throws GeneralSecurityException
 *             If the manipulation of the keystore, keys or certificates
 *             fails.
 * @throws IOException
 *             If there are problems building the data (should not happen).
 * @throws RemoteException
 *             If the conveyancing fails.
 */
@Override
public final void conveySecurity() throws GeneralSecurityException, IOException, ImplementationException {
    RemoteSecurityContext rc = run.run.getSecurityContext();
    synchronized (lock) {
        if (credentials.isEmpty() && trusted.isEmpty())
            return;
    }
    char[] password = null;
    try {
        password = generateNewPassword();
        log.info("constructing merged keystore");
        Truststore truststore = new Truststore(password);
        Keystore keystore = new Keystore(password);
        HashMap<URI, String> uriToAliasMap = new HashMap<URI, String>();
        int trustedCount = 0, keyCount = 0;
        synchronized (lock) {
            try {
                for (Trust t : trusted)
                    for (Certificate cert : t.loadedCertificates) {
                        truststore.addCertificate(cert);
                        trustedCount++;
                    }
                this.uriToAliasMap = uriToAliasMap;
                this.keystore = keystore;
                for (Credential c : credentials) {
                    addCredentialToKeystore(c);
                    keyCount++;
                }
            } finally {
                this.uriToAliasMap = null;
                this.keystore = null;
                credentials.clear();
                trusted.clear();
                flushToDB();
            }
        }
        byte[] trustbytes = null, keybytes = null;
        try {
            trustbytes = truststore.serialize();
            keybytes = keystore.serialize();
            // Now we've built the security information, ship it off...
            log.info("transferring merged truststore with " + trustedCount + " entries");
            rc.setTruststore(trustbytes);
            log.info("transferring merged keystore with " + keyCount + " entries");
            rc.setKeystore(keybytes);
        } finally {
            if (trustbytes != null)
                fill(trustbytes, (byte) 0);
            if (keybytes != null)
                fill(keybytes, (byte) 0);
        }
        rc.setPassword(password);
        log.info("transferring serviceURL->alias map with " + uriToAliasMap.size() + " entries");
        rc.setUriToAliasMap(uriToAliasMap);
    } finally {
        if (password != null)
            fill(password, ' ');
    }
    synchronized (lock) {
        conveyExtraSecuritySettings(rc);
    }
}
From source file:fr.cirad.mgdb.exporting.markeroriented.EigenstratExportHandler.java
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    // long before = System.currentTimeMillis();
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    File snpFile = null;
    try {
        snpFile = File.createTempFile("snpFile", "");
        FileWriter snpFileWriter = new FileWriter(snpFile);
        ZipOutputStream zos = new ZipOutputStream(outputStream);
        if (ByteArrayOutputStream.class.isAssignableFrom(outputStream.getClass()))
            zos.setLevel(ZipOutputStream.STORED);
        if (readyToExportFiles != null)
            for (String readyToExportFile : readyToExportFiles.keySet()) {
                zos.putNextEntry(new ZipEntry(readyToExportFile));
                InputStream inputStream = readyToExportFiles.get(readyToExportFile);
                byte[] dataBlock = new byte[1024];
                int count = inputStream.read(dataBlock, 0, 1024);
                while (count != -1) {
                    zos.write(dataBlock, 0, count);
                    count = inputStream.read(dataBlock, 0, 1024);
                }
            }
        MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        int markerCount = markerCursor.count();
        List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
        ArrayList<String> individualList = new ArrayList<String>();
        StringBuffer indFileContents = new StringBuffer();
        for (int i = 0; i < sampleIDs.size(); i++) {
            Individual individual = individuals.get(i);
            if (!individualList.contains(individual.getId())) {
                individualList.add(individual.getId());
                indFileContents.append(individual.getId() + "\t"
                        + getIndividualGenderCode(sModule, individual.getId()) + "\t"
                        + (individual.getPopulation() == null ? "." : individual.getPopulation())
                        + LINE_SEPARATOR);
            }
        }
        String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
        zos.putNextEntry(new ZipEntry(exportName + ".ind"));
        zos.write(indFileContents.toString().getBytes());
        zos.putNextEntry(new ZipEntry(exportName + ".eigenstratgeno"));
        int avgObjSize = (Integer) mongoTemplate
                .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats()
                .get("avgObjSize");
        int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        while (markerCursor.hasNext()) {
            int nLoadedMarkerCountInLoop = 0;
            Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nChunkSize);
            while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
                markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                        refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                                + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }
            List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
            // query mongo db for matching genotypes
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /* new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "."
                            + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(
                            VariantData.FIELDNAME_REFERENCE_POSITION + "."
                            + ChromosomalPosition.FIELDNAME_START_SITE)) */);
            for (VariantData variant : variantsAndRuns.keySet()) {
                // read data and write results into temporary files (one per sample)
                Comparable variantId = variant.getId();
                List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
                if (chromAndPos.size() == 0)
                    LOG.warn("Chromosomal position not found for marker " + variantId);
                // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
                if (markerSynonyms != null) {
                    Comparable syn = markerSynonyms.get(variantId);
                    if (syn != null)
                        variantId = syn;
                }
                snpFileWriter.write(variantId + "\t" + (chromAndPos.size() == 0 ? "0" : chromAndPos.get(0))
                        + "\t" + 0 + "\t" + (chromAndPos.size() == 0 ? 0l : Long.parseLong(chromAndPos.get(1)))
                        + LINE_SEPARATOR);
                Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
                Collection<VariantRunData> runs = variantsAndRuns.get(variant);
                if (runs != null)
                    for (VariantRunData run : runs)
                        for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                            SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                            String individualId = individuals
                                    .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                    .getId();
                            Integer gq = null;
                            try {
                                gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                            } catch (Exception ignored) {
                            }
                            if (gq != null && gq < nMinimumGenotypeQuality)
                                continue;
                            Integer dp = null;
                            try {
                                dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                            } catch (Exception ignored) {
                            }
                            if (dp != null && dp < nMinimumReadDepth)
                                continue;
                            String gtCode = sampleGenotype.getCode();
                            List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                            if (storedIndividualGenotypes == null) {
                                storedIndividualGenotypes = new ArrayList<String>();
                                individualGenotypes.put(individualId, storedIndividualGenotypes);
                            }
                            storedIndividualGenotypes.add(gtCode);
                        }
                for (int j = 0; j < individualList.size(); j++ /* we use this list because it has the proper ordering */) {
                    String individualId = individualList.get(j);
                    List<String> genotypes = individualGenotypes.get(individualId);
                    HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                    int highestGenotypeCount = 0;
                    String mostFrequentGenotype = null;
                    if (genotypes != null)
                        for (String genotype : genotypes) {
                            if (genotype.length() == 0)
                                continue; /* skip missing genotypes */
                            int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                            if (gtCount > highestGenotypeCount) {
                                highestGenotypeCount = gtCount;
                                mostFrequentGenotype = genotype;
                            }
                            genotypeCounts.put(genotype, gtCount);
                        }
                    List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                            : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);
                    int nOutputCode = 0;
                    if (mostFrequentGenotype == null)
                        nOutputCode = 9;
                    else
                        for (String all : Helper.split(mostFrequentGenotype, "/"))
                            if ("0".equals(all))
                                nOutputCode++;
                    if (j == 0 && variant.getKnownAlleleList().size() > 2)
                        warningFileWriter.write("- Variant " + variant.getId()
                                + " is multi-allelic. Make sure Eigenstrat genotype encoding specifications are suitable for you.\n");
                    zos.write(("" + nOutputCode).getBytes());
                    if (genotypeCounts.size() > 1 || alleles.size() > 2) {
                        if (genotypeCounts.size() > 1)
                            warningFileWriter.write("- Dissimilar genotypes found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting most frequent: " + nOutputCode + "\n");
                        if (alleles.size() > 2)
                            warningFileWriter.write("- More than 2 alleles found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting only the first 2 alleles.\n");
                    }
                }
                zos.write((LINE_SEPARATOR).getBytes());
            }
            if (progress.hasAborted())
                return;
            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                // if (nProgress % 5 == 0)
                //     LOG.info("============= exportData: " + nProgress + "% =============" + (System.currentTimeMillis() - before) / 1000 + "s");
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        snpFileWriter.close();
        zos.putNextEntry(new ZipEntry(exportName + ".snp"));
        BufferedReader in = new BufferedReader(new FileReader(snpFile));
        String sLine;
        while ((sLine = in.readLine()) != null)
            zos.write((sLine + "\n").getBytes());
        in.close();
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            in = new BufferedReader(new FileReader(warningFile));
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        zos.close();
        progress.setCurrentStepProgress((short) 100);
    } finally {
        if (snpFile != null && snpFile.exists())
            snpFile.delete();
    }
}
From source file:com.intellectualcrafters.plot.database.SQLManager.java
@Override
public void createAllSettingsAndHelpers(final ArrayList<Plot> plots) {
    // TODO SEVERE [ More than 5000 plots will fail in a single SQLite query.
    final HashMap<String, HashMap<PlotId, Integer>> stored = new HashMap<>();
    final HashMap<Integer, ArrayList<UUID>> helpers = new HashMap<>();
    try {
        final PreparedStatement stmt = connection.prepareStatement(GET_ALL_PLOTS);
        final ResultSet result = stmt.executeQuery();
        while (result.next()) {
            final int id = result.getInt("id");
            final int idx = result.getInt("plot_id_x");
            final int idz = result.getInt("plot_id_z");
            final String world = result.getString("world");
            if (!stored.containsKey(world)) {
                stored.put(world, new HashMap<PlotId, Integer>());
            }
            stored.get(world).put(new PlotId(idx, idz), id);
        }
    } catch (final SQLException e) {
        e.printStackTrace();
    }
    for (final Plot plot : plots) {
        final String world = Bukkit.getWorld(plot.world).getName();
        if (stored.containsKey(world)) {
            final Integer id = stored.get(world).get(plot.id);
            if (id != null) {
                helpers.put(id, plot.helpers);
            }
        }
    }
    if (helpers.size() == 0) {
        return;
    }
    // add plot settings
    final Integer[] ids = helpers.keySet().toArray(new Integer[helpers.keySet().size()]);
    StringBuilder statement = new StringBuilder(CREATE_SETTINGS);
    for (int i = 0; i < (ids.length - 1); i++) {
        statement.append("(?),");
    }
    statement.append("(?)");
    PreparedStatement stmt = null;
    try {
        stmt = connection.prepareStatement(statement.toString());
        for (int i = 0; i < ids.length; i++) {
            stmt.setInt(i + 1, ids[i]);
        }
        stmt.executeUpdate();
        stmt.close();
    } catch (final SQLException e) {
        e.printStackTrace();
    }
    // add plot helpers
    String prefix = "";
    statement = new StringBuilder(CREATE_HELPERS);
    for (final Integer id : helpers.keySet()) {
        for (final UUID helper : helpers.get(id)) {
            statement.append(prefix + "(?, ?)");
            prefix = ",";
        }
    }
    if (prefix.equals("")) {
        return;
    }
    try {
        stmt = connection.prepareStatement(statement.toString());
        int counter = 0;
        for (final Integer id : helpers.keySet()) {
            for (final UUID helper : helpers.get(id)) {
                stmt.setInt((counter * 2) + 1, id);
                stmt.setString((counter * 2) + 2, helper.toString());
                counter++;
            }
        }
        stmt.executeUpdate();
        stmt.close();
    } catch (final SQLException e) {
        Logger.add(LogLevel.WARNING, "Failed to set helper for plots");
        e.printStackTrace();
    }
}
From source file:com.pari.reports.request.handlers.ManageExportHandlerImpl.java
@SuppressWarnings("unchecked")
private ServiceContainer getSetUpWorkflowApprovalDetails(ColumnDefinition[] colDef,
        TableDefinition childTableDef, HashMap<String, Object> paramFilters) {
    ServiceImpl service = null;
    ServiceContainerImpl serviceContainer = null;
    String customerName = getRequestDetails().getCustomer();
    int customerId = -1;
    if (customerName != null && customerName.trim().length() > 0) {
        try {
            customerId = getRequestDetails().getServerIf().getCustomerIf().getCustomerIdByName(customerName);
        } catch (Exception e) {
            logger.error("Exception while Exporting Setup Workflow", e);
        }
    }
    Map<Integer, ApprovalDefinition> approvalDefMap = null;
    try {
        if (paramFilters != null && paramFilters.size() > 0) {
            approvalDefMap = new HashMap<Integer, ApprovalDefinition>();
            ArrayList<String> grpNames = new ArrayList<String>();
            Object grpIdObj = paramFilters.get("DeviceGroup");
            if (grpIdObj instanceof String) {
                grpNames.add((String) grpIdObj);
            } else if (grpIdObj instanceof ArrayList) {
                grpNames = (ArrayList<String>) grpIdObj;
            }
            for (String grpName : grpNames) {
                int grpId = GroupingService.getInstance().getGroupIdFromName(grpName);
                ApprovalDefinition approvalDef = getRequestDetails().getServerIf().getApprovalIf()
                        .getWorkflowApprovalDefsWithLevels(grpId, customerId);
                approvalDefMap.put(grpId, approvalDef);
            }
        } else {
            approvalDefMap = getRequestDetails().getServerIf().getApprovalIf()
                    .getAllWorkflowApprovalDefsWithLevels(customerId);
        }
    } catch (PariException e) {
        e.printStackTrace();
    }
    if (null == approvalDefMap) {
        return null;
    }
    SimpleDateFormat formatter = new SimpleDateFormat("EEE, MMM dd, yyyy HH:mm:ss Z");
    serviceContainer = new ServiceContainerImpl();
    for (Map.Entry<Integer, ApprovalDefinition> map : approvalDefMap.entrySet()) {
        service = new ServiceImpl();
        ApprovalDefinition approvalDefinition = map.getValue();
        String creationTime = (0 == approvalDefinition.getCreationTime()) ? ""
                : getTimeBasedOnZone(approvalDefinition.getCreationTime());
        String modifierTime = (0 == approvalDefinition.getModifierTime()) ? ""
                : formatter.format(new Date(approvalDefinition.getModifierTime()));
        service.put(colDef[0].getId(), approvalDefinition.getGroupName());
        service.put(colDef[1].getId(), approvalDefinition.getWorkFlowName());
        service.put(colDef[2].getId(), approvalDefinition.getCreatorLogin());
        service.put(colDef[3].getId(), creationTime);
        service.put(colDef[4].getId(), approvalDefinition.getModifierLogin());
        service.put(colDef[5].getId(), modifierTime);
        if (childTableDef != null) {
            ColumnDefinition[] chidlColDef = childTableDef.getColumnDefs();
            Map<Integer, ApprovalLevelDefination> approvalLevels = approvalDefinition.getApprovalLevels();
            if (approvalLevels != null && approvalLevels.size() > 0) {
                ServiceContainerImpl approvalLevelSrvCont = new ServiceContainerImpl();
                for (Integer level : approvalLevels.keySet()) {
                    ApprovalLevelDefination approvalLevel = approvalLevels.get(level);
                    Map<Integer, String> approverLogins = approvalLevel.getApproverLogins();
                    if (approverLogins != null && approverLogins.size() > 0) {
                        for (Integer id : approverLogins.keySet()) {
                            ServiceImpl srvImpl = new ServiceImpl();
                            srvImpl.put(chidlColDef[0].getId(), approvalDefinition.getGroupName());
                            srvImpl.put(chidlColDef[1].getId(), level);
                            srvImpl.put(chidlColDef[2].getId(), approverLogins.get(id));
                            srvImpl.put(chidlColDef[3].getId(),
                                    approvalLevel.getApprovalComplType().getActionString());
                            approvalLevelSrvCont.addManageServices(srvImpl);
                        }
                    }
                    HashMap<Integer, ApprovalGroupDefinition> approvalUsrGrps = approvalLevel
                            .getApprovalUserGrps();
                    if (approvalUsrGrps != null && approvalUsrGrps.size() > 0) {
                        for (ApprovalGroupDefinition approvalUsrGrp : approvalUsrGrps.values()) {
                            approverLogins = approvalUsrGrp.getApproverLogins();
                            if (approverLogins != null && approverLogins.size() > 0) {
                                for (Integer id : approverLogins.keySet()) {
                                    ServiceImpl srvImpl = new ServiceImpl();
                                    srvImpl.put(chidlColDef[0].getId(), approvalDefinition.getGroupName());
                                    srvImpl.put(chidlColDef[1].getId(), level);
                                    srvImpl.put(chidlColDef[2].getId(), approverLogins.get(id));
                                    srvImpl.put(chidlColDef[3].getId(),
                                            approvalLevel.getApprovalComplType().getActionString());
                                    approvalLevelSrvCont.addManageServices(srvImpl);
                                }
                            }
                        }
                    }
                }
                service.put("ApprovalLevelDefs", approvalLevelSrvCont);
            }
        }
        serviceContainer.addManageServices(service);
    }
    return serviceContainer;
}