List of usage examples for java.util HashMap size

public int size()

Returns the number of key-value mappings in this map.
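Before the real-world examples below, here is a minimal self-contained sketch (not taken from any of the source files listed) of how size() tracks insertions, replacements, and removals:

import java.util.HashMap;

public class HashMapSizeDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<String, Integer>();
        System.out.println(map.size()); // 0: a new map is empty

        map.put("a", 1);
        map.put("b", 2);
        map.put("a", 3); // replaces the value for "a"; size is unchanged
        System.out.println(map.size()); // 2

        map.remove("b");
        System.out.println(map.size()); // 1
    }
}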
From source file: com.jinglingtec.ijiazu.util.CCPRestSmsSDK.java

private HashMap<String, Object> jsonToMap(String result) {
    HashMap<String, Object> hashMap = new HashMap<String, Object>();
    JsonParser parser = new JsonParser();
    JsonObject asJsonObject = parser.parse(result).getAsJsonObject();
    Set<Entry<String, JsonElement>> entrySet = asJsonObject.entrySet();
    HashMap<String, Object> hashMap2 = new HashMap<String, Object>();
    for (Entry<String, JsonElement> m : entrySet) {
        if ("statusCode".equals(m.getKey()) || "statusMsg".equals(m.getKey())) {
            hashMap.put(m.getKey(), m.getValue().getAsString());
        } else {
            if ("SubAccount".equals(m.getKey()) || "totalCount".equals(m.getKey()) || "token".equals(m.getKey())
                    || "downUrl".equals(m.getKey())) {
                if (!"SubAccount".equals(m.getKey())) {
                    hashMap2.put(m.getKey(), m.getValue().getAsString());
                } else {
                    try {
                        if ((m.getValue().toString().trim().length() <= 2)
                                && !m.getValue().toString().contains("[")) {
                            hashMap2.put(m.getKey(), m.getValue().getAsString());
                            hashMap.put("data", hashMap2);
                            break;
                        }
                        if (m.getValue().toString().contains("[]")) {
                            hashMap2.put(m.getKey(), new JsonArray());
                            hashMap.put("data", hashMap2);
                            continue;
                        }
                        JsonArray asJsonArray = parser.parse(m.getValue().toString()).getAsJsonArray();
                        ArrayList<HashMap<String, Object>> arrayList = new ArrayList<HashMap<String, Object>>();
                        for (JsonElement j : asJsonArray) {
                            Set<Entry<String, JsonElement>> entrySet2 = j.getAsJsonObject().entrySet();
                            HashMap<String, Object> hashMap3 = new HashMap<String, Object>();
                            for (Entry<String, JsonElement> m2 : entrySet2) {
                                hashMap3.put(m2.getKey(), m2.getValue().getAsString());
                            }
                            arrayList.add(hashMap3);
                        }
                        hashMap2.put("SubAccount", arrayList);
                    } catch (Exception e) {
                        JsonObject asJsonObject2 = parser.parse(m.getValue().toString()).getAsJsonObject();
                        Set<Entry<String, JsonElement>> entrySet2 = asJsonObject2.entrySet();
                        HashMap<String, Object> hashMap3 = new HashMap<String, Object>();
                        for (Entry<String, JsonElement> m2 : entrySet2) {
                            hashMap3.put(m2.getKey(), m2.getValue().getAsString());
                        }
                        hashMap2.put(m.getKey(), hashMap3);
                        hashMap.put("data", hashMap2);
                    }
                }
                hashMap.put("data", hashMap2);
            } else {
                JsonObject asJsonObject2 = parser.parse(m.getValue().toString()).getAsJsonObject();
                Set<Entry<String, JsonElement>> entrySet2 = asJsonObject2.entrySet();
                HashMap<String, Object> hashMap3 = new HashMap<String, Object>();
                for (Entry<String, JsonElement> m2 : entrySet2) {
                    hashMap3.put(m2.getKey(), m2.getValue().getAsString());
                }
                if (hashMap3.size() != 0) {
                    hashMap2.put(m.getKey(), hashMap3);
                } else {
                    hashMap2.put(m.getKey(), m.getValue().getAsString());
                }
                hashMap.put("data", hashMap2);
            }
        }
    }
    return hashMap;
}
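In the example above, hashMap3.size() != 0 tests whether the parsed nested object produced any entries. A standalone sketch (with hypothetical names) of the same check, alongside the equivalent isEmpty() call:

import java.util.HashMap;

public class EmptinessCheckDemo {
    public static void main(String[] args) {
        HashMap<String, Object> nested = new HashMap<String, Object>();
        // Both tests are equivalent; isEmpty() states the intent more directly.
        System.out.println(nested.size() != 0); // false
        System.out.println(!nested.isEmpty()); // false

        nested.put("key", "value");
        System.out.println(nested.size() != 0); // true
    }
}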
From source file: hd3gtv.mydmam.useraction.fileoperation.UAFileOperationTrash.java

public void process(JobProgression progression, UserProfile userprofile, UAConfigurator user_configuration,
        HashMap<String, SourcePathIndexerElement> source_elements) throws Exception {
    String user_base_directory_name = userprofile.getBaseFileName_BasedOnEMail();
    if (trash_directory_name == null) {
        trash_directory_name = "Trash";
    }

    Log2Dump dump = new Log2Dump();
    dump.add("user", userprofile.key);
    dump.add("trash_directory_name", trash_directory_name);
    dump.add("user_base_directory_name", user_base_directory_name);
    dump.add("source_elements", source_elements.values());
    Log2.log.debug("Prepare trash", dump);

    progression.update("Prepare trashs directories");

    File current_user_trash_dir;
    HashMap<String, File> trashs_dirs = new HashMap<String, File>();
    for (Map.Entry<String, SourcePathIndexerElement> entry : source_elements.entrySet()) {
        String storagename = entry.getValue().storagename;
        if (trashs_dirs.containsKey(storagename)) {
            continue;
        }
        File storage_dir = Explorer
                .getLocalBridgedElement(SourcePathIndexerElement.prepareStorageElement(storagename));
        current_user_trash_dir = new File(storage_dir.getPath() + File.separator + trash_directory_name
                + File.separator + user_base_directory_name);

        if (!current_user_trash_dir.exists()) {
            FileUtils.forceMkdir(current_user_trash_dir);
        } else {
            CopyMove.checkExistsCanRead(current_user_trash_dir);
            CopyMove.checkIsWritable(current_user_trash_dir);
            CopyMove.checkIsDirectory(current_user_trash_dir);
        }
        trashs_dirs.put(storagename, current_user_trash_dir);

        if (stop) {
            return;
        }
    }

    progression.update("Move item(s) to trash(s) directorie(s)");
    progression.updateStep(1, source_elements.size());

    Date now = new Date();
    for (Map.Entry<String, SourcePathIndexerElement> entry : source_elements.entrySet()) {
        progression.incrStep();

        File current_element = Explorer.getLocalBridgedElement(entry.getValue());
        CopyMove.checkExistsCanRead(current_element);
        CopyMove.checkIsWritable(current_element);

        current_user_trash_dir = trashs_dirs.get(entry.getValue().storagename);
        File f_destination = new File(current_user_trash_dir.getPath() + File.separator
                + simpledateformat.format(now) + "_" + current_element.getName());

        if (current_element.isDirectory()) {
            FileUtils.moveDirectory(current_element, f_destination);
        } else {
            FileUtils.moveFile(current_element, f_destination);
        }
        if (stop) {
            return;
        }

        ContainerOperations.copyMoveMetadatas(entry.getValue(), entry.getValue().storagename,
                "/" + trash_directory_name + "/" + user_base_directory_name, false, this);

        ElasticsearchBulkOperation bulk = Elasticsearch.prepareBulk();
        explorer.deleteStoragePath(bulk, Arrays.asList(entry.getValue()));
        bulk.terminateBulk();

        if (stop) {
            return;
        }
    }

    ElasticsearchBulkOperation bulk = Elasticsearch.prepareBulk();
    ArrayList<SourcePathIndexerElement> spie_trashs_dirs = new ArrayList<SourcePathIndexerElement>();
    for (String storage_name : trashs_dirs.keySet()) {
        SourcePathIndexerElement root_trash_directory = SourcePathIndexerElement
                .prepareStorageElement(storage_name);
        root_trash_directory.parentpath = root_trash_directory.prepare_key();
        root_trash_directory.directory = true;
        root_trash_directory.currentpath = "/" + trash_directory_name;
        spie_trashs_dirs.add(root_trash_directory);
    }
    explorer.refreshStoragePath(bulk, spie_trashs_dirs, false);
    bulk.terminateBulk();
}
From source file: fr.paris.lutece.plugins.directory.service.directorysearch.DirectorySearchService.java

/**
 * Return the list of record keys returned by the search.
 * @param directory the directory
 * @param mapSearch a map which contains, for each entry, the list of
 *            associated RecordField objects (the searched field values)
 * @param dateCreation the creation date
 * @param dateCreationBegin the begin date to search for the creation date
 * @param dateCreationEnd the end date to search for the creation date
 * @param dateModification the modification date
 * @param dateModificationBegin the begin date to search for the modification date
 * @param dateModificationEnd the end date to search for the modification date
 * @param filter the filter
 * @param plugin the plugin
 * @return the list of record keys returned by the search
 */
public List<Integer> getSearchResults(Directory directory, HashMap<String, List<RecordField>> mapSearch,
        Date dateCreation, Date dateCreationBegin, Date dateCreationEnd, Date dateModification,
        Date dateModificationBegin, Date dateModificationEnd, RecordFieldFilter filter, Plugin plugin) {
    List<Integer> listRecordResult = new ArrayList<Integer>();
    IRecordService recordService = SpringContextService.getBean(RecordService.BEAN_SERVICE);
    listRecordResult = recordService.getListRecordId(filter, plugin);

    if (mapSearch != null) {
        List<Integer> listRecordResultTmp = null;
        List<RecordField> recordFieldSearch;
        HashMap<String, Object> mapSearchItemEntry;
        boolean bSearchRecordEmpty;
        boolean bSearchEmpty;

        try {
            _searcher = new IndexSearcher(DirectoryReader.open(_luceneDirectory));
            IDirectorySearchEngine engine = SpringContextService.getBean(BEAN_SEARCH_ENGINE);
            listRecordResultTmp = new ArrayList<Integer>();
            bSearchEmpty = true;

            for (Object entryMapSearch : mapSearch.entrySet()) {
                recordFieldSearch = ((Entry<String, List<RecordField>>) entryMapSearch).getValue();
                int nIdEntry = DirectoryUtils
                        .convertStringToInt(((Entry<String, List<RecordField>>) entryMapSearch).getKey());
                bSearchRecordEmpty = true;

                if (recordFieldSearch != null) {
                    mapSearchItemEntry = new HashMap<String, Object>();
                    boolean bIsArray = false;
                    boolean bFirstRecord = true;

                    for (RecordField recordField : recordFieldSearch) {
                        if (recordField.getEntry() instanceof EntryTypeArray) {
                            // for arrays, we do a search on the content of each case
                            bIsArray = true;
                            mapSearchItemEntry = new HashMap<String, Object>();
                            recordField.getEntry().addSearchCriteria(mapSearchItemEntry, recordField);

                            if (mapSearchItemEntry.size() > 0) {
                                bSearchRecordEmpty = false;
                                bSearchEmpty = false;
                                mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY,
                                        directory.getIdDirectory());
                                mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY_ENTRY, nIdEntry);

                                List<Integer> ids = engine.getSearchResults(mapSearchItemEntry);

                                if (CollectionUtils.isEmpty(ids)) {
                                    listRecordResultTmp = new ArrayList<Integer>();
                                    break;
                                } else if (bFirstRecord) {
                                    listRecordResultTmp = ids;
                                    bFirstRecord = false;
                                } else {
                                    listRecordResultTmp = (List<Integer>) CollectionUtils
                                            .intersection(listRecordResultTmp, ids);
                                }
                            }
                        } else {
                            recordField.getEntry().addSearchCriteria(mapSearchItemEntry, recordField);
                        }
                    }

                    if (!bIsArray && (mapSearchItemEntry.size() > 0)) {
                        bSearchRecordEmpty = false;
                        bSearchEmpty = false;
                        mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY,
                                directory.getIdDirectory());
                        mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY_ENTRY, nIdEntry);
                        listRecordResultTmp.addAll(engine.getSearchResults(mapSearchItemEntry));
                    }

                    if (!bSearchRecordEmpty && !directory.isSearchOperatorOr()) {
                        // keeping order is important for display
                        listRecordResult = DirectoryUtils.retainAllIdsKeepingFirstOrder(listRecordResult,
                                listRecordResultTmp);
                        listRecordResultTmp = new ArrayList<Integer>();
                    }
                }
            }

            if (directory.isSearchOperatorOr() && !bSearchEmpty) {
                listRecordResult = DirectoryUtils.retainAllIdsKeepingFirstOrder(listRecordResult,
                        listRecordResultTmp);
            }

            // creation date of a record
            if (dateCreation != null) {
                listRecordResultTmp = new ArrayList<Integer>();
                mapSearchItemEntry = new HashMap<String, Object>();
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY, directory.getIdDirectory());
                dateCreation.setTime(dateCreation.getTime() + CONSTANT_TIME_CORRECTION);
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_DATE_CREATION, dateCreation);
                listRecordResultTmp.addAll(engine.getSearchResults(mapSearchItemEntry));

                // keeping order is important for display
                listRecordResult = DirectoryUtils.retainAllIdsKeepingFirstOrder(listRecordResult,
                        listRecordResultTmp);
            } else if ((dateCreationBegin != null) && (dateCreationEnd != null)) {
                dateCreationBegin.setTime(dateCreationBegin.getTime() + CONSTANT_TIME_CORRECTION);
                dateCreationEnd.setTime(dateCreationEnd.getTime() + CONSTANT_TIME_CORRECTION);

                listRecordResultTmp = new ArrayList<Integer>();
                mapSearchItemEntry = new HashMap<String, Object>();
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY, directory.getIdDirectory());
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_DATE_CREATION_BEGIN, dateCreationBegin);
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_DATE_CREATION_END, dateCreationEnd);
                listRecordResultTmp.addAll(engine.getSearchResults(mapSearchItemEntry));

                // keeping order is important for display
                listRecordResult = DirectoryUtils.retainAllIdsKeepingFirstOrder(listRecordResult,
                        listRecordResultTmp);
            }

            // modification date of a record
            if (dateModification != null) {
                listRecordResultTmp = new ArrayList<Integer>();
                mapSearchItemEntry = new HashMap<String, Object>();
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY, directory.getIdDirectory());
                dateModification.setTime(dateModification.getTime() + CONSTANT_TIME_CORRECTION);
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_DATE_MODIFICATION, dateModification);
                listRecordResultTmp.addAll(engine.getSearchResults(mapSearchItemEntry));

                // keeping order is important for display
                listRecordResult = DirectoryUtils.retainAllIdsKeepingFirstOrder(listRecordResult,
                        listRecordResultTmp);
            } else if ((dateModificationBegin != null) && (dateModificationEnd != null)) {
                dateModificationBegin.setTime(dateModificationBegin.getTime() + CONSTANT_TIME_CORRECTION);
                dateModificationEnd.setTime(dateModificationEnd.getTime() + CONSTANT_TIME_CORRECTION);

                listRecordResultTmp = new ArrayList<Integer>();
                mapSearchItemEntry = new HashMap<String, Object>();
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_ID_DIRECTORY, directory.getIdDirectory());
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_DATE_MODIFICATION_BEGIN, dateModificationBegin);
                mapSearchItemEntry.put(DirectorySearchItem.FIELD_DATE_MODIFICATION_END, dateModificationEnd);
                listRecordResultTmp.addAll(engine.getSearchResults(mapSearchItemEntry));

                // keeping order is important for display
                listRecordResult = DirectoryUtils.retainAllIdsKeepingFirstOrder(listRecordResult,
                        listRecordResultTmp);
            }
        } catch (Exception e) {
            AppLogService.error(e.getMessage(), e);
            // If an error occurred, clean the result list
            listRecordResult = new ArrayList<Integer>();
        }
    }

    return listRecordResult;
}
From source file: com.sun.honeycomb.admin.mgmt.server.HCCellAdapterBase.java

public BigInteger setCellProps(EventSender evt, HCCellProps value, Byte cellid) throws MgmtException {
    boolean updateSwitch = false;
    boolean updateSp = false;
    boolean isUpdateLocal = (localCellid == cellid.byteValue());
    HashMap map = new HashMap();

    if (!value.getAdminVIP().equals(MultiCellLib.getInstance().getAdminVIP())) {
        updateSp = true;
        updateSwitch = true;
        map.put(MultiCellLib.PROP_ADMIN_VIP, value.getAdminVIP());
    }
    if (!value.getDataVIP().equals(MultiCellLib.getInstance().getDataVIP())) {
        updateSp = true;
        updateSwitch = true;
        map.put(MultiCellLib.PROP_DATA_VIP, value.getDataVIP());
    }
    if (!value.getSpVIP().equals(MultiCellLib.getInstance().getSPVIP())) {
        updateSwitch = true;
        updateSp = true;
        map.put(MultiCellLib.PROP_SP_VIP, value.getSpVIP());
    }
    if (!value.getSubnet().equals(MultiCellLib.getInstance().getSubnet())) {
        updateSwitch = true;
        updateSp = true;
        map.put(MultiCellLib.PROP_SUBNET, value.getSubnet());
    }
    if (!value.getGateway().equals(MultiCellLib.getInstance().getGateway())) {
        updateSwitch = true;
        updateSp = true;
        map.put(MultiCellLib.PROP_GATEWAY, value.getGateway());
    }

    // A non-zero size() means at least one property changed and must be propagated.
    if (map.size() != 0) {
        if (!isUpdateLocal) {
            // Update the silo_info.xml properties on the local cell
            MultiCellLib.getInstance().updateProperties(cellid.byteValue(), map);
            try {
                evt.sendAsynchronousEvent(
                        "successfully updated the configuration [cell " + localCellid + "]");
            } catch (MgmtException e) {
                logger.severe("failed to send synchronous event " + e);
            }
            //
            // Other cells -- on which no configuration change occurs -- need to
            // notify Multicell about those changes.
            //
            Cell updatedCell = new Cell(cellid.byteValue(), value.getAdminVIP(), value.getDataVIP(),
                    value.getSpVIP(), null, value.getSubnet(), value.getGateway());

            // TODO: This code isn't valid for the emulator.
            // Proxy calls don't seem to be supported: the call to
            // api.changeCellCfg() will fail with a class-not-found
            // exception. Needs a fix for the emulator.
            MultiCellIntf api = MultiCellIntf.Proxy.getMultiCellAPI();
            if (api == null) {
                logger.severe("failed to grab multicellAPI");
                if (MultiCellLib.getInstance().isCellMaster()) {
                    throw new MgmtException("Internal error while notifying services on master cell. Will "
                            + " require a reboot of the master cell [cell " + localCellid + "]");
                } else {
                    return BigInteger.valueOf(-1);
                }
            }
            try {
                api.changeCellCfg(updatedCell);
            } catch (Exception e) {
                logger.log(Level.SEVERE, "failed to update Multicell service", e);
                if (MultiCellLib.getInstance().isCellMaster()) {
                    throw new MgmtException("Internal error while notifying services on master cell. Will "
                            + " require a reboot of the master cell [cell " + localCellid + "]");
                } else {
                    return BigInteger.valueOf(-1);
                }
            }
        } else {
            //
            // We need to perform this config update and any subsequent
            // operation in an async way, because the dataVIP may be
            // reconfigured under our feet and the CLI gets screwed.
            //
            try {
                evt.sendAsynchronousEvent("will update the configuration "
                        + " and reboot the cell [cell " + localCellid + "]");
            } catch (MgmtException e) {
                logger.severe("failed to send synchronous event " + e);
            }
            updatePropertiesAndRebootCell(map, updateSwitch, updateSp);
        }
    }
    return BigInteger.valueOf(0);
}
From source file: edu.umich.its.lti.google.GoogleLtiServlet.java

/**
 * Actual permission call to Google, running on a separate thread. A HashMap holds the
 * site id and a timestamp. As of 11/04/14, Google takes 5 minutes to insert permissions
 * for a roster of size 200. While iterating through the roster we check whether 60
 * minutes have passed since the start; if so, we terminate the thread. Once the
 * permission insertion is complete, we remove the site id from the HashMap to allow
 * further requests.
 * @param roster
 * @param handler
 * @param sendNotificationEmails
 * @param tcSessionData
 * @throws Exception
 */
private void insertPermissionCallToGoogleOnSeperateThread(HashMap<String, HashMap<String, String>> roster,
        FolderPermissionsHandler handler, boolean sendNotificationEmails, TcSessionData tcSessionData)
        throws Exception {
    long start = System.currentTimeMillis();
    long end = start + SIXTY_MINUTES_IN_MILLI_SEC; // 60 * 60 seconds * 1000 ms/sec
    int numberOFPermissionsInserted = 0;
    M_log.debug("duplicateCheckerHashMap while share: " + duplicateChecker.toString());

    for (Entry<String, HashMap<String, String>> entry : roster.entrySet()) {
        String emailAddress = entry.getKey();
        HashMap<String, String> value = entry.getValue();

        if (System.currentTimeMillis() > end) {
            String content = "The Google call insertion of permission for the roster size: \"" + roster.size()
                    + "\" took more than 60 minutes, this is unusual. The Email Id: \"";
            helperLogMessages(tcSessionData, content, null);
            break;
        }

        String roles = value.get("role");
        if (!getIsEmpty(emailAddress) && !handler.getIsInstructor(emailAddress)) {
            StringBuilder s = new StringBuilder();
            s.append("Insertion of permission call to google for user: ");
            s.append(emailAddress);
            s.append(" in site :");
            s.append(tcSessionData.getContextId());
            M_log.debug(s.toString());

            // If the result is not null, the user has permission >= inserted
            if (null != handler.insertPermission(emailAddress, roles, sendNotificationEmails)) {
                numberOFPermissionsInserted++;
            }
        }
    }
    removeSiteIdFromMap(tcSessionData);
    M_log.info("Number of permissions Inserted Successfully to site:" + tcSessionData.getContextId()
            + " UserId: " + tcSessionData.getUserId() + " is " + numberOFPermissionsInserted + " / "
            + (roster.size() - 1));
}
From source file: fr.cirad.mgdb.exporting.markeroriented.HapMapExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    int markerCount = markerCursor.count();

    ZipOutputStream zos = new ZipOutputStream(outputStream);
    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    ArrayList<String> individualList = new ArrayList<String>();
    for (int i = 0; i < sampleIDs.size(); i++) {
        Individual individual = individuals.get(i);
        if (!individualList.contains(individual.getId())) {
            individualList.add(individual.getId());
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".hapmap"));
    String header = "rs#" + "\t" + "alleles" + "\t" + "chrom" + "\t" + "pos" + "\t" + "strand" + "\t"
            + "assembly#" + "\t" + "center" + "\t" + "protLSID" + "\t" + "assayLSID" + "\t" + "panelLSID"
            + "\t" + "QCcode";
    zos.write(header.getBytes());
    for (int i = 0; i < individualList.size(); i++) {
        zos.write(("\t" + individualList.get(i)).getBytes());
    }
    zos.write((LINE_SEPARATOR).getBytes());

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    short nProgress = 0, nPreviousProgress = 0;
    long nLoadedMarkerCount = 0;

    while (markerCursor == null || markerCursor.hasNext()) {
        int nLoadedMarkerCountInLoop = 0;
        Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
        boolean fStartingNewChunk = true;
        markerCursor.batchSize(nChunkSize);
        while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
            DBObject exportVariant = markerCursor.next();
            DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
            markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                    refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                            + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
            nLoadedMarkerCountInLoop++;
            fStartingNewChunk = false;
        }

        List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
        // query mongo db for matching genotypes
        LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                mongoTemplate, sampleIDs, currentMarkers, true,
                null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/);

        // read data and write results into temporary files (one per sample)
        for (VariantData variant : variantsAndRuns.keySet()) {
            Comparable variantId = variant.getId();
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(variantId);
                if (syn != null)
                    variantId = syn;
            }
            boolean fIsSNP = variant.getType().equals(Type.SNP.toString());
            byte[] missingGenotype = ("\t" + "NN").getBytes();
            String[] chromAndPos = markerChromosomalPositions.get(variant.getId()).split(":");
            zos.write(((variantId == null ? variant.getId() : variantId) + "\t"
                    + StringUtils.join(variant.getKnownAlleleList(), "/") + "\t" + chromAndPos[0] + "\t"
                    + Long.parseLong(chromAndPos[1]) + "\t" + "+").getBytes());
            for (int j = 0; j < 6; j++)
                zos.write(("\t" + "NA").getBytes());

            Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
            Collection<VariantRunData> runs = variantsAndRuns.get(variant);
            if (runs != null)
                for (VariantRunData run : runs)
                    for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                        SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                        String gtCode = run.getSampleGenotypes().get(sampleIndex).getCode();
                        String individualId = individuals
                                .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                .getId();
                        List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                        if (storedIndividualGenotypes == null) {
                            storedIndividualGenotypes = new ArrayList<String>();
                            individualGenotypes.put(individualId, storedIndividualGenotypes);
                        }
                        storedIndividualGenotypes.add(gtCode);
                        gqValueForSampleId.put(individualId,
                                (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ));
                        dpValueForSampleId.put(individualId,
                                (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP));
                    }

            int writtenGenotypeCount = 0;
            for (String individualId : individualList /* we use this list because it has the proper ordering */) {
                int individualIndex = individualList.indexOf(individualId);
                while (writtenGenotypeCount < individualIndex - 1) {
                    zos.write(missingGenotype);
                    writtenGenotypeCount++;
                }

                List<String> genotypes = individualGenotypes.get(individualId);
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                int highestGenotypeCount = 0;
                String mostFrequentGenotype = null;
                if (genotypes != null)
                    for (String genotype : genotypes) {
                        if (genotype.length() == 0)
                            continue; /* skip missing genotypes */

                        Integer gqValue = gqValueForSampleId.get(individualId);
                        if (gqValue != null && gqValue < nMinimumGenotypeQuality)
                            continue; /* skip this sample because its GQ is under the threshold */

                        Integer dpValue = dpValueForSampleId.get(individualId);
                        if (dpValue != null && dpValue < nMinimumReadDepth)
                            continue; /* skip this sample because its DP is under the threshold */

                        int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                        if (gtCount > highestGenotypeCount) {
                            highestGenotypeCount = gtCount;
                            mostFrequentGenotype = genotype;
                        }
                        genotypeCounts.put(genotype, gtCount);
                    }

                byte[] exportedGT = mostFrequentGenotype == null ? missingGenotype
                        : ("\t" + StringUtils.join(variant.getAllelesFromGenotypeCode(mostFrequentGenotype),
                                fIsSNP ? "" : "/")).getBytes();
                zos.write(exportedGT);
                writtenGenotypeCount++;

                if (genotypeCounts.size() > 1)
                    warningFileWriter.write("- Dissimilar genotypes found for variant "
                            + (variantId == null ? variant.getId() : variantId) + ", individual "
                            + individualId + ". Exporting most frequent: " + new String(exportedGT) + "\n");
            }
            while (writtenGenotypeCount < individualList.size()) {
                zos.write(missingGenotype);
                writtenGenotypeCount++;
            }
            zos.write((LINE_SEPARATOR).getBytes());
        }

        if (progress.hasAborted())
            return;

        nLoadedMarkerCount += nLoadedMarkerCountInLoop;
        nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
        if (nProgress > nPreviousProgress) {
            // if (nProgress%5 == 0)
            //     LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
    }

    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            zos.write((sLine + "\n").getBytes());
            in.readLine();
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();
    zos.close();
    progress.setCurrentStepProgress((short) 100);
}
From source file: de.csw.expertfinder.mediawiki.api.MediaWikiAPI.java

/**
 * Returns all contributions for the given user.
 * @param userName the user whose contributions are fetched
 * @return the user's contributions, aggregated per article and sorted by
 *         descending contribution count
 * @throws MediaWikiAPIException
 */
public List<MediaWikiArticleContribution> getAllContributionsForUser(String userName)
        throws MediaWikiAPIException {
    BasicNameValuePair[] params = new BasicNameValuePair[] { new BasicNameValuePair("list", "usercontribs"),
            new BasicNameValuePair("ucnamespace", "0"), new BasicNameValuePair("ucuser", userName),
            new BasicNameValuePair("ucshow", "!minor"), new BasicNameValuePair("uclimit", "500") };

    HashMap<Integer, MediaWikiArticleContribution> contributionsByArticleId = new HashMap<Integer, MediaWikiArticleContribution>();

    for (;;) {
        Document doc = queryMediaWiki("query", params);
        NodeList itemElements = doc.getElementsByTagName("item");
        int len = itemElements.getLength();
        for (int i = 0; i < len; i++) {
            Element itemElement = (Element) itemElements.item(i);
            int articleId = Integer.parseInt(itemElement.getAttribute("pageid"));
            String title = itemElement.getAttribute("title");
            MediaWikiArticleContribution contribution = contributionsByArticleId.get(articleId);
            if (contribution == null) {
                contribution = new MediaWikiArticleContribution(articleId, title, userName);
                contributionsByArticleId.put(articleId, contribution);
            }
            contribution.increaseContributionCount();
        }

        NodeList queryContinueElements = doc.getElementsByTagName("query-continue");
        if (queryContinueElements.getLength() == 0) {
            // no more result pages: collect, sort and return
            ArrayList<MediaWikiArticleContribution> result = new ArrayList<MediaWikiArticleContribution>(
                    contributionsByArticleId.size());
            result.addAll(contributionsByArticleId.values());
            Collections.sort(result, new Comparator<MediaWikiArticleContribution>() {
                public int compare(MediaWikiArticleContribution o1, MediaWikiArticleContribution o2) {
                    // we want the result to be sorted in descending order, thus we swap o1 and o2 here.
                    return o2.getContributionCount().compareTo(o1.getContributionCount());
                }
            });
            return result;
        }

        Element queryContinueElement = (Element) queryContinueElements.item(0);
        Element userContribsElement = (Element) queryContinueElement.getElementsByTagName("usercontribs")
                .item(0);
        String ucstart = userContribsElement.getAttribute("ucstart");
        params = new BasicNameValuePair[] { new BasicNameValuePair("list", "usercontribs"),
                new BasicNameValuePair("ucnamespace", "0"), new BasicNameValuePair("ucuser", userName),
                new BasicNameValuePair("ucshow", "!minor"), new BasicNameValuePair("uclimit", "500"),
                new BasicNameValuePair("ucstart", ucstart) };
    }
}
From source file: org.jtotus.database.NetworkGoogle.java

public HashMap<String, Double> fetchPeriodAsMap(String stockName, DateTime startDate, DateTime endDate) {
    HashMap<String, Double> retMap = new HashMap<String, Double>(500);
    DefaultHttpClient client = new DefaultHttpClient();
    HttpGet httpGet = null;

    try {
        DateTimeFormatter formatterOUT = DateTimeFormat.forPattern(timePatternForWrite);
        DateTimeFormatter formatterIN = DateTimeFormat.forPattern(timePatternForRead);

        String query = url + "?q=" + names.getHexName(stockName) + "&" + "startdate="
                + formatterOUT.print(startDate) + "&" + "enddate=" + formatterOUT.print(endDate) + "&"
                + "&num=30&output=csv";
        System.out.printf("HttpGet:%s : date:%s\n", query, formatterOUT.print(startDate));

        httpGet = new HttpGet(query);
        HttpResponse response = client.execute(httpGet);
        StatusLine status = response.getStatusLine();
        if (status.getStatusCode() != 200) {
            throw new IOException("Invalid response from server: " + status.toString());
        }

        HttpEntity entity = response.getEntity();
        BufferedReader reader = new BufferedReader(new InputStreamReader(entity.getContent()));

        // Columns: Date,Open,High,Low,Close,Volume
        String line = reader.readLine(); // skip the header
        while ((line = reader.readLine()) != null) {
            String[] values = line.split(",");
            DateTime date = formatterIN.parseDateTime(values[0]);
            double value = Double.parseDouble(values[4]);
            retMap.put(formatter.print(date), value);
        }
    } catch (IOException ex) {
        System.err.printf("Unable to find market data for: %s - %s\n", names.getHexName(stockName), stockName);
    } catch (IllegalArgumentException ex) {
        System.err.printf("Unable to find market data for: %s - %s\n", names.getHexName(stockName), stockName);
    } finally {
        if (httpGet != null) {
        }
    }
    System.out.printf("NetworkGoogle fetched : %d values\n", retMap.size());
    return retMap;
}
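Note that the 500 passed to the HashMap constructor above is an initial capacity hint, not a size: size() reports only the entries actually stored, so the final printf shows how many quotes were parsed. A small standalone sketch of the distinction:

import java.util.HashMap;

public class CapacityVsSizeDemo {
    public static void main(String[] args) {
        // The capacity argument reserves bucket space up front; it creates no entries.
        HashMap<String, Double> quotes = new HashMap<String, Double>(500);
        System.out.println(quotes.size()); // 0

        quotes.put("2024-01-02", 101.5);
        System.out.println(quotes.size()); // 1
    }
}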
From source file: org.callistasoftware.netcare.core.job.SystemAlarmJob.java

/**
 * Notifies mobile users that it is time to perform an activity.
 */
@Scheduled(fixedDelay = 300000)
public void reminderJob() {
    log.info("======== REMINDER JOB STARTED =========");
    HashMap<PatientEntity, Integer> patients = new HashMap<PatientEntity, Integer>();

    Calendar cal = Calendar.getInstance();
    cal.add(Calendar.MINUTE, reminderTime);
    Date start = cal.getTime();

    log.debug("Find all scheduled activities with scheduled time less than: {} and that are not already reported.",
            start);
    List<ScheduledActivityEntity> list = saRepo.findByScheduledTimeLessThanAndReportedTimeIsNull(start);
    log.debug("Reminder: {} candidates found", list.size());

    for (ScheduledActivityEntity sae : list) {
        /*
         * Don't send a reminder unless the patient wants it.
         */
        if (!sae.getActivityDefinitionEntity().isReminder()
                || !sae.getActivityDefinitionEntity().getHealthPlan().isActive()) {
            continue;
        }

        PatientEntity patient = sae.getActivityDefinitionEntity().getHealthPlan().getForPatient();
        log.debug("Reminder: for patient {}, activity {}", patient.getFirstName(),
                sae.getActivityDefinitionEntity().getActivityType().getName());

        log.debug("==== PUSH CHECKS ====");
        log.debug("Already reminded: {}", sae.isReminderDone());
        log.debug("Is activity already reported: {}", sae.getReportedTime() != null);
        log.debug("Is patient push enabled: {}", patient.isPushEnbaled());
        log.debug("=====================");

        if (!sae.isReminderDone() && sae.getReportedTime() == null && patient.isPushEnbaled()) {
            Integer i = patients.get(patient);
            log.debug("Reminder: for patient {} -- add to send list", patient.getFirstName());
            patients.put(patient, (i == null) ? 1 : i.intValue() + 1);
            sae.setReminderDone(true);
            saRepo.save(sae);
        }
    }

    log.debug("Reminder: {} to send", patients.size());
    for (Map.Entry<PatientEntity, Integer> p : patients.entrySet()) {
        log.debug("Reminder: send {} new events reminder to patient {}", p.getValue(),
                p.getKey().getFirstName());
        sendReminder(p.getKey(), p.getValue());
    }
    log.info("======== REMINDER JOB COMPLETED =========");
}
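The job above counts reminders per patient with a get-then-put pair. On Java 8 and later, HashMap.merge collapses that pattern into one call; a standalone sketch with hypothetical keys:

import java.util.HashMap;

public class CountingDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        for (String patient : new String[] { "alice", "bob", "alice" }) {
            // Equivalent to: i = counts.get(patient); counts.put(patient, i == null ? 1 : i + 1);
            counts.merge(patient, 1, Integer::sum);
        }
        System.out.println(counts.size()); // 2 distinct patients
        System.out.println(counts); // e.g. {bob=1, alice=2} (iteration order not guaranteed)
    }
}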