List of usage examples for java.util HashMap remove
public V remove(Object key)
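Before the project examples below, here is a minimal self-contained sketch of the method's contract: remove deletes the mapping for the given key if present and returns the previously associated value, or null if there was none. Note that a null return can also mean the key was mapped to null, so use containsKey when that distinction matters.

import java.util.HashMap;

public class RemoveDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);

        Integer removed = counts.remove("a");   // returns 1, the old value
        Integer missing = counts.remove("zzz"); // returns null, key was absent

        System.out.println(removed + " " + missing + " " + counts); // 1 null {b=2}
    }
}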
From source file: adminviews.MCCView.java

@SuppressWarnings("unchecked")
private List<List<ISampleBean>> prepDefaultMCCSamples() {
    String timepoint = this.timepoint.getValue();
    String treatment = this.treatment.getValue();
    String patient = this.patient.getValue();
    List<List<ISampleBean>> res = new ArrayList<List<ISampleBean>>();
    List<ISampleBean> patients = new ArrayList<ISampleBean>();
    List<ISampleBean> urine = new ArrayList<ISampleBean>();
    List<ISampleBean> uAliquots = new ArrayList<ISampleBean>();
    List<ISampleBean> liver = new ArrayList<ISampleBean>();
    List<ISampleBean> plasma = new ArrayList<ISampleBean>();
    List<ISampleBean> pAliquots = new ArrayList<ISampleBean>();
    List<ISampleBean> molecules = new ArrayList<ISampleBean>();
    String project = (String) mccProjects.getValue();
    if (!newProject.isEmpty())
        project = newProject.getValue();
    String patientID = project + "ENTITY-" + patient; // new parent
    if (patient.length() < 2)
        patient = "0" + patient;
    String patientExtID = treatment.substring(0, 1).toUpperCase() + ":" + patient;
    HashMap<String, Object> metadata = new HashMap<String, Object>();
    // if new patient, add to samples to register
    if (!this.patients.contains(patientExtID)) {
        metadata.put("Q_EXTERNALDB_ID", patientExtID);
        metadata.put("Q_NCBI_ORGANISM", "9606");
        patients.add(new TSVSampleBean(patientID, project + "E1", project, mccSpace,
                "Q_BIOLOGICAL_ENTITY", "patient #" + patient, "", metadata));
    }
    String extIDBase = patientExtID + ":" + timepoint + ":";
    String urineExtIDBase = extIDBase + "U:1";
    metadata = new HashMap<String, Object>();
    metadata.put("XML_FACTORS", "treatment: " + treatment + "; timepoint: evaluation #" + timepoint);
    metadata.put("Q_PRIMARY_TISSUE", "URINE");
    metadata.put("Q_EXTERNALDB_ID", urineExtIDBase);
    String urineID = counter.getNewBarcode(); // parent
    urine.add(new TSVSampleBean(urineID, project + "E2", project, mccSpace, "Q_BIOLOGICAL_SAMPLE",
            "urine sample", patientID, (HashMap<String, Object>) metadata.clone()));
    for (int i = 1; i < 6; i++) {
        char lower = (char) ('a' + i - 1);
        String ID = counter.getNewBarcode();
        metadata.put("Q_EXTERNALDB_ID", urineExtIDBase + lower);
        uAliquots.add(new TSVSampleBean(ID, project + "E3", project, mccSpace, "Q_BIOLOGICAL_SAMPLE",
                "aliquot #" + i, urineID, (HashMap<String, Object>) metadata.clone()));
    }
    for (int i = 1; i < 4; i++) {
        String plasmaExtID = extIDBase + "B:" + i;
        String plasmaID = counter.getNewBarcode(); // parent
        metadata.put("Q_EXTERNALDB_ID", plasmaExtID);
        metadata.put("Q_PRIMARY_TISSUE", "BLOOD_PLASMA");
        plasma.add(new TSVSampleBean(plasmaID, project + "E4", project, mccSpace, "Q_BIOLOGICAL_SAMPLE",
                "EDTA plasma #" + i, patientID, (HashMap<String, Object>) metadata.clone()));
        if (i == 1) {
            for (int j = 1; j < 3; j++) {
                char lower = (char) ('a' + j - 1);
                String ID = counter.getNewBarcode();
                metadata.put("Q_EXTERNALDB_ID", plasmaExtID + lower);
                pAliquots.add(new TSVSampleBean(ID, project + "E4", project, mccSpace,
                        "Q_BIOLOGICAL_SAMPLE", "plasma aliquot #" + j, plasmaID,
                        (HashMap<String, Object>) metadata.clone()));
            }
        }
        if (i == 3) {
            metadata.remove("Q_PRIMARY_TISSUE");
            for (int j = 1; j < 5; j++) {
                char lower = (char) ('a' + j - 1);
                String ID = counter.getNewBarcode();
                metadata.put("Q_EXTERNALDB_ID", plasmaExtID + lower);
                metadata.put("Q_SAMPLE_TYPE", "SMALLMOLECULES");
                molecules.add(new TSVSampleBean(ID, project + "E5", project, mccSpace, "Q_TEST_SAMPLE",
                        "cryovial #" + j, plasmaID, (HashMap<String, Object>) metadata.clone()));
            }
            metadata.remove("Q_SAMPLE_TYPE");
        }
    }
    String tumorExtBase = extIDBase + "T";
    for (int i = 1; i < 9; i++) {
        String ID = counter.getNewBarcode();
        metadata.put("Q_EXTERNALDB_ID", tumorExtBase + i);
        metadata.put("Q_PRIMARY_TISSUE", "HEPATOCELLULAR_CARCINOMA");
        liver.add(new TSVSampleBean(ID, project + "E6", project, mccSpace, "Q_BIOLOGICAL_SAMPLE",
                "tumor biopsy #" + i, patientID, (HashMap<String, Object>) metadata.clone()));
    }
    String liverExtBase = extIDBase + "L";
    for (int i = 1; i < 5; i++) {
        String ID = counter.getNewBarcode();
        metadata.put("Q_EXTERNALDB_ID", liverExtBase + i);
        metadata.put("Q_PRIMARY_TISSUE", "LIVER");
        liver.add(new TSVSampleBean(ID, project + "E6", project, mccSpace, "Q_BIOLOGICAL_SAMPLE",
                "liver biopsy #" + i, patientID, (HashMap<String, Object>) metadata.clone()));
    }
    List<List<ISampleBean>> dummy = new ArrayList<List<ISampleBean>>(
            Arrays.asList(patients, urine, uAliquots, plasma, pAliquots, molecules, liver));
    for (List<ISampleBean> l : dummy)
        if (l.size() > 0)
            res.add(l);
    return res;
}
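The example above reuses one metadata HashMap as a template: keys are overwritten per sample, the map is cloned for each bean, and remove clears keys such as Q_PRIMARY_TISSUE and Q_SAMPLE_TYPE so they do not leak into later iterations. A stand-alone sketch of that pattern; every name and value here is invented for illustration:

import java.util.HashMap;

public class TemplateMapDemo {
    public static void main(String[] args) {
        // Illustrative only: keys and values are made up
        HashMap<String, Object> template = new HashMap<>();
        template.put("tissue", "BLOOD_PLASMA");

        for (int i = 1; i <= 3; i++) {
            template.put("id", "sample-" + i);
            if (i == 3) {
                // the last iteration describes a different sample type:
                // drop the key so the cloned snapshot does not carry it
                template.remove("tissue");
                template.put("type", "SMALLMOLECULES");
            }
            @SuppressWarnings("unchecked")
            HashMap<String, Object> snapshot = (HashMap<String, Object>) template.clone();
            System.out.println(snapshot);
        }
        template.remove("type"); // reset the template for any later reuse
    }
}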
From source file: StorageEngineClient.CombineFileInputFormat.java

private void getMoreSplitsWithStatus(JobConf job, Path[] paths1, Map<String, FileStatus> fileNameToStatus,
        long maxSize, long minSizeNode, long minSizeRack, List<CombineFileSplit> splits)
        throws IOException, NullGzFileException {
    if (paths1.length == 0) {
        return;
    }
    Path[] paths = paths1;
    ArrayList<Path> splitable = new ArrayList<Path>();
    ArrayList<Path> unsplitable = new ArrayList<Path>();
    for (int i = 0; i < paths1.length; i++) {
        if (isSplitable(paths1[i].getFileSystem(job), paths1[i])) {
            splitable.add(paths1[i]);
        } else {
            unsplitable.add(paths1[i]);
        }
    }
    if (unsplitable.size() != 0) {
        paths = new Path[splitable.size()];
        splitable.toArray(paths);
    }

    OneFileInfo[] files;
    HashMap<String, List<OneBlockInfo>> rackToBlocks = new HashMap<String, List<OneBlockInfo>>();
    HashMap<OneBlockInfo, String[]> blockToNodes = new HashMap<OneBlockInfo, String[]>();
    HashMap<String, List<OneBlockInfo>> nodeToBlocks = new HashMap<String, List<OneBlockInfo>>();
    files = new OneFileInfo[paths.length];

    long totLength = 0;
    for (int i = 0; i < paths.length; i++) {
        files[i] = new OneFileInfo(paths[i], fileNameToStatus.get(paths[i].toString()), job, rackToBlocks,
                blockToNodes, nodeToBlocks);
        totLength += files[i].getLength();
    }

    for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = nodeToBlocks.entrySet().iterator();
            iter.hasNext();) {
        Map.Entry<String, List<OneBlockInfo>> onenode = iter.next();
        this.processsplit(job, onenode, blockToNodes, maxSize, minSizeNode, minSizeRack, splits, "node");
    }
    for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = rackToBlocks.entrySet().iterator();
            iter.hasNext();) {
        Map.Entry<String, List<OneBlockInfo>> onerack = iter.next();
        this.processsplit(job, onerack, blockToNodes, maxSize, minSizeNode, minSizeRack, splits, "rack");
    }
    this.processsplit(job, null, blockToNodes, maxSize, minSizeNode, minSizeRack, splits, "all");

    int maxFileNumPerSplit = job.getInt("hive.merge.inputfiles.maxFileNumPerSplit", 1000);
    HashSet<OneBlockInfo> hs = new HashSet<OneBlockInfo>();
    while (blockToNodes.size() > 0) {
        ArrayList<OneBlockInfo> validBlocks = new ArrayList<OneBlockInfo>();
        List<String> nodes = new ArrayList<String>();
        int filenum = 0;
        hs.clear();
        for (OneBlockInfo blockInfo : blockToNodes.keySet()) {
            validBlocks.add(blockInfo);
            filenum++;
            for (String host : blockInfo.hosts) {
                nodes.add(host);
            }
            hs.add(blockInfo);
            if (filenum >= maxFileNumPerSplit) {
                break;
            }
        }
        // remove the collected blocks only after the keySet() iteration is done,
        // to avoid a ConcurrentModificationException
        for (OneBlockInfo blockInfo : hs) {
            blockToNodes.remove(blockInfo);
        }
        this.addCreatedSplit(job, splits, nodes, validBlocks);
    }

    if (unsplitable.size() != 0) {
        HashMap<OneBlockInfo, String[]> fileToNodes = new HashMap<OneBlockInfo, String[]>();
        for (Path path : unsplitable) {
            FileSystem fs = path.getFileSystem(job);
            FileStatus stat = fileNameToStatus.get(path.toString()); // fs.getFileStatus(path);
            long len = stat.getLen();
            BlockLocation[] locations = fs.getFileBlockLocations(stat, 0, len);
            if (locations.length == 0) {
                console.printError("The file " + path.toUri().toString() + " maybe is empty, please check it!");
                throw new NullGzFileException(
                        "The file " + path.toUri().toString() + " maybe is empty, please check it!");
            }
            LOG.info("unsplitable file:" + path.toUri().toString() + " length:" + len);
            OneBlockInfo oneblock = new OneBlockInfo(path, 0, len, locations[0].getHosts(),
                    locations[0].getTopologyPaths());
            fileToNodes.put(oneblock, locations[0].getHosts());
        }
        this.processsplitForUnsplit(job, null, fileToNodes, maxSize, minSizeNode, minSizeRack, splits, "all");
    }
}
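Note how the example drains blockToNodes: keys are first collected into a HashSet during the keySet() iteration and only removed afterwards, because calling remove on the map while its keySet iterator is live would throw a ConcurrentModificationException on the next iterator step. A minimal sketch of this collect-then-remove batching, with invented names and sizes:

import java.util.HashMap;
import java.util.HashSet;

public class BatchDrainDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> pending = new HashMap<>();
        for (int i = 0; i < 10; i++) pending.put("block-" + i, i);

        int maxPerBatch = 4;
        while (!pending.isEmpty()) {
            HashSet<String> batch = new HashSet<>();
            for (String key : pending.keySet()) { // read-only pass over the live map
                batch.add(key);
                if (batch.size() >= maxPerBatch) break;
            }
            // removing inside the loop above would throw ConcurrentModificationException,
            // so the keys are collected first and removed afterwards
            for (String key : batch) pending.remove(key);
            System.out.println("processed batch: " + batch);
        }
    }
}

The single-pass alternative is to iterate pending.keySet().iterator() and call the iterator's own remove(), but the two-phase version also gives the code a natural place to cap the batch size.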
From source file: edu.ehu.galan.wiki2wordnet.wikipedia2wordnet.Mapper.java

/**
 * Maps LiteWi loaded topics (LionTopics) that have a Wikipedia mapping to WordNet using
 * Babelnet and Fernando's work.
 *
 * @param pBabelnetMappings - Babelnet mappings
 * @param pFernandoMappings - Samuel Fernando mappings
 * @param pWikiTitles - the list of Wikipedia articles that will be mapped to WordNet
 * @param pDesambiguationContext - the reference topics that will be used as context, MUST HAVE
 * a unique WordNet mapping (synset) in the Babelnet/Fernando mappings
 * @param pFile - the file where the ukb input will be processed in case it is needed
 * @param pWordnet - a WordNet dict using the edu.mit.jwi library with WordNet 3.0
 * @param pUkbBinDir - the dir where the ukb binaries are
 * @return - a hashmap with the wikititle / synset pairs
 */
public static HashMap<String, Integer> babelnetFernandoToWordnet(
        HashMap<String, List<Wiki2WordnetMapping>> pBabelnetMappings,
        HashMap<String, Wiki2WordnetMapping> pFernandoMappings, List<String> pWikiTitles, String pFile,
        IDictionary pWordnet, List<String> pDesambiguationContext, String pUkbBinDir) {
    HashMap<String, Integer> mappings = new HashMap<>(pWikiTitles.size());
    HashMap<String, List<Wiki2WordnetMapping>> ukbList = new HashMap<>();
    if (pBabelnetMappings != null && pFernandoMappings != null) {
        pWikiTitles.addAll(pDesambiguationContext);
        for (String title : pWikiTitles) {
            List<Wiki2WordnetMapping> babelMapping = null;
            Wiki2WordnetMapping fernandoMapping = null;
            if (pBabelnetMappings.containsKey(title)) {
                babelMapping = pBabelnetMappings.get(title);
            }
            if (pFernandoMappings.containsKey(title)) {
                fernandoMapping = pFernandoMappings.get(title);
            }
            if (babelMapping != null && fernandoMapping == null) {
                if (babelMapping.size() == 1) {
                    mappings.put(title, babelMapping.get(0).getWordNetSynset());
                } else if (babelMapping.size() > 1) {
                    ukbList.put(title, babelMapping);
                    int l = title.split("\\s").length;
                    for (Wiki2WordnetMapping babelMapping1 : babelMapping) {
                        int l1 = babelMapping1.getWordnetTitle().split("\\s+").length;
                        if (l == l1) {
                            mappings.put(title, babelMapping1.getWordNetSynset());
                            break;
                        }
                    }
                    if (!mappings.containsKey(title)) {
                        mappings.put(title, babelMapping.get(0).getWordNetSynset());
                    }
                }
            } else if (babelMapping == null && fernandoMapping != null) {
                mappings.put(title, fernandoMapping.getWordNetSynset());
            } else if (babelMapping != null && fernandoMapping != null) {
                if (babelMapping.size() == 1) {
                    if (babelMapping.get(0).getWordNetSynset() == fernandoMapping.getWordNetSynset()) {
                        mappings.put(title, fernandoMapping.getWordNetSynset());
                    } else {
                        List<Wiki2WordnetMapping> maps = new ArrayList<>();
                        maps.add(fernandoMapping);
                        maps.add(babelMapping.get(0));
                        ukbList.put(title, maps);
                        mappings.put(title, fernandoMapping.getWordNetSynset());
                    }
                } else {
                    List<Wiki2WordnetMapping> maps = new ArrayList<>();
                    maps.add(fernandoMapping);
                    maps.addAll(babelMapping);
                    ukbList.put(title, maps);
                    int l = title.split("\\s+").length;
                    for (Wiki2WordnetMapping babelMapping1 : babelMapping) {
                        int l1 = babelMapping1.getWordnetTitle().split("\\s+").length;
                        if (l == l1) {
                            mappings.put(title, babelMapping1.getWordNetSynset());
                            break;
                        }
                    }
                    Integer mapped = mappings.get(title);
                    if (mapped == null || mapped == -1) { // guard against a missing entry before unboxing
                        mappings.put(title, fernandoMapping.getWordNetSynset());
                    }
                }
            }
        }
    } else {
        logger.error("No mappings provided");
    }
    if (!ukbList.isEmpty()) {
        disambiguateUKB(ukbList, pDesambiguationContext, pFile, pWordnet, mappings, pUkbBinDir);
    }
    // the context topics were only needed for disambiguation; strip them from the result
    for (String context : pDesambiguationContext) {
        mappings.remove(context);
    }
    return mappings;
}
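The final loop strips the disambiguation-context entries out of the result map with remove, so callers only see mappings for the titles they actually asked about. A distilled sketch of that shape; the length-based "lookup" is a stand-in for the real mapping logic:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class StripContextDemo {
    static HashMap<String, Integer> mapAll(List<String> titles, List<String> context) {
        HashMap<String, Integer> mappings = new HashMap<>();
        for (String t : titles) mappings.put(t, t.length());  // stand-in for a real lookup
        for (String c : context) mappings.put(c, c.length()); // context gets mapped too...
        for (String c : context) mappings.remove(c);          // ...but is stripped from the result
        return mappings;
    }

    public static void main(String[] args) {
        System.out.println(mapAll(Arrays.asList("quark", "lepton"), Arrays.asList("physics")));
        // {lepton=6, quark=5} -- the context word is gone
    }
}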
From source file: uk.ac.diamond.scisoft.ncd.rcp.handlers.DetectorInformationHandler.java

@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
    IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
    IWorkbenchPage page = window.getActivePage();
    IStructuredSelection sel = (IStructuredSelection) page.getSelection();
    IWorkbenchPart part = page.getActivePart();

    // Counter for the number of NeXus files in the selection
    int idxFiles = 0;
    if (sel != null) {
        Object[] selObjects = sel.toArray();
        HashMap<String, Integer> detNames = new HashMap<String, Integer>();
        HashMap<String, GroupNode> detInfo = new HashMap<String, GroupNode>();
        for (int i = 0; i < selObjects.length; i++) {
            String tmpfilePath, tmpfileExtension;
            if (selObjects[i] instanceof IFile) {
                tmpfilePath = ((IFile) selObjects[i]).getLocation().toString();
                tmpfileExtension = ((IFile) selObjects[i]).getFileExtension();
            } else {
                tmpfilePath = ((File) selObjects[i]).getAbsolutePath();
                tmpfileExtension = FilenameUtils.getExtension(tmpfilePath);
            }
            if (!"nxs".equals(tmpfileExtension)) { // null-safe: IFile.getFileExtension() may return null
                continue;
            }
            try {
                Tree tmpfile = new HDF5Loader(tmpfilePath).loadTree();
                String[] locations = new String[] { "/entry1", "/entry1/instrument" };
                for (String loc : locations) {
                    NodeLink nodeLink = tmpfile.findNodeLink(loc);
                    if (nodeLink != null) {
                        GroupNode node = (GroupNode) nodeLink.getDestination();
                        Iterator<String> iterator = node.getNodeNameIterator();
                        while (iterator.hasNext()) {
                            String tmpName = iterator.next();
                            Node tmpTree = node.findNodeLink(tmpName).getDestination();
                            if (tmpTree instanceof GroupNode) {
                                String nxClass = tmpTree.getAttribute(NexusUtils.NXCLASS).getFirstElement();
                                if (nxClass.equals(Nexus.DETECT) || nxClass.equals(Nexus.MONITOR)) {
                                    if (detNames.containsKey(tmpName)) {
                                        detNames.put(tmpName, detNames.get(tmpName) + 1);
                                    } else {
                                        detNames.put(tmpName, 1);
                                        detInfo.put(tmpName, (GroupNode) tmpTree);
                                    }
                                }
                            }
                        }
                    }
                }
                idxFiles++;
            } catch (Exception e) {
                logger.error("SCISOFT NCD: Error reading data reduction parameters", e);
                return null;
            }
        }
        // Remove detectors that are not found in all the selected files
        Iterator<Entry<String, Integer>> it = detNames.entrySet().iterator();
        while (it.hasNext()) {
            Entry<String, Integer> detName = it.next();
            if (detName.getValue() != idxFiles) {
                detInfo.remove(detName.getKey());
            }
        }
        updateDetectorInformation(detInfo, window);
    }
    page.activate(part);
    return null;
}
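The cleanup step iterates one map (detNames) and removes from another (detInfo); that is safe because no iterator over detInfo is involved, and it leaves only the detectors present in every selected file. A compact sketch of this intersection filter, with invented detector names:

import java.util.HashMap;
import java.util.Map;

public class IntersectionFilterDemo {
    public static void main(String[] args) {
        int fileCount = 3;
        HashMap<String, Integer> seenCount = new HashMap<>();
        seenCount.put("pilatus", 3);
        seenCount.put("monitor", 3);
        seenCount.put("scratch", 1); // only appeared in one file

        HashMap<String, String> detInfo = new HashMap<>();
        detInfo.put("pilatus", "detector node");
        detInfo.put("monitor", "monitor node");
        detInfo.put("scratch", "partial node");

        // iterate one map, remove from the other -- no iterator conflict
        for (Map.Entry<String, Integer> e : seenCount.entrySet()) {
            if (e.getValue() != fileCount) {
                detInfo.remove(e.getKey());
            }
        }
        System.out.println(detInfo); // "scratch" is gone
    }
}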
From source file: com.android.exchange.EasAccountService.java

private int parsePingResult(InputStream is, ContentResolver cr, HashMap<String, Integer> errorMap)
        throws IOException, StaleFolderListException, IllegalHeartbeatException, CommandStatusException {
    PingParser pp = new PingParser(is);
    if (pp.parse()) {
        // True indicates some mailboxes need syncing...
        // syncList has the serverId's of the mailboxes...
        mBindArguments[0] = Long.toString(mAccount.mId);
        mPingChangeList = pp.getSyncList();
        for (String serverId : mPingChangeList) {
            mBindArguments[1] = serverId;
            Cursor c = cr.query(Mailbox.CONTENT_URI, Mailbox.CONTENT_PROJECTION,
                    WHERE_ACCOUNT_KEY_AND_SERVER_ID, mBindArguments, null);
            if (c == null) throw new ProviderUnavailableException();
            try {
                if (c.moveToFirst()) {
                    /*
                     * Check the boxes reporting changes to see if there really were any...
                     * We do this because bugs in various Exchange servers can put us into a
                     * looping behavior by continually reporting changes in a mailbox, even when
                     * there aren't any.
                     *
                     * This behavior is seemingly random, and therefore we must code defensively
                     * by backing off of push behavior when it is detected.
                     *
                     * One known cause, on certain Exchange 2003 servers, is acknowledged by
                     * Microsoft, and the server hotfix for this case can be found at
                     * http://support.microsoft.com/kb/923282
                     */
                    // Check the status of the last sync
                    String status = c.getString(Mailbox.CONTENT_SYNC_STATUS_COLUMN);
                    int type = ExchangeService.getStatusType(status);
                    // This check should always be true...
                    if (type == ExchangeService.SYNC_PING) {
                        int changeCount = ExchangeService.getStatusChangeCount(status);
                        if (changeCount > 0) {
                            errorMap.remove(serverId);
                        } else if (changeCount == 0) {
                            // This means that a ping reported changes in error; we keep a count
                            // of consecutive errors of this kind
                            String name = c.getString(Mailbox.CONTENT_DISPLAY_NAME_COLUMN);
                            Integer failures = errorMap.get(serverId);
                            if (failures == null) {
                                userLog("Last ping reported changes in error for: ", name);
                                errorMap.put(serverId, 1);
                            } else if (failures > MAX_PING_FAILURES) {
                                // We'll back off of push for this box
                                pushFallback(c.getLong(Mailbox.CONTENT_ID_COLUMN));
                                continue;
                            } else {
                                userLog("Last ping reported changes in error for: ", name);
                                errorMap.put(serverId, failures + 1);
                            }
                        }
                    }
                    // If there were no problems with previous sync, we'll start another one
                    ExchangeService.startManualSync(c.getLong(Mailbox.CONTENT_ID_COLUMN),
                            ExchangeService.SYNC_PING, null);
                }
            } finally {
                c.close();
            }
        }
    }
    return pp.getPingStatus();
}
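errorMap here is a failure counter: remove deletes a mailbox's error history as soon as a ping reports real changes, while bogus change reports increment the count until a threshold triggers a back-off. A stand-alone sketch of the pattern; MAX_FAILURES and the ids are made up:

import java.util.HashMap;

public class FailureCounterDemo {
    static final int MAX_FAILURES = 2;
    static HashMap<String, Integer> errorMap = new HashMap<>();

    static boolean record(String id, boolean hadRealChanges) {
        if (hadRealChanges) {
            errorMap.remove(id);     // healthy again: forget its history
            return true;
        }
        Integer failures = errorMap.get(id);
        int next = (failures == null) ? 1 : failures + 1;
        errorMap.put(id, next);
        return next <= MAX_FAILURES; // false means: back off for this id
    }

    public static void main(String[] args) {
        System.out.println(record("inbox", false)); // true  (1 failure)
        System.out.println(record("inbox", false)); // true  (2 failures)
        System.out.println(record("inbox", false)); // false (over the limit)
        System.out.println(record("inbox", true));  // true, counter cleared
    }
}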
From source file: org.telegramsecureplus.android.NotificationsController.java

@SuppressLint("InlinedApi")
public void showExtraNotifications(NotificationCompat.Builder notificationBuilder, boolean notifyAboutLast) {
    if (Build.VERSION.SDK_INT < 19) {
        return;
    }
    ArrayList<Long> sortedDialogs = new ArrayList<>();
    HashMap<Long, ArrayList<MessageObject>> messagesByDialogs = new HashMap<>();
    for (int a = 0; a < pushMessages.size(); a++) {
        MessageObject messageObject = pushMessages.get(a);
        long dialog_id = messageObject.getDialogId();
        if ((int) dialog_id == 0) {
            continue;
        }
        ArrayList<MessageObject> arrayList = messagesByDialogs.get(dialog_id);
        if (arrayList == null) {
            arrayList = new ArrayList<>();
            messagesByDialogs.put(dialog_id, arrayList);
            sortedDialogs.add(0, dialog_id);
        }
        arrayList.add(messageObject);
    }
    HashMap<Long, Integer> oldIdsWear = new HashMap<>();
    oldIdsWear.putAll(wearNotificationsIds);
    wearNotificationsIds.clear();
    HashMap<Long, Integer> oldIdsAuto = new HashMap<>();
    oldIdsAuto.putAll(autoNotificationsIds);
    autoNotificationsIds.clear();
    for (int b = 0; b < sortedDialogs.size(); b++) {
        long dialog_id = sortedDialogs.get(b);
        ArrayList<MessageObject> messageObjects = messagesByDialogs.get(dialog_id);
        int max_id = messageObjects.get(0).getId();
        int max_date = messageObjects.get(0).messageOwner.date;
        TLRPC.Chat chat = null;
        TLRPC.User user = null;
        String name;
        if (dialog_id > 0) {
            user = MessagesController.getInstance().getUser((int) dialog_id);
            if (user == null) {
                continue;
            }
        } else {
            chat = MessagesController.getInstance().getChat(-(int) dialog_id);
            if (chat == null) {
                continue;
            }
        }
        if (chat != null) {
            name = chat.title;
        } else {
            name = UserObject.getUserName(user);
        }
        Integer notificationIdWear = oldIdsWear.get(dialog_id);
        if (notificationIdWear == null) {
            notificationIdWear = wearNotificationId++;
        } else {
            oldIdsWear.remove(dialog_id);
        }
        Integer notificationIdAuto = oldIdsAuto.get(dialog_id);
        if (notificationIdAuto == null) {
            notificationIdAuto = autoNotificationId++;
        } else {
            oldIdsAuto.remove(dialog_id);
        }
        Intent msgHeardIntent = new Intent();
        msgHeardIntent.addFlags(Intent.FLAG_INCLUDE_STOPPED_PACKAGES);
        msgHeardIntent.setAction("org.telegramsecureplus.messenger.ACTION_MESSAGE_HEARD");
        msgHeardIntent.putExtra("dialog_id", dialog_id);
        msgHeardIntent.putExtra("max_id", max_id);
        PendingIntent msgHeardPendingIntent = PendingIntent.getBroadcast(ApplicationLoader.applicationContext,
                notificationIdAuto, msgHeardIntent, PendingIntent.FLAG_UPDATE_CURRENT);
        Intent msgReplyIntent = new Intent();
        msgReplyIntent.addFlags(Intent.FLAG_INCLUDE_STOPPED_PACKAGES);
        msgReplyIntent.setAction("org.telegramsecureplus.messenger.ACTION_MESSAGE_REPLY");
        msgReplyIntent.putExtra("dialog_id", dialog_id);
        msgReplyIntent.putExtra("max_id", max_id);
        PendingIntent msgReplyPendingIntent = PendingIntent.getBroadcast(ApplicationLoader.applicationContext,
                notificationIdAuto, msgReplyIntent, PendingIntent.FLAG_UPDATE_CURRENT);
        RemoteInput remoteInputAuto = new RemoteInput.Builder(NotificationsController.EXTRA_VOICE_REPLY)
                .setLabel(LocaleController.getString("Reply", R.string.Reply)).build();
        NotificationCompat.CarExtender.UnreadConversation.Builder unreadConvBuilder =
                new NotificationCompat.CarExtender.UnreadConversation.Builder(name)
                        .setReadPendingIntent(msgHeardPendingIntent)
                        .setReplyAction(msgReplyPendingIntent, remoteInputAuto)
                        .setLatestTimestamp((long) max_date * 1000);
        Intent replyIntent = new Intent(ApplicationLoader.applicationContext, WearReplyReceiver.class);
        replyIntent.putExtra("dialog_id", dialog_id);
        replyIntent.putExtra("max_id", max_id);
        PendingIntent replyPendingIntent = PendingIntent.getBroadcast(ApplicationLoader.applicationContext,
                notificationIdWear, replyIntent, PendingIntent.FLAG_UPDATE_CURRENT);
        RemoteInput remoteInputWear = new RemoteInput.Builder(EXTRA_VOICE_REPLY)
                .setLabel(LocaleController.getString("Reply", R.string.Reply)).build();
        String replyToString;
        if (chat != null) {
            replyToString = LocaleController.formatString("ReplyToGroup", R.string.ReplyToGroup, name);
        } else {
            replyToString = LocaleController.formatString("ReplyToUser", R.string.ReplyToUser, name);
        }
        NotificationCompat.Action action = new NotificationCompat.Action.Builder(R.drawable.ic_reply_icon,
                replyToString, replyPendingIntent).addRemoteInput(remoteInputWear).build();
        String text = "";
        for (int a = messageObjects.size() - 1; a >= 0; a--) {
            MessageObject messageObject = messageObjects.get(a);
            String message = getStringForMessage(messageObject, false);
            if (message == null) {
                continue;
            }
            if (chat != null) {
                message = message.replace(" @ " + name, "");
            } else {
                message = message.replace(name + ": ", "").replace(name + " ", "");
            }
            if (text.length() > 0) {
                text += "\n\n";
            }
            text += message;
            unreadConvBuilder.addMessage(message);
        }
        TLRPC.FileLocation photoPath = null;
        if (chat != null) {
            if (chat.photo != null && chat.photo.photo_small != null && chat.photo.photo_small.volume_id != 0
                    && chat.photo.photo_small.local_id != 0) {
                photoPath = chat.photo.photo_small;
            }
        } else {
            if (user.photo != null && user.photo.photo_small != null && user.photo.photo_small.volume_id != 0
                    && user.photo.photo_small.local_id != 0) {
                photoPath = user.photo.photo_small;
            }
        }
        Intent intent = new Intent(ApplicationLoader.applicationContext, LaunchActivity.class);
        intent.setAction("com.tmessages.openchat" + Math.random() + Integer.MAX_VALUE);
        intent.setFlags(32768);
        if (chat != null) {
            intent.putExtra("chatId", chat.id);
        } else if (user != null) {
            intent.putExtra("userId", user.id);
        }
        PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent,
                PendingIntent.FLAG_ONE_SHOT);
        NotificationCompat.Builder builder = new NotificationCompat.Builder(
                ApplicationLoader.applicationContext).setContentTitle(name)
                        .setSmallIcon(R.drawable.notification).setGroup("messages").setContentText(text)
                        .setColor(0xff2ca5e0).setGroupSummary(false).setContentIntent(contentIntent)
                        .extend(new NotificationCompat.WearableExtender().addAction(action))
                        .extend(new NotificationCompat.CarExtender()
                                .setUnreadConversation(unreadConvBuilder.build()))
                        .setCategory(NotificationCompat.CATEGORY_MESSAGE);
        if (photoPath != null) {
            BitmapDrawable img = ImageLoader.getInstance().getImageFromMemory(photoPath, null, "50_50");
            if (img != null) {
                builder.setLargeIcon(img.getBitmap());
            }
        }
        if (chat == null && user != null && user.phone != null && user.phone.length() > 0) {
            builder.addPerson("tel:+" + user.phone);
        }
        notificationManager.notify(notificationIdWear, builder.build());
        wearNotificationsIds.put(dialog_id, notificationIdWear);
    }
    for (HashMap.Entry<Long, Integer> entry : oldIdsWear.entrySet()) {
        notificationManager.cancel(entry.getValue());
    }
}
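The wear/auto id maps are recycled by copying them aside, claiming ids for dialogs that still have messages (removing each claimed key from the copy), and then cancelling whatever remains. Since remove returns the previous value, the get-then-remove pair in the example can also be collapsed into a single call, as in this sketch with invented ids and dialogs:

import java.util.HashMap;
import java.util.Map;

public class StaleIdCleanupDemo {
    public static void main(String[] args) {
        HashMap<Long, Integer> liveIds = new HashMap<>();
        liveIds.put(1L, 100);
        liveIds.put(2L, 101); // dialog 2 will disappear this round

        HashMap<Long, Integer> oldIds = new HashMap<>(liveIds);
        liveIds.clear();

        long[] currentDialogs = {1L, 3L};
        int nextId = 102;
        for (long dialog : currentDialogs) {
            Integer id = oldIds.remove(dialog); // claim the old id, if any
            if (id == null) id = nextId++;      // otherwise allocate a fresh one
            liveIds.put(dialog, id);
        }
        // whatever is left in oldIds belongs to dialogs that vanished
        for (Map.Entry<Long, Integer> stale : oldIds.entrySet()) {
            System.out.println("cancel notification " + stale.getValue());
        }
        System.out.println(liveIds); // e.g. {1=100, 3=102}
    }
}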
From source file: tvbrowser.core.filters.FilterList.java

private ProgramFilter[] createFilterList() {
    final HashMap<String, ProgramFilter> filterList = new HashMap<String, ProgramFilter>();

    /* Add default filters. The user may not remove them. */
    ProgramFilter showAll = new ShowAllFilter();
    filterList.put(showAll.getName(), showAll);

    ProgramFilter pluginFilter = new PluginFilter();
    filterList.put(pluginFilter.getName(), pluginFilter);

    ProgramFilter subtitleFilter = new SubtitleFilter();
    filterList.put(subtitleFilter.getName(), subtitleFilter);

    ProgramFilter audioDescriptionFilter = new AudioDescriptionFilter();
    filterList.put(audioDescriptionFilter.getName(), audioDescriptionFilter);

    /* Read the available filters from the file system and add them to the array */
    if (mFilterDirectory == null) {
        throw new NullPointerException("directory is null");
    }

    File[] fileList = getFilterFiles();

    if (fileList != null) {
        for (File file : fileList) {
            UserFilter filter = null;
            try {
                filter = new UserFilter(file);
            } catch (ParserException e) {
                mLog.warning("error parsing filter from file " + file + "; exception: " + e);
            }
            if (filter != null) {
                filterList.put(filter.getName(), filter);
            }
        }
    }

    PluginAccess[] plugins = PluginManagerImpl.getInstance().getActivatedPlugins();
    for (PluginAccess plugin : plugins) {
        PluginsProgramFilter[] filters = plugin.getAvailableFilter();
        if (filters != null) {
            for (PluginsProgramFilter filter : filters) {
                filterList.put(filter.getName(), filter);
            }
        }
    }

    final ArrayList<ProgramFilter> filterArr = new ArrayList<ProgramFilter>();

    /* Sort the list */
    try {
        File filterFile = new File(mFilterDirectory, FILTER_INDEX);
        if (filterFile.canRead()) {
            StreamUtilities.bufferedReader(filterFile, new BufferedReaderProcessor() {
                public void process(BufferedReader inxIn) throws IOException {
                    String curFilterName = inxIn.readLine();
                    while (curFilterName != null) {
                        if (curFilterName.equals("[SEPARATOR]")) {
                            filterArr.add(new SeparatorFilter());
                        } else {
                            ProgramFilter filter = filterList.get(curFilterName);
                            if (filter != null) {
                                filterArr.add(filter);
                                filterList.remove(curFilterName);
                            }
                        }
                        curFilterName = inxIn.readLine();
                    }
                }
            });
        }
    } catch (FileNotFoundException e) {
        // ignore
    } catch (IOException e) {
        e.printStackTrace();
    }

    if (filterList.size() > 0) {
        for (ProgramFilter programFilter : filterList.values()) {
            filterArr.add(programFilter);
        }
    }

    return filterArr.toArray(new ProgramFilter[filterArr.size()]);
}
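While reading the saved index file, each named filter is moved from the lookup map into the ordered list, and remove guarantees that the leftover values appended at the end contain no duplicates. The get/remove pair can again be a single remove, since remove returns the mapped value; a sketch with invented filter names:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class OrderedPickDemo {
    public static void main(String[] args) {
        HashMap<String, String> available = new HashMap<>();
        available.put("ShowAll", "filter:ShowAll");
        available.put("Subtitle", "filter:Subtitle");
        available.put("Audio", "filter:Audio");

        List<String> savedOrder = Arrays.asList("Subtitle", "ShowAll");
        List<String> ordered = new ArrayList<>();
        for (String name : savedOrder) {
            String filter = available.remove(name); // take it out of the pool
            if (filter != null) ordered.add(filter);
        }
        ordered.addAll(available.values()); // leftovers go at the end, in any order
        System.out.println(ordered); // [filter:Subtitle, filter:ShowAll, filter:Audio]
    }
}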
From source file: StorageEngineClient.CombineFileInputFormat.java

private void processsplitForUnsplit(JobConf job, Map.Entry<String, List<OneBlockInfo>> one,
        HashMap<OneBlockInfo, String[]> blockToNodes, long maxSize, long minSizeNode, long minSizeRack,
        List<CombineFileSplit> splits, String type) {
    ArrayList<OneBlockInfo> validBlocks = new ArrayList<OneBlockInfo>();
    ArrayList<String> nodes = new ArrayList<String>();
    long curSplitSize = 0;
    if (type.equals("node"))
        nodes.add(one.getKey());

    List<OneBlockInfo> blocks = null;
    if (!type.equals("all")) {
        blocks = one.getValue();
    } else {
        blocks = new ArrayList<OneBlockInfo>();
        blocks.addAll(blockToNodes.keySet());
    }
    OneBlockInfo[] blocksInNodeArr = blocks.toArray(new OneBlockInfo[blocks.size()]);
    if (job.getBoolean("hive.merge.inputfiles.sort", true)) {
        Arrays.sort(blocksInNodeArr, new Comparator<OneBlockInfo>() {
            @Override
            public int compare(OneBlockInfo o1, OneBlockInfo o2) {
                long compareResult = o2.length - o1.length;
                int result = 0;
                if (compareResult > 0)
                    result = 1;
                if (compareResult < 0)
                    result = -1;
                return result;
            }
        });
    }
    if (job.getBoolean("hive.merge.inputfiles.rerange", false)) {
        Random r = new Random(123456);
        OneBlockInfo tmp = null;
        for (int i = 0; i < blocksInNodeArr.length; i++) {
            int idx = r.nextInt(blocksInNodeArr.length);
            tmp = blocksInNodeArr[i];
            blocksInNodeArr[i] = blocksInNodeArr[idx];
            blocksInNodeArr[idx] = tmp;
        }
    }
    int maxFileNumPerSplit = job.getInt("hive.merge.inputfiles.maxFileNumPerSplit", 1000);

    for (int i = 0; i < blocksInNodeArr.length; i++) {
        if (blockToNodes.containsKey(blocksInNodeArr[i])) {
            if (!type.equals("node")) {
                nodes.clear();
            }
            curSplitSize = blocksInNodeArr[i].length;
            validBlocks.clear();
            validBlocks.add(blocksInNodeArr[i]);
            blockToNodes.remove(blocksInNodeArr[i]);
            if (maxSize != 0 && curSplitSize >= maxSize) {
                if (!type.equals("node")) {
                    for (int k = 0; k < blocksInNodeArr[i].hosts.length; k++) {
                        nodes.add(blocksInNodeArr[i].hosts[k]);
                    }
                }
                addCreatedSplit(job, splits, nodes, validBlocks);
            } else {
                int filenum = 1;
                for (int j = i + 1; j < blocksInNodeArr.length; j++) {
                    if (blockToNodes.containsKey(blocksInNodeArr[j])) {
                        long size1 = blocksInNodeArr[j].length;
                        if (maxSize != 0 && curSplitSize < maxSize) {
                            curSplitSize += size1;
                            filenum++;
                            validBlocks.add(blocksInNodeArr[j]);
                            blockToNodes.remove(blocksInNodeArr[j]);
                        }
                        if (filenum >= maxFileNumPerSplit) {
                            break;
                        }
                        if (curSplitSize >= maxSize) {
                            break;
                        }
                    }
                }
                if (minSizeNode != 0 && curSplitSize >= minSizeNode) {
                    if (!type.equals("node")) {
                        generateNodesInfo(validBlocks, nodes);
                    }
                    addCreatedSplit(job, splits, nodes, validBlocks);
                } else {
                    for (OneBlockInfo oneblock : validBlocks) {
                        blockToNodes.put(oneblock, oneblock.hosts);
                    }
                    break;
                }
            }
        }
    }

    HashSet<OneBlockInfo> hs = new HashSet<OneBlockInfo>();
    while (blockToNodes.size() > 0) {
        validBlocks = new ArrayList<OneBlockInfo>();
        nodes = new ArrayList<String>();
        int filenum = 0;
        hs.clear();
        for (OneBlockInfo blockInfo : blockToNodes.keySet()) {
            filenum++;
            validBlocks.add(blockInfo);
            hs.add(blockInfo);
            if (filenum >= maxFileNumPerSplit) {
                break;
            }
        }
        for (OneBlockInfo blockInfo : hs) {
            blockToNodes.remove(blockInfo);
        }
        generateNodesInfo(validBlocks, nodes);
        this.addCreatedSplit(job, splits, nodes, validBlocks);
    }
}
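Here removal during iteration is made safe a second way: the candidate keys are snapshotted into an array up front (blocks.addAll(blockToNodes.keySet()) followed by toArray), so blockToNodes can be modified freely inside the main loop. A minimal sketch of snapshot-then-remove, with invented keys and sizes:

import java.util.HashMap;

public class SnapshotIterationDemo {
    public static void main(String[] args) {
        HashMap<String, Long> blocks = new HashMap<>();
        blocks.put("b1", 10L);
        blocks.put("b2", 700L);
        blocks.put("b3", 20L);

        // snapshot the keys once; the map itself may now be modified freely
        String[] snapshot = blocks.keySet().toArray(new String[0]);
        long maxSize = 100L;
        for (String key : snapshot) {
            Long len = blocks.get(key);
            if (len != null && len > maxSize) {
                blocks.remove(key); // safe: we iterate the array, not the map
            }
        }
        System.out.println(blocks); // {b1=10, b3=20}
    }
}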
From source file: org.sakaiproject.unboundid.UnboundidDirectoryProvider.java

/**
 * Similar to iterating over <code>users</code> passing
 * each element to {@link #getUser(UserEdit)}, removing the
 * {@link org.sakaiproject.user.api.UserEdit} if that method
 * returns <code>false</code>.
 *
 * <p>Adds search retry capability if any one lookup fails
 * with a directory error. Empties <code>users</code> and
 * returns if a retry exits exceptionally.</p>
 */
public void getUsers(Collection<UserEdit> users) {
    log.debug("getUsers(): [Collection size = {}]", users.size());
    boolean abortiveSearch = false;
    int maxQuerySize = getMaxObjectsToQueryFor();
    UserEdit userEdit = null;

    HashMap<String, UserEdit> usersToSearchInLDAP = new HashMap<String, UserEdit>();
    List<UserEdit> usersToRemove = new ArrayList<UserEdit>();
    try {
        int cnt = 0;
        for (Iterator<UserEdit> userEdits = users.iterator(); userEdits.hasNext();) {
            userEdit = (UserEdit) userEdits.next();
            String eid = userEdit.getEid();

            if (!(isSearchableEid(eid))) {
                userEdits.remove();
                // proceed ahead with this (perhaps the final) iteration;
                // usersToSearchInLDAP needs to be processed unless empty
            } else {
                usersToSearchInLDAP.put(eid, userEdit);
                cnt++;
            }

            // We need to make sure this query isn't larger than maxQuerySize
            if ((!userEdits.hasNext() || cnt == maxQuerySize) && !usersToSearchInLDAP.isEmpty()) {
                String filter = ldapAttributeMapper.getManyUsersInOneSearch(usersToSearchInLDAP.keySet());
                List<LdapUserData> ldapUsers = searchDirectory(filter, null, null, null, maxQuerySize);

                for (LdapUserData ldapUserData : ldapUsers) {
                    String ldapEid = ldapUserData.getEid();
                    if (StringUtils.isEmpty(ldapEid)) {
                        continue;
                    }
                    ldapEid = ldapEid.toLowerCase();
                    UserEdit ue = usersToSearchInLDAP.get(ldapEid);
                    mapUserDataOntoUserEdit(ldapUserData, ue);
                    usersToSearchInLDAP.remove(ldapEid);
                }

                // see if there are any users that we could not find in the LDAP query
                for (Map.Entry<String, UserEdit> entry : usersToSearchInLDAP.entrySet()) {
                    usersToRemove.add(entry.getValue());
                }

                // clear the HashMap and reset the counter
                usersToSearchInLDAP.clear();
                cnt = 0;
            }
        }

        // Finally clean up the original collection and remove any users we could not find
        for (UserEdit userRemove : usersToRemove) {
            log.debug("Unboundid getUsers could not find user: {}", userRemove.getEid());
            users.remove(userRemove);
        }
    } catch (LDAPException e) {
        abortiveSearch = true;
        throw new RuntimeException("getUsers(): LDAPException during search [eid = "
                + (userEdit == null ? null : userEdit.getEid()) + "][result code = " + e.errorCodeToString()
                + "][error message = " + e.getLDAPErrorMessage() + "]", e);
    } catch (Exception e) {
        abortiveSearch = true;
        throw new RuntimeException("getUsers(): RuntimeException during search eid = "
                + (userEdit == null ? null : userEdit.getEid()) + "]", e);
    } finally {
        // no sense in returning a partially complete search result
        if (abortiveSearch) {
            log.debug("getUsers(): abortive search, clearing received users collection");
            users.clear();
        }
    }
}
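usersToSearchInLDAP acts as a scratch map for each batch: every directory hit is removed as it is mapped, so the entries still left after the search are exactly the users the LDAP query failed to find. A distilled sketch of that bookkeeping; the directory results here are faked:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class BatchLookupDemo {
    public static void main(String[] args) {
        List<String> wanted = Arrays.asList("alice", "bob", "carol");
        HashMap<String, String> toFind = new HashMap<>();
        for (String eid : wanted) toFind.put(eid, "record:" + eid);

        // pretend the directory only returned two of the three
        List<String> directoryHits = Arrays.asList("alice", "carol");
        for (String hit : directoryHits) {
            toFind.remove(hit); // found: cross it off the search map
        }
        // anything still in the map was not found anywhere
        System.out.println("missing users: " + toFind.keySet()); // [bob]
    }
}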
From source file: pltag.parser.Lexicon.java

@SuppressWarnings("unchecked")
public Collection<ElementaryStringTree> getEntries(String word, String wCor, String posTag,
        boolean noAnalysisParse, int wno) {
    // expand into ElementaryStringTrees
    Collection<ElementaryStringTree> treesOut = new ArrayList<ElementaryStringTree>();
    Collection<String> treeStrings = new ArrayList<String>();
    if (lexEntriesTree.isEmpty()) {
        return treesOut;
    }
    String searchWord = wCor; // word.toLowerCase();
    if (!lexEntriesTree.containsKey(searchWord)) {
        searchWord = "";
        if (opts.goldPosTags) {
            for (String w : posTag.split("\t")) {
                searchWord += w.toLowerCase() + " unk";
            }
        } else {
            searchWord += "unk";
        }
    }
    if (!lexEntriesTree.containsKey(searchWord)) { // TODO: FIX
        return treesOut;
    }
    //System.out.println(trees.size() + "\t" + wordPosMap.getCollection(searchWord.substring(searchWord.indexOf(" ") + 1)).size());
    for (String treeString : (Collection<String>) lexEntriesTree.getCollection(searchWord)) {
        if (treeString.contains("LEXEME1")) {
            String postag1 = treeString.substring(0, treeString.indexOf(" @LEXEME1@"));
            posTag = postag1.substring(postag1.lastIndexOf("(") + 2, postag1.lastIndexOf("^"));
        }
        String sts = posTag + "\t" + treeString.substring(treeString.indexOf("\t") + 1);
        if (//StatsRunner.fullLex
        this.lexEntriesTree.size() > 100 && ((!noOfTrees.containsKey(sts) && treeString.contains("^x"))
                || (noOfTrees.containsKey(sts) && noOfTrees.get(sts) < 3 && treeString.contains("^x")))) {
            if (!noAnalysisParse) {
                continue;
            }
        }
        if (num.matcher(word).matches() && !posTag.equals("CD")) {
            continue;
        }
        if (!word.contains("NUM") && upperCase.matcher(word).matches() && wno != 0
                && (!posTag.startsWith("NN") && !posTag.startsWith("JJ")) && !word.equals("I")) {
            continue;
        }
        String unlexTreeString = treeString;
        treeString = insertLex(word, treeString);
        ElementaryStringTree tree = makeToStringTree(treeString, unlexTreeString);
        if (tree != null && !treeStrings.contains(tree.getTreeString().substring(2))) {
            treesOut.add(tree);
            treeStrings.add(tree.getTreeString().substring(2));
        }
    }
    //System.out.print(trees.size() + " lexTrees\t");
    if (treesOut.size() > 20) {
        return treesOut;
    }
    HashMap<String, Integer> posTags = new HashMap<String, Integer>();
    if (!posTag.equals("") && !posTag.equals("N/A") && opts.treeFamilies && !searchWord.equals("unk")) {
        // don't do this for prediction trees
        if (!opts.goldPosTags) {
            posTags = getPosTags(treeStrings, searchWord);
        } else {
            posTags.put(posTag, 1);
        }
        //System.out.print(posTags.size() + " posTags\t");
        if (posTags.size() > 1) {
            for (String ptag : posTags.keySet()) {
                if (lexEntriesTree.containsKey(ptag)) {
                    for (String treeString : (Collection<String>) lexEntriesTree.getCollection(ptag)) {
                        String unlexTreeString = treeString;
                        treeString = insertLex(word, treeString);
                        ElementaryStringTree tree = makeToStringTree(treeString, unlexTreeString);
                        // null-check the tree before dereferencing it (the original called
                        // tree.getTreeString() first and tested tree != null only afterwards)
                        if (tree != null) {
                            String ts = tree.getTreeString().substring(2);
                            if (!treeStrings.contains(ts) && noOfTrees.get(ptag + "\t"
                                    + unlexTreeString.substring(unlexTreeString.indexOf("\t") + 1)) > 100) {
                                treesOut.add(tree);
                                treeStrings.add(ts);
                            }
                        }
                    }
                }
            }
        }
    }
    //System.out.println(trees.size());
    if (treesOut.size() > 6) {
        return treesOut;
    } else {
        // correct for bad gold POS tag
        if (!searchWord.equals("prediction: ") && opts.goldPosTags && opts.fullLex) {
            posTags = getPosTags(treeStrings, searchWord);
            //posTags = wordPosMap.getCollection(searchWord.substring(searchWord.indexOf(" ") + 1));
        }
        posTags.remove(searchWord);
    }
    for (String sw : posTags.keySet()) {
        if (lexEntriesTree.containsKey(sw)) {
            for (String treeString : (Collection<String>) lexEntriesTree.getCollection(sw)) {
                //if (treeString.startsWith("1") && searchWord.endsWith("unk")) continue;
                String unlexTreeString = treeString;
                treeString = insertLex(word, treeString);
                ElementaryStringTree tree = makeToStringTree(treeString, unlexTreeString);
                if (tree != null && !treeStrings.contains(tree.getTreeString().substring(2))) {
                    treesOut.add(tree);
                    treeStrings.add(tree.getTreeString().substring(2));
                }
            }
        }
    }
    return treesOut;
}
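At the end of the POS-tag fallback, posTags.remove(searchWord) drops the search word itself from the candidate key set before the final expansion loop; remove is simply a no-op when the key is absent, so no containsKey guard is needed. A tiny sketch with invented keys:

import java.util.HashMap;

public class DropSelfKeyDemo {
    public static void main(String[] args) {
        String searchWord = "bank";
        HashMap<String, Integer> candidates = new HashMap<>();
        candidates.put("bank", 9); // the query word itself -- already handled
        candidates.put("NN", 7);
        candidates.put("VB", 2);

        candidates.remove(searchWord); // no-op if the key were absent
        System.out.println(candidates); // only the POS-tag keys remain: {NN=7, VB=2}
    }
}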