List of usage examples for java.util.HashMap#values()
Signature: public Collection<V> values()
From source file:infrascructure.data.vocabulary.SimpleVocabularyBuider.java
protected Collection<Word> retrieveWords(ResourceMetaData resource) { String wordPattern = WORD_PATTERN; Pattern pattern = Pattern.compile(wordPattern, Pattern.CASE_INSENSITIVE); String source = resource.getData(); Matcher matcher = pattern.matcher(source); HashMap<String, Word> wordsMap = new LinkedHashMap<>(200); while (matcher.find()) { String word = matcher.group().toLowerCase(); int indexOfAp = word.indexOf("'"); if (indexOfAp != -1) { String[] ww = word.split("'"); word = ww[0];//from ww w . j av a 2s.c o m } if (!stopWords.contains(word)) { String stemmedWord = stemmer.getCanonicalForm(word); if (!stopWords.contains(stemmedWord) && word.length() > 2) { Set<String> originalWords = wordsMap.containsKey(stemmedWord) ? wordsMap.get(stemmedWord).getOriginalWords() : new HashSet<>(); originalWords.add(word); if (!wordsMap.containsKey(stemmedWord)) { wordsMap.put(stemmedWord, new Word(stemmedWord, 1, originalWords)); } else { Word existedToken = wordsMap.get(stemmedWord); wordsMap.put(stemmedWord, new Word(stemmedWord, existedToken.getCount() + 1, originalWords)); } } } } return wordsMap.values(); }
From source file:de.xirp.plugin.PluginLoader.java
/**
 * Checks the needs of all plugins: every plugin whose required libraries are not
 * satisfied (per {@code checkNeeds}) is dropped from the static plugin registry,
 * and {@code instances} is rebuilt from the surviving plugin views.
 *
 * @see IPlugable#requiredLibs()
 */
@SuppressWarnings("unchecked")
private static void checkAllNeeds() {
    HashMap<String, IPlugable> plugables = new HashMap<String, IPlugable>();
    // NOTE(review): refs is created, cleared and nulled below but never populated
    // or read — dead code kept for fidelity.
    MultiValueHashMap<String, String> refs = new MultiValueHashMap<String, String>();
    // list of all plugins (by main-class name)
    List<String> fullPluginList = new ArrayList<String>(plugins.size());
    for (PluginInfo info : plugins.values()) {
        fullPluginList.add(info.getMainClass());
    }
    ClassLoader loader = logClass.getClass().getClassLoader();
    // Read the list of available jars from the class path
    String cp = ManagementFactory.getRuntimeMXBean().getClassPath();
    // String cp = System.getProperty("java.class.path"); // //$NON-NLS-1$
    String[] jars = cp.split(File.pathSeparator);
    List<String> jarList = new ArrayList<String>(jars.length);
    for (String jar : jars) {
        jarList.add(FilenameUtils.getName(jar));
    }
    // The initial list of current plugins equals the full list;
    // every plugin which does not fulfill the needs is removed from this list
    // (removal happens inside checkNeeds/removeRefs via the static currentPluginList).
    currentPluginList = new ArrayList<String>(fullPluginList);
    for (PluginInfo info : plugins.values()) {
        try {
            SecurePluginView view = PluginManager.getInstance(info, Robot.NAME_NONE);
            plugables.put(info.getMainClass(), view);
            boolean check = checkNeeds(view, loader, jarList);
            if (!check) {
                // remove plugins which reference this plugin
                removeRefs(info.getMainClass());
            }
        } catch (Exception e) {
            // best-effort: a plugin that fails the check is logged and skipped
            logClass.trace(e, e);
        }
    }
    // Remove all plugins of the full list
    // which are no more contained in the current list
    for (String clazz : fullPluginList) {
        if (!currentPluginList.contains(clazz)) {
            plugins.remove(clazz);
            plugables.remove(clazz);
        }
    }
    instances = new ArrayList<IPlugable>(plugables.values());
    // NOTE(review): explicitly clearing and nulling locals has no effect on GC here.
    refs.clear();
    refs = null;
    currentPluginList.clear();
    currentPluginList = null;
    fullPluginList.clear();
    fullPluginList = null;
}
From source file:mml.handler.scratch.ScratchVersionSet.java
/**
 * Create a ScratchVersionSet from a database record.
 * <p>
 * If the parsed resource format contains "MVD", every version of the internalised
 * MVD is grouped by its upgraded, stripped layer name into one ScratchVersion per
 * short name (layers added by number). Otherwise a single version/layer is built
 * from the record body.
 *
 * @param resource the dbase resource fetched from the database
 * @param dbase the dbase collection name it was from
 */
public ScratchVersionSet(String resource, String dbase) {
    parseResource(resource);
    if (format.contains("MVD")) {
        MVD mvd = MVDFile.internalise(body);
        HashMap<String, ScratchVersion> map = new HashMap<String, ScratchVersion>();
        String[] all = listVersions(mvd);
        // MVD version indices are 1-based
        for (short i = 1; i <= mvd.numVersions(); i++) {
            String vid = mvd.getVersionId(i);
            String localLongName = mvd.getLongNameForVersion(i);
            // the ONLY names in memory are upgraded ones
            String layerName = Layers.upgradeLayerName(all, vid);
            int num = ScratchVersion.layerNumber(layerName);
            String shortName = Layers.stripLayer(layerName);
            ScratchVersion sv = map.get(shortName);
            char[] data = mvd.getVersion(i);
            if (sv == null) {
                // first layer seen for this short name: create the scratch version
                sv = new ScratchVersion(shortName, localLongName, docid, dbase, null, false);
                sv.addLayer(data, num);
                map.put(shortName, sv);
            } else if (!sv.containsLayer(num))
                // same short name, new layer number: add; duplicates are ignored
                sv.addLayer(data, num);
        }
        // convert to list
        list = new ScratchVersion[map.size()];
        Collection<ScratchVersion> coll = map.values();
        coll.toArray(list);
    } else // single version/layer
    {
        if (version1 == null)
            version1 = "/base";
        if (docid != null && body != null) {
            String longName = null;
            if (otherFields.containsKey(JSONKeys.LONGNAME))
                longName = (String) otherFields.get(JSONKeys.LONGNAME);
            // NOTE(review): DESCRIPTION deliberately(?) overrides LONGNAME when both
            // are present — confirm this precedence is intended.
            if (otherFields.containsKey(JSONKeys.DESCRIPTION))
                longName = (String) otherFields.get(JSONKeys.DESCRIPTION);
            if (longName == null)
                longName = "Version " + version1;
            ScratchVersion sv = new ScratchVersion(version1, longName, docid, dbase, null, false);
            // MAX_VALUE marks the single body as the top-most layer
            sv.addLayer(body.toCharArray(), Integer.MAX_VALUE);
            appendToList(sv);
        }
    }
}
From source file:gov.llnl.lc.smt.command.SmtCommand.java
protected OSM_Node getOSM_NodeByName(String name) { // find the first node that matches the supplied name OSM_Node node = null;//from ww w . jav a 2 s. c o m if (name != null) { HashMap<String, OSM_Node> nodes = getOSM_Nodes(); for (OSM_Node n : nodes.values()) { if (n.pfmNode.getNode_name().startsWith(name)) { node = n; break; } } } return node; }
From source file:gedi.riboseq.inference.orf.OrfFinder.java
/**
 * Distributes codon activity among overlapping ORFs by equivalence classes and
 * writes the estimated total activity (absolute and as fraction of the sum) back
 * into each ORF — once for total activity and once per condition.
 */
private void overlapUniqueCoverage(List<OrfWithCodons> orfs) {
    // codon -> set of ORFs that contain it
    HashMap<Codon, HashSet<OrfWithCodons>> cod2Orf = new HashMap<Codon, HashSet<OrfWithCodons>>();
    int numCond = -1;
    for (OrfWithCodons orf : orfs)
        for (Codon c : orf.getCodons()) {
            cod2Orf.computeIfAbsent(c, x -> new HashSet<>()).add(orf);
            // all codons are assumed to carry the same number of conditions
            numCond = c.getActivity().length;
        }
    // now equivalence classes: gives you all codons that are consistent with a
    // specific combination of orfs
    HashMap<HashSet<OrfWithCodons>, HashSet<Codon>> equi = new HashMap<HashSet<OrfWithCodons>, HashSet<Codon>>();
    for (Codon c : cod2Orf.keySet()) {
        equi.computeIfAbsent(cod2Orf.get(c), x -> new HashSet<>()).add(c);
    }
    // compute equi regions for their length (number of codon positions that are
    // in-frame for every ORF in the class and in no ORF outside it)
    HashMap<HashSet<OrfWithCodons>, Integer> equiLengths = new HashMap<HashSet<OrfWithCodons>, Integer>();
    for (HashSet<OrfWithCodons> e : equi.keySet()) {
        LinkedList<ArrayGenomicRegion> equiCodons = null;
        for (OrfWithCodons orf : e) {
            if (equiCodons == null) {
                // seed with every in-frame codon region of the first ORF
                equiCodons = new LinkedList<ArrayGenomicRegion>();
                for (int i = 0; i < orf.getRegion().getTotalLength(); i += 3)
                    equiCodons.add(orf.getRegion().map(new ArrayGenomicRegion(i, i + 3)));
            } else {
                // intersect: keep only codons contained in-frame in this ORF too
                Iterator<ArrayGenomicRegion> it = equiCodons.iterator();
                while (it.hasNext()) {
                    ArrayGenomicRegion cod = it.next();
                    if (!orf.getRegion().containsUnspliced(cod)
                            || orf.getRegion().induce(cod.getStart()) % 3 != 0)
                        it.remove();
                }
            }
        }
        // subtract: drop codons that are in-frame in any ORF outside the class
        for (OrfWithCodons orf : orfs) {
            if (!e.contains(orf)) {
                Iterator<ArrayGenomicRegion> it = equiCodons.iterator();
                while (it.hasNext()) {
                    ArrayGenomicRegion cod = it.next();
                    if (orf.getRegion().containsUnspliced(cod)
                            && orf.getRegion().induce(cod.getStart()) % 3 == 0)
                        it.remove();
                }
            }
        }
        equiLengths.put(e, equiCodons.size());
    }
    // total activity across all conditions
    HashMap<OrfWithCodons, double[]> total = estimateByCoverage(equi, equiLengths, c -> c.getTotalActivity());
    double sum = EI.wrap(total.values()).mapToDouble(a -> a[0]).sum();
    for (OrfWithCodons orf : total.keySet())
        orf.setEstimatedTotalActivity(total.get(orf)[0], total.get(orf)[0] / sum);
    // per-condition activity; ei is the effectively-final copy for the lambda
    for (int i = 0; i < numCond; i++) {
        int ei = i;
        total = estimateByCoverage(equi, equiLengths, c -> c.getActivity()[ei]);
        sum = EI.wrap(total.values()).mapToDouble(a -> a[0]).sum();
        for (OrfWithCodons orf : total.keySet())
            orf.setEstimatedTotalActivity(i, total.get(orf)[0], total.get(orf)[0] / sum);
    }
}
From source file:com.otway.picasasync.syncutil.AlbumSync.java
private List<ImageSync> buildImageList(PicasawebClient webClient, File localFolder, AlbumEntry albumEntry, final LocalDateTime oldestDate) throws IOException, ServiceException { List<ImageSync> allImages = new ArrayList<ImageSync>(); syncManager.getSyncState().setStatus("Querying Google for album " + albumEntry.getTitle().getPlainText()); // Get the list of remote photos List<PhotoEntry> photos = webClient.getPhotos(albumEntry); // Deal with the fact that an album can have multiple images with the same local filename. HashMap<String, List<PhotoEntry>> fileGroups = new HashMap<String, List<PhotoEntry>>(); for (PhotoEntry photo : photos) { String imageName = photo.getTitle().getPlainText().toLowerCase(); if (FilenameUtils.getExtension(imageName).toLowerCase().equals(".mov")) { log.info("Skipping file " + imageName + " with .mov file extension."); continue; }//from w ww .j a va2 s. c om List<PhotoEntry> photoList = null; if (!fileGroups.containsKey(imageName)) { photoList = new ArrayList<PhotoEntry>(); fileGroups.put(imageName, photoList); } else photoList = fileGroups.get(imageName); photoList.add(photo); } // So now we have a map of image name => List of photo entries which use that name. List<PhotoEntry> nonDupePhotos = new ArrayList<PhotoEntry>(); int dupesDiscarded = 0; for (List<PhotoEntry> list : fileGroups.values()) { String maxId = null; PhotoEntry photoToUse = null; for (PhotoEntry photo : list) { String id = PicasawebClient.getPhotoId(photo); // We'll arbitrarily pick the one with the lowest ID, and ignore the rest. 
if (maxId == null || maxId.compareTo(id) == -1) { maxId = id; photoToUse = photo; } } nonDupePhotos.add(photoToUse); dupesDiscarded += list.size() - 1; } if (dupesDiscarded > 0) { log.info("Ignoring " + dupesDiscarded + " duplicate photos of " + photos.size() + " from album " + getAlbumName()); } List<ImageSync> remoteImages = new ArrayList<ImageSync>(); for (PhotoEntry photo : nonDupePhotos) { String imageFile = photo.getTitle().getPlainText(); if (settings.getExcludeVideos() && photo.getMediaContents().size() > 1) { log.info("Exclude Video enabled: skipping " + imageFile); continue; } File localFileName = new File(localFolder, imageFile); remoteImages.add(new ImageSync(photo, localFileName)); } log.debug(remoteImages.size() + " remote images found in " + albumEntry.getTitle().getPlainText()); // Get the local file list File[] files = localFolder.listFiles(new FilenameFilter() { public boolean accept(File current, String name) { File file = new File(current, name); return file.isFile() && !file.isHidden(); } }); List<ImageSync> localFiles = new ArrayList<ImageSync>(); if (files != null && files.length > 0) { log.info(files.length + " local files found in " + localFolder); // Now, pull out all the local files that aren't in the list. // These are the new files that we'll upload for (File localFile : files) { if (!fileGroups.containsKey(localFile.getName().toLowerCase())) { localFiles.add(new ImageSync(null, localFile)); } } } log.debug(localFiles.size() + " local images found in " + localFolder); allImages.addAll(localFiles); // Add the remote images after. Uploads are higher priority than downloads allImages.addAll(remoteImages); log.debug(allImages.size() + " images found (new local + remote)"); // And finally, filter out anything that's too old. 
List<ImageSync> result = new ArrayList<ImageSync>(); for (ImageSync image : allImages) if (image.newerThan(oldestDate)) result.add(image); log.debug(result.size() + " total images after date filter applied."); return result; }
From source file:net.sf.jvifm.ui.shell.QuickRunShell.java
@SuppressWarnings("all") public TipOption[] getCompletionOptions(String text) { HashMap optionMap = new HashMap(); String[] options = AutoCompleteUtil.getFileCompleteOptions(pwd, text, false); addOptions(optionMap, buildPathOptions(options)); options = AutoCompleteUtil.getBookmarkFileOptions(text); addOptions(optionMap, buildPathOptions(options)); options = AutoCompleteUtil.getExeFileCompleteOptions(text); addOptions(optionMap, buildPathOptions(options)); ArrayList list = new ArrayList(); Shortcut[] links = AutoCompleteUtil.getShortcutsCompleteList2(text); if (links != null) { for (int i = 0; i < links.length; i++) { TipOption tipOption = new TipOption(); tipOption.setName(links[i].getName()); tipOption.setExtraInfo(links[i].getText()); tipOption.setTipType("shortcut"); list.add(tipOption);//from w w w . j ava 2 s. co m } } addOptions(optionMap, list); TipOption[] result = new TipOption[optionMap.size()]; int i = 0; for (Iterator it = optionMap.values().iterator(); it.hasNext();) { result[i++] = (TipOption) it.next(); } return result; }
From source file:org.agnitas.web.NewImportWizardAction.java
/**
 * Walks all invalid-field recipients of the current import (paginated in blocks
 * of BLOCK_SIZE) and records one error per imported column that failed
 * validation, categorized by column name or type, into the import status.
 */
private void generateResultStatistics(HttpServletRequest request, NewImportWizardForm aForm,
        CustomerImportStatus status) {
    ImportRecipientsDao dao = aForm.getImportWizardHelper().getImportRecipientsDao();
    int adminId = AgnUtils.getAdmin(request).getAdminID();
    int datasourceId = aForm.getDatasourceId();
    // only recipients flagged as having invalid fields
    Integer[] types = { NewImportWizardService.RECIPIENT_TYPE_FIELD_INVALID };
    int page = 0;
    int rowNum = NewImportWizardService.BLOCK_SIZE;
    HashMap<ProfileRecipientFields, ValidatorResults> recipients = null;
    // keep paging while full blocks come back; a short (or empty) block is the last
    while (recipients == null || recipients.size() == rowNum) {
        recipients = dao.getRecipientsByTypePaginated(types, page, rowNum, adminId, datasourceId);
        for (ValidatorResults validatorResults : recipients.values()) {
            for (CSVColumnState column : aForm.getImportWizardHelper().getColumns()) {
                if (column.getImportedColumn()) {
                    if (!ImportUtils.checkIsCurrentFieldValid(validatorResults, column.getColName())) {
                        // well-known columns get specific errors; otherwise
                        // classify by the column's data type
                        if (column.getColName().equals("email")) {
                            status.addError(NewImportWizardServiceImpl.EMAIL_ERROR);
                        } else if (column.getColName().equals("mailtype")) {
                            status.addError(NewImportWizardServiceImpl.MAILTYPE_ERROR);
                        } else if (column.getColName().equals("gender")) {
                            status.addError(NewImportWizardServiceImpl.GENDER_ERROR);
                        } else if (column.getType() == CSVColumnState.TYPE_DATE) {
                            status.addError(NewImportWizardServiceImpl.DATE_ERROR);
                        } else if (column.getType() == CSVColumnState.TYPE_NUMERIC) {
                            status.addError(NewImportWizardServiceImpl.NUMERIC_ERROR);
                        }
                    }
                }
            }
        }
        page++;
    }
}
From source file:de.tor.tribes.util.algo.BruteForce.java
@Override public List<TroopMovement> calculateAttacks(HashMap<UnitHolder, List<Village>> pSources, HashMap<UnitHolder, List<Village>> pFakes, List<Village> pTargets, List<Village> pFakeTargets, HashMap<Village, Integer> pMaxAttacksTable, TimeFrame pTimeFrame, boolean pFakeOffTargets) { List<Village> allTargets = Arrays.asList(pTargets.toArray(new Village[pTargets.size()])); List<Village> allFakeTargets = Arrays.asList(pFakeTargets.toArray(new Village[pFakeTargets.size()])); HashMap<Village, HashMap<UnitHolder, List<Village>>> attacks = new HashMap<>(); logger.debug("Assigning offs"); logText("Starte zufllige Berechnung"); int maxStatus = allTargets.size() + allFakeTargets.size(); int currentStatus = 0; // <editor-fold defaultstate="collapsed" desc=" Assign Offs"> logInfo(" Starte Berechnung fr Offs"); for (UnitHolder unit : pSources.keySet()) { logInfo(" - Starte Berechnung fr Einheit '" + unit.getName() + "'"); List<Village> sources = pSources.get(unit); if (sources != null) { logInfo(" - Verwende " + sources.size() + " Herkunftsdrfer"); for (Village source : sources) { //time when the attacks should arrive Village vTarget = null;//from w w w.j a v a 2 s . 
c o m //distribute targets randomly Collections.shuffle(pTargets); currentStatus = allTargets.size() - pTargets.size(); updateStatus(currentStatus, maxStatus); //search all targets logInfo(" - Teste " + pTargets.size() + " mgliche Ziele"); for (Village v : pTargets.toArray(new Village[pTargets.size()])) { if (isAborted()) { return new LinkedList<>(); } int maxAttacksPerVillage = pMaxAttacksTable.get(v); double time = DSCalculator.calculateMoveTimeInSeconds(source, v, unit.getSpeed()); if (unit.getPlainName().equals("snob")) { if (DSCalculator.calculateDistance(source, v) > ServerSettings.getSingleton() .getSnobRange()) { //continue with the next destination Village continue; } } long runtime = (long) time * 1000; //check if attack is somehow possible if (pTimeFrame.isMovementPossible(runtime)) { //only calculate if time is in time frame //get list of source villages for current target HashMap<UnitHolder, List<Village>> attacksForVillage = attacks.get(v); if (attacksForVillage == null) { //create new table of attacks attacksForVillage = new HashMap<>(); List<Village> sourceList = new LinkedList<>(); logInfo(" * Neue Truppenbewegung: " + source + " -> " + v); sourceList.add(source); attacksForVillage.put(unit, sourceList); attacks.put(v, attacksForVillage); vTarget = v; } else { int currentAttacks = 0; for (List<Village> l : attacksForVillage.values()) { currentAttacks += l.size(); } //there are already attacks on this village if (currentAttacks < maxAttacksPerVillage) { //more attacks on this village are allowed boolean added = false; //max number of attacks neither for villages nor for player reached List<Village> attsPerUnit = attacksForVillage.get(unit); if (attsPerUnit != null) { if (!attsPerUnit.contains(source) || (unit .equals(DataHolder.getSingleton().getUnitByPlainName("snob")) && multipleSameSnobsAllowed())) { //only add source if it does not attack current target yet added = true; logInfo(" * Neue Truppenbewegung: " + source + " -> " + v); 
attsPerUnit.add(source); } } else { attsPerUnit = new LinkedList<>(); //only add source if it does not attack current target yet added = true; logInfo(" * Neue Truppenbewegung: " + source + " -> " + v); attsPerUnit.add(source); attacksForVillage.put(unit, attsPerUnit); } if (added) { //only increment attack count if source was added vTarget = v; //check if last missing attack was added. if (currentAttacks + 1 == maxAttacksPerVillage) { logInfo(" * Entferne vollstndiges Ziel " + v); pTargets.remove(v); } } else { vTarget = null; } } else { //max number of attacks per village reached, continue search logInfo(" * Entferne vollstndiges Ziel " + v); pTargets.remove(v); vTarget = null; } } } if (vTarget != null) { break; } } if (vTarget == null) { logInfo(" - Keine Ziele fr Herkunftsdorf " + source + " gefunden"); } } } else { logInfo(" - Keine Herkunftsdrfer fr aktuelle Einheit"); } } // </editor-fold> if (pFakeOffTargets) { /* * why would we do this? We should allow one fake for each missing off, so we can simply use pTargets as is? 
* logger.debug("Removing assigned off targets from fake list"); Enumeration<Village> targets = attacks.keys(); while (targets.hasMoreElements()) { Village target = targets.nextElement(); pTargets.remove(target); }*/ logger.debug("Keeping remaining Off targets for fake search"); } else { //clear target list pTargets.clear(); } //adding fake targets for (Village fakeTarget : pFakeTargets) { pTargets.add(fakeTarget); } logger.debug("Assigning fakes"); logText(" Starte Berechnung fr Fakes."); // <editor-fold defaultstate="collapsed" desc=" Assign fakes"> HashMap<Village, HashMap<UnitHolder, List<Village>>> fakes = new HashMap<>(); for (UnitHolder unit : pFakes.keySet()) { logInfo(" - Starte Berechnung fr Einheit '" + unit.getName() + "'"); List<Village> sources = pFakes.get(unit); if (sources != null) { logInfo(" - Verwende " + sources.size() + " Herkunftsdrfer"); for (Village source : sources) { //time when the attacks should arrive Village vTarget = null; //distribute targets randomly Collections.shuffle(pTargets); currentStatus = allTargets.size() + allFakeTargets.size() - pTargets.size(); updateStatus(currentStatus, maxStatus); //search all targets logInfo(" - Teste " + pTargets.size() + " mgliche Ziele"); for (Village v : pTargets.toArray(new Village[pTargets.size()])) { if (isAborted()) { return new LinkedList<>(); } int maxAttacksPerVillage = pMaxAttacksTable.get(v); double time = DSCalculator.calculateMoveTimeInSeconds(source, v, unit.getSpeed()); if (unit.getPlainName().equals("snob")) { if (DSCalculator.calculateDistance(source, v) > ServerSettings.getSingleton() .getSnobRange()) { //continue with the next destination Village continue; } } long runtime = (long) time * 1000; //check if attack is somehow possible if (pTimeFrame.isMovementPossible(runtime)) { //only calculate if time is in time frame //get list of source villages for current target HashMap<UnitHolder, List<Village>> attacksForVillage = attacks.get(v); HashMap<UnitHolder, List<Village>> 
fakesForVillage = fakes.get(v); if (attacksForVillage == null) { //create empty table of attacks (will stay empty, but is used for maxAttacks calculation) attacksForVillage = new HashMap<>(); List<Village> sourceList = new LinkedList<>(); attacksForVillage.put(unit, sourceList); } if (fakesForVillage == null) { //create new table of fakes fakesForVillage = new HashMap<>(); List<Village> sourceList = new LinkedList<>(); logInfo(" * Neue Truppenbewegung: " + source + " -> " + v); sourceList.add(source); fakesForVillage.put(unit, sourceList); fakes.put(v, fakesForVillage); vTarget = v; } else { int currentAttacks = 0; for (List<Village> listV : attacksForVillage.values()) { currentAttacks += listV.size(); } int currentFakes = 0; for (List<Village> listV : fakesForVillage.values()) { currentFakes += listV.size(); } //there are already attacks or fakes on this village if (currentAttacks + currentFakes < maxAttacksPerVillage) { //more attacks on this village are allowed boolean added = false; //max number of attacks neither for villages nor for player reached List<Village> attsPerUnit = attacksForVillage.get(unit); List<Village> fakesPerUnit = fakesForVillage.get(unit); if (fakesPerUnit != null) { if (!attsPerUnit.contains(source) && (attsPerUnit == null || !attsPerUnit.contains(source))) { //only add source if it does not attack current target yet added = true; logInfo(" * Neue Truppenbewegung: " + source + " -> " + v); fakesPerUnit.add(source); } } else { fakesPerUnit = new LinkedList<>(); //only add source if it does not attack current target yet added = true; logInfo(" * Neue Truppenbewegung: " + source + " -> " + v); fakesPerUnit.add(source); fakesForVillage.put(unit, attsPerUnit); } if (added) { //only increment attack count if source was added vTarget = v; //check if last missing attack was added. 
if (currentAttacks + currentFakes + 1 == maxAttacksPerVillage) { logInfo(" * Entferne vollstndiges Ziel " + v); pTargets.remove(v); } } else { vTarget = null; } } else { //max number of attacks per village reached, continue search logInfo(" * Entferne vollstndiges Ziel " + v); pTargets.remove(v); vTarget = null; } } } if (vTarget != null) { break; } } if (vTarget == null) { logInfo(" - Keine Ziele fr Herkunftsdorf " + source + " gefunden"); } } } else { logInfo(" - Keine Herkunftsdrfer fr aktuelle Einheit"); } } updateStatus(maxStatus, maxStatus); // </editor-fold> logText(" - Erstelle Ergebnisliste"); //convert to result list List<TroopMovement> movements = new LinkedList<>(); logger.debug(" - adding offs"); logText(String.format(" %d Offs berechnet", attacks.size())); for (Village target : allTargets) { HashMap<UnitHolder, List<Village>> sourcesForTarget = attacks.get(target); TroopMovement f = new TroopMovement(target, pMaxAttacksTable.get(target), Attack.CLEAN_TYPE); if (sourcesForTarget != null) { for (UnitHolder sourceUnit : sourcesForTarget.keySet()) { List<Village> unitVillages = attacks.get(target).get(sourceUnit); for (Village source : unitVillages) { f.addOff(sourceUnit, source); } } } if (sourcesForTarget == null && fakes.get(target) != null) { //ignore Off targets, when there are Fakes assigned and no Offs continue; } movements.add(f); } logger.debug(" - adding fakes"); logText(String.format(" %d Fakes berechnet", fakes.size())); for (Village target : (List<Village>) ListUtils.union(allFakeTargets, allTargets)) { HashMap<UnitHolder, List<Village>> sourcesForTarget = fakes.get(target); TroopMovement f = new TroopMovement(target, pMaxAttacksTable.get(target), Attack.FAKE_TYPE); if (sourcesForTarget != null) { for (UnitHolder sourceUnit : sourcesForTarget.keySet()) { List<Village> unitVillages = fakes.get(target).get(sourceUnit); for (Village source : unitVillages) { f.addOff(sourceUnit, source); } } } if (sourcesForTarget == null && 
allTargets.contains(target)) { //ignore Off targets, where no Fakes were assigned continue; } movements.add(f); } logText("Berechnung abgeschlossen."); return movements; }
From source file:se.inera.intyg.intygstjanst.web.service.impl.RecipientServiceImpl.java
@Override public void afterPropertiesSet() throws Exception { HashMap<String, RecipientBuilder> recipientMap = new HashMap<>(); for (String key : recipients.stringPropertyNames()) { String value = recipients.getProperty(key); String[] keyParts = key.split("\\."); switch (keyParts[0]) { case "recipient": String id = keyParts[1]; if (recipientMap.get(id) == null) { recipientMap.put(id, new RecipientBuilder().setId(id)); }//from w w w .ja v a 2 s. c o m if (keyParts[2].equals("name")) { recipientMap.get(id).setName(value); } else if (keyParts[2].equals("logicalAddress")) { recipientMap.get(id).setLogicalAddress(value); } else if (keyParts[2].equals("certificateType")) { recipientMap.get(id).setCertificateTypes(value); } break; case "recipient-transport-model-version": String recipientId = keyParts[1]; String certType = keyParts[2]; supportedTransportModelVersion.put(new RecipientCertificateType(recipientId, certType), TransportModelVersion.valueOf(value)); break; default: } } for (RecipientBuilder builder : recipientMap.values()) { recipientList.add(builder.build()); } }