List of usage examples for java.util.HashMap.values()
public Collection<V> values()
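Before the examples taken from real projects below, here is a minimal, self-contained sketch of the basic contract of values() (class and variable names such as ValuesExample and stock are purely illustrative): the returned Collection is a view backed by the map, so iterating it visits every value, and removing elements from the view removes the corresponding entries from the map.

import java.util.Collection;
import java.util.HashMap;

public class ValuesExample {
    public static void main(String[] args) {
        HashMap<String, Integer> stock = new HashMap<>();
        stock.put("apples", 12);
        stock.put("pears", 0);
        stock.put("plums", 7);

        // values() returns a live Collection view backed by the map.
        Collection<Integer> quantities = stock.values();

        // Iterating the view visits every value (in no particular order).
        int total = 0;
        for (int q : quantities) {
            total += q;
        }
        System.out.println("total items: " + total); // 19

        // Removing from the view removes the matching entries from the map.
        quantities.removeIf(q -> q == 0);
        System.out.println(stock.containsKey("pears")); // false
    }
}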
From source file:com.github.lucapino.jira.GenerateReleaseNotesMojo.java
/**
 * Writes issues to output.
 *
 * @param issues
 */
void output(List<JiraIssue> issues) throws IOException, MojoFailureException {
    Log log = getLog();
    if (targetFile == null) {
        log.warn("No targetFile specified. Ignoring");
        return;
    }
    if (issues == null) {
        log.warn("No issues found. File will not be generated.");
        return;
    }
    HashMap<Object, Object> parameters = new HashMap<>();
    HashMap<String, List<JiraIssue>> jiraIssues = processIssues(issues);
    List<JiraIssue> jiraIssuesList = new ArrayList<>();
    for (List<JiraIssue> list : jiraIssues.values()) {
        jiraIssuesList.addAll(list);
    }
    parameters.put("issues", jiraIssuesList);
    parameters.put("issuesMap", jiraIssues);
    parameters.put("jiraURL", jiraURL);
    parameters.put("jiraProjectKey", jiraProjectKey);
    parameters.put("releaseVersion", releaseVersion);
    if (announceParameters == null) {
        // empty Map to prevent NPE in velocity execution
        parameters.put("announceParameters", java.util.Collections.EMPTY_MAP);
    } else {
        parameters.put("announceParameters", announceParameters);
    }
    boolean useDefault = false;
    if (templateFile == null || !templateFile.exists()) {
        useDefault = true;
        // let's use the default one
        // it/peng/maven/jira/releaseNotes.vm
        InputStream defaultTemplate = this.getClass().getClassLoader().getResourceAsStream("releaseNotes.vm");
        templateFile = File.createTempFile("releaseNotes.vm", null);
        FileOutputStream fos = new FileOutputStream(templateFile);
        IOUtils.copy(defaultTemplate, fos);
        IOUtils.closeQuietly(defaultTemplate);
        IOUtils.closeQuietly(fos);
    }
    String content = getEvaluator().evaluate(templateFile, parameters);
    if (useDefault) {
        // remove the temp file
        templateFile.delete();
    }
    // this creates the parent folder and the file if they don't exist
    OutputStreamWriter writer = new OutputStreamWriter(FileUtils.openOutputStream(targetFile), "UTF-8");
    PrintWriter ps = new PrintWriter(writer);
    try {
        if (beforeText != null) {
            ps.println(beforeText);
        }
        ps.println(content);
        if (afterText != null) {
            ps.println(afterText);
        }
    } finally {
        ps.flush();
        IOUtils.closeQuietly(ps);
    }
}
From source file:hu.ppke.itk.nlpg.purepos.decoder.BeamedViterbi.java
private List<Pair<List<Integer>, Double>> findMax(final HashMap<NGram<Integer>, Node> beam, int resultsNumber) {
    // Node max = Collections.max(beam.values());
    // Node act = max;
    // return decompose(max);
    SortedSet<Node> sortedKeys = new TreeSet<Node>(beam.values());
    List<Pair<List<Integer>, Double>> ret = new ArrayList<Pair<List<Integer>, Double>>();
    Node max;
    for (int i = 0; i < resultsNumber && !sortedKeys.isEmpty(); ++i) {
        max = sortedKeys.last();
        sortedKeys.remove(max);
        List<Integer> maxTagSeq = decompose(max);
        ret.add(Pair.of(maxTagSeq, max.weight));
    }
    return ret;
}
From source file:io.jcml.gephi.plugins.abcd.ABCD.java
public void execute(HierarchicalGraph graph, AttributeModel attributeModel) {
    counts = new HashMap<Integer, HashMap<Integer, Float>>();
    countsDist = new HashMap<Float, Integer>();
    Integer i = 0;
    graph.readLock();
    List<Edge> oldEdges = new ArrayList<Edge>();
    // Initialize indexes:
    for (Node n1 : graph.getNodes()) {
        for (Node n2 : graph.getNodes()) {
            if (n1.getId() < n2.getId()) {
                if (!counts.containsKey(n1.getId())) {
                    HashMap<Integer, Float> val = new HashMap<Integer, Float>();
                    counts.put(n1.getId(), val);
                }
                counts.get(n1.getId()).put(n2.getId(), 0f);
            } else {
                if (!counts.containsKey(n2.getId())) {
                    HashMap<Integer, Float> val = new HashMap<Integer, Float>();
                    counts.put(n2.getId(), val);
                }
                counts.get(n2.getId()).put(n1.getId(), 0f);
            }
        }
        if (isCanceled)
            break;
    }
    for (Edge e1 : graph.getEdges()) {
        oldEdges.add(e1);
        // Pattern: -->
        this.addToCount(e1.getSource().getId(), e1.getTarget().getId(),
                p0Ratio * (ignoreEdgeWeights ? 1 : e1.getWeight()));
        // Parse edges connected to the target:
        for (Edge e2 : graph.getEdges(e1.getTarget())) {
            if (e1.getTarget() == e2.getSource())
                // Pattern: --> o -->
                this.addToCount(e1.getSource().getId(), e2.getTarget().getId(),
                        p1Ratio * (ignoreEdgeWeights ? 2 : (e1.getWeight() + e2.getWeight())));
            else if (e1.getSource().getId() < e2.getSource().getId())
                // Pattern: --> o <--
                this.addToCount(e1.getSource().getId(), e2.getSource().getId(),
                        p2Ratio * (ignoreEdgeWeights ? 2 : (e1.getWeight() + e2.getWeight())));
        }
        // Parse edges connected to the source:
        for (Edge e2 : graph.getEdges(e1.getSource())) {
            if (e1.getSource() == e2.getSource() && e1.getTarget().getId() < e2.getTarget().getId())
                // Pattern: <-- o -->
                this.addToCount(e1.getTarget().getId(), e2.getTarget().getId(),
                        p3Ratio * (ignoreEdgeWeights ? 2 : (e1.getWeight() + e2.getWeight())));
        }
        if (isCanceled)
            break;
    }
    // Find distribution:
    Integer values = 0;
    Float sum = 0f;
    Float max = 0f;
    for (HashMap<Integer, Float> map : counts.values()) {
        for (Float value : map.values()) {
            max = Math.max(max, value);
            sum += value;
            values++;
            if (!countsDist.containsKey(value)) {
                countsDist.put(value, 0);
            }
            countsDist.put(value, countsDist.get(value) + 1);
        }
    }
    avgFriendship = sum / values;
    graph.readUnlockAll();
    graph.writeLock();
    // Clear edges:
    graph.clearEdges();
    // Add new edges:
    Edge edge;
    for (Integer key : counts.keySet()) {
        HashMap<Integer, Float> map = counts.get(key);
        for (Integer key2 : map.keySet()) {
            Float value = map.get(key2);
            // compare the boxed keys by value, not by reference
            if (!key.equals(key2) && value > threshold) {
                edge = Lookup.getDefault().lookup(GraphElementsController.class).createEdge(graph.getNode(key),
                        graph.getNode(key2), false);
                edge.setWeight(value);
            }
        }
    }
    graph.writeUnlock();
    // Compute modularity:
    Modularity mod = new Modularity();
    mod.execute(graph.getGraphModel(), attributeModel);
    // Restore initial graph:
    if (!overrideEdges) {
        graph.writeLock();
        graph.clearEdges();
        for (Edge e : oldEdges) {
            graph.addEdge(e);
        }
        graph.writeUnlock();
    }
}
From source file:org.fenixedu.academic.thesis.ui.controller.StudentCandidaciesController.java
@RequestMapping(value = "", method = RequestMethod.GET)
public String listProposals(Model model) {
    Student student = Authenticate.getUser().getPerson().getStudent();
    Set<ThesisProposalsConfiguration> suggestedConfigs = service.getSuggestedConfigs(student);
    HashMap<Registration, Set<ThesisProposal>> proposalsByReg = service.getOpenProposalsByReg(student);
    Map<ThesisProposalsConfiguration, List<StudentThesisCandidacy>> candidaciesByConfig = service
            .getCandidaciesByConfig(student);
    int proposalsSize = proposalsByReg.values().stream().map(Set::size).reduce(0, (a, b) -> a + b);
    int candidaciesSize = candidaciesByConfig.values().stream().map(List::size).reduce(0, (a, b) -> a + b);
    if (participantLabelService != null) {
        model.addAttribute("participantLabelService", participantLabelService);
    }
    model.addAttribute("suggestedConfigs", suggestedConfigs);
    model.addAttribute("proposalsSize", proposalsSize);
    model.addAttribute("candidaciesSize", candidaciesSize);
    model.addAttribute("candidaciesByConfig", candidaciesByConfig);
    model.addAttribute("proposalsByReg", proposalsByReg);
    Map<ThesisProposal, Integer> applicationCountByProposalConfig = candidaciesByConfig.values().stream()
            .flatMap(List::stream).collect(Collectors.toMap(StudentThesisCandidacy::getThesisProposal,
                    c -> c.getThesisProposal().getStudentThesisCandidacySet().size()));
    model.addAttribute("applicationCountByProposalConfig", applicationCountByProposalConfig);
    Map<ThesisProposal, Integer> applicationCountByProposalReg = proposalsByReg.values().stream()
            .flatMap(Set::stream)
            .collect(Collectors.toMap(tp -> tp, tp -> tp.getStudentThesisCandidacySet().size()));
    model.addAttribute("applicationCountByProposalReg", applicationCountByProposalReg);
    Set<ThesisProposal> acceptedProposals = proposalsByReg.values().stream().flatMap(Set::stream)
            .flatMap(tp -> tp.getStudentThesisCandidacySet().stream())
            .filter(candidacy -> candidacy.getAcceptedByAdvisor())
            .map(candidacy -> candidacy.getThesisProposal()).collect(Collectors.toSet());
    model.addAttribute("acceptedProposals", acceptedProposals);
    return "studentCandidacies/list";
}
From source file:org.apache.axis2.deployment.ServiceDeployer.java
public void deploy(DeploymentFileData deploymentFileData) throws DeploymentException {
    boolean isDirectory = deploymentFileData.getFile().isDirectory();
    ArchiveReader archiveReader;
    StringWriter errorWriter = new StringWriter();
    archiveReader = new ArchiveReader();
    String serviceStatus = "";
    try {
        deploymentFileData.setClassLoader(isDirectory, axisConfig.getServiceClassLoader(),
                (File) axisConfig.getParameterValue(Constants.Configuration.ARTIFACTS_TEMP_DIR),
                axisConfig.isChildFirstClassLoading());
        HashMap<String, AxisService> wsdlservice = archiveReader.processWSDLs(deploymentFileData);
        if (wsdlservice != null && wsdlservice.size() > 0) {
            for (AxisService service : wsdlservice.values()) {
                Iterator<AxisOperation> operations = service.getOperations();
                while (operations.hasNext()) {
                    AxisOperation axisOperation = operations.next();
                    axisConfig.getPhasesInfo().setOperationPhases(axisOperation);
                }
            }
        }
        AxisServiceGroup serviceGroup = new AxisServiceGroup(axisConfig);
        serviceGroup.setServiceGroupClassLoader(deploymentFileData.getClassLoader());
        ArrayList<AxisService> serviceList = archiveReader.processServiceGroup(
                deploymentFileData.getAbsolutePath(), deploymentFileData, serviceGroup, isDirectory,
                wsdlservice, configCtx);
        URL location = deploymentFileData.getFile().toURL();
        // Add the hierarchical path to the service group
        if (location != null) {
            String serviceHierarchy = Utils.getServiceHierarchy(location.getPath(), this.directory);
            if (serviceHierarchy != null && !"".equals(serviceHierarchy)) {
                serviceGroup.setServiceGroupName(serviceHierarchy + serviceGroup.getServiceGroupName());
                for (AxisService axisService : serviceList) {
                    axisService.setName(serviceHierarchy + axisService.getName());
                }
            }
        }
        DeploymentEngine.addServiceGroup(serviceGroup, serviceList, location, deploymentFileData, axisConfig);
        super.deploy(deploymentFileData);
    } catch (DeploymentException de) {
        de.printStackTrace();
        log.error(Messages.getMessage(DeploymentErrorMsgs.INVALID_SERVICE, deploymentFileData.getName(),
                de.getMessage()), de);
        PrintWriter error_ptintWriter = new PrintWriter(errorWriter);
        de.printStackTrace(error_ptintWriter);
        serviceStatus = "Error:\n" + errorWriter.toString();
        throw de;
    } catch (AxisFault axisFault) {
        log.error(Messages.getMessage(DeploymentErrorMsgs.INVALID_SERVICE, deploymentFileData.getName(),
                axisFault.getMessage()), axisFault);
        PrintWriter error_ptintWriter = new PrintWriter(errorWriter);
        axisFault.printStackTrace(error_ptintWriter);
        serviceStatus = "Error:\n" + errorWriter.toString();
        throw new DeploymentException(axisFault);
    } catch (Exception e) {
        if (log.isInfoEnabled()) {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            e.printStackTrace(pw);
            log.info(Messages.getMessage(DeploymentErrorMsgs.INVALID_SERVICE, deploymentFileData.getName(),
                    sw.getBuffer().toString()));
        }
        PrintWriter error_ptintWriter = new PrintWriter(errorWriter);
        e.printStackTrace(error_ptintWriter);
        serviceStatus = "Error:\n" + errorWriter.toString();
        throw new DeploymentException(e);
    } catch (Throwable t) {
        if (log.isInfoEnabled()) {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            t.printStackTrace(pw);
            log.info(Messages.getMessage(DeploymentErrorMsgs.INVALID_SERVICE, deploymentFileData.getName(),
                    sw.getBuffer().toString()));
        }
        PrintWriter error_ptintWriter = new PrintWriter(errorWriter);
        t.printStackTrace(error_ptintWriter);
        serviceStatus = "Error:\n" + errorWriter.toString();
        throw new DeploymentException(new Exception(t));
    } finally {
        if (serviceStatus.startsWith("Error:")) {
            axisConfig.getFaultyServices().put(deploymentFileData.getFile().getAbsolutePath(), serviceStatus);
        }
    }
}
From source file:com.grupohqh.carservices.operator.ReadTagActivity.java
@Override
public void onResume() {
    super.onResume();
    etSerialNumber.setText("");
    etLicensePlate.setText("");
    etTag.setText("");
    if (useMiniMe) {
        etEpc.setText("");
        txtStatus.setText("desconectado"); // "disconnected"
        try {
            HashMap<String, UsbDevice> deviceList = manager.getDeviceList();
            Iterator<UsbDevice> deviceIterator = deviceList.values().iterator();
            while (deviceIterator.hasNext()) {
                UsbDevice device = deviceIterator.next();
                if (device.getProductId() == PID && device.getVendorId() == VID)
                    if (!manager.hasPermission(device)) {
                        txtStatus.setText("sin permisos"); // "no permissions"
                        manager.requestPermission(device, PendingIntent.getBroadcast(this, 0,
                                new Intent(ACTION_USB_PERMISSION), 0));
                        break;
                    } else {
                        txtStatus.setText("conectado"); // "connected"
                    }
            }
        } catch (Exception e) {
            Log.d("USB error", e.getMessage());
        }
    }
}
From source file:com.github.haixing_hu.bean.DefaultProperty.java
@Override
public final void setMappedValue(final HashMap<String, Object> map) {
    checkKind(PropertyKind.MAPPED);
    requireNonNull("map", map);
    for (final Object obj : map.values()) {
        checkType(obj);
    }
    @SuppressWarnings("unchecked")
    final HashMap<String, Object> valueMap = (HashMap<String, Object>) value;
    valueMap.clear();
    valueMap.putAll(map);
}
From source file:disko.flow.analyzers.hgdb.RelationCounterAnalyzer.java
public void process(AnalysisContext<TextDocument> context, Ports ports) throws InterruptedException {
    final HyperGraph graph = this.graph != null ? this.graph : context.getGraph();
    RelationCountFactory.createCountingIndices(graph);
    InputPort<EntityMaintainer> entityInput = ports.getInput(EntityAnalyzer.ENTITY_CHANNEL);
    InputPort<RelexTaskResult> parseInput = ports.getInput(FullRelexAnalyzer.PARSE_CHANNEL);
    for (RelexTaskResult parses = parseInput.take(); !parseInput.isEOS(parses); parses = parseInput.take()) {
        EntityMaintainer em = entityInput.take();
        if (entityInput.isEOS(em))
            break;
        final HashMap<String, String> entityTypes = RelationCountFactory.getEntityTypes(em);
        log.debug("Counting relations for all parses for: " + em.getOriginalSentence());
        final RelexTaskResult currentParses = parses;
        //
        // We encapsulate the processing of a single sentence in a HGDB transaction.
        // This gives a considerable performance boost because when there is no current
        // transaction in effect, HGDB will create a transaction for every single query.
        // For a sentence with, say, 20 parses of about 20 relations each this yields
        // 2800 transactions (opening and committing a transaction is a costly operation).
        //
        try {
            log.info("Saving parses for " + em.getOriginalSentence());
            long startTime = System.currentTimeMillis();
            graph.getTransactionManager().transact(new Callable<Object>() {
                public Object call() {
                    for (ParsedSentence parsedSentence : currentParses.result.getParses()) {
                        log.debug(parsedSentence);
                        ArrayList<RelationCount> relationCounts = RelationCountFactory
                                .getRelationCounts(entityTypes, parsedSentence);
                        HashMap<HGHandle, RelationCount> counts = new HashMap<HGHandle, RelationCount>();
                        for (RelationCount r : relationCounts)
                            incrementCounts(graph, counts, r, currentParses.result.getParses().size());
                        for (RelationCount r : counts.values())
                            graph.update(r);
                    }
                    return null;
                }
            });
            log.info("Parses saved, total time elapsed=" + (System.currentTimeMillis() - startTime) / 1000.0);
        } catch (Throwable t) {
            log.error("While storing counts for " + em.getOriginalSentence(), t);
        }
        log.debug("Relation count completed.");
    }
    log.debug("RelationCounterProcessor ended");
}
From source file:com.xandy.calendar.selectcalendars.SelectCalendarsSyncFragment.java
@Override
public void onPause() {
    final ListAdapter listAdapter = getListAdapter();
    if (listAdapter != null) {
        HashMap<Long, SelectCalendarsSyncAdapter.CalendarRow> changes = ((SelectCalendarsSyncAdapter) listAdapter)
                .getChanges();
        if (changes != null && changes.size() > 0) {
            for (SelectCalendarsSyncAdapter.CalendarRow row : changes.values()) {
                if (row.synced == row.originalSynced) {
                    continue;
                }
                long id = row.id;
                mService.cancelOperation((int) id);
                // Use the full long id in case it makes a difference
                Uri uri = ContentUris.withAppendedId(Calendars.CONTENT_URI, row.id);
                ContentValues values = new ContentValues();
                // Toggle the current setting
                int synced = row.synced ? 1 : 0;
                values.put(Calendars.SYNC_EVENTS, synced);
                values.put(Calendars.VISIBLE, synced);
                mService.startUpdate((int) id, null, uri, values, null, null, 0);
            }
            changes.clear();
        }
    }
    getActivity().getContentResolver().unregisterContentObserver(mCalendarsObserver);
    super.onPause();
}
From source file:nl.b3p.kaartenbalie.service.LayerValidator.java
/**
 * Returns the combined SRSes that all of the given layers support.
 *
 * Every Layer shall have at least one <SRS> element that is either stated explicitly or
 * inherited from a parent Layer (Section 7.1.4.6). The root <Layer> element shall include a
 * sequence of zero or more SRS elements listing all SRSes that are common to all
 * subsidiary layers. Use a single SRS element with empty content (like so: "<SRS></SRS>") if
 * there is no common SRS. Layers may optionally add to the global SRS list, or to the list
 * inherited from a parent layer. Any duplication shall be ignored by clients.
 */
public String[] validateSRS() {
    HashMap hm = new HashMap();
    Iterator lit = layers.iterator();
    // A counter for all layers that have at least one SRS.
    int tellerMeeTellendeLayers = 0;
    // Iterate over the layers.
    while (lit.hasNext()) {
        HashMap supportedByLayer = new HashMap();
        addLayerSupportedSRS((Layer) lit.next(), supportedByLayer);
        if (supportedByLayer.size() > 0) {
            tellerMeeTellendeLayers++;
            Iterator i = supportedByLayer.values().iterator();
            while (i.hasNext()) {
                String srs = (String) i.next();
                addSrsCount(hm, srs);
            }
        }
    }
    ArrayList supportedSrsen = new ArrayList();
    Iterator it = hm.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry entry = (Map.Entry) it.next();
        int i = ((Integer) entry.getValue()).intValue();
        if (i >= tellerMeeTellendeLayers) {
            supportedSrsen.add((String) entry.getKey());
        }
    }
    // Add an empty SRS if no common SRS was found.
    if (supportedSrsen.isEmpty()) {
        supportedSrsen.add("");
    }
    String[] returnValue = new String[supportedSrsen.size()];
    for (int i = 0; i < returnValue.length; i++) {
        if (supportedSrsen.get(i) != null) {
            returnValue[i] = (String) supportedSrsen.get(i);
        }
    }
    return returnValue;
}