List of usage examples for the java.util.EnumMap constructors
public EnumMap(Map<K, ? extends V> m)
From source file: org.apache.hadoop.corona.ClusterManager.java
@Override public List<RunningSession> getSessions() throws TException, SafeModeException { checkSafeMode("getSessions"); List<RunningSession> runningSessions = new LinkedList<RunningSession>(); Set<String> sessions = sessionManager.getSessions(); for (String sessionId : sessions) { try {/*from w w w .j a va 2 s . c om*/ Session session = sessionManager.getSession(sessionId); synchronized (session) { RunningSession runningSession = new RunningSession(session.getHandle(), session.getName(), session.getUserId(), PoolInfo.createPoolInfoStrings(session.getPoolInfo())); runningSession.setDeadline(session.getDeadline()); runningSession.setPriority(session.getInfo().getPriority()); Map<ResourceType, Integer> runningResources = new EnumMap<ResourceType, Integer>( ResourceType.class); for (ResourceType type : ResourceType.values()) { runningResources.put(type, session.getGrantCountForType(type)); } runningSession.setRunningResources(runningResources); runningSessions.add(runningSession); } } catch (InvalidSessionHandle invalidSessionHandle) { // This is no big deal, just means that the session has finished } } return runningSessions; }
From source file: org.squashtest.tm.service.internal.requirement.VerifiedRequirementsManagerServiceImpl.java
/** * Return a merged map. For each {@link ExecutionStatus}, the returned value is the value in map1 + value in map 2. * The state of the two arguments maps is preserved * @param mainStatusMap/* ww w. j a va 2s.com*/ * @param descendantStatusMap * @return */ private Map<ExecutionStatus, Long> mergeMapResult(Map<ExecutionStatus, Long> mainStatusMap, Map<ExecutionStatus, Long> descendantStatusMap) { Map<ExecutionStatus, Long> mergedStatusMap = new EnumMap<>(ExecutionStatus.class); EnumSet<ExecutionStatus> allStatus = EnumSet.allOf(ExecutionStatus.class); for (ExecutionStatus executionStatus : allStatus) { Long mainCount = mainStatusMap.get(executionStatus) == null ? 0 : mainStatusMap.get(executionStatus); Long descendantCount = descendantStatusMap.get(executionStatus) == null ? 0 : descendantStatusMap.get(executionStatus); Long totalCount = mainCount + descendantCount; mergedStatusMap.put(executionStatus, totalCount); } return mergedStatusMap; }
From source file: com.github.helenusdriver.driver.impl.DataDecoder.java
/**
 * Gets a "map" to {@link Map} decoder based on the given key and value classes.
 *
 * <p>The returned decoder converts a raw Cassandra map column into either an
 * {@link EnumMap} (when the target key class is an enum) or a
 * {@link LinkedHashMap} (otherwise, to preserve the driver's entry order),
 * converting each key and value with {@code ElementConverter} when the column's
 * declared element types differ from the requested classes.
 *
 * @author paouelle
 *
 * @param ekclazz the non-<code>null</code> class of keys
 * @param evclazz the non-<code>null</code> class of values
 * @param mandatory if the field associated with the decoder is mandatory or
 *        represents a primary key
 * @return the non-<code>null</code> decoder for maps of the specified key and
 *         value classes
 */
@SuppressWarnings("rawtypes")
public final static DataDecoder<Map> map(final Class<?> ekclazz, final Class<?> evclazz,
    final boolean mandatory) {
  return new DataDecoder<Map>(Map.class) {
    // Shared implementation for both Row and UDTValue sources.
    // ektype/evtype are the element types reported by the column metadata;
    // ekclazz/evclazz are the types the caller wants back.
    @SuppressWarnings("unchecked")
    private Map decodeImpl(Class<?> ektype, Class<?> evtype, Map<Object, Object> map) {
      if (map == null) {
        // safe to return as is unless mandatory, that is because Cassandra
        // returns null for empty list and the schema definition requires
        // that mandatory and primary keys be non null
        if (mandatory) {
          if (ekclazz.isEnum()) {
            // for enum keys we create an enum map. Now we won't preserve the order
            // the entries were added but that should be fine anyways in this case
            return new EnumMap(ekclazz);
          }
          return new LinkedHashMap(16); // to keep order
        }
        return map;
      }
      final Map nmap;
      if (ekclazz.isEnum()) {
        // for enum keys we create an enum map. Now we won't preserve the order
        // the entries were added but that should be fine anyways in this case
        nmap = new EnumMap(ekclazz);
      } else {
        nmap = new LinkedHashMap(map.size()); // to keep order
      }
      if (ekclazz.isAssignableFrom(ektype) && evclazz.isAssignableFrom(evtype)) {
        // element types already compatible: bulk copy, no per-entry conversion
        nmap.putAll(map);
      } else {
        // will need to do some conversion of each element
        final ElementConverter kconverter = ElementConverter.getConverter(ekclazz, ektype);
        final ElementConverter vconverter = ElementConverter.getConverter(evclazz, evtype);
        for (final Map.Entry e : map.entrySet()) {
          final Object k = e.getKey();
          final Object v = e.getValue();
          // nulls pass through unconverted
          nmap.put((k != null) ? kconverter.convert(k) : null,
              (v != null) ? vconverter.convert(v) : null);
        }
      }
      return nmap;
    }
    @SuppressWarnings("unchecked")
    @Override
    protected Map decodeImpl(Row row, String name, Class clazz) {
      return decodeImpl(
          // get the element type from the row's metadata
          row.getColumnDefinitions().getType(name).getTypeArguments().get(0).getName().asJavaClass(),
          row.getColumnDefinitions().getType(name).getTypeArguments().get(1).getName().asJavaClass(),
          row.isNull(name) ? null : row.getMap(name, Object.class, Object.class)
          // keeps things generic so we can handle our own errors
      );
    }
    @SuppressWarnings("unchecked")
    @Override
    protected Map decodeImpl(UDTValue uval, String name, Class clazz) {
      return decodeImpl(
          // get the element type from the row's metadata
          uval.getType().getFieldType(name).getTypeArguments().get(0).getName().asJavaClass(),
          uval.getType().getFieldType(name).getTypeArguments().get(1).getName().asJavaClass(),
          uval.isNull(name) ? null : uval.getMap(name, Object.class, Object.class)
          // keeps things generic so we can handle our own errors
      );
    }
  };
}
From source file: pl.edu.icm.cermine.pubmed.PubmedXMLGenerator.java
/**
 * Builds a ground-truth (TrueViz) {@link BxDocument}: extracts the zone structure from
 * the PDF stream, harvests labelled text fragments from the accompanying NLM (PubMed
 * XML) stream, and assigns a {@link BxZoneLabel} to each PDF zone by fuzzy-matching the
 * zone text against those fragments (Smith-Waterman and cosine similarity, followed by
 * several fallback heuristics and neighbour-based label inference).
 *
 * @param pdfStream the article PDF
 * @param nlmStream the matching NLM XML metadata
 * @return the structure document with zone labels set (label may be null if undecidable)
 */
public BxDocument generateTrueViz(InputStream pdfStream, InputStream nlmStream)
        throws AnalysisException, ParserConfigurationException, SAXException, IOException,
        XPathExpressionException, TransformationException {
    XPath xpath = XPathFactory.newInstance().newXPath();
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    // Disable validation and external DTD loading: NLM files reference DTDs that may
    // not be resolvable offline, and we only need the raw element content.
    dbf.setValidating(false);
    dbf.setFeature("http://xml.org/sax/features/namespaces", false);
    dbf.setFeature("http://xml.org/sax/features/validation", false);
    dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false);
    dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    DocumentBuilder builder = dbf.newDocumentBuilder();
    Document domDoc = builder.parse(nlmStream);
    PdfBxStructureExtractor structureExtractor = new PdfBxStructureExtractor();
    BxDocument bxDoc = structureExtractor.extractStructure(pdfStream);
    Integer bxDocLen = bxDoc.asZones().size();
    // entries: labelled text fragments harvested from the XML; putIf skips null/empty.
    SmartHashMap entries = new SmartHashMap();
    //abstract
    Node abstractNode = (Node) xpath.evaluate("/article/front/article-meta/abstract", domDoc,
            XPathConstants.NODE);
    String abstractString = XMLTools.extractTextFromNode(abstractNode);
    entries.putIf("Abstract " + abstractString, BxZoneLabel.MET_ABSTRACT);
    entries.putIf("Abstract", BxZoneLabel.MET_ABSTRACT);
    //title
    String titleString = (String) xpath.evaluate(
            "/article/front/article-meta/title-group/article-title", domDoc, XPathConstants.STRING);
    entries.putIf(titleString, BxZoneLabel.MET_TITLE);
    String subtitleString = (String) xpath.evaluate(
            "/article/front/article-meta/title-group/article-subtitle", domDoc, XPathConstants.STRING);
    entries.putIf(subtitleString, BxZoneLabel.MET_TITLE);
    //journal title (two possible locations depending on NLM version)
    String journalTitleString = (String) xpath.evaluate("/article/front/journal-meta/journal-title",
            domDoc, XPathConstants.STRING);
    if (journalTitleString == null || journalTitleString.isEmpty()) {
        journalTitleString = (String) xpath.evaluate(
                "/article/front/journal-meta/journal-title-group/journal-title", domDoc,
                XPathConstants.STRING);
    }
    entries.putIf(journalTitleString, BxZoneLabel.MET_BIB_INFO);
    //journal publisher
    String journalPublisherString = (String) xpath.evaluate(
            "/article/front/journal-meta/publisher/publisher-name", domDoc, XPathConstants.STRING);
    entries.putIf(journalPublisherString, BxZoneLabel.MET_BIB_INFO);
    String journalPublisherIdString = (String) xpath.evaluate(
            "/article/front/journal-meta/journal-id[@journal-id-type='publisher-id']", domDoc,
            XPathConstants.STRING);
    entries.putIf(journalPublisherIdString, BxZoneLabel.MET_BIB_INFO);
    //journal issn
    String journalISSNString = (String) xpath.evaluate("/article/front/journal-meta/issn", domDoc,
            XPathConstants.STRING);
    entries.putIf(journalISSNString, BxZoneLabel.MET_BIB_INFO);
    //copyright/permissions
    String permissionsString = XMLTools.extractTextFromNode(
            (Node) xpath.evaluate("/article/front/article-meta/permissions", domDoc,
                    XPathConstants.NODE));
    entries.putIf(permissionsString, BxZoneLabel.MET_COPYRIGHT);
    //license
    Node licenseNode = (Node) xpath.evaluate("/article/front/article-meta/license", domDoc,
            XPathConstants.NODE);
    String licenseString = (String) XMLTools.extractTextFromNode(licenseNode);
    entries.putIf(licenseString, BxZoneLabel.MET_COPYRIGHT);
    //article type
    NodeList articleTypeNodes = (NodeList) xpath.evaluate("/article/@article-type", domDoc,
            XPathConstants.NODESET);
    List<String> articleTypeStrings = XMLTools.extractTextAsList(articleTypeNodes);
    Node articleTypeNode = (Node) xpath.evaluate(
            "/article/front/article-meta/article-categories/subj-group", domDoc, XPathConstants.NODE);
    articleTypeStrings.add(XMLTools.extractTextFromNode(articleTypeNode));
    entries.putIf(articleTypeStrings, BxZoneLabel.MET_TYPE);
    //received date (needs at least day/month/year, hence the size check)
    List<String> receivedDate = XMLTools.extractChildrenAsTextList((Node) xpath.evaluate(
            "/article/front/article-meta/history/date[@date-type='received']", domDoc,
            XPathConstants.NODE));
    if (!receivedDate.isEmpty() && receivedDate.size() >= 3) {
        for (String date : StringTools.produceDates(receivedDate)) {
            entries.putIf(date, BxZoneLabel.MET_DATES);
        }
    }
    //accepted date
    List<String> acceptedDate = XMLTools.extractChildrenAsTextList((Node) xpath.evaluate(
            "/article/front/article-meta/history/date[@date-type='accepted']", domDoc,
            XPathConstants.NODE));
    if (!acceptedDate.isEmpty() && acceptedDate.size() >= 3) {
        for (String date : StringTools.produceDates(acceptedDate)) {
            entries.putIf(date, BxZoneLabel.MET_DATES);
        }
    }
    //publication date: prefer 'epub' when several pub-date elements exist, else 'collection'
    List<String> pubdateString;
    if (((NodeList) xpath.evaluate("/article/front/article-meta/pub-date", domDoc,
            XPathConstants.NODESET)).getLength() > 1) {
        Node pubdateNode = (Node) xpath.evaluate(
                "/article/front/article-meta/pub-date[@pub-type='epub']", domDoc, XPathConstants.NODE);
        pubdateString = XMLTools.extractChildrenAsTextList(pubdateNode);
    } else {
        Node pubdateNode = (Node) xpath.evaluate(
                "/article/front/article-meta/pub-date[@pub-type='collection']", domDoc,
                XPathConstants.NODE);
        pubdateString = XMLTools.extractChildrenAsTextList(pubdateNode);
    }
    if (pubdateString != null && pubdateString.size() >= 3) {
        for (String date : StringTools.produceDates(pubdateString)) {
            entries.putIf(date, BxZoneLabel.MET_DATES);
        }
    }
    // NOTE(review): this NPEs if extractChildrenAsTextList returned null above (the
    // preceding guard suggests null is possible) — confirm against XMLTools.
    pubdateString.clear();
    // second pass for the print ('ppub') date
    if (((NodeList) xpath.evaluate("/article/front/article-meta/pub-date", domDoc,
            XPathConstants.NODESET)).getLength() > 1) {
        Node pubdateNode = (Node) xpath.evaluate(
                "/article/front/article-meta/pub-date[@pub-type='ppub']", domDoc, XPathConstants.NODE);
        pubdateString = XMLTools.extractChildrenAsTextList(pubdateNode);
    }
    if (pubdateString != null && pubdateString.size() >= 3) {
        for (String date : StringTools.produceDates(pubdateString)) {
            entries.putIf(date, BxZoneLabel.MET_DATES);
        }
    }
    String extLink = (String) xpath.evaluate(
            "/article/front/article-meta/ext-link[@ext-link-type='uri']/xlink:href", domDoc,
            XPathConstants.STRING);
    printlnVerbose(extLink);
    entries.putIf(extLink, BxZoneLabel.MET_ACCESS_DATA);
    //keywords
    Node keywordsNode = (Node) xpath.evaluate("/article/front/article-meta/kwd-group", domDoc,
            XPathConstants.NODE);
    String keywordsString = XMLTools.extractTextFromNode(keywordsNode);
    entries.putIf(keywordsString, BxZoneLabel.MET_KEYWORDS);
    //DOI
    String doiString = (String) xpath.evaluate(
            "/article/front/article-meta/article-id[@pub-id-type='doi']", domDoc,
            XPathConstants.STRING);
    entries.putIf("DOI " + doiString, BxZoneLabel.MET_BIB_INFO);
    //volume
    String volumeString = (String) xpath.evaluate("/article/front/article-meta/volume", domDoc,
            XPathConstants.STRING);
    entries.putIf("volume " + volumeString, BxZoneLabel.MET_BIB_INFO);
    entries.putIf("vol " + volumeString, BxZoneLabel.MET_BIB_INFO);
    //issue
    String issueString = (String) xpath.evaluate("/article/front/article-meta/issue", domDoc,
            XPathConstants.STRING);
    entries.putIf("number " + issueString, BxZoneLabel.MET_BIB_INFO);
    entries.putIf("journal", BxZoneLabel.MET_BIB_INFO);
    entries.putIf("et al", BxZoneLabel.MET_BIB_INFO);
    List<String> authorNames = new ArrayList<String>();
    List<String> authorEmails = new ArrayList<String>();
    List<String> authorAffiliations = new ArrayList<String>();
    List<String> editors = new ArrayList<String>();
    //pages
    String fPage = (String) xpath.evaluate("/article/front/article-meta/fpage", domDoc,
            XPathConstants.STRING);
    String lPage = (String) xpath.evaluate("/article/front/article-meta/lpage", domDoc,
            XPathConstants.STRING);
    entries.putIf("pages " + fPage + " " + lPage, BxZoneLabel.MET_BIB_INFO);
    entries.putIf("pp " + fPage + " " + lPage, BxZoneLabel.MET_BIB_INFO);
    entries.putIf(fPage, BxZoneLabel.MET_BIB_INFO);
    entries.putIf(lPage, BxZoneLabel.MET_BIB_INFO);
    entries.putIf(lPage, BxZoneLabel.OTH_PAGE_NUMBER);
    // NOTE(review): duplicated line in the original — possibly meant fPage; preserved as-is.
    entries.putIf(lPage, BxZoneLabel.OTH_PAGE_NUMBER);
    try {
        // register every page number in the article's range as a page-number fragment
        int f = Integer.valueOf(fPage);
        int l = Integer.valueOf(lPage);
        while (f < l) {
            f++;
            entries.putIf(String.valueOf(f), BxZoneLabel.OTH_PAGE_NUMBER);
        }
    } catch (NumberFormatException ex) {
        // non-numeric page markers: skip the page-number range entries
    }
    entries.putIf("page of", BxZoneLabel.OTH_PAGE_NUMBER);
    //editors
    NodeList editorNodes = (NodeList) xpath.evaluate(
            "/article/front/article-meta/contrib-group/contrib[@contrib-type='editor']", domDoc,
            XPathConstants.NODESET);
    for (int nodeIdx = 0; nodeIdx < editorNodes.getLength(); ++nodeIdx) {
        String editorString = XMLTools.extractTextFromNode(editorNodes.item(nodeIdx));
        editors.add(editorString);
    }
    entries.putIf(StringTools.joinStrings(editors), BxZoneLabel.MET_EDITOR);
    NodeList authorsResult = (NodeList) xpath.evaluate(
            "/article/front/article-meta/contrib-group/contrib[@contrib-type='author']", domDoc,
            XPathConstants.NODESET);
    for (int nodeIdx = 0; nodeIdx < authorsResult.getLength(); ++nodeIdx) {
        Node curNode = authorsResult.item(nodeIdx);
        //author names
        String name = (String) xpath.evaluate("name/given-names", curNode, XPathConstants.STRING);
        String surname = (String) xpath.evaluate("name/surname", curNode, XPathConstants.STRING);
        //author affiliation
        List<String> aff = XMLTools.extractTextAsList((NodeList) xpath.evaluate(
                "/article/front/article-meta/contrib-group/aff", domDoc, XPathConstants.NODESET));
        //author correspondence: try address/email first, then bare email
        String email;
        try {
            email = (String) xpath.evaluate("address/email", curNode, XPathConstants.STRING);
        } catch (XPathExpressionException e) {
            email = "";
        }
        if (email.isEmpty()) {
            try {
                email = (String) xpath.evaluate("email", curNode, XPathConstants.STRING);
            } catch (XPathExpressionException e) {
                //yaaay, probably there is no e-mail at all! => do nothing
            }
        }
        if (!email.isEmpty()) {
            authorEmails.add(email);
        }
        if (!aff.isEmpty()) {
            authorAffiliations.addAll(aff);
        }
        authorNames.add(name + " " + surname);
    }
    entries.putIf(StringTools.joinStrings(authorNames), BxZoneLabel.MET_AUTHOR);
    //authors' affiliations
    NodeList affNodes = (NodeList) xpath.evaluate("/article/front/article-meta/aff", domDoc,
            XPathConstants.NODESET);
    authorAffiliations.addAll(XMLTools.extractTextAsList(affNodes));
    entries.putIf(authorAffiliations, BxZoneLabel.MET_AFFILIATION);
    //correspondence again
    NodeList correspNodes = (NodeList) xpath.evaluate(
            "/article/front/article-meta/author-notes/corresp", domDoc, XPathConstants.NODESET);
    authorEmails.add(XMLTools.extractTextFromNodes(correspNodes));
    entries.putIf(authorEmails, BxZoneLabel.MET_CORRESPONDENCE);
    //author notes
    Node notesNode = (Node) xpath.evaluate("/article/front/article-meta/author-notes/corresp/fn",
            domDoc, XPathConstants.NODE);
    String notesString = XMLTools.extractTextFromNode(notesNode);
    entries.putIf(notesString, BxZoneLabel.MET_CORRESPONDENCE);
    notesString = XMLTools.extractTextFromNode(
            (Node) xpath.evaluate("/article/back/notes", domDoc, XPathConstants.NODE));
    //article body
    NodeList paragraphNodes = (NodeList) xpath.evaluate("/article/body//p", domDoc,
            XPathConstants.NODESET);
    List<String> paragraphStrings = XMLTools.extractTextAsList(paragraphNodes);
    entries.putIf(paragraphStrings, BxZoneLabel.BODY_CONTENT);
    NodeList appNodes = (NodeList) xpath.evaluate("/article/back/app-group//p", domDoc,
            XPathConstants.NODESET);
    String appStrings = XMLTools.extractTextFromNodes(appNodes);
    entries.putIf(appStrings, BxZoneLabel.BODY_CONTENT);
    //section titles
    NodeList sectionTitleNodes = (NodeList) xpath.evaluate("/article/body//title", domDoc,
            XPathConstants.NODESET);
    List<String> sectionTitles = XMLTools.extractTextAsList(sectionTitleNodes);
    entries.putIf(sectionTitles, BxZoneLabel.BODY_CONTENT);
    NodeList appTitleNodes = (NodeList) xpath.evaluate("/article/back/app-group//title", domDoc,
            XPathConstants.NODESET);
    List<String> appTitles = XMLTools.extractTextAsList(appTitleNodes);
    entries.putIf(appTitles, BxZoneLabel.BODY_CONTENT);
    //figures: collected from every location figures may appear in
    NodeList figureNodes = (NodeList) xpath.evaluate("/article/floats-wrap//fig", domDoc,
            XPathConstants.NODESET);
    List<String> figureStrings = XMLTools.extractTextAsList(figureNodes);
    figureNodes = (NodeList) xpath.evaluate("/article/floats-group//fig", domDoc,
            XPathConstants.NODESET);
    figureStrings.addAll(XMLTools.extractTextAsList(figureNodes));
    figureNodes = (NodeList) xpath.evaluate("/article/back//fig", domDoc, XPathConstants.NODESET);
    figureStrings.addAll(XMLTools.extractTextAsList(figureNodes));
    figureNodes = (NodeList) xpath.evaluate("/article/body//fig", domDoc, XPathConstants.NODESET);
    figureStrings.addAll(XMLTools.extractTextAsList(figureNodes));
    figureNodes = (NodeList) xpath.evaluate("/article/back/app-group//fig", domDoc,
            XPathConstants.NODESET);
    figureStrings.addAll(XMLTools.extractTextAsList(figureNodes));
    entries.putIf(figureStrings, BxZoneLabel.BODY_FIGURE);
    //tables
    List<String> tableCaptions = new ArrayList<String>();
    List<String> tableBodies = new ArrayList<String>();
    List<String> tableFootnotes = new ArrayList<String>();
    //tableNodes
    NodeList tableNodes = (NodeList) xpath.evaluate("/article//table-wrap", domDoc,
            XPathConstants.NODESET);
    for (Integer nodeIdx = 0; nodeIdx < tableNodes.getLength(); ++nodeIdx) {
        Node tableNode = tableNodes.item(nodeIdx);
        String caption = (String) xpath.evaluate("caption", tableNode, XPathConstants.STRING);
        tableCaptions.add(caption);
        String body = XMLTools.extractTextFromNode(
                (Node) xpath.evaluate("table", tableNode, XPathConstants.NODE));
        tableBodies.add(body);
        List<String> footnotes = XMLTools.extractTextAsList(
                (NodeList) xpath.evaluate("table-wrap-foot/fn", tableNode, XPathConstants.NODESET));
        tableFootnotes.addAll(footnotes);
        entries.putIf(caption, BxZoneLabel.BODY_TABLE);
        entries.putIf(body, BxZoneLabel.BODY_TABLE);
        entries.putIf(footnotes, BxZoneLabel.BODY_TABLE);
    }
    //financial disclosure
    String financialDisclosure = XMLTools.extractTextFromNode((Node) xpath.evaluate(
            "/article//fn[@fn-type='financial-disclosure']", domDoc, XPathConstants.NODE));
    entries.putIf(financialDisclosure, BxZoneLabel.BODY_ACKNOWLEDGMENT);
    //conflict
    String conflictString = XMLTools.extractTextFromNode(
            (Node) xpath.evaluate("/article//fn[@fn-type='conflict']", domDoc, XPathConstants.NODE));
    entries.putIf(conflictString, BxZoneLabel.BODY_CONFLICT_STMT);
    //copyright
    String copyrightString = XMLTools.extractTextFromNode((Node) xpath.evaluate(
            "/article/front/article-meta/permissions/copyright-statement", domDoc,
            XPathConstants.NODE));
    entries.putIf(copyrightString, BxZoneLabel.MET_COPYRIGHT);
    //acknowledgment
    String acknowledgement = XMLTools.extractTextFromNode(
            (Node) xpath.evaluate("/article/back/ack", domDoc, XPathConstants.NODE));
    entries.putIf(acknowledgement, BxZoneLabel.BODY_ACKNOWLEDGMENT);
    acknowledgement = XMLTools.extractTextFromNode(
            (Node) xpath.evaluate("/article/back/fn-group/fn", domDoc, XPathConstants.NODE));
    entries.putIf(acknowledgement, BxZoneLabel.BODY_CONFLICT_STMT);
    //glossary
    String glossary = XMLTools.extractTextFromNode(
            (Node) xpath.evaluate("/article/back/glossary", domDoc, XPathConstants.NODE));
    entries.putIf(glossary, BxZoneLabel.BODY_GLOSSARY);
    //formula: label and the formula content (excluding the label child) are separate entries
    NodeList formulaNodes = (NodeList) xpath.evaluate("/article/body//disp-formula", domDoc,
            XPathConstants.NODESET);
    for (int nodeIdx = 0; nodeIdx < formulaNodes.getLength(); ++nodeIdx) {
        Node curFormulaNode = formulaNodes.item(nodeIdx);
        String label = (String) xpath.evaluate("label", curFormulaNode);
        entries.putIf(label, BxZoneLabel.BODY_EQUATION);
        NodeList curNodeChildren = curFormulaNode.getChildNodes();
        List<String> formulaParts = new ArrayList<String>();
        for (int childIdx = 0; childIdx < curNodeChildren.getLength(); ++childIdx) {
            Node curChild = curNodeChildren.item(childIdx);
            if (curChild.getNodeName().equals("label")) {
                continue;
            }
            formulaParts.add(XMLTools.extractTextFromNode(curChild));
        }
        entries.putIf(StringTools.joinStrings(formulaParts), BxZoneLabel.BODY_EQUATION);
    }
    //references
    List<String> refStrings = new ArrayList<String>();
    Node refParentNode = (Node) xpath.evaluate("/article/back/ref-list", domDoc,
            XPathConstants.NODE);
    if (refParentNode != null) {
        for (Integer refIdx = 0; refIdx < refParentNode.getChildNodes().getLength(); ++refIdx) {
            refStrings.add(XMLTools.extractTextFromNode(refParentNode.getChildNodes().item(refIdx)));
        }
    }
    entries.putIf(StringTools.joinStrings(refStrings), BxZoneLabel.REFERENCES);
    entries.put("references", BxZoneLabel.REFERENCES);
    // Collect every token seen in any bib-info fragment into one combined entry.
    Set<String> allBibInfos = new HashSet<String>();
    for (Entry<String, BxZoneLabel> entry : entries.entrySet()) {
        if (BxZoneLabel.MET_BIB_INFO.equals(entry.getValue())) {
            allBibInfos.addAll(Arrays.asList(entry.getKey().split(" ")));
        }
    }
    entries.put(StringUtils.join(allBibInfos, " "), BxZoneLabel.MET_BIB_INFO);
    printlnVerbose("journalTitle: " + journalTitleString);
    printlnVerbose("journalPublisher: " + journalPublisherString);
    printlnVerbose("journalISSNPublisher: " + journalISSNString);
    printlnVerbose("articleType: " + articleTypeStrings);
    printlnVerbose("received: " + receivedDate);
    printlnVerbose("accepted: " + acceptedDate);
    printlnVerbose("pubdate: " + pubdateString);
    printlnVerbose("permissions: " + permissionsString);
    printlnVerbose("license: " + licenseString);
    printlnVerbose("title: " + titleString);
    printlnVerbose("abstract: " + abstractString);
    printlnVerbose("authorEmails: " + authorEmails);
    printlnVerbose("authorNames: " + authorNames);
    printlnVerbose("authorAff: " + authorAffiliations);
    printlnVerbose("authorNotes: " + notesString);
    printlnVerbose("editor: " + editors);
    printlnVerbose("keywords: " + keywordsString);
    printlnVerbose("DOI: " + doiString);
    printlnVerbose("volume: " + volumeString);
    printlnVerbose("issue: " + issueString);
    printlnVerbose("financial dis.: " + financialDisclosure);
    printlnVerbose("paragraphs: " + paragraphStrings);
    printlnVerbose("section titles: " + sectionTitles);
    printlnVerbose("tableBodies: " + tableBodies);
    printlnVerbose("tableCaptions: " + tableCaptions);
    printlnVerbose("tableFootnotes: " + tableFootnotes);
    printlnVerbose("figures: " + figureStrings);
    printlnVerbose("acknowledgement: " + acknowledgement);
    printlnVerbose("ref: " + refStrings.size() + " " + refStrings);
    SmithWatermanDistance smith = new SmithWatermanDistance(.1, 0.1);
    CosineDistance cos = new CosineDistance();
    //index: (zone,entry) — per-zone similarity scores against every labelled fragment
    List<List<LabelTrio>> swLabelSim = new ArrayList<List<LabelTrio>>(bxDocLen);
    // NOTE(review): cosLabProb is filled below but never read afterwards in this method.
    List<List<LabelTrio>> cosLabProb = new ArrayList<List<LabelTrio>>(bxDocLen);
    for (Integer i = 0; i < bxDocLen; ++i) {
        swLabelSim.add(new ArrayList<LabelTrio>());
        cosLabProb.add(new ArrayList<LabelTrio>());
    }
    //iterate over entries
    for (Entry<String, BxZoneLabel> entry : entries.entrySet()) {
        List<String> entryTokens = StringTools.tokenize(entry.getKey());
        printlnVerbose("--------------------");
        printlnVerbose(entry.getValue() + " " + entry.getKey() + "\n");
        //iterate over zones
        for (Integer zoneIdx = 0; zoneIdx < bxDocLen; ++zoneIdx) {
            BxZone curZone = bxDoc.asZones().get(zoneIdx);
            List<String> zoneTokens = StringTools.tokenize(StringTools
                    .removeOrphantSpaces(StringTools.cleanLigatures(curZone.toText().toLowerCase())));
            Double smithSim;
            Double cosSim;
            if (curZone.toText().contains("www.biomedcentral.com")) {
                //ignore
                smithSim = 0.;
                cosSim = 0.;
            } else {
                smithSim = smith.compare(entryTokens, zoneTokens);
                cosSim = cos.compare(entryTokens, zoneTokens);
            }
            printlnVerbose(smithSim + " " + bxDoc.asZones().get(zoneIdx).toText() + "\n\n");
            swLabelSim.get(zoneIdx).add(new LabelTrio(entry.getValue(), entryTokens, smithSim));
            cosLabProb.get(zoneIdx).add(new LabelTrio(entry.getValue(), entryTokens, cosSim));
        }
    }
    printlnVerbose("===========================");
    for (BxPage page : bxDoc.getPages()) {
        for (BxZone zone : page.getZones()) {
            Integer zoneIdx = bxDoc.asZones().indexOf(zone);
            BxZone curZone = bxDoc.asZones().get(zoneIdx);
            String zoneText = StringTools.removeOrphantSpaces(curZone.toText().toLowerCase());
            List<String> zoneTokens = StringTools.tokenize(zoneText);
            Boolean valueSet = false;
            // Heuristic 1: best length-normalized alignment, accepted only when the zone
            // and entry are of comparable length and the normalized score exceeds 0.7.
            Collections.sort(swLabelSim.get(zoneIdx), new Comparator<LabelTrio>() {
                @Override
                public int compare(LabelTrio t1, LabelTrio t2) {
                    Double simDif = t1.alignment / t1.entryTokens.size()
                            - t2.alignment / t2.entryTokens.size();
                    if (Math.abs(simDif) < 0.0001) {
                        return t2.entryTokens.size() - t1.entryTokens.size();
                    }
                    if (simDif > 0) {
                        return 1;
                    } else {
                        return -1;
                    }
                }
            });
            Collections.reverse(swLabelSim.get(zoneIdx));
            List<String> entryTokens = swLabelSim.get(zoneIdx).get(0).entryTokens;
            if (Math.max(zoneTokens.size(), entryTokens.size()) > 0
                    && Math.min(zoneTokens.size(), entryTokens.size())
                            / Math.max(zoneTokens.size(), (double) entryTokens.size()) > 0.7
                    && swLabelSim.get(zoneIdx).get(0).alignment / entryTokens.size() > 0.7) {
                curZone.setLabel(swLabelSim.get(zoneIdx).get(0).label);
                valueSet = true;
                printVerbose("0 ");
            }
            // Heuristic 2: best raw alignment normalized by zone length.
            if (!valueSet) {
                Collections.sort(swLabelSim.get(zoneIdx), new Comparator<LabelTrio>() {
                    @Override
                    public int compare(LabelTrio t1, LabelTrio t2) {
                        Double simDif = t1.alignment - t2.alignment;
                        if (Math.abs(simDif) < 0.0001) {
                            return t2.entryTokens.size() - t1.entryTokens.size();
                        }
                        if (simDif > 0) {
                            return 1;
                        } else {
                            return -1;
                        }
                    }
                });
                Collections.reverse(swLabelSim.get(zoneIdx));
                printlnVerbose("-->" + swLabelSim.get(zoneIdx).get(0).alignment / zoneTokens.size());
                if (swLabelSim.get(zoneIdx).get(0).alignment / zoneTokens.size() > 0.5) {
                    curZone.setLabel(swLabelSim.get(zoneIdx).get(0).label);
                    valueSet = true;
                    printVerbose("1 ");
                }
            }
            // Heuristic 3: accumulate normalized alignment per label and take the best.
            if (!valueSet) {
                Map<BxZoneLabel, Double> cumulated = new EnumMap<BxZoneLabel, Double>(
                        BxZoneLabel.class);
                for (LabelTrio trio : swLabelSim.get(zoneIdx)) {
                    if (cumulated.containsKey(trio.label)) {
                        cumulated.put(trio.label, cumulated.get(trio.label)
                                + trio.alignment / Math.max(zoneTokens.size(), trio.entryTokens.size()));
                    } else {
                        cumulated.put(trio.label,
                                trio.alignment / Math.max(zoneTokens.size(), trio.entryTokens.size()));
                    }
                }
                Double max = Double.NEGATIVE_INFINITY;
                BxZoneLabel bestLabel = null;
                for (Entry<BxZoneLabel, Double> entry : cumulated.entrySet()) {
                    if (entry.getValue() > max) {
                        max = entry.getValue();
                        bestLabel = entry.getKey();
                    }
                }
                if (max >= 0.5) {
                    curZone.setLabel(bestLabel);
                    printVerbose("2 ");
                    valueSet = true;
                }
            }
            // Heuristic 4: token-overlap counting (exact, then punctuation-stripped).
            if (!valueSet) {
                Collections.sort(swLabelSim.get(zoneIdx), new Comparator<LabelTrio>() {
                    @Override
                    public int compare(LabelTrio t1, LabelTrio t2) {
                        Double simDif = t1.alignment / t1.entryTokens.size()
                                - t2.alignment / t2.entryTokens.size();
                        if (Math.abs(simDif) < 0.001) {
                            return t2.entryTokens.size() - t1.entryTokens.size();
                        }
                        if (simDif > 0) {
                            return 1;
                        } else {
                            return -1;
                        }
                    }
                });
                Collections.reverse(swLabelSim.get(zoneIdx));
                List<LabelTrio> l = swLabelSim.get(zoneIdx);
                BxZoneLabel best = null;
                int bestScore = 0;
                for (LabelTrio lt : l) {
                    int i = 0;
                    for (String zt : zoneTokens) {
                        if (lt.entryTokens.contains(zt)) {
                            i++;
                        }
                    }
                    if (i > bestScore && i > 1) {
                        best = lt.label;
                        bestScore = i;
                    }
                }
                if (best != null) {
                    curZone.setLabel(best);
                    valueSet = true;
                } else {
                    // retry with punctuation stripped from both sides
                    for (LabelTrio lt : l) {
                        int i = 0;
                        for (String zt : zoneTokens) {
                            for (String j : lt.entryTokens) {
                                if (zt.replaceAll("[^0-9a-zA-Z,;\\.!\\?]", "")
                                        .equals(j.replaceAll("[^0-9a-zA-Z,;\\.!\\?]", ""))) {
                                    i++;
                                    break;
                                }
                            }
                        }
                        if (i > bestScore && i > 1) {
                            best = lt.label;
                            bestScore = i;
                        }
                    }
                }
                if (best != null) {
                    curZone.setLabel(best);
                    valueSet = true;
                }
            }
            if (!valueSet) {
                curZone.setLabel(null);
            }
            printlnVerbose(zone.getLabel() + " " + zone.toText() + "\n");
        }
        // Final pass per page: infer labels for still-unlabelled zones from their
        // neighbours until a full iteration makes no progress.
        Map<BxZone, ZoneLocaliser> zoneLocMap = new HashMap<BxZone, ZoneLocaliser>();
        Set<BxZone> unlabeledZones = new HashSet<BxZone>();
        for (BxZone zone : page.getZones()) {
            if (zone.getLabel() == null) {
                unlabeledZones.add(zone);
                zoneLocMap.put(zone, new ZoneLocaliser(zone));
            }
        }
        Integer lastNumberOfUnlabeledZones;
        do {
            lastNumberOfUnlabeledZones = unlabeledZones.size();
            infereLabels(unlabeledZones, zoneLocMap);
            infereLabels(unlabeledZones, zoneLocMap);
        } while (lastNumberOfUnlabeledZones != unlabeledZones.size());
    }
    printlnVerbose("=>=>=>=>=>=>=>=>=>=>=>=>=>=");
    return bxDoc;
}
From source file: com.smartitengineering.cms.type.xml.XmlParser.java
/**
 * Parses the content co-processor definitions element (at most one occurrence of
 * {@code elementName} is allowed) into a map keyed by processing phase.
 *
 * @param contentTypeElement the parent content-type element
 * @param elementName the name of the co-processor container element
 * @return a map with an entry for each phase (READ/WRITE) that has co-processors defined;
 *         empty if the container element is absent
 * @throws IllegalStateException if more than one container element is present
 */
private Map<ContentProcessingPhase, List<MutableContentCoProcessorDef>> parseContentCoProcessorDefs(
        Element contentTypeElement, String elementName) {
    Elements elems = contentTypeElement.getChildElements(elementName, NAMESPACE);
    if (elems.size() > 1) {
        throw new IllegalStateException("More than one " + elementName);
    }
    Map<ContentProcessingPhase, List<MutableContentCoProcessorDef>> result =
            new EnumMap<ContentProcessingPhase, List<MutableContentCoProcessorDef>>(
                    ContentProcessingPhase.class);
    if (elems.size() > 0) {
        // The READ and WRITE phases are parsed identically, only element name and key differ.
        putPhaseCoProcessors(result, elems.get(0), CONTENT_CO_PROCESSOR_READ_PHASE,
                ContentProcessingPhase.READ);
        putPhaseCoProcessors(result, elems.get(0), CONTENT_CO_PROCESSOR_WRITE_PHASE,
                ContentProcessingPhase.WRITE);
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Returning co processors " + result);
    }
    return result;
}

/** Parses the co-processor list under {@code phaseElementName}, if present, and stores it under {@code phase}. */
private void putPhaseCoProcessors(Map<ContentProcessingPhase, List<MutableContentCoProcessorDef>> result,
        Element parent, String phaseElementName, ContentProcessingPhase phase) {
    Elements elements = parent.getChildElements(phaseElementName, NAMESPACE);
    if (elements != null && elements.size() > 0) {
        result.put(phase, parseContentCoProcessors(elements.get(0), CONTENT_CO_PROCESSORS));
    }
}
From source file: org.openecomp.sdc.asdctool.impl.DataMigration.java
/**
 * Creates the output directory and, inside it, one file per {@link Table}. Existing
 * files are reused unless {@code exportToEs} is set, in which case they are deleted
 * and recreated. Each file is named after the table it will be imported into.
 *
 * @param appConfigDir the base path under which the output dir will be created
 * @param exportToEs if true all the export files will be recreated
 * @return a map of tables to the files representing them, or null on any failure
 *         (null-on-failure is preserved for existing callers)
 */
private Map<Table, File> createOutPutFiles(String appConfigDir, boolean exportToEs) {
    Map<Table, File> result = new EnumMap<Table, File>(Table.class);
    File outputDir = new File(appConfigDir + "/output/");
    if (!createOutPutFolder(outputDir)) {
        return null;
    }
    for (Table table : Table.values()) {
        // File(parent, child) instead of string concatenation of paths.
        File file = new File(outputDir, table.getTableDescription().getTableName());
        if (exportToEs && file.exists()) {
            try {
                Files.delete(file.toPath());
            } catch (IOException e) {
                log.error("failed to delete output file " + file.getAbsolutePath(), e);
                return null;
            }
        }
        if (!file.exists()) {
            try {
                // createNewFile() returns false when the file already exists; since we just
                // checked it does not, false indicates a concurrent creation — treat as failure
                // instead of silently ignoring the return value.
                if (!file.createNewFile()) {
                    log.error("failed to create output file " + file.getAbsolutePath());
                    return null;
                }
            } catch (IOException e) {
                log.error("failed to create output file " + file.getAbsolutePath(), e);
                return null;
            }
        }
        result.put(table, file);
    }
    return result;
}
From source file: com.example.app.support.address.AddressParser.java
private static Map<AddressComponent, String> getAddrMap(Matcher m, Map<Integer, String> groupMap) { Map<AddressComponent, String> ret = new EnumMap<>(AddressComponent.class); for (int i = 1; i <= m.groupCount(); i++) { String name = groupMap.get(i); AddressComponent comp = valueOf(name); if (ret.get(comp) == null) { putIfNotNull(ret, comp, m.group(i)); }//w w w . j a v a2 s. c o m } return ret; }
From source file: com.ryan.ryanreader.reddit.prepared.RedditPreparedPost.java
// Builds the vertical side-toolbar for this post, containing one button per action
// the user has enabled in preferences. Icon variant doubles as a toggle indicator:
// an action that is currently active (upvoted/downvoted/saved/hidden) is drawn with
// a white background and the light icon; all others use the dark icon.
public VerticalToolbar generateToolbar(final Context context, final Fragment fragmentParent,
        final SideToolbarOverlay overlay) {
    final VerticalToolbar toolbar = new VerticalToolbar(context);

    // User-configured subset of actions to actually show.
    final EnumSet<Action> itemsPref = PrefsUtility.pref_menus_post_toolbar_items(context,
            PreferenceManager.getDefaultSharedPreferences(context));

    // Ordered list of all actions that may appear. When already viewing the comments,
    // offer a switch back to the link, and vice versa.
    final Action[] possibleItems = { Action.ACTION_MENU,
            fragmentParent instanceof CommentListingFragment ? Action.LINK_SWITCH : Action.COMMENTS_SWITCH,
            Action.UPVOTE, Action.DOWNVOTE, Action.SAVE, Action.HIDE, Action.REPLY, Action.EXTERNAL,
            Action.SAVE_IMAGE, Action.SHARE, Action.COPY, Action.USER_PROFILE, Action.PROPERTIES };

    // Per-action drawable lookup, dark theme. Rebuilt on every call.
    // TODO make static
    final EnumMap<Action, Integer> iconsDark = new EnumMap<Action, Integer>(Action.class);
    iconsDark.put(Action.ACTION_MENU, R.drawable.ic_action_overflow);
    iconsDark.put(Action.COMMENTS_SWITCH, R.drawable.ic_action_comments_dark);
    // Link-switch icon depends on whether the post links to an image or a page.
    iconsDark.put(Action.LINK_SWITCH,
            imageUrl != null ? R.drawable.ic_action_image_dark : R.drawable.ic_action_page_dark);
    iconsDark.put(Action.UPVOTE, R.drawable.action_upvote_dark);
    iconsDark.put(Action.DOWNVOTE, R.drawable.action_downvote_dark);
    iconsDark.put(Action.SAVE, R.drawable.ic_action_star_filled_dark);
    iconsDark.put(Action.HIDE, R.drawable.ic_action_cross_dark);
    iconsDark.put(Action.REPLY, R.drawable.ic_action_reply_dark);
    iconsDark.put(Action.EXTERNAL, R.drawable.ic_action_globe_dark);
    iconsDark.put(Action.SAVE_IMAGE, R.drawable.ic_action_save_dark);
    iconsDark.put(Action.SHARE, R.drawable.ic_action_share_dark);
    iconsDark.put(Action.COPY, R.drawable.ic_action_copy_dark);
    iconsDark.put(Action.USER_PROFILE, R.drawable.ic_action_person_dark);
    iconsDark.put(Action.PROPERTIES, R.drawable.ic_action_info_dark);

    // Per-action drawable lookup, light theme (used for "active" buttons below).
    final EnumMap<Action, Integer> iconsLight = new EnumMap<Action, Integer>(Action.class);
    iconsLight.put(Action.ACTION_MENU, R.drawable.ic_action_overflow);
    iconsLight.put(Action.COMMENTS_SWITCH, R.drawable.ic_action_comments_light);
    iconsLight.put(Action.LINK_SWITCH,
            imageUrl != null ? R.drawable.ic_action_image_light : R.drawable.ic_action_page_light);
    iconsLight.put(Action.UPVOTE, R.drawable.action_upvote_light);
    iconsLight.put(Action.DOWNVOTE, R.drawable.action_downvote_light);
    iconsLight.put(Action.SAVE, R.drawable.ic_action_star_filled_light);
    iconsLight.put(Action.HIDE, R.drawable.ic_action_cross_light);
    iconsLight.put(Action.REPLY, R.drawable.ic_action_reply_light);
    iconsLight.put(Action.EXTERNAL, R.drawable.ic_action_globe_light);
    iconsLight.put(Action.SAVE_IMAGE, R.drawable.ic_action_save_light);
    iconsLight.put(Action.SHARE, R.drawable.ic_action_share_light);
    iconsLight.put(Action.COPY, R.drawable.ic_action_copy_light);
    iconsLight.put(Action.USER_PROFILE, R.drawable.ic_action_person_light);
    iconsLight.put(Action.PROPERTIES, R.drawable.ic_action_info_light);

    for (final Action action : possibleItems) {

        // SAVE_IMAGE only makes sense when the post actually has an image URL.
        if (action == Action.SAVE_IMAGE && imageUrl == null)
            continue;

        if (itemsPref.contains(action)) {

            final FlatImageButton ib = new FlatImageButton(context);

            final int buttonPadding = General.dpToPixels(context, 10);
            ib.setPadding(buttonPadding, buttonPadding, buttonPadding, buttonPadding);

            // Invert colours for actions that are currently "on" for this post.
            if (action == Action.UPVOTE && isUpvoted() || action == Action.DOWNVOTE && isDownvoted()
                    || action == Action.SAVE && isSaved() || action == Action.HIDE && isHidden()) {

                ib.setBackgroundColor(Color.WHITE);
                ib.setImageResource(iconsLight.get(action));

            } else {
                ib.setImageResource(iconsDark.get(action));
                // TODO highlight on click
            }

            ib.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {

                    // Toggleable actions map to their inverse when already active
                    // (e.g. tapping UPVOTE on an upvoted post removes the vote).
                    final Action actionToTake;

                    switch (action) {
                    case UPVOTE:
                        actionToTake = isUpvoted() ? Action.UNVOTE : Action.UPVOTE;
                        break;

                    case DOWNVOTE:
                        actionToTake = isDownvoted() ? Action.UNVOTE : Action.DOWNVOTE;
                        break;

                    case SAVE:
                        actionToTake = isSaved() ? Action.UNSAVE : Action.SAVE;
                        break;

                    case HIDE:
                        actionToTake = isHidden() ? Action.UNHIDE : Action.HIDE;
                        break;

                    default:
                        actionToTake = action;
                        break;
                    }

                    onActionMenuItemSelected(RedditPreparedPost.this, fragmentParent, actionToTake);

                    // Dismiss the toolbar overlay once the action has been dispatched.
                    overlay.hide();
                }
            });

            toolbar.addItem(ib);
        }
    }

    return toolbar;
}
From source file: org.openecomp.sdc.be.servlets.AbstractValidationsServlet.java
/**
 * Validates the uploaded resource's type. When the type is missing or not a known
 * {@link ResourceTypeEnum} value, the failure is audited and an INVALID_CONTENT
 * error response is stored in the wrapper; otherwise the wrapper is left untouched.
 *
 * @param responseWrapper    receives the error response on validation failure
 * @param uploadResourceInfo upload metadata whose resource type is checked
 * @param user               the user performing the import, for the audit record
 */
private void validateResourceType(Wrapper<Response> responseWrapper, UploadResourceInfo uploadResourceInfo,
        User user) {
    String resourceType = uploadResourceInfo.getResourceType();
    boolean validType = resourceType != null && ResourceTypeEnum.contains(resourceType);
    if (!validType) {
        ResponseFormat responseFormat = getComponentsUtils().getResponseFormat(ActionStatus.INVALID_CONTENT);
        Response errorResponse = buildErrorResponse(responseFormat);
        // Record the failed import attempt, tagged with the resource's name.
        EnumMap<AuditingFieldsKeysEnum, Object> additionalParam =
                new EnumMap<AuditingFieldsKeysEnum, Object>(AuditingFieldsKeysEnum.class);
        additionalParam.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME, uploadResourceInfo.getName());
        getComponentsUtils().auditResource(responseFormat, user, null, "", "", AuditingActionEnum.IMPORT_RESOURCE,
                additionalParam);
        responseWrapper.setInnerElement(errorResponse);
    }
}
From source file: org.talend.repository.RepositoryService.java
@Override public String exportPigudf(IProcessor processor, Property property, boolean isExport) throws ProcessorException { // build java project CorePlugin.getDefault().getRunProcessService().buildJavaProject(); Map<ExportChoice, Object> exportChoiceMap = new EnumMap<ExportChoice, Object>(ExportChoice.class); exportChoiceMap.put(ExportChoice.needPigudf, true); ProcessItem processItem = (ProcessItem) property.getItem(); ExportFileResource fileResource = new ExportFileResource(processItem, property.getLabel()); ExportFileResource[] exportFileResources = new ExportFileResource[] { fileResource }; IContext context = processor.getContext(); String contextName = "Default";//$NON-NLS-1$ if (context != null) { contextName = context.getName(); }/*from w ww .j a v a 2 s .c o m*/ JobScriptsManager jobScriptsManager = JobScriptsManagerFactory.createManagerInstance(exportChoiceMap, contextName, JobScriptsManager.ALL_ENVIRONMENTS, -1, -1, JobExportType.POJO); URL url = jobScriptsManager.getExportPigudfResources(exportFileResources); if (url == null) { return null; } File file = new File(url.getFile()); // String librariesPath = LibrariesManagerUtils.getLibrariesPath(ECodeLanguage.JAVA) + "/"; String librariesPath = processor.getCodeProject().getLocation() + "/lib/"; //$NON-NLS-1$ String targetFileName = JobJavaScriptsManager.USERPIGUDF_JAR; if (!isExport) { targetFileName = property.getLabel() + '_' + property.getVersion() + '_' + JobJavaScriptsManager.USERPIGUDF_JAR; } File target = new File(librariesPath + targetFileName); try { FilesUtils.copyFile(file, target); } catch (IOException e) { throw new ProcessorException(e.getMessage()); } return targetFileName; }