List of usage examples for java.util Collection toArray
default <T> T[] toArray(IntFunction<T[]> generator)
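None of the examples below use this generator-based overload (added in Java 11); they all call the older toArray() or toArray(T[]) forms. For reference, here is a minimal, self-contained sketch of the generator form — the class name and list contents are illustrative only, not taken from the examples:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class ToArrayGeneratorSketch {
    public static void main(String[] args) {
        Collection<String> names = List.of("alpha", "beta", "gamma");
        // The IntFunction receives the required array length and supplies the destination array;
        // an array-constructor reference is the usual argument.
        String[] asArray = names.toArray(String[]::new);
        System.out.println(Arrays.toString(asArray)); // [alpha, beta, gamma]
    }
}

The default implementation simply delegates to toArray(generator.apply(0)), so it behaves like toArray(new String[0]) while expressing the element type through a constructor reference.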
From source file:com.puppetlabs.geppetto.pp.dsl.contentassist.PPProposalsGenerator.java
/**
 * Computes attribute proposals where the class/definition name must match exactly, but where
 * parameters are processed with fuzzy logic.
 *
 * @param currentName
 * @param descs
 * @param searchPath
 *            TODO
 * @return
 */
public String[] computeAttributeProposals(final QualifiedName currentName, Collection<IEObjectDescription> descs,
        PPSearchPath searchPath) {
    if (currentName.getSegmentCount() < 2)
        return new String[0];

    final DoubleMetaphone encoder = new DoubleMetaphone();
    final String metaphoneName = encoder.encode(currentName.getLastSegment());
    Collection<String> proposals = generateAttributeCandidates(currentName, descs, searchPath);

    // Propose all candidates, but sort them by phonetic likeness to the requested name.
    String[] result = new String[proposals.size()];
    proposals.toArray(result);
    Arrays.sort(result, new PronunciationComparator(encoder, metaphoneName));
    return result;
}
From source file:de.tudarmstadt.ukp.dariah.pipeline.DARIAHWriter.java
private void convert(JCas aJCas, PrintWriter aOut) {
    int paragraphId = 0, sentenceId = 0, tokenId = 0;

    aOut.printf("%s\n", StringUtils.join(getHeader(), "\t"));

    Map<Token, Collection<NamedEntity>> neCoveringMap = JCasUtil.indexCovering(aJCas, Token.class,
            NamedEntity.class);
    Map<Token, Collection<DirectSpeech>> directSpeechCoveringMap = JCasUtil.indexCovering(aJCas, Token.class,
            DirectSpeech.class);

    for (Paragraph para : select(aJCas, Paragraph.class)) {
        for (Sentence sentence : selectCovered(Sentence.class, para)) {
            HashMap<Token, Row> ctokens = new LinkedHashMap<Token, Row>();

            // Tokens
            List<Token> tokens = selectCovered(Token.class, sentence);

            // Check if we should try to include the FEATS in output
            List<Morpheme> morphology = selectCovered(Morpheme.class, sentence);
            boolean useFeats = tokens.size() == morphology.size();

            // Parsing information
            String[] parseFragments = null;
            List<ROOT> root = selectCovered(ROOT.class, sentence);
            if (root.size() == 1) {
                PennTreeNode rootNode = PennTreeUtils.convertPennTree(root.get(0));
                if ("ROOT".equals(rootNode.getLabel())) {
                    rootNode.setLabel("TOP");
                }
                parseFragments = toPrettyPennTree(rootNode);
            }
            boolean useParseFragements = (parseFragments != null && parseFragments.length == tokens.size());

            for (int i = 0; i < tokens.size(); i++) {
                Row row = new Row();
                row.paragraphId = paragraphId;
                row.sentenceId = sentenceId;
                row.tokenId = tokenId;
                row.token = tokens.get(i);

                if (useParseFragements) {
                    row.parseFragment = parseFragments[i];
                }

                if (useFeats) {
                    row.morphology = morphology.get(i);
                }

                // Named entities: take the first entity covering the token, if any.
                Collection<NamedEntity> ne = neCoveringMap.get(row.token);
                if (ne.size() > 0)
                    row.ne = ne.toArray(new NamedEntity[0])[0];

                Collection<DirectSpeech> ds = directSpeechCoveringMap.get(row.token);
                if (ds.size() > 0)
                    row.directSpeech = ds.toArray(new DirectSpeech[0])[0];

                ctokens.put(row.token, row);
                tokenId++;
            }

            // Dependencies
            for (Dependency rel : selectCovered(Dependency.class, sentence)) {
                ctokens.get(rel.getDependent()).deprel = rel;
            }

            // Write sentence
            for (Row row : ctokens.values()) {
                String[] output = getData(ctokens, row);
                aOut.printf("%s\n", StringUtils.join(output, "\t"));
            }

            sentenceId++;
        }
        paragraphId++;
    }
}
From source file:com.googlecode.l10nmavenplugin.validators.property.SpellCheckValidator.java
/**
 * WARN in case of spellcheck error using property locale.
 */
public int validate(Property property, List<L10nReportItem> reportItems) {
    Locale locale = property.getLocale();
    if (locale == null) {
        // Case of root bundle
        locale = Locale.ENGLISH;
    }

    SpellChecker spellChecker = spellCheckerLocaleRepository.getSpellChecker(locale);
    if (spellChecker != null) {
        ListSpellCheckErrorListener listener = new ListSpellCheckErrorListener(spellChecker);
        spellChecker.addSpellCheckListener(listener);

        String message = property.getMessage();
        spellChecker.checkSpelling(new StringWordTokenizer(message));

        Collection<SpellCheckError> errors = listener.getSpellCheckErrors();

        // The message with errors replaced by suggestions
        String correction = message;

        // Start from the last errors, so that error positions remain valid
        SpellCheckError[] errs = errors.toArray(new SpellCheckError[errors.size()]);
        for (int i = errs.length - 1; i >= 0; i--) {
            SpellCheckError error = errs[i];
            if (error.getSuggestion() != null) {
                int pos = error.getPosition();
                correction = StringUtils.overlay(correction, error.getSuggestion(), pos,
                        pos + error.getError().length());
            }
        }

        if (errors.size() > 0) {
            StringBuffer sb = new StringBuffer();
            sb.append("Spellcheck error on word(s): ").append(errors.toString()).append(" and locale <")
                    .append(locale).append(">.");
            if (correction != null) {
                sb.append(" Suggested correction: [").append(correction).append("]");
            }

            L10nReportItem reportItem = new L10nReportItem(Type.SPELLCHECK, sb.toString(), property, null);
            reportItems.add(reportItem);
            logger.log(reportItem);
        }
        spellChecker.removeSpellCheckListener(listener);
    }
    return 0;
}
From source file:de.tuberlin.uebb.jdae.simulation.DefaultSimulationRuntime.java
@Override
public ExecutableDAE causalise(LoadableModel model, SimulationOptions options) {
    final Reduction reduction = reduce(model.equations());
    final Collection<ContinuousEvent> events = model.events(reduction.ctxt);

    return causalise(reduction,
            Lists.transform(ImmutableList.copyOf(model.initialEquations()),
                    GlobalEquation.bindFrom(reduction.ctxt)),
            model.initials(reduction.ctxt),
            events.toArray(new ContinuousEvent[events.size()]),
            options);
}
From source file:it.tidalwave.northernwind.util.test.TestHelper.java
/*******************************************************************************************************************
 *
 ******************************************************************************************************************/
@Nonnull
private ApplicationContext createSpringContext(final @Nonnull Map<String, Object> properties,
        final @Nonnull Collection<String> configurationFiles) {
    configurationFiles.add(test.getClass().getSimpleName() + "/TestBeans.xml");

    if (properties.isEmpty()) {
        return new ClassPathXmlApplicationContext(configurationFiles.toArray(new String[0]));
    } else {
        final StandardEnvironment environment = new StandardEnvironment();
        environment.getPropertySources().addFirst(new MapPropertySource("test", properties));

        final GenericXmlApplicationContext context = new GenericXmlApplicationContext();
        context.setEnvironment(environment);
        context.load(configurationFiles.toArray(new String[0]));
        context.refresh();

        return context;
    }
}
From source file:de.cismet.cids.custom.utils.vermessungsunterlagen.tasks.VermUntTaskNivPUebersicht.java
@Override
public void performTask() throws VermessungsunterlagenTaskException {
    final File src = new File(VermessungsunterlagenHelper.getInstance().getProperties().getAbsPathPdfNivP());
    final File dst = new File(getPath() + "/" + src.getName());
    if (!dst.exists()) {
        try {
            FileUtils.copyFile(src, dst);
        } catch (final Exception ex) {
            final String message = "Beim Kopieren des NivP-Informations-PDFs kam es zu einem unerwarteten Fehler.";
            throw new VermessungsunterlagenTaskException(getType(), message, ex);
        }
    }

    final GeometryFactory geometryFactory = new GeometryFactory();
    final Collection<Geometry> geometries = new ArrayList<Geometry>(getNivPoints().size());
    for (final CidsBean nivPoint : getNivPoints()) {
        final Geometry geom = (Geometry) nivPoint.getProperty("geometrie.geo_field");
        geometries.add(geom);
    }

    final Envelope envelope = geometryFactory.createGeometryCollection(geometries.toArray(new Geometry[0]))
            .getEnvelopeInternal();
    final Coordinate center = envelope.centre();

    final String landparcelcode = (String) flurstuecke.iterator().next().getProperty("alkis_id");
    final AlkisProductDescription product = VermessungsunterlagenHelper
            .determineAlkisProduct(String.valueOf("WUP-Kommunal"), String.valueOf("NivP-Übersicht"), envelope);

    InputStream in = null;
    OutputStream out = null;
    try {
        final URL url = ServerAlkisProducts.productKarteUrl(landparcelcode, product.getCode(),
                Double.valueOf(0).intValue(), Double.valueOf(center.x).intValue(),
                Double.valueOf(center.y).intValue(), product.getMassstab(), product.getMassstabMin(),
                product.getMassstabMax(), "", auftragsnummer, false, null);
        final String filename = product.getCode() + "." + landparcelcode.replace("/", "--")
                + ((flurstuecke.size() > 1) ? ".ua" : "") + ".pdf";

        in = doGetRequest(url);
        out = new FileOutputStream(getPath() + "/" + filename);
        VermessungsunterlagenHelper.downloadStream(in, out);
    } catch (final Exception ex) {
        final String message = "Beim Herunterladen der NIVP-Übersicht kam es zu einem unerwarteten Fehler.";
        throw new VermessungsunterlagenTaskException(getType(), message, ex);
    } finally {
        VermessungsunterlagenHelper.closeStream(in);
        VermessungsunterlagenHelper.closeStream(out);
    }
}
From source file:com.smartmarmot.common.Configurator.java
private static Query[] getQueries(String parameter, Properties _propsq) throws Exception {
    try {
        StringTokenizer stq = new StringTokenizer(_propsq.getProperty(parameter), Constants.DELIMITER);
        String[] QueryLists = new String[stq.countTokens()];
        int count = 0;
        while (stq.hasMoreTokens()) {
            String token = stq.nextToken().toString().trim();
            QueryLists[count] = token;
            count++;
        }

        Collection<Query> Queries = new ArrayList<Query>();
        for (int i = 0; i < QueryLists.length; i++) {
            try {
                Query q = getQueryProperties(_propsq, QueryLists[i]);
                Queries.add(q);
            } catch (Exception e1) {
                SmartLogger.logThis(Level.ERROR,
                        "Error on Configurator on reading query " + QueryLists[i] + e1);
                SmartLogger.logThis(Level.INFO, "Query " + QueryLists[i] + " skipped due to error " + e1);
            }
        }

        Query[] queries = (Query[]) Queries.toArray(new Query[0]);
        return queries;
    } catch (Exception ex) {
        SmartLogger.logThis(Level.ERROR,
                "Error on Configurator on reading properties file " + _propsq.toString() + " getQueries("
                        + parameter + "," + _propsq.toString() + ") " + ex.getMessage());
        return null;
    }
}
From source file:com.adito.security.DefaultUserDatabase.java
public final User[] getUsersInRole(Role role) throws UserDatabaseException {
    Collection<User> usersWithRole = new ArrayList<User>();
    for (User user : allUsers()) {
        if (user.memberOf(role)) {
            usersWithRole.add(user);
        }
    }
    return (User[]) usersWithRole.toArray(new User[usersWithRole.size()]);
}
From source file:com.microsoft.tfs.core.clients.build.internal.soapextensions.BuildInformation.java
/**
 * Creates a top-level build information node collection for a BuildDetail
 * object and initializes it from an array of BuildInformation objects.
 *
 * @param build
 *        The owner of this collection.
 * @param informationNodes
 *        The BuildInformation objects from which the tree is initialized.
 */
public BuildInformation(final BuildDetail build, final BuildInformationNode[] informationNodes) {
    this(build, (BuildInformationNode) null);

    // No information nodes - nothing to do.
    if (informationNodes.length > 0) {
        final Map<Integer, Map<Integer, BuildInformationNode>> nodeParentDict =
                new HashMap<Integer, Map<Integer, BuildInformationNode>>();
        Map<Integer, BuildInformationNode> children;

        for (final BuildInformationNode node : informationNodes) {
            // Add node to parent node dictionary.
            children = nodeParentDict.get(node.getParentID());
            if (children == null) {
                children = new HashMap<Integer, BuildInformationNode>();
                nodeParentDict.put(node.getParentID(), children);
            }

            node.setBuild(build);

            if (!children.containsKey(node.getID())) {
                children.put(node.getID(), node);
            } else {
                log.warn("Duplicate information nodes present in a build!"); //$NON-NLS-1$
            }
        }

        // Build up as much of the tree structure as we can manage.
        for (final BuildInformationNode node : informationNodes) {
            children = nodeParentDict.get(node.getID());
            if (children != null) {
                final BuildInformation theChildren = (BuildInformation) node.getChildren();
                final Collection<BuildInformationNode> values = children.values();
                final BuildInformationNode[] array = values.toArray(new BuildInformationNode[values.size()]);
                Arrays.sort(array);

                for (final BuildInformationNode child : array) {
                    child.setParent(node);
                    child.setOwner(theChildren);
                    theChildren.add(child);
                }
            }
        }

        // Add any unparented nodes as top level nodes.
        for (final BuildInformationNode node : informationNodes) {
            if (node.getParent() == null) {
                node.setOwner(this);
                add(node);
            }
        }
    }
}
From source file:com.liferay.portal.search.elasticsearch.ElasticsearchIndexSearcher.java
protected Document processSearchHit(SearchHit hit) {
    Document document = new DocumentImpl();

    Map<String, SearchHitField> searchHitFields = hit.getFields();

    for (Map.Entry<String, SearchHitField> entry : searchHitFields.entrySet()) {
        SearchHitField searchHitField = entry.getValue();

        Collection<Object> fieldValues = searchHitField.getValues();

        Field field = new Field(entry.getKey(),
                ArrayUtil.toStringArray(fieldValues.toArray(new Object[fieldValues.size()])));

        document.add(field);
    }

    return document;
}
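The examples above size the destination array in two ways: with a zero-length array (e.g. new Geometry[0]) or an array pre-sized to the collection (e.g. new SpellCheckError[errors.size()]). Both are valid: toArray(T[]) fills the supplied array if it is large enough and otherwise allocates a new one of the same runtime type. A minimal sketch contrasting the two idioms — the contents are illustrative only:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class ToArraySizingSketch {
    public static void main(String[] args) {
        Collection<String> words = List.of("one", "two", "three");

        // Zero-length array: toArray allocates and returns a new String[3].
        String[] fromEmpty = words.toArray(new String[0]);

        // Pre-sized array: the supplied String[3] is filled and returned as-is.
        String[] fromSized = words.toArray(new String[words.size()]);

        System.out.println(Arrays.toString(fromEmpty)); // [one, two, three]
        System.out.println(Arrays.toString(fromSized)); // [one, two, three]
    }
}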