List of usage examples for java.util.LinkedHashSet.toArray
<T> T[] toArray(T[] a);
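Before the real-world examples, a minimal self-contained sketch (not taken from any of the source files below) of the basic contract: a LinkedHashSet iterates in insertion order, so toArray returns the elements in the order they were added, and passing a zero-length typed array lets the set allocate a correctly sized String[] for you.

import java.util.LinkedHashSet;

public class ToArrayDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<>();
        set.add("GET");
        set.add("POST");
        set.add("GET"); // duplicate, silently ignored by the set

        // Zero-length array: toArray allocates a String[] of the right size.
        String[] methods = set.toArray(new String[0]);

        // Elements come back in insertion order: GET, POST
        System.out.println(String.join(", ", methods));
    }
}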
From source file:org.rapidcontext.core.type.WebService.java
/**
 * Returns the HTTP methods supported for the specified request.
 * The OPTIONS method is always supported and the HEAD method is
 * automatically added if GET is supported.
 *
 * @param request        the request to check
 *
 * @return the array of HTTP method names supported
 */
public String[] methods(Request request) {
    LinkedHashSet set = new LinkedHashSet();
    set.add(METHOD.OPTIONS);
    set.addAll(Arrays.asList(methodsImpl(request)));
    for (int i = 0; i < matchers.size(); i++) {
        WebMatcher m = (WebMatcher) matchers.get(i);
        if (m.method() != null && m.match(request) > 0) {
            set.add(m.method());
        }
    }
    if (set.contains(METHOD.GET)) {
        set.add(METHOD.HEAD);
    }
    return (String[]) set.toArray(new String[set.size()]);
}
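The method above uses a raw LinkedHashSet, which forces the (String[]) cast on the result. A minimal sketch of the same ordered-accumulation pattern with generics, where no cast is needed; the class and the literal method names here are hypothetical and not part of the RapidContext code:

import java.util.Arrays;
import java.util.LinkedHashSet;

public class OrderedMethods {
    // Collects HTTP method names in a fixed, de-duplicated order.
    public static String[] methods(String[] supported) {
        LinkedHashSet<String> set = new LinkedHashSet<>();
        set.add("OPTIONS");                   // always supported
        set.addAll(Arrays.asList(supported)); // implementation-specific methods
        if (set.contains("GET")) {
            set.add("HEAD");                  // HEAD is implied by GET
        }
        // With a typed set, toArray(T[]) already returns String[].
        return set.toArray(new String[set.size()]);
    }

    public static void main(String[] args) {
        System.out.println(String.join(", ", methods(new String[] { "GET", "POST" })));
        // prints: OPTIONS, GET, POST, HEAD
    }
}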
From source file:org.springframework.cloud.dataflow.app.launcher.ModuleLauncher.java
public void launchAggregatedModules(List<ModuleLaunchRequest> moduleLaunchRequests,
        Map<String, String> aggregateArgs) {
    try {
        List<String> mainClassNames = new ArrayList<>();
        LinkedHashSet<URL> jarURLs = new LinkedHashSet<>();
        List<String> seenArchives = new ArrayList<>();
        final List<String[]> arguments = new ArrayList<>();
        final ClassLoader classLoader;
        if (!(aggregateArgs.containsKey(EXCLUDE_DEPENDENCIES_ARG)
                || aggregateArgs.containsKey(INCLUDE_DEPENDENCIES_ARG))) {
            for (ModuleLaunchRequest moduleLaunchRequest : moduleLaunchRequests) {
                Resource resource = resolveModule(moduleLaunchRequest.getModule());
                JarFileArchive jarFileArchive = new JarFileArchive(resource.getFile());
                jarURLs.add(jarFileArchive.getUrl());
                for (Archive archive : jarFileArchive.getNestedArchives(ArchiveMatchingEntryFilter.FILTER)) {
                    // avoid duplication based on unique JAR names
                    String urlAsString = archive.getUrl().toString();
                    String jarNameWithExtension = urlAsString.substring(0, urlAsString.lastIndexOf("!/"));
                    String jarNameWithoutExtension = jarNameWithExtension
                            .substring(jarNameWithExtension.lastIndexOf("/") + 1);
                    if (!seenArchives.contains(jarNameWithoutExtension)) {
                        seenArchives.add(jarNameWithoutExtension);
                        jarURLs.add(archive.getUrl());
                    }
                }
                mainClassNames.add(jarFileArchive.getMainClass());
                arguments.add(toArgArray(moduleLaunchRequest.getArguments()));
            }
            classLoader = ClassloaderUtils.createModuleClassloader(jarURLs.toArray(new URL[jarURLs.size()]));
        }
        else {
            // First, resolve modules and extract main classes - while slightly less efficient than just
            // doing the same processing after resolution, this ensures that module artifacts are processed
            // correctly for extracting their main class names. It is not possible in the general case to
            // identify, after resolution, whether a resource represents a module artifact which was part of
            // the original request or not. We will include the first module as root and the next as direct
            // dependencies
            Coordinates root = null;
            ArrayList<Coordinates> includeCoordinates = new ArrayList<>();
            for (ModuleLaunchRequest moduleLaunchRequest : moduleLaunchRequests) {
                Coordinates moduleCoordinates = toCoordinates(moduleLaunchRequest.getModule());
                if (root == null) {
                    root = moduleCoordinates;
                }
                else {
                    includeCoordinates.add(toCoordinates(moduleLaunchRequest.getModule()));
                }
                Resource moduleResource = resolveModule(moduleLaunchRequest.getModule());
                JarFileArchive moduleArchive = new JarFileArchive(moduleResource.getFile());
                mainClassNames.add(moduleArchive.getMainClass());
                arguments.add(toArgArray(moduleLaunchRequest.getArguments()));
            }
            for (String include : StringUtils
                    .commaDelimitedListToStringArray(aggregateArgs.get(INCLUDE_DEPENDENCIES_ARG))) {
                includeCoordinates.add(toCoordinates(include));
            }
            // Resolve all artifacts - since modules have been specified as direct dependencies, they will
            // take precedence in the resolution order, ensuring that the already resolved artifacts will be
            // returned as part of the response.
            Resource[] libraries = moduleResolver.resolve(root,
                    includeCoordinates.toArray(new Coordinates[includeCoordinates.size()]),
                    StringUtils.commaDelimitedListToStringArray(aggregateArgs.get(EXCLUDE_DEPENDENCIES_ARG)));
            for (Resource library : libraries) {
                jarURLs.add(library.getURL());
            }
            classLoader = new URLClassLoader(jarURLs.toArray(new URL[jarURLs.size()]));
        }
        final List<Class<?>> mainClasses = new ArrayList<>();
        for (String mainClass : mainClassNames) {
            mainClasses.add(ClassUtils.forName(mainClass, classLoader));
        }
        Runnable moduleAggregatorRunner = new ModuleAggregatorRunner(classLoader, mainClasses,
                toArgArray(aggregateArgs), arguments);
        Thread moduleAggregatorRunnerThread = new Thread(moduleAggregatorRunner);
        moduleAggregatorRunnerThread.setContextClassLoader(classLoader);
        moduleAggregatorRunnerThread.setName(MODULE_AGGREGATOR_RUNNER_THREAD_NAME);
        moduleAggregatorRunnerThread.start();
    }
    catch (Exception e) {
        throw new RuntimeException("failed to start aggregated modules: "
                + StringUtils.collectionToCommaDelimitedString(moduleLaunchRequests), e);
    }
}
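Both branches above end the same way: the de-duplicated, insertion-ordered URL set is converted with toArray(new URL[jarURLs.size()]) and handed to a class loader. A stripped-down sketch of that pattern, with placeholder jar paths; a URLClassLoader searches its URLs in array order, which is why the set's insertion order matters here:

import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Paths;
import java.util.LinkedHashSet;

public class ClasspathBuilder {
    public static void main(String[] args) throws Exception {
        LinkedHashSet<URL> jarURLs = new LinkedHashSet<>();
        // Duplicates collapse, while first-seen classpath order is preserved.
        jarURLs.add(Paths.get("lib/module-a.jar").toUri().toURL());
        jarURLs.add(Paths.get("lib/shared.jar").toUri().toURL());
        jarURLs.add(Paths.get("lib/shared.jar").toUri().toURL()); // ignored

        // URLClassLoader wants an array, not a collection.
        URLClassLoader classLoader = new URLClassLoader(jarURLs.toArray(new URL[0]));
        System.out.println(jarURLs.size() + " unique entries on the class path");
        classLoader.close();
    }
}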
From source file:pt.webdetails.cda.utils.mondrian.CompactBandedMDXTableModel.java
public CompactBandedMDXTableModel(final Result resultSet, final int rowLimit) {
    if (resultSet == null) {
        throw new NullPointerException("ResultSet returned was null");
    }
    this.resultSet = resultSet;
    // rowcount is the product of all axis-sizes. If an axis contains more than one member, then
    // Mondrian already performs the crossjoin for us.
    // column count is the count of all hierarchies of all axes.
    final Axis[] axes = this.resultSet.getAxes();

    this.rowCount = 0;
    this.axesSize = new int[axes.length];
    final int[] axesMembers = new int[axes.length];
    @SuppressWarnings("unchecked")
    final List<Dimension>[] dimensionsForMembersPerAxis = new List[axes.length];
    @SuppressWarnings("unchecked")
    final List<Integer>[] membersPerAxis = new List[axes.length];

    // process the column axis first ..
    if (axesSize.length > 0) {
        final Axis axis = axes[0];
        final List<Position> positions = axis.getPositions();
        axesSize[0] = positions.size();
        if (positions.isEmpty()) {
            noMeasures = true;
        }
    }

    // Axis contains (zero or more) positions, which contains (zero or more) members
    for (int axesIndex = axes.length - 1; axesIndex >= 1; axesIndex -= 1) {
        final Axis axis = axes[axesIndex];
        final List<Position> positions = axis.getPositions();

        axesSize[axesIndex] = positions.size();
        if (positions.isEmpty()) {
            noMeasures = true;
        }

        final ArrayList<Integer> memberList = new ArrayList<Integer>();
        final ArrayList<Dimension> dimensionsForMembers = new ArrayList<Dimension>();
        for (int positionsIndex = 0; positionsIndex < positions.size(); positionsIndex++) {
            final Position position = positions.get(positionsIndex);
            for (int positionIndex = 0; positionIndex < position.size(); positionIndex++) {
                Member m = position.get(positionIndex);
                final Dimension dimension = m.getDimension();
                int hierarchyLevelCount = 1; // Originally was 0
                //
                // Change compared to BandedMDXTM - we don't want all levels
                // while (false && m != null)
                // {
                //     m = m.getParentMember();
                //     hierarchyLevelCount += 1;
                // }

                if (memberList.size() <= positionIndex) {
                    memberList.add(hierarchyLevelCount);
                    dimensionsForMembers.add(dimension);
                } else {
                    final Integer existingLevel = memberList.get(positionIndex);
                    if (existingLevel.intValue() < hierarchyLevelCount) {
                        memberList.set(positionIndex, hierarchyLevelCount);
                        dimensionsForMembers.set(positionIndex, dimension);
                    }
                }
            }
        }

        int memberCount = 0;
        for (int i = 0; i < memberList.size(); i++) {
            memberCount += memberList.get(i);
        }
        axesMembers[axesIndex] = memberCount;
        dimensionsForMembersPerAxis[axesIndex] = dimensionsForMembers;
        membersPerAxis[axesIndex] = memberList;
    }

    if (axesSize.length > 1) {
        rowCount = axesSize[1];
        for (int i = 2; i < axesSize.length; i++) {
            final int size = axesSize[i];
            rowCount *= size;
        }
    }
    if (noMeasures == false) {
        rowCount = Math.max(1, rowCount);
    }
    if (axesSize.length == 0) {
        columnCount = 1;
    } else if (axesSize.length > 0) {
        columnCount = axesSize[0];
    }
    for (int i = 1; i < axesMembers.length; i++) {
        columnCount += axesMembers[i];
    }

    columnNames = new String[columnCount];
    columnToDimensionMapping = new Dimension[columnCount];
    columnToAxisPosition = new int[columnCount];

    int columnIndex = 0;
    int dimColIndex = 0;

    // final FastStack memberStack = new FastStack();
    for (int axesIndex = axes.length - 1; axesIndex >= 1; axesIndex -= 1) {
        final Axis axis = axes[axesIndex];
        final List<Position> positions = axis.getPositions();
        final LinkedHashSet<String> columnNamesSet = new LinkedHashSet<String>();
        for (int positionsIndex = 0; positionsIndex < positions.size(); positionsIndex++) {
            final Position position = positions.get(positionsIndex);
            for (int positionIndex = 0; positionIndex < position.size(); positionIndex++) {
                // memberStack.clear();
                Member m = position.get(positionIndex);
                // Get member's hierarchy
                final String name = m.getHierarchy().getName();
                if (columnNamesSet.contains(name) == false) {
                    columnNamesSet.add(name);
                }
            }
        }
        if (columnNamesSet.size() != axesMembers[axesIndex]) {
            logger.error("ERROR: Number of names is not equal the pre-counted number.");
        }

        final List<Dimension> dimForMemberPerAxis = dimensionsForMembersPerAxis[axesIndex];
        final List<Integer> memberCntPerAxis = membersPerAxis[axesIndex];
        for (int i = 0; i < memberCntPerAxis.size(); i++) {
            final Integer count = memberCntPerAxis.get(i);
            final Dimension dim = dimForMemberPerAxis.get(i);
            for (int x = 0; x < count.intValue(); x += 1) {
                this.columnToDimensionMapping[dimColIndex + x] = dim;
                this.columnToAxisPosition[dimColIndex + x] = axesIndex;
            }
            dimColIndex = count.intValue() + dimColIndex;
        }

        final String[] names = columnNamesSet.toArray(new String[columnNamesSet.size()]);
        System.arraycopy(names, 0, this.columnNames, columnIndex, names.length);
        columnIndex += names.length;
    }

    if (axesSize.length > 0) {
        // now create the column names for the column-axis
        final Axis axis = axes[0];
        final List<Position> positions = axis.getPositions();
        for (int i = 0; i < positions.size(); i++) {
            final Position position = positions.get(i);
            final StringBuffer positionName = new StringBuffer(100);
            for (int j = 0; j < position.size(); j++) {
                if (j != 0) {
                    positionName.append('/');
                }
                final Member member = position.get(j);
                //positionName.append(MondrianUtil.getUniqueMemberName(member));
                positionName.append(member.getName());
            }
            columnNames[columnIndex] = positionName.toString();
            columnIndex += 1;
        }
    }
    if (axesSize.length == 0) {
        columnNames[0] = "Measure";
    }
    if (rowLimit > 0) {
        rowCount = Math.min(rowLimit, rowCount);
    }
}
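One small observation on the name collection above: the contains() guard before columnNamesSet.add(name) is redundant, because Set.add already ignores duplicates (it returns false when the element was present). A minimal sketch of the same order-preserving collection without the guard, using made-up hierarchy names:

import java.util.LinkedHashSet;

public class ColumnNames {
    public static void main(String[] args) {
        LinkedHashSet<String> columnNamesSet = new LinkedHashSet<>();
        for (String name : new String[] { "Time", "Store", "Time", "Product" }) {
            columnNamesSet.add(name); // add() is a no-op for duplicates
        }
        String[] names = columnNamesSet.toArray(new String[columnNamesSet.size()]);
        // First-seen order survives: Time, Store, Product
        System.out.println(String.join(", ", names));
    }
}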
From source file:uk.gov.gchq.gaffer.spark.operation.dataframe.converter.schema.SchemaToStructTypeConverter.java
private void buildSchema() {
    LOGGER.info("Building Spark SQL schema for groups {}", StringUtils.join(groups, ','));
    for (final String group : groups) {
        final SchemaElementDefinition elementDefn = schema.getElement(group);
        final List<StructField> structFieldList = new ArrayList<>();
        if (elementDefn instanceof SchemaEntityDefinition) {
            entityOrEdgeByGroup.put(group, EntityOrEdge.ENTITY);
            final SchemaEntityDefinition entityDefinition = (SchemaEntityDefinition) elementDefn;
            final String vertexClass = schema.getType(entityDefinition.getVertex()).getClassString();
            final DataType vertexType = getType(vertexClass);
            if (vertexType == null) {
                throw new RuntimeException("Vertex must be a recognised type: found " + vertexClass);
            }
            LOGGER.info("Group {} is an entity group - {} is of type {}", group, VERTEX_COL_NAME, vertexType);
            structFieldList.add(new StructField(VERTEX_COL_NAME, vertexType, true, Metadata.empty()));
        } else {
            entityOrEdgeByGroup.put(group, EntityOrEdge.EDGE);
            final SchemaEdgeDefinition edgeDefinition = (SchemaEdgeDefinition) elementDefn;
            final String srcClass = schema.getType(edgeDefinition.getSource()).getClassString();
            final String dstClass = schema.getType(edgeDefinition.getDestination()).getClassString();
            final DataType srcType = getType(srcClass);
            final DataType dstType = getType(dstClass);
            if (srcType == null || dstType == null) {
                throw new RuntimeException("Both source and destination must be recognised types: source was "
                        + srcClass + " destination was " + dstClass);
            }
            LOGGER.info("Group {} is an edge group - {} is of type {}, {} is of type {}", group, SRC_COL_NAME,
                    srcType, DST_COL_NAME, dstType);
            structFieldList.add(new StructField(SRC_COL_NAME, srcType, true, Metadata.empty()));
            structFieldList.add(new StructField(DST_COL_NAME, dstType, true, Metadata.empty()));
        }
        final Set<String> properties = elementDefn.getProperties();
        for (final String property : properties) {
            // Check if property is of a known type that can be handled by default
            final String propertyClass = elementDefn.getPropertyClass(property).getCanonicalName();
            DataType propertyType = getType(propertyClass);
            if (propertyType != null) {
                propertyNeedsConversion.put(property, needsConversion(propertyClass));
                structFieldList.add(new StructField(property, propertyType, true, Metadata.empty()));
                LOGGER.info("Property {} is of type {}", property, propertyType);
            } else {
                // Check if any of the provided converters can handle it
                if (converters != null) {
                    for (final Converter converter : converters) {
                        if (converter.canHandle(elementDefn.getPropertyClass(property))) {
                            propertyNeedsConversion.put(property, true);
                            propertyType = converter.convertedType();
                            converterByProperty.put(property, converter);
                            structFieldList
                                    .add(new StructField(property, propertyType, true, Metadata.empty()));
                            LOGGER.info("Property {} of type {} will be converted by {} to {}", property,
                                    propertyClass, converter.getClass().getName(), propertyType);
                            break;
                        }
                    }
                    if (propertyType == null) {
                        LOGGER.warn(
                                "Ignoring property {} as it is not a recognised type and none of the provided "
                                        + "converters can handle it", property);
                    }
                }
            }
        }
        structTypeByGroup.put(group,
                new StructType(structFieldList.toArray(new StructField[structFieldList.size()])));
    }
    // Create reverse map of field name to StructField
    final Map<String, Set<StructField>> fieldToStructs = new HashMap<>();
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            if (fieldToStructs.get(field) == null) {
                fieldToStructs.put(field, new HashSet<StructField>());
            }
            fieldToStructs.get(field).add(groupSchema.apply(field));
        }
    }
    // Check consistency, i.e. if the same field appears in multiple groups then the types are consistent
    for (final Entry<String, Set<StructField>> entry : fieldToStructs.entrySet()) {
        final Set<StructField> schemas = entry.getValue();
        if (schemas.size() > 1) {
            throw new IllegalArgumentException("Inconsistent fields: the field " + entry.getKey()
                    + " has more than one definition: " + StringUtils.join(schemas, ','));
        }
    }
    // Merge schemas for groups together - fields should appear in the order the groups were provided
    final LinkedHashSet<StructField> fields = new LinkedHashSet<>();
    fields.add(new StructField(GROUP, DataTypes.StringType, false, Metadata.empty()));
    usedProperties.add(GROUP);
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            final StructField struct = groupSchema.apply(field);
            // Add struct to fields unless it has already been added
            if (!fields.contains(struct)) {
                fields.add(struct);
                usedProperties.add(field);
            }
        }
    }
    structType = new StructType(fields.toArray(new StructField[fields.size()]));
    LOGGER.info("Schema is {}", structType);
    LOGGER.debug("properties -> conversion: {}", StringUtils.join(propertyNeedsConversion.entrySet(), ','));
}
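The merge step at the end relies on the two LinkedHashSet properties this page keeps demonstrating: equal StructFields contributed by different groups collapse into a single entry, and the surviving fields keep the order in which the groups were visited before toArray produces the final StructField[]. A self-contained sketch of that merge using plain strings in place of Spark's StructField; the group and field names are made up:

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class SchemaMerge {
    public static void main(String[] args) {
        // Field lists for two hypothetical groups, in the order the groups were provided.
        List<List<String>> groupFields = Arrays.asList(
                Arrays.asList("group", "vertex", "count"),
                Arrays.asList("group", "src", "dst", "count"));

        LinkedHashSet<String> fields = new LinkedHashSet<>();
        for (List<String> group : groupFields) {
            fields.addAll(group); // shared fields are kept once, in first-seen order
        }
        String[] merged = fields.toArray(new String[fields.size()]);
        // prints: group, vertex, count, src, dst
        System.out.println(String.join(", ", merged));
    }
}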