List of usage examples for java.lang.Integer.compare
public static int compare(int x, int y)
From source file:org.efaps.admin.common.SystemConfiguration.java
/**
 * Gets the configuration value for the given key and type, choosing the
 * matching entry with the highest (positive) priority.
 *
 * @param _key  the key to look up
 * @param _type the type the entry must have
 * @return the highest-priority value, or {@code null} if no entry matches
 * @throws EFapsException on error
 */
private String getValue(final String _key, final ConfType _type) throws EFapsException {
    // Keep only entries of the requested type/key with a positive priority,
    // ordered so that the highest priority comes first.
    final List<Value> fv = this.values.stream()
            .filter(p -> p.type.equals(_type))
            .filter(p -> p.key.equals(_key))
            .filter(p -> priority(p) > 0)
            .sorted(Comparator.comparingInt((final Value v) -> priority(v)).reversed())
            .collect(Collectors.toList());
    LOG.debug("Analyzed for key {}: {}", _key, fv);
    return fv.isEmpty() ? null : fv.get(0).value;
}
From source file:org.opentox.jaqpot3.qsar.AbstractTrainer.java
/**
 * Reads and validates the preprocessing parameters (scaling, normalization,
 * missing-value handling) supplied by the client.
 *
 * @param clientParameters the client-provided parameters
 * @throws BadParameterException if a parameter is not numeric, if the scaling
 *         range is inconsistent (min must be strictly less than max), or if
 *         both scaling and normalization are requested
 */
private void preprocParametrize(IClientInput clientParameters) throws BadParameterException {
    if (scalingSupported()) {
        String minString = clientParameters.getFirstValue("scalingMin");
        if (minString != null) {
            hasScaling = true;
            try {
                scalingMin = Double.parseDouble(minString);
            } catch (NumberFormatException nfe) {
                throw new BadParameterException(
                        "Invalid value for the parameter 'scaling_min' (" + minString + ")", nfe);
            }
        }
        String maxString = clientParameters.getFirstValue("scalingMax");
        if (maxString != null) {
            try {
                scalingMax = Double.parseDouble(maxString);
            } catch (NumberFormatException nfe) {
                throw new BadParameterException(
                        "Invalid value for the parameter 'scaling_max' (" + maxString + ")", nfe);
            }
        }
        // NOTE(review): this range check also runs when neither scaling parameter
        // was supplied, relying on the field defaults satisfying min < max — confirm.
        if (scalingMax <= scalingMin) {
            throw new BadParameterException(
                    "Assertion Exception: max >= min. The values for the parameters min and max that "
                            + "you specified are inconsistent. min=" + scalingMin + " while max=" + scalingMax
                            + ". It should be min < max.");
        }
    }
    if (normalizationSupported()) {
        String normalizeString = clientParameters.getFirstValue("normalize");
        if (normalizeString != null) {
            try {
                // "1" enables normalization; any other integer disables it.
                hasNormalization = Integer.parseInt(normalizeString) == 1;
            } catch (NumberFormatException nfe) {
                throw new BadParameterException(
                        "Invalid value for the parameter 'normalize' (" + normalizeString + ")", nfe);
            }
        }
    }
    if (hasScaling && hasNormalization) {
        throw new BadParameterException("cannot both scale and normalize a dataset");
    }
    if (performMVH()) {
        String mvhString = clientParameters.getFirstValue("mvh");
        if (mvhString != null) {
            try {
                // "1" enables missing-value handling; any other integer disables it.
                hasMVH = Integer.parseInt(mvhString) == 1;
            } catch (NumberFormatException nfe) {
                throw new BadParameterException("Invalid value for the parameter 'mvh' (" + mvhString + ")",
                        nfe);
            }
        }
    }
}
From source file:org.asqatasun.webapp.presentation.factory.TestResultFactory.java
/**
 * Sorts the given process results in place, ascending by the rank of each
 * result's associated test.
 *
 * @param processResultList the results to sort
 */
private void sortCollection(List<? extends ProcessResult> processResultList) {
    Collections.sort(processResultList,
            Comparator.comparingInt((ProcessResult pr) -> pr.getTest().getRank()));
}
From source file:it.unimi.dsi.sux4j.mph.CHDMinimalPerfectHashFunction.java
/**
 * Creates a new CHD minimal perfect hash function for the given keys.
 *
 * @param keys the keys to hash, or {@code null}.
 * @param transform a transformation strategy for the keys.
 * @param lambda the average bucket size.
 * @param loadFactor the load factor.
 * @param signatureWidth a signature width, or 0 for no signature.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}.
 */
protected CHDMinimalPerfectHashFunction(final Iterable<? extends T> keys,
        final TransformationStrategy<? super T> transform, final int lambda, double loadFactor,
        final int signatureWidth, final File tempDir, ChunkedHashStore<T> chunkedHashStore) throws IOException {
    this.transform = transform;
    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    // If no store was supplied, build one from the key iterable.
    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        chunkedHashStore = new ChunkedHashStore<T>(transform, tempDir, pl);
        chunkedHashStore.reset(r.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();
    defRetValue = -1; // For the very few cases in which we can decide

    int log2NumChunks = Math.max(0, Fast.mostSignificantBit(n >> LOG2_CHUNK_SIZE));
    chunkShift = chunkedHashStore.log2Chunks(log2NumChunks);
    final int numChunks = 1 << log2NumChunks;

    LOGGER.debug("Number of chunks: " + numChunks);
    LOGGER.debug("Average chunk size: " + (double) n / numChunks);

    offsetNumBucketsSeed = new long[(numChunks + 1) * 3 + 2];

    int duplicates = 0;
    final LongArrayList holes = new LongArrayList();

    // Off-line storage for the (c0, c1) coefficient pairs, serialized with a
    // 7-bit variable-length encoding (high bit marks a continuation byte).
    @SuppressWarnings("resource")
    final OfflineIterable<MutableLong, MutableLong> coefficients = new OfflineIterable<MutableLong, MutableLong>(
            new Serializer<MutableLong, MutableLong>() {

                @Override
                public void write(final MutableLong a, final DataOutput dos) throws IOException {
                    long x = a.longValue();
                    while ((x & ~0x7FL) != 0) {
                        dos.writeByte((int) (x | 0x80));
                        x >>>= 7;
                    }
                    dos.writeByte((int) x);
                }

                @Override
                public void read(final DataInput dis, final MutableLong x) throws IOException {
                    byte b = dis.readByte();
                    long t = b & 0x7F;
                    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
                        b = dis.readByte();
                        t |= (b & 0x7FL) << shift;
                    }
                    x.setValue(t);
                }
            }, new MutableLong());

    // Outer retry loop: restarted from scratch if the store reports duplicates.
    for (;;) {
        LOGGER.debug("Generating minimal perfect hash function...");

        holes.clear();
        coefficients.clear();
        pl.expectedUpdates = numChunks;
        pl.itemsName = "chunks";
        pl.start("Analysing chunks... ");

        try {
            int chunkNumber = 0;

            for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
                /* We treat a chunk as a single hash function. The number of bins is thus
                 * the first prime larger than the chunk size divided by the load factor. */
                final int p = Primes.nextPrime((int) Math.ceil(chunk.size() / loadFactor) + 1);
                final boolean used[] = new boolean[p];

                final int numBuckets = (chunk.size() + lambda - 1) / lambda;
                numBuckets(chunkNumber + 1, numBuckets(chunkNumber) + numBuckets);
                final int[] cc0 = new int[numBuckets];
                final int[] cc1 = new int[numBuckets];
                @SuppressWarnings("unchecked")
                final ArrayList<long[]>[] bucket = new ArrayList[numBuckets];
                for (int i = bucket.length; i-- != 0;)
                    bucket[i] = new ArrayList<long[]>();

                // Per-chunk retry loop: rerun with a new seed on any collision.
                tryChunk: for (;;) {
                    for (ArrayList<long[]> b : bucket)
                        b.clear();
                    Arrays.fill(used, false);

                    /* At each try, the allocation of keys to buckets is randomized differently. */
                    final long seed = r.nextLong();
                    // System.err.println( "Number of keys: " + chunk.size() + " Number of bins: " + p + " seed: " + seed );
                    /* We distribute the keys in this chunk in the buckets. */
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        final long[] h = new long[3];
                        Hashes.spooky4(triple, seed, h);
                        final ArrayList<long[]> b = bucket[(int) ((h[0] >>> 1) % numBuckets)];
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;

                        // All elements in a bucket must have either different h[ 1 ] or different h[ 2 ]
                        for (long[] t : b)
                            if (t[1] == h[1] && t[2] == h[2]) {
                                LOGGER.info("Duplicate index" + Arrays.toString(t));
                                continue tryChunk;
                            }
                        b.add(h);
                    }

                    // Process buckets in order of decreasing size.
                    final int[] perm = Util.identity(bucket.length);
                    IntArrays.quickSort(perm, new AbstractIntComparator() {
                        private static final long serialVersionUID = 1L;

                        @Override
                        public int compare(int a0, int a1) {
                            return Integer.compare(bucket[a1].size(), bucket[a0].size());
                        }
                    });

                    for (int i = 0; i < perm.length;) {
                        final LinkedList<Integer> bucketsToDo = new LinkedList<Integer>();
                        final int size = bucket[perm[i]].size();
                        //System.err.println( "Bucket size: " + size );
                        int j;
                        // Gather indices of all buckets with the same size
                        for (j = i; j < perm.length && bucket[perm[j]].size() == size; j++)
                            bucketsToDo.add(Integer.valueOf(perm[j]));

                        // Examine for each pair (c0,c1) the buckets still to do
                        ext: for (int c1 = 0; c1 < p; c1++)
                            for (int c0 = 0; c0 < p; c0++) {
                                //System.err.println( "Testing " + c0 + ", " + c1 + " (to do: " + bucketsToDo.size() + ")" );
                                for (Iterator<Integer> iterator = bucketsToDo.iterator(); iterator.hasNext();) {
                                    final int k = iterator.next().intValue();
                                    final ArrayList<long[]> b = bucket[k];
                                    boolean completed = true;
                                    final IntArrayList done = new IntArrayList();
                                    // Try to see whether the necessary entries are not used
                                    for (long[] h : b) {
                                        //assert k == h[ 0 ];

                                        int pos = (int) ((h[1] + c0 * h[2] + c1) % p);
                                        //System.err.println( "Testing pos " + pos + " for " + Arrays.toString( e ));
                                        if (used[pos]) {
                                            completed = false;
                                            break;
                                        } else {
                                            used[pos] = true;
                                            done.add(pos);
                                        }
                                    }

                                    if (completed) {
                                        // All positions were free
                                        cc0[k] = c0;
                                        cc1[k] = c1;
                                        iterator.remove();
                                    } else
                                        for (int d : done)
                                            used[d] = false;
                                }
                                if (bucketsToDo.isEmpty())
                                    break ext;
                            }

                        if (!bucketsToDo.isEmpty())
                            continue tryChunk;
                        seed(chunkNumber, seed);
                        i = j;
                    }

                    break;
                }

                // System.err.println("DONE!");

                // Sanity check (debug builds only): every key maps to a distinct position.
                if (ASSERTS) {
                    final IntOpenHashSet pos = new IntOpenHashSet();
                    final long h[] = new long[3];
                    for (Iterator<long[]> iterator = chunk.iterator(); iterator.hasNext();) {
                        final long[] triple = iterator.next();
                        Hashes.spooky4(triple, seed(chunkNumber), h);
                        h[0] = (h[0] >>> 1) % numBuckets;
                        h[1] = (int) ((h[1] >>> 1) % p);
                        h[2] = (int) ((h[2] >>> 1) % (p - 1)) + 1;
                        //System.err.println( Arrays.toString( e ) );
                        assert pos.add((int) ((h[1] + cc0[(int) (h[0])] * h[2] + cc1[(int) (h[0])]) % p));
                    }
                }

                // Persist the coefficient pair of each bucket as a single value.
                final MutableLong l = new MutableLong();
                for (int i = 0; i < numBuckets; i++) {
                    l.setValue(cc0[i] + cc1[i] * p);
                    coefficients.add(l);
                }

                // Unused bins become holes to be skipped by the rank structure.
                for (int i = 0; i < p; i++)
                    if (!used[i])
                        holes.add(offset(chunkNumber) + i);

                offset(chunkNumber + 1, offset(chunkNumber) + p);
                chunkNumber++;
                pl.update();
            }

            pl.done();
            break;
        } catch (ChunkedHashStore.DuplicateException e) {
            if (keys == null)
                throw new IllegalStateException(
                        "You provided no keys, but the chunked hash store was not checked");
            if (duplicates++ > 3)
                throw new IllegalArgumentException("The input list contains duplicates");
            LOGGER.warn("Found duplicate. Recomputing triples...");
            chunkedHashStore.reset(r.nextLong());
            chunkedHashStore.addAll(keys.iterator());
        }
    }

    rank = new SparseRank(offset(offsetNumBucketsSeed.length / 3 - 1), holes.size(), holes.iterator());

    globalSeed = chunkedHashStore.seed();

    // Compress the streamed coefficients into an Elias-Fano list.
    this.coefficients = new EliasFanoLongBigList(new AbstractLongIterator() {
        final OfflineIterator<MutableLong, MutableLong> iterator = coefficients.iterator();

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        public long nextLong() {
            return iterator.next().longValue();
        }
    }, 0, true);

    coefficients.close();

    LOGGER.info("Completed.");
    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);

    if (signatureWidth != 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        (signatures = LongArrayBitVector.getInstance().asLongBigList(signatureWidth)).size(n);
        pl.expectedUpdates = n;
        pl.itemsName = "signatures";
        pl.start("Signing...");
        for (ChunkedHashStore.Chunk chunk : chunkedHashStore) {
            Iterator<long[]> iterator = chunk.iterator();
            for (int i = chunk.size(); i-- != 0;) {
                final long[] triple = iterator.next();
                long t = getLongByTripleNoCheck(triple);
                signatures.set(t, signatureMask & triple[0]);
                pl.lightUpdate();
            }
        }
        pl.done();
    } else {
        signatureMask = 0;
        signatures = null;
    }

    if (!givenChunkedHashStore)
        chunkedHashStore.close();
}
From source file:org.codice.ddf.admin.application.service.impl.ApplicationServiceImpl.java
/**
 * Returns all features whose name marks them as installation profiles,
 * ordered by ascending start level.
 *
 * @return the installation profile features, or an empty list if the lookup fails
 */
@Override
public List<Feature> getInstallationProfiles() {
    LOGGER.debug("Looking for installation profile features");
    List<Feature> profiles = new ArrayList<>();
    try {
        // Stream the array directly instead of going through Arrays.asList().
        profiles = Arrays.stream(featuresService.listFeatures())
                .filter(f -> f.getName().contains(INSTALLATION_PROFILE_PREFIX))
                .sorted((f1, f2) -> Integer.compare(f1.getStartLevel(), f2.getStartLevel()))
                .collect(Collectors.toList());
    } catch (Exception e) {
        LOGGER.error("Encountered an error while trying to obtain the installation profile features.", e);
    }
    return profiles;
}
From source file:com.github.javalbert.reflection.ClassAccessFactory.java
private void initializeMethods() { List<Method> methods = Arrays.stream(clazz.getDeclaredMethods()).sorted((a, b) -> { int compareMethodName = a.getName().compareTo(b.getName()); if (compareMethodName != 0) { return compareMethodName; }// ww w . ja v a 2s. c o m Class<?>[] aparams = a.getParameterTypes(); Class<?>[] bparams = b.getParameterTypes(); int len = Math.min(aparams.length, bparams.length); for (int i = 0; i < len; i++) { int compareParamType = aparams[i].getName().compareTo(bparams[i].getName()); if (compareParamType != 0) { return compareParamType; } } return Integer.compare(aparams.length, bparams.length); }).collect(toList()); for (int i = 0; i < methods.size(); i++) { Method method = methods.get(i); setAccessible(method); addMethodInfo(new MethodInfo(method, i)); } }
From source file:edu.cmu.tetrad.search.TimeSeriesUtils.java
/**
 * Creates new time series dataset from the given one with index variable (e.g., time).
 *
 * @param data    the source dataset; each variable must be continuous or discrete
 * @param numLags how many lagged copies of each variable to create
 * @return a dataset of {@code getNumRows() - numLags} rows containing the lagged
 *         variables plus a trailing "time" index column, with tier knowledge attached
 */
public static DataSet createLagDataWithIndex(DataSet data, int numLags) {
    List<Node> variables = data.getVariables();
    int dataSize = variables.size();
    int laggedRows = data.getNumRows() - numLags;
    IKnowledge knowledge = new Knowledge2();
    Node[][] laggedNodes = new Node[numLags + 1][dataSize];
    List<Node> newVariables = new ArrayList<>((numLags + 1) * dataSize + 2); // added 1 to this

    // Build a lagged copy of every variable; lag 0 keeps the original name,
    // lag k is named "<name>:k".
    for (int lag = 0; lag <= numLags; lag++) {
        for (int col = 0; col < dataSize; col++) {
            Node node = variables.get(col);
            String varName = node.getName();
            Node laggedNode;
            String name = varName;

            if (lag != 0) {
                name = name + ":" + lag;
            }

            if (node instanceof ContinuousVariable) {
                laggedNode = new ContinuousVariable(name);
            } else if (node instanceof DiscreteVariable) {
                DiscreteVariable var = (DiscreteVariable) node;
                laggedNode = new DiscreteVariable(var);
                laggedNode.setName(name);
            } else {
                throw new IllegalStateException("Node must be either continuous or discrete");
            }
            newVariables.add(laggedNode);
            laggedNode.setCenter(80 * col + 50, 80 * (numLags - lag) + 50);
            laggedNodes[lag][col] = laggedNode;
            // knowledge.addToTier(numLags - lag + 1, laggedNode.getName());
        }
    }

    // Append the index ("time") variable and place it in the first tier.
    String name = "time";
    Node indexNode = new ContinuousVariable(name);
    indexNode.setName(name);
    newVariables.add(indexNode);
    indexNode.setCenter(50, 80 * (numLags - 1) + 50);
    knowledge.addToTier(0, indexNode.getName());

    // System.out.println("Variable list before the sort = " + variables);

    // Order variables by lag, then prefix, then index.
    Collections.sort(newVariables, new Comparator<Node>() {
        @Override
        public int compare(Node o1, Node o2) {
            String name1 = getNameNoLag(o1);
            String name2 = getNameNoLag(o2);

            // System.out.println("name 1 = " + name1);
            // System.out.println("name 2 = " + name2);

            String prefix1 = getPrefix(name1);
            String prefix2 = getPrefix(name2);

            // System.out.println("prefix 1 = " + prefix1);
            // System.out.println("prefix 2 = " + prefix2);

            int index1 = getIndex(name1);
            int index2 = getIndex(name2);

            // System.out.println("index 1 = " + index1);
            // System.out.println("index 2 = " + index2);

            if (getLag(o1.getName()) == (getLag(o2.getName()))) {
                if (prefix1.compareTo(prefix2) == 0) {
                    return Integer.compare(index1, index2);
                } else {
                    return prefix1.compareTo(prefix2);
                }
            } else {
                return getLag(o1.getName()) - getLag(o2.getName());
            }
        }
    });

    // System.out.println("Variable list after the sort = " + variables);

    // Assign each lagged variable (except "time") to a knowledge tier by its lag.
    for (Node node : newVariables) {
        String varName = node.getName();
        if (varName.equals("time"))
            continue;
        String tmp;
        int lag;
        if (varName.indexOf(':') == -1) {
            lag = 0;
            // laglist.add(lag);
        } else {
            tmp = varName.substring(varName.indexOf(':') + 1, varName.length());
            lag = Integer.parseInt(tmp);
            // laglist.add(lag);
        }
        knowledge.addToTier(numLags - lag + 1, node.getName());
    }

    // NOTE(review): the column index col + lag * dataSize assumes the dataset's
    // column layout still matches the pre-sort construction order of newVariables,
    // even though newVariables was just sorted — verify against ColtDataSet semantics.
    DataSet laggedData = new ColtDataSet(laggedRows, newVariables);
    for (int lag = 0; lag <= numLags; lag++) {
        for (int col = 0; col < dataSize; col++) {
            for (int row = 0; row < laggedRows; row++) {
                Node laggedNode = laggedNodes[lag][col];
                if (laggedNode instanceof ContinuousVariable) {
                    double value = data.getDouble(row + numLags - lag, col);
                    laggedData.setDouble(row, col + lag * dataSize, value);
                } else {
                    int value = data.getInt(row + numLags - lag, col);
                    laggedData.setInt(row, col + lag * dataSize, value);
                }
            }
        }
    }

    // fill indexNode with for loop over rows
    for (int row = 0; row < laggedRows; row++) {
        laggedData.setDouble(row, dataSize + numLags * dataSize, row + 1);
    }

    knowledge.setDefaultToKnowledgeLayout(true);
    laggedData.setKnowledge(knowledge);
    System.out.println("Knowledge set to : " + knowledge);
    return laggedData;
}
From source file:com.milaboratory.mitcr.cli.Main.java
public static void printHelp() { HelpFormatter formatter = new HelpFormatter(); formatter.setOptionComparator(new Comparator<Option>() { @Override//from w ww. j a v a2 s .co m public int compare(Option o1, Option o2) { return Integer.compare(orderingMap.get(o1.getOpt()), orderingMap.get(o2.getOpt())); } }); final String executable = System.getProperty("executable", "java -jar mitcr.jar"); err.println("usage: " + executable + " -pset <preset name> [options] input_file output_file.cls"); err.println(" " + executable + " -pset <preset name> [options] input_file output_file.txt"); err.println(" " + executable + " -pset <preset name> [options] -export newPresetName"); err.println(); formatter.printOptions(new PrintWriter(err, true), 85, options, 2, 3); err.println(); }
From source file:org.apache.flink.table.codegen.SortCodeGeneratorTest.java
/**
 * Round-trips randomized test rows through the generated sort support: writes
 * shuffled rows into a binary in-memory sort buffer, sorts it with QuickSort,
 * then checks the result against a reference sort applied directly to the key
 * columns (honoring null ordering, per-type comparison, and sort direction).
 */
private void testInner() throws Exception {
    List<MemorySegment> segments = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        segments.add(MemorySegmentFactory.wrap(new byte[32768]));
    }

    InternalType[] fieldTypes = getFieldTypes();
    InternalType[] keyTypes = getKeyTypes();

    Tuple2<NormalizedKeyComputer, RecordComparator> tuple2 = getSortBaseWithNulls(
            this.getClass().getSimpleName(), keyTypes, keys, orders, nullsIsLast);

    BinaryRowSerializer serializer = new BinaryRowSerializer(fieldTypes.length);

    BinaryInMemorySortBuffer sortBuffer = BinaryInMemorySortBuffer.createBuffer(tuple2.f0,
            (AbstractRowSerializer) serializer, serializer, tuple2.f1, segments);

    BinaryRow[] dataArray = getTestData();
    // Two independent copies: one kept as the reference, one shuffled and sorted.
    List<BinaryRow> data = Arrays.asList(dataArray.clone());
    List<BinaryRow> binaryRows = Arrays.asList(dataArray.clone());
    Collections.shuffle(binaryRows);

    for (BinaryRow row : binaryRows) {
        if (!sortBuffer.write(row)) {
            throw new RuntimeException();
        }
    }

    new QuickSort().sort(sortBuffer);

    MutableObjectIterator<BinaryRow> iter = sortBuffer.getIterator();
    List<BinaryRow> result = new ArrayList<>();
    BinaryRow row = serializer.createInstance();
    while ((row = iter.next(row)) != null) {
        result.add(row.copy());
    }

    // Reference sort: compare key columns one at a time, nulls first on
    // ascending keys, flipping the sign for descending keys.
    data.sort((o1, o2) -> {
        for (int i = 0; i < keys.length; i++) {
            InternalType t = types[fields[keys[i]]];
            boolean order = orders[i];
            Object first = null;
            Object second = null;
            if (!o1.isNullAt(keys[i])) {
                first = TypeGetterSetters.get(o1, keys[i], keyTypes[i]);
            }
            if (!o2.isNullAt(keys[i])) {
                second = TypeGetterSetters.get(o2, keys[i], keyTypes[i]);
            }

            if (first == null && second == null) {
                // Both null on this key: fall through to the next key.
            } else if (first == null) {
                return order ? -1 : 1;
            } else if (second == null) {
                return order ? 1 : -1;
            } else if (first instanceof Comparable) {
                int ret = ((Comparable) first).compareTo(second);
                if (ret != 0) {
                    return order ? ret : -ret;
                }
            } else if (t instanceof ArrayType) {
                // Element-wise byte comparison, then compare lengths.
                BinaryArray leftArray = (BinaryArray) first;
                BinaryArray rightArray = (BinaryArray) second;
                int minLength = Math.min(leftArray.numElements(), rightArray.numElements());
                for (int j = 0; j < minLength; j++) {
                    boolean isNullLeft = leftArray.isNullAt(j);
                    boolean isNullRight = rightArray.isNullAt(j);
                    if (isNullLeft && isNullRight) {
                        // Do nothing.
                    } else if (isNullLeft) {
                        return order ? -1 : 1;
                    } else if (isNullRight) {
                        return order ? 1 : -1;
                    } else {
                        int comp = Byte.compare(leftArray.getByte(j), rightArray.getByte(j));
                        if (comp != 0) {
                            return order ? comp : -comp;
                        }
                    }
                }
                if (leftArray.numElements() < rightArray.numElements()) {
                    return order ? -1 : 1;
                } else if (leftArray.numElements() > rightArray.numElements()) {
                    return order ? 1 : -1;
                }
            } else if (t.equals(InternalTypes.BINARY)) {
                int comp = org.apache.flink.table.runtime.sort.SortUtil.compareBinary((byte[]) first,
                        (byte[]) second);
                if (comp != 0) {
                    return order ? comp : -comp;
                }
            } else if (t instanceof RowType) {
                // Nested rows are compared via the pre-built external comparators.
                RowType rowType = (RowType) t;
                int comp;
                if (rowType.getTypeAt(0).equals(InternalTypes.INT)) {
                    comp = INT_ROW_COMP.compare(INT_ROW_CONV.toExternal(first),
                            INT_ROW_CONV.toExternal(second));
                } else {
                    comp = NEST_ROW_COMP.compare(NEST_ROW_CONV.toExternal(first),
                            NEST_ROW_CONV.toExternal(second));
                }
                if (comp != 0) {
                    return order ? comp : -comp;
                }
            } else if (t instanceof GenericType) {
                // Generic values in this test are always boxed Integers.
                Integer i1 = BinaryGeneric.getJavaObjectFromBinaryGeneric((BinaryGeneric) first,
                        IntSerializer.INSTANCE);
                Integer i2 = BinaryGeneric.getJavaObjectFromBinaryGeneric((BinaryGeneric) second,
                        IntSerializer.INSTANCE);
                int comp = Integer.compare(i1, i2);
                if (comp != 0) {
                    return order ? comp : -comp;
                }
            } else {
                throw new RuntimeException();
            }
        }
        return 0;
    });

    // Build a diagnostic message showing expected vs. actual rows.
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < data.size(); i++) {
        builder.append("\n").append("expect: ").append(data.get(i).toOriginString(fieldTypes))
                .append("; actual: ").append(result.get(i).toOriginString(fieldTypes));
    }
    builder.append("\n").append("types: ").append(Arrays.asList(fieldTypes));
    builder.append("\n").append("keys: ").append(Arrays.toString(keys));
    String msg = builder.toString();

    // Only the key columns are required to match (non-key columns may be
    // reordered among rows with equal keys).
    for (int i = 0; i < data.size(); i++) {
        for (int j = 0; j < keys.length; j++) {
            boolean isNull1 = data.get(i).isNullAt(keys[j]);
            boolean isNull2 = result.get(i).isNullAt(keys[j]);
            Assert.assertEquals(msg, isNull1, isNull2);
            if (!isNull1 || !isNull2) {
                Object o1 = TypeGetterSetters.get(data.get(i), keys[j], keyTypes[j]);
                Object o2 = TypeGetterSetters.get(result.get(i), keys[j], keyTypes[j]);
                if (keyTypes[j].equals(InternalTypes.BINARY)) {
                    Assert.assertArrayEquals(msg, (byte[]) o1, (byte[]) o2);
                } else {
                    Assert.assertEquals(msg, o1, o2);
                }
            }
        }
    }
}
From source file:acmi.l2.clientmod.xdat.Controller.java
/**
 * Builds property-sheet entries for the given object by introspecting every
 * class in its hierarchy up to (but excluding) {@code Object}.
 *
 * @param obj the object whose bean properties should be exposed
 * @return the collected property-sheet entries, most-derived class first
 */
private static List<PropertySheetItem> loadProperties(Object obj) {
    Class<?> objClass = obj.getClass();
    List<PropertySheetItem> list = new ArrayList<>();
    while (objClass != Object.class) {
        try {
            // Field declaration order (with any "Prop" suffix stripped) drives
            // the display order of the discovered bean properties.
            List<String> names = Arrays.stream(objClass.getDeclaredFields())
                    .map(field -> field.getName().replace("Prop", "")).collect(Collectors.toList());
            BeanInfo beanInfo = Introspector.getBeanInfo(objClass, objClass.getSuperclass());
            PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
            Arrays.sort(propertyDescriptors, (pd1, pd2) -> Integer.compare(names.indexOf(pd1.getName()),
                    names.indexOf(pd2.getName())));
            for (PropertyDescriptor descriptor : propertyDescriptors) {
                // Skip Groovy metadata, collections, and getters marked
                // deprecated or hidden.
                if ("metaClass".equals(descriptor.getName()))
                    continue;

                if (Collection.class.isAssignableFrom(descriptor.getPropertyType()))
                    continue;

                AnnotatedElement getter = descriptor.getReadMethod();
                if (getter.isAnnotationPresent(Deprecated.class) || getter.isAnnotationPresent(Hide.class))
                    continue;

                String description = "";
                if (getter.isAnnotationPresent(Description.class))
                    description = getter.getAnnotation(Description.class).value();
                // Choose a specialized editor for booleans and for getters
                // annotated as textures or system strings.
                Class<? extends PropertyEditor<?>> propertyEditorClass = null;
                if (descriptor.getPropertyType() == Boolean.class
                        || descriptor.getPropertyType() == Boolean.TYPE) {
                    propertyEditorClass = BooleanPropertyEditor.class;
                } else if (getter.isAnnotationPresent(Tex.class)) {
                    propertyEditorClass = TexturePropertyEditor.class;
                } else if (getter.isAnnotationPresent(Sysstr.class)) {
                    propertyEditorClass = SysstringPropertyEditor.class;
                }
                BeanProperty property = new BeanProperty(descriptor, objClass.getSimpleName(), description,
                        propertyEditorClass);
                list.add(property);
            }
        } catch (IntrospectionException e) {
            e.printStackTrace();
        }
        objClass = objClass.getSuperclass();
    }
    return list;
}