Example usage for org.apache.commons.lang3.tuple Pair getKey

List of usage examples for org.apache.commons.lang3.tuple Pair getKey

Introduction

On this page you can find example usage for org.apache.commons.lang3.tuple.Pair#getKey.

Prototype

@Override
public final L getKey() 

Source Link

Document

Gets the key from this pair.

This method implements the Map.Entry interface, returning the left element as the key.
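
A minimal sketch of the round trip, assuming only commons-lang3 on the classpath: Pair.of creates an immutable pair, and because Pair implements Map.Entry, getKey() and getValue() are interchangeable with getLeft() and getRight().

import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class PairGetKeyExample {
    public static void main(String[] args) {
        Pair<String, Integer> pair = Pair.of("answer", 42);

        // getKey() is an alias for getLeft(): both return the left element.
        System.out.println(pair.getKey());   // answer
        System.out.println(pair.getValue()); // 42

        // Pair implements Map.Entry, so it fits anywhere an entry is expected.
        Map.Entry<String, Integer> entry = pair;
        System.out.println(entry.getKey() + " = " + entry.getValue()); // answer = 42
    }
}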

Usage

From source file:mase.mason.world.DistanceSensorArcs.java

/**
 * Very efficient implementation using an ordered TreeMap Should ensure
 * scalability when large numbers of objects are present, as there is no
 * need to check angles with objects that are farther than the closest
 * object in the given cone. Potential limitation (unlikely): if there are
 * two objects at exactly the same distance but at different angles, only
 * one of them will be considered, as the distance is used as key in the
 * TreeMap.
 */
@Override
public double[] readValues() {
    lastDistances = new double[valueCount()];
    Arrays.fill(lastDistances, Double.POSITIVE_INFINITY);
    Arrays.fill(closestObjects, null);
    if (range < 0.001) {
        return lastDistances;
    }
    double rangeNoiseAbs = Double.isInfinite(range) ? rangeNoise * fieldDiagonal : range * rangeNoise;

    WorldObject[] candidates = getCandidates();

    // TODO: replace treemap with collection-sort
    Pair<Double, WorldObject>[] distances = new Pair[candidates.length];
    int index = 0;
    for (WorldObject o : candidates) {
        if (!centerToCenter && o.isInside(ag.getLocation())) {
            Arrays.fill(lastDistances, 0);
            Arrays.fill(closestObjects, o);
            return lastDistances;
        }

        double dist = centerToCenter ? ag.getLocation().distance(o.getLocation())
                : Math.max(0, ag.distanceTo(o));
        if (rangeNoiseAbs > 0) {
            dist += rangeNoiseAbs
                    * (noiseType == UNIFORM ? state.random.nextDouble() * 2 - 1 : state.random.nextGaussian());
            dist = Math.max(dist, 0);
        }
        if (dist <= range) {
            distances[index++] = Pair.of(dist, o);
        }
    }
    if (index < distances.length) {
        distances = Arrays.copyOf(distances, index);
    }

    Arrays.sort(distances, new Comparator<Pair<Double, WorldObject>>() {
        @Override
        public int compare(Pair<Double, WorldObject> a, Pair<Double, WorldObject> b) {
            return Double.compare(a.getLeft(), b.getLeft());
        }
    });

    int filled = 0;
    for (Pair<Double, WorldObject> e : distances) {
        if (filled == arcStart.length) {
            break;
        }
        double angle = ag.angleTo(e.getRight().getLocation());
        if (orientationNoise > 0) {
            angle += orientationNoise
                    * (noiseType == UNIFORM ? state.random.nextDouble() * 2 - 1 : state.random.nextGaussian());
            angle = EmboddiedAgent.normalizeAngle(angle);
        }
        for (int a = 0; a < arcStart.length; a++) {
            if (Double.isInfinite(lastDistances[a]) && ((angle >= arcStart[a] && angle <= arcEnd[a])
                    || (arcStart[a] > arcEnd[a] && (angle >= arcStart[a] || angle <= arcEnd[a])))) {
                filled++;
                lastDistances[a] = e.getKey();
                closestObjects[a] = e.getValue();
            }
        }
    }
    return lastDistances;
}
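
As an aside, the anonymous Comparator used for the sort above can be written more compactly on Java 8+ (assuming a java.util.Comparator import); a behaviorally equivalent one-liner that orders pairs by their left, i.e. distance, element:

    Arrays.sort(distances, Comparator.comparing(Pair::getLeft));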

From source file:io.anserini.rerank.lib.AxiomReranker.java

/**
 * Calculate the scores (weights) of each term that occurred in the reranking pool.
 * The process:
 * 1. For each query term, calculate its score for each term in the reranking pool. The score
 * is calculated as
 * <pre>
 * P(both occurs)*log{P(both occurs)/P(t1 occurs)/P(t2 occurs)}
 * + P(both not occurs)*log{P(both not occurs)/P(t1 not occurs)/P(t2 not occurs)}
 * + P(t1 occurs t2 not occurs)*log{P(t1 occurs t2 not occurs)/P(t1 occurs)/P(t2 not occurs)}
 * + P(t1 not occurs t2 occurs)*log{P(t1 not occurs t2 occurs)/P(t1 not occurs)/P(t2 occurs)}
 * </pre>
 * 2. For each query term the scores of every other term in the reranking pool are stored in a
 * PriorityQueue, only the top {@code K} are kept.
 * 3. Add the scores of the same term together and pick the top {@code M} ones.
 *
 * @param termInvertedList A Map of <term -> Set<docId>> where the Set of docIds is where the term occurs
 * @param context An instance of RerankerContext
 * @return Map<String, Double> Top terms and their weight scores in a HashMap
 */
private Map<String, Double> computeTermScore(Map<String, Set<Integer>> termInvertedList,
        RerankerContext<T> context) throws IOException {
    class ScoreComparator implements Comparator<Pair<String, Double>> {
        public int compare(Pair<String, Double> a, Pair<String, Double> b) {
            int cmp = Double.compare(b.getRight(), a.getRight());
            if (cmp == 0) {
                return a.getLeft().compareToIgnoreCase(b.getLeft());
            } else {
                return cmp;
            }
        }
    }

    // get collection statistics so that we can get idf later on.
    IndexReader reader;
    if (this.externalIndexPath != null) {
        Path indexPath = Paths.get(this.externalIndexPath);
        if (!Files.exists(indexPath) || !Files.isDirectory(indexPath) || !Files.isReadable(indexPath)) {
            throw new IllegalArgumentException(
                    this.externalIndexPath + " does not exist or is not a directory.");
        }
        reader = DirectoryReader.open(FSDirectory.open(indexPath));
    } else {
        IndexSearcher searcher = context.getIndexSearcher();
        reader = searcher.getIndexReader();
    }
    final long docCount = reader.numDocs() == -1 ? reader.maxDoc() : reader.numDocs();

    //calculate the Mutual Information between term with each query term
    List<String> queryTerms = context.getQueryTokens();
    Map<String, Integer> queryTermsCounts = new HashMap<>();
    for (String qt : queryTerms) {
        queryTermsCounts.put(qt, queryTermsCounts.getOrDefault(qt, 0) + 1);
    }

    Set<Integer> allDocIds = new HashSet<>();
    for (Set<Integer> s : termInvertedList.values()) {
        allDocIds.addAll(s);
    }
    int docIdsCount = allDocIds.size();

    // Each priority queue corresponds to a query term: The p-queue itself stores all terms
    // in the reranking pool and their reranking scores to the query term.
    List<PriorityQueue<Pair<String, Double>>> allTermScoresPQ = new ArrayList<>();
    for (Map.Entry<String, Integer> q : queryTermsCounts.entrySet()) {
        String queryTerm = q.getKey();
        long df = reader.docFreq(new Term(LuceneDocumentGenerator.FIELD_BODY, queryTerm));
        if (df == 0L) {
            continue;
        }
        float idf = (float) Math.log((1 + docCount) / df);
        int qtf = q.getValue();
        if (termInvertedList.containsKey(queryTerm)) {
            PriorityQueue<Pair<String, Double>> termScorePQ = new PriorityQueue<>(new ScoreComparator());
            double selfMI = computeMutualInformation(termInvertedList.get(queryTerm),
                    termInvertedList.get(queryTerm), docIdsCount);
            for (Map.Entry<String, Set<Integer>> termEntry : termInvertedList.entrySet()) {
                double score;
                if (termEntry.getKey().equals(queryTerm)) { // The mutual information to itself will always be 1
                    score = idf * qtf;
                } else {
                    double crossMI = computeMutualInformation(termInvertedList.get(queryTerm),
                            termEntry.getValue(), docIdsCount);
                    score = idf * beta * qtf * crossMI / selfMI;
                }
                termScorePQ.add(Pair.of(termEntry.getKey(), score));
            }
            allTermScoresPQ.add(termScorePQ);
        }
    }

    Map<String, Double> aggTermScores = new HashMap<>();
    for (PriorityQueue<Pair<String, Double>> termScores : allTermScoresPQ) {
        for (int i = 0; i < Math.min(termScores.size(), this.K); i++) {
            Pair<String, Double> termScore = termScores.poll();
            String term = termScore.getLeft();
            Double score = termScore.getRight();
            if (score - 0.0 > 1e-8) {
                aggTermScores.put(term, aggTermScores.getOrDefault(term, 0.0) + score);
            }
        }
    }
    PriorityQueue<Pair<String, Double>> termScoresPQ = new PriorityQueue<>(new ScoreComparator());
    for (Map.Entry<String, Double> termScore : aggTermScores.entrySet()) {
        termScoresPQ.add(Pair.of(termScore.getKey(), termScore.getValue() / queryTerms.size()));
    }
    Map<String, Double> resultTermScores = new HashMap<>();
    for (int i = 0; i < Math.min(termScoresPQ.size(), this.M); i++) {
        Pair<String, Double> termScore = termScoresPQ.poll();
        String term = termScore.getKey();
        double score = termScore.getValue();
        resultTermScores.put(term, score);
    }

    return resultTermScores;
}
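
The computeMutualInformation helper called above is not part of this excerpt. What follows is a minimal sketch of a four-term expected mutual information, written directly from the formula in the Javadoc; the signature is inferred from the call sites, and Anserini's actual implementation may differ.

// Hypothetical sketch of the helper invoked above. Each summand is
// P(x,y) * log(P(x,y) / (P(x) * P(y))), over the four occur/not-occur combinations.
private double computeMutualInformation(Set<Integer> docIdsX, Set<Integer> docIdsY, int totalDocCount) {
    long both = docIdsX.stream().filter(docIdsY::contains).count(); // docs where t1 and t2 co-occur
    double n = totalDocCount;
    double pX = docIdsX.size() / n;          // P(t1 occurs)
    double pY = docIdsY.size() / n;          // P(t2 occurs)
    double pXY = both / n;                   // P(both occur)
    double pXnotY = pX - pXY;                // P(t1 occurs, t2 does not)
    double pNotXY = pY - pXY;                // P(t1 does not, t2 occurs)
    double pNotXNotY = 1.0 - pX - pY + pXY;  // P(neither occurs)

    return term(pXY, pX, pY)
            + term(pXnotY, pX, 1.0 - pY)
            + term(pNotXY, 1.0 - pX, pY)
            + term(pNotXNotY, 1.0 - pX, 1.0 - pY);
}

// One summand: p * log(p / (pA * pB)), defined as 0 when any probability is 0.
private static double term(double pJoint, double pA, double pB) {
    return (pJoint <= 0 || pA <= 0 || pB <= 0) ? 0.0 : pJoint * Math.log(pJoint / (pA * pB));
}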

From source file:cc.kave.commons.pointsto.analysis.unification.UnificationAnalysisVisitorContext.java

private FunctionLocation createFunctionLocation(ILambdaExpression lambdaExpr, SetRepresentative functionRep) {
    List<IParameterName> lambdaParameters = lambdaExpr.getName().getParameters();
    List<ReferenceLocation> parameterLocations = new ArrayList<>(lambdaParameters.size());

    for (IParameterName formalParameter : lambdaParameters) {
        DistinctReference distRef = new DistinctLambdaParameterReference(formalParameter, lambdaExpr);
        ReferenceLocation formalParameterLocation = getOrCreateLocation(distRef);

        parameterLocations.add(formalParameterLocation);
    }

    Pair<ILambdaExpression, ReferenceLocation> currentLambdaEntry = lambdaStack.getFirst();
    Asserts.assertTrue(lambdaExpr == currentLambdaEntry.getKey());

    return new FunctionLocation(parameterLocations, currentLambdaEntry.getValue(), functionRep);
}

From source file:com.devicehive.service.NetworkServiceTest.java

@Test
@DirtiesContext(methodMode = DirtiesContext.MethodMode.BEFORE_METHOD)
public void should_filter_networks_by_name() throws Exception {
    List<Pair<Long, String>> names = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        NetworkVO network = new NetworkVO();
        network.setName(namePrefix + randomUUID());
        network.setDescription("network description_" + randomUUID());
        NetworkVO created = networkService.create(network);
        assertThat(created.getId(), notNullValue());
        names.add(Pair.of(created.getId(), created.getName()));
    }
    int index = new Random().nextInt(10);
    Pair<Long, String> randomNetwork = names.get(index);
    handleListNetworkRequest();
    networkService.list(randomNetwork.getRight(), null, null, true, 10, 0, null).thenAccept(networks -> {
        assertThat(networks, hasSize(1));
        assertThat(networks.get(0).getId(), equalTo(randomNetwork.getKey()));
        assertThat(networks.get(0).getName(), equalTo(randomNetwork.getRight()));
    }).get(5, TimeUnit.SECONDS);

    verify(requestHandler, times(1)).handle(argument.capture());
}

From source file:com.twinsoft.convertigo.engine.ContextManager.java

private void managePoolContexts() {
    if (Engine.isStudioMode()) {
        return;
    }

    if (!Engine.isStarted) {
        Engine.logContextManager.debug("Engine is stopped => do not manage pools");
        return;
    }

    Engine.logContextManager.debug("Executing vulture thread for context pooling");

    try {
        long timeout = manage_poll_timeout;
        long now = System.currentTimeMillis();
        if (timeout != -1) {
            timeout += now;
        }

        pooledContextsToCreateSet.clear();
        Map<String, Integer> counters = new HashMap<String, Integer>();

        // Create the pooled contexts and initialize the pooled contexts
        // with the auto-start transaction
        for (String projectName : Engine.theApp.databaseObjectsManager.getAllProjectNamesList()) {
            if (!isRunning)
                return;

            Engine.logContextManager.trace("Analyzing project " + projectName);
            Project project = null;
            try {
                project = Engine.theApp.databaseObjectsManager.getProjectByName(projectName);
            } catch (Exception e) {
                Engine.logContextManager.warn(
                        "Unable to load project '" + projectName + "'; avorting pool research for this project",
                        e);
                continue;
            }

            Collection<Connector> vConnectors = project.getConnectorsList();
            Engine.logContextManager.trace("Connectors: " + vConnectors);

            for (Connector connector : vConnectors) {
                if (!isRunning)
                    return;
                Engine.logContextManager.trace("Connector: " + connector);

                Collection<Pool> vPools = connector.getPoolsList();
                Engine.logContextManager.trace("Pools: " + vPools);
                String poolName;
                for (Pool pool : vPools) {
                    if (!isRunning)
                        return;
                    poolName = pool.getName();
                    Engine.logContextManager.trace("Pool: " + poolName);
                    int pooledContexts = pool.getNumberOfContexts();
                    Engine.logContextManager.debug("Pool size: " + pooledContexts);
                    String poolNameWithPath = pool.getNameWithPath();

                    pooledContextsInUse = 0;
                    pooledContextsLocked = 0;
                    pooledContextsZombie = 0;
                    pooledContextsToCreate = 0;
                    counters.put(poolNameWithPath, 0);

                    if (pooledContexts > 0) {
                        for (int i = 1; i <= pool.getNumberOfContexts(); i++) {
                            if (!isRunning)
                                return;
                            Project localProject = Engine.theApp.databaseObjectsManager
                                    .getProjectByName(projectName);
                            Connector localConnector = localProject.getConnectorByName(connector.getName());
                            Pool localPool = localConnector.getPoolByName(pool.getName());
                            String servCode = localPool.getServiceCode();
                            if (servCode != null && !servCode.equals("")) {
                                if (localConnector instanceof JavelinConnector) {
                                    ((JavelinConnector) localConnector).setServiceCode(servCode);
                                    Engine.logContextManager
                                            .trace("Connector service code overridden to : " + servCode);
                                }
                                // TODO add code for each specific connector to use pools serviceCode property
                            }
                            managePoolContext(localProject, localConnector, localPool, i);
                        }

                        int pooledContextsInUsePercentage = 100 * pooledContextsInUse / pooledContexts;
                        int pooledContextsLockedPercentage = 100 * pooledContextsLocked / pooledContexts;

                        String poolStatistics = "Pool '" + poolNameWithPath + "' usage: pool size: "
                                + pooledContexts + "; in use contexts: " + pooledContextsInUse + " ("
                                + pooledContextsInUsePercentage + "%); zombie contexts: "
                                + pooledContextsZombie;

                        if (pooledContextsZombie > 0) {
                            Engine.logContextManager
                                    .warn("Pool '" + poolNameWithPath + "' had zombie contexts!");
                            Engine.logContextManager.warn(poolStatistics);
                        }

                        if (pooledContextsInUsePercentage > 80) {
                            Engine.logContextManager.warn("Pool '" + poolNameWithPath + "' is overloaded!");
                            Engine.logContextManager.warn(poolStatistics);
                        }

                        Engine.theApp.usageMonitor.setUsageCounter("[Pool] '" + poolNameWithPath + "' size",
                                pooledContexts);
                        Engine.theApp.usageMonitor.setUsageCounter(
                                "[Pool] '" + poolNameWithPath + "' in use contexts",
                                pooledContextsInUse + " (" + pooledContextsInUsePercentage + "%)");
                        Engine.theApp.usageMonitor.setUsageCounter(
                                "[Pool] '" + poolNameWithPath + "' locked contexts",
                                pooledContextsLocked + " (" + pooledContextsLockedPercentage + "%)");
                        Engine.theApp.usageMonitor.setUsageCounter(
                                "[Pool] '" + poolNameWithPath + "' zombie contexts", pooledContextsZombie);
                        Engine.theApp.usageMonitor.setUsageCounter(
                                "[Pool] '" + poolNameWithPath + "' to be created contexts",
                                pooledContextsToCreate);
                    }
                }
            }
        }

        for (Pair<Pool, Integer> pooledContextToCreate : pooledContextsToCreateSet) {
            if (!isRunning)
                return;
            String key = pooledContextToCreate.getKey().getNameWithPath();
            createPoolContext(pooledContextToCreate.getKey(), pooledContextToCreate.getValue());
            counters.put(key, counters.get(key) + 1);
            if (timeout != -1 && (now = System.currentTimeMillis()) > timeout)
                break;
        }
        for (Entry<String, Integer> entry : counters.entrySet()) {
            if (!isRunning)
                return;
            Engine.theApp.usageMonitor.setUsageCounter("[Pool] '" + entry.getKey() + "' (re)created contexts",
                    entry.getValue());
        }
    } catch (EngineException e) {
        Engine.logContextManager.error(
                "An unexpected error has occured in the ContextManager vulture while managing the pool contexts.",
                e);
    }

    Engine.logContextManager.debug("Pools creation successfully finished");
}

From source file:com.epam.catgenome.dao.index.FeatureIndexDao.java

/**
 * Deletes features from specified feature files from project's index
 *
 * @param projectId a project to delete index entries
 * @param fileIds a list of Pair of feature types to file Ids, which entries to delete. To delete gene file
 *                entries, pass FeatureType.GENE
 */
public void deleteFromIndexByFileId(final long projectId, List<Pair<FeatureType, Long>> fileIds) {
    if (fileIds == null || fileIds.isEmpty() || !fileManager.indexForProjectExists(projectId)) {
        return;
    }

    try (StandardAnalyzer analyzer = new StandardAnalyzer();
            Directory index = fileManager.getIndexForProject(projectId);
            IndexWriter writer = new IndexWriter(index,
                    new IndexWriterConfig(analyzer).setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND))) {
        if (fileManager.indexForProjectExists(projectId)) {
            for (Pair<FeatureType, Long> id : fileIds) {
                deleteDocumentByTypeAndId(id.getKey(), id.getValue(), writer);
            }
        }
    } catch (IOException e) {
        LOGGER.error("Exception while deleting from index:", e);
    }
}
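
A hypothetical call site for the method above, assembling the (feature type, file id) pairs with Pair.of; the FeatureType constants other than GENE, the ids, and the variable names are illustrative, not taken from the source.

    // Hypothetical caller: build the (type, fileId) pairs and delete their index entries.
    List<Pair<FeatureType, Long>> fileIds = new ArrayList<>();
    fileIds.add(Pair.of(FeatureType.GENE, 42L)); // gene file entries, per the Javadoc above
    fileIds.add(Pair.of(FeatureType.BED, 43L));  // hypothetical second feature file
    featureIndexDao.deleteFromIndexByFileId(projectId, fileIds);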

From source file:com.bluepowermod.part.tube.TubeLogic.java

public boolean retrieveStack(TileEntity target, ForgeDirection dirToRetrieveInto, ItemStack filter,
        TubeColor color) {

    if (tube.getWorld() == null)
        return false;
    TubeStack stack = new TubeStack(filter, null, color);
    stack.setTarget(target, dirToRetrieveInto);

    Pair<ForgeDirection, TileEntity> result = getHeadingForItem(stack, false);
    if (result == null)
        return false;

    int fuzzySetting = 0;
    if (target instanceof IFuzzyRetrieving) {
        fuzzySetting = ((IFuzzyRetrieving) target).getFuzzySetting();
    }

    ItemStack extractedItem = null;
    // Exception for managers: the result can only end up as a manager if the pulling inventory was a manager.
    if (result.getValue() instanceof TileManager) {
        TileEntity managedInventory = ((TileManager) result.getValue())
                .getTileCache(((TileManager) result.getValue()).getFacingDirection());
        extractedItem = IOHelper.extract(managedInventory, result.getKey().getOpposite(), filter, false, false,
                fuzzySetting);
    } else if (filter != null) {
        extractedItem = IOHelper.extract(result.getValue(), result.getKey().getOpposite(), filter,
                !(target instanceof TileManager), false, fuzzySetting);
    } else {
        extractedItem = IOHelper.extract(result.getValue(), result.getKey().getOpposite(), false);
    }
    if (extractedItem == null)
        throw new IllegalArgumentException("This isn't possible!");

    stack = new TubeStack(extractedItem, result.getKey().getOpposite(), color);
    stack.setTarget(target, dirToRetrieveInto);

    PneumaticTube tube = MultipartCompatibility.getPart(this.tube.getWorld(),
            result.getValue().xCoord - result.getKey().offsetX,
            result.getValue().yCoord - result.getKey().offsetY,
            result.getValue().zCoord - result.getKey().offsetZ, PneumaticTube.class);
    if (tube == null)
        throw new IllegalArgumentException("wieeeeerd!");
    return tube.getLogic().injectStack(stack, result.getKey().getOpposite(), false);
}

From source file:com.formkiq.core.service.FolderServiceImpl.java

/**
 * Finds Form Data.
 * @param folder {@link String}
 * @param uuid {@link String}
 * @return {@link Pair}
 * @throws IOException IOException
 */
private Pair<byte[], FolderForm> findFormDataInternal(final String folder, final String uuid)
        throws IOException {

    User user = (User) this.securityService.getUserDetails();
    Pair<FolderForm, FolderAccess> pair = this.folderDao.findForm(user, folder, uuid);

    FolderForm form = pair.getKey();

    if (form != null) {
        byte[] data = this.assetService.findAsset(folder, form.getAssetid().toString());
        return Pair.of(data, form);
    }

    throw new FormNotFoundException("form " + uuid + " not found");
}
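
A hypothetical consumer of this return value; since Pair implements Map.Entry, getKey()/getValue() read the left and right elements respectively (variable names here are illustrative).

    Pair<byte[], FolderForm> result = findFormDataInternal(folder, uuid);
    byte[] data = result.getKey();       // left element: the raw asset bytes
    FolderForm form = result.getValue(); // right element: the form metadata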

From source file:gobblin.data.management.conversion.hive.validation.ValidationJob.java

/***
 * Validate a {@link Table} if it was updated recently by checking if its update time
 * lies within the window between maxLookBackTime and skipRecentThanTime.
 * @param hiveDataset {@link ConvertibleHiveDataset} containing {@link Table} info.
 * @throws IOException Issue in validating {@link HiveDataset}
 */
private void processNonPartitionedTable(final ConvertibleHiveDataset hiveDataset) throws IOException {
    try {
        // Validate table
        final long updateTime = this.updateProvider.getUpdateTime(hiveDataset.getTable());

        log.info(String.format("Validating table: %s", hiveDataset.getTable()));

        for (final String format : hiveDataset.getDestFormats()) {
            Optional<ConvertibleHiveDataset.ConversionConfig> conversionConfigOptional = hiveDataset
                    .getConversionConfigForFormat(format);
            if (conversionConfigOptional.isPresent()) {
                ConvertibleHiveDataset.ConversionConfig conversionConfig = conversionConfigOptional.get();
                String orcTableName = conversionConfig.getDestinationTableName();
                String orcTableDatabase = conversionConfig.getDestinationDbName();
                Pair<Optional<org.apache.hadoop.hive.metastore.api.Table>, Optional<List<Partition>>> destinationMeta = getDestinationTableMeta(
                        orcTableDatabase, orcTableName, this.props);

                // Generate validation queries
                final List<String> validationQueries = HiveValidationQueryGenerator
                        .generateCountValidationQueries(hiveDataset, Optional.<Partition>absent(),
                                conversionConfig);
                final List<String> dataValidationQueries = Lists.newArrayList(HiveValidationQueryGenerator
                        .generateDataValidationQuery(hiveDataset.getTable().getTableName(),
                                hiveDataset.getTable().getDbName(), destinationMeta.getKey().get(),
                                Optional.<Partition>absent(), this.isNestedORC));

                this.futures.add(this.exec.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {

                        // Execute validation queries
                        log.debug(String.format("Going to execute queries: %s for format: %s",
                                validationQueries, format));
                        List<Long> rowCounts = ValidationJob.this
                                .getValidationOutputFromHive(validationQueries);
                        log.debug(String.format("Going to execute queries: %s for format: %s",
                                dataValidationQueries, format));
                        List<Long> rowDataValidatedCount = ValidationJob.this
                                .getValidationOutputFromHive(dataValidationQueries);
                        // Validate and populate report
                        validateAndPopulateReport(hiveDataset.getTable().getCompleteName(), updateTime,
                                rowCounts, rowDataValidatedCount.get(0));

                        return null;
                    }
                }));
            } else {
                log.warn(String.format("No config found for format: %s So skipping table: %s for this format",
                        format, hiveDataset.getTable().getCompleteName()));
            }
        }
    } catch (UncheckedExecutionException e) {
        log.warn(String.format("Not validating table: %s %s", hiveDataset.getTable().getCompleteName(),
                e.getMessage()));
    } catch (UpdateNotFoundException e) {
        log.warn(String.format("Not validating table: %s as update time was not found. %s",
                hiveDataset.getTable().getCompleteName(), e.getMessage()));
    }
}

From source file:de.ks.flatadocdb.metamodel.Parser.java

public EntityDescriptor parse(Class<?> clazz) throws ParseException {
    Entity annotation = checkEntityAnnotation(clazz);
    EntityPersister persister = getInstance(annotation.persister());
    FileGenerator fileGenerator = getInstance(annotation.fileGenerator());
    FolderGenerator folderGenerator = getInstance(annotation.folderGenerator());
    LuceneDocumentExtractor luceneDocumentExtractor = getInstance(annotation.luceneDocExtractor());

    @SuppressWarnings("unchecked")
    Set<Field> allFields = ReflectionUtils.getAllFields(clazz, this::filterField);

    MethodHandle idGetterHandle = resolveIdFieldGetter(clazz, allFields);
    MethodHandle idSetterHandle = resolveIdFieldSetter(clazz, allFields);
    MethodHandle versionGetterHandle = resolveVersionFieldGetter(clazz, allFields);
    MethodHandle versionSetterHandle = resolveVersionFieldSetter(clazz, allFields);
    MethodHandle naturalIdHandle = resolveNaturalIdField(clazz, allFields);
    Pair<MethodHandle, MethodHandle> pathInRepo = resolvePathInRepoField(clazz, allFields);

    Map<Field, PropertyPersister> propertyPersisters = resolvePropertyPersisters(allFields);
    Map<LifeCycle, Set<MethodHandle>> lifecycleMethods = new LifeCycleParser().parseMethods(clazz);

    RelationParser relationParser = new RelationParser();

    QueryParser queryParser = new QueryParser();

    EntityDescriptor.Builder builder = EntityDescriptor.Builder.create();
    builder.entity(clazz);
    builder.id(idGetterHandle, idSetterHandle);
    builder.version(versionGetterHandle, versionSetterHandle);
    builder.natural(naturalIdHandle);
    builder.persister(persister);
    builder.extractor(luceneDocumentExtractor);
    builder.fileGenerator(fileGenerator);
    builder.folderGenerator(folderGenerator);
    builder.properties(propertyPersisters);
    builder.lifecycle(lifecycleMethods);
    builder.toOnes(relationParser.parseToOneRelations(clazz));
    builder.toMany(relationParser.parseToManyRelations(clazz));
    builder.toOneChild(relationParser.parseToOneChildRelations(clazz));
    builder.toManyChild(relationParser.parseToManyChildRelations(clazz));
    builder.queries(queryParser.getQueries(clazz));
    if (pathInRepo != null) {
        builder.pathInRepo(pathInRepo.getKey(), pathInRepo.getValue());
    }
    return builder.build();
}
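
Here the Pair simply bundles a getter and setter MethodHandle for the pathInRepo field, with getKey() returning the getter and getValue() the setter. A contrived sketch of the same pattern; the class and field names are made up for illustration.

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;

import org.apache.commons.lang3.tuple.Pair;

class Example {
    String path;

    // Bundle the (getter, setter) handles for a field in a Pair, as the parser above does.
    static Pair<MethodHandle, MethodHandle> pathAccessors() throws ReflectiveOperationException {
        MethodHandles.Lookup lookup = MethodHandles.lookup();
        MethodHandle getter = lookup.findGetter(Example.class, "path", String.class);
        MethodHandle setter = lookup.findSetter(Example.class, "path", String.class);
        // getKey() is the getter (left element), getValue() the setter (right element).
        return Pair.of(getter, setter);
    }
}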