Example usage for java.lang.InterruptedException.getClass()

Introduction

This page lists usage examples for java.lang.InterruptedException.getClass().

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Document

Returns the runtime class of this Object.
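
For reference, here is a minimal, self-contained sketch of the pattern most of the examples below follow: the runtime class of a caught InterruptedException is used to build a diagnostic message. The class name and output target are illustrative, not taken from any of the listed projects.

public class InterruptedExceptionGetClassExample {
    public static void main(String[] args) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // getClass() returns the runtime class of the exception,
            // i.e. java.lang.InterruptedException
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
            // restore the interrupt status after handling the exception
            Thread.currentThread().interrupt();
        }
    }
}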

Usage

From source file:no.sintef.jarfter.Jarfter.java

/**
 * Encapsulates the use of ProcessBuilder.
 * @param command
 * @param arguments
 * @throws IOException
 * @throws InterruptedException 
 */
private void runCommand(String command, String... arguments) throws JarfterException {
    log("runCommand - Starting " + command + "...\n");
    List<String> commandList = new ArrayList<String>();
    commandList.add(command);
    for (String argument : arguments) {
        commandList.add(argument);
    }

    ProcessBuilder procBuilder = new ProcessBuilder(commandList);
    Process detachedProc = null;
    try {
        detachedProc = procBuilder.start();
    } catch (IOException ioe) {
        log("runCommand - Could not start the detachedProc...");
        error(ioe);
        throw new JarfterException();
    }

    String line;
    String stdout = "";
    String stderr = "";

    try {
        // Reading output
        BufferedReader outputReader = new BufferedReader(new InputStreamReader(detachedProc.getInputStream()));
        while ((line = outputReader.readLine()) != null) {
            stdout += line;
        }
        outputReader.close();

        // Reading error
        BufferedReader errorReader = new BufferedReader(new InputStreamReader(detachedProc.getErrorStream()));
        while ((line = errorReader.readLine()) != null) {
            stderr += line;
        }
        errorReader.close();

    } catch (IOException ioe) {
        log("runCommand - caught exception while reading stdout and stderr...");
        error(ioe);
        throw new JarfterException(JarfterException.Error.IO_PROCESS_OUTPUT);
    }

    log("runCommand - stdout:\n" + stdout);
    log("runCommand - stderr:\n" + stderr);

    try {
        detachedProc.waitFor();
    } catch (InterruptedException interruption) {
        log("runCommand - caught InterruptedException from detachedProc.waitFor()...");
        error(interruption);
        throw new JarfterException(interruption.getClass().getName(), interruption.getLocalizedMessage());
    }
    detachedProc.destroy();

    if (!stderr.equals("")) {
        runCommandAnalyzeStderr(command, stderr);
    }

}

From source file:com.aol.advertising.qiao.injector.PatternMatchFileInjector.java

private void quarantine(Path file, long checksum) {
    try {
        if (checksum == 0)
            checksum = CommonUtils.checksumOptionalylUseFileLength(file.toFile(), checksumByteLength);

        quarantineFileHandler.moveFileToQuarantineDirIfExists(file, checksum);
    } catch (InterruptedException e) {
    } catch (IOException e) {
        logger.warn(e.getClass().getName() + ": " + e.getMessage());
    }
}

From source file:org.apache.manifoldcf.agents.output.hdfs.HDFSOutputConnector.java

protected void createFile(Path path, InputStream input, IOutputAddActivity activities, String documentURI)
        throws ManifoldCFException, ServiceInterruption {
    CreateFileThread t = new CreateFileThread(getSession(), path, input);
    String errorCode = null;
    String errorDesc = null;
    try {
        t.start();
        t.finishUp();
    } catch (InterruptedException e) {
        t.interrupt();
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        throw new ManifoldCFException("Interrupted: " + e.getMessage(), e, ManifoldCFException.INTERRUPTED);
    } catch (java.net.SocketTimeoutException e) {
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        handleIOException(e);
    } catch (InterruptedIOException e) {
        t.interrupt();
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        handleIOException(e);
    } catch (IOException e) {
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        handleIOException(e);
    } finally {
        if (errorCode != null & errorDesc != null) {
            activities.recordActivity(null, INGEST_ACTIVITY, null, documentURI, errorCode, errorDesc);
        }
    }
}

From source file:org.apache.manifoldcf.agents.output.hdfs.HDFSOutputConnector.java

protected void deleteFile(Path path, IOutputRemoveActivity activities, String documentURI)
        throws ManifoldCFException, ServiceInterruption {
    // Establish a session
    DeleteFileThread t = new DeleteFileThread(getSession(), path);
    String errorCode = null;
    String errorDesc = null;
    try {
        t.start();
        t.finishUp();
    } catch (InterruptedException e) {
        t.interrupt();
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        throw new ManifoldCFException("Interrupted: " + e.getMessage(), e, ManifoldCFException.INTERRUPTED);
    } catch (java.net.SocketTimeoutException e) {
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        handleIOException(e);
    } catch (InterruptedIOException e) {
        t.interrupt();
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        handleIOException(e);
    } catch (IOException e) {
        errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT);
        errorDesc = "Failed to write document due to: " + e.getMessage();
        handleIOException(e);
    } finally {
        if (errorCode != null & errorDesc != null) {
            activities.recordActivity(null, REMOVE_ACTIVITY, null, documentURI, errorCode, errorDesc);
        }
    }
}

From source file:org.scilla.core.MediaStream.java

public MediaStream(String filename, RunnerObject runner) throws ScillaException {
    this.filename = filename;
    this.runner = runner;

    // make sure input file exists
    File f = new File(filename);
    if (!f.exists()) {
        if (runner != null && !runner.hasFinished()) {
            // wait for file to appear
            int timeout = timeoutForFile / sleepForFile;
            for (; timeout > 0 && !f.exists() && !runner.hasFinished(); timeout--) {
                try {
                    Thread.sleep(sleepForFile);
                } catch (InterruptedException ex) {
                    // ignore
                }
            }
            if (timeout <= 0) {
                throw new ScillaException("timeout waiting for output");
            }
        }
        // did runner leave any output?
        if (!f.exists()) {
            String err = runner != null ? runner.getErrorMessage() : null;
            if (err != null) {
                throw new ScillaConversionFailedException(err);
            } else {
                throw new ScillaNoOutputException();
            }
        }
    }

    // open file for reading
    try {
        in = new FileInputStream(f);
    } catch (FileNotFoundException ex) {
        // will never happen?
        log.error(ex);
        throw new RuntimeException("unexpected " + ex.getClass().getName() + " for " + filename);
    }
}

From source file:voldemort.store.readonly.fetcher.HdfsFetcher.java

public File fetch(String sourceFileUrl, String destinationFile, String hadoopConfigPath) throws IOException {
    if (this.globalThrottleLimit != null) {
        if (this.globalThrottleLimit.getSpeculativeRate() < this.minBytesPerSecond)
            throw new VoldemortException("Too many push jobs.");
        this.globalThrottleLimit.incrementNumJobs();
    }

    ObjectName jmxName = null;
    try {

        final Configuration config = new Configuration();
        FileSystem fs = null;
        config.setInt("io.socket.receive.buffer", bufferSize);
        config.set("hadoop.rpc.socket.factory.class.ClientProtocol", ConfigurableSocketFactory.class.getName());
        config.set("hadoop.security.group.mapping", "org.apache.hadoop.security.ShellBasedUnixGroupsMapping");

        final Path path = new Path(sourceFileUrl);

        boolean isHftpBasedFetch = sourceFileUrl.length() > 4 && sourceFileUrl.substring(0, 4).equals("hftp");
        logger.info("URL : " + sourceFileUrl + " and hftp protocol enabled = " + isHftpBasedFetch);
        logger.info("Hadoop path = " + hadoopConfigPath + " , keytab path = " + HdfsFetcher.keytabPath
                + " , kerberos principal = " + HdfsFetcher.kerberosPrincipal);

        if (hadoopConfigPath.length() > 0 && !isHftpBasedFetch) {

            config.addResource(new Path(hadoopConfigPath + "/core-site.xml"));
            config.addResource(new Path(hadoopConfigPath + "/hdfs-site.xml"));

            String security = config.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION);

            if (security == null || !security.equals("kerberos")) {
                logger.error("Security isn't turned on in the conf: "
                        + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION + " = "
                        + config.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION));
                logger.error("Please make sure that the Hadoop config directory path is valid.");
                throw new VoldemortException(
                        "Error in getting Hadoop filesystem. Invalid Hadoop config directory path.");
            } else {
                logger.info("Security is turned on in the conf. Trying to authenticate ...");

            }
        }

        if (HdfsFetcher.keytabPath.length() > 0 && !isHftpBasedFetch) {

            /*
             * We're seeing intermittent errors while trying to get the
             * Hadoop filesystem in a privileged doAs block. This happens
             * when we fetch the files over hdfs or webhdfs. This retry loop
             * is inserted here as a temporary measure.
             */
            for (int attempt = 0; attempt < maxAttempts; attempt++) {
                boolean isValidFilesystem = false;

                if (!new File(HdfsFetcher.keytabPath).exists()) {
                    logger.error("Invalid keytab file path. Please provide a valid keytab path");
                    throw new VoldemortException(
                            "Error in getting Hadoop filesystem. Invalid keytab file path.");
                }

                /*
                 * The Hadoop path for getting a Filesystem object in a
                 * privileged doAs block is not thread safe. This might be
                 * causing intermittent NPE exceptions. Adding a
                 * synchronized block.
                 */
                synchronized (this) {
                    /*
                     * First login using the specified principal and keytab
                     * file
                     */
                    UserGroupInformation.setConfiguration(config);
                    UserGroupInformation.loginUserFromKeytab(HdfsFetcher.kerberosPrincipal,
                            HdfsFetcher.keytabPath);

                    /*
                     * If login is successful, get the filesystem object.
                     * NOTE: Ideally we do not need a doAs block for this.
                     * Consider removing it in the future once the Hadoop
                     * jars have the corresponding patch (tracked in the
                     * Hadoop Apache project: HDFS-3367)
                     */
                    try {
                        logger.info("I've logged in and am now Doasing as "
                                + UserGroupInformation.getCurrentUser().getUserName());
                        fs = UserGroupInformation.getCurrentUser()
                                .doAs(new PrivilegedExceptionAction<FileSystem>() {

                                    @Override
                                    public FileSystem run() throws Exception {
                                        FileSystem fs = path.getFileSystem(config);
                                        return fs;
                                    }
                                });
                        isValidFilesystem = true;
                    } catch (InterruptedException e) {
                        logger.error(e.getMessage(), e);
                    } catch (Exception e) {
                        logger.error("Got an exception while getting the filesystem object: ");
                        logger.error("Exception class : " + e.getClass());
                        e.printStackTrace();
                        for (StackTraceElement et : e.getStackTrace()) {
                            logger.error(et.toString());
                        }
                    }
                }

                if (isValidFilesystem) {
                    break;
                } else if (attempt < maxAttempts - 1) {
                    logger.error(
                            "Attempt#" + attempt + " Could not get a valid Filesystem object. Trying again in "
                                    + retryDelayMs + " ms");
                    sleepForRetryDelayMs();
                }
            }
        } else {
            fs = path.getFileSystem(config);
        }

        CopyStats stats = new CopyStats(sourceFileUrl, sizeOfPath(fs, path));
        jmxName = JmxUtils.registerMbean("hdfs-copy-" + copyCount.getAndIncrement(), stats);
        File destination = new File(destinationFile);

        if (destination.exists()) {
            throw new VoldemortException(
                    "Version directory " + destination.getAbsolutePath() + " already exists");
        }

        logger.info("Starting fetch for : " + sourceFileUrl);
        boolean result = fetch(fs, path, destination, stats);
        logger.info("Completed fetch : " + sourceFileUrl);

        // Close the filesystem
        fs.close();

        if (result) {
            return destination;
        } else {
            return null;
        }
    } catch (Throwable te) {
        te.printStackTrace();
        logger.error("Error thrown while trying to get data from Hadoop filesystem", te);
        throw new VoldemortException("Error thrown while trying to get data from Hadoop filesystem : " + te);
    } finally {
        if (this.globalThrottleLimit != null) {
            this.globalThrottleLimit.decrementNumJobs();
        }
        if (jmxName != null)
            JmxUtils.unregisterMbean(jmxName);
    }
}

From source file:com.searchcode.app.jobs.IndexSvnRepoJob.java

/**
 * Indexes all the documents in the repository changed file effectively performing a delta update
 * Should only be called when there is a genuine update, i.e. something was indexed previously and
 * has a new commit.
 */
public void indexDocsByDelta(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        RepositoryChanged repositoryChanged) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);

    Singleton.getLogger().info("Repository Changed File List " + repositoryChanged.getChangedFiles());

    for (String changedFile : repositoryChanged.getChangedFiles()) {

        Singleton.getLogger().info("Indexing " + changedFile + " in " + repoName);

        while (CodeIndexer.shouldPauseAdding()) {
            Singleton.getLogger().info("Pausing parser.");
            try {
                Thread.sleep(SLEEPTIME);
            } catch (InterruptedException ex) {
            }
        }

        String[] split = changedFile.split("/");
        String fileName = split[split.length - 1];
        changedFile = fileRepoLocations + repoName + "/" + changedFile;

        String md5Hash = Values.EMPTYSTRING;
        List<String> codeLines = null;

        try {
            codeLines = Helpers.readFileLines(changedFile, MAXFILELINEDEPTH);
        } catch (IOException ex) {
            Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                    + "\n with message: " + ex.getMessage());
            break;
        }

        try {
            FileInputStream fis = new FileInputStream(new File(changedFile));
            md5Hash = org.apache.commons.codec.digest.DigestUtils.md5Hex(fis);
            fis.close();
        } catch (IOException ex) {
            Singleton.getLogger().warning("Unable to generate MD5 for " + changedFile);
        }

        if (scl.isMinified(codeLines)) {
            Singleton.getLogger().info("Appears to be minified will not index  " + changedFile);
            break;
        }

        String languageName = scl.languageGuesser(changedFile, codeLines);
        String fileLocation = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING).replace(fileName,
                Values.EMPTYSTRING);
        String fileLocationFilename = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING);
        String repoLocationRepoNameLocationFilename = changedFile;

        String newString = getBlameFilePath(fileLocationFilename);
        String codeOwner = getInfoExternal(codeLines.size(), repoName, fileRepoLocations, newString).getName();

        if (codeLines != null) {
            if (this.LOWMEMORY) {
                try {
                    CodeIndexer.indexDocument(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                } catch (IOException ex) {
                    Singleton.incrementCodeIndexLinesCount(codeLines.size());
                    Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + "\n with message: " + ex.getMessage());
                }
            } else {
                codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename, repoName,
                        fileName, fileLocation, fileLocationFilename, md5Hash, languageName, codeLines.size(),
                        StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
            }
        }
    }

    for (String deletedFile : repositoryChanged.getDeletedFiles()) {
        Singleton.getLogger().info("Missing from disk, removing from index " + deletedFile);
        try {
            CodeIndexer.deleteByFileLocationFilename(deletedFile);
        } catch (IOException ex) {
            Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                    + "\n with message: " + ex.getMessage());
        }
    }
}

From source file:org.paxle.filter.robots.impl.RobotsTxtManager.java

/**
 * Check a list of {@link URI URI} against the robots.txt file of the servers hosting the {@link URI}.
 * @param hostPort the web-server hosting the {@link URI URIs}
 * @param urlList a list of {@link URI}
 * 
 * @return all {@link URI} that are blocked by the servers
 */
public List<URI> isDisallowed(Collection<URI> urlList) {
    if (urlList == null)
        throw new NullPointerException("The URI-list is null.");

    // group the URL list based on hostname:port
    HashMap<URI, List<URI>> uriBlocks = this.groupURI(urlList);
    ArrayList<URI> disallowedURI = new ArrayList<URI>();

    /*
     * Asynchronous execution and parallel check of all blocks 
     */
    final CompletionService<Collection<URI>> execCompletionService = new ExecutorCompletionService<Collection<URI>>(
            this.execService);

    // loop through the blocks and start a worker for each block
    for (Entry<URI, List<URI>> uriBlock : uriBlocks.entrySet()) {
        URI baseUri = uriBlock.getKey();
        List<URI> uriList = uriBlock.getValue();
        execCompletionService.submit(new RobotsTxtManagerCallable(baseUri, uriList));
    }

    // wait for the worker-threads to finish execution
    for (int i = 0; i < uriBlocks.size(); ++i) {
        try {
            Collection<URI> disallowedInGroup = execCompletionService.take().get();
            if (disallowedInGroup != null) {
                disallowedURI.addAll(disallowedInGroup);
            }
        } catch (InterruptedException e) {
            this.logger.info(String.format("Interruption detected while waiting for robots.txt-check result."));
            // XXX should we break here?
        } catch (ExecutionException e) {
            this.logger.error(
                    String.format("Unexpected '%s' while performing robots.txt check.", e.getClass().getName()),
                    e);
        }
    }

    return disallowedURI;
}

From source file:com.searchcode.app.jobs.IndexGitRepoJob.java

/**
 * Indexes all the documents in the repository changed file effectively performing a delta update
 * Should only be called when there is a genuine update, i.e. something was indexed previously and
 * has a new commit.
 */
public void indexDocsByDelta(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        RepositoryChanged repositoryChanged) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);

    for (String changedFile : repositoryChanged.getChangedFiles()) {

        while (CodeIndexer.shouldPauseAdding()) {
            Singleton.getLogger().info("Pausing parser.");
            try {
                Thread.sleep(SLEEPTIME);
            } catch (InterruptedException ex) {
            }
        }

        String[] split = changedFile.split("/");
        String fileName = split[split.length - 1];
        changedFile = fileRepoLocations + "/" + repoName + "/" + changedFile;

        String md5Hash = Values.EMPTYSTRING;
        List<String> codeLines = null;

        try {
            codeLines = Helpers.readFileLines(changedFile, this.MAXFILELINEDEPTH);
        } catch (IOException ex) {
            Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                    + "\n with message: " + ex.getMessage());
            break;
        }

        try {
            FileInputStream fis = new FileInputStream(new File(changedFile));
            md5Hash = org.apache.commons.codec.digest.DigestUtils.md5Hex(fis);
            fis.close();
        } catch (IOException ex) {
            Singleton.getLogger().warning("Unable to generate MD5 for " + changedFile);
        }

        if (scl.isMinified(codeLines)) {
            Singleton.getLogger().info("Appears to be minified will not index  " + changedFile);
            break;
        }

        String languageName = scl.languageGuesser(changedFile, codeLines);
        String fileLocation = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING).replace(fileName,
                Values.EMPTYSTRING);
        String fileLocationFilename = changedFile.replace(fileRepoLocations, Values.EMPTYSTRING);
        String repoLocationRepoNameLocationFilename = changedFile;

        String newString = getBlameFilePath(fileLocationFilename);
        List<CodeOwner> owners;
        if (this.USESYSTEMGIT) {
            owners = getBlameInfoExternal(codeLines.size(), repoName, fileRepoLocations, newString);
        } else {
            owners = getBlameInfo(codeLines.size(), repoName, fileRepoLocations, newString);
        }
        String codeOwner = scl.codeOwner(owners);

        if (codeLines != null) {
            if (this.LOWMEMORY) {
                try {
                    CodeIndexer.indexDocument(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                } catch (IOException ex) {
                    Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + "\n with message: " + ex.getMessage());
                }
            } else {
                Singleton.incrementCodeIndexLinesCount(codeLines.size());
                codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename, repoName,
                        fileName, fileLocation, fileLocationFilename, md5Hash, languageName, codeLines.size(),
                        StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
            }
        }
    }

    for (String deletedFile : repositoryChanged.getDeletedFiles()) {
        Singleton.getLogger().info("Missing from disk, removing from index " + deletedFile);
        try {
            CodeIndexer.deleteByFileLocationFilename(deletedFile);
        } catch (IOException ex) {
            Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                    + "\n with message: " + ex.getMessage());
        }
    }
}

From source file:com.amalto.core.server.DefaultItem.java

/**
 * Extract results through a view and transform them using a transformer<br/>
 * This call is asynchronous and results will be pushed via the passed {@link com.amalto.core.objects.transformers.util.TransformerCallBack}
 *
 * @param dataClusterPOJOPK The Data Cluster where to run the query
 * @param transformerPOJOPK The transformer to use
 * @param viewPOJOPK A filtering view
 * @param whereItem The condition
 * @param spellThreshold The condition spell checking threshold. A negative value de-activates spell
 * @param orderBy The full path of the item used to order
 * @param direction One of {@link com.amalto.xmlserver.interfaces.IXmlServerSLWrapper#ORDER_ASCENDING} or
 * {@link com.amalto.xmlserver.interfaces.IXmlServerSLWrapper#ORDER_DESCENDING}
 * @param start The first item index (starts at zero)
 * @param limit The maximum number of items to return
 */
@Override
public TransformerContext extractUsingTransformerThroughView(DataClusterPOJOPK dataClusterPOJOPK,
        TransformerV2POJOPK transformerPOJOPK, ViewPOJOPK viewPOJOPK, IWhereItem whereItem, int spellThreshold,
        String orderBy, String direction, int start, int limit) throws XtentisException {
    try {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("extractUsingTransformerThroughView() ");
        }
        TransformerContext context = new TransformerContext(transformerPOJOPK);
        ArrayList<TypedContent> content = new ArrayList<TypedContent>();
        context.put("com.amalto.core.itemctrl2.content", content); //$NON-NLS-1$
        context.put("com.amalto.core.itemctrl2.ready", false); //$NON-NLS-1$
        TransformerCallBack globalCallBack = new TransformerCallBack() {
            @Override
            public void contentIsReady(TransformerContext context) throws XtentisException {
            }

            @Override
            public void done(TransformerContext context) throws XtentisException {
                context.put("com.amalto.core.itemctrl2.ready", true); //$NON-NLS-1$
            }
        };
        extractUsingTransformerThroughView(dataClusterPOJOPK, context, globalCallBack, viewPOJOPK, whereItem,
                spellThreshold, orderBy, direction, start, limit);
        while (!(Boolean) context.get("com.amalto.core.itemctrl2.ready")) {
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                LOGGER.error("Error while waiting for transformer's end", e);
            }
        }
        return context;
    } catch (XtentisException e) {
        throw (e);
    } catch (Exception e) {
        String err = "Unable to extract items using transformer " + transformerPOJOPK.getUniqueId()
                + " through view " + viewPOJOPK.getUniqueId() + ": " + e.getClass().getName() + ": "
                + e.getLocalizedMessage();
        LOGGER.error(err, e);
        throw new XtentisException(err, e);
    }
}