Example usage for java.lang InterruptedException getCause

List of usage examples for java.lang InterruptedException getCause

Introduction

On this page you can find example usages of java.lang.InterruptedException.getCause().

Prototype

public synchronized Throwable getCause() 

Source Link

Document

Returns the cause of this throwable or null if the cause is nonexistent or unknown.

Usage

From source file:org.apache.solr.client.solrj.impl.CloudSolrServer.java

/**
 * Optimized update path for SolrCloud: routes each document directly to the
 * leader of the shard it belongs to, instead of sending the whole request to
 * one node and letting that node forward.
 *
 * Returns {@code null} whenever the optimization cannot be applied (implicit
 * router, leaders not known yet, or the request is not routable); the caller
 * then falls back to the general, unoptimized update path.
 *
 * @param request      the update request; must actually be an UpdateRequest
 * @param clusterState current view of the cluster, used to resolve the
 *                     collection, its router and its slices
 * @return a RouteResponse condensing the per-shard responses, or null to
 *         signal "use the unoptimized path"
 * @throws SolrServerException if no collection is specified (and no default
 *         collection is set), or a serial shard request fails
 */
private NamedList directUpdate(AbstractUpdateRequest request, ClusterState clusterState)
        throws SolrServerException {
    UpdateRequest updateRequest = (UpdateRequest) request;
    ModifiableSolrParams params = (ModifiableSolrParams) request.getParams();
    ModifiableSolrParams routableParams = new ModifiableSolrParams();
    ModifiableSolrParams nonRoutableParams = new ModifiableSolrParams();

    if (params != null) {
        // Split the params: routed per-shard requests must not carry the
        // non-routable ones (those are sent separately below).
        nonRoutableParams.add(params);
        routableParams.add(params);
        for (String param : NON_ROUTABLE_PARAMS) {
            routableParams.remove(param);
        }
    }

    String collection = nonRoutableParams.get(UpdateParams.COLLECTION, defaultCollection);
    if (collection == null) {
        throw new SolrServerException(
                "No collection param specified on request and no default collection has been set.");
    }

    //Check to see if the collection is an alias.
    Aliases aliases = zkStateReader.getAliases();
    if (aliases != null) {
        Map<String, String> collectionAliases = aliases.getCollectionAliasMap();
        if (collectionAliases != null && collectionAliases.containsKey(collection)) {
            collection = collectionAliases.get(collection);
        }
    }

    DocCollection col = clusterState.getCollection(collection);

    DocRouter router = col.getRouter();

    if (router instanceof ImplicitDocRouter) {
        // short circuit as optimization
        return null;
    }

    //Create the URL map, which is keyed on slice name.
    //The value is a list of URLs for each replica in the slice.
    //The first value in the list is the leader for the slice.
    Map<String, List<String>> urlMap = buildUrlMap(col);
    if (urlMap == null) {
        // we could not find a leader yet - use unoptimized general path
        return null;
    }

    NamedList<Throwable> exceptions = new NamedList<Throwable>();
    NamedList<NamedList> shardResponses = new NamedList<NamedList>();

    Map<String, LBHttpSolrServer.Req> routes = updateRequest.getRoutes(router, col, urlMap, routableParams,
            this.idField);
    if (routes == null) {
        // request could not be routed - fall back to the general path
        return null;
    }

    long start = System.nanoTime();

    if (parallelUpdates) {
        // Fire all shard requests concurrently, then collect the results.
        final Map<String, Future<NamedList<?>>> responseFutures = new HashMap<>(routes.size());
        for (final Map.Entry<String, LBHttpSolrServer.Req> entry : routes.entrySet()) {
            final String url = entry.getKey();
            final LBHttpSolrServer.Req lbRequest = entry.getValue();
            responseFutures.put(url, threadPool.submit(new Callable<NamedList<?>>() {
                @Override
                public NamedList<?> call() throws Exception {
                    return lbServer.request(lbRequest).getResponse();
                }
            }));
        }

        for (final Map.Entry<String, Future<NamedList<?>>> entry : responseFutures.entrySet()) {
            final String url = entry.getKey();
            final Future<NamedList<?>> responseFuture = entry.getValue();
            try {
                shardResponses.add(url, responseFuture.get());
            } catch (InterruptedException e) {
                // restore interrupt status before bailing out
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } catch (ExecutionException e) {
                // collect per-shard failures; reported together below
                exceptions.add(url, e.getCause());
            }
        }

        if (exceptions.size() > 0) {
            throw new RouteException(ErrorCode.SERVER_ERROR, exceptions, routes);
        }
    } else {
        // Serial path: the first failure aborts the whole update.
        for (Map.Entry<String, LBHttpSolrServer.Req> entry : routes.entrySet()) {
            String url = entry.getKey();
            LBHttpSolrServer.Req lbRequest = entry.getValue();
            try {
                NamedList rsp = lbServer.request(lbRequest).getResponse();
                shardResponses.add(url, rsp);
            } catch (Exception e) {
                throw new SolrServerException(e);
            }
        }
    }

    UpdateRequest nonRoutableRequest = null;
    List<String> deleteQuery = updateRequest.getDeleteQuery();
    if (deleteQuery != null && deleteQuery.size() > 0) {
        // delete-by-query cannot be routed to a single shard
        UpdateRequest deleteQueryRequest = new UpdateRequest();
        deleteQueryRequest.setDeleteQuery(deleteQuery);
        nonRoutableRequest = deleteQueryRequest;
    }

    Set<String> paramNames = nonRoutableParams.getParameterNames();

    Set<String> intersection = new HashSet<>(paramNames);
    intersection.retainAll(NON_ROUTABLE_PARAMS);

    if (nonRoutableRequest != null || intersection.size() > 0) {
        if (nonRoutableRequest == null) {
            nonRoutableRequest = new UpdateRequest();
        }
        nonRoutableRequest.setParams(nonRoutableParams);
        List<String> urlList = new ArrayList<>();
        urlList.addAll(routes.keySet());
        // send the non-routable part to a random node of the routed set
        Collections.shuffle(urlList, rand);
        LBHttpSolrServer.Req req = new LBHttpSolrServer.Req(nonRoutableRequest, urlList);
        try {
            LBHttpSolrServer.Rsp rsp = lbServer.request(req);
            shardResponses.add(urlList.get(0), rsp.getResponse());
        } catch (Exception e) {
            throw new SolrException(ErrorCode.SERVER_ERROR, urlList.get(0), e);
        }
    }

    long end = System.nanoTime();

    // elapsed nanoseconds -> milliseconds
    RouteResponse rr = condenseResponse(shardResponses, (long) ((end - start) / 1000000));
    rr.setRouteResponses(shardResponses);
    rr.setRoutes(routes);
    return rr;
}

From source file:burlov.ultracipher.swing.SwingGuiApplication.java

/**
 * Uploads the data to the sync account (if configured) and then saves it
 * locally.
 *
 * @return {@code true} if everything completed without errors
 */
private boolean saveDatabase() {
    if (core.getCurrentCryptor() == null) {
        if (!createNewCryptor(true)) {
            return false;
        }
    }
    if (core.getSyncCredentials() != null) {
        if (!synced) {
            /*
             * Sync with fresh data first; otherwise not-yet-synced changes
             * could be overwritten by the upload.
             */
            downloadAndMergeData();
        }
        Callable<String> callable = new Callable<String>() {

            @Override
            public String call() throws Exception {
                // Push the current data to the sync account
                System.out.println("Upload data to " + core.getSyncCredentials().getEmailaddress());
                core.save(core.getSyncCredentials());
                return null;
            }
        };
        CallableTask<String> task = new CallableTask<String>(callable);
        WaitDialog dlg = new WaitDialog(getMainFrame(), "Upload data", task, 0, 0);
        dlg.start();
        try {
            task.get();
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status so callers can observe the
            // interruption (it was previously swallowed).
            Thread.currentThread().interrupt();
            e.printStackTrace();
            return false;
        } catch (ExecutionException e) {
            e.printStackTrace();
            // The real failure is the cause wrapped by the ExecutionException.
            showError("Upload failed", e.getCause());
            return false;
        }
    }
    setNeedSave(!localSaveData());
    return !hasChanges;
}

From source file:org.opennaas.extensions.vcpe.manager.VCPENetworkManager.java

/**
 * Returns the result of the (already finished) build task for the given
 * resource, removing the pending future afterwards.
 *
 * @param resourceId id of the resource whose build result is requested
 * @return the boolean outcome of the build task
 * @throws VCPENetworkManagerException if the build has not finished, no task
 *         exists for the resource, or the task failed/was interrupted
 */
@Override
public boolean getBuildResult(String resourceId) throws VCPENetworkManagerException {
    if (!hasFinishedBuild(resourceId)) {
        throw new VCPENetworkManagerException("Build task has not yet finished.");
    }

    Future<Boolean> f = futures.get(resourceId);
    if (f == null) {
        throw new VCPENetworkManagerException("No building task for resource " + resourceId);
    }

    boolean result;
    try {
        result = f.get();
    } catch (InterruptedException e) {
        // Fix: restore the interrupt status before translating to a domain exception.
        Thread.currentThread().interrupt();
        log.error("Creation of VCPE has been interrupted", e);
        throw new VCPENetworkManagerException("Creation of VCPE has been interrupted: " + e.getMessage());
    } catch (ExecutionException e) {
        // Unwrap the cause; rethrow domain exceptions as-is.
        if (e.getCause() instanceof VCPENetworkManagerException)
            throw (VCPENetworkManagerException) e.getCause();
        else {
            throw new VCPENetworkManagerException(e.getCause());
        }
    } finally {
        // remove future from pending tasks
        futures.remove(resourceId);
    }

    return result;
}

From source file:org.jaqpot.core.service.resource.DatasetResource.java

/**
 * "Dummy" variant of the QPRF report endpoint: performs all the validation
 * and data preparation of the real endpoint but returns the assembled
 * TrainingRequest instead of posting it to the report service
 * (see {@code createQPRFReport} for the variant that actually creates the report).
 *
 * @param subjectId    authorization token
 * @param id           id of the prediction dataset
 * @param substanceURI URI of the substance the report is about
 * @param title        report title (unused by the dummy variant)
 * @param description  report description (unused by the dummy variant)
 * @return 200 OK carrying the TrainingRequest that would be posted
 */
@POST
@Path("/{id}/qprf-dummy")
@ApiOperation("Creates QPRF Report")
@Authorize
public Response createQPRFReportDummy(
        @ApiParam(value = "Authorization token") @HeaderParam("subjectid") String subjectId,
        @PathParam("id") String id, @FormParam("substance_uri") String substanceURI,
        @FormParam("title") String title, @FormParam("description") String description) {

    Dataset ds = datasetHandler.find(id);
    if (ds == null) {
        throw new NotFoundException("Dataset with id:" + id + " was not found on the server.");
    }
    if (ds.getByModel() == null || ds.getByModel().isEmpty()) {
        throw new BadRequestException("Selected dataset was not produced by a valid model.");
    }
    Model model = modelHandler.find(ds.getByModel());
    if (model == null) {
        throw new BadRequestException("Selected dataset was not produced by a valid model.");
    }
    String datasetURI = model.getDatasetUri();
    if (datasetURI == null || datasetURI.isEmpty()) {
        throw new BadRequestException(
                "The model that created this dataset does not point to a valid training dataset.");
    }
    Dataset trainingDS = client.target(datasetURI).request().accept(MediaType.APPLICATION_JSON)
            .header("subjectid", subjectId).get(Dataset.class);
    if (trainingDS == null) {
        throw new BadRequestException(
                "The model that created this dataset does not point to a valid training dataset.");
    }

    // Re-apply any transformation models so the training data matches the
    // feature space the model was trained on.
    if (model.getTransformationModels() != null) {
        for (String transModelURI : model.getTransformationModels()) {
            Model transModel = modelHandler.find(transModelURI.split("model/")[1]);
            if (transModel == null) {
                throw new NotFoundException(
                        "Transformation model with id:" + transModelURI + " was not found.");
            }
            try {
                trainingDS = jpdiClient
                        .predict(trainingDS, transModel, trainingDS.getMeta(), UUID.randomUUID().toString())
                        .get();
            } catch (InterruptedException ex) {
                // Fix: restore the interrupt status before translating to an HTTP error.
                Thread.currentThread().interrupt();
                LOG.log(Level.SEVERE, "JPDI Training procedure interupted", ex);
                throw new InternalServerErrorException("JPDI Training procedure interupted", ex);
            } catch (ExecutionException ex) {
                LOG.log(Level.SEVERE, "Training procedure execution error", ex.getCause());
                throw new InternalServerErrorException("JPDI Training procedure error", ex.getCause());
            } catch (CancellationException ex) {
                throw new InternalServerErrorException("Procedure was cancelled");
            }
        }
    }

    // Keep only the model's input and output features.
    List<String> retainableFeatures = new ArrayList<>(model.getIndependentFeatures());
    retainableFeatures.addAll(model.getDependentFeatures());

    trainingDS.getDataEntry().parallelStream().forEach(dataEntry -> {
        dataEntry.getValues().keySet().retainAll(retainableFeatures);
    });

    // Locate the requested substance in the prediction dataset and append it
    // to the training data.
    DataEntry dataEntry = ds.getDataEntry().stream()
            .filter(de -> de.getCompound().getURI().equals(substanceURI)).findFirst()
            .orElseThrow(() -> new BadRequestException(""));

    trainingDS.getDataEntry().add(dataEntry);

    Map<String, Object> parameters = new HashMap<>();

    UrlValidator urlValidator = new UrlValidator();
    if (urlValidator.isValid(substanceURI)) {
        // Pull identifier metadata (CAS, EINECS, InChI, ...) for each structure.
        Dataset structures = client.target(substanceURI + "/structures").request()
                .accept(MediaType.APPLICATION_JSON).header("subjectid", subjectId).get(Dataset.class);
        List<Map<String, String>> structuresList = structures.getDataEntry().stream().map(de -> {
            String compound = de.getCompound().getURI();
            String casrn = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23CASRNDefault"))
                    .orElse("").toString();
            String einecs = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23EINECSDefault"))
                    .orElse("").toString();
            String iuclid5 = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23IUCLID5_UUIDDefault"))
                    .orElse("").toString();
            String inchi = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23InChI_stdDefault"))
                    .orElse("").toString();
            String reach = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23REACHRegistrationDateDefault"))
                    .orElse("").toString();
            String iupac = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23IUPACNameDefault"))
                    .orElse("").toString();

            Map<String, String> structuresMap = new HashMap<>();
            structuresMap.put("Compound", compound);
            structuresMap.put("CasRN", casrn);
            structuresMap.put("EC number", einecs);
            structuresMap.put("REACH registration date", reach);
            structuresMap.put("IUCLID 5 Reference substance UUID", iuclid5);
            structuresMap.put("Std. InChI", inchi);
            structuresMap.put("IUPAC name", iupac);

            return structuresMap;
        }).collect(Collectors.toList());
        parameters.put("structures", structuresList);
    } else {
        // Not a resolvable URI: emit a single empty structure record.
        List<Map<String, String>> structuresList = new ArrayList<>();
        Map<String, String> structuresMap = new HashMap<>();
        structuresMap.put("Compound", "");
        structuresMap.put("CasRN", "");
        structuresMap.put("EC number", "");
        structuresMap.put("REACH registration date", "");
        structuresMap.put("IUCLID 5 Reference substance UUID", "");
        structuresMap.put("Std. InChI", "");
        structuresMap.put("IUPAC name", "");
        structuresList.add(structuresMap);
        parameters.put("structures", structuresList);
    }

    parameters.put("predictedFeature", model.getPredictedFeatures().stream().findFirst()
            .orElseThrow(() -> new BadRequestException("Model does not have a valid predicted feature")));

    parameters.put("algorithm", algorithmHandler.find(model.getAlgorithm().getId()));
    parameters.put("substanceURI", substanceURI);
    if (model.getLinkedModels() != null && !model.getLinkedModels().isEmpty()) {
        // Optional domain-of-applicability model linked to this model.
        Model doa = modelHandler.find(model.getLinkedModels().get(0).split("model/")[1]);
        if (doa != null) {
            parameters.put("doaURI", doa.getPredictedFeatures().get(0));
            parameters.put("doaMethod", doa.getAlgorithm().getId());
        }
    }
    TrainingRequest request = new TrainingRequest();

    request.setDataset(trainingDS);
    request.setParameters(parameters);
    request.setPredictionFeature(model.getDependentFeatures().stream().findFirst()
            .orElseThrow(() -> new BadRequestException("Model does not have a valid prediction feature")));

    // Dummy endpoint: return the assembled request instead of posting it to
    // the QPRF report service (createQPRFReport performs the actual POST).
    return Response.ok(request).build();
}

From source file:org.jaqpot.core.service.resource.DatasetResource.java

/**
 * Creates a QPRF report for a substance of a prediction dataset: validates
 * the dataset/model chain, rebuilds the training data in the model's feature
 * space, assembles a TrainingRequest and posts it to the QPRF report service.
 *
 * @param subjectId    authorization token
 * @param id           id of the prediction dataset
 * @param substanceURI URI of the substance the report is about
 * @param title        report title
 * @param description  report description
 * @return 200 OK carrying the created Report
 * @throws QuotaExceededException if the user already has the maximum number
 *         of reports allowed by their subscription
 */
@POST
@Path("/{id}/qprf")
@ApiOperation("Creates QPRF Report")
@Authorize
public Response createQPRFReport(
        @ApiParam(value = "Authorization token") @HeaderParam("subjectid") String subjectId,
        @PathParam("id") String id, @FormParam("substance_uri") String substanceURI,
        @FormParam("title") String title, @FormParam("description") String description)
        throws QuotaExceededException {

    User user = userHandler.find(securityContext.getUserPrincipal().getName());
    long reportCount = reportHandler.countAllOfCreator(user.getId());
    int maxAllowedReports = new UserFacade(user).getMaxReports();

    if (reportCount > maxAllowedReports) {
        // Fix: the log message said "algorithms" although reports are counted here.
        LOG.info(String.format("User %s has %d reports while maximum is %d", user.getId(), reportCount,
                maxAllowedReports));
        throw new QuotaExceededException("Dear " + user.getId()
                + ", your quota has been exceeded; you already have " + reportCount + " reports. "
                + "No more than " + maxAllowedReports + " are allowed with your subscription.");
    }

    Dataset ds = datasetHandler.find(id);
    if (ds == null) {
        throw new NotFoundException("Dataset with id:" + id + " was not found on the server.");
    }
    if (ds.getByModel() == null || ds.getByModel().isEmpty()) {
        throw new BadRequestException("Selected dataset was not produced by a valid model.");
    }
    Model model = modelHandler.find(ds.getByModel());
    if (model == null) {
        throw new BadRequestException("Selected dataset was not produced by a valid model.");
    }
    String datasetURI = model.getDatasetUri();
    if (datasetURI == null || datasetURI.isEmpty()) {
        throw new BadRequestException(
                "The model that created this dataset does not point to a valid training dataset.");
    }
    Dataset trainingDS = client.target(datasetURI).request().accept(MediaType.APPLICATION_JSON)
            .header("subjectid", subjectId).get(Dataset.class);
    if (trainingDS == null) {
        throw new BadRequestException(
                "The model that created this dataset does not point to a valid training dataset.");
    }

    // Re-apply any transformation models so the training data matches the
    // feature space the model was trained on.
    if (model.getTransformationModels() != null) {
        for (String transModelURI : model.getTransformationModels()) {
            Model transModel = modelHandler.find(transModelURI.split("model/")[1]);
            if (transModel == null) {
                throw new NotFoundException(
                        "Transformation model with id:" + transModelURI + " was not found.");
            }
            try {
                trainingDS = jpdiClient
                        .predict(trainingDS, transModel, trainingDS.getMeta(), UUID.randomUUID().toString())
                        .get();
            } catch (InterruptedException ex) {
                // Fix: restore the interrupt status before translating to an HTTP error.
                Thread.currentThread().interrupt();
                LOG.log(Level.SEVERE, "JPDI Training procedure interupted", ex);
                throw new InternalServerErrorException("JPDI Training procedure interupted", ex);
            } catch (ExecutionException ex) {
                LOG.log(Level.SEVERE, "Training procedure execution error", ex.getCause());
                throw new InternalServerErrorException("JPDI Training procedure error", ex.getCause());
            } catch (CancellationException ex) {
                throw new InternalServerErrorException("Procedure was cancelled");
            }
        }
    }

    // Keep only the model's input and output features.
    List<String> retainableFeatures = new ArrayList<>(model.getIndependentFeatures());
    retainableFeatures.addAll(model.getDependentFeatures());

    trainingDS.getDataEntry().parallelStream().forEach(dataEntry -> {
        dataEntry.getValues().keySet().retainAll(retainableFeatures);
    });

    // Locate the requested substance in the prediction dataset and append it
    // to the training data; attribute the dataset to the requesting user.
    DataEntry dataEntry = ds.getDataEntry().stream()
            .filter(de -> de.getCompound().getURI().equals(substanceURI)).findFirst()
            .orElseThrow(() -> new BadRequestException(""));

    trainingDS.getDataEntry().add(dataEntry);
    trainingDS.getMeta().setCreators(new HashSet<>(Arrays.asList(user.getId())));

    Map<String, Object> parameters = new HashMap<>();

    UrlValidator urlValidator = new UrlValidator();
    if (urlValidator.isValid(substanceURI)) {
        // Pull identifier metadata (CAS, EINECS, InChI, ...) for each structure.
        Dataset structures = client.target(substanceURI + "/structures").request()
                .accept(MediaType.APPLICATION_JSON).header("subjectid", subjectId).get(Dataset.class);
        List<Map<String, String>> structuresList = structures.getDataEntry().stream().map(de -> {
            String compound = de.getCompound().getURI();
            String casrn = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23CASRNDefault"))
                    .orElse("").toString();
            String einecs = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23EINECSDefault"))
                    .orElse("").toString();
            String iuclid5 = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23IUCLID5_UUIDDefault"))
                    .orElse("").toString();
            String inchi = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23InChI_stdDefault"))
                    .orElse("").toString();
            String reach = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23REACHRegistrationDateDefault"))
                    .orElse("").toString();
            String iupac = Optional.ofNullable(de.getValues().get(
                    "https://apps.ideaconsult.net/enmtest/feature/http%3A%2F%2Fwww.opentox.org%2Fapi%2F1.1%23IUPACNameDefault"))
                    .orElse("").toString();

            Map<String, String> structuresMap = new HashMap<>();
            structuresMap.put("Compound", compound);
            structuresMap.put("CasRN", casrn);
            structuresMap.put("EC number", einecs);
            structuresMap.put("REACH registration date", reach);
            structuresMap.put("IUCLID 5 Reference substance UUID", iuclid5);
            structuresMap.put("Std. InChI", inchi);
            structuresMap.put("IUPAC name", iupac);

            return structuresMap;
        }).collect(Collectors.toList());
        parameters.put("structures", structuresList);
    } else {
        // Not a resolvable URI: emit a single empty structure record.
        List<Map<String, String>> structuresList = new ArrayList<>();
        Map<String, String> structuresMap = new HashMap<>();
        structuresMap.put("Compound", "");
        structuresMap.put("CasRN", "");
        structuresMap.put("EC number", "");
        structuresMap.put("REACH registration date", "");
        structuresMap.put("IUCLID 5 Reference substance UUID", "");
        structuresMap.put("Std. InChI", "");
        structuresMap.put("IUPAC name", "");
        structuresList.add(structuresMap);
        parameters.put("structures", structuresList);
    }

    parameters.put("predictedFeature", model.getPredictedFeatures().stream().findFirst()
            .orElseThrow(() -> new BadRequestException("Model does not have a valid predicted feature")));

    parameters.put("algorithm", algorithmHandler.find(model.getAlgorithm().getId()));
    parameters.put("substanceURI", substanceURI);
    if (model.getLinkedModels() != null && !model.getLinkedModels().isEmpty()) {
        // Optional domain-of-applicability model linked to this model.
        Model doa = modelHandler.find(model.getLinkedModels().get(0).split("model/")[1]);
        if (doa != null) {
            parameters.put("doaURI", doa.getPredictedFeatures().get(0));
            parameters.put("doaMethod", doa.getAlgorithm().getId());
        }
    }
    TrainingRequest request = new TrainingRequest();

    request.setDataset(trainingDS);
    request.setParameters(parameters);
    request.setPredictionFeature(model.getDependentFeatures().stream().findFirst()
            .orElseThrow(() -> new BadRequestException("Model does not have a valid prediction feature")));

    // Post the assembled request to the external QPRF report service.
    Report report = client.target("http://147.102.82.32:8094/pws/qprf").request()
            .header("Content-Type", MediaType.APPLICATION_JSON).accept(MediaType.APPLICATION_JSON)
            .post(Entity.json(request), Report.class);

    report.setMeta(MetaInfoBuilder.builder().addTitles(title).addDescriptions(description)
            .addCreators(securityContext.getUserPrincipal().getName()).build());
    report.setId(new ROG(true).nextString(15));
    report.setVisible(Boolean.TRUE);
    reportHandler.create(report);

    return Response.ok(report).build();
}

From source file:org.apache.pulsar.broker.web.PulsarWebResource.java

/**
 * If the namespace is global, validate the following - 1. If replicated clusters are configured for this global
 * namespace 2. If local cluster belonging to this namespace is replicated 3. If replication is enabled for this
 * namespace <br/>/*from   w  w  w  . j  av  a2s  .  c  o m*/
 * It validates if local cluster is part of replication-cluster. If local cluster is not part of the replication
 * cluster then it redirects request to peer-cluster if any of the peer-cluster is part of replication-cluster of
 * this namespace. If none of the cluster is part of the replication cluster then it fails the validation.
 *
 * @param namespace
 * @throws Exception
 */
protected void validateGlobalNamespaceOwnership(NamespaceName namespace) {
    try {
        ClusterData peerClusterData = checkLocalOrGetPeerReplicationCluster(pulsar(), namespace)
                .get(cacheTimeOutInSec, SECONDS);
        // if peer-cluster-data is present it means namespace is owned by that peer-cluster and request should be
        // redirect to the peer-cluster
        if (peerClusterData != null) {
            URI redirect = getRedirectionUrl(peerClusterData);
            // redirect to the cluster requested
            if (log.isDebugEnabled()) {
                log.debug("[{}] Redirecting the rest call to {}: cluster={}", redirect, peerClusterData);

            }
            throw new WebApplicationException(Response.temporaryRedirect(redirect).build());
        }
    } catch (InterruptedException e) {
        log.warn("Time-out {} sec while validating policy on {} ", cacheTimeOutInSec, namespace);
        throw new RestException(Status.SERVICE_UNAVAILABLE, String.format(
                "Failed to validate global cluster configuration : ns=%s  emsg=%s", namespace, e.getMessage()));
    } catch (WebApplicationException e) {
        throw e;
    } catch (Exception e) {
        if (e.getCause() instanceof WebApplicationException) {
            throw (WebApplicationException) e.getCause();
        }
        throw new RestException(Status.SERVICE_UNAVAILABLE, String.format(
                "Failed to validate global cluster configuration : ns=%s  emsg=%s", namespace, e.getMessage()));
    }
}

From source file:org.apache.hadoop.hdfs.server.datanode.AvatarDataNode.java

/**
 * Re-reads the namenode/avatarnode addresses from the configuration and asks
 * the namespace manager to refresh its connections accordingly.
 *
 * @param conf configuration to read the service ids and addresses from
 * @throws IOException if the refresh is interrupted
 */
@Override
public void refreshNamenodes(Configuration conf) throws IOException {
    LOG.info("refresh namenodes");
    try {
        Collection<String> serviceIds = DFSUtil.getNameServiceIds(conf);
        List<InetSocketAddress> nameAddrs0 = DFSUtil.getRPCAddresses("0", conf, serviceIds,
                DATANODE_PROTOCOL_ADDRESS, DFS_NAMENODE_RPC_ADDRESS_KEY);
        List<InetSocketAddress> nameAddrs1 = DFSUtil.getRPCAddresses("1", conf, serviceIds,
                DATANODE_PROTOCOL_ADDRESS, DFS_NAMENODE_RPC_ADDRESS_KEY);
        List<InetSocketAddress> avatarAddrs0 = getAvatarNodeAddresses("0", conf, serviceIds);
        List<InetSocketAddress> avatarAddrs1 = getAvatarNodeAddresses("1", conf, serviceIds);
        List<String> defaultAddresses = getZnodePaths(serviceIds, conf);
        ((AvatarNamespaceManager) namespaceManager).refreshNamenodes(nameAddrs0, nameAddrs1, avatarAddrs0,
                avatarAddrs1, defaultAddresses, serviceIds);
    } catch (InterruptedException e) {
        // Fix: restore the interrupt status, and wrap the exception itself
        // rather than its (usually null) cause, which lost the stack trace.
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}

From source file:org.apache.jackrabbit.core.cluster.ClusterNode.java

/**
 * Synchronize contents from journal.
 *
 * @throws ClusterException if an error occurs
 */
public void sync() throws ClusterException {
    try {
        syncLock.acquire();
    } catch (InterruptedException e) {
        // Fix: restore the interrupt status and preserve the cause, both of
        // which were previously dropped.
        Thread.currentThread().interrupt();
        String msg = "Interrupted while waiting for mutex.";
        throw new ClusterException(msg, e);
    }

    try {
        journal.sync();
    } catch (JournalException e) {
        // Fix: chain the JournalException itself (not e.getCause(), which
        // discarded the journal stack trace).
        throw new ClusterException(e.getMessage(), e);
    } finally {
        syncLock.release();
    }
}

From source file:com.joliciel.talismane.machineLearning.maxent.custom.GISTrainer.java

/**
 * Runs one GIS training iteration: computes the model expectations in
 * parallel, merges the per-thread results and updates the parameters.
 *
 * @param correctionConstant GIS correction constant used in the update rule
 * @return the log-likelihood of the current model over the training events
 */
private double nextIteration(double correctionConstant) {
    // compute contribution of p(a|b_i) for each feature and the new
    // correction parameter
    double loglikelihood = 0.0;
    int numEvents = 0;
    int numCorrect = 0;

    int numberOfThreads = modelExpects.length;

    ExecutorService executor = Executors.newFixedThreadPool(numberOfThreads);

    int taskSize = numUniqueEvents / numberOfThreads;

    int leftOver = numUniqueEvents % numberOfThreads;

    List<Future<?>> futures = new ArrayList<Future<?>>();

    // Fix: shut the executor down in a finally block so its threads are not
    // leaked when a task fails or the wait is interrupted.
    try {
        for (int i = 0; i < numberOfThreads; i++) {
            if (i != numberOfThreads - 1)
                futures.add(executor.submit(new ModelExpactationComputeTask(i, i * taskSize, taskSize)));
            else
                // last task also takes the remainder of the division
                futures.add(executor
                        .submit(new ModelExpactationComputeTask(i, i * taskSize, taskSize + leftOver)));
        }

        for (Future<?> future : futures) {
            ModelExpactationComputeTask finishedTask = null;
            try {
                finishedTask = (ModelExpactationComputeTask) future.get();
            } catch (InterruptedException e) {
                // Interruption is currently not supported during training;
                // restore the interrupt status and fail hard.
                Thread.currentThread().interrupt();
                throw new IllegalStateException("Interruption is not supported!", e);
            } catch (ExecutionException e) {
                // Only runtime exception can be thrown during training, if one was thrown
                // it should be re-thrown. That could for example be a NullPointerException
                // which is caused through a bug in our implementation.
                throw new RuntimeException(e.getCause());
            }

            // When they are done, retrieve the results ...
            numEvents += finishedTask.getNumEvents();
            numCorrect += finishedTask.getNumCorrect();
            loglikelihood += finishedTask.getLoglikelihood();
        }
    } finally {
        executor.shutdown();
    }

    display(".");

    // merge the results of the two computations
    for (int pi = 0; pi < numPreds; pi++) {
        int[] activeOutcomes = params[pi].getOutcomes();

        for (int aoi = 0; aoi < activeOutcomes.length; aoi++) {
            for (int i = 1; i < modelExpects.length; i++) {
                modelExpects[0][pi].updateParameter(aoi, modelExpects[i][pi].getParameters()[aoi]);
            }
        }
    }

    display(".");

    // compute the new parameter values
    for (int pi = 0; pi < numPreds; pi++) {
        double[] observed = observedExpects[pi].getParameters();
        double[] model = modelExpects[0][pi].getParameters();
        int[] activeOutcomes = params[pi].getOutcomes();
        for (int aoi = 0; aoi < activeOutcomes.length; aoi++) {
            if (useGaussianSmoothing) {
                params[pi].updateParameter(aoi, gaussianUpdate(pi, aoi, numEvents, correctionConstant));
            } else {
                if (model[aoi] == 0) {
                    LOG.error("Model expects == 0 for " + predLabels[pi] + " " + outcomeLabels[aoi]);
                }
                //params[pi].updateParameter(aoi,(Math.log(observed[aoi]) - Math.log(model[aoi])));
                params[pi].updateParameter(aoi,
                        ((Math.log(observed[aoi]) - Math.log(model[aoi])) / correctionConstant));
            }

            for (int i = 0; i < modelExpects.length; i++)
                modelExpects[i][pi].setParameter(aoi, 0.0); // re-initialize to 0.0's

        }
    }

    display(". loglikelihood=" + loglikelihood + "\t" + ((double) numCorrect / numEvents) + "\n");

    return loglikelihood;
}