Example usage for java.util List sort

Introduction

On this page you can find example usage for java.util List sort.

Prototype

@SuppressWarnings({ "unchecked", "rawtypes" })
default void sort(Comparator<? super E> c) 

Document

Sorts this list according to the order induced by the specified Comparator.
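
A minimal, self-contained sketch of the common idioms (the class name and sample data are illustrative only):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class ListSortExample {
    public static void main(String[] args) {
        List<String> words = new ArrayList<>(Arrays.asList("pear", "apple", "fig"));

        // Natural ordering (elements must implement Comparable)
        words.sort(Comparator.naturalOrder());                        // [apple, fig, pear]

        // Order by a derived key
        words.sort(Comparator.comparing(String::length));             // [fig, pear, apple]

        // Reverse an ordering
        words.sort(Comparator.comparing(String::length).reversed());  // [apple, pear, fig]

        // A null comparator sorts by natural ordering, per the List.sort contract
        words.sort(null);                                             // [apple, fig, pear]
    }
}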

Usage

From source file:org.apache.druid.indexing.kafka.supervisor.KafkaSupervisor.java

/**
 * This method does two things:
 * 1. Makes sure the checkpoint information in the taskGroup is consistent with that of the tasks; if not, kills the
 * inconsistent tasks.
 * 2. Truncates the checkpoints in the taskGroup whose corresponding segments have been published, so that any newly
 * created tasks for the taskGroup start indexing from after the latest published offsets.
 */
private void verifyAndMergeCheckpoints(final TaskGroup taskGroup) {
    final int groupId = taskGroup.groupId;
    final List<Pair<String, TreeMap<Integer, Map<Integer, Long>>>> taskSequences = new ArrayList<>();
    final List<ListenableFuture<TreeMap<Integer, Map<Integer, Long>>>> futures = new ArrayList<>();
    final List<String> taskIds = new ArrayList<>();

    for (String taskId : taskGroup.taskIds()) {
        final ListenableFuture<TreeMap<Integer, Map<Integer, Long>>> checkpointsFuture = taskClient
                .getCheckpointsAsync(taskId, true);
        taskIds.add(taskId);
        futures.add(checkpointsFuture);
    }

    try {
        List<TreeMap<Integer, Map<Integer, Long>>> futuresResult = Futures.successfulAsList(futures)
                .get(futureTimeoutInSeconds, TimeUnit.SECONDS);

        for (int i = 0; i < futuresResult.size(); i++) {
            final TreeMap<Integer, Map<Integer, Long>> checkpoints = futuresResult.get(i);
            final String taskId = taskIds.get(i);
            if (checkpoints == null) {
                try {
                    // catch the exception in failed futures
                    futures.get(i).get();
                } catch (Exception e) {
                    log.error(e, "Problem while getting checkpoints for task [%s], killing the task", taskId);
                    killTask(taskId);
                    taskGroup.tasks.remove(taskId);
                }
            } else if (checkpoints.isEmpty()) {
                log.warn("Ignoring task [%s], as probably it is not started running yet", taskId);
            } else {
                taskSequences.add(new Pair<>(taskId, checkpoints));
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    final KafkaDataSourceMetadata latestDataSourceMetadata = (KafkaDataSourceMetadata) indexerMetadataStorageCoordinator
            .getDataSourceMetadata(dataSource);
    final boolean hasValidOffsetsFromDb = latestDataSourceMetadata != null
            && latestDataSourceMetadata.getKafkaPartitions() != null
            && ioConfig.getTopic().equals(latestDataSourceMetadata.getKafkaPartitions().getTopic());
    final Map<Integer, Long> latestOffsetsFromDb;
    if (hasValidOffsetsFromDb) {
        latestOffsetsFromDb = latestDataSourceMetadata.getKafkaPartitions().getPartitionOffsetMap();
    } else {
        latestOffsetsFromDb = null;
    }

    // order tasks of this taskGroup by the first checkpointed sequenceId, descending (latest first)
    taskSequences.sort((o1, o2) -> o2.rhs.firstKey().compareTo(o1.rhs.firstKey()));

    final Set<String> tasksToKill = new HashSet<>();
    final AtomicInteger earliestConsistentSequenceId = new AtomicInteger(-1);
    int taskIndex = 0;

    while (taskIndex < taskSequences.size()) {
        TreeMap<Integer, Map<Integer, Long>> taskCheckpoints = taskSequences.get(taskIndex).rhs;
        String taskId = taskSequences.get(taskIndex).lhs;
        if (earliestConsistentSequenceId.get() == -1) {
            // find the first replica task with earliest sequenceId consistent with datasource metadata in the metadata
            // store
            if (taskCheckpoints.entrySet().stream()
                    .anyMatch(sequenceCheckpoint -> sequenceCheckpoint.getValue().entrySet().stream()
                            .allMatch(partitionOffset -> Longs.compare(partitionOffset.getValue(),
                                    latestOffsetsFromDb == null ? partitionOffset.getValue()
                                            : latestOffsetsFromDb.getOrDefault(partitionOffset.getKey(),
                                                    partitionOffset.getValue())) == 0)
                            && earliestConsistentSequenceId.compareAndSet(-1, sequenceCheckpoint.getKey()))
                    || (pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() > 0
                            && earliestConsistentSequenceId.compareAndSet(-1, taskCheckpoints.firstKey()))) {
                final SortedMap<Integer, Map<Integer, Long>> latestCheckpoints = new TreeMap<>(
                        taskCheckpoints.tailMap(earliestConsistentSequenceId.get()));
                log.info("Setting taskGroup sequences to [%s] for group [%d]", latestCheckpoints, groupId);
                taskGroup.sequenceOffsets.clear();
                taskGroup.sequenceOffsets.putAll(latestCheckpoints);
            } else {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], latestoffsets from DB [%s]", taskId,
                        taskCheckpoints, latestOffsetsFromDb);
                tasksToKill.add(taskId);
            }
        } else {
            // check consistency with taskGroup sequences
            if (taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey()) == null
                    || !(taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey())
                            .equals(taskGroup.sequenceOffsets.firstEntry().getValue()))
                    || taskCheckpoints.tailMap(taskGroup.sequenceOffsets.firstKey())
                            .size() != taskGroup.sequenceOffsets.size()) {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], taskgroup checkpoints [%s]", taskId,
                        taskCheckpoints, taskGroup.sequenceOffsets);
                tasksToKill.add(taskId);
            }
        }
        taskIndex++;
    }

    if ((tasksToKill.size() > 0 && tasksToKill.size() == taskGroup.tasks.size()) || (taskGroup.tasks.size() == 0
            && pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() == 0)) {
        // are we killing all tasks, or is no task left in the group?
        // clear the taskGroup state so that the latest offset information is fetched from the metadata store
        log.warn("Clearing task group [%d] information as no valid tasks are left in the group", groupId);
        taskGroups.remove(groupId);
        partitionGroups.get(groupId).replaceAll((partition, offset) -> NOT_SET);
    }

    taskSequences.stream().filter(taskIdSequences -> tasksToKill.contains(taskIdSequences.lhs))
            .forEach(sequenceCheckpoint -> {
                log.warn(
                        "Killing task [%s], as its checkpoints [%s] are not consistent with group checkpoints[%s] or latest "
                                + "persisted offsets in metadata store [%s]",
                        sequenceCheckpoint.lhs, sequenceCheckpoint.rhs, taskGroup.sequenceOffsets,
                        latestOffsetsFromDb);
                killTask(sequenceCheckpoint.lhs);
                taskGroup.tasks.remove(sequenceCheckpoint.lhs);
            });
}
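
The sort call above orders task/checkpoint pairs in descending order of the first checkpoint sequence id. A minimal sketch of the same idiom, with a hypothetical Pair holder standing in for Druid's:

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class DescendingByFirstKey {
    // Hypothetical stand-in for Druid's Pair
    static class Pair<L, R> {
        final L lhs;
        final R rhs;
        Pair(L lhs, R rhs) { this.lhs = lhs; this.rhs = rhs; }
    }

    public static void main(String[] args) {
        TreeMap<Integer, String> a = new TreeMap<>();
        a.put(3, "offsets-a");
        TreeMap<Integer, String> b = new TreeMap<>();
        b.put(7, "offsets-b");
        List<Pair<String, TreeMap<Integer, String>>> taskSequences = new ArrayList<>();
        taskSequences.add(new Pair<>("task-a", a));
        taskSequences.add(new Pair<>("task-b", b));

        // Comparing o2 to o1 reverses the natural order, so the pair with the
        // highest first key comes first
        taskSequences.sort((o1, o2) -> o2.rhs.firstKey().compareTo(o1.rhs.firstKey()));

        System.out.println(taskSequences.get(0).lhs); // task-b
    }
}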

From source file:org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic.java

/**
 * @param artifactDefinition
 * @return
 */
public Either<ArtifactDefinition, ResponseFormat> generateHeatEnvArtifact(ArtifactDefinition artifactDefinition,
        org.openecomp.sdc.be.model.Component component, String resourceInstanceName, User modifier,
        boolean shouldLock) {
    List<HeatParameterDefinition> heatParameters = artifactDefinition.getHeatParameters();
    StringBuilder sb = new StringBuilder("parameters:\n");
    if (heatParameters != null) {
        // sort inside the null check to avoid a NullPointerException when there are no parameters
        heatParameters.sort(Comparator.comparing(HeatParameterDefinition::getName));

        for (HeatParameterDefinition heatParameterDefinition : heatParameters) {
            if (heatParameterDefinition.getCurrentValue() != null) {
                HeatParameterType type = HeatParameterType.isValidType(heatParameterDefinition.getType());
                if (type != null) {
                    switch (type) {
                    case BOOLEAN:
                        sb.append("  ").append(heatParameterDefinition.getName()).append(":").append(" ")
                                .append(Boolean.parseBoolean(heatParameterDefinition.getCurrentValue()))
                                .append("\n");
                        break;
                    case NUMBER:
                        // if
                        // (ValidationUtils.isFloatNumber(heatParameterDefinition.getCurrentValue()))
                        // {
                        // sb.append("
                        // ").append(heatParameterDefinition.getName()).append(":").append("
                        // ").append(Float.parseFloat(heatParameterDefinition.getCurrentValue())).append("\n");
                        // } else {
                        // sb.append("
                        // ").append(heatParameterDefinition.getName()).append(":").append("
                        // ").append(Integer.parseInt(heatParameterDefinition.getCurrentValue())).append("\n");
                        // }
                        sb.append("  ").append(heatParameterDefinition.getName()).append(":").append(" ")
                                .append(new BigDecimal(heatParameterDefinition.getCurrentValue())
                                        .toPlainString())
                                .append("\n");
                        break;
                    case COMMA_DELIMITED_LIST:
                    case JSON:
                        sb.append("  ").append(heatParameterDefinition.getName()).append(":").append(" ")
                                .append(heatParameterDefinition.getCurrentValue()).append("\n");
                        break;
                    default:
                        String value = heatParameterDefinition.getCurrentValue();
                        boolean starts = value.startsWith("\"");
                        boolean ends = value.endsWith("\"");
                        if (!(starts && ends)) {
                            starts = value.startsWith("'");
                            ends = value.endsWith("'");
                            if (!(starts && ends)) {
                                value = "\"" + value + "\"";
                            }
                        }
                        sb.append("  ").append(heatParameterDefinition.getName()).append(":").append(" ")
                                .append(value);
                        sb.append("\n");
                        break;

                    }
                }
            }
        }
    }
    return generateAndSaveHeatEnvArtifact(artifactDefinition, sb.toString(), component, resourceInstanceName,
            modifier, shouldLock);

}
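
The example sorts the heat parameters by name before the env file is assembled, which keeps the generated output deterministic. A minimal, self-contained sketch of the same key-extractor idiom, with HeatParam as an illustrative stand-in for HeatParameterDefinition:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class SortByName {
    // Illustrative stand-in for HeatParameterDefinition
    static class HeatParam {
        final String name;
        HeatParam(String name) { this.name = name; }
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<HeatParam> heatParameters = new ArrayList<>();
        heatParameters.add(new HeatParam("timeout"));
        heatParameters.add(new HeatParam("image_id"));

        // Equivalent to sort((a, b) -> a.getName().compareTo(b.getName()))
        heatParameters.sort(Comparator.comparing(HeatParam::getName));

        heatParameters.forEach(p -> System.out.println(p.getName())); // image_id, timeout
    }
}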

From source file:eu.itesla_project.dymola.DymolaImpactAnalysis.java

private List<String> writeDymolaInputs(Path workingDir, List<Contingency> contingencies) throws IOException {
    LOGGER.info(" Start writing dymola inputs");

    List<String> retList = new ArrayList<>();

    DdbConfig ddbConfig = DdbConfig.load();
    String jbossHost = ddbConfig.getJbossHost();
    String jbossPort = ddbConfig.getJbossPort();
    String jbossUser = ddbConfig.getJbossUser();
    String jbossPassword = ddbConfig.getJbossPassword();

    Path dymolaExportPath = workingDir.resolve(MO_EXPORT_DIRECTORY);
    if (!Files.exists(dymolaExportPath)) {
        Files.createDirectory(dymolaExportPath);
    }

    //retrieve modelica export parameters from configuration
    String modelicaVersion = config.getModelicaVersion();
    String sourceEngine = config.getSourceEngine();
    String sourceVersion = config.getSourceEngineVersion();
    Path modelicaPowerSystemLibraryPath = Paths.get(config.getModelicaPowerSystemLibraryFile());

    //write the modelica events file, to feed the modelica exporter
    Path eventsPath = workingDir.resolve(MODELICA_EVENTS_CSV_FILENAME);
    writeModelicaExporterContingenciesFile(eventsPath, contingencies);

    //these are only optional params needed if the source is eurostag
    Path modelicaLibPath = null;

    String slackId = config.getSlackId();
    if ("".equals(slackId)) {
        slackId = null; // null when not specified
    }

    LoadFlowFactory loadFlowFactory;
    try {
        loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw new RuntimeException(e);
    }

    LOGGER.info("Exporting modelica data for network {}, working state-id {} ", network,
            network.getStateManager().getWorkingStateId());
    ModelicaMainExporter exporter = new ModelicaMainExporter(network, slackId, jbossHost, jbossPort, jbossUser,
            jbossPassword, modelicaVersion, sourceEngine, sourceVersion, modelicaLibPath, loadFlowFactory);
    exporter.export(dymolaExportPath);
    ModEventsExport eventsExporter = new ModEventsExport(
            dymolaExportPath.resolve(network.getId() + ".mo").toFile(), eventsPath.toFile());
    eventsExporter.export(dymolaExportPath);
    LOGGER.info(" modelica data exported.");

    // now assemble the input files to feed dymola
    //  one .zip per contingency; in the zip, the .mo file and the powersystem library
    //TODO here it is assumed that contingencies ids in csv file start from 0 (i.e. 0 is the first contingency); id should be decoupled from the implementation
    try (final Stream<Path> pathStream = Files.walk(dymolaExportPath)) {
        pathStream.filter((p) -> !p.toFile().isDirectory() && p.toFile().getAbsolutePath().contains("events_")
                && p.toFile().getAbsolutePath().endsWith(".mo")).forEach(p -> {
                    GenericArchive archive = ShrinkWrap.createDomain().getArchiveFactory()
                            .create(GenericArchive.class);
                    try (FileSystem fileSystem = ShrinkWrapFileSystems.newFileSystem(archive)) {
                        Path rootDir = fileSystem.getPath("/");
                        Files.copy(modelicaPowerSystemLibraryPath,
                                rootDir.resolve(modelicaPowerSystemLibraryPath.getFileName()));
                        Files.copy(Paths.get(p.toString()),
                                rootDir.resolve(DymolaUtil.DYMOLA_SIM_MODEL_INPUT_PREFIX + ".mo"));

                        String[] c = p.getFileName().toString().replace(".mo", "").split("_");
                        try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent().resolve(
                                DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + c[c.length - 1] + ".zip"))) {
                            archive.as(ZipExporter.class).exportTo(os);
                            retList.add(new String(c[c.length - 1]));
                        } catch (IOException e) {
                            //e.printStackTrace();
                            throw new RuntimeException(e);
                        }

                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }

                });
    }
    retList.sort(Comparator.<String>naturalOrder());

    //prepare param inputs for indexes from indexes properties file
    LOGGER.info("writing input indexes parameters in  .mat format - start ");
    try {
        Path baseWp43ConfigFile = PlatformConfig.CONFIG_DIR.resolve(WP43_CONFIG_FILE_NAME);
        HierarchicalINIConfiguration configuration = new HierarchicalINIConfiguration(
                baseWp43ConfigFile.toFile());

        //fix params for smallsignal index (cfr EurostagImpactAnalysis sources)
        SubnodeConfiguration node = configuration.getSection("smallsignal");
        node.setProperty("f_instant", Double.toString(parameters.getFaultEventInstant()));
        for (int i = 0; i < contingencies.size(); i++) {
            Contingency contingency = contingencies.get(i);
            if (contingency.getElements().isEmpty()) {
                throw new AssertionError("Empty contingency " + contingency.getId());
            }
            Iterator<ContingencyElement> it = contingency.getElements().iterator();
            // compute the maximum fault duration
            double maxDuration = getFaultDuration(it.next());
            while (it.hasNext()) {
                maxDuration = Math.max(maxDuration, getFaultDuration(it.next()));
            }
            node.setProperty("f_duration", Double.toString(maxDuration));
        }

        DymolaAdaptersMatParamsWriter writer = new DymolaAdaptersMatParamsWriter(configuration);
        for (String cId : retList) {
            String parFileNamePrefix = DymolaUtil.DYMOLA_SIM_MAT_OUTPUT_PREFIX + "_" + cId + "_wp43_";
            String parFileNameSuffix = "_pars.mat";
            String zippedParFileNameSuffix = "_pars.zip";

            try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent()
                    .resolve(DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + cId + zippedParFileNameSuffix))) {
                JavaArchive archive = ShrinkWrap.create(JavaArchive.class);
                Path sfile1 = ShrinkWrapFileSystems.newFileSystem(archive).getPath("/");

                Arrays.asList(config.getIndexesNames()).forEach(indexName -> writer.write(indexName,
                        sfile1.resolve(parFileNamePrefix + indexName + parFileNameSuffix)));

                archive.as(ZipExporter.class).exportTo(os);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }

        }

    } catch (ConfigurationException exc) {
        throw new RuntimeException(exc);
    }

    LOGGER.info("writing input indexes parameters in  .mat format - end - {}", retList);
    return retList;
}
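
Note that retList holds numeric contingency ids as strings, and Comparator.<String>naturalOrder() sorts them lexicographically, so "10" sorts before "2". A minimal sketch of the difference (the numeric sort is an alternative, not what the code above does):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class NaturalOrderSort {
    public static void main(String[] args) {
        List<String> ids = new ArrayList<>(Arrays.asList("2", "10", "1"));

        // Lexicographic (natural) ordering of strings
        ids.sort(Comparator.naturalOrder());
        System.out.println(ids); // [1, 10, 2]

        // Numeric ordering of the same ids
        ids.sort(Comparator.comparingInt(Integer::parseInt));
        System.out.println(ids); // [1, 2, 10]
    }
}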

From source file:ch.cyberduck.core.b2.B2LargeUploadService.java

@Override
public BaseB2Response upload(final Path file, final Local local, final BandwidthThrottle throttle,
        final StreamListener listener, final TransferStatus status, final ConnectionCallback callback)
        throws BackgroundException {
    final DefaultThreadPool pool = new DefaultThreadPool("largeupload", concurrency);
    try {
        final String fileid;
        // Get the results of the uploads in the order they were submitted
        // this is important for building the manifest, and is not a problem in terms of performance
        // because we should only continue when all segments have uploaded successfully
        final List<B2UploadPartResponse> completed = new ArrayList<B2UploadPartResponse>();
        final Map<String, String> fileinfo = new HashMap<>(status.getMetadata());
        final Checksum checksum = status.getChecksum();
        if (Checksum.NONE != checksum) {
            switch (checksum.algorithm) {
            case sha1:
                fileinfo.put(X_BZ_INFO_LARGE_FILE_SHA1, status.getChecksum().hash);
                break;
            }
        }
        if (null != status.getTimestamp()) {
            fileinfo.put(X_BZ_INFO_SRC_LAST_MODIFIED_MILLIS, String.valueOf(status.getTimestamp()));
        }
        if (status.isAppend() || status.isRetry()) {
            // Add already completed parts
            final B2LargeUploadPartService partService = new B2LargeUploadPartService(session);
            final List<B2FileInfoResponse> uploads = partService.find(file);
            if (uploads.isEmpty()) {
                fileid = session.getClient()
                        .startLargeFileUpload(
                                new B2FileidProvider(session).getFileid(containerService.getContainer(file),
                                        new DisabledListProgressListener()),
                                containerService.getKey(file), status.getMime(), fileinfo)
                        .getFileId();
            } else {
                fileid = uploads.iterator().next().getFileId();
                completed.addAll(partService.list(fileid));
            }
        } else {
            fileid = session.getClient()
                    .startLargeFileUpload(
                            new B2FileidProvider(session).getFileid(containerService.getContainer(file),
                                    new DisabledListProgressListener()),
                            containerService.getKey(file), status.getMime(), fileinfo)
                    .getFileId();
        }
        // Submit file segments for concurrent upload
        final List<Future<B2UploadPartResponse>> parts = new ArrayList<Future<B2UploadPartResponse>>();
        long remaining = status.getLength();
        long offset = 0;
        for (int partNumber = 1; remaining > 0; partNumber++) {
            boolean skip = false;
            if (status.isAppend() || status.isRetry()) {
                if (log.isInfoEnabled()) {
                    log.info(String.format("Determine if part number %d can be skipped", partNumber));
                }
                for (B2UploadPartResponse c : completed) {
                    if (c.getPartNumber().equals(partNumber)) {
                        if (log.isInfoEnabled()) {
                            log.info(String.format("Skip completed part number %d", partNumber));
                        }
                        skip = true;
                        offset += c.getContentLength();
                        break;
                    }
                }
            }
            if (!skip) {
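                // Use at least the configured part size so the upload fits within the
                // maximum number of parts, but never more than the bytes remaining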
                final Long length = Math.min(Math.max(
                        ((status.getLength() + status.getOffset()) / B2LargeUploadService.MAXIMUM_UPLOAD_PARTS),
                        partSize), remaining);
                // Submit to queue
                parts.add(this.submit(pool, file, local, throttle, listener, status, partNumber, offset, length,
                        callback));
                if (log.isDebugEnabled()) {
                    log.debug(String.format("Part %s submitted with size %d and offset %d", partNumber, length,
                            offset));
                }
                remaining -= length;
                offset += length;
            }
        }
        try {
            for (Future<B2UploadPartResponse> f : parts) {
                completed.add(f.get());
            }
        } catch (InterruptedException e) {
            log.error("Part upload failed with interrupt failure");
            status.setCanceled();
            throw new ConnectionCanceledException(e);
        } catch (ExecutionException e) {
            log.warn(String.format("Part upload failed with execution failure %s", e.getMessage()));
            if (e.getCause() instanceof BackgroundException) {
                throw (BackgroundException) e.getCause();
            }
            throw new DefaultExceptionMappingService().map(e.getCause());
        } finally {
            pool.shutdown(false);
        }
        completed.sort(new Comparator<B2UploadPartResponse>() {
            @Override
            public int compare(final B2UploadPartResponse o1, final B2UploadPartResponse o2) {
                return o1.getPartNumber().compareTo(o2.getPartNumber());
            }
        });
        final List<String> checksums = new ArrayList<String>();
        for (B2UploadPartResponse part : completed) {
            checksums.add(part.getContentSha1());
        }
        final B2FinishLargeFileResponse response = session.getClient().finishLargeFileUpload(fileid,
                checksums.toArray(new String[checksums.size()]));
        if (log.isInfoEnabled()) {
            log.info(String.format("Finished large file upload %s with %d parts", file, completed.size()));
        }
        // Mark parent status as complete
        status.setComplete();
        return response;
    } catch (B2ApiException e) {
        throw new B2ExceptionMappingService().map("Upload {0} failed", e, file);
    } catch (IOException e) {
        throw new DefaultIOExceptionMappingService().map("Upload {0} failed", e, file);
    }
}
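
The anonymous Comparator above restores part-number order before the manifest of checksums is built. Since Java 8 the same ordering can be written with a key extractor; a minimal sketch, with PartResponse as an illustrative stand-in for B2UploadPartResponse:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class SortByPartNumber {
    // Illustrative stand-in for B2UploadPartResponse
    static class PartResponse {
        final Integer partNumber;
        PartResponse(Integer partNumber) { this.partNumber = partNumber; }
        Integer getPartNumber() { return partNumber; }
    }

    public static void main(String[] args) {
        List<PartResponse> completed = new ArrayList<>();
        completed.add(new PartResponse(3));
        completed.add(new PartResponse(1));
        completed.add(new PartResponse(2));

        // Equivalent to the anonymous-class comparator in the example above
        completed.sort(Comparator.comparing(PartResponse::getPartNumber));

        completed.forEach(p -> System.out.println(p.getPartNumber())); // 1 2 3
    }
}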

From source file:ch.cyberduck.core.s3.S3MultipartUploadService.java

@Override
public StorageObject upload(final Path file, final Local local, final BandwidthThrottle throttle,
        final StreamListener listener, final TransferStatus status, final ConnectionCallback callback)
        throws BackgroundException {
    final DefaultThreadPool pool = new DefaultThreadPool("multipart", concurrency);
    try {
        MultipartUpload multipart = null;
        try {
            if (status.isAppend() || status.isRetry()) {
                final List<MultipartUpload> list = multipartService.find(file);
                if (!list.isEmpty()) {
                    multipart = list.iterator().next();
                }
            }
        } catch (AccessDeniedException | InteroperabilityException e) {
            log.warn(String.format("Ignore failure listing incomplete multipart uploads. %s", e.getDetail()));
        }
        final List<MultipartPart> completed = new ArrayList<MultipartPart>();
        // Not found or new upload
        if (null == multipart) {
            if (log.isInfoEnabled()) {
                log.info("No pending multipart upload found");
            }
            final S3Object object = new S3WriteFeature(session, new S3DisabledMultipartService())
                    .getDetails(file, status);
            // ID for the initiated multipart upload.
            multipart = session.getClient().multipartStartUpload(containerService.getContainer(file).getName(),
                    object);
            if (log.isDebugEnabled()) {
                log.debug(String.format("Multipart upload started for %s with ID %s", multipart.getObjectKey(),
                        multipart.getUploadId()));
            }
        } else {
            if (status.isAppend() || status.isRetry()) {
                // Add already completed parts
                completed.addAll(multipartService.list(multipart));
            }
        }
        try {
            final List<Future<MultipartPart>> parts = new ArrayList<Future<MultipartPart>>();
            long remaining = status.getLength();
            long offset = 0;
            for (int partNumber = 1; remaining > 0; partNumber++) {
                boolean skip = false;
                if (status.isAppend() || status.isRetry()) {
                    if (log.isInfoEnabled()) {
                        log.info(String.format("Determine if part number %d can be skipped", partNumber));
                    }
                    for (MultipartPart c : completed) {
                        if (c.getPartNumber().equals(partNumber)) {
                            if (log.isInfoEnabled()) {
                                log.info(String.format("Skip completed part number %d", partNumber));
                            }
                            skip = true;
                            offset += c.getSize();
                            break;
                        }
                    }
                }
                if (!skip) {
                    // Last part can be less than 5 MB. Adjust part size.
                    final Long length = Math.min(Math.max(((status.getLength() + status.getOffset())
                            / S3DefaultMultipartService.MAXIMUM_UPLOAD_PARTS), partsize), remaining);
                    // Submit to queue
                    parts.add(this.submit(pool, file, local, throttle, listener, status, multipart, partNumber,
                            offset, length, callback));
                    remaining -= length;
                    offset += length;
                }
            }
            for (Future<MultipartPart> future : parts) {
                try {
                    completed.add(future.get());
                } catch (InterruptedException e) {
                    log.error("Part upload failed with interrupt failure");
                    status.setCanceled();
                    throw new ConnectionCanceledException(e);
                } catch (ExecutionException e) {
                    log.warn(String.format("Part upload failed with execution failure %s", e.getMessage()));
                    if (e.getCause() instanceof BackgroundException) {
                        throw (BackgroundException) e.getCause();
                    }
                    throw new BackgroundException(e.getCause());
                }
            }
            // Combining all the given parts into the final object. Processing of a Complete Multipart Upload request
            // could take several minutes to complete. Because a request could fail after the initial 200 OK response
            // has been sent, it is important that you check the response body to determine whether the request succeeded.
            final MultipartCompleted complete = session.getClient().multipartCompleteUpload(multipart,
                    completed);
            if (log.isInfoEnabled()) {
                log.info(String.format("Completed multipart upload for %s with %d parts and checksum %s",
                        complete.getObjectKey(), completed.size(), complete.getEtag()));
            }
            if (file.getType().contains(Path.Type.encrypted)) {
                log.warn(String.format("Skip checksum verification for %s with client side encryption enabled",
                        file));
            } else {
                completed.sort(new MultipartPart.PartNumberComparator());
                final StringBuilder concat = new StringBuilder();
                for (MultipartPart part : completed) {
                    concat.append(part.getEtag());
                }
                final String expected = String.format("%s-%d",
                        new MD5ChecksumCompute().compute(concat.toString(), status), completed.size());
                final String reference;
                if (complete.getEtag().startsWith("\"") && complete.getEtag().endsWith("\"")) {
                    reference = complete.getEtag().substring(1, complete.getEtag().length() - 1);
                } else {
                    reference = complete.getEtag();
                }
                if (!expected.equals(reference)) {
                    if (session.getHost().getHostname()
                            .endsWith(preferences.getProperty("s3.hostname.default"))) {
                        throw new ChecksumException(MessageFormat.format(
                                LocaleFactory.localizedString("Upload {0} failed", "Error"), file.getName()),
                                MessageFormat.format(
                                        "Mismatch between MD5 hash {0} of uploaded data and ETag {1} returned by the server",
                                        expected, reference));
                    } else {
                        log.warn(String.format(
                                "Mismatch between MD5 hash %s of uploaded data and ETag %s returned by the server",
                                expected, reference));
                    }
                }
            }
            // Mark parent status as complete
            status.setComplete();
            final StorageObject object = new StorageObject(containerService.getKey(file));
            object.setETag(complete.getEtag());
            return object;
        } finally {
            // Cancel future tasks
            pool.shutdown(false);
        }
    } catch (ServiceException e) {
        throw new S3ExceptionMappingService().map("Upload {0} failed", e, file);
    }
}
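
Here the ordering is packaged as a named comparator class (MultipartPart.PartNumberComparator), the idiomatic choice when the same ordering is reused across call sites. A minimal sketch of the pattern, with Part as an illustrative stand-in:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class ReusableComparator {
    // Illustrative stand-in for MultipartPart
    static class Part {
        final Integer partNumber;
        Part(Integer partNumber) { this.partNumber = partNumber; }
        Integer getPartNumber() { return partNumber; }

        // Named nested comparator, reusable wherever parts need ordering
        static class PartNumberComparator implements Comparator<Part> {
            @Override
            public int compare(Part a, Part b) {
                return a.getPartNumber().compareTo(b.getPartNumber());
            }
        }
    }

    public static void main(String[] args) {
        List<Part> completed = new ArrayList<>();
        completed.add(new Part(2));
        completed.add(new Part(1));
        completed.sort(new Part.PartNumberComparator());
        completed.forEach(p -> System.out.println(p.getPartNumber())); // 1 2
    }
}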

From source file:org.languagetool.rules.spelling.suggestions.XGBoostSuggestionsOrderer.java

@Override
public List<SuggestedReplacement> orderSuggestions(List<String> suggestions, String word,
        AnalyzedSentence sentence, int startPos) {
    if (!isMlAvailable()) {
        throw new IllegalStateException("Illegal call to orderSuggestions() - isMlAvailable() returned false.");
    }
    long featureStartTime = System.currentTimeMillis();

    String langCode = language.getShortCodeWithCountryAndVariant();

    Pair<List<SuggestedReplacement>, SortedMap<String, Float>> candidatesAndFeatures = computeFeatures(
            suggestions, word, sentence, startPos);
    //System.out.printf("Computing %d features took %d ms.%n", suggestions.size(), System.currentTimeMillis() - featureStartTime);
    List<SuggestedReplacement> candidates = candidatesAndFeatures.getLeft();
    SortedMap<String, Float> matchFeatures = candidatesAndFeatures.getRight();
    List<SortedMap<String, Float>> suggestionFeatures = candidates.stream()
            .map(SuggestedReplacement::getFeatures).collect(Collectors.toList());
    if (candidates.isEmpty()) {
        return Collections.emptyList();
    }
    if (candidates.size() != suggestionFeatures.size()) {
        throw new RuntimeException(
                String.format("Mismatch between candidates and corresponding feature list: length %d / %d",
                        candidates.size(), suggestionFeatures.size()));
    }

    int numFeatures = matchFeatures.size() + topN * suggestionFeatures.get(0).size(); // padding with zeros
    float[] data = new float[numFeatures];

    int featureIndex = 0;
    //System.out.printf("Features for match on '%s': %n", word);
    int expectedMatchFeatures = matchFeatureCount.getOrDefault(langCode, -1);
    int expectedCandidateFeatures = candidateFeatureCount.getOrDefault(langCode, -1);
    if (matchFeatures.size() != expectedMatchFeatures) {
        logger.warn(String.format("Match features '%s' do not have expected size %d.", matchFeatures,
                expectedMatchFeatures));
    }
    for (Map.Entry<String, Float> feature : matchFeatures.entrySet()) {
        //System.out.printf("%s = %f%n", feature.getKey(), feature.getValue());
        data[featureIndex++] = feature.getValue();
    }
    //int suggestionIndex = 0;
    for (SortedMap<String, Float> candidateFeatures : suggestionFeatures) {
        if (candidateFeatures.size() != expectedCandidateFeatures) {
            logger.warn(String.format("Candidate features '%s' do not have expected size %d.",
                    candidateFeatures, expectedCandidateFeatures));
        }
        //System.out.printf("Features for candidate '%s': %n", candidates.get(suggestionIndex++).getReplacement());
        for (Map.Entry<String, Float> feature : candidateFeatures.entrySet()) {
            //System.out.printf("%s = %f%n", feature.getKey(), feature.getValue());
            data[featureIndex++] = feature.getValue();
        }
    }
    List<Integer> labels = modelClasses.get(langCode);

    Booster model = null;
    try {
        long modelStartTime = System.currentTimeMillis();
        model = modelPool.borrowObject(language);
        //System.out.printf("Loading model took %d ms.%n", System.currentTimeMillis() - modelStartTime);
        DMatrix matrix = new DMatrix(data, 1, numFeatures);
        long predictStartTime = System.currentTimeMillis();
        float[][] output = model.predict(matrix);
        //System.out.printf("Prediction took %d ms.%n", System.currentTimeMillis() - predictStartTime);
        if (output.length != 1) {
            throw new XGBoostError(String.format(
                    "XGBoost returned array with first dimension of length %d, expected 1.", output.length));
        }
        float[] probabilities = output[0];
        if (probabilities.length != labels.size()) {
            throw new XGBoostError(
                    String.format("XGBoost returned array with second dimension of length %d, expected %d.",
                            probabilities.length, labels.size()));
        }
        // TODO: could react to label -1 (not in list) by e.g. evaluating more candidates
        //if (labels.get(0) != -1) {
        //  throw new IllegalStateException(String.format(
        //    "Expected first label of ML ranking model to be -1 (= suggestion not in list), was %d", labels.get(0)));
        //}
        //float notInListProbability = probabilities[0];
        for (int candidateIndex = 0; candidateIndex < candidates.size(); candidateIndex++) {
            int labelIndex = labels.indexOf(candidateIndex);
            float prob = 0.0f;
            if (labelIndex != -1) {
                prob = probabilities[labelIndex];
            }
            candidates.get(candidateIndex).setConfidence(prob);
        }
    } catch (XGBoostError xgBoostError) {
        logger.error("Error while applying XGBoost model to spelling suggestions", xgBoostError);
        return candidates;
    } catch (Exception e) {
        logger.error("Error while loading XGBoost model for spelling suggestions", e);
        return candidates;
    } finally {
        if (model != null) {
            try {
                modelPool.returnObject(language, model);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }
    candidates.sort(Collections.reverseOrder(Comparator.comparing(SuggestedReplacement::getConfidence)));
    return candidates;
}
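
The final sort ranks candidates by confidence, highest first. Collections.reverseOrder(comparator) is equivalent to comparator.reversed(); a minimal sketch, with Candidate as an illustrative stand-in for SuggestedReplacement:

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class DescendingByConfidence {
    // Illustrative stand-in for SuggestedReplacement
    static class Candidate {
        final String replacement;
        final float confidence;
        Candidate(String replacement, float confidence) {
            this.replacement = replacement;
            this.confidence = confidence;
        }
        Float getConfidence() { return confidence; }
    }

    public static void main(String[] args) {
        List<Candidate> candidates = new ArrayList<>();
        candidates.add(new Candidate("there", 0.2f));
        candidates.add(new Candidate("their", 0.7f));

        // Highest confidence first
        candidates.sort(Collections.reverseOrder(Comparator.comparing(Candidate::getConfidence)));

        System.out.println(candidates.get(0).replacement); // their
    }
}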

From source file:org.sakaiproject.assignment.impl.AssignmentServiceImpl.java

@Override
public List<User> getSortedGroupUsers(Group g) {
    List<User> users = new ArrayList<>();
    g.getMembers().stream()
            .filter(m -> (m.getRole().isAllowed(SECURE_ADD_ASSIGNMENT_SUBMISSION)
                    || g.isAllowed(m.getUserId(), SECURE_ADD_ASSIGNMENT_SUBMISSION))
                    && !m.getRole().isAllowed(SECURE_GRADE_ASSIGNMENT_SUBMISSION)
                    && !g.isAllowed(m.getUserId(), SECURE_GRADE_ASSIGNMENT_SUBMISSION))
            .forEach(member -> {
                try {
                    users.add(userDirectoryService.getUser(member.getUserId()));
                } catch (Exception e) {
                    log.warn("Creating a list of users, user = {}, {}", member.getUserId(), e.getMessage());
                }
            });
    users.sort(new UserComparator());
    return users;
}
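
Here the list is ordered with a dedicated UserComparator instance. The comparator's criteria are not shown in this snippet; the sketch below assumes an alphabetical ordering by a sort name, purely for illustration:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class SortUsers {
    // Illustrative stand-in for the user type; the name-based ordering is an assumption
    static class User {
        final String sortName;
        User(String sortName) { this.sortName = sortName; }
        String getSortName() { return sortName; }
    }

    static class UserComparator implements Comparator<User> {
        @Override
        public int compare(User a, User b) {
            return a.getSortName().compareToIgnoreCase(b.getSortName());
        }
    }

    public static void main(String[] args) {
        List<User> users = new ArrayList<>();
        users.add(new User("Smith, Jane"));
        users.add(new User("Doe, John"));
        users.sort(new UserComparator());
        users.forEach(u -> System.out.println(u.getSortName())); // Doe, John / Smith, Jane
    }
}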

From source file:org.cgiar.ccafs.marlo.action.summaries.ReportingSummaryAction.java

private TypedTableModel getPartnerLeaderTableModel(ProjectPartner projectLeader) {
    TypedTableModel model = new TypedTableModel(
            new String[] { "org_leader", "pp_id", "responsibilities", "countryOffices",
                    "partnerPartnershipFormal", "partnerPartnershipResearchPhase",
                    "partnerPartnershipGeographicScope", "partnerPartnershipRegion",
                    "partnerPartnershipCountries", "partnerPartnershipMainArea", "showRegion", "showCountry" },
            new Class[] { String.class, Long.class, String.class, String.class, String.class, String.class,
                    String.class, String.class, String.class, String.class, Boolean.class, Boolean.class },
            0);
    long ppId = 0;
    String orgLeader = null;
    String responsibilities = null;
    String countryOffices = null;
    String partnerPartnershipFormal = null, partnerPartnershipResearchPhase = null,
            partnerPartnershipGeographicScope = null, partnerPartnershipRegion = null,
            partnerPartnershipCountries = null, partnerPartnershipMainArea = null;
    Boolean showRegion = false, showCountry = false;

    // Partnerships
    List<ProjectPartnerPartnership> projectPartnerPartnerships = projectLeader.getProjectPartnerPartnerships()
            .stream().filter(p -> p.isActive()).collect(Collectors.toList());
    if (projectPartnerPartnerships != null && projectPartnerPartnerships.size() > 0) {
        if (projectPartnerPartnerships.size() > 1) {
            LOG.warn("There is more than one Partner Partnership for P" + this.getProjectID() + " Phase "
                    + this.getSelectedPhase().toString());
        }
        ProjectPartnerPartnership partnerPartnership = projectPartnerPartnerships.get(0);
        if (projectLeader.getHasPartnerships() != null) {
            partnerPartnershipFormal = projectLeader.getHasPartnerships() ? "Yes" : "No";
        }
        List<ProjectPartnerPartnershipResearchPhase> projectPartnerPartnershipResearchPhases = partnerPartnership
                .getProjectPartnerPartnershipResearchPhases().stream().filter(pp -> pp.isActive())
                .collect(Collectors.toList());
        Set<String> researchPhases = new HashSet<String>();
        if (projectPartnerPartnershipResearchPhases != null
                && projectPartnerPartnershipResearchPhases.size() > 0) {
            for (ProjectPartnerPartnershipResearchPhase projectPartnerPartnershipResearchPhase : projectPartnerPartnershipResearchPhases) {
                researchPhases.add(
                        projectPartnerPartnershipResearchPhase.getRepIndPhaseResearchPartnership().getName());
            }
            partnerPartnershipResearchPhase = String.join(",", researchPhases);
        }

        if (partnerPartnership.getGeographicScope() != null) {
            Long geographicScopeID = partnerPartnership.getGeographicScope().getId();
            partnerPartnershipGeographicScope = partnerPartnership.getGeographicScope().getName();
            if (!geographicScopeID.equals(this.getReportingIndGeographicScopeGlobal())) {
                if (geographicScopeID.equals(this.getReportingIndGeographicScopeRegional())) {
                    showRegion = true;
                    if (partnerPartnership.getRegion() != null) {
                        partnerPartnershipRegion = partnerPartnership.getRegion().getName();
                    }
                } else {
                    showCountry = true;
                    List<ProjectPartnerPartnershipLocation> partnershipLocations = partnerPartnership
                            .getProjectPartnerPartnershipLocations().stream().filter(pl -> pl.isActive())
                            .collect(Collectors.toList());
                    if (partnershipLocations != null && partnershipLocations.size() > 0) {
                        partnershipLocations.sort((pl1, pl2) -> pl1.getLocation().getName()
                                .compareTo(pl2.getLocation().getName()));
                        Set<String> countries = new HashSet<String>();
                        for (ProjectPartnerPartnershipLocation partnershipLocation : partnershipLocations) {
                            countries.add(partnershipLocation.getLocation().getName());
                        }
                        partnerPartnershipCountries = String.join(", ", countries);
                    }
                }
            }
        }
        if (partnerPartnership.getMainArea() != null && !partnerPartnership.getMainArea().isEmpty()) {
            partnerPartnershipMainArea = partnerPartnership.getMainArea();
        }
    }

    if (projectLeader.getId() != null && projectLeader.getInstitution() != null) {
        ppId = projectLeader.getId();
        orgLeader = projectLeader.getInstitution().getComposedName();
        responsibilities = projectLeader.getResponsibilities();
        for (ProjectPartnerLocation projectPartnerLocation : projectLeader.getProjectPartnerLocations().stream()
                .filter(ppl -> ppl.isActive()).collect(Collectors.toList())) {
            if (countryOffices == null || countryOffices.isEmpty()) {
                countryOffices = projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
            } else {
                countryOffices += ", "
                        + projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
            }
        }

        model.addRow(new Object[] { orgLeader, ppId, responsibilities, countryOffices, partnerPartnershipFormal,
                partnerPartnershipResearchPhase, partnerPartnershipGeographicScope, partnerPartnershipRegion,
                partnerPartnershipCountries, partnerPartnershipMainArea, showRegion, showCountry });
    } else if (projectLeader.getId() != null && projectLeader.getInstitution() == null) {
        ppId = projectLeader.getId();
        model.addRow(new Object[] { null, ppId, responsibilities, countryOffices, partnerPartnershipFormal,
                partnerPartnershipResearchPhase, partnerPartnershipGeographicScope, partnerPartnershipRegion,
                partnerPartnershipCountries, partnerPartnershipMainArea, showRegion, showCountry });
    } else if (projectLeader.getId() == null && projectLeader.getInstitution() != null) {
        orgLeader = projectLeader.getInstitution().getComposedName();
        responsibilities = projectLeader.getResponsibilities();
        for (ProjectPartnerLocation projectPartnerLocation : projectLeader.getProjectPartnerLocations().stream()
                .filter(ppl -> ppl.isActive()).collect(Collectors.toList())) {
            if (countryOffices == null || countryOffices.isEmpty()) {
                countryOffices = projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
            } else {
                countryOffices += ", "
                        + projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
            }
        }
        model.addRow(new Object[] { orgLeader, null, responsibilities, countryOffices, partnerPartnershipFormal,
                partnerPartnershipResearchPhase, partnerPartnershipGeographicScope, partnerPartnershipRegion,
                partnerPartnershipCountries, partnerPartnershipMainArea, showRegion, showCountry });
    }
    return model;
}
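
The location sort in this method compares extracted names with a two-argument lambda; Comparator.comparing expresses the same ordering more directly. A minimal sketch, with the two classes below as illustrative stand-ins for the MARLO location types:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class SortByLocationName {
    // Illustrative stand-ins for the location types used above
    static class Location {
        final String name;
        Location(String name) { this.name = name; }
        String getName() { return name; }
    }

    static class PartnershipLocation {
        final Location location;
        PartnershipLocation(Location location) { this.location = location; }
        Location getLocation() { return location; }
    }

    public static void main(String[] args) {
        List<PartnershipLocation> partnershipLocations = new ArrayList<>();
        partnershipLocations.add(new PartnershipLocation(new Location("Kenya")));
        partnershipLocations.add(new PartnershipLocation(new Location("Colombia")));

        // Same ordering as (pl1, pl2) -> pl1.getLocation().getName().compareTo(pl2.getLocation().getName())
        partnershipLocations.sort(Comparator.comparing(pl -> pl.getLocation().getName()));

        partnershipLocations.forEach(pl -> System.out.println(pl.getLocation().getName())); // Colombia, Kenya
    }
}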

From source file:org.cgiar.ccafs.marlo.action.summaries.ReportingSummaryAction.java

private TypedTableModel getPartnersOtherTableModel(ProjectPartner projectLeader) {
    TypedTableModel model = new TypedTableModel(
            new String[] { "instituttion", "pp_id", "leader_count", "responsibilities", "countryOffices",
                    "partnerPartnershipFormal", "partnerPartnershipResearchPhase",
                    "partnerPartnershipGeographicScope", "partnerPartnershipRegion",
                    "partnerPartnershipCountries", "partnerPartnershipMainArea", "showRegion", "showCountry" },
            new Class[] { String.class, Long.class, Integer.class, String.class, String.class, String.class,
                    String.class, String.class, String.class, String.class, String.class, Boolean.class,
                    Boolean.class },
            0);
    int leaderCount = 0;

    if (projectLeader.getId() != null) {
        leaderCount = 1;
        // Get list of partners except project leader
        for (ProjectPartner projectPartner : project
                .getProjectPartners().stream().filter(c -> c.isActive() && c.getId() != projectLeader.getId()
                        && c.getPhase() != null && c.getPhase().equals(this.getSelectedPhase()))
                .collect(Collectors.toList())) {
            String responsibilities = null;
            String countryOffices = null;
            String partnerPartnershipFormal = null, partnerPartnershipResearchPhase = null,
                    partnerPartnershipGeographicScope = null, partnerPartnershipRegion = null,
                    partnerPartnershipCountries = null, partnerPartnershipMainArea = null;
            Boolean showRegion = false, showCountry = false;
            // Partnerships
            List<ProjectPartnerPartnership> projectPartnerPartnerships = projectPartner
                    .getProjectPartnerPartnerships().stream().filter(p -> p.isActive())
                    .collect(Collectors.toList());
            if (projectPartnerPartnerships != null && projectPartnerPartnerships.size() > 0) {
                if (projectPartnerPartnerships.size() > 1) {
                    LOG.warn("There is more than one Partner Partnership for P" + this.getProjectID()
                            + " Phase " + this.getSelectedPhase().toString());
                }
                ProjectPartnerPartnership partnerPartnership = projectPartnerPartnerships.get(0);
                if (projectPartner.getHasPartnerships() != null) {
                    partnerPartnershipFormal = projectPartner.getHasPartnerships() ? "Yes" : "No";
                }
                List<ProjectPartnerPartnershipResearchPhase> projectPartnerPartnershipResearchPhases = partnerPartnership
                        .getProjectPartnerPartnershipResearchPhases().stream().filter(pp -> pp.isActive())
                        .collect(Collectors.toList());
                Set<String> researchPhases = new HashSet<String>();
                if (projectPartnerPartnershipResearchPhases != null
                        && projectPartnerPartnershipResearchPhases.size() > 0) {
                    for (ProjectPartnerPartnershipResearchPhase projectPartnerPartnershipResearchPhase : projectPartnerPartnershipResearchPhases) {
                        researchPhases.add(projectPartnerPartnershipResearchPhase
                                .getRepIndPhaseResearchPartnership().getName());
                    }
                    partnerPartnershipResearchPhase = String.join(",", researchPhases);
                }
                if (partnerPartnership.getGeographicScope() != null) {
                    Long geographicScopeID = partnerPartnership.getGeographicScope().getId();
                    partnerPartnershipGeographicScope = partnerPartnership.getGeographicScope().getName();
                    if (!geographicScopeID.equals(this.getReportingIndGeographicScopeGlobal())) {
                        if (geographicScopeID.equals(this.getReportingIndGeographicScopeRegional())) {
                            showRegion = true;
                            if (partnerPartnership.getRegion() != null) {
                                partnerPartnershipRegion = partnerPartnership.getRegion().getName();
                            }
                        } else {
                            showCountry = true;
                            List<ProjectPartnerPartnershipLocation> partnershipLocations = partnerPartnership
                                    .getProjectPartnerPartnershipLocations().stream()
                                    .filter(pl -> pl.isActive()).collect(Collectors.toList());
                            if (partnershipLocations != null && partnershipLocations.size() > 0) {
                                partnershipLocations.sort((pl1, pl2) -> pl1.getLocation().getName()
                                        .compareTo(pl2.getLocation().getName()));
                                Set<String> countries = new HashSet<String>();
                                for (ProjectPartnerPartnershipLocation partnershipLocation : partnershipLocations) {
                                    countries.add(partnershipLocation.getLocation().getName());
                                }
                                partnerPartnershipCountries = String.join(", ", countries);
                            }
                        }
                    }
                }
                if (partnerPartnership.getMainArea() != null && !partnerPartnership.getMainArea().isEmpty()) {
                    partnerPartnershipMainArea = partnerPartnership.getMainArea();
                }
            }

            responsibilities = projectPartner.getResponsibilities();
            for (ProjectPartnerLocation projectPartnerLocation : projectPartner.getProjectPartnerLocations()
                    .stream().filter(ppl -> ppl.isActive()).collect(Collectors.toList())) {
                if (countryOffices == null || countryOffices.isEmpty()) {
                    countryOffices = projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
                } else {
                    countryOffices += ", "
                            + projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
                }
            }
            model.addRow(new Object[] { projectPartner.getInstitution().getComposedName(),
                    projectPartner.getId(), leaderCount, responsibilities, countryOffices,
                    partnerPartnershipFormal, partnerPartnershipResearchPhase,
                    partnerPartnershipGeographicScope, partnerPartnershipRegion, partnerPartnershipCountries,
                    partnerPartnershipMainArea, showRegion, showCountry });
        }
    } else {
        // Get all partners
        for (ProjectPartner projectPartner : project.getProjectPartners().stream().filter(
                c -> c.isActive() && c.getPhase() != null && c.getPhase().equals(this.getSelectedPhase()))
                .collect(Collectors.toList())) {
            String responsibilities = null;
            String countryOffices = null;
            String partnerPartnershipFormal = null, partnerPartnershipResearchPhase = null,
                    partnerPartnershipGeographicScope = null, partnerPartnershipRegion = null,
                    partnerPartnershipCountries = null, partnerPartnershipMainArea = null;
            Boolean showRegion = false, showCountry = false;
            // Partnerships
            List<ProjectPartnerPartnership> projectPartnerPartnerships = projectPartner
                    .getProjectPartnerPartnerships().stream().filter(p -> p.isActive())
                    .collect(Collectors.toList());
            if (projectPartnerPartnerships != null && projectPartnerPartnerships.size() > 0) {
                if (projectPartnerPartnerships.size() > 1) {
                    LOG.warn("There is more than one Partner Partnership for P" + this.getProjectID()
                            + " Phase " + this.getSelectedPhase().toString());
                }
                ProjectPartnerPartnership partnerPartnership = projectPartnerPartnerships.get(0);
                if (projectPartner.getHasPartnerships() != null) {
                    partnerPartnershipFormal = projectPartner.getHasPartnerships() ? "Yes" : "No";
                }
                List<ProjectPartnerPartnershipResearchPhase> projectPartnerPartnershipResearchPhases = partnerPartnership
                        .getProjectPartnerPartnershipResearchPhases().stream().filter(pp -> pp.isActive())
                        .collect(Collectors.toList());
                Set<String> researchPhases = new HashSet<String>();
                if (projectPartnerPartnershipResearchPhases != null
                        && projectPartnerPartnershipResearchPhases.size() > 0) {
                    for (ProjectPartnerPartnershipResearchPhase projectPartnerPartnershipResearchPhase : projectPartnerPartnershipResearchPhases) {
                        researchPhases.add(projectPartnerPartnershipResearchPhase
                                .getRepIndPhaseResearchPartnership().getName());
                    }
                    partnerPartnershipResearchPhase = String.join(",", researchPhases);
                }
                if (partnerPartnership.getGeographicScope() != null) {
                    Long geographicScopeID = partnerPartnership.getGeographicScope().getId();
                    partnerPartnershipGeographicScope = partnerPartnership.getGeographicScope().getName();
                    if (!geographicScopeID.equals(this.getReportingIndGeographicScopeGlobal())) {
                        if (geographicScopeID.equals(this.getReportingIndGeographicScopeRegional())) {
                            showRegion = true;
                            if (partnerPartnership.getRegion() != null) {
                                partnerPartnershipRegion = partnerPartnership.getRegion().getName();
                            }
                        } else {
                            showCountry = true;
                            List<ProjectPartnerPartnershipLocation> partnershipLocations = partnerPartnership
                                    .getProjectPartnerPartnershipLocations().stream()
                                    .filter(pl -> pl.isActive()).collect(Collectors.toList());
                            if (partnershipLocations != null && partnershipLocations.size() > 0) {
                                partnershipLocations.sort((pl1, pl2) -> pl1.getLocation().getName()
                                        .compareTo(pl2.getLocation().getName()));
                                Set<String> countries = new HashSet<String>();
                                for (ProjectPartnerPartnershipLocation partnershipLocation : partnershipLocations) {
                                    countries.add(partnershipLocation.getLocation().getName());
                                }
                                partnerPartnershipCountries = String.join(", ", countries);
                            }
                        }
                    }
                }
                if (partnerPartnership.getMainArea() != null && !partnerPartnership.getMainArea().isEmpty()) {
                    partnerPartnershipMainArea = partnerPartnership.getMainArea();
                }
            }
            responsibilities = projectPartner.getResponsibilities();
            for (ProjectPartnerLocation projectPartnerLocation : projectPartner.getProjectPartnerLocations()
                    .stream().filter(ppl -> ppl.isActive()).collect(Collectors.toList())) {
                if (countryOffices == null || countryOffices.isEmpty()) {
                    countryOffices = projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
                } else {
                    countryOffices += ", "
                            + projectPartnerLocation.getInstitutionLocation().getLocElement().getName();
                }
            }
            model.addRow(new Object[] { projectPartner.getInstitution().getComposedName(),
                    projectPartner.getId(), leaderCount, responsibilities, countryOffices,
                    partnerPartnershipFormal, partnerPartnershipResearchPhase,
                    partnerPartnershipGeographicScope, partnerPartnershipRegion, partnerPartnershipCountries,
                    partnerPartnershipMainArea, showRegion, showCountry });
        }
    }
    return model;
}
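
The location-sorting pattern above (sort the list in place, then join the element names) can be distilled into a minimal, self-contained sketch. The Location class below is a hypothetical stand-in for the location entities above, not a MARLO class:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class SortByNameExample {

    // Hypothetical stand-in for the location entities above.
    static class Location {
        private final String name;

        Location(String name) {
            this.name = name;
        }

        String getName() {
            return name;
        }
    }

    public static void main(String[] args) {
        List<Location> locations = new ArrayList<>(Arrays.asList(
                new Location("Kenya"), new Location("Brazil"),
                new Location("Kenya"), new Location("India")));

        // List.sort mutates the receiver; here we order by location name.
        locations.sort((l1, l2) -> l1.getName().compareTo(l2.getName()));

        // LinkedHashSet removes duplicates while keeping the sorted insertion order;
        // a plain HashSet would discard the ordering just established.
        Set<String> names = new LinkedHashSet<>();
        for (Location location : locations) {
            names.add(location.getName());
        }

        System.out.println(String.join(", ", names)); // Brazil, India, Kenya
    }
}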

From source file:org.cgiar.ccafs.marlo.action.summaries.ReportingSummaryAction.java

private TypedTableModel getDeliverablesTableModel() {
    TypedTableModel model = new TypedTableModel(
            new String[] { "deliverable_id", "title", "deliv_type", "deliv_sub_type", "deliv_status",
                    "deliv_year", "key_output", "leader", "institution", "funding_sources", "cross_cutting" },
            new Class[] { Long.class, String.class, String.class, String.class, String.class, String.class,
                    String.class, String.class, String.class, String.class, String.class },
            0);
    if (!project.getDeliverables().isEmpty()) {
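        // Deliverables are ordered by id, then filtered to active ones whose expected
        // year (or new expected year, for extended/complete ones) matches the selected year.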
        for (Deliverable deliverable : project.getDeliverables().stream()
                .sorted((d1, d2) -> Long.compare(d1.getId(), d2.getId()))
                .filter(d -> d.isActive() && d.getDeliverableInfo(this.getSelectedPhase()) != null && ((d
                        .getDeliverableInfo().getStatus() == null
                        && d.getDeliverableInfo().getYear() == this.getSelectedYear())
                        || (d.getDeliverableInfo().getStatus() != null
                                && d.getDeliverableInfo().getStatus().intValue() == Integer
                                        .parseInt(ProjectStatusEnum.Extended.getStatusId())
                                && d.getDeliverableInfo().getNewExpectedYear() != null
                                && d.getDeliverableInfo().getNewExpectedYear() == this.getSelectedYear())
                        || (d.getDeliverableInfo().getStatus() != null
                                && d.getDeliverableInfo().getYear() == this.getSelectedYear()
                                && d.getDeliverableInfo().getStatus().intValue() == Integer
                                        .parseInt(ProjectStatusEnum.Ongoing.getStatusId()))
                        || (d.getDeliverableInfo().getStatus() != null
                                && d.getDeliverableInfo().getStatus().intValue() == Integer
                                        .parseInt(ProjectStatusEnum.Complete.getStatusId())
                                && ((d.getDeliverableInfo().getNewExpectedYear() != null && d
                                        .getDeliverableInfo().getNewExpectedYear() == this.getSelectedYear())
                                        || (d.getDeliverableInfo().getNewExpectedYear() == null && d
                                                .getDeliverableInfo().getYear() == this.getSelectedYear())))))
                .collect(Collectors.toList())) {
            String delivType = null;
            String delivSubType = null;
            String delivStatus = deliverable.getDeliverableInfo(this.getSelectedPhase())
                    .getStatusName(this.getSelectedPhase());
            String delivYear = null;
            String keyOutput = "";
            String leader = null;
            String institution = null;
            String fundingSources = "";
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getDeliverableType() != null) {
                delivSubType = deliverable.getDeliverableInfo(this.getSelectedPhase()).getDeliverableType()
                        .getName();
                if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getDeliverableType()
                        .getDeliverableCategory() != null) {
                    delivType = deliverable.getDeliverableInfo(this.getSelectedPhase()).getDeliverableType()
                            .getDeliverableCategory().getName();
                }
            }
            if (delivStatus.isEmpty()) {
                delivStatus = null;
            }
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getYear() != 0) {
                delivYear = "" + deliverable.getDeliverableInfo(this.getSelectedPhase()).getYear();
            }
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrpClusterKeyOutput() != null) {
                keyOutput += "? ";
                if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrpClusterKeyOutput()
                        .getCrpClusterOfActivity().getCrpProgram() != null) {
                    keyOutput += deliverable.getDeliverableInfo(this.getSelectedPhase())
                            .getCrpClusterKeyOutput().getCrpClusterOfActivity().getCrpProgram().getAcronym()
                            + " - ";
                }
                keyOutput += deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrpClusterKeyOutput()
                        .getKeyOutput();
            }
            // Get partner responsible and institution
            List<DeliverablePartnership> deliverablePartnershipResponsibles = deliverablePartnershipManager
                    .findByDeliverablePhaseAndType(deliverable.getId(), this.getSelectedPhase().getId(),
                            DeliverablePartnershipTypeEnum.RESPONSIBLE.getValue());
            if (deliverablePartnershipResponsibles != null && !deliverablePartnershipResponsibles.isEmpty()) {
                if (deliverablePartnershipResponsibles.size() > 1) {
                    LOG.warn("There are more than 1 deliverable responsibles for D" + deliverable.getId()
                            + ". Phase: " + this.getSelectedPhase().toString());
                    deliverablePartnershipResponsibles.sort((d1, d2) -> d1.getProjectPartnerPerson().getId()
                            .compareTo(d2.getProjectPartnerPerson().getId()));
                }
                DeliverablePartnership responsible = deliverablePartnershipResponsibles.get(0);

                if (responsible != null) {
                    if (responsible.getProjectPartnerPerson() != null) {
                        ProjectPartnerPerson responsibleppp = responsible.getProjectPartnerPerson();
                        leader = responsibleppp.getUser().getComposedName() + "<br>&lt;"
                                + responsibleppp.getUser().getEmail() + "&gt;";
                        if (responsibleppp.getProjectPartner() != null) {
                            if (responsibleppp.getProjectPartner().getInstitution() != null) {
                                institution = responsibleppp.getProjectPartner().getInstitution()
                                        .getComposedName();
                            }
                        }
                    }
                }
            }
            // Get funding sources, if they exist
            for (DeliverableFundingSource dfs : deliverable.getDeliverableFundingSources().stream().filter(
                    d -> d.isActive() && d.getPhase() != null && d.getPhase().equals(this.getSelectedPhase()))
                    .collect(Collectors.toList())) {
                fundingSources += "? " + "(" + dfs.getFundingSource().getId() + ") - "
                        + dfs.getFundingSource().getFundingSourceInfo(this.getSelectedPhase()).getTitle()
                        + "<br>";
            }
            if (fundingSources.isEmpty()) {
                fundingSources = null;
            }
            // Get cross_cutting dimension
            String crossCutting = "";
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingNa() != null) {
                if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingNa()) {
                    crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;● N/A <br>";
                }
            }
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingGender() != null) {
                if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingGender()) {
                    Long scoring = deliverable.getDeliverableInfo().getCrossCuttingScoreGender();
                    if (scoring != null) {
                        CrossCuttingScoring crossCuttingScoring = crossCuttingScoringManager
                                .getCrossCuttingScoringById(scoring);
                        crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;? Gender ("
                                + crossCuttingScoring.getDescription() + ")<br>";
                    } else {
                        crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;? Gender <br>";
                    }
                }
            }
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingYouth() != null) {
                if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingYouth()) {
                    Long scoring = deliverable.getDeliverableInfo().getCrossCuttingScoreYouth();
                    if (scoring != null) {
                        CrossCuttingScoring crossCuttingScoring = crossCuttingScoringManager
                                .getCrossCuttingScoringById(scoring);
                        crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;? Youth ("
                                + crossCuttingScoring.getDescription() + ")<br>";
                    } else {
                        crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;? Youth <br>";
                    }
                }
            }
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingCapacity() != null) {
                if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingCapacity()) {
                    Long scoring = deliverable.getDeliverableInfo().getCrossCuttingScoreCapacity();
                    if (scoring != null) {
                        CrossCuttingScoring crossCuttingScoring = crossCuttingScoringManager
                                .getCrossCuttingScoringById(scoring);
                        crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;? Capacity Development ("
                                + crossCuttingScoring.getDescription() + ")<br>";
                    } else {
                        crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;? Capacity Development <br>";
                    }
                }
            }
            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingGender() != null) {
                if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingGender()) {
                    if (deliverable.getDeliverableGenderLevels() == null
                            || deliverable.getDeliverableGenderLevels().isEmpty()) {
                        crossCutting += "<br><b>Gender level(s):</b><br>&nbsp;&nbsp;&nbsp;&nbsp;&lt;Not Defined&gt;";
                    } else {
                        crossCutting += "<br><b>Gender level(s): </b><br>";
                        for (DeliverableGenderLevel dgl : deliverable.getDeliverableGenderLevels().stream()
                                .filter(gl -> gl.isActive() && gl.getPhase() != null
                                        && gl.getPhase().equals(this.getSelectedPhase()))
                                .collect(Collectors.toList())) {
                            if (dgl.getGenderLevel() != 0.0) {
                                crossCutting += "&nbsp;&nbsp;&nbsp;&nbsp;? " + genderTypeManager
                                        .getGenderTypeById(dgl.getGenderLevel()).getDescription() + "<br>";
                            }
                        }
                    }
                }
            }
            if (crossCutting.isEmpty()) {
                crossCutting = null;
            }
            if (keyOutput.isEmpty()) {
                keyOutput = null;
            }
            model.addRow(new Object[] { deliverable.getId(),
                    deliverable.getDeliverableInfo(this.getSelectedPhase()).getTitle(), delivType, delivSubType,
                    delivStatus, delivYear, keyOutput, leader, institution, fundingSources, crossCutting });
        }
    }
    return model;
}
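
The explicit comparator lambdas above, such as (d1, d2) -> d1.getProjectPartnerPerson().getId().compareTo(d2.getProjectPartnerPerson().getId()), can usually be expressed more declaratively with Comparator.comparing. The sketch below is a minimal illustration of that equivalence; the Partnership class is a hypothetical stand-in, not a MARLO class:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class SortByIdExample {

    // Hypothetical stand-in for the partnership entities above.
    static class Partnership {
        private final Long id;

        Partnership(Long id) {
            this.id = id;
        }

        Long getId() {
            return id;
        }
    }

    public static void main(String[] args) {
        List<Partnership> partnerships = new ArrayList<>(Arrays.asList(
                new Partnership(42L), new Partnership(7L), new Partnership(19L)));

        // Equivalent to partnerships.sort((p1, p2) -> p1.getId().compareTo(p2.getId())),
        // but the key-extractor form states the sort key directly.
        partnerships.sort(Comparator.comparing(Partnership::getId));

        partnerships.forEach(p -> System.out.println(p.getId())); // 7, 19, 42
    }
}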