Example usage for org.apache.commons.lang3.tuple Pair get

List of usage examples for org.apache.commons.lang3.tuple Pair get

Introduction

On this page you can find example usages of get called on values held in an org.apache.commons.lang3.tuple Pair, typically a List or Map retrieved with getLeft() or getRight().

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
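
A minimal, self-contained sketch of the pattern shared by the examples below: get is called on a List or a Map stored inside a Pair, after unwrapping it with getLeft() or getRight(). All names and values here are hypothetical.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class PairGetSketch {
    public static void main(String[] args) {
        // Hypothetical pair: header names on the left, named parameters on the right.
        Map<String, Object> params = new HashMap<>();
        params.put("limit", 10);
        Pair<List<String>, Map<String, Object>> spec = Pair.of(Arrays.asList("id", "name"), params);

        // get(int index) on the List unwrapped from the left element.
        String firstHeader = spec.getLeft().get(0);   // "id"

        // get(Object key) on the Map unwrapped from the right element.
        Object limit = spec.getRight().get("limit");  // 10

        System.out.println(firstHeader + " / " + limit);
    }
}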

Usage

From source file: org.kitodo.production.forms.dataeditor.StructurePanel.java

/**
 * Loads the tree(s) into the panel and sets the selected element to the
 * root element of the structure tree.
 */
public void show() {
    this.structure = dataEditor.getWorkpiece().getRootElement();
    Pair<List<DefaultTreeNode>, Collection<View>> result = buildStructureTree();
    this.logicalTree = result.getLeft().get(result.getLeft().size() - 1); // TODO size() - 1 might be dangerous
    if (separateMedia != null) {
        this.physicalTree = buildMediaTree(dataEditor.getWorkpiece().getMediaUnit());
    }
    this.selectedLogicalNode = logicalTree.getChildren().get(0);
    this.selectedPhysicalNode = physicalTree.getChildren().get(0);
    this.previouslySelectedLogicalNode = selectedLogicalNode;
    this.previouslySelectedPhysicalNode = selectedPhysicalNode;
}
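
The Pair-related call here is result.getLeft().get(result.getLeft().size() - 1): the left element of the Pair is a List of tree nodes and get(int index) fetches its last entry. A standalone sketch of the same access pattern, using hypothetical data rather than the Kitodo classes:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.commons.lang3.tuple.Pair;

public class LastLeftElementSketch {
    public static void main(String[] args) {
        // Hypothetical result: node labels on the left, auxiliary views on the right.
        Pair<List<String>, Collection<Integer>> result =
                Pair.of(Arrays.asList("child0", "child1", "root"), Arrays.asList(1, 2, 3));

        // Same pattern as above: the last element of the list held on the left.
        // get(size() - 1) throws IndexOutOfBoundsException on an empty list,
        // which is why the original carries a TODO about it.
        List<String> nodes = result.getLeft();
        String last = nodes.get(nodes.size() - 1);
        System.out.println(last); // "root"
    }
}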

From source file: org.molasdin.wbase.hibernate.cursor.BasicHibernateQueryCursor.java

@Override
@SuppressWarnings("unchecked")
public List<T> dataCallback(Session session) {
    Pair<Pair<String, String>, Map<String, Object>> spec = searchSpecification().query();
    Pair<String, String> query = spec.getLeft();
    StrBuilder resultQuery = new StrBuilder();
    resultQuery.append(query.getLeft());
    resultQuery.append(' ');
    resultQuery.append(query.getRight());
    populateFilters(resultQuery, spec.getLeft().getRight(), searchSpecification().filterModes());
    List<Pair<String, Order>> orders = orders();
    if (!orders.isEmpty()) {
        StrBuilder builder = new StrBuilder(ORDER_BY);
        int pos = 0;
        for (Pair<String, Order> order : orders) {
            String prop = translateProperty(order.getLeft());
            builder.appendSeparator(",", pos);
            builder.append(
                    String.format(ORDER_BY_CLAUSE, prop, Order.ASC.equals(order.getRight()) ? "asc" : "desc"));
            pos++;
        }
        resultQuery.append(' ').append(builder.toString());
    }

    int rowOffset = calculatedRowOffset();

    Query q = session.createQuery(resultQuery.toString());
    for (String param : spec.getRight().keySet()) {
        q.setParameter(param, spec.getRight().get(param));
    }
    return postProcessData((List<T>) q.setFirstResult(rowOffset).setMaxResults(pageSize()).list());
}
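
Here spec.getRight() is the Map of named parameters and get(param) looks up each value as it is bound to the Hibernate Query. The same loop in isolation, with a plain Map standing in for the query (hypothetical data, no Hibernate dependency):

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class BindFromPairRightSketch {
    public static void main(String[] args) {
        // Hypothetical query spec: HQL fragments on the left, named parameters on the right.
        Map<String, Object> params = new HashMap<>();
        params.put("name", "demo");
        Pair<Pair<String, String>, Map<String, Object>> spec =
                Pair.of(Pair.of("select e", "from Entity e where e.name = :name"), params);

        // Same shape as the loop above: each key is resolved with get(Object key).
        Map<String, Object> bound = new HashMap<>();
        for (String param : spec.getRight().keySet()) {
            bound.put(param, spec.getRight().get(param));
        }

        // Iterating entrySet() would avoid the second lookup per key,
        // but the keySet()/get() form mirrors the source example.
        System.out.println(bound); // {name=demo}
    }
}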

From source file: org.molasdin.wbase.hibernate.cursor.BasicHibernateQueryCursor.java

@Override
public Long totalCallback(Session session) {
    Pair<Pair<String, String>, Map<String, Object>> spec = searchSpecification().query();
    StrBuilder query = new StrBuilder("select ");
    if (searchSpecification().distinctProperty() != null) {
        query.append("count( distinct ").append(searchSpecification().distinctProperty()).append(" )");
    } else {
        query.append("count(*)");
    }
    query.append(' ');
    query.append(spec.getLeft().getRight());
    populateFilters(query, spec.getLeft().getRight(), searchSpecification().filterModes());
    Query q = session.createQuery(query.toString());
    for (String param : spec.getRight().keySet()) {
        q.setParameter(param, spec.getRight().get(param));
    }
    return (Long) q.uniqueResult();
}

From source file: org.openepics.discs.ccdb.gui.ui.common.AbstractExcelSingleFileImportUI.java

@Override
public ImportFileStatistics getImportedFileStatistics() {
    Preconditions.checkNotNull(importData);
    try (InputStream inputStream = new ByteArrayInputStream(importData)) {
        final List<Pair<Integer, List<String>>> inputRows = ExcelImportFileReader.importExcelFile(inputStream,
                dataLoader.getImportDataStartIndex(), dataLoader.getDataWidth());

        int dataRows = 0;
        int createRows = 0;
        int updateRows = 0;
        int deleteRows = 0;

        for (final Pair<Integer, List<String>> row : inputRows) {
            final String command = row.getRight().get(0);
            switch (command) {
            case DataLoader.CMD_CREATE:
            case DataLoader.CMD_CREATE_DEVICE:
            case DataLoader.CMD_CREATE_PROPERTY:
            case DataLoader.CMD_CREATE_DEVICE_TYPE:
            case DataLoader.CMD_CREATE_ENTITY:
            case DataLoader.CMD_CREATE_RELATION:
            case DataLoader.CMD_INSTALL:
                ++createRows;
                break;
            case DataLoader.CMD_UPDATE:
            case DataLoader.CMD_UPDATE_DEVICE:
            case DataLoader.CMD_UPDATE_PROPERTY:
            case DataLoader.CMD_UPDATE_DEVICE_TYPE:
            case DataLoader.CMD_UPDATE_ENTITY:
                ++updateRows;
                break;
            case DataLoader.CMD_DELETE:
            case DataLoader.CMD_DELETE_DEVICE:
            case DataLoader.CMD_DELETE_PROPERTY:
            case DataLoader.CMD_DELETE_DEVICE_TYPE:
            case DataLoader.CMD_DELETE_ENTITY:
            case DataLoader.CMD_DELETE_ENTITY_AND_CHILDREN:
            case DataLoader.CMD_DELETE_RELATION:
            case DataLoader.CMD_UNINSTALL:
                ++deleteRows;
                break;
            }
            if (DataLoader.CMD_END.equals(command)) {
                break;
            }
            ++dataRows;
        }
        return new ImportFileStatistics(dataRows, createRows, updateRows, deleteRows);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
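
Each imported row is a Pair of the spreadsheet row number (left) and its cell values (right); row.getRight().get(0) reads the command cell that drives the statistics. A compact sketch of the same classification, with hypothetical commands in place of the DataLoader constants:

import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang3.tuple.Pair;

public class CountRowCommandsSketch {
    public static void main(String[] args) {
        // Hypothetical rows: row number on the left, cell values on the right.
        List<Pair<Integer, List<String>>> rows = Arrays.asList(
                Pair.of(2, Arrays.asList("CREATE", "device-1")),
                Pair.of(3, Arrays.asList("UPDATE", "device-1")),
                Pair.of(4, Arrays.asList("DELETE", "device-1")));

        int createRows = 0;
        int updateRows = 0;
        int deleteRows = 0;
        for (Pair<Integer, List<String>> row : rows) {
            String command = row.getRight().get(0); // first cell of the row
            switch (command) {
            case "CREATE":
                ++createRows;
                break;
            case "UPDATE":
                ++updateRows;
                break;
            case "DELETE":
                ++deleteRows;
                break;
            }
        }
        System.out.println(createRows + " " + updateRows + " " + deleteRows); // 1 1 1
    }
}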

From source file: org.openlmis.fulfillment.Resource2DbTest.java

@Test
public void resourceCsvToBatchedPairShouldReturnListPair() throws IOException {
    // given
    Resource resource = mock(Resource.class);
    InputStream inputStream = spy(IOUtils.toInputStream("Col1,Col2\na,b"));
    when(resource.getInputStream()).thenReturn(inputStream);

    // when
    Pair<List<String>, List<Object[]>> batchedPair = resource2Db.resourceCsvToBatchedPair(resource);

    // then
    List headers = batchedPair.getLeft();
    assertEquals(2, headers.size());
    assertEquals("Col1", headers.get(0));
    assertEquals("Col2", headers.get(1));

    List rows = batchedPair.getRight();
    assertEquals(1, rows.size());
    Object[] rowData = batchedPair.getRight().get(0);
    assertEquals(2, rowData.length);
    assertEquals("a", rowData[0]);
    assertEquals("b", rowData[1]);
}
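
The test unwraps the headers with getLeft() and the first data row with getRight().get(0). A hand-rolled sketch that builds the same Pair shape from a CSV string, without the Spring Resource or resource2Db machinery:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.lang3.tuple.Pair;

public class CsvToBatchedPairSketch {
    public static void main(String[] args) {
        String csv = "Col1,Col2\na,b";
        String[] lines = csv.split("\n");

        List<String> headers = Arrays.asList(lines[0].split(","));
        List<Object[]> dataRows = new ArrayList<>();
        for (int i = 1; i < lines.length; i++) {
            dataRows.add(lines[i].split(","));
        }

        Pair<List<String>, List<Object[]>> batchedPair = Pair.of(headers, dataRows);

        // Same reads as the assertions above.
        System.out.println(batchedPair.getLeft().get(0));   // Col1
        Object[] rowData = batchedPair.getRight().get(0);
        System.out.println(rowData[0] + "," + rowData[1]);  // a,b
    }
}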

From source file: rapture.dp.DefaultDecisionProcessExecutor.java

public void publishSplitChildren(Worker parent, Step step, Workflow flow) {
    String base = step.getExecutable().substring(StepHelper.SPLIT_PREFIX.length() + 1);
    String names[] = base.split(",");
    parent.setWaitCount(names.length);
    parent.setStatus(WorkerExecutionState.BLOCKED);
    List<ImmutablePair<Worker, String>> children = Lists.newArrayList();
    List<Worker> stillborn = Lists.newArrayList();

    // prepare children and eliminate
    for (int i = 0; i < names.length; i++) {
        Step target = getStep(names[i], flow);
        Worker child = SplitUtils.createSplitChild(parent, flow, i, names.length, target);
        if (target == null) {
            child.setDetail("Attempt to start worker with non-extant step " + names[i] + " from "
                    + step.getName() + " in " + flow.getWorkflowURI());
            log.error(child.getDetail());
            parent.setWaitCount(parent.getWaitCount() - 1);
            child.setStatus(WorkerExecutionState.ERROR);
            saveWorker(child);
            stillborn.add(child);
        } else {
            saveWorker(child);
            children.add(ImmutablePair.of(child, calculateCategory(target, flow)));
        }
    }
    saveWorker(parent);

    // register workers with workorder
    String workOrderUri = parent.getWorkOrderURI();
    WorkOrder workOrder = WorkOrderStorage.readByFields(workOrderUri);

    try {
        grabMultiWorkerLock(workOrder, parent, FORCE);
        workOrder = WorkOrderStorage.readByFields(workOrderUri);
        for (Pair<Worker, String> pair : children) {
            workOrder.getWorkerIds().add(pair.getLeft().getId());
            if (log.isDebugEnabled()) {
                String at = (pair.getLeft().getStack().size() > 0) ? pair.getLeft().getStack().get(0)
                        : "UNKNOWN_LOCATION";
                log.debug("Adding new worker " + pair.getLeft().getId() + " at " + at);
            }
        }
        for (Worker child : stillborn) {
            workOrder.getWorkerIds().add(child.getId());
        }
        WorkOrderStorage.add(new RaptureURI(workOrderUri, Scheme.WORKORDER), workOrder,
                ContextFactory.getKernelUser().getUser(), "Update for split");
        JoinCountdown countdown = new JoinCountdown();
        countdown.setParentId(parent.getId());
        countdown.setWorkOrderURI(parent.getWorkOrderURI());
        countdown.setWaitCount(children.size());
        JoinCountdownStorage.add(null, countdown, ContextFactory.getKernelUser().getUser(),
                "Starting Countdown");
    } finally {
        releaseMultiWorkerLock(workOrder, parent, FORCE);
    }

    // publish viable children
    for (Pair<Worker, String> pair : children) {
        publishStep(pair.getLeft(), pair.getRight());
    }
}
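
The Pair here couples each child Worker with its publish category; pair.getLeft().getStack().get(0) reads the first stack entry of the worker held on the left. A standalone sketch of that pairing, with a hypothetical Worker stand-in rather than the Rapture classes:

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class WorkerCategoryPairsSketch {

    // Hypothetical stand-in for the Rapture Worker type.
    static class Worker {
        private final String id;
        private final List<String> stack = new ArrayList<>();

        Worker(String id, String firstStep) {
            this.id = id;
            this.stack.add(firstStep);
        }

        String getId() { return id; }
        List<String> getStack() { return stack; }
    }

    public static void main(String[] args) {
        List<ImmutablePair<Worker, String>> children = new ArrayList<>();
        children.add(ImmutablePair.of(new Worker("w1", "stepA"), "categoryA"));
        children.add(ImmutablePair.of(new Worker("w2", "stepB"), "categoryB"));

        for (Pair<Worker, String> pair : children) {
            // Same access pattern as above: worker on the left, category on the right.
            String at = pair.getLeft().getStack().isEmpty()
                    ? "UNKNOWN_LOCATION" : pair.getLeft().getStack().get(0);
            System.out.println(pair.getLeft().getId() + " at " + at + " -> " + pair.getRight());
        }
    }
}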