Example usage for java.util ArrayList contains

List of usage examples for java.util ArrayList contains

Introduction

This page collects usage examples for java.util.ArrayList.contains, drawn from real source files.

Prototype

public boolean contains(Object o) 

Document

Returns true if this list contains the specified element. More formally, returns true if and only if this list contains at least one element e such that Objects.equals(o, e).
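
In practice this means a lookup is a linear scan over the backing array, equality is equals-based rather than identity-based, and null is a legal argument. A minimal, self-contained sketch:

import java.util.ArrayList;
import java.util.List;

public class ContainsDemo {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        names.add("alice");
        names.add(null); // ArrayList permits null elements

        System.out.println(names.contains(new String("alice"))); // true: matched via equals(), not ==
        System.out.println(names.contains(null));                // true: null arguments are matched with a reference check
        System.out.println(names.contains("bob"));               // false
    }
}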

Usage

From source file:by.stub.yaml.stubs.StubRequest.java

@VisibleForTesting
boolean arraysIntersect(final ArrayList<String> dataStoreArray, final ArrayList<String> thisAssertingArray) {
    if (dataStoreArray.isEmpty()) {
        return true;
    } else if (!thisAssertingArray.isEmpty()) {
        for (final String entry : thisAssertingArray) {
            if (dataStoreArray.contains(entry)) {
                return true;
            }
        }
    }
    return false;
}
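
The method above returns true for an empty dataStoreArray and otherwise tests whether any asserted entry occurs in the store. For the intersection test itself, java.util.Collections offers disjoint, which reports whether two collections share no elements. A sketch of the same check under that substitution (not the project's actual code):

import java.util.Collections;
import java.util.List;

// Sketch: two lists intersect exactly when they are not disjoint.
// The empty-store special case from the original is preserved.
static boolean arraysIntersect(List<String> dataStore, List<String> asserting) {
    return dataStore.isEmpty() || !Collections.disjoint(dataStore, asserting);
}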

From source file:matrix.CreateUserList.java

public void tweetsToUserList()
        throws FileNotFoundException, UnsupportedEncodingException, IOException, ParseException {

    File fout = new File(userListPathOutput);
    FileOutputStream fos = new FileOutputStream(fout);
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fos));

    BufferedReader inputTW = new BufferedReader(
            new InputStreamReader(new FileInputStream(tweetsJsonInput), "ISO-8859-9"));
    ArrayList<String> userList = new ArrayList<>();
    JSONParser jsonParser = new JSONParser();
    JSONArray jsonArray = (JSONArray) jsonParser.parse(inputTW);
    int sayac = 0; // "sayac" is Turkish for "counter"
    for (Object obj : jsonArray) {

        JSONObject tweet = (JSONObject) obj;
        JSONObject user = (JSONObject) tweet.get("user");
        String userID = user.get("id").toString();
        String userName = user.get("name").toString();

        if (!userList.contains(userID)) {
            userList.add(userID);
            bw.write(userID + "," + userName);
            bw.newLine();
            sayac++;
        }

    }
    bw.close(); // flush the writer; without closing, buffered output may never reach the file
    inputTW.close();
    System.out.println(sayac);
}
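
Because userList.contains(userID) rescans the whole list for every tweet, deduplication here is quadratic. A java.util.HashSet gives the same result in amortized O(1) per lookup, and Set.add already reports whether the element was new. A sketch with hypothetical names (users supplies id/name pairs):

import java.io.BufferedWriter;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

// Hypothetical helper: writes each user once, relying on Set.add's boolean result.
static int writeDistinctUsers(Iterable<String[]> users, BufferedWriter bw) throws IOException {
    Set<String> seen = new HashSet<>();
    int count = 0;
    for (String[] u : users) {          // u[0] = id, u[1] = name
        if (seen.add(u[0])) {           // false when the id was already written
            bw.write(u[0] + "," + u[1]);
            bw.newLine();
            count++;
        }
    }
    return count;
}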

From source file:com.marklogic.dom.AttrImpl.java

@Override
protected int getPrefixID(int uriAtom) {
    int parentNodeRepID = tree.nodeParentNodeRepID[node];
    if (parentNodeRepID == -1)
        parentNodeRepID = node;
    ArrayList<Integer> ubp = new ArrayList<Integer>();
    long sum_ordinal = tree.ordinal + tree.nodeOrdinal[parentNodeRepID];
    for (int ns = getNSNodeID(sum_ordinal); ns >= 0; ns = nextNSNodeID(ns, 0)) {
        int uri = tree.nsNodeUriAtom[ns];
        int prefix = tree.nsNodePrefixAtom[ns];
        if (tree.atomString(uri) == null) {
            ubp.add(prefix);
            continue;
        }
        if (uri != uriAtom)
            continue;
        if (ubp.contains(prefix))
            continue;
        if (tree.atomString(prefix) == null)
            continue;
        return prefix;
    }
    return -1;
}

From source file:userinterface.StateNetworkAdminRole.StateReportsJPanel.java

private CategoryDataset createDataSetForPatientsReports() {
    ArrayList<String> cityList = new ArrayList<>();
    ArrayList<Integer> yearList = new ArrayList<>();

    DefaultCategoryDataset barChartData = new DefaultCategoryDataset();

    for (Patient patient : patientList.getPatientDirectory()) {
        if (!cityList.contains(patient.getPatientLocation())) {
            cityList.add(patient.getPatientLocation());
        }
    }

    for (Patient patient : patientList.getPatientDirectory()) {
        Date reqDate = patient.getTransplantRequestDate();
        Calendar cal = Calendar.getInstance();
        cal.setTime(reqDate);
        int year = cal.get(Calendar.YEAR);
        if (!yearList.contains(year)) {
            yearList.add(year);
        }

    }
    for (String city : cityList) {
        Map<Integer, Double> yearPatientMap = new HashMap<>();
        for (int reqYear : yearList) {
            double sum = 0;
            int count = 0;
            double avg = 0.0;
            for (Patient patient2 : patientList.getPatientDirectory()) {
                if (patient2.getPatientLocation().equals(city)) {
                    // the patient's address matches this city;
                    // next, check the patient's request year against reqYear
                    Date compDate = patient2.getTransplantCompletionDate();

                    if (compDate != null) {
                        Date reqDate = patient2.getTransplantRequestDate();
                        Calendar cal = Calendar.getInstance();
                        cal.setTime(reqDate);
                        int year = cal.get(Calendar.YEAR);
                        double diff = 0;
                        if (year == reqYear) {
                            Calendar startCalendar = new GregorianCalendar();
                            startCalendar.setTime(reqDate);
                            Calendar endCalendar = new GregorianCalendar();
                            endCalendar.setTime(patient2.getTransplantCompletionDate());

                            int diffyear = endCalendar.get(Calendar.YEAR) - startCalendar.get(Calendar.YEAR);
                            int monthDiff = diffyear * 12 + endCalendar.get(Calendar.MONTH)
                                    - startCalendar.get(Calendar.MONTH);

                            sum += monthDiff;
                            count++;

                        }

                    }
                }
            }

            avg = (count > 0) ? sum / count : 0.0; // guard against 0/0 producing NaN in the chart data
            yearPatientMap.put(reqYear, avg);
        }
        //putting in data set
        for (Map.Entry<Integer, Double> entryset : yearPatientMap.entrySet()) {
            barChartData.addValue(entryset.getValue(), city, entryset.getKey());
        }
    }

    return barChartData;
}
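
The two preliminary scans collect distinct cities and years via contains. A java.util.LinkedHashSet does the same deduplication on add, in one pass and in insertion order; a sketch reusing the accessors above (assumed unchanged):

import java.util.Calendar;
import java.util.LinkedHashSet;
import java.util.Set;

// Sketch: duplicate adds are no-ops on a Set, so no contains() check is needed.
Set<String> cities = new LinkedHashSet<>();
Set<Integer> years = new LinkedHashSet<>();
for (Patient patient : patientList.getPatientDirectory()) {
    cities.add(patient.getPatientLocation());
    Calendar cal = Calendar.getInstance();
    cal.setTime(patient.getTransplantRequestDate());
    years.add(cal.get(Calendar.YEAR));
}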

From source file:edu.oregonstate.eecs.mcplan.ml.ClusterContingencyTable.java

public ClusterContingencyTable(final ArrayList<Set<RealVector>> U, final ArrayList<Set<RealVector>> V) {
    R = U.size();
    C = V.size();

    int N = 0;
    a = new int[R];
    b = new int[C];
    n = new int[R][C];
    for (int i = 0; i < R; ++i) {
        final Set<RealVector> u = U.get(i);
        for (int j = 0; j < C; ++j) {
            final Set<RealVector> v = V.get(j);
            for (final RealVector uu : u) {
                if (v.contains(uu)) {
                    a[i] += 1;
                    b[j] += 1;
                    n[i][j] += 1;
                    N += 1;
                }
            }
        }
    }
    this.N = N;
}
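
Here the membership test is Set.contains rather than ArrayList.contains, but both resolve through the element type's equals (and, for hash-based sets, hashCode), so whether v.contains(uu) behaves as intended depends on the concrete RealVector implementation overriding both. A minimal illustration with a plain value class:

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

final class Point {
    final double x, y;
    Point(double x, double y) { this.x = x; this.y = y; }
    @Override public boolean equals(Object o) {
        return o instanceof Point && ((Point) o).x == x && ((Point) o).y == y;
    }
    @Override public int hashCode() { return Objects.hash(x, y); }
}

public class EqualsDemo {
    public static void main(String[] args) {
        List<Point> points = new ArrayList<>();
        points.add(new Point(1, 2));
        // true only because Point overrides equals; with the default
        // identity-based Object.equals, this lookup would fail.
        System.out.println(points.contains(new Point(1, 2)));
    }
}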

From source file:com.clustercontrol.agent.filecheck.FileCheck.java

/**
 * Checks the monitored directory for created, deleted, and modified files,
 * and collects a file-check job kick for every matching rule.
 */
public void run() {
    m_log.debug("check start. directory=" + m_directory);

    ArrayList<JobFileCheck> kickList = new ArrayList<JobFileCheck>();

    // 1. Verify the directory and list its files
    File directory = new File(m_directory);
    if (!directory.isDirectory()) {
        m_log.warn(m_directory + " is not directory");
        return;
    }
    File[] files = directory.listFiles();
    if (files == null) {
        m_log.warn(m_directory + " does not have a reference permission");
        return;
    }
    ArrayList<File> fileList = new ArrayList<File>();

    for (File file : files) {
        if (!file.isFile()) {
            m_log.debug(file.getName() + " is not file");
            continue;
        }
        fileList.add(file);
    }

    // 2. Detect files deleted since the last run and purge them from the caches
    ArrayList<String> filenameList = new ArrayList<String>();
    for (File file : fileList) {
        filenameList.add(file.getName());
    }
    for (String filename : fileTimestampCache.keySet()) {
        if (!filenameList.contains(filename)) {
            fileTimestampCache.remove(filename);
            fileTimestampFlagCache.remove(filename);
            fileSizeCache.remove(filename);
            fileSizeFlagCache.remove(filename);
            for (JobFileCheck check : m_jobFileCheckList) {
                if (check.getEventType() == FileCheckConstant.TYPE_DELETE && matchFile(check, filename)) {
                    m_log.info("kickList.add [" + filename + "] (delete)");
                    JobFileCheck kick = getCopy(check);
                    kick.setFileName(filename);
                    kickList.add(kick);
                }
            }
        }
    }

    // 3. Check file timestamps (create and timestamp-modify events)
    for (File file : fileList) {
        String filename = file.getName();
        Long newTimestamp = file.lastModified();
        Long oldTimestamp = fileTimestampCache.get(filename);
        if (oldTimestamp == null) {
            fileTimestampCache.put(filename, newTimestamp);
            fileTimestampFlagCache.put(filename, false);
            for (JobFileCheck check : m_jobFileCheckList) {
                if (check.getEventType() == FileCheckConstant.TYPE_CREATE && matchFile(check, filename)) {
                    m_log.info("kickList.add [" + filename + "] (create)");
                    JobFileCheck kick = getCopy(check);
                    kick.setFileName(filename);
                    kickList.add(kick);
                }
            }
        } else if (!oldTimestamp.equals(newTimestamp)) {
            m_log.info("timestamp : " + oldTimestamp + "->" + newTimestamp + " (" + filename + ")");
            fileTimestampCache.put(filename, newTimestamp);
            fileTimestampFlagCache.put(filename, true);
        } else {
            if (fileTimestampFlagCache.get(filename) != null && fileTimestampFlagCache.get(filename)) {
                // the timestamp changed on an earlier run and is stable now: the modification is complete
                for (JobFileCheck check : m_jobFileCheckList) {
                    if (check.getEventType() == FileCheckConstant.TYPE_MODIFY
                            && check.getModifyType() == FileCheckConstant.TYPE_MODIFY_TIMESTAMP
                            && matchFile(check, filename)) {
                        m_log.info("kickList.add [" + filename + "] (timestamp)");
                        JobFileCheck kick = getCopy(check);
                        kick.setFileName(filename);
                        kickList.add(kick);
                    }
                }
            }
            fileTimestampFlagCache.put(filename, false);
        }
    }

    // 4. Check file sizes (filesize-modify events)
    for (File file : fileList) {
        String filename = file.getName();
        RandomAccessFileWrapper fr = null;
        try {
            fr = new RandomAccessFileWrapper(file, "r");
            Long newSize = fr.length();
            Long oldSize = fileSizeCache.get(filename);
            if (oldSize == null) {
                fileSizeCache.put(filename, newSize);
                fileSizeFlagCache.put(filename, false);
            } else if (!oldSize.equals(newSize)) {
                m_log.info("size : " + oldSize + "->" + newSize + " (" + filename + ")");
                fileSizeCache.put(filename, newSize);
                fileSizeFlagCache.put(filename, true);
            } else {
                if (fileSizeFlagCache.get(filename) != null && fileSizeFlagCache.get(filename)) {
                    // the size changed on an earlier run and is stable now: the modification is complete
                    for (JobFileCheck check : m_jobFileCheckList) {
                        if (check.getEventType() == FileCheckConstant.TYPE_MODIFY
                                && check.getModifyType() == FileCheckConstant.TYPE_MODIFY_FILESIZE
                                && matchFile(check, filename)) {
                            m_log.info("kickList.add [" + filename + "] (filesize)");
                            JobFileCheck kick = getCopy(check);
                            kick.setFileName(filename);
                            kickList.add(kick);
                        }
                    }
                }
                fileSizeFlagCache.put(filename, false);
            }
        } catch (IOException e) {
            m_log.info("run() : IOException: " + e.getMessage());
        } catch (Exception e) {
            m_log.warn("run() : Exception: " + e.getMessage());
        } finally {
            if (fr != null) {
                try {
                    fr.close();
                } catch (final Exception e) {
                    m_log.debug("run() : " + e.getMessage());
                }
            }
        }
    }

    // On the first run, only the caches are primed; no jobs are kicked yet
    if (initFlag) {
        initFlag = false;
        return;
    }

    // 5. Kick the jobs collected above
    for (JobFileCheck jobFileCheck : kickList) {
        m_log.info("kick " + jobFileCheck.getId());
        String calendarId = jobFileCheck.getCalendarId();
        CalendarInfo calendarInfo = jobFileCheck.getCalendarInfo();
        boolean run = true;
        if (calendarId != null && calendarInfo == null) {
            m_log.info("unknown error : id=" + jobFileCheck.getId() + ", calendarId=" + calendarId);
        }
        if (calendarInfo != null) {
            run = CalendarWSUtil.isRun(calendarInfo);
        }

        if (!run) {
            m_log.info("not exec(calendar) : id=" + jobFileCheck.getId() + ", calendarId=" + calendarId);
            continue;
        }
        try {
            String sessionId = jobFileCheckResultRetry(jobFileCheck);
            String jobunitId = jobFileCheck.getJobunitId();
            String jobId = jobFileCheck.getJobId();
            m_log.info("jobFileCheckResult sessionId=" + sessionId + ", jobunitId=" + jobunitId + ", jobId="
                    + jobId);
        } catch (Exception e) {
            m_log.warn("run(jobFileCheckResult) : " + e.getClass().getSimpleName() + ", " + e.getMessage(), e);
        }
    }
}
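
Two cautions about step 2 above: filenameList.contains(filename) is a linear scan per cached name, and removing entries from fileTimestampCache while iterating its keySet throws ConcurrentModificationException unless the map is a concurrent implementation. If no delete-event kicks were needed, the cache cleanup alone could be written as below (a sketch assuming the same cache fields; the per-name kick loop would still have to run first):

import java.util.HashSet;
import java.util.Set;

// Sketch: O(1) membership via HashSet, and removeIf mutates the map
// safely during traversal (Java 8+).
Set<String> currentNames = new HashSet<>(filenameList);
fileTimestampCache.keySet().removeIf(name -> !currentNames.contains(name));
fileTimestampFlagCache.keySet().removeIf(name -> !currentNames.contains(name));
fileSizeCache.keySet().removeIf(name -> !currentNames.contains(name));
fileSizeFlagCache.keySet().removeIf(name -> !currentNames.contains(name));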

From source file:de.ks.idnadrev.expimp.xls.XlsxExporterTest.java

@Test
public void testExportThoughts() throws Exception {
    File tempFile = File.createTempFile("thoughtExport", ".xlsx");
    EntityExportSource<Thought> source = new EntityExportSource<>(getAllIds(), Thought.class);
    XlsxExporter exporter = new XlsxExporter();
    exporter.export(tempFile, source);

    Workbook wb = WorkbookFactory.create(tempFile);
    Sheet sheet = wb.getSheetAt(0);
    assertEquals(Thought.class.getName(), sheet.getSheetName());
    int lastRowNum = sheet.getLastRowNum();
    assertEquals(COUNT, lastRowNum);
    Row firstRow = sheet.getRow(0);

    ArrayList<String> titles = new ArrayList<>();
    firstRow.cellIterator().forEachRemaining(col -> titles.add(col.getStringCellValue()));
    assertThat(titles.size(), greaterThanOrEqualTo(3));
    log.info("Found titles {}", titles);

    String creationTime = PropertyPath.property(Thought.class, t -> t.getCreationTime());
    String name = PropertyPath.property(Thought.class, t -> t.getName());
    String description = PropertyPath.property(Thought.class, t -> t.getDescription());

    assertTrue(titles.contains(creationTime));
    assertTrue(titles.contains(name));
    assertTrue(titles.contains(description));

    int nameColumn = titles.indexOf(name);
    ArrayList<String> names = new ArrayList<String>(COUNT);
    for (int i = 1; i <= COUNT; i++) {
        Row row = sheet.getRow(i);
        names.add(row.getCell(nameColumn).getStringCellValue());
    }
    Collections.sort(names);
    assertEquals("Thought000", names.get(0));
    assertEquals("Thought141", names.get(COUNT - 1));

    Date excelDate = sheet.getRow(1).getCell(titles.indexOf(creationTime)).getDateCellValue();

    Thought thought = PersistentWork.forName(Thought.class, "Thought000");

    Timestamp timestamp = java.sql.Timestamp.valueOf(thought.getCreationTime());
    Date creationDate = new Date(timestamp.getTime());
    assertEquals(creationDate, excelDate);
}
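
Since the test already uses Hamcrest matchers (assertThat, greaterThanOrEqualTo), the three contains assertions could be collapsed into one hasItems check that also reports the missing column names on failure; a sketch, assuming Hamcrest is on the classpath:

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasItems;

// One matcher replaces three assertTrue(titles.contains(...)) calls.
assertThat(titles, hasItems(creationTime, name, description));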

From source file:com.mythesis.userbehaviouranalysis.ProfileAnalysis.java

/**
 * a method that returns a number of random queries
 * @param path SWebRank output directory 
 * @param numOfQueries number of random queries
 * @return a list of paths for the queries
 */
private ArrayList<String> getQueries(String path, int numOfQueries) {

    //Find output paths
    File root = new File(path);
    File[] contents = root.listFiles();
    List<String> sWebRanklevels = new ArrayList<>();
    for (File f : contents) {
        if (f.getAbsolutePath().contains("level"))
            sWebRanklevels.add(f.getAbsolutePath());
    }

    //Find all query paths
    ArrayList<String> queries = new ArrayList<>();
    for (String s : sWebRanklevels) {
        File level = new File(s);
        File[] queriesFiles = level.listFiles();
        for (File f : queriesFiles) {
            if (!f.getAbsolutePath().contains("txt"))
                queries.add(f.getAbsolutePath());

        }
    }

    if (numOfQueries > queries.size()) {
        return queries;
    }

    //Select a number of random queries
    Random randomQuery = new Random();
    ArrayList<String> randomQueries = new ArrayList<>();
    int count = 0;
    while (count < numOfQueries) {
        // bound nextInt by the full size; the original size() - 1 bound could never select the last query
        String val = queries.get(randomQuery.nextInt(queries.size()));
        if (!randomQueries.contains(val)) {
            randomQueries.add(val);
            count++;
        }
    }

    return randomQueries;
}
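
The rejection loop above uses contains to avoid duplicate picks. An alternative that needs no membership test at all is to shuffle a copy and take a prefix; a sketch:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Sketch: shuffle once, then take the first k entries -- k distinct
// elements without any contains() rejection loop.
static List<String> sample(List<String> queries, int k) {
    List<String> copy = new ArrayList<>(queries);
    Collections.shuffle(copy);
    return new ArrayList<>(copy.subList(0, Math.min(k, copy.size())));
}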

From source file:com.timtripcony.AbstractDocumentMapModel.java

/**
 * Loads the object with settings based on the passed UNID:
 * <ul>
 * <li>
 * Whether or not it is a new Document
 * </li>
 * <li>
 * Whether or not the Document is deleted
 * </li>
 * <li>
 * Whether or not the Document is read only
 * </li>
 * <li>
 * Sets wrappedDoc as a DominoDocument object
 * </li>
 * <li>
 * Loads all fields into the object, omitting named fields, anything prefixed with "$", or with type "Error"
 * </li>
 * </ul>
 * 
 * Extended by PW to allow specific fields to be ignored and to ensure empty fields are loaded as "" (empty String),
 * not "[]" (String version of empty Vector)
 * 
 * @param unid
 *            String UNID or Note ID relating to this Document (empty = new Document)
 */
public void load(final String unid) {
    setUnid(unid);
    Document doc = null;
    setNewNote(true); // Default to true
    setDeleted(false); // Default to false
    setReadOnly(false); // Default to false
    try {
        if (StringUtil.isNotEmpty(unid)) {
            try {
                doc = AppUtils.getDocumentByNoteID_Or_UNID(unid);
                setWrappedDoc(DominoDocument.wrap(AppUtils.getDataDbPath(), // database name
                        doc, // Document
                        null, // computeWithForm
                        null, // concurrency Mode
                        false, // allowDeletedDocs
                        null, // saveLinksAs
                        null)); // webQuerySaveAgent
                for (Object eachItem : doc.getItems()) {
                    if (eachItem instanceof Item) {
                        Item item = (Item) eachItem;
                        String itemName = item.getName();
                        // Certainly not a comprehensive list of items to skip
                        ArrayList<String> ignoreList = ArrayListUtil.stringToArrayList("MIME_Version");
                        String firstChar = StringUtils.left(itemName, 1);
                        if (!ignoreList.contains(itemName) && !StringUtils.equals(firstChar, "$")) {
                            // Item may be of type "Error"
                            if (item.getType() != Type.ERRORITEM.getValue()) {
                                Object itemValue = wrappedDoc.getValue(itemName);
                                setValue(itemName, itemValue);
                                if (itemValue instanceof Vector) {
                                    if ("[]".equals(itemValue.toString())) {
                                        setValue(itemName, "");
                                    }
                                }
                            }
                        }
                    }
                }
                if (doc.isDeleted() || !doc.isValid()) {
                    setDeleted(true);
                }
                setNewNote(false);
            } catch (Throwable t) {
                AppUtils.handleException(t);
            }
        }
    } catch (Throwable t) {
        AppUtils.handleException(t);
    }
}
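
A side note on the ignore list above: it is rebuilt from a string for every item, and ArrayList.contains scans it linearly. A constant Set built once expresses the intent more directly; a sketch (Set.of requires Java 9+, which may not hold on a Domino/XPages runtime, so treat this as an assumption):

import java.util.Set;

// Hypothetical helper: the ignore set is created once, lookups are O(1).
private static final Set<String> IGNORED_ITEMS = Set.of("MIME_Version");

private static boolean shouldLoad(String itemName) {
    return !IGNORED_ITEMS.contains(itemName) && !itemName.startsWith("$");
}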

From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java

public static SampleData interpretCSV(StatsConfig config, List<String[]> rows, ReportTypes pipelineType,
        HashMap<String, Integer> pipelineMetadata) {

    HashMap<Integer, ArrayList<ArrayList<Double>>> samplesPerMeasure = new HashMap<Integer, ArrayList<ArrayList<Double>>>();

    //Only remove first line if it is a header line
    if (rows.size() > 0 && rows.get(0)[6].equals("IsBaseline")) {
        rows.remove(0);
    }

    if (rows.size() > 1) {

        logger.log(Level.INFO, "Extracting samples and metadata from imported data.");
        int selectBestN = config.getSelectBestN();
        String selectByMeasure = config.getSelectByMeasure();

        // Preprocessing: Parse different models (classifier + feature set column) and measures
        ArrayList<String> measures = new ArrayList<String>();
        ArrayList<Pair<String, String>> datasets = new ArrayList<Pair<String, String>>();
        ArrayList<Pair<String, String>> models = new ArrayList<Pair<String, String>>();
        ArrayList<Pair<String, String>> baselineModels = new ArrayList<Pair<String, String>>();

        for (int i = 0; i < rows.size(); i++) {
            String[] columns = rows.get(i);
            String classifier = columns[2];
            if (classifier.equals("0")) {
                classifier = "Aggregated";
            }
            String featureSets = columns[3];
            Pair<String, String> model = Pair.of(classifier, featureSets);
            if (!models.contains(model)) {
                models.add(model);
                if (!baselineModels.contains(model) && Integer.parseInt(columns[6]) == 1) {
                    baselineModels.add(model);
                }
            }
            if (!measures.contains(columns[4])) {
                measures.add(columns[4]);
            }
        }

        //Check: Baseline only allowed when > 2 models are evaluated
        if (models.size() <= 2 && baselineModels.size() > 0) {
            logger.log(Level.WARN,
                    "At least three models are required to make an evaluation against a baseline meaningful. In the dataset, a baseline was specified for only two models. The baseline indicator will be ignored.");
            System.err.println(
                    "At least three models are required to make an evaluation against a baseline meaningful. In the dataset, a baseline was specified for only two models. The baseline indicator will be ignored.");
            baselineModels.clear();
        }

        // Now sort samples according to data
        Collections.sort(rows, new Helpers.LexicographicArrayComparator());
        for (int i = 0; i < rows.size(); i++) {
            String[] columns = rows.get(i);
            Pair<String, String> data = null;
            String trainData = columns[0].trim();
            String testData = columns[1].trim();

            //If this is a CV, the digits after a dot are fold UUIDs, so they must be split off to recover the original dataset name
            if (pipelineType == ReportTypes.CV) {
                trainData = trainData.split("\\.")[0];
                testData = testData.split("\\.")[0];
            }

            if (trainData.equals(testData)) {
                data = Pair.of(trainData, null);
            } else {
                data = Pair.of(trainData, testData);
            }
            if (!datasets.contains(data)) {
                datasets.add(data);
            }
        }

        // Preprocessing: Initialize sample container per measure/model
        for (int i = 0; i < measures.size(); i++) {
            ArrayList<ArrayList<Double>> samplesPerModel = new ArrayList<ArrayList<Double>>();
            for (int j = 0; j < models.size(); j++) {
                samplesPerModel.add(new ArrayList<Double>());
            }
            samplesPerMeasure.put(i, samplesPerModel);
        }

        // Assign samples to different models
        for (int i = 0; i < rows.size(); i++) {
            String[] columns = rows.get(i);
            String classifier = columns[2];
            if (classifier.equals("0")) {
                classifier = "Aggregated";
            }
            String featureSet = columns[3];
            String measure = columns[4];
            double value = Double.parseDouble(columns[5]);

            int measureIndex = measures.indexOf(measure);
            int modelIndex = models.indexOf(Pair.of(classifier, featureSet));

            ArrayList<ArrayList<Double>> sPMeasure = samplesPerMeasure.get(measureIndex);
            sPMeasure.get(modelIndex).add(value);
        }

        // Transform into data format required by the statistical evaluation
        HashMap<String, ArrayList<ArrayList<Double>>> indexedSamples = new HashMap<String, ArrayList<ArrayList<Double>>>();
        HashMap<String, ArrayList<Double>> indexedSamplesAverage = new HashMap<String, ArrayList<Double>>();

        Iterator<Integer> it = samplesPerMeasure.keySet().iterator();
        while (it.hasNext()) {
            int measureIndex = it.next();
            ArrayList<ArrayList<Double>> samplesPerModel = samplesPerMeasure.get(measureIndex);

            ArrayList<Double> sampleAverages = new ArrayList<Double>(models.size());
            for (int modelIndex = 0; modelIndex < models.size(); modelIndex++) {
                ArrayList<Double> sample = samplesPerModel.get(modelIndex);
                double average = 0;
                for (int j = 0; j < sample.size(); j++) {
                    average += sample.get(j);
                }
                average /= sample.size();
                sampleAverages.add(average);
            }
            indexedSamplesAverage.put(measures.get(measureIndex), sampleAverages);
            indexedSamples.put(measures.get(measureIndex), samplesPerMeasure.get(measureIndex));
        }

        // Check if data fulfills general requirements: at least 5 samples for each model, same number of samples per model
        it = samplesPerMeasure.keySet().iterator();
        while (it.hasNext()) {
            Integer measureIndex = it.next();
            ArrayList<ArrayList<Double>> samplesPerModel = samplesPerMeasure.get(measureIndex);
            int s = samplesPerModel.get(0).size();

            for (int i = 1; i < samplesPerModel.size(); i++) {
                if (samplesPerModel.get(i).size() < 5) {
                    logger.log(Level.ERROR, "At least 5 samples are needed per model and measure. Aborting.");
                    System.err.println("At least 5 samples are needed per model and measure. Aborting.");
                    System.exit(1);
                }
                if (samplesPerModel.get(i).size() != s) {
                    logger.log(Level.ERROR,
                            "Different models are not represented by the same number of samples. Aborting.");
                    System.err.println(
                            "Different models are not represented by the same number of samples. Aborting.");
                    System.exit(1);
                }
            }
        }

        // Collect remaining data required for creating a SampleData object
        // Check if data fulfills requirements of the specific PipelineTypes
        int nFolds = 1;
        int nRepetitions = 1;
        switch (pipelineType) {
        case CV:
            if (datasets.size() > 1) {
                System.err.println(
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                return null;
            } else if (datasets.get(0).getValue() != null) {
                System.err.println(
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                return null;
            }
            nFolds = indexedSamples.get(measures.get(0)).get(0).size();
            nRepetitions = 1;
            break;
        case MULTIPLE_CV:
            if (datasets.size() > 1) {
                System.err.println(
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. More than one dataset specified for Single-Domain Cross-Validation.");
                return null;
            } else if (datasets.get(0).getValue() != null) {
                System.err.println(
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                logger.log(Level.ERROR,
                        "Input data corrupted. Training and Test dataset must be same for Cross-Validation.");
                return null;
            }
            nFolds = pipelineMetadata.get("nFolds");
            nRepetitions = indexedSamples.get(measures.get(0)).get(0).size();
            break;
        case CV_DATASET_LVL:
            nFolds = pipelineMetadata.get("nFolds");
            nRepetitions = 1;
            break;
        case MULTIPLE_CV_DATASET_LVL:
            nFolds = pipelineMetadata.get("nFolds");
            nRepetitions = pipelineMetadata.get("nRepetitions");
            break;
        case TRAIN_TEST_DATASET_LVL:
            nFolds = 1;
            nRepetitions = 1;
            break;
        default:
            System.err.println("Unknown PipelineType. Aborting.");
            logger.log(Level.ERROR, "Unknown PipelineType. Aborting.");
            return null;
        }

        //Reorder data in case of a baseline evaluation (baseline first)
        if (baselineModels.size() == 1) {
            Pair<String, String> baselineModel = baselineModels.get(0);
            int modelIndex = models.indexOf(baselineModel);
            models.remove(modelIndex);
            models.add(0, baselineModel);
            for (String measure : indexedSamples.keySet()) {
                ArrayList<Double> s = indexedSamples.get(measure).get(modelIndex);
                indexedSamples.get(measure).remove(modelIndex);
                indexedSamples.get(measure).add(0, s);
                double a = indexedSamplesAverage.get(measure).get(modelIndex);
                indexedSamplesAverage.get(measure).remove(modelIndex);
                indexedSamplesAverage.get(measure).add(0, a);
            }
        }

        SampleData sampleData = new SampleData(null, indexedSamples, indexedSamplesAverage, datasets, models,
                baselineModels, pipelineType, nFolds, nRepetitions);
        sampleData = Helpers.truncateData(sampleData, selectBestN, selectByMeasure);

        return sampleData;
    }
    return null;
}
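
A closing note on this example: models.contains(model) and models.indexOf(...) only work because Pair (here presumably org.apache.commons.lang3.tuple.Pair) implements equals componentwise over its left and right values. A minimal demonstration under that assumption:

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.tuple.Pair;

public class PairContainsDemo {
    public static void main(String[] args) {
        List<Pair<String, String>> models = new ArrayList<>();
        models.add(Pair.of("SVM", "ngrams"));
        // true: Pair.equals compares left and right values, not object identity
        System.out.println(models.contains(Pair.of("SVM", "ngrams")));
    }
}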