Example usage for java.util ArrayList contains

List of usage examples for java.util ArrayList contains

Introduction

This page collects example usages of java.util.ArrayList.contains from open-source projects.

Prototype

public boolean contains(Object o) 

Document

Returns true if this list contains the specified element.
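
A minimal, self-contained sketch (class and variable names here are illustrative, not taken from the examples below) showing the typical call:

import java.util.ArrayList;

public class ContainsExample {
    public static void main(String[] args) {
        ArrayList<String> fruits = new ArrayList<>();
        fruits.add("apple");
        fruits.add("banana");

        // contains() compares elements with equals(), so "apple" matches the stored value
        System.out.println(fruits.contains("apple"));  // true
        System.out.println(fruits.contains("cherry")); // false
    }
}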

Usage

From source file:edu.kit.dama.rest.client.DataManagerPropertiesHelper.java

/**
 * Test accesspoint settings of KIT Data Manager. If 'testSettings' is true no
 * query will be triggered.
 *
 * @return Are there any changes?
 * @throws InvalidDataManagerPropertiesException Setting of a property seems
 * to be invalid or KIT Data Manager is not well configured.
 */
private boolean testAccessPoint() throws InvalidDataManagerPropertiesException {
    boolean propertiesChanged = false;

    // <editor-fold defaultstate="collapsed" desc="Select access point">
    StagingAccessPointConfigurationWrapper allAccessPoints;
    StagingServiceRESTClient ssrc;
    ssrc = new StagingServiceRESTClient(restServerUrl + REST_STAGING_PATH, context);
    allAccessPoints = ssrc.getAllAccessPoints(properties.getUserGroup(), null);
    // Test for proper initialized KIT Data Manager
    if (allAccessPoints.getCount() == 0) {
        String noAccessPointDefined = "There should be at least one access point defined!\nPlease initialize KIT Data Manager first!";
        error.println(noAccessPointDefined);
        LOGGER.error(noAccessPointDefined);
        throw new InvalidDataManagerPropertiesException(noAccessPointDefined);
    }
    ArrayList<String> accessPointList = new ArrayList<>();
    ArrayList<String> accessPointDescriptionList = new ArrayList<>();
    for (StagingAccessPointConfiguration item : allAccessPoints.getEntities()) {
        StagingAccessPointConfiguration accessPoint = ssrc.getAccessPointById(item.getId(), context)
                .getEntities().get(0);
        accessPointList.add(accessPoint.getUniqueIdentifier());
        accessPointDescriptionList
                .add(String.format("%s [group: %s]", accessPoint.getDescription(), accessPoint.getGroupId()));
    }
    if (testSettings) {
        if (!accessPointList.contains(properties.getAccessPoint())) {
            String invalidAccessPoint = properties.getAccessPoint() + " -> No valid access point!";
            LOGGER.error(invalidAccessPoint);
            error.println(invalidAccessPoint);
            throw new InvalidDataManagerPropertiesException(properties,
                    DataManagerProperties.ACCESS_POINT_LABEL);

        }
    }
    // If 'accessPoint' is invalid or all properties should be queried -> query accesspoint
    if ((!accessPointList.contains(properties.getAccessPoint())) || (properties.getAccessPoint() == null)
            || queryAllProperties) {
        propertiesChanged |= properties.readPropertyIndex(DataManagerProperties.ACCESS_POINT_LABEL,
                accessPointList.toArray(new String[accessPointList.size()]),
                accessPointDescriptionList.toArray(new String[accessPointDescriptionList.size()]),
                "Chosen accessPoint: '%s' - %s");
    }
    // </editor-fold>
    return propertiesChanged;
}

From source file:gsn.http.restapi.RequestHandler.java

public RestResponse getMeasurementsForSensor(User user, String sensor, String from, String to, String size,
        String filter, String selectedFields) {
    RestResponse restResponse = userHasAccessToVirtualSensor(user, sensor);
    if (restResponse != null) { // error occurred
        return restResponse;
    }

    restResponse = new RestResponse();

    String filename = String.format(stringConstantsProperties.getProperty("FILENAME_SENSOR_FIELDS"), sensor,
            datetime);
    setRestResponseParams(restResponse, filename);

    long fromAsLong = 0;
    long toAsLong = 0;
    int window = -1;
    try {
        fromAsLong = new java.text.SimpleDateFormat(stringConstantsProperties.getProperty("ISO_FORMAT"))
                .parse(from).getTime();
        toAsLong = new java.text.SimpleDateFormat(stringConstantsProperties.getProperty("ISO_FORMAT")).parse(to)
                .getTime();
        if (size != null)
            window = Integer.parseInt(size);
    } catch (NumberFormatException e) {
        logger.error(e.getMessage(), e);
        restResponse = errorResponse(ErrorType.MALFORMED_SIZE, user, sensor);
        return restResponse;
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        restResponse = errorResponse(ErrorType.MALFORMED_DATE_FROM_TO, user, sensor);
        return restResponse;
    }

    String[] conditionList = null;
    if (filter != null) {
        String[] filters = filter.split(",");
        Try<String[]> conditions = XprConditions.serializeConditions(filters);
        if (conditions.isFailure()) {
            logger.error(conditions.failed().toString(), conditions.failed().get());
            return errorResponse(ErrorType.MALFORMED_FILTER, user, sensor);
        } else {
            conditionList = conditions.get();
        }
    }
    VSensorConfig sensorConfig = Mappings.getConfig(sensor);
    VirtualSensor sensorObj = new VirtualSensor();

    sensorObj.setMetadata(createHeaderMap(sensorConfig));
    sensorObj.appendField(new DataField(stringConstantsProperties.getProperty("TIME"), "Time"));
    sensorObj.appendField(new DataField(stringConstantsProperties.getProperty("TIMESTAMP"), "BigInt"));

    Vector<Long> timestamps = new Vector<Long>();
    ArrayList<Vector<Double>> elements = new ArrayList<Vector<Double>>();
    ArrayList<String> fields = new ArrayList<String>();
    ArrayList<String> allfields = new ArrayList<String>();

    for (DataField df : sensorConfig.getOutputStructure()) {
        allfields.add(df.getName().toLowerCase());
        if (selectedFields == null) {
            sensorObj.appendField(df);
            fields.add(df.getName().toLowerCase());
        }
    }

    String[] fieldNames = null;
    if (selectedFields != null) {
        fieldNames = selectedFields.toLowerCase().split(",");
        for (String f : fieldNames) {
            if (!allfields.contains(f)) {
                logger.error("Invalid field name in selection: " + f);
                return errorResponse(ErrorType.MALFORMED_FIELD_SELECT, user, sensor);
            }
            fields.add(f);
        }
    }

    for (DataField df : sensorConfig.getOutputStructure()) {
        String fieldName = df.getName().toLowerCase();
        if (selectedFields != null && fields.contains(fieldName)) {
            sensorObj.appendField(df);
        }
    }

    boolean errorFlag = !getData(sensor, fields, fromAsLong, toAsLong, window, elements, timestamps,
            conditionList);

    if (errorFlag) {
        return errorResponse(ErrorType.ERROR_IN_REQUEST, user, sensor);
    }

    sensorObj.setValues(elements, timestamps);

    List<VirtualSensor> listSens = new LinkedList<VirtualSensor>();
    listSens.add(sensorObj);

    restResponse.setResponse(VirtualSensor.generateFileContent(listSens, format));

    return restResponse;
}

From source file:net.antidot.semantic.rdf.rdb2rdf.dm.core.DirectMappingEngineWD20120529.java

private ArrayList<CandidateKey> extractPrimaryKeys(ResultSet primaryKeysSet, StdHeader header,
        String tableName) {
    ArrayList<CandidateKey> primaryKeys = new ArrayList<CandidateKey>();
    // In particular : primary key

    ArrayList<String> columnNames = new ArrayList<String>();
    int size = 0;
    // Extract columns names
    try {
        while (primaryKeysSet.next()) {
            size++;
            String columnName = primaryKeysSet.getString("COLUMN_NAME");
            columnNames.add(columnName);
        }
    } catch (SQLException e) {
        log.error("[DirectMappingEngine:extractPrimaryKeys] SQL Error during primary key of tuples extraction");
        e.printStackTrace();
    }
    // Sort columns
    ArrayList<String> sortedColumnNames = new ArrayList<String>();
    for (String columnName : header.getColumnNames()) {
        if (columnNames.contains(columnName))
            sortedColumnNames.add(columnName);
    }
    // Create object
    if (size != 0) {
        CandidateKey primaryKey = new CandidateKey(sortedColumnNames, tableName, CandidateKey.KeyType.PRIMARY);
        primaryKeys.add(primaryKey);
    }
    if (primaryKeys.size() > 1)
        throw new IllegalStateException(
                "[DirectMappingEngine:extractPrimaryKeys] Table " + tableName + " has more primary keys.");
    return primaryKeys;
}

From source file:MSUmpire.LCMSPeakStructure.LCMSPeakBase.java

public ArrayList<PeakCluster> FindAllPeakClustersForMappedPep(PepIonID pep) {
    ArrayList<PeakCluster> allclusterList = new ArrayList<>();
    float idrt = pep.GetRT();
    if (idrt != -1) {
        float calibratedmass = InstrumentParameter.GetMzByPPM(pep.CalcNeutralPepMass(), 1, -GetMassError(idrt));
        ArrayList<PeakCluster> clusterList = FindPeakClustersByMassIDTime(calibratedmass, pep.Charge, idrt,
                parameter.RTtol);
        return clusterList;
    }
    for (float rt : pep.PredictRT) {
        float calibratedmass = InstrumentParameter.GetMzByPPM(pep.CalcNeutralPepMass(), 1, -GetMassError(rt));
        float rtrange = parameter.RT_window_Targeted;
        if (rtrange == -1) {
            rtrange = Math.min(5, Math.max(pep.GetRTSD() * 2, parameter.RTtol));
        }
        ArrayList<PeakCluster> clusterList = FindPeakClustersByMassRTTol(calibratedmass, pep.Charge, rt,
                rtrange);
        for (PeakCluster cluster : clusterList) {
            if (!cluster.Identified && !allclusterList.contains(cluster)) {
                allclusterList.add(cluster);
            }
        }
    }
    return allclusterList;
}
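
In the example above, allclusterList.contains(cluster) compares elements with equals(), so for a custom type such as PeakCluster the result depends on whether equals() is overridden; with the default Object.equals() only the exact same instance is found. A small sketch with a hypothetical Point class illustrates the difference:

import java.util.ArrayList;

public class ContainsEqualsDemo {

    // Hypothetical value class; equals()/hashCode() are overridden so contains()
    // can match logically equal instances rather than only identical references.
    static final class Point {
        final int x, y;
        Point(int x, int y) { this.x = x; this.y = y; }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Point)) return false;
            Point p = (Point) o;
            return p.x == x && p.y == y;
        }

        @Override
        public int hashCode() { return 31 * x + y; }
    }

    public static void main(String[] args) {
        ArrayList<Point> points = new ArrayList<>();
        points.add(new Point(1, 2));

        // true because Point overrides equals(); without the override this would be false,
        // since the default comparison is by reference.
        System.out.println(points.contains(new Point(1, 2)));
    }
}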

From source file:gsn.webservice.standard.GSNWebServiceSkeleton.java

private HashMap<String, ArrayList<String>> buildSelection(GSNWebService_FieldSelector[] fieldSelectors) {

    // Build Mappings
    HashMap<String, ArrayList<String>> vsToField = new HashMap<String, ArrayList<String>>();
    HashMap<String, ArrayList<String>> fieldToVs = new HashMap<String, ArrayList<String>>();
    Iterator<VSensorConfig> iter = Mappings.getAllVSensorConfigs();
    while (iter.hasNext()) {
        VSensorConfig vsConfig = iter.next();
        ArrayList<String> fields = new ArrayList<String>();
        for (gsn.beans.DataField df : vsConfig.getOutputStructure()) {
            ArrayList<String> vss = fieldToVs.get(df.getName());
            if (vss == null) {
                vss = new ArrayList<String>();
                fieldToVs.put(df.getName(), vss);
            }
            vss.add(vsConfig.getName());
            fields.add(df.getName());
        }
        vsToField.put(vsConfig.getName(), fields);
    }

    HashMap<String, ArrayList<String>> vsAndFields = new HashMap<String, ArrayList<String>>();

    for (GSNWebService_FieldSelector fs : fieldSelectors) {
        String[] fields = fs.getFieldNames();
        // 1. Virtual Sensor Selection
        if ("ALL".equalsIgnoreCase(fs.getVsname())) {
            // 2. Fields Selection for the current Virtual Sensor Selection
            if (fields == null) {
                // We select all the fields for all the virtual sensors
                for (Map.Entry<String, ArrayList<String>> entry : vsToField.entrySet()) {
                    updateSelectionKey(vsAndFields, entry.getKey(), entry.getValue());
                }
            } else {
                // We select the specified fields (if they exist) for all the virtual sensors
                for (String field : fields) {
                    ArrayList<String> _vss = fieldToVs.get(field);
                    if (_vss != null)
                        updateSelectionValue(vsAndFields, _vss, field);
                }
            }
        } else {
            // 2. Fields Selection for the current Virtual Sensor Selection
            ArrayList<String> _fields = vsToField.get(fs.getVsname());
            if (_fields != null) {
                // The virtual sensor exists
                if (fields == null) {
                    // We select all the fields for the specified virtual sensor
                    updateSelectionKey(vsAndFields, fs.getVsname(), _fields);
                } else {
                    // We select the specified fields (if they exist) for the specified virtual sensor
                    for (String field : fields) {
                        if (_fields.contains(field))
                            updateSelection(vsAndFields, fs.getVsname(), field);
                    }
                }
            }
        }
    }
    return vsAndFields;
}

From source file:com.globalsight.util.file.XliffFileUtil.java

/**
 * Process Xliff files in target pages, used in exporting process
 *
 * @param p_workflow
 *            Work flow which contains all target pages
 * 
 * @version 1.0
 * @since 8.2.2
 */
public static void processXliffFiles(Workflow p_workflow) {
    if (p_workflow == null || p_workflow.getAllTargetPages() == null)
        return;
    try {
        int index = -1;
        String sourceFilename = "";
        String filename = "";
        String tmp = "";
        String exportLocation = "";
        File file = null;
        ArrayList<String> subPaths = new ArrayList<String>();
        Vector<TargetPage> targetPages = p_workflow.getAllTargetPages();

        String baseDocDir = AmbFileStoragePathUtils.getCxeDocDirPath();
        if (CompanyWrapper.SUPER_COMPANY_ID.equals(CompanyWrapper.getCurrentCompanyId())
                && !CompanyWrapper.SUPER_COMPANY_ID
                        .equals(String.valueOf(p_workflow.getJob().getCompanyId()))) {
            baseDocDir += baseDocDir + File.separator;
        }

        // Get sub folders which are split by xliff file with multiple
        // <File> tags
        for (TargetPage targetPage : targetPages) {
            exportLocation = targetPage.getExportSubDir();
            if (exportLocation.startsWith("\\") || exportLocation.startsWith("/"))
                exportLocation = exportLocation.substring(1);
            sourceFilename = targetPage.getSourcePage().getExternalPageId();
            sourceFilename = sourceFilename.replace("/", File.separator);
            filename = sourceFilename.substring(sourceFilename.indexOf(File.separator) + 1,
                    sourceFilename.lastIndexOf(File.separator));
            index = sourceFilename.toLowerCase().lastIndexOf(SEPARATE_FLAG + File.separator);
            if (index != -1) {
                tmp = baseDocDir + File.separator + sourceFilename.substring(0, index);
                file = new File(tmp);
                if (file.exists() && file.isFile() && !subPaths.contains(filename))
                    subPaths.add(filename);
            }
        }
        p_workflow.getJob().getSourceLocale().toString();
        String[] subFiles = null, sortedSubFiles = null;
        for (String subPath : subPaths) {
            tmp = baseDocDir + File.separator + exportLocation + File.separator + subPath;
            file = new File(tmp);
            subFiles = file.list();
            if (subFiles == null)
                continue;
            sortedSubFiles = sortSubFiles(subFiles);
            combineSeparatedXliffFiles(tmp, sortedSubFiles);
        }
    } catch (Exception e) {
        logger.error("Error found in processXliffFiles", e);
    }

}

From source file:io.siddhi.extension.io.file.FileSinkTestCase.java

@Test
public void fileSinkTest6() throws InterruptedException {
    log.info("test SiddhiIoFile Sink 6");

    String streams = "" + "@App:name('TestSiddhiApp')"
            + "define stream FooStream (symbol string, price float, volume long); "
            + "@sink(type='file', @map(type='json'), append='true', " + "file.uri='" + dirUri
            + "/apache.json') " + "define stream BarStream (symbol string, price float, volume long); ";

    String query = "" + "from FooStream " + "select * " + "insert into BarStream; ";

    SiddhiManager siddhiManager = new SiddhiManager();
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
    InputHandler stockStream = siddhiAppRuntime.getInputHandler("FooStream");

    siddhiAppRuntime.start();

    stockStream.send(new Object[] { "WSO2", 55.6f, 100L });
    stockStream.send(new Object[] { "IBM", 57.678f, 100L });
    stockStream.send(new Object[] { "GOOGLE", 50f, 100L });
    stockStream.send(new Object[] { "REDHAT", 50f, 100L });
    Thread.sleep(100);

    ArrayList<String> msgs = new ArrayList<>();
    msgs.add("{\"event\":{\"symbol\":\"apache\",\"price\":80.0,\"volume\":100}}");
    msgs.add("{\"event\":{\"symbol\":\"WSO2\",\"price\":55.6,\"volume\":100}}");
    msgs.add("{\"event\":{\"symbol\":\"IBM\",\"price\":57.678,\"volume\":100}}");
    msgs.add("{\"event\":{\"symbol\":\"GOOGLE\",\"price\":50.0,\"volume\":100}}");
    msgs.add("{\"event\":{\"symbol\":\"REDHAT\",\"price\":50.0,\"volume\":100}}");

    File file = new File(dirUri + "/apache.json");
    try {
        BufferedReader bufferedReader = new BufferedReader(new FileReader(file));
        String line = null;
        while ((line = bufferedReader.readLine()) != null) {
            if (msgs.contains(line)) {
                count.incrementAndGet();
            } else {
                AssertJUnit.fail("Message " + line + " is not supposed to be written.");
            }
        }
    } catch (FileNotFoundException e) {
        AssertJUnit.fail(e.getMessage());
    } catch (IOException e) {
        AssertJUnit.fail("Error occurred during reading the file '" + file.getAbsolutePath());
    }

    AssertJUnit.assertEquals(5, count.intValue());

    Thread.sleep(1000);
    siddhiAppRuntime.shutdown();
}

From source file:blue.automation.AutomationManager.java

private void removeParameters(ArrayList<String> paramIds) {
    if (paramIds == null || paramIds.size() == 0) {
        return;
    }

    for (int i = 0; i < score.getLayerGroupCount(); i++) {
        LayerGroup layerGroup = score.getLayerGroup(i);

        if (layerGroup instanceof PolyObject) {
            PolyObject pObj = (PolyObject) layerGroup;

            for (int j = 0; j < pObj.getSize(); j++) {
                SoundLayer layer = (SoundLayer) pObj.getLayerAt(j);

                ParameterIdList automationParameters = layer.getAutomationParameters();

                for (int k = automationParameters.size() - 1; k >= 0; k--) {
                    String paramId = automationParameters.getParameterId(k);

                    if (paramIds.contains(paramId)) {
                        automationParameters.removeParameterId(k);
                    }
                }
            }
        }
    }
}

From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java

/**
 * Gets run dates previous to a specific date within a window
 * of days from that date.
 *
 * @param log_date the run date
 * @param window the number of days previous to the current date
 * @return the list of previous run dates
 * @throws SQLException if there is an error retrieving the previous
 *       run dates
 */
public ArrayList<Date> getPrevDates(Date log_date, int window) throws SQLException {
    ArrayList<Date> prevDates = new ArrayList<Date>();
    if (prevDateBufDate != null && prevDateBuf != null && prevDateBufDate.equals(log_date)
            && prevDateBufWindow >= window) {

        //pull the dates within the day window from the prevDateBuf cache
        Date pd = null;
        int windowcount = 0;
        for (Date d : prevDateBuf) {
            if (windowcount >= window) {
                break;
            }
            if (pd == null) {
                pd = d;
                windowcount++;
            } else {
                DateTime morerecent = new DateTime(d.getTime());
                DateTime lessrecent = new DateTime(pd.getTime());
                Days days = Days.daysBetween(morerecent, lessrecent);
                windowcount += days.getDays();
                pd = d;
            }
            prevDates.add(d);
        }

    } else {
        String domainsprefix = properties.getProperty(DOMAINSPREFIXKEY);
        String resipsprefix = properties.getProperty(RESIPSPREFIXKEY);

        ArrayList<String> tablenames = new ArrayList<String>();
        ResultSet rs1 = null;
        try {
            rs1 = dbi.executeQueryWithResult(properties.getProperty(TABLES_QUERY1KEY));
            while (rs1.next()) {
                tablenames.add(rs1.getString(1));
            }
        } catch (Exception e) {
            if (log.isErrorEnabled()) {
                log.error(e);
            }
        } finally {
            if (rs1 != null && !rs1.isClosed()) {
                rs1.close();
            }
        }

        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(log_date);
        for (int i = 0; i < window; i++) {
            cal.roll(Calendar.DAY_OF_YEAR, false);
            Date temp = cal.getTime();
            String datestr = df.format(temp);
            if (tablenames.contains(domainsprefix + "_" + datestr)
                    && tablenames.contains(resipsprefix + "_" + datestr)) {
                prevDates.add(temp);
            }
        }

        //cache the values for later
        if (prevDateBuf == null) {
            prevDateBuf = new ArrayList<Date>();
        } else {
            prevDateBuf.clear();
        }
        prevDateBuf.addAll(prevDates);
        prevDateBufDate = log_date;
        prevDateBufWindow = window;
    }
    return prevDates;
}

From source file:GUI.ReadFile.java

public boolean readTrace(String fileName) {
    FileReader fileReader;
    CSVParser csvFileParser;
    boolean isSuccess = true;
    CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader(TRACE_HEADER_MAPPING);

    try {
        ArrayList<String> Activity_set = new ArrayList<String>();
        HashSet<String> ID_set = new HashSet<String>();
        traces = new ArrayList<Trace>();
        //initialize FileReader object
        System.out.println(fileName);
        fileReader = new FileReader(fileName);

        //initialize CSVParser object
        csvFileParser = new CSVParser(fileReader, csvFileFormat);
        //Get a list of CSV file records
        List<CSVRecord> csvRecords = csvFileParser.getRecords();
        Trace t = new Trace("");
        //Read the CSV file records starting from the second record to skip the header
        for (int i = 1; i < csvRecords.size(); i++) {
            CSVRecord record = csvRecords.get(i);
            String ID = record.get(CaseID);
            if (!ID_set.contains(ID) || (i == csvRecords.size() - 1)) {
                //Discard void trace
                if (i != 1) {
                    traces.add(t);
                }
                ID_set.add(ID);
                t = new Trace(ID);
            }
            Activity ac = new Activity(record.get(Activity), record.get(StartTime), record.get(CompleteTime),
                    record.get(Timestamp));
            t.add_activity(ac);

            if (!Activity_set.contains(ac.get_name())) {
                Activity_set.add(ac.get_name());
            }
        }
        //sort activity set by string
        Collections.sort(Activity_set);

        //sort trace by ID
        Collections.sort(traces, new Comparator<Trace>() {
            @Override
            public int compare(Trace t1, Trace t2) {
                return Integer.parseInt(t1.get_ID()) < Integer.parseInt(t2.get_ID()) ? -1 : 1;
            }
        });
        //Set activity set for each trace
        for (Trace T : traces) {
            T.set_ActivitySet((List<String>) Activity_set.clone());
        }

    } catch (Exception e) {
        System.out.println("Error in CsvFileReader !!!");
        e.printStackTrace();
        isSuccess = false;
        return isSuccess;
    }
    if (isSuccess) {
        try {
            fileReader.close();
            csvFileParser.close();
        } catch (IOException e) {
            System.out.println("Error while closing fileReader/csvFileParser !!!");
        }
    }
    return isSuccess;
}