Example usage for java.util.HashSet.contains()

List of usage examples for java.util.HashSet.contains()

Introduction

On this page you can find example usage for java.util.HashSet.contains().

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
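
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing the typical check of contains() before acting on an element:

import java.util.HashSet;

public class HashSetContainsExample {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<>();
        colors.add("red");
        colors.add("green");

        // contains() relies on hashCode() and equals(), so lookups are O(1) on average
        System.out.println(colors.contains("red"));   // true
        System.out.println(colors.contains("blue"));  // false
    }
}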

Usage

From source file:com.antsdb.saltedfish.server.mysql.replication.MysqlSlave.java

private Parameters toParameters(TableMeta meta, Row row) {
    List<Column> columns = row.getColumns();
    Object[] pureValues;

    PrimaryKeyMeta keyMeta = meta.getPrimaryKey();
    if (keyMeta != null) {
        List<ColumnMeta> primaryKeys = keyMeta.getColumns(meta);
        HashSet<Integer> pkNum = new HashSet<>();
        for (ColumnMeta key : primaryKeys) {
            pkNum.add(key.getColumnId());
        }

        pureValues = new Object[pkNum.size()];
        for (int i = 0; i < columns.size(); i++) {
            // col id starts with 1
            if (pkNum.contains(i + 1)) {
                pureValues[i] = toParameter(columns.get(i));
            }
        }
    } else {
        pureValues = new Object[columns.size()];
        for (int i = 0; i < columns.size(); i++) {
            pureValues[i] = toParameter(columns.get(i));
        }
    }
    return new Parameters(pureValues);
}

From source file:com.pinterest.arcee.autoscaling.AwsAutoScalingManager.java

@Override
public ASGStatus getAutoScalingGroupStatus(String groupName) throws Exception {
    AutoScalingGroup group = getAutoScalingGroup(groupName);
    if (group == null) {
        return ASGStatus.UNKNOWN;
    }

    List<SuspendedProcess> suspendedProcesses = group.getSuspendedProcesses();
    HashSet<String> processName = new HashSet<>();
    for (SuspendedProcess process : suspendedProcesses) {
        processName.add(process.getProcessName());
    }
    if (processName.contains(PROCESS_ALARMNOTIFICATION) && processName.contains(PROCESS_SCHEDULEDACTIONS)) {
        return ASGStatus.DISABLED;
    } else {
        return ASGStatus.ENABLED;
    }
}

From source file:com.multimedia.service.wallpaper.WallpaperServiceImpl.java

/**
 * Returns random wallpapers from the given pages.
 *
 * @param id_pages ids of the pages to select wallpapers from
 * @param count maximum number of wallpapers to return
 * @return a list of up to count randomly chosen wallpapers
 */
@Override
public List<Wallpaper> getMainImages(List<Long> id_pages, int count) {
    Object[][] values = new Object[][] { null, new Object[] { Boolean.TRUE } };

    Long[] id_pages_a = new Long[id_pages.size()];
    values[0] = id_pages.toArray(id_pages_a);
    int size = dao.getRowCount(WALLPAPERS_WHERE, values).intValue();

    Random r = new Random();
    List<Wallpaper> temp_wallpaper;
    HashSet<Integer> generated = new HashSet<Integer>(count + 1);
    //generate count unique random indices in the range [0, size)
    if (size > count) {
        List<Wallpaper> rez = new LinkedList<Wallpaper>();
        for (int i = 0; i < count; i++) {
            Integer num = r.nextInt(size);
            while (generated.contains(num)) {
                num = r.nextInt(size);
            }
            generated.add(num);
            temp_wallpaper = dao.getByPropertiesValuesPortionOrdered(null, null, WALLPAPERS_WHERE, values, num,
                    1, null, null);
            rez.add(temp_wallpaper.get(0));
        }
        return rez;
    } else {
        return dao.getByPropertiesValuesPortionOrdered(null, null, WALLPAPERS_WHERE, values, 0, -1, null, null);
    }
}

From source file:io.pravega.segmentstore.server.containers.StreamSegmentMapperTests.java

private void setupStorageCreateHandler(TestContext context, HashSet<String> storageSegments) {
    context.storage.createHandler = segmentName -> {
        synchronized (storageSegments) {
            if (storageSegments.contains(segmentName)) {
                return FutureHelpers.failedFuture(new StreamSegmentExistsException(segmentName));
            } else {
                storageSegments.add(segmentName);
                return CompletableFuture.completedFuture(
                        new StreamSegmentInformation(segmentName, 0, false, false, new ImmutableDate()));
            }
        }
    };
}

From source file:net.sf.maltcms.chromaui.annotations.PeakAnnotationRenderer.java

/**
 * Generates 2D peak shape annotations for the peaks in the given container.
 *
 * @param container the peak container to render
 * @param dataset the 2D chromatogram dataset
 * @param non2DPeaks ids of peaks without a 2D annotation; these are skipped
 * @return the list of generated visual peak annotations
 */
public List<VisualPeakAnnotation> generatePeak2DShapes(Peak1DContainer container,
        ADataset2D<IChromatogram2D, IScan2D> dataset, HashSet<UUID> non2DPeaks) {
    List<VisualPeakAnnotation> l = new ArrayList<>();
    if (dataset == null) {
        return l;
    }
    IChromatogramDescriptor chromatogram = container.getChromatogram();
    int seriesIndex = getSeriesIndex(dataset, chromatogram);
    if (seriesIndex != -1) {
        for (IPeakAnnotationDescriptor peakDescr : container.getMembers()) {
            if (peakDescr != null) {
                if (!non2DPeaks.contains(peakDescr.getId())) {
                    IPeak2DAnnotationDescriptor peak2D = (IPeak2DAnnotationDescriptor) peakDescr;
                    generatePeakShape(chromatogram, peak2D, dataset, seriesIndex, l);
                }
            }
        }
    } else {
        Logger.getLogger(getClass().getName()).log(Level.WARNING,
                "Could not find match for chromatogram {0} in dataset!", chromatogram.getName());
    }
    return l;
}

From source file:gov.nih.nci.evs.browser.utils.ViewInHierarchyUtils.java

public List<LexEvsTreeNode> getChildren(String codingScheme, String version, String parent_code,
        boolean from_root) {
    // root: input parent_code = "@" or "@@";
    List<LexEvsTreeNode> list = new ArrayList<>();
    CodingSchemeVersionOrTag versionOrTag = new CodingSchemeVersionOrTag();
    if (version != null)
        versionOrTag.setVersion(version);
    TreeService treeService = TreeServiceFactory.getInstance()
            .getTreeService(RemoteServerUtil.createLexBIGService());

    LexEvsTree lexEvsTree = treeService.getTree(codingScheme, versionOrTag, parent_code);
    LexEvsTreeNode parent_node = null;
    if (!from_root) {
        parent_node = lexEvsTree.findNodeInTree(parent_code);
    } else {
        parent_node = lexEvsTree.findNodeInTree("@@");
        if (parent_node == null) {
            parent_node = lexEvsTree.findNodeInTree("@");
        }
    }
    if (parent_node == null) {
        return null;
    }

    LexEvsTreeNode.ExpandableStatus parent_node_status = parent_node.getExpandableStatus();
    if (parent_node_status == LexEvsTreeNode.ExpandableStatus.IS_EXPANDABLE) {
        ChildTreeNodeIterator itr = parent_node.getChildIterator();

        try {
            HashSet<String> hset = new HashSet<>();
            int lcv = 0;

            while (itr.hasNext()) {
                LexEvsTreeNode child = itr.next();
                lcv++;
                if (child != null) {
                    String child_code = child.getCode();
                    if (!hset.contains(child_code)) {
                        hset.add(child_code);
                        list.add(child);
                    } else {
                        break;
                    }
                } else {
                    break;
                }
            }
        } catch (Exception ex) {
            //ex.printStackTrace();
            _logger.debug("WARNING: ChildTreeNodeIterator exception...");
        }
    }
    return list;
}

From source file:com.pinterest.arcee.autoscaling.AwsAutoScalingManager.java

@Override
public AutoScalingGroupBean getAutoScalingGroupInfoByName(String groupName) throws Exception {
    AutoScalingGroupBean asgInfo = generateDefaultASGInfo();
    AutoScalingGroup asgroup = getAutoScalingGroup(groupName);
    if (asgroup == null) {
        return asgInfo;
    }
    // set autoscaling group status
    List<SuspendedProcess> suspendedProcesses = asgroup.getSuspendedProcesses();
    HashSet<String> processName = new HashSet<>();
    for (SuspendedProcess process : suspendedProcesses) {
        processName.add(process.getProcessName());
    }
    if (processName.contains(PROCESS_ALARMNOTIFICATION) && processName.contains(PROCESS_SCHEDULEDACTIONS)) {
        asgInfo.setStatus(ASGStatus.DISABLED);
    } else {
        asgInfo.setStatus(ASGStatus.ENABLED);
    }

    asgInfo.setMinSize(asgroup.getMinSize());
    asgInfo.setMaxSize(asgroup.getMaxSize());
    asgInfo.setDesiredCapacity(asgroup.getDesiredCapacity());
    // TODO this is dangerous that we are using the same value of TerminationPolicy
    String policy = asgroup.getTerminationPolicies().isEmpty() ? "Default"
            : new String(asgroup.getTerminationPolicies().get(0).getBytes());
    asgInfo.setTerminationPolicy(
            AutoScalingTerminationPolicy.valueOf(AutoScalingTerminationPolicy.class, policy));

    List<Instance> instances = asgroup.getInstances();
    for (Instance instance : instances) {
        if (instance.getInstanceId() != null) {
            asgInfo.addToInstances(instance.getInstanceId());
        }
    }
    return asgInfo;
}

From source file:com.vmware.bdd.service.resmgmt.impl.ResourceService.java

@Override
public List<VcCluster> getAvailableClusters() throws VcProviderException {
    final HashSet<String> clusterNameFromRps = new HashSet<>(rpDao.findAllClusterName());

    return VcContext.inVcSessionDo(new VcSession<List<VcCluster>>() {

        @Override
        protected List<VcCluster> body() throws Exception {
            List<VcCluster> results = new ArrayList<>();

            for (VcCluster vcCluster : VcResourceUtils.getClusters()) {
                if (clusterNameFromRps.contains(vcCluster.getName())) {
                    results.add(vcCluster);
                }
            }

            return results;
        }

    });
}

From source file:GUI.ReadFile.java

public boolean readTrace(String fileName) {
    FileReader fileReader;
    CSVParser csvFileParser;
    boolean isSuccess = true;
    CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader(TRACE_HEADER_MAPPING);

    try {
        ArrayList<String> Activity_set = new ArrayList<String>();
        HashSet<String> ID_set = new HashSet<String>();
        traces = new ArrayList<Trace>();
        //initialize FileReader object
        System.out.println(fileName);
        fileReader = new FileReader(fileName);

        //initialize CSVParser object
        csvFileParser = new CSVParser(fileReader, csvFileFormat);
        //Get a list of CSV file records
        List<CSVRecord> csvRecords = csvFileParser.getRecords();
        Trace t = new Trace("");
        //Read the CSV file records starting from the second record to skip the header
        for (int i = 1; i < csvRecords.size(); i++) {
            CSVRecord record = csvRecords.get(i);
            String ID = record.get(CaseID);
            if (!ID_set.contains(ID) || (i == csvRecords.size() - 1)) {
                //discard the initial empty trace created before the first record
                if (i != 1) {
                    traces.add(t);
                }
                ID_set.add(ID);
                t = new Trace(ID);
            }
            Activity ac = new Activity(record.get(Activity), record.get(StartTime), record.get(CompleteTime),
                    record.get(Timestamp));
            t.add_activity(ac);

            if (!Activity_set.contains(ac.get_name())) {
                Activity_set.add(ac.get_name());
            }
        }
        //sort activity set by string
        Collections.sort(Activity_set);

        //sort trace by ID
        Collections.sort(traces, new Comparator<Trace>() {
            @Override
            public int compare(Trace t1, Trace t2) {
                return Integer.parseInt(t1.get_ID()) < Integer.parseInt(t2.get_ID()) ? -1 : 1;
            }
        });
        //Set activity set for each trace
        for (Trace T : traces) {
            T.set_ActivitySet((List<String>) Activity_set.clone());
        }

    } catch (Exception e) {
        System.out.println("Error in CsvFileReader !!!");
        e.printStackTrace();
        isSuccess = false;
        return isSuccess;
    }
    if (isSuccess) {
        try {
            fileReader.close();
            csvFileParser.close();
        } catch (IOException e) {
            System.out.println("Error while closing fileReader/csvFileParser !!!");
        }
    }
    return isSuccess;
}