Example usage for java.util Collections synchronizedSet

Introduction

This page shows example usage of java.util.Collections.synchronizedSet.

Prototype

public static <T> Set<T> synchronizedSet(Set<T> s) 

Document

Returns a synchronized (thread-safe) set backed by the specified set.
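
The wrapper serializes each individual call, but iteration is a compound action: the Javadoc requires callers to synchronize on the returned set while traversing it, otherwise the behavior is non-deterministic. A minimal self-contained sketch:

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class SynchronizedSetDemo {
    public static void main(String[] args) {
        // Every single-method call (add, remove, contains, size) locks the
        // wrapper's internal mutex, so these are safe from any thread.
        Set<String> set = Collections.synchronizedSet(new HashSet<String>());
        set.add("foo");
        set.add("bar");

        // Iteration spans many calls, so the caller must hold the wrapper's
        // monitor for the whole traversal; otherwise a concurrent mutation
        // can corrupt the walk or throw ConcurrentModificationException.
        synchronized (set) {
            for (String s : set) {
                System.out.println(s);
            }
        }
    }
}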

Usage

From source file:com.smartitengineering.cms.spi.impl.content.VelocityGeneratorTest.java

@Test
public void testContinuousVelocityRepGeneration() throws IOException {
    TypeRepresentationGenerator generator = new VelocityRepresentationGenerator();
    final RepresentationTemplate template = mockery.mock(RepresentationTemplate.class);
    WorkspaceAPIImpl impl = new WorkspaceAPIImpl() {

        @Override
        public RepresentationTemplate getRepresentationTemplate(WorkspaceId id, String name) {
            return template;
        }
    };
    impl.setRepresentationGenerators(Collections.singletonMap(TemplateType.VELOCITY, generator));
    final RepresentationProvider provider = new RepresentationProviderImpl();
    final WorkspaceAPI api = impl;
    registerBeanFactory(api);
    final Content content = mockery.mock(Content.class);
    final Field field = mockery.mock(Field.class);
    final FieldValue value = mockery.mock(FieldValue.class);
    final Map<String, Field> fieldMap = mockery.mock(Map.class);
    final ContentType type = mockery.mock(ContentType.class);
    final Map<String, RepresentationDef> reps = mockery.mock(Map.class, "repMap");
    final RepresentationDef def = mockery.mock(RepresentationDef.class);
    final int threadCount = new Random().nextInt(100) + 1; // at least one thread, or the set-size assertion below fails for 0
    logger.info("Number of parallel threads " + threadCount);
    mockery.checking(new Expectations() {

        {
            exactly(threadCount).of(template).getTemplateType();
            will(returnValue(TemplateType.VELOCITY));
            exactly(threadCount).of(template).getTemplate();
            final byte[] toByteArray = IOUtils.toByteArray(
                    getClass().getClassLoader().getResourceAsStream("scripts/velocity/test-template.vm"));
            will(returnValue(toByteArray));
            exactly(threadCount).of(template).getName();
            will(returnValue(REP_NAME));
            for (int i = 0; i < threadCount; ++i) {
                exactly(1).of(value).getValue();
                will(returnValue(String.valueOf(Integer.MAX_VALUE)));
            }
            exactly(threadCount).of(field).getValue();
            will(returnValue(value));
            exactly(threadCount).of(fieldMap).get(with(Expectations.<String>anything()));
            will(returnValue(field));
            exactly(threadCount).of(content).getFields();
            will(returnValue(fieldMap));
            exactly(threadCount).of(content).getContentDefinition();
            will(returnValue(type));
            final ContentId contentId = mockery.mock(ContentId.class);
            exactly(2 * threadCount).of(content).getContentId();
            will(returnValue(contentId));
            final WorkspaceId wId = mockery.mock(WorkspaceId.class);
            exactly(threadCount).of(contentId).getWorkspaceId();
            will(returnValue(wId));
            exactly(2 * threadCount).of(type).getRepresentationDefs();
            will(returnValue(reps));
            exactly(2 * threadCount).of(reps).get(with(REP_NAME));
            will(returnValue(def));
            exactly(threadCount).of(def).getParameters();
            will(returnValue(Collections.emptyMap()));
            exactly(threadCount).of(def).getMIMEType();
            will(returnValue(GroovyGeneratorTest.MIME_TYPE));
            final ResourceUri rUri = mockery.mock(ResourceUri.class);
            exactly(threadCount).of(def).getResourceUri();
            will(returnValue(rUri));
            exactly(threadCount).of(rUri).getValue();
            will(returnValue("iUri"));
        }
    });
    final Set<String> set = Collections.synchronizedSet(new LinkedHashSet<String>(threadCount));
    final List<String> list = Collections.synchronizedList(new ArrayList<String>(threadCount));
    final AtomicInteger integer = new AtomicInteger(0);
    Threads group = new Threads();
    for (int i = 0; i < threadCount; ++i) {
        group.addThread(new Thread(new Runnable() {

            public void run() {
                Representation representation = provider.getRepresentation(REP_NAME, type, content);
                Assert.assertNotNull(representation);
                Assert.assertEquals(REP_NAME, representation.getName());
                final String rep = StringUtils.newStringUtf8(representation.getRepresentation());
                list.add(rep);
                set.add(rep);
                Assert.assertEquals(GroovyGeneratorTest.MIME_TYPE, representation.getMimeType());
                integer.addAndGet(1);
            }
        }));
    }
    group.start();
    try {
        group.join();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    }
    logger.info("Generated reps list: " + list);
    logger.info("Generated reps set: " + set);
    Assert.assertEquals(threadCount, integer.get());
    Assert.assertEquals(threadCount, list.size());
    Assert.assertEquals(1, set.size());
}
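
The test above uses the synchronized list to count every generated representation and the synchronized set to prove all threads produced identical output (set size 1). A stripped-down sketch of that assertion pattern using only JDK classes; the thread count and result value are placeholders:

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;

public class DedupAssertionDemo {
    public static void main(String[] args) throws InterruptedException {
        final int threads = 16; // placeholder for the randomized thread count
        final List<String> all = Collections.synchronizedList(new ArrayList<String>());
        final Set<String> distinct = Collections.synchronizedSet(new LinkedHashSet<String>());
        final CountDownLatch done = new CountDownLatch(threads);
        for (int i = 0; i < threads; ++i) {
            new Thread(new Runnable() {
                public void run() {
                    String rep = "rendered-output"; // every thread yields the same value
                    all.add(rep);      // the list keeps one entry per thread
                    distinct.add(rep); // the set collapses duplicates
                    done.countDown();
                }
            }).start();
        }
        done.await();
        // list.size() == threads and set.size() == 1 mirror the test's asserts
        System.out.println(all.size() == threads && distinct.size() == 1);
    }
}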

From source file:HSqlManager.java

public static void uniqueDB(Connection connection, int bps) throws ClassNotFoundException, SQLException,
        InstantiationException, IllegalAccessException, IOException {
    DpalLoad.main(new String[1]);
    HSqlPrimerDesign.Dpal_Inst = DpalLoad.INSTANCE_WIN64;
    String base = new File("").getAbsolutePath();
    if (!written) {
        CSV.makeDirectory(new File(base + "/PhageData"));
        INSTANCE.readFileAll(INSTANCE.path).stream().forEach(x -> {
            try {
                CSV.writeDataCSV(x[1], Fasta.process(x[1], bps), bps);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
    }
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    PrintWriter log = new PrintWriter(new File("javalog.log"));
    stat.execute("SET FILES LOG FALSE;\n");
    PreparedStatement st = db
            .prepareStatement("UPDATE Primerdb.Primers" + " SET UniqueP = true, Tm = ?, GC =?, Hairpin =?"
                    + "WHERE Cluster = ? and Strain = ? and " + "Sequence = ? and Bp = ?");
    ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
    List<String[]> phages = new ArrayList<>();
    while (call.next()) {
        String[] r = new String[3];
        r[0] = call.getString("Strain");
        r[1] = call.getString("Cluster");
        r[2] = call.getString("Name");
        phages.add(r);
    }
    phages.stream().map(x -> x[0]).collect(Collectors.toSet()).stream().forEach(x -> {
        phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet()).parallelStream()
                .forEach(z -> {
                    try {
                        Set<String> nonclustphages = phages.stream()
                                .filter(a -> a[0].equals(x) && !a[1].equals(z)).map(a -> a[2])
                                .collect(Collectors.toSet());
                        ResultSet resultSet = stat.executeQuery("Select Sequence from primerdb.primers"
                                + " where Strain ='" + x + "' and Cluster ='" + z + "' and CommonP = true"
                                + " and Bp = " + Integer.valueOf(bps) + " ");
                        Set<CharSequence> primers = Collections.synchronizedSet(new HashSet<>());
                        while (resultSet.next()) {
                            primers.add(resultSet.getString("Sequence"));
                        }
                        for (String phage : nonclustphages) {
                            CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv")
                                    .parallelStream().filter(primer -> primers.contains(primer))
                                    .forEach(primers::remove);

                        }
                        int i = 0;
                        for (CharSequence a : primers) {
                            try {
                                st.setDouble(1, HSqlPrimerDesign.primerTm(a, 0, 800, 1.5, 0.2));
                                st.setDouble(2, HSqlPrimerDesign.gcContent(a));
                                st.setBoolean(3, HSqlPrimerDesign.calcHairpin((String) a, 4));
                                st.setString(4, z);
                                st.setString(5, x);
                                st.setString(6, a.toString());
                                st.setInt(7, bps);
                                st.addBatch();
                            } catch (SQLException e) {
                                e.printStackTrace();
                                System.out.println("Error occurred at " + x + " " + z);
                            }
                            i++;
                            if (i == 1000) {
                                i = 0;
                                st.executeBatch();
                                db.commit();
                            }
                        }
                        if (i > 0) {
                            st.executeBatch();
                            db.commit();
                        }
                    } catch (SQLException e) {
                        e.printStackTrace();
                        System.out.println("Error occurred at " + x + " " + z);
                    }
                    log.println(z);
                    log.flush();
                    System.gc();
                });
    });
    stat.execute("SET FILES LOG TRUE\n");
    st.close();
    stat.close();
    System.out.println("Unique Updated");
}
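
The loop above flushes the PreparedStatement every 1,000 additions and once more for the tail. A distilled sketch of that batch-flush pattern; the connection, statement, and row source are assumed to be set up as in the example:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

final class BatchFlush {
    // Accumulate addBatch() calls and flush every batchSize rows so neither
    // the driver nor the transaction log buffers the whole update at once.
    static void updateInBatches(Connection db, PreparedStatement st,
            Iterable<String> rows, int batchSize) throws SQLException {
        int pending = 0;
        for (String row : rows) {
            st.setString(1, row);
            st.addBatch();
            if (++pending == batchSize) {
                st.executeBatch();
                db.commit(); // autocommit is off, as in the example
                pending = 0;
            }
        }
        if (pending > 0) { // flush the remainder
            st.executeBatch();
            db.commit();
        }
    }
}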

From source file:com.smartitengineering.cms.spi.impl.content.PythonGeneratorTest.java

@Test
public void testMultiPythonRepGeneration() throws IOException {
    TypeRepresentationGenerator generator = new PythonRepresentationGenerator();
    final RepresentationTemplate template = mockery.mock(RepresentationTemplate.class);
    WorkspaceAPIImpl impl = new WorkspaceAPIImpl() {

        @Override
        public RepresentationTemplate getRepresentationTemplate(WorkspaceId id, String name) {
            return template;
        }
    };
    impl.setRepresentationGenerators(Collections.singletonMap(TemplateType.PYTHON, generator));
    final RepresentationProvider provider = new RepresentationProviderImpl();
    final WorkspaceAPI api = impl;
    registerBeanFactory(api);
    final Content content = mockery.mock(Content.class);
    final Field field = mockery.mock(Field.class);
    final FieldValue value = mockery.mock(FieldValue.class);
    final Map<String, Field> fieldMap = mockery.mock(Map.class);
    final ContentType type = mockery.mock(ContentType.class);
    final Map<String, RepresentationDef> reps = mockery.mock(Map.class, "repMap");
    final RepresentationDef def = mockery.mock(RepresentationDef.class);
    final int threadCount = new Random().nextInt(50) + 50;
    logger.info("Number of parallel threads " + threadCount);
    final Set<Integer> expecteds = new LinkedHashSet<Integer>();
    mockery.checking(new Expectations() {

        {
            exactly(threadCount).of(template).getTemplateType();
            will(returnValue(TemplateType.PYTHON));
            exactly(threadCount).of(template).getTemplate();
            final byte[] toByteArray = IOUtils.toByteArray(getClass().getClassLoader()
                    .getResourceAsStream("scripts/python/test-multi-rep-gen-script.py"));
            will(returnValue(toByteArray));
            exactly(threadCount).of(template).getName();
            will(returnValue(REP_NAME));

            for (int i = 0; i < threadCount; ++i) {
                exactly(1).of(value).getValue();
                will(returnValue(String.valueOf(i)));
                expecteds.add(i);
            }
            exactly(threadCount).of(field).getValue();
            will(returnValue(value));
            exactly(threadCount).of(fieldMap).get(with(Expectations.<String>anything()));
            will(returnValue(field));
            exactly(threadCount).of(content).getFields();
            will(returnValue(fieldMap));
            exactly(threadCount).of(content).getContentDefinition();
            will(returnValue(type));
            final ContentId contentId = mockery.mock(ContentId.class);
            exactly(2 * threadCount).of(content).getContentId();
            will(returnValue(contentId));
            final WorkspaceId wId = mockery.mock(WorkspaceId.class);
            exactly(threadCount).of(contentId).getWorkspaceId();
            will(returnValue(wId));
            exactly(2 * threadCount).of(type).getRepresentationDefs();
            will(returnValue(reps));
            exactly(2 * threadCount).of(reps).get(with(REP_NAME));
            will(returnValue(def));
            exactly(threadCount).of(def).getParameters();
            will(returnValue(Collections.emptyMap()));
            exactly(threadCount).of(def).getMIMEType();
            will(returnValue(GroovyGeneratorTest.MIME_TYPE));
            final ResourceUri rUri = mockery.mock(ResourceUri.class);
            exactly(threadCount).of(def).getResourceUri();
            will(returnValue(rUri));
            exactly(threadCount).of(rUri).getValue();
            will(returnValue("iUri"));
        }
    });
    final Set<String> set = Collections.synchronizedSet(new LinkedHashSet<String>(threadCount));
    final List<String> list = Collections.synchronizedList(new ArrayList<String>(threadCount));
    final AtomicInteger integer = new AtomicInteger(0);
    Threads group = new Threads();
    for (int i = 0; i < threadCount; ++i) {
        group.addThread(new Thread(new Runnable() {

            public void run() {
                Representation representation = provider.getRepresentation(REP_NAME, type, content);
                Assert.assertNotNull(representation);
                Assert.assertEquals(REP_NAME, representation.getName());
                final String rep = StringUtils.newStringUtf8(representation.getRepresentation());
                list.add(rep);
                set.add(rep);
                Assert.assertEquals(GroovyGeneratorTest.MIME_TYPE, representation.getMimeType());
                integer.addAndGet(1);
            }
        }));
    }
    long start = System.currentTimeMillis();
    group.start();
    try {
        group.join();
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
    }
    long end = System.currentTimeMillis();
    logger.info("For generating " + threadCount + " simple representation it took " + (end - start) + "ms");
    logger.info("Generated reps list: " + list);
    logger.info("Generated reps set: " + set);
    Assert.assertEquals(threadCount, expecteds.size());
    Assert.assertEquals(threadCount, integer.get());
    Assert.assertEquals(threadCount, list.size());
    Assert.assertEquals(threadCount, set.size());
}

From source file:org.apache.hadoop.mapred.HFSPScheduler.java

@Override
public synchronized void start() throws IOException {
    super.start();

    this.preemptionStrategy = HFSPScheduler.loadPreemptionStrategyInstance(conf);

    // lookup utilities
    this.taskTrackers = new HashMap<String, TaskTrackerStatus>();
    this.jIDToJIP = new HashMap<JobID, JobInProgress>();
    this.jIDToMapJDI = Collections.synchronizedMap(new HashMap<JobID, JobDurationInfo>());
    this.jIDToReduceJDI = Collections.synchronizedMap(new HashMap<JobID, JobDurationInfo>());

    // job added listener
    this.jobInProgressListener = new HFSPJIPListener(this);
    this.taskTrackerManager.addJobInProgressListener(jobInProgressListener);

    // job initializer
    // if (!this.mockMode)
    this.eagerTaskInitializationListener.setTaskTrackerManager(this.taskTrackerManager);
    this.eagerTaskInitializationListener.start();
    this.taskTrackerManager.addJobInProgressListener(this.eagerTaskInitializationListener);
    // }

    // Training queues
    this.trainingMapJobs = Collections.synchronizedSet(new HashSet<JobInProgress>());
    this.trainingReduceJobs = Collections.synchronizedSet(new HashSet<JobInProgress>());

    // Size Based queues
    this.sizeBasedMapJobsQueue = new TreeMap<JobDurationInfo, JobInProgress>(
            HFSPScheduler.JOB_DURATION_COMPARATOR);
    this.sizeBasedReduceJobsQueue = new TreeMap<JobDurationInfo, JobInProgress>(
            HFSPScheduler.JOB_DURATION_COMPARATOR);

    // Simulator
    this.cluster = new VirtualCluster<TaskDurationInfo>(this.getMaxTasks(TaskType.MAP),
            this.getMaxTasks(TaskType.REDUCE));
    this.scheduler = new MaxMinFSScheduler<JobDurationInfo, TaskDurationInfo>();
    this.progressManager = new ProgressManager(this.cluster, this.scheduler);

    // Trainer
    // try {
    // this.trainer = new BrokerTrainer(this, conf, this.clock);
    // } catch (Exception e) {
    // throw new RuntimeException();
    // }

    // Update thread
    // this.updateInterval = conf.getLong(UPDATE_INTERVAL_KEYNAME, 5000);
    this.updateThread = new UpdateThread(this);
    // if (!this.mockMode) {
    // this.updateThread.start();
    // } else {
    // this.delayEnabled = false; // problem with delay enabled in mock mode
    // }

    // localityDelay = conf.getLong("mapred.fairscheduler.locality.delay", -1);
    if (localityDelay == -1)
        autoComputeLocalityDelay = true; // Compute from heartbeat interval

    if (LOG.isDebugEnabled()) {
        StringBuilder builder = new StringBuilder(HFSPScheduler.class.toString());
        // this.numSlotsForMapTrain = conf.getInt(TRAIN_MAP_SLOTS_KEYNAME, 0);
        // this.numSlotsForReduceTrain = conf.getInt(TRAIN_REDUCE_SLOTS_KEYNAME,
        // 0);
        // this.numMapSlotsForJob = conf.getInt(TRAINER_MIN_MAPS_KEYNAME, 2);
        // this.numReduceSlotsForJob = conf.getInt(TRAINER_MIN_REDUCES_KEYNAME,
        // 2);
        // this.eagerPreemptionEnabled
        builder.append(" initialized");
        if (this.mockMode) {
            builder.append(" in mockMode");
        }
        builder.append(" with configuration:").append("\t").append("update interval: ")
                .append(this.updateInterval).append("\t").append("eager preemption: ")
                .append(this.preemptionStrategy).append("\t").append("delay enabled: ")
                .append(this.delayEnabled).append("\t").append("num slots for train map: ")
                .append(this.numSlotsForMapTrain).append("\t").append("num slots for train reduce: ")
                .append(this.numSlotsForReduceTrain).append("\t").append("min map tasks for train: ")
                .append(this.numMapTrainSlotsForJob).append("\t").append("min reduce tasks for train: ")
                .append(this.numReduceTrainSlotsForJob).append("\t").append("initial map task duration: ")
                .append(this.initialMapTaskDuration).append("\t").append("initial reduce task duration: ")
                .append(this.initialReduceTaskDuration).append("\t").append("duration modifier map: ")
                .append(this.durationModifierMap).append("\t").append("duration modifier reduce: ")
                .append(this.durationModifierReduce).append("\t").append(" . Forcing the first update");

        LOG.debug(builder.toString());
    } else {
        LOG.info(HFSPScheduler.class + " initialized, forcing the first update");
    }

    // if (!this.mockMode)
    this.update();
}

From source file:de.javakaffee.kryoserializers.KryoTest.java

@DataProvider
public Object[][] synchronizedCollections() {
    final HashMap<String, String> m = new HashMap<String, String>();
    m.put("foo", "bar");
    return new Object[][] {
            { Collections.synchronizedList(new ArrayList<String>(Arrays.asList("foo", "bar"))) },
            { Collections.synchronizedSet(new HashSet<String>(Arrays.asList("foo", "bar"))) },
            { Collections.synchronizedMap(m) }, };
}
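
The synchronized wrappers returned here are private JDK classes without usable public constructors, so plain Kryo cannot round-trip them; this project ships a dedicated serializer for them. A sketch of the registration call as the project usually documents it (worth verifying against the version in use):

import com.esotericsoftware.kryo.Kryo;
import de.javakaffee.kryoserializers.SynchronizedCollectionsSerializer;

public class KryoSetup {
    public static Kryo newKryo() {
        Kryo kryo = new Kryo();
        // Registers serializers for the Collections.synchronized* wrappers,
        // which plain Kryo cannot instantiate reflectively.
        SynchronizedCollectionsSerializer.registerSerializers(kryo);
        return kryo;
    }
}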

From source file:com.xerox.amazonws.simpledb.Domain.java

/**
 * Performs a query against this domain. A set of items is returned which may not be
 * complete. If the nextToken in the result is set, this method can be called again
 * with the same query and the supplied nextToken.
 *
 * @param selectExpression the query to be run
 * @param nextToken an optional token to get more results
 * @param consistent if true, consistency is assured
 * @throws SDBException wraps checked exceptions
 */
public SDBListResult<Item> selectItems(String selectExpression, String nextToken, boolean consistent)
        throws SDBException {
    Map<String, String> params = new HashMap<String, String>();
    params.put("SelectExpression", selectExpression);
    if (nextToken != null) {
        params.put("NextToken", nextToken);
    }
    if (consistent) {
        params.put("ConsistentRead", "true");
    }
    HttpGet method = new HttpGet();
    SelectResponse response = makeRequestInt(method, "Select", params, SelectResponse.class);
    SDBListResult<Item> ret = new SDBListResult<Item>(response.getSelectResult().getNextToken(),
            response.getResponseMetadata().getRequestId(), response.getResponseMetadata().getBoxUsage());
    List<Item> results = ret.getItems();
    for (com.xerox.amazonws.typica.sdb.jaxb.Item i : response.getSelectResult().getItems()) {
        String iName = i.getName().getValue();
        String encoding = i.getName().getEncoding();
        if (encoding != null && encoding.equals("base64")) {
            iName = new String(Base64.decodeBase64(iName.getBytes()));
        }
        Item newItem = new ItemVO(iName);
        Map<String, Set<String>> attrs = newItem.getAttributes();
        for (Attribute a : i.getAttributes()) {
            String name = a.getName().getValue();
            encoding = a.getName().getEncoding();
            if (encoding != null && encoding.equals("base64")) {
                name = new String(Base64.decodeBase64(name.getBytes()));
            }
            String value = a.getValue().getValue();
            encoding = a.getValue().getEncoding();
            if (encoding != null && encoding.equals("base64")) {
                value = new String(Base64.decodeBase64(value.getBytes()));
            }
            Set<String> vals = attrs.get(name);
            if (vals == null) {
                vals = Collections.synchronizedSet(new HashSet<String>());
                attrs.put(name, vals);
            }
            vals.add(value);
        }
        results.add(newItem);
    }

    return ret;
}
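
Per the Javadoc above, a caller drains large result sets by feeding the returned nextToken back into the same query. A hypothetical driver loop; the snippet confirms selectItems() and getItems(), but the next-token accessor name on SDBListResult is an assumption here, not confirmed by this source:

// Hypothetical pagination loop (accessor name assumed, see note above).
List<Item> all = new ArrayList<Item>();
String token = null;
do {
    SDBListResult<Item> page = domain.selectItems("select * from mydomain", token, true);
    all.addAll(page.getItems());
    token = page.getNextToken(); // assumed accessor for the constructor's nextToken
} while (token != null);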

From source file:org.kuali.rice.krad.uif.lifecycle.ViewLifecycle.java

/**
 * Gets the set of visited IDs for use during the apply model phase.
 *
 * @return The set of visited IDs for use during the apply model phase.
 */
public static Set<String> getVisitedIds() {
    ViewLifecycle active = getActiveLifecycle();

    if (active.visitedIds == null) {
        synchronized (active) {
            if (active.visitedIds == null) {
                active.visitedIds = Collections.synchronizedSet(new HashSet<String>());
            }
        }
    }

    return active.visitedIds;
}
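
One caveat with the double-checked locking above: under the Java Memory Model the unsynchronized first read is only safe if visitedIds is declared volatile, which this snippet does not show. A minimal sketch of the safe shape, using a simplified holder class with an assumed field declaration:

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

class VisitedIdsHolder {
    // volatile is what makes the unsynchronized first read safe under the JMM
    private volatile Set<String> visitedIds;

    Set<String> getVisitedIds() {
        Set<String> ids = visitedIds;          // one volatile read
        if (ids == null) {
            synchronized (this) {
                ids = visitedIds;              // re-check under the lock
                if (ids == null) {
                    ids = Collections.synchronizedSet(new HashSet<String>());
                    visitedIds = ids;          // safe publication via volatile write
                }
            }
        }
        return ids;
    }
}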

From source file:org.schedulesdirect.grabber.Grabber.java

private void updateZip(NetworkEpgClient clnt) throws IOException, JSONException, JsonParseException {
    Set<String> completedListings = new HashSet<String>();
    LOG.debug(String.format("Using %d worker threads", globalOpts.getMaxThreads()));
    pool = createThreadPoolExecutor();
    start = System.currentTimeMillis();
    File dest = grabOpts.getTarget();
    cachedSeriesIds = new HashSet<String>();
    boolean rmDest = false;
    if (dest.exists()) {
        ZipEpgClient zipClnt = null;
        try {
            zipClnt = new ZipEpgClient(dest);
            if (!zipClnt.getUserStatus().getLastServerRefresh()
                    .before(clnt.getUserStatus().getLastServerRefresh())) {
                LOG.info(
                        "Current cache file contains latest data from Schedules Direct server; use --force-download to force a new download from server.");
                boolean force = grabOpts.isForce();
                if (!force)
                    return;
                else
                    LOG.warn("Forcing an update of data with the server due to user request!");
            }
        } catch (Exception e) {
            if (grabOpts.isKeep()) {
                LOG.error("Existing cache is invalid, keeping by user request!", e);
                return;
            } else {
                LOG.warn("Existing cache is invalid, deleting it; use --keep-bad-cache to keep existing cache!",
                        e);
                rmDest = true;
            }
        } finally {
            if (zipClnt != null)
                try {
                    zipClnt.close();
                } catch (IOException e) {
                }
            if (rmDest && !dest.delete())
                throw new IOException("Unable to delete " + dest);
        }
    }

    freshZip = !dest.exists();
    try (FileSystem vfs = FileSystems.newFileSystem(new URI(String.format("jar:%s", dest.toURI())),
            Collections.singletonMap("create", "true"))) {
        if (freshZip) {
            Path target = vfs.getPath(ZipEpgClient.ZIP_VER_FILE);
            Files.write(target, Integer.toString(ZipEpgClient.ZIP_VER).getBytes(ZipEpgClient.ZIP_CHARSET));
        }
        ProgramCache progCache = ProgramCache.get(vfs);
        Path lineups = vfs.getPath("lineups.txt");
        Files.deleteIfExists(lineups);
        Path scheds = vfs.getPath("/schedules/");
        if (!Files.isDirectory(scheds))
            Files.createDirectory(scheds);
        Path maps = vfs.getPath("/maps/");
        PathUtils.removeDirectory(maps);
        Files.createDirectory(maps);
        Path progs = vfs.getPath("/programs/");
        if (!Files.isDirectory(progs))
            Files.createDirectory(progs);
        Path logos = vfs.getPath("/logos/");
        if (!Files.isDirectory(logos))
            Files.createDirectory(logos);
        Path md5s = vfs.getPath("/md5s/");
        if (!Files.isDirectory(md5s))
            Files.createDirectory(md5s);
        Path cache = vfs.getPath(LOGO_CACHE);
        if (Files.exists(cache)) {
            String cacheData = new String(Files.readAllBytes(cache), ZipEpgClient.ZIP_CHARSET);
            logoCache = Config.get().getObjectMapper().readValue(cacheData, JSONObject.class);
        } else
            logoCache = new JSONObject();
        Path seriesInfo = vfs.getPath("/seriesInfo/");
        if (!Files.isDirectory(seriesInfo))
            Files.createDirectories(seriesInfo);
        loadSeriesInfoIds(seriesInfo);
        missingSeriesIds = Collections.synchronizedSet(new HashSet<String>());
        loadRetryIds(vfs.getPath(SERIES_INFO_DATA));

        JSONObject resp = Config.get().getObjectMapper().readValue(
                factory.get(DefaultJsonRequest.Action.GET, RestNouns.LINEUPS, clnt.getHash(),
                        clnt.getUserAgent(), globalOpts.getUrl().toString()).submitForJson(null),
                JSONObject.class);
        if (!JsonResponseUtils.isErrorResponse(resp))
            Files.write(lineups, resp.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET));
        else
            LOG.error("Received error response when requesting lineup data!");

        for (Lineup l : clnt.getLineups()) {
            buildStationList();
            JSONObject o = Config.get().getObjectMapper()
                    .readValue(
                            factory.get(DefaultJsonRequest.Action.GET, l.getUri(), clnt.getHash(),
                                    clnt.getUserAgent(), globalOpts.getUrl().toString()).submitForJson(null),
                            JSONObject.class);
            Files.write(vfs.getPath("/maps", ZipEpgClient.scrubFileName(String.format("%s.txt", l.getId()))),
                    o.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET));
            JSONArray stations = o.getJSONArray("stations");
            JSONArray ids = new JSONArray();
            for (int i = 0; i < stations.length(); ++i) {
                JSONObject obj = stations.getJSONObject(i);
                String sid = obj.getString("stationID");
                if (stationList != null && !stationList.contains(sid))
                    LOG.debug(String.format("Skipped %s; not listed in station file", sid));
                else if (completedListings.add(sid)) {
                    ids.put(sid);
                    if (!grabOpts.isNoLogos()) {
                        if (logoCacheInvalid(obj))
                            pool.execute(new LogoTask(obj, vfs, logoCache));
                        else if (LOG.isDebugEnabled())
                            LOG.debug(String.format("Skipped logo for %s; already cached!",
                                    obj.optString("callsign", null)));
                    } else if (!logosWarned) {
                        logosWarned = true;
                        LOG.warn("Logo downloads disabled by user request!");
                    }
                } else
                    LOG.debug(String.format("Skipped %s; already downloaded.", sid));
                //pool.setMaximumPoolSize(5); // Processing these new schedules takes all kinds of memory!
                if (ids.length() == grabOpts.getMaxSchedChunk()) {
                    pool.execute(new ScheduleTask(ids, vfs, clnt, progCache, factory));
                    ids = new JSONArray();
                }
            }
            if (ids.length() > 0)
                pool.execute(new ScheduleTask(ids, vfs, clnt, progCache, factory));
        }
        pool.shutdown();
        try {
            LOG.debug("Waiting for SchedLogoExecutor to terminate...");
            if (pool.awaitTermination(15, TimeUnit.MINUTES))
                LOG.debug("SchedLogoExecutor: Terminated successfully.");
            else {
                failedTask = true;
                LOG.warn(
                        "SchedLogoExecutor: Termination timed out; some tasks probably didn't finish properly!");
            }
        } catch (InterruptedException e) {
            failedTask = true;
            LOG.warn(
                    "SchedLogoExecutor: Termination interrupted); some tasks probably didn't finish properly!");
        }
        Files.write(cache, logoCache.toString(3).getBytes(ZipEpgClient.ZIP_CHARSET),
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
        ScheduleTask.commit(vfs);

        pool = createThreadPoolExecutor();
        //pool.setMaximumPoolSize(5); // Again, we've got memory problems
        String[] dirtyPrograms = progCache.getDirtyIds();
        progCache.markAllClean();
        progCache = null;
        LOG.info(String.format("Identified %d program ids requiring an update!", dirtyPrograms.length));
        Collection<String> progIds = new ArrayList<String>();
        for (String progId : dirtyPrograms) {
            progIds.add(progId);
            if (progIds.size() == grabOpts.getMaxProgChunk()) {
                pool.execute(new ProgramTask(progIds, vfs, clnt, factory, missingSeriesIds, "programs", null,
                        false));
                progIds.clear();
            }
        }
        if (progIds.size() > 0)
            pool.execute(
                    new ProgramTask(progIds, vfs, clnt, factory, missingSeriesIds, "programs", null, false));
        pool.shutdown();
        try {
            LOG.debug("Waiting for ProgramExecutor to terminate...");
            if (pool.awaitTermination(15, TimeUnit.MINUTES)) {
                LOG.debug("ProgramExecutor: Terminated successfully.");
                Iterator<String> itr = missingSeriesIds.iterator();
                while (itr.hasNext()) {
                    String id = itr.next();
                    if (cachedSeriesIds.contains(id))
                        itr.remove();
                }
                if (missingSeriesIds.size() > 0) {
                    LOG.info(String.format("Grabbing %d series info programs!", missingSeriesIds.size()));
                    Set<String> retrySet = new HashSet<>();
                    try {
                        new ProgramTask(missingSeriesIds, vfs, clnt, factory, missingSeriesIds, "seriesInfo",
                                retrySet, true).run();
                    } catch (RuntimeException e) {
                        LOG.error("SeriesInfo task failed!", e);
                        Grabber.failedTask = true;
                    }
                    Path seriesInfoData = vfs.getPath(SERIES_INFO_DATA);
                    if (retrySet.size() > 0) {
                        StringBuilder sb = new StringBuilder();
                        for (String id : retrySet)
                            sb.append(id + "\n");
                        Files.write(seriesInfoData, sb.toString().getBytes(ZipEpgClient.ZIP_CHARSET),
                                StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING,
                                StandardOpenOption.CREATE);
                    } else if (Files.exists(seriesInfoData))
                        Files.delete(seriesInfoData);
                }
            } else {
                failedTask = true;
                LOG.warn("ProgramExecutor: Termination timed out; some tasks probably didn't finish properly!");
            }
        } catch (InterruptedException e) {
            failedTask = true;
            LOG.warn("ProgramExecutor: Termination interrupted); some tasks probably didn't finish properly!");
        }

        String userData = clnt.getUserStatus().toJson();
        if (failedTask) {
            LOG.error("One or more tasks failed!  Resetting last data refresh timestamp to zero.");
            SimpleDateFormat fmt = Config.get().getDateTimeFormat();
            String exp = fmt.format(new Date(0L));
            JSONObject o = Config.get().getObjectMapper().readValue(userData, JSONObject.class);
            o.put("lastDataUpdate", exp);
            userData = o.toString(2);
        }
        Path p = vfs.getPath(USER_DATA);
        Files.write(p, userData.getBytes(ZipEpgClient.ZIP_CHARSET), StandardOpenOption.WRITE,
                StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE);
        removeIgnoredStations(vfs);
    } catch (URISyntaxException e1) {
        throw new RuntimeException(e1);
    } finally {
        Runtime rt = Runtime.getRuntime();
        LOG.info(String.format("MemStats:%n\tFREE: %s%n\tUSED: %s%n\t MAX: %s",
                FileUtils.byteCountToDisplaySize(rt.freeMemory()),
                FileUtils.byteCountToDisplaySize(rt.totalMemory() - rt.freeMemory()), // used = total - free; totalMemory() alone is the allocated heap
                FileUtils.byteCountToDisplaySize(rt.maxMemory())));
    }
}
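
Both worker pools above follow the same lifecycle: shutdown(), a bounded awaitTermination(), and a failure flag when the wait times out or is interrupted. A distilled sketch of that pattern:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class BoundedShutdown {
    // Returns true only if every submitted task finished within the budget;
    // the grabber uses the false case to reset its last-refresh timestamp.
    static boolean drain(ExecutorService pool, long minutes) {
        pool.shutdown(); // stop accepting work, let in-flight tasks finish
        try {
            return pool.awaitTermination(minutes, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status
            return false;
        }
    }

    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        // ... submit ScheduleTask/ProgramTask-style work here ...
        System.out.println(drain(pool, 15) ? "clean shutdown" : "tasks may be incomplete");
    }
}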

From source file:canreg.client.dataentry.Convert.java

public static boolean importFile(canreg.client.gui.management.CanReg4MigrationInternalFrame.MigrationTask task,
        Document doc, List<canreg.client.dataentry.Relation> map, File file, CanRegServerInterface server,
        ImportOptions io) throws SQLException, RemoteException, SecurityException, RecordLockedException {
    boolean success = false;

    Set<String> noNeedToLookAtPatientVariables = new TreeSet<String>();

    noNeedToLookAtPatientVariables
            .add(canreg.common.Tools.toLowerCaseStandardized(io.getPatientIDVariableName()));
    noNeedToLookAtPatientVariables
            .add(canreg.common.Tools.toLowerCaseStandardized(io.getPatientRecordIDVariableName()));

    String firstNameVariableName = io.getFirstNameVariableName();
    String sexVariableName = io.getSexVariableName();

    CSVParser parser = null;

    HashMap mpCodes = new HashMap();

    int numberOfLinesRead = 0;

    Map<String, Integer> nameSexTable = server.getNameSexTables();

    try {
        // Logger.getLogger(Import.class.getName()).log(Level.CONFIG, "Name of the character encoding {0}");

        int numberOfRecordsInFile = canreg.common.Tools.numberOfLinesInFile(file.getAbsolutePath());

        debugOut("Importing data from " + file);

        CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader().withDelimiter(io.getSeparator());

        parser = CSVParser.parse(file, io.getFileCharset(), format);

        int linesToRead = io.getMaxLines();
        if (linesToRead == -1 || linesToRead > numberOfRecordsInFile) {
            linesToRead = numberOfRecordsInFile;
        }

        for (CSVRecord csvRecord : parser) {
            numberOfLinesRead++;
            // We allow for null tasks...
            boolean needToSavePatientAgain = true;
            int patientDatabaseRecordID = -1;

            if (task != null) {
                if (canreg.client.gui.management.CanReg4MigrationInternalFrame.isPaused) {
                    task.firePropertyChange("paused", false, true);
                }
                if (!canreg.client.gui.management.CanReg4MigrationInternalFrame.isPaused) {
                    task.firePropertyChange("paused", true, false);
                    task.firePropertyChange("progress", (numberOfLinesRead - 1) * 100 / linesToRead,
                            (numberOfLinesRead) * 100 / linesToRead);
                }
            }

            // Build patient part
            Patient patient = new Patient();
            for (int i = 0; i < map.size(); i++) {
                Relation rel = map.get(i);
                if (rel.getDatabaseTableVariableID() >= 0
                        && rel.getDatabaseTableName().equalsIgnoreCase("patient")) {
                    if (rel.getFileColumnNumber() < csvRecord.size()) {
                        if (rel.getVariableType().equalsIgnoreCase("Number")) {
                            if (csvRecord.get(rel.getFileColumnNumber()).length() > 0) {
                                try {
                                    patient.setVariable(rel.getDatabaseVariableName(),
                                            Integer.parseInt(csvRecord.get(rel.getFileColumnNumber())));
                                } catch (NumberFormatException ex) {
                                    Logger.getLogger(Import.class.getName()).log(Level.SEVERE,
                                            "Number format error in line: " + (numberOfLinesRead + 1 + 1)
                                                    + ". ",
                                            ex);
                                    success = false;
                                }
                            }
                        } else {
                            patient.setVariable(rel.getDatabaseVariableName(),
                                    StringEscapeUtils.unescapeCsv(csvRecord.get(rel.getFileColumnNumber())));
                        }
                    } else {
                        Logger.getLogger(Import.class.getName()).log(Level.INFO,
                                "Something wrong with patient part of line " + numberOfLinesRead + ".",
                                new Exception("Error in line: " + numberOfLinesRead + ". Can't find field: "
                                        + rel.getDatabaseVariableName()));
                    }
                }
            }
            // debugOut(patient.toString());

            // Build tumour part
            Tumour tumour = new Tumour();
            for (int i = 0; i < map.size(); i++) {
                Relation rel = map.get(i);
                if (rel.getDatabaseTableVariableID() >= 0
                        && rel.getDatabaseTableName().equalsIgnoreCase("tumour")) {
                    if (rel.getFileColumnNumber() < csvRecord.size()) {
                        if (rel.getVariableType().equalsIgnoreCase("Number")) {
                            if (csvRecord.get(rel.getFileColumnNumber()).length() > 0) {
                                try {
                                    tumour.setVariable(rel.getDatabaseVariableName(),
                                            Integer.parseInt(csvRecord.get(rel.getFileColumnNumber())));
                                } catch (NumberFormatException ex) {
                                    Logger.getLogger(Import.class.getName()).log(Level.SEVERE,
                                            "Number format error in line: " + (numberOfLinesRead + 1 + 1)
                                                    + ". ",
                                            ex);
                                    success = false;
                                }
                            }
                        } else {
                            tumour.setVariable(rel.getDatabaseVariableName(),
                                    StringEscapeUtils.unescapeCsv(csvRecord.get(rel.getFileColumnNumber())));
                        }
                    } else {
                        Logger.getLogger(Import.class.getName()).log(Level.INFO,
                                "Something wrong with tumour part of line " + numberOfLinesRead + ".",
                                new Exception("Error in line: " + numberOfLinesRead + ". Can't find field: "
                                        + rel.getDatabaseVariableName()));
                    }
                }
            }

            // Build source part
            Set<Source> sources = Collections.synchronizedSet(new LinkedHashSet<Source>());
            Source source = new Source();
            for (int i = 0; i < map.size(); i++) {
                Relation rel = map.get(i);
                if (rel.getDatabaseTableVariableID() >= 0
                        && rel.getDatabaseTableName().equalsIgnoreCase(Globals.SOURCE_TABLE_NAME)) {
                    if (rel.getFileColumnNumber() < csvRecord.size()) {
                        if (rel.getVariableType().equalsIgnoreCase("Number")) {
                            if (csvRecord.get(rel.getFileColumnNumber()).length() > 0) {
                                try {
                                    source.setVariable(rel.getDatabaseVariableName(),
                                            Integer.parseInt(csvRecord.get(rel.getFileColumnNumber())));
                                } catch (NumberFormatException ex) {
                                    Logger.getLogger(Import.class.getName()).log(Level.SEVERE,
                                            "Number format error in line: " + (numberOfLinesRead + 1 + 1)
                                                    + ". ",
                                            ex);
                                    success = false;
                                }
                            }
                        } else {
                            source.setVariable(rel.getDatabaseVariableName(),
                                    StringEscapeUtils.unescapeCsv(csvRecord.get(rel.getFileColumnNumber())));
                        }
                    } else {
                        Logger.getLogger(Import.class.getName()).log(Level.INFO,
                                "Something wrong with source part of line " + numberOfLinesRead + ".",
                                new Exception("Error in line: " + numberOfLinesRead + ". Can't find field: "
                                        + rel.getDatabaseVariableName()));
                    }

                }

            }
            sources.add(source);
            tumour.setSources(sources);

            // debugOut(tumour.toString());
            // add patient to the database
            Object patientID = patient.getVariable(io.getPatientIDVariableName());
            Object patientRecordID = patient.getVariable(io.getPatientRecordIDVariableName());

            if (patientID == null) {
                // save the record to get the new patientID;
                patientDatabaseRecordID = server.savePatient(patient);
                patient = (Patient) server.getRecord(patientDatabaseRecordID, Globals.PATIENT_TABLE_NAME,
                        false);
                patientID = patient.getVariable(io.getPatientIDVariableName());
                patientRecordID = patient.getVariable(io.getPatientRecordIDVariableName());
            }

            if (io.isDataFromPreviousCanReg()) {
                // set update date for the patient the same as for the tumour
                Object updateDate = tumour.getVariable(io.getTumourUpdateDateVariableName());
                patient.setVariable(io.getPatientUpdateDateVariableName(), updateDate);

                // Set the patientID the same as the tumourID initially

                // Object tumourSequence = tumour.getVariable(io.getTumourSequenceVariableName());
                Object tumourSequence = "1";

                String tumourSequenceString = tumourSequence + "";
                while (tumourSequenceString.length() < Globals.ADDITIONAL_DIGITS_FOR_PATIENT_RECORD) {
                    tumourSequenceString = "0" + tumourSequenceString;
                }
                patientRecordID = patientID + "" + tumourSequenceString;

                // If this is a multiple primary tumour...
                String mpCodeString = (String) tumour.getVariable(io.getMultiplePrimaryVariableName());
                if (mpCodeString != null && mpCodeString.length() > 0) {
                    patientID = lookUpPatientID(mpCodeString, patientID, mpCodes);

                    // rebuild sequenceNumber
                    Tumour[] tumours = new Tumour[0];
                    try {
                        tumours = CanRegClientApp.getApplication()
                                .getTumourRecordsBasedOnPatientID(patientID + "", false);
                    } catch (DistributedTableDescriptionException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    } catch (UnknownTableException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    }

                    tumourSequenceString = (tumours.length + 1) + "";
                    while (tumourSequenceString.length() < Globals.ADDITIONAL_DIGITS_FOR_PATIENT_RECORD) {
                        tumourSequenceString = "0" + tumourSequenceString;
                    }

                    patientRecordID = patientID + "" + tumourSequenceString;
                    Patient[] oldPatients = null;
                    try {
                        oldPatients = CanRegClientApp.getApplication().getPatientRecordsByID((String) patientID,
                                false);
                    } catch (RemoteException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    } catch (SecurityException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    } catch (DistributedTableDescriptionException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    } catch (RecordLockedException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    } catch (SQLException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    } catch (UnknownTableException ex) {
                        Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
                    }
                    for (Patient oldPatient : oldPatients) {
                        if (!Tools.newRecordContainsNewInfo(patient, oldPatient,
                                noNeedToLookAtPatientVariables)) {
                            needToSavePatientAgain = false;
                            patient = oldPatient;
                            patientRecordID = oldPatient.getVariable(io.getPatientRecordIDVariableName());
                        }
                    }
                }

                Object tumourID = patientRecordID + "" + tumourSequenceString;
                //
                patient.setVariable(io.getPatientIDVariableName(), patientID);
                tumour.setVariable(io.getTumourIDVariablename(), tumourID);
                // And store the record ID

                patient.setVariable(io.getPatientRecordIDVariableName(), patientRecordID);

                // Set the patient ID number on the tumour
                tumour.setVariable(io.getPatientIDTumourTableVariableName(), patientID);
                tumour.setVariable(io.getPatientRecordIDTumourTableVariableName(), patientRecordID);

                // Set the deprecated flag to 0 - no obsolete records from CR4
                tumour.setVariable(io.getObsoleteTumourFlagVariableName(), "0");
                patient.setVariable(io.getObsoletePatientFlagVariableName(), "0");

            }

            // Set the name in the firstName database
            String sex = (String) patient.getVariable(sexVariableName);
            if (sex != null && sex.length() > 0) {
                Integer sexCode = Integer.parseInt(sex);
                String firstNames = (String) patient.getVariable(firstNameVariableName);
                if (firstNames != null) {
                    String[] firstNamesArray = firstNames.split(" ");
                    for (String firstName : firstNamesArray) {
                        if (firstName != null && firstName.trim().length() > 0) {
                            // here we use the locale specific toUpperCase
                            Integer registeredSexCode = nameSexTable.get(firstName);
                            if (registeredSexCode == null) {
                                NameSexRecord nsr = new NameSexRecord();
                                nsr.setName(firstName);
                                nsr.setSex(sexCode);

                                server.saveNameSexRecord(nsr, false);

                                nameSexTable.put(firstName, sexCode);
                            } else if (!registeredSexCode.equals(sexCode)) { // Integer identity (!=) is unreliable outside the boxing cache
                                if (registeredSexCode != 9) {
                                    sexCode = 9;
                                    NameSexRecord nsr = new NameSexRecord();
                                    nsr.setName(firstName);
                                    nsr.setSex(sexCode);
                                    server.saveNameSexRecord(nsr, true);
                                    nameSexTable.remove(firstName);
                                    nameSexTable.put(firstName, sexCode);
                                }
                            }
                        }
                    }
                }
            }

            if (needToSavePatientAgain) {
                if (patientDatabaseRecordID > 0) {
                    server.editPatient(patient);
                } else {
                    patientDatabaseRecordID = server.savePatient(patient);
                }
            }
            if (patient != null && tumour != null) {
                String icd10 = (String) tumour.getVariable(io.getICD10VariableName());
                if (icd10 == null || icd10.trim().length() == 0) {
                    ConversionResult[] conversionResult = canreg.client.CanRegClientApp.getApplication()
                            .performConversions(Converter.ConversionName.ICDO3toICD10, patient, tumour);
                    tumour.setVariable(io.getICD10VariableName(), conversionResult[0].getValue());
                }
            }
            if (tumour.getVariable(io.getPatientIDTumourTableVariableName()) == null) {
                tumour.setVariable(io.getPatientIDTumourTableVariableName(), patientID);
            }

            if (tumour.getVariable(io.getPatientRecordIDTumourTableVariableName()) == null) {
                tumour.setVariable(io.getPatientRecordIDTumourTableVariableName(), patientRecordID);
            }

            int tumourDatabaseIDNumber = server.saveTumour(tumour);

            if (Thread.interrupted()) {
                //We've been interrupted: no more importing.
                throw new InterruptedException();
            }
        }
        task.firePropertyChange("finished", null, null);
        success = true;
    } catch (IOException ex) {
        Logger.getLogger(Import.class.getName()).log(Level.SEVERE,
                "Error in line: " + (numberOfLinesRead + 1 + 1) + ". ", ex);
        success = false;
    } catch (NumberFormatException ex) {
        Logger.getLogger(Import.class.getName()).log(Level.SEVERE,
                "Error in line: " + (numberOfLinesRead + 1 + 1) + ". ", ex);
        success = false;
    } catch (InterruptedException ex) {
        Logger.getLogger(Import.class.getName()).log(Level.INFO,
                "Interupted on line: " + (numberOfLinesRead + 1) + ". ", ex);
        success = true;
    } catch (IndexOutOfBoundsException ex) {
        Logger.getLogger(Import.class.getName()).log(Level.SEVERE,
                "Error in line: " + (numberOfLinesRead + 1 + 1) + ". ", ex);
        success = false;
    } catch (SQLException ex) {
        Logger.getLogger(Import.class.getName()).log(Level.SEVERE,
                "Error in line: " + (numberOfLinesRead + 1 + 1) + ". ", ex);
        success = false;
    } finally {
        if (parser != null) {
            try {
                parser.close();
            } catch (IOException ex) {
                Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
    return success;
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

@Test
public void testRebalanceWithSlowConsumer() throws Exception {
    this.logger.info("Start auto");
    Map<String, Object> props = KafkaTestUtils.consumerProps("test101", "false", embeddedKafka);
    props.put(ConsumerConfig.FETCH_MIN_BYTES_CONFIG, "20000");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<>(props);
    ContainerProperties containerProps = new ContainerProperties(topic1);
    final CountDownLatch latch = new CountDownLatch(8);
    final Set<String> listenerThreadNames = Collections.synchronizedSet(new HashSet<String>());
    List<String> receivedMessages = Collections.synchronizedList(new ArrayList<>());
    containerProps.setMessageListener((MessageListener<Integer, String>) message -> {
        listenerThreadNames.add(Thread.currentThread().getName());
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            // ignore
        }
        receivedMessages.add(message.value());
        listenerThreadNames.add(Thread.currentThread().getName());
        latch.countDown();
    });

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    ConcurrentMessageListenerContainer<Integer, String> container2 = new ConcurrentMessageListenerContainer<>(
            cf, containerProps);
    container.setConcurrency(1);
    container2.setConcurrency(1);
    container.setBeanName("testAuto");
    container2.setBeanName("testAuto2");
    container.start();
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic1);
    template.sendDefault(0, 0, "foo");
    template.sendDefault(0, 2, "bar");
    template.sendDefault(0, 0, "baz");
    template.sendDefault(0, 2, "qux");
    template.sendDefault(1, 2, "corge");
    template.sendDefault(1, 2, "grault");
    template.sendDefault(1, 2, "garply");
    template.sendDefault(1, 2, "waldo");
    template.flush();
    container2.start();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
    assertThat(receivedMessages).containsOnlyOnce("foo", "bar", "baz", "qux", "corge", "grault", "garply",
            "waldo");
    // all messages are received
    assertThat(receivedMessages).hasSize(8);
    // messages are received on separate threads
    assertThat(listenerThreadNames.size()).isGreaterThanOrEqualTo(2);
    container.stop();
    container2.stop();
    this.logger.info("Stop auto");
}