Example usage for java.util.concurrent BlockingQueue put

List of usage examples for java.util.concurrent BlockingQueue put

Introduction

On this page you can find example usage of java.util.concurrent BlockingQueue.put.

Prototype

void put(E e) throws InterruptedException;

Source Link

Document

Inserts the specified element into this queue, waiting if necessary for space to become available.

Usage

From source file:org.languagetool.rules.spelling.suggestions.SuggestionChangesTest.java

/**
 * Runs the suggestion-change experiments described by the JSON config file
 * (system property "config", default SuggestionChangesTestConfig.json):
 * reads each configured dataset, feeds entries to worker threads through a
 * bounded blocking queue, and writes per-experiment results both to a
 * timestamped log file and to a CSV dataset file.
 *
 * @throws IOException          on config / log / dataset I/O failure
 * @throws InterruptedException if interrupted while queueing tasks or joining workers
 */
public void testChanges() throws IOException, InterruptedException {

    File configFile = new File(System.getProperty("config", "SuggestionChangesTestConfig.json"));
    ObjectMapper mapper = new ObjectMapper(new JsonFactory().enable(JsonParser.Feature.ALLOW_COMMENTS));
    SuggestionChangesTestConfig config = mapper.readValue(configFile, SuggestionChangesTestConfig.class);

    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss");
    String timestamp = dateFormat.format(new Date());
    Path loggingFile = Paths.get(config.logDir, String.format("suggestionChangesExperiment_%s.log", timestamp));
    Path datasetFile = Paths.get(config.logDir, String.format("suggestionChangesExperiment_%s.csv", timestamp));

    BufferedWriter writer = Files.newBufferedWriter(loggingFile);
    CSVPrinter datasetWriter = new CSVPrinter(Files.newBufferedWriter(datasetFile),
            CSVFormat.DEFAULT.withEscape('\\'));
    List<String> datasetHeader = new ArrayList<>(
            Arrays.asList("sentence", "correction", "covered", "replacement", "dataset_id"));

    SuggestionsChanges.init(config, writer);
    writer.write("Evaluation configuration: \n");
    String configContent = String.join("\n", Files.readAllLines(configFile.toPath()));
    writer.write(configContent);
    writer.write("\nRunning experiments: \n");
    int experimentId = 0;
    // Three CSV columns per experiment: suggestions, match metadata, per-suggestion metadata.
    for (SuggestionChangesExperiment experiment : SuggestionsChanges.getInstance().getExperiments()) {
        experimentId++;
        writer.write(String.format("#%d: %s%n", experimentId, experiment));
        datasetHeader.add(String.format("experiment_%d_suggestions", experimentId));
        datasetHeader.add(String.format("experiment_%d_metadata", experimentId));
        datasetHeader.add(String.format("experiment_%d_suggestions_metadata", experimentId));
    }
    writer.newLine();
    datasetWriter.printRecord(datasetHeader);

    // Bounded queue so the reader below blocks instead of loading everything into memory.
    BlockingQueue<SuggestionTestData> tasks = new LinkedBlockingQueue<>(1000);
    ConcurrentLinkedQueue<Pair<SuggestionTestResultData, String>> results = new ConcurrentLinkedQueue<>();
    List<SuggestionTestThread> threads = new ArrayList<>();
    for (int i = 0; i < Runtime.getRuntime().availableProcessors(); i++) {
        SuggestionTestThread worker = new SuggestionTestThread(tasks, results);
        worker.start();
        threads.add(worker);
    }

    // Thread for writing results from worker threads into CSV.
    // FIX: the loop now observes interruption — previously it was an infinite
    // busy loop, so logger.interrupt() at the end had no effect.
    Thread logger = new Thread(() -> {
        try {
            long messages = 0;
            while (!Thread.currentThread().isInterrupted()) {
                Pair<SuggestionTestResultData, String> message = results.poll();
                if (message != null) {
                    writer.write(message.getRight());

                    SuggestionTestResultData result = message.getLeft();
                    // FIX: only dereference result inside the null check — the original
                    // computed datasetId from result.getInput() BEFORE checking result != null.
                    if (result != null && result.getSuggestions() != null && !result.getSuggestions().isEmpty()
                            && result.getSuggestions().stream()
                                    .noneMatch(m -> m.getSuggestedReplacements() == null
                                            || m.getSuggestedReplacements().isEmpty())) {
                        // 1-based dataset id; indexOf relies on dataset identity in config.datasets
                        int datasetId = 1 + config.datasets.indexOf(result.getInput().getDataset());

                        List<Object> record = new ArrayList<>(Arrays.asList(result.getInput().getSentence(),
                                result.getInput().getCorrection(), result.getInput().getCovered(),
                                result.getInput().getReplacement(), datasetId));
                        for (RuleMatch match : result.getSuggestions()) {
                            List<String> suggestions = match.getSuggestedReplacements();
                            record.add(mapper.writeValueAsString(suggestions));
                            // features extracted by SuggestionsOrdererFeatureExtractor
                            record.add(mapper.writeValueAsString(match.getFeatures()));
                            List<SortedMap<String, Float>> suggestionsMetadata = new ArrayList<>();
                            for (SuggestedReplacement replacement : match.getSuggestedReplacementObjects()) {
                                suggestionsMetadata.add(replacement.getFeatures());
                            }
                            record.add(mapper.writeValueAsString(suggestionsMetadata));
                        }
                        datasetWriter.printRecord(record);
                    }

                    if (++messages % 1000 == 0) {
                        writer.flush();
                        System.out.printf("Evaluated %d corrections.%n", messages);
                    }
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    });
    logger.setDaemon(true);
    logger.start();

    // format straight from database dump
    String[] header = { "id", "sentence", "correction", "language", "rule_id", "suggestion_pos",
            "accept_language", "country", "region", "created_at", "updated_at", "covered", "replacement",
            "text_session_id", "client" };

    int datasetId = 0;
    // read data, send to worker threads via queue
    for (SuggestionChangesDataset dataset : config.datasets) {

        writer.write(String.format("Evaluating dataset #%d: %s.%n", ++datasetId, dataset));

        CSVFormat format = CSVFormat.DEFAULT;
        if (dataset.type.equals("dump")) {
            format = format.withEscape('\\').withNullString("\\N").withHeader(header);
        } else if (dataset.type.equals("artificial")) {
            format = format.withEscape('\\').withFirstRecordAsHeader();
        }
        try (CSVParser parser = new CSVParser(new FileReader(dataset.path), format)) {
            for (CSVRecord record : parser) {

                String lang = record.get("language");
                String rule = dataset.type.equals("dump") ? record.get("rule_id") : "";
                String covered = record.get("covered");
                String replacement = record.get("replacement");
                String sentence = record.get("sentence");
                String correction = record.isSet("correction") ? record.get("correction") : "";
                String acceptLanguage = dataset.type.equals("dump") ? record.get("accept_language") : "";

                if (sentence == null || sentence.trim().isEmpty()) {
                    continue;
                }

                if (!config.language.equals(lang)) {
                    continue; // TODO handle auto maybe?
                }
                if (dataset.type.equals("dump") && !config.rule.equals(rule)) {
                    continue;
                }

                // correction column missing in export from doccano; workaround
                if (dataset.enforceCorrect && !record.isSet("correction")) {
                    throw new IllegalStateException("enforceCorrect in dataset configuration enabled,"
                            + " but column 'correction' is not set for entry " + record);
                }

                if (dataset.type.equals("dump") && dataset.enforceAcceptLanguage) {
                    if (acceptLanguage != null) {
                        String[] entries = acceptLanguage.split(",", 2);
                        if (entries.length == 2) {
                            String userLanguage = entries[0]; // TODO: what to do with e.g. de-AT,de-DE;...
                            if (!config.language.equals(userLanguage)) {
                                continue;
                            }
                        }
                    }
                }

                // blocks when the queue is full — provides backpressure on the reader
                tasks.put(new SuggestionTestData(lang, sentence, covered, replacement, correction, dataset));
            }
        }

    }

    for (Thread t : threads) {
        t.join();
    }
    logger.join(10000L);
    logger.interrupt();
    logger.join(); // loop exits promptly now that it checks the interrupt flag
    datasetWriter.close();
    // FIX: writer was never closed before — flush and release the log file
    writer.close();
}

From source file:any.servable.LsServable.java

/**
 * Handles an "ls"-style command: lists the directory named by {@code content}
 * and sends the formatted table to {@code outQueue} as a "vset" message.
 * A parent-directory ("..") row is prepended when the directory has a parent.
 * On failure, a "_error" message containing the stack trace is queued instead.
 *
 * @param cmd      the raw command (unused here; kept for the Servable interface)
 * @param content  path of the directory to list
 * @param outQueue queue the result/error messages are put on
 */
public void process(String cmd, String content, BlockingQueue<Message> outQueue) {
    logger.info("start LS SERVABLE");
    // FIX: content.substring(0, content.length()) was a no-op copy of the whole string
    String filename = content;

    File file = new File(filename);

    try {
        // NOTE(review): this filter keeps dot-entries only when they end with "~";
        // likely intended to hide both dotfiles and editor backups — confirm intent.
        File[] list = file.listFiles((dir, name) -> !(name.startsWith(".") && !(name.endsWith("~"))));

        StringBuilder sb = new StringBuilder();

        // parent directory entry ("[..]"), if any
        File fp = file.getParentFile();

        logger.debug("do file: " + file + " - list: " + list + " fp:" + fp);

        if (fp != null) {
            TblUtil tbl = new TblUtil();

            // DISPLAYTEXT
            tbl.set(TblUtil.DISPLAYTEXT, "[..] " + fp.getName());

            // DETAILTEXT
            tbl.set(TblUtil.DETAILTEXT, "UP");

            // TABCMD: directories get an "ls://" link, files a "get://" link
            String tcmd = ((fp.isDirectory() ? "ls://host" : "get://host") + fp.getAbsolutePath());
            tbl.set(TblUtil.TABCMD, tcmd);

            // DETAILCMD
            tbl.set(TblUtil.DETAILCMD, "tmpl:" + tcmd);

            // DELETECMD intentionally not set

            sb.append(tbl.makeCell());

        }

        logger.debug("do list: " + list);

        // list is null when 'file' is not a directory or cannot be read
        if (list != null) {
            for (File f : list) {

                logger.debug("do file: " + f);

                TblUtil tbl = new TblUtil();

                // DISPLAYTEXT
                tbl.set(TblUtil.DISPLAYTEXT, f.getName());

                // DETAILTEXT: size (directories show a placeholder) and mtime
                tbl.set(TblUtil.DETAILTEXT,
                        (f.isDirectory() ? " --      " : humanReadableByteCount(f.length(), true)) + " - "
                                + df.format(f.lastModified()));

                // normalize to an absolute path with a leading slash (e.g. Windows paths)
                String fullpath = f.getAbsolutePath();
                if (!fullpath.startsWith("/")) {
                    fullpath = "/" + fullpath;
                }

                // TABCMD
                String tcmd = ((f.isDirectory() ? "ls://host" : "get://host") + fullpath);
                tbl.set(TblUtil.TABCMD, tcmd);

                // DETAILCMD
                tbl.set(TblUtil.DETAILCMD, "tmpl:" + tcmd);

                // DELETECMD intentionally not set

                sb.append(tbl.makeCell());
            }

            // blocks until the consumer has room for the listing message
            outQueue.put(
                    new Message("vset", file.getName(), TblUtil.TYPE, "YES", "1", sb.toString().getBytes()));

        }
    } catch (InterruptedException e) {
        // FIX: restore the interrupt flag so callers can observe the interruption
        Thread.currentThread().interrupt();
        e.printStackTrace();
        final Writer result = new StringWriter();
        final PrintWriter printWriter = new PrintWriter(result);
        e.printStackTrace(printWriter);
        outQueue.add(new Message("vset", "_error", "text/plain", "YES", result.toString().getBytes()));
    } catch (NullPointerException e) {
        // NOTE(review): catching NPE is an anti-pattern; kept to preserve the existing
        // behavior of reporting unexpected failures to the client as an "_error" message.
        e.printStackTrace();
        final Writer result = new StringWriter();
        final PrintWriter printWriter = new PrintWriter(result);
        e.printStackTrace(printWriter);
        outQueue.add(new Message("vset", "_error", "text/plain", "YES", result.toString().getBytes()));
    }

    logger.info("finished LS SERVABLE");
}

From source file:org.micromanager.asidispim.AcquisitionPanel.java

/**
 * Actually runs the acquisition; does the dirty work of setting
 * up the controller, the circular buffer, starting the cameras,
 * grabbing the images and putting them into the acquisition, etc.
 * @param testAcq true if running test acquisition only (see runTestAcquisition() javadoc)
 * @param testAcqSide only applies to test acquisition, passthrough from runTestAcquisition() 
 * @return true if ran without any fatal errors.
 */
private boolean runAcquisitionPrivate(boolean testAcq, Devices.Sides testAcqSide) {

    // sanity check, shouldn't call this unless we aren't running an acquisition
    if (gui_.isAcquisitionRunning()) {
        MyDialogUtils.showError("An acquisition is already running");
        return false;
    }

    if (ASIdiSPIM.getFrame().getHardwareInUse()) {
        MyDialogUtils.showError("Hardware is being used by something else (maybe autofocus?)");
        return false;
    }

    boolean liveModeOriginally = gui_.isLiveModeOn();
    if (liveModeOriginally) {
        gui_.enableLiveMode(false);
    }

    // make sure slice timings are up to date
    // do this automatically; we used to prompt user if they were out of date
    // do this before getting snapshot of sliceTiming_ in acqSettings
    recalculateSliceTiming(!minSlicePeriodCB_.isSelected());

    if (!sliceTiming_.valid) {
        MyDialogUtils.showError("Error in calculating the slice timing; is the camera mode set correctly?");
        return false;
    }

    AcquisitionSettings acqSettingsOrig = getCurrentAcquisitionSettings();

    if (acqSettingsOrig.cameraMode == CameraModes.Keys.LIGHT_SHEET && core_.getPixelSizeUm() < 1e-6) { // can't compare equality directly with floating point values so call < 1e-9 is zero or negative
        ReportingUtils.showError("Need to configure pixel size in Micro-Manager to use light sheet mode.");
        return false;
    }

    // if a test acquisition then only run single timpoint, no autofocus
    // allow multi-positions for test acquisition for now, though perhaps this is not desirable
    if (testAcq) {
        acqSettingsOrig.useTimepoints = false;
        acqSettingsOrig.numTimepoints = 1;
        acqSettingsOrig.useAutofocus = false;
        acqSettingsOrig.separateTimepoints = false;

        // if called from the setup panels then the side will be specified
        //   so we can do an appropriate single-sided acquisition
        // if called from the acquisition panel then NONE will be specified
        //   and run according to existing settings
        if (testAcqSide != Devices.Sides.NONE) {
            acqSettingsOrig.numSides = 1;
            acqSettingsOrig.firstSideIsA = (testAcqSide == Devices.Sides.A);
        }

        // work around limitation of not being able to use PLogic per-volume switching with single side
        // => do per-volume switching instead (only difference should be extra time to switch)
        if (acqSettingsOrig.useChannels && acqSettingsOrig.channelMode == MultichannelModes.Keys.VOLUME_HW
                && acqSettingsOrig.numSides < 2) {
            acqSettingsOrig.channelMode = MultichannelModes.Keys.VOLUME;
        }

    }

    double volumeDuration = computeActualVolumeDuration(acqSettingsOrig);
    double timepointDuration = computeTimepointDuration();
    long timepointIntervalMs = Math.round(acqSettingsOrig.timepointInterval * 1000);

    // use hardware timing if < 1 second between timepoints
    // experimentally need ~0.5 sec to set up acquisition, this gives a bit of cushion
    // cannot do this in getCurrentAcquisitionSettings because of mutually recursive
    // call with computeActualVolumeDuration()
    if (acqSettingsOrig.numTimepoints > 1 && timepointIntervalMs < (timepointDuration + 750)
            && !acqSettingsOrig.isStageScanning) {
        acqSettingsOrig.hardwareTimepoints = true;
    }

    if (acqSettingsOrig.useMultiPositions) {
        if (acqSettingsOrig.hardwareTimepoints
                || ((acqSettingsOrig.numTimepoints > 1) && (timepointIntervalMs < timepointDuration * 1.2))) {
            // change to not hardwareTimepoints and warn user
            // but allow acquisition to continue
            acqSettingsOrig.hardwareTimepoints = false;
            MyDialogUtils.showError("Timepoint interval may not be sufficient "
                    + "depending on actual time required to change positions. " + "Proceed at your own risk.");
        }
    }

    // now acqSettings should be read-only
    final AcquisitionSettings acqSettings = acqSettingsOrig;

    // generate string for log file
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    final String acqSettingsJSON = gson.toJson(acqSettings);

    // get MM device names for first/second cameras to acquire
    String firstCamera, secondCamera;
    Devices.Keys firstCameraKey, secondCameraKey;
    boolean firstSideA = acqSettings.firstSideIsA;
    if (firstSideA) {
        firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
        firstCameraKey = Devices.Keys.CAMERAA;
        secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
        secondCameraKey = Devices.Keys.CAMERAB;
    } else {
        firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
        firstCameraKey = Devices.Keys.CAMERAB;
        secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
        secondCameraKey = Devices.Keys.CAMERAA;
    }

    boolean sideActiveA, sideActiveB;
    final boolean twoSided = acqSettings.numSides > 1;
    if (twoSided) {
        sideActiveA = true;
        sideActiveB = true;
    } else {
        secondCamera = null;
        if (firstSideA) {
            sideActiveA = true;
            sideActiveB = false;
        } else {
            sideActiveA = false;
            sideActiveB = true;
        }
    }

    final boolean acqBothCameras = acqSettings.acquireBothCamerasSimultaneously;
    boolean camActiveA = sideActiveA || acqBothCameras;
    boolean camActiveB = sideActiveB || acqBothCameras;

    if (camActiveA) {
        if (!devices_.isValidMMDevice(Devices.Keys.CAMERAA)) {
            MyDialogUtils.showError("Using side A but no camera specified for that side.");
            return false;
        }
        Devices.Keys camKey = Devices.Keys.CAMERAA;
        Devices.Libraries camLib = devices_.getMMDeviceLibrary(camKey);
        if (!CameraModes.getValidModeKeys(camLib).contains(getSPIMCameraMode())) {
            MyDialogUtils.showError("Camera trigger mode set to " + getSPIMCameraMode().toString()
                    + " but camera A doesn't support it.");
            return false;
        }
        // Hamamatsu only supports light sheet mode with USB cameras.  Tt seems due to static architecture of getValidModeKeys
        //   there is no good way to tell earlier that light sheet mode isn't supported.  I don't like this but don't see another option.
        if (camLib == Devices.Libraries.HAMCAM && props_.getPropValueString(camKey, Properties.Keys.CAMERA_BUS)
                .equals(Properties.Values.USB3)) {
            if (getSPIMCameraMode() == CameraModes.Keys.LIGHT_SHEET) {
                MyDialogUtils.showError("Hamamatsu only supports light sheet mode with CameraLink readout.");
                return false;
            }
        }
    }

    if (sideActiveA) {
        if (!devices_.isValidMMDevice(Devices.Keys.GALVOA)) {
            MyDialogUtils.showError("Using side A but no scanner specified for that side.");
            return false;
        }
        if (requiresPiezos(acqSettings.spimMode) && !devices_.isValidMMDevice(Devices.Keys.PIEZOA)) {
            MyDialogUtils.showError(
                    "Using side A and acquisition mode requires piezos but no piezo specified for that side.");
            return false;
        }
    }

    if (camActiveB) {
        if (!devices_.isValidMMDevice(Devices.Keys.CAMERAB)) {
            MyDialogUtils.showError("Using side B but no camera specified for that side.");
            return false;
        }
        if (!CameraModes.getValidModeKeys(devices_.getMMDeviceLibrary(Devices.Keys.CAMERAB))
                .contains(getSPIMCameraMode())) {
            MyDialogUtils.showError("Camera trigger mode set to " + getSPIMCameraMode().toString()
                    + " but camera B doesn't support it.");
            return false;
        }
    }

    if (sideActiveB) {
        if (!devices_.isValidMMDevice(Devices.Keys.GALVOB)) {
            MyDialogUtils.showError("Using side B but no scanner specified for that side.");
            return false;
        }
        if (requiresPiezos(acqSettings.spimMode) && !devices_.isValidMMDevice(Devices.Keys.PIEZOB)) {
            MyDialogUtils.showError(
                    "Using side B and acquisition mode requires piezos but no piezo specified for that side.");
            return false;
        }
    }

    boolean usingDemoCam = (devices_.getMMDeviceLibrary(Devices.Keys.CAMERAA).equals(Devices.Libraries.DEMOCAM)
            && camActiveA)
            || (devices_.getMMDeviceLibrary(Devices.Keys.CAMERAB).equals(Devices.Libraries.DEMOCAM)
                    && camActiveB);

    // set up channels
    int nrChannelsSoftware = acqSettings.numChannels; // how many times we trigger the controller per stack
    int nrSlicesSoftware = acqSettings.numSlices;
    String originalChannelConfig = "";
    boolean changeChannelPerVolumeSoftware = false;
    if (acqSettings.useChannels) {
        if (acqSettings.numChannels < 1) {
            MyDialogUtils.showError("\"Channels\" is checked, but no channels are selected");
            return false;
        }
        // get current channel so that we can restore it, then set channel appropriately
        originalChannelConfig = multiChannelPanel_.getCurrentConfig();
        switch (acqSettings.channelMode) {
        case VOLUME:
            changeChannelPerVolumeSoftware = true;
            multiChannelPanel_.initializeChannelCycle();
            break;
        case VOLUME_HW:
        case SLICE_HW:
            if (acqSettings.numChannels == 1) { // only 1 channel selected so don't have to really use hardware switching
                multiChannelPanel_.initializeChannelCycle();
                multiChannelPanel_.selectNextChannel();
            } else { // we have at least 2 channels
                boolean success = controller_.setupHardwareChannelSwitching(acqSettings);
                if (!success) {
                    MyDialogUtils.showError("Couldn't set up slice hardware channel switching.");
                    return false;
                }
                nrChannelsSoftware = 1;
                nrSlicesSoftware = acqSettings.numSlices * acqSettings.numChannels;
            }
            break;
        default:
            MyDialogUtils
                    .showError("Unsupported multichannel mode \"" + acqSettings.channelMode.toString() + "\"");
            return false;
        }
    }
    if (twoSided && acqBothCameras) {
        nrSlicesSoftware *= 2;
    }

    if (acqSettings.hardwareTimepoints) {
        // in hardwareTimepoints case we trigger controller once for all timepoints => need to
        //   adjust number of frames we expect back from the camera during MM's SequenceAcquisition
        if (acqSettings.cameraMode == CameraModes.Keys.OVERLAP) {
            // For overlap mode we are send one extra trigger per channel per side for volume-switching (both PLogic and not)
            // This holds for all multi-channel modes, just the order in which the extra trigger comes varies
            // Very last trigger won't ever return a frame so subtract 1.
            nrSlicesSoftware = ((acqSettings.numSlices + 1) * acqSettings.numChannels
                    * acqSettings.numTimepoints);
            if (twoSided && acqBothCameras) {
                nrSlicesSoftware *= 2;
            }
            nrSlicesSoftware -= 1;
        } else {
            // we get back one image per trigger for all trigger modes other than OVERLAP
            //   and we have already computed how many images that is (nrSlicesSoftware)
            nrSlicesSoftware *= acqSettings.numTimepoints;
            if (twoSided && acqBothCameras) {
                nrSlicesSoftware *= 2;
            }
        }
    }

    // set up XY positions
    int nrPositions = 1;
    PositionList positionList = new PositionList();
    if (acqSettings.useMultiPositions) {
        try {
            positionList = gui_.getPositionList();
            nrPositions = positionList.getNumberOfPositions();
        } catch (MMScriptException ex) {
            MyDialogUtils.showError(ex, "Error getting position list for multiple XY positions");
        }
        if (nrPositions < 1) {
            MyDialogUtils.showError("\"Positions\" is checked, but no positions are in position list");
            return false;
        }
    }

    // make sure we have cameras selected
    if (!checkCamerasAssigned(true)) {
        return false;
    }

    final float cameraReadoutTime = computeCameraReadoutTime();
    final double exposureTime = acqSettings.sliceTiming.cameraExposure;

    final boolean save = saveCB_.isSelected() && !testAcq;
    final String rootDir = rootField_.getText();

    // make sure we have a valid directory to save in
    final File dir = new File(rootDir);
    if (save) {
        try {
            if (!dir.exists()) {
                if (!dir.mkdir()) {
                    throw new Exception();
                }
            }
        } catch (Exception ex) {
            MyDialogUtils.showError("Could not create directory for saving acquisition data.");
            return false;
        }
    }

    if (acqSettings.separateTimepoints) {
        // because separate timepoints closes windows when done, force the user to save data to disk to avoid confusion
        if (!save) {
            MyDialogUtils.showError("For separate timepoints, \"Save while acquiring\" must be enabled.");
            return false;
        }
        // for separate timepoints, make sure the directory is empty to make sure naming pattern is "clean"
        // this is an arbitrary choice to avoid confusion later on when looking at file names
        if (dir.list().length > 0) {
            MyDialogUtils.showError("For separate timepoints the saving directory must be empty.");
            return false;
        }
    }

    int nrFrames; // how many Micro-manager "frames" = time points to take
    if (acqSettings.separateTimepoints) {
        nrFrames = 1;
        nrRepeats_ = acqSettings.numTimepoints;
    } else {
        nrFrames = acqSettings.numTimepoints;
        nrRepeats_ = 1;
    }

    AcquisitionModes.Keys spimMode = acqSettings.spimMode;

    // cache hardware state that we modify during the acquisition and must restore at the end
    boolean autoShutter = core_.getAutoShutter();
    boolean shutterOpen = false; // will read later
    String originalCamera = core_.getCameraDevice();

    // more sanity checks
    // TODO move these checks earlier, before we set up channels and XY positions

    // make sure stage scan is supported if selected
    if (acqSettings.isStageScanning) {
        // stage scanning requires an ASI Tiger controller with scan-capable firmware
        //   (presence of the STAGESCAN_NUMLINES property is used as the capability probe)
        if (!devices_.isTigerDevice(Devices.Keys.XYSTAGE)
                || !props_.hasProperty(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_NUMLINES)) {
            MyDialogUtils.showError("Must have stage with scan-enabled firmware for stage scanning.");
            return false;
        }
        if (acqSettings.spimMode == AcquisitionModes.Keys.STAGE_SCAN_INTERLEAVED && acqSettings.numSides < 2) {
            MyDialogUtils.showError("Interleaved mode requires two sides.");
            return false;
        }
    }

    // exposure plus readout must fit within one slice period or frames will be dropped
    double sliceDuration = acqSettings.sliceTiming.sliceDuration;
    if (exposureTime + cameraReadoutTime > sliceDuration) {
        // should only be possible to mess this up using advanced timing settings
        // or if there are errors in our own calculations
        MyDialogUtils.showError("Exposure time of " + exposureTime
                + " is longer than time needed for a line scan with" + " readout time of " + cameraReadoutTime
                + "\n" + "This will result in dropped frames. " + "Please change input");
        return false;
    }

    // if we want to do hardware timepoints make sure there's not a problem
    // lots of different situations where hardware timepoints can't be used...
    // (hardware timepoints = controller triggers all time points itself, so any
    //  feature needing per-timepoint software intervention is incompatible)
    if (acqSettings.hardwareTimepoints) {
        if (acqSettings.useChannels && acqSettings.channelMode == MultichannelModes.Keys.VOLUME_HW) {
            // both hardware time points and volume channel switching use SPIMNumRepeats property
            // TODO this seems a severe limitation, maybe this could be changed in the future via firmware change
            MyDialogUtils.showError("Cannot use hardware time points (small time point interval)"
                    + " with hardware channel switching volume-by-volume.");
            return false;
        }
        if (acqSettings.isStageScanning) {
            // stage scanning needs to be triggered for each time point
            MyDialogUtils.showError(
                    "Cannot use hardware time points (small time point interval)" + " with stage scanning.");
            return false;
        }
        if (acqSettings.separateTimepoints) {
            MyDialogUtils.showError("Cannot use hardware time points (small time point interval)"
                    + " with separate viewers/file for each time point.");
            return false;
        }
        if (acqSettings.useAutofocus) {
            MyDialogUtils.showError("Cannot use hardware time points (small time point interval)"
                    + " with autofocus during acquisition.");
            return false;
        }
        if (acqSettings.useMovementCorrection) {
            MyDialogUtils.showError("Cannot use hardware time points (small time point interval)"
                    + " with movement correction during acquisition.");
            return false;
        }
        if (acqSettings.useChannels && acqSettings.channelMode == MultichannelModes.Keys.VOLUME) {
            MyDialogUtils.showError("Cannot use hardware time points (small time point interval)"
                    + " with software channels (need to use PLogic channel switching).");
            return false;
        }
        if (spimMode == AcquisitionModes.Keys.NO_SCAN) {
            MyDialogUtils.showError("Cannot do hardware time points when no scan mode is used."
                    + " Use the number of slices to set the number of images to acquire.");
            return false;
        }
    }

    // PLogic volume-by-volume channel switching is only wired up for two-sided acquisitions
    if (acqSettings.useChannels && acqSettings.channelMode == MultichannelModes.Keys.VOLUME_HW
            && acqSettings.numSides < 2) {
        MyDialogUtils.showError("Cannot do PLogic channel switching of volume when only one"
                + " side is selected. Pester the developers if you need this.");
        return false;
    }

    // make sure we aren't trying to collect timepoints faster than we can
    // NOTE(review): this check is skipped when multiple XY positions are used —
    //   presumably the per-position overhead makes the estimate unreliable; confirm.
    if (!acqSettings.useMultiPositions && acqSettings.numTimepoints > 1) {
        if (timepointIntervalMs < volumeDuration) {
            MyDialogUtils
                    .showError("Time point interval shorter than" + " the time to collect a single volume.\n");
            return false;
        }
    }

    // Autofocus settings; only used if acqSettings.useAutofocus is true
    boolean autofocusAtT0 = false;
    int autofocusEachNFrames = 10;
    String autofocusChannel = "";
    if (acqSettings.useAutofocus) {
        autofocusAtT0 = prefs_.getBoolean(MyStrings.PanelNames.AUTOFOCUS.toString(),
                Properties.Keys.PLUGIN_AUTOFOCUS_ACQBEFORESTART, false);
        autofocusEachNFrames = props_.getPropValueInteger(Devices.Keys.PLUGIN,
                Properties.Keys.PLUGIN_AUTOFOCUS_EACHNIMAGES);
        autofocusChannel = props_.getPropValueString(Devices.Keys.PLUGIN,
                Properties.Keys.PLUGIN_AUTOFOCUS_CHANNEL);
        // double-check that selected channel is valid if we are doing multi-channel
        if (acqSettings.useChannels) {
            String channelGroup = props_.getPropValueString(Devices.Keys.PLUGIN,
                    Properties.Keys.PLUGIN_MULTICHANNEL_GROUP);
            StrVector channels = gui_.getMMCore().getAvailableConfigs(channelGroup);
            boolean found = false;
            for (String channel : channels) {
                if (channel.equals(autofocusChannel)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                MyDialogUtils.showError("Invalid autofocus channel selected on autofocus tab.");
                return false;
            }
        }
    }

    // Movement Correction settings; only used if acqSettings.useMovementCorrection is true
    int correctMovementEachNFrames = 10;
    String correctMovementChannel = "";
    int cmChannelNumber = -1; // index into channelNames_, resolved after channel names are built
    if (acqSettings.useMovementCorrection) {
        correctMovementEachNFrames = props_.getPropValueInteger(Devices.Keys.PLUGIN,
                Properties.Keys.PLUGIN_AUTOFOCUS_CORRECTMOVEMENT_EACHNIMAGES);
        correctMovementChannel = props_.getPropValueString(Devices.Keys.PLUGIN,
                Properties.Keys.PLUGIN_AUTOFOCUS_CORRECTMOVEMENT_CHANNEL);
        // double-check that selected channel is valid if we are doing multi-channel
        // NOTE(review): this duplicates the autofocus channel validation above almost
        //   verbatim; a shared "is channel in group" helper would avoid the copy.
        if (acqSettings.useChannels) {
            String channelGroup = props_.getPropValueString(Devices.Keys.PLUGIN,
                    Properties.Keys.PLUGIN_MULTICHANNEL_GROUP);
            StrVector channels = gui_.getMMCore().getAvailableConfigs(channelGroup);
            boolean found = false;
            for (String channel : channels) {
                if (channel.equals(correctMovementChannel)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                MyDialogUtils.showError("Invalid movement correction channel selected on autofocus tab.");
                return false;
            }
        }

    }

    // the circular buffer, which is used by both cameras, can only have one image size setting
    //    => require same image height and width for both cameras if both are used 
    if (twoSided || acqBothCameras) {
        try {
            Rectangle roi_1 = core_.getROI(firstCamera);
            Rectangle roi_2 = core_.getROI(secondCamera);
            if (roi_1.width != roi_2.width || roi_1.height != roi_2.height) {
                MyDialogUtils.showError(
                        "Two cameras' ROI height and width must be equal because of Micro-Manager's circular buffer");
                return false;
            }
        } catch (Exception ex) {
            // NOTE(review): this path only reports and then proceeds with the
            //   acquisition (no "return false") — confirm that is intentional.
            MyDialogUtils.showError(ex, "Problem getting camera ROIs");
        }
    }

    // put cameras into acquisition mode (e.g. external trigger); undone at end of acquisition
    cameras_.setCameraForAcquisition(firstCameraKey, true);
    if (twoSided || acqBothCameras) {
        cameras_.setCameraForAcquisition(secondCameraKey, true);
    }

    // save exposure time, will restore at end of acquisition
    try {
        prefs_.putFloat(MyStrings.PanelNames.SETTINGS.toString(),
                Properties.Keys.PLUGIN_CAMERA_LIVE_EXPOSURE_FIRST.toString(),
                (float) core_.getExposure(devices_.getMMDevice(firstCameraKey)));
        if (twoSided || acqBothCameras) {
            prefs_.putFloat(MyStrings.PanelNames.SETTINGS.toString(),
                    Properties.Keys.PLUGIN_CAMERA_LIVE_EXPOSURE_SECOND.toString(),
                    (float) core_.getExposure(devices_.getMMDevice(secondCameraKey)));
        }
    } catch (Exception ex) {
        // best-effort: failing to cache the live exposure is not fatal to the acquisition
        MyDialogUtils.showError(ex, "could not cache exposure");
    }

    // apply the acquisition exposure to the camera(s) and refresh the GUI display
    try {
        core_.setExposure(firstCamera, exposureTime);
        if (twoSided || acqBothCameras) {
            core_.setExposure(secondCamera, exposureTime);
        }
        gui_.refreshGUIFromCache();
    } catch (Exception ex) {
        MyDialogUtils.showError(ex, "could not set exposure");
    }

    // seems to have a problem if the core's camera has been set to some other
    // camera before we start doing things, so set to a SPIM camera
    try {
        core_.setCameraDevice(firstCamera);
    } catch (Exception ex) {
        MyDialogUtils.showError(ex, "could not set camera");
    }

    // empty out circular buffer
    try {
        core_.clearCircularBuffer();
    } catch (Exception ex) {
        MyDialogUtils.showError(ex, "Error emptying out the circular buffer");
        return false;
    }

    // initialize stage scanning so we can restore state
    Point2D.Double xyPosUm = new Point2D.Double();
    float origXSpeed = 1f; // don't want 0 in case something goes wrong
    float origXAccel = 1f; // don't want 0 in case something goes wrong
    if (acqSettings.isStageScanning) {
        try {
            xyPosUm = core_.getXYStagePosition(devices_.getMMDevice(Devices.Keys.XYSTAGE));
            origXSpeed = props_.getPropValueFloat(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_SPEED);
            origXAccel = props_.getPropValueFloat(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_ACCEL);
        } catch (Exception ex) {
            MyDialogUtils.showError(
                    "Could not get XY stage position, speed, or acceleration for stage scan initialization");
            posUpdater_.pauseUpdates(false);
            return false;
        }

        // if X speed is less than 0.2 mm/s then it probably wasn't restored to correct speed some other time
        // we offer to set it to a more normal speed in that case, until the user declines and we stop asking
        if (origXSpeed < 0.2 && resetXaxisSpeed_) {
            resetXaxisSpeed_ = MyDialogUtils.getConfirmDialogResult(
                    "Max speed of X axis is small, perhaps it was not correctly restored after stage scanning previously.  Do you want to set it to 1 mm/s now?",
                    JOptionPane.YES_NO_OPTION);
            // once the user selects "no" then resetXaxisSpeed_ will be false and stay false until plugin is launched again
            if (resetXaxisSpeed_) {
                props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_SPEED, 1f);
                origXSpeed = 1f;
            }
        }
    }

    numTimePointsDone_ = 0;

    // force saving as image stacks, not individual files
    // implementation assumes just two options, either 
    //  TaggedImageStorageDiskDefault.class or TaggedImageStorageMultipageTiff.class
    boolean separateImageFilesOriginally = ImageUtils.getImageStorageClass()
            .equals(TaggedImageStorageDiskDefault.class);
    ImageUtils.setImageStorageClass(TaggedImageStorageMultipageTiff.class);

    // Set up controller SPIM parameters (including from Setup panel settings)
    // want to do this, even with demo cameras, so we can test everything else
    if (!controller_.prepareControllerForAquisition(acqSettings)) {
        posUpdater_.pauseUpdates(false);
        return false;
    }

    boolean nonfatalError = false;
    long acqButtonStart = System.currentTimeMillis();
    String acqName = "";
    acq_ = null;

    // do not want to return from within this loop => throw exception instead
    // loop is executed once per acquisition (i.e. once if separate viewers isn't selected
    //   or once per timepoint if separate viewers is selected)
    long repeatStart = System.currentTimeMillis();
    for (int acqNum = 0; !cancelAcquisition_.get() && acqNum < nrRepeats_; acqNum++) {
        // handle intervals between (software-timed) repeats
        // only applies when doing separate viewers for each timepoint
        // and have multiple timepoints
        long repeatNow = System.currentTimeMillis();
        long repeatdelay = repeatStart + acqNum * timepointIntervalMs - repeatNow;
        while (repeatdelay > 0 && !cancelAcquisition_.get()) {
            updateAcquisitionStatus(AcquisitionStatus.WAITING, (int) (repeatdelay / 1000));
            long sleepTime = Math.min(1000, repeatdelay);
            try {
                Thread.sleep(sleepTime);
            } catch (InterruptedException e) {
                ReportingUtils.showError(e);
            }
            repeatNow = System.currentTimeMillis();
            repeatdelay = repeatStart + acqNum * timepointIntervalMs - repeatNow;
        }

        BlockingQueue<TaggedImage> bq = new LinkedBlockingQueue<TaggedImage>(10);

        // try to close last acquisition viewer if there could be one open (only in single acquisition per timepoint mode)
        if (acqSettings.separateTimepoints && (acq_ != null) && !cancelAcquisition_.get()) {
            try {
                // following line needed due to some arcane internal reason, otherwise
                //   call to closeAcquisitionWindow() fails silently. 
                //   See http://sourceforge.net/p/micro-manager/mailman/message/32999320/
                acq_.promptToSave(false);
                gui_.closeAcquisitionWindow(acqName);
            } catch (Exception ex) {
                // do nothing if unsuccessful
            }
        }

        if (acqSettings.separateTimepoints) {
            // call to getUniqueAcquisitionName is extra safety net, we have checked that directory is empty before starting
            acqName = gui_.getUniqueAcquisitionName(prefixField_.getText() + "_" + acqNum);
        } else {
            acqName = gui_.getUniqueAcquisitionName(prefixField_.getText());
        }

        long extraStageScanTimeout = 0;
        if (acqSettings.isStageScanning) {
            // approximately compute the extra time to wait for stack to begin (ramp up time)
            //   by getting the volume duration and subtracting the acquisition duration and then dividing by two
            extraStageScanTimeout = (long) Math.ceil(computeActualVolumeDuration(acqSettings)
                    - (acqSettings.numSlices * acqSettings.numChannels * acqSettings.sliceTiming.sliceDuration))
                    / 2;
        }

        long extraMultiXYTimeout = 0;
        if (acqSettings.useMultiPositions) {
            // give 20 extra seconds to arrive at intended XY position instead of trying to get fancy about computing actual move time
            extraMultiXYTimeout = XYSTAGETIMEOUT;
            // furthermore make sure that the main timeout value is at least 20ms because MM's position list uses this (via MultiStagePosition.goToPosition)
            if (props_.getPropValueInteger(Devices.Keys.CORE,
                    Properties.Keys.CORE_TIMEOUT_MS) < XYSTAGETIMEOUT) {
                props_.setPropValue(Devices.Keys.CORE, Properties.Keys.CORE_TIMEOUT_MS, XYSTAGETIMEOUT);
            }
        }

        VirtualAcquisitionDisplay vad = null;
        WindowListener wl_acq = null;
        WindowListener[] wls_orig = null;
        try {
            // check for stop button before each acquisition
            if (cancelAcquisition_.get()) {
                throw new IllegalMonitorStateException("User stopped the acquisition");
            }

            // flag that we are actually running acquisition now
            acquisitionRunning_.set(true);

            ReportingUtils.logMessage("diSPIM plugin starting acquisition " + acqName
                    + " with following settings: " + acqSettingsJSON);

            // total MM channels = sides * channels, doubled when acquiring with both cameras ("epi" set)
            final int numMMChannels = acqSettings.numSides * acqSettings.numChannels * (acqBothCameras ? 2 : 1);

            if (spimMode == AcquisitionModes.Keys.NO_SCAN && !acqSettings.separateTimepoints) {
                // swap nrFrames and numSlices
                gui_.openAcquisition(acqName, rootDir, acqSettings.numSlices, numMMChannels, nrFrames,
                        nrPositions, true, save);
            } else {
                gui_.openAcquisition(acqName, rootDir, nrFrames, numMMChannels, acqSettings.numSlices,
                        nrPositions, true, save);
            }

            channelNames_ = new String[numMMChannels];

            // generate channel names and colors
            // also builds viewString for MultiViewRegistration metadata
            String viewString = "";
            final String SEPARATOR = "_";
            for (int reflect = 0; reflect < 2; reflect++) {
                // only run for loop once unless acqBothCameras is true
                // if acqBothCameras is true then run second time to add "epi" channels
                if (reflect > 0 && !acqBothCameras) {
                    continue;
                }
                // set up channels (side A/B is treated as channel too)
                if (acqSettings.useChannels) {
                    ChannelSpec[] channels = multiChannelPanel_.getUsedChannels();
                    for (int i = 0; i < channels.length; i++) {
                        String chName = "-" + channels[i].config_ + (reflect > 0 ? "-epi" : "");
                        // same algorithm for channel index vs. specified channel and side as in comments of code below
                        //   that figures out the channel where to file each incoming image
                        int channelIndex = i;
                        if (twoSided) {
                            channelIndex *= 2;
                        }
                        channelIndex += reflect * numMMChannels / 2; // epi channels occupy the upper half
                        channelNames_[channelIndex] = firstCamera + chName;
                        viewString += NumberUtils.intToDisplayString(0) + SEPARATOR;
                        if (twoSided) {
                            channelNames_[channelIndex + 1] = secondCamera + chName;
                            viewString += NumberUtils.intToDisplayString(90) + SEPARATOR;
                        }
                    }
                } else { // single-channel
                    int channelIndex = reflect * numMMChannels / 2;
                    channelNames_[channelIndex] = firstCamera + (reflect > 0 ? "-epi" : "");
                    viewString += NumberUtils.intToDisplayString(0) + SEPARATOR;
                    if (twoSided) {
                        channelNames_[channelIndex + 1] = secondCamera + (reflect > 0 ? "-epi" : "");
                        viewString += NumberUtils.intToDisplayString(90) + SEPARATOR;
                    }
                }
            }
            // strip last separator of viewString (for Multiview Reconstruction)
            viewString = viewString.substring(0, viewString.length() - 1);

            // assign channel names and colors
            for (int i = 0; i < numMMChannels; i++) {
                gui_.setChannelName(acqName, i, channelNames_[i]);
                gui_.setChannelColor(acqName, i, getChannelColor(i));
            }

            if (acqSettings.useMovementCorrection) {
                // NOTE(review): only the first acqSettings.numChannels entries are searched
                //   and only against firstCamera-prefixed names — presumably correction is
                //   always done on the first side's channels; confirm.
                for (int i = 0; i < acqSettings.numChannels; i++) {
                    if (channelNames_[i].equals(firstCamera + "-" + correctMovementChannel)) {
                        cmChannelNumber = i;
                    }
                }
                if (cmChannelNumber == -1) {
                    // NOTE(review): "auitofocus" typo in the user-visible message below;
                    //   also this returns from inside the try after acquisitionRunning_
                    //   was set true — confirm cleanup happens on this path.
                    MyDialogUtils.showError(
                            "The channel selected for movement correction on the auitofocus tab was not found in this acquisition");
                    return false;
                }
            }

            zStepUm_ = acqSettings.isStageScanning ? controller_.getActualStepSizeUm() // computed step size, accounting for quantization of controller
                    : acqSettings.stepSizeUm; // should be same as PanelUtils.getSpinnerFloatValue(stepSize_)

            // initialize acquisition
            gui_.initializeAcquisition(acqName, (int) core_.getImageWidth(), (int) core_.getImageHeight(),
                    (int) core_.getBytesPerPixel(), (int) core_.getImageBitDepth());
            gui_.promptToSaveAcquisition(acqName, !testAcq);

            // These metadata have to be added after initialization, 
            // otherwise they will not be shown?!
            gui_.setAcquisitionProperty(acqName, "NumberOfSides",
                    NumberUtils.doubleToDisplayString(acqSettings.numSides));
            gui_.setAcquisitionProperty(acqName, "FirstSide", acqSettings.firstSideIsA ? "A" : "B");
            gui_.setAcquisitionProperty(acqName, "SlicePeriod_ms", actualSlicePeriodLabel_.getText());
            gui_.setAcquisitionProperty(acqName, "LaserExposure_ms",
                    NumberUtils.doubleToDisplayString(acqSettings.desiredLightExposure));
            gui_.setAcquisitionProperty(acqName, "VolumeDuration", actualVolumeDurationLabel_.getText());
            gui_.setAcquisitionProperty(acqName, "SPIMmode", spimMode.toString());
            // Multi-page TIFF saving code wants this one (cameras are all 16-bits, so not much reason for anything else)
            gui_.setAcquisitionProperty(acqName, "PixelType", "GRAY16");
            gui_.setAcquisitionProperty(acqName, "UseAutofocus",
                    acqSettings.useAutofocus ? Boolean.TRUE.toString() : Boolean.FALSE.toString());
            gui_.setAcquisitionProperty(acqName, "UseMotionCorrection",
                    acqSettings.useMovementCorrection ? Boolean.TRUE.toString() : Boolean.FALSE.toString());
            gui_.setAcquisitionProperty(acqName, "HardwareTimepoints",
                    acqSettings.hardwareTimepoints ? Boolean.TRUE.toString() : Boolean.FALSE.toString());
            gui_.setAcquisitionProperty(acqName, "SeparateTimepoints",
                    acqSettings.separateTimepoints ? Boolean.TRUE.toString() : Boolean.FALSE.toString());
            gui_.setAcquisitionProperty(acqName, "CameraMode", acqSettings.cameraMode.toString());
            gui_.setAcquisitionProperty(acqName, "z-step_um", NumberUtils.doubleToDisplayString(zStepUm_));
            // Properties for use by MultiViewRegistration plugin
            // Format is: x_y_z, set to 1 if we should rotate around this axis.
            gui_.setAcquisitionProperty(acqName, "MVRotationAxis", "0_1_0");
            gui_.setAcquisitionProperty(acqName, "MVRotations", viewString);
            // save XY and SPIM head position in metadata
            // update positions first at expense of two extra serial transactions
            refreshXYZPositions();
            gui_.setAcquisitionProperty(acqName, "Position_X",
                    positions_.getPositionString(Devices.Keys.XYSTAGE, Directions.X));
            gui_.setAcquisitionProperty(acqName, "Position_Y",
                    positions_.getPositionString(Devices.Keys.XYSTAGE, Directions.Y));
            gui_.setAcquisitionProperty(acqName, "Position_SPIM_Head",
                    positions_.getPositionString(Devices.Keys.UPPERZDRIVE));
            gui_.setAcquisitionProperty(acqName, "SPIMAcqSettings", acqSettingsJSON);
            gui_.setAcquisitionProperty(acqName, "SPIMtype", ASIdiSPIM.oSPIM ? "oSPIM" : "diSPIM");
            gui_.setAcquisitionProperty(acqName, "AcquisitionName", acqName);
            gui_.setAcquisitionProperty(acqName, "Prefix", acqName);

            // get circular buffer ready
            // do once here but not per-trigger; need to ensure ROI changes registered
            core_.initializeCircularBuffer(); // superset of clearCircularBuffer()

            // TODO: use new acquisition interface that goes through the pipeline
            //gui_.setAcquisitionAddImageAsynchronous(acqName); 
            acq_ = gui_.getAcquisition(acqName);

            // Dive into MM internals since script interface does not support pipelines
            ImageCache imageCache = acq_.getImageCache();
            vad = acq_.getAcquisitionWindow();
            imageCache.addImageCacheListener(vad);

            // Start pumping images into the ImageCache
            // (sink drains the BlockingQueue bq on its own thread)
            DefaultTaggedImageSink sink = new DefaultTaggedImageSink(bq, imageCache);
            sink.start();

            // remove usual window listener(s) and replace it with our own
            //   that will prompt before closing and cancel acquisition if confirmed
            // this should be considered a hack, it may not work perfectly
            // I have confirmed that there is only one windowListener and it seems to 
            //   also be related to window closing
            // Note that ImageJ's acquisition window is AWT instead of Swing
            wls_orig = vad.getImagePlus().getWindow().getWindowListeners();
            for (WindowListener l : wls_orig) {
                vad.getImagePlus().getWindow().removeWindowListener(l);
            }
            wl_acq = new WindowAdapter() {
                @Override
                public void windowClosing(WindowEvent arg0) {
                    // if running acquisition only close if user confirms
                    if (acquisitionRunning_.get()) {
                        boolean stop = MyDialogUtils.getConfirmDialogResult(
                                "Do you really want to abort the acquisition?", JOptionPane.YES_NO_OPTION);
                        if (stop) {
                            cancelAcquisition_.set(true);
                        }
                    }
                }
            };
            vad.getImagePlus().getWindow().addWindowListener(wl_acq);

            // patterned after implementation in MMStudio.java
            // will be null if not saving to disk
            lastAcquisitionPath_ = acq_.getImageCache().getDiskLocation();
            lastAcquisitionName_ = acqName;

            // only used when motion correction was requested
            MovementDetector[] movementDetectors = new MovementDetector[nrPositions];

            // Transformation matrices to convert between camera and stage coordinates
            // (light sheets are at +/-45 degrees about the Y axis for the two arms)
            final Vector3D yAxis = new Vector3D(0.0, 1.0, 0.0);
            final Rotation camARotation = new Rotation(yAxis, Math.toRadians(-45));
            final Rotation camBRotation = new Rotation(yAxis, Math.toRadians(45));

            final Vector3D zeroPoint = new Vector3D(0.0, 0.0, 0.0); // cache a zero point for efficiency

            // make sure all devices have arrived, e.g. a stage isn't still moving
            try {
                core_.waitForSystem();
            } catch (Exception e) {
                // best-effort: log and continue rather than abort the acquisition
                ReportingUtils.logError("error waiting for system");
            }

            // Loop over all the times we trigger the controller's acquisition
            //  (although if multi-channel with volume switching is selected there
            //   is inner loop to trigger once per channel)
            // remember acquisition start time for software-timed timepoints
            // For hardware-timed timepoints we only trigger the controller once
            long acqStart = System.currentTimeMillis();
            for (int trigNum = 0; trigNum < nrFrames; trigNum++) {
                // handle intervals between (software-timed) time points
                // when we are within the same acquisition
                // (if separate viewer is selected then nothing bad happens here
                // but waiting during interval handled elsewhere)
                long acqNow = System.currentTimeMillis();
                long delay = acqStart + trigNum * timepointIntervalMs - acqNow;
                while (delay > 0 && !cancelAcquisition_.get()) {
                    updateAcquisitionStatus(AcquisitionStatus.WAITING, (int) (delay / 1000));
                    long sleepTime = Math.min(1000, delay);
                    // NOTE(review): unlike the repeat-interval wait above, this sleep is not
                    //   wrapped in try/catch, so InterruptedException propagates to the
                    //   enclosing handler — presumably intended as an abort path; confirm.
                    Thread.sleep(sleepTime);
                    acqNow = System.currentTimeMillis();
                    delay = acqStart + trigNum * timepointIntervalMs - acqNow;
                }

                // check for stop button before each time point
                if (cancelAcquisition_.get()) {
                    throw new IllegalMonitorStateException("User stopped the acquisition");
                }

                int timePoint = acqSettings.separateTimepoints ? acqNum : trigNum;

                // this is where we autofocus if requested
                if (acqSettings.useAutofocus) {
                    // Note that we will not autofocus as expected when using hardware
                    // timing.  Seems OK, since hardware timing will result in short
                    // acquisition times that do not need autofocus.  We have already
                    // ensured that we aren't doing both
                    if ((autofocusAtT0 && timePoint == 0)
                            || ((timePoint > 0) && (timePoint % autofocusEachNFrames == 0))) {
                        if (acqSettings.useChannels) {
                            multiChannelPanel_.selectChannel(autofocusChannel);
                        }
                        if (sideActiveA) {
                            AutofocusUtils.FocusResult score = autofocus_.runFocus(this, Devices.Sides.A, false,
                                    sliceTiming_, false);
                            updateCalibrationOffset(Devices.Sides.A, score);
                        }
                        if (sideActiveB) {
                            AutofocusUtils.FocusResult score = autofocus_.runFocus(this, Devices.Sides.B, false,
                                    sliceTiming_, false);
                            updateCalibrationOffset(Devices.Sides.B, score);
                        }
                        // Restore settings of the controller
                        controller_.prepareControllerForAquisition(acqSettings);
                        if (acqSettings.useChannels
                                && acqSettings.channelMode != MultichannelModes.Keys.VOLUME) {
                            controller_.setupHardwareChannelSwitching(acqSettings);
                        }
                        // make sure circular buffer is cleared
                        core_.clearCircularBuffer();
                    }
                }

                numTimePointsDone_++;
                updateAcquisitionStatus(AcquisitionStatus.ACQUIRING);

                // loop over all positions
                for (int positionNum = 0; positionNum < nrPositions; positionNum++) {
                    if (acqSettings.useMultiPositions) {

                        // make sure user didn't stop things
                        if (cancelAcquisition_.get()) {
                            throw new IllegalMonitorStateException("User stopped the acquisition");
                        }

                        // want to move between positions move stage fast, so we 
                        //   will clobber stage scanning setting so need to restore it
                        float scanXSpeed = 1f;
                        float scanXAccel = 1f;
                        if (acqSettings.isStageScanning) {
                            // save the (slow) scan speed/accel, then restore the original
                            //   (fast) values for the inter-position move
                            scanXSpeed = props_.getPropValueFloat(Devices.Keys.XYSTAGE,
                                    Properties.Keys.STAGESCAN_MOTOR_SPEED);
                            props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_SPEED,
                                    origXSpeed);
                            scanXAccel = props_.getPropValueFloat(Devices.Keys.XYSTAGE,
                                    Properties.Keys.STAGESCAN_MOTOR_ACCEL);
                            props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_ACCEL,
                                    origXAccel);
                        }

                        final MultiStagePosition nextPosition = positionList.getPosition(positionNum);

                        // blocking call; will wait for stages to move
                        MultiStagePosition.goToPosition(nextPosition, core_);

                        // for stage scanning: restore speed and set up scan at new position 
                        // non-multi-position situation is handled in prepareControllerForAquisition instead
                        if (acqSettings.isStageScanning) {
                            props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_SPEED,
                                    scanXSpeed);
                            props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_ACCEL,
                                    scanXAccel);
                            StagePosition pos = nextPosition.get(devices_.getMMDevice(Devices.Keys.XYSTAGE)); // get ideal position from position list, not current position
                            controller_.prepareStageScanForAcquisition(pos.x, pos.y);
                        }

                        refreshXYZPositions();

                        // wait any extra time the user requests
                        Thread.sleep(Math.round(PanelUtils.getSpinnerFloatValue(positionDelay_)));
                    }

                    // loop over all the times we trigger the controller
                    // usually just once, but will be the number of channels if we have
                    //  multiple channels and aren't using PLogic to change between them
                    for (int channelNum = 0; channelNum < nrChannelsSoftware; channelNum++) {
                        try {
                            // flag that we are using the cameras/controller
                            ASIdiSPIM.getFrame().setHardwareInUse(true);

                            // deal with shutter before starting acquisition
                            shutterOpen = core_.getShutterOpen();
                            if (autoShutter) {
                                core_.setAutoShutter(false);
                                if (!shutterOpen) {
                                    core_.setShutterOpen(true);
                                }
                            }

                            // start the cameras
                            core_.startSequenceAcquisition(firstCamera, nrSlicesSoftware, 0, true);
                            if (twoSided || acqBothCameras) {
                                core_.startSequenceAcquisition(secondCamera, nrSlicesSoftware, 0, true);
                            }

                            // deal with channel if needed (hardware channel switching doesn't happen here)
                            if (changeChannelPerVolumeSoftware) {
                                multiChannelPanel_.selectNextChannel();
                            }

                            // special case: single-sided piezo acquisition risks illumination piezo sleeping
                            // prevent this from happening by sending relative move of 0 like we do in live mode before each trigger
                            // NB: this won't help for hardware-timed timepoints
                            final Devices.Keys piezoIllumKey = firstSideA ? Devices.Keys.PIEZOB
                                    : Devices.Keys.PIEZOA;
                            if (!twoSided && props_.getPropValueInteger(piezoIllumKey,
                                    Properties.Keys.AUTO_SLEEP_DELAY) > 0) {
                                core_.setRelativePosition(devices_.getMMDevice(piezoIllumKey), 0);
                            }

                            // trigger the state machine on the controller
                            // do this even with demo cameras to test everything else
                            boolean success = controller_.triggerControllerStartAcquisition(spimMode,
                                    firstSideA);
                            if (!success) {
                                throw new Exception("Controller triggering not successful");
                            }

                            ReportingUtils.logDebugMessage("Starting time point " + (timePoint + 1) + " of "
                                    + nrFrames + " with (software) channel number " + channelNum);

                            // Wait for first image to create ImageWindow, so that we can be sure about image size
                            // Do not actually grab first image here, just make sure it is there
                            long start = System.currentTimeMillis();
                            long now = start;
                            final long timeout = Math.max(3000,
                                    Math.round(10 * sliceDuration + 2 * acqSettings.delayBeforeSide))
                                    + extraStageScanTimeout + extraMultiXYTimeout;
                            while (core_.getRemainingImageCount() == 0 && (now - start < timeout)
                                    && !cancelAcquisition_.get()) {
                                now = System.currentTimeMillis();
                                Thread.sleep(5);
                            }
                            if (now - start >= timeout) {
                                String msg = "Camera did not send first image within a reasonable time.\n";
                                if (acqSettings.isStageScanning) {
                                    msg += "Make sure jumpers are correct on XY card and also micro-micromirror card.";
                                } else {
                                    msg += "Make sure camera trigger cables are connected properly.";
                                }
                                throw new Exception(msg);
                            }

                            // grab all the images from the cameras, put them into the acquisition
                            int[] channelImageNr = new int[4 * acqSettings.numChannels]; // keep track of how many frames we have received for each MM "channel"
                            int[] cameraImageNr = new int[2]; // keep track of how many images we have received from the camera
                            int[] tpNumber = new int[2 * acqSettings.numChannels]; // keep track of which timepoint we are on for hardware timepoints
                            int imagesToSkip = 0; // hardware timepoints have to drop spurious images with overlap mode
                            final boolean checkForSkips = acqSettings.hardwareTimepoints
                                    && (acqSettings.cameraMode == CameraModes.Keys.OVERLAP);
                            boolean done = false;
                            long timeout2 = Math.max(1000, Math.round(5 * sliceDuration));
                            if (acqSettings.isStageScanning) { // for stage scanning have to allow extra time for turn-around
                                timeout2 += (2 * (long) Math.ceil(getStageRampDuration(acqSettings))); // ramp up and then down
                                timeout2 += 5000; // ample extra time for turn-around (e.g. antibacklash move in Y), interestingly 500ms extra seems insufficient for reasons I don't understand yet so just pad this for now  // TODO figure out why turn-aronud is taking so long
                                if (acqSettings.spimMode == AcquisitionModes.Keys.STAGE_SCAN_UNIDIRECTIONAL) {
                                    timeout2 += (long) Math.ceil(getStageRetraceDuration(acqSettings)); // in unidirectional case also need to rewind
                                }
                            }
                            start = System.currentTimeMillis();
                            long last = start;
                            try {
                                while ((core_.getRemainingImageCount() > 0
                                        || core_.isSequenceRunning(firstCamera) || ((twoSided || acqBothCameras)
                                                && core_.isSequenceRunning(secondCamera)))
                                        && !done) {
                                    now = System.currentTimeMillis();
                                    if (core_.getRemainingImageCount() > 0) { // we have an image to grab
                                        TaggedImage timg = core_.popNextTaggedImage();

                                        if (checkForSkips && imagesToSkip != 0) {
                                            imagesToSkip--;
                                            continue; // goes to next iteration of this loop without doing anything else
                                        }

                                        // figure out which channel index this frame belongs to
                                        // "channel index" is channel of MM acquisition
                                        // channel indexes will go from 0 to (numSides * numChannels - 1) for standard (non-reflective) imaging
                                        // if double-sided then second camera gets odd channel indexes (1, 3, etc.)
                                        //    and adjacent pairs will be same color (e.g. 0 and 1 will be from first color, 2 and 3 from second, etc.)
                                        // if acquisition from both cameras (reflective imaging) then
                                        //    second half of channel indices are from opposite (epi) view
                                        // e.g. for 3-color 1-sided (A first) standard (non-reflective) then
                                        //    0 will be A-illum A-cam 1st color
                                        //    2 will be A-illum A-cam 2nd color
                                        //    4 will be A-illum A-cam 3rd color
                                        // e.g. for 3-color 2-sided (A first) standard (non-reflective) then
                                        //    0 will be A-illum A-cam 1st color
                                        //    1 will be B-illum B-cam 1st color
                                        //    2 will be A-illum A-cam 2nd color
                                        //    3 will be B-illum B-cam 2nd color
                                        //    4 will be A-illum A-cam 3rd color
                                        //    5 will be B-illum B-cam 3rd color
                                        // e.g. for 3-color 1-sided (A first) both camera (reflective) then
                                        //    0 will be A-illum A-cam 1st color
                                        //    1 will be A-illum A-cam 2nd color
                                        //    2 will be A-illum A-cam 3rd color
                                        //    3 will be A-illum B-cam 1st color
                                        //    4 will be A-illum B-cam 2nd color
                                        //    5 will be A-illum B-cam 3rd color
                                        // e.g. for 3-color 2-sided (A first) both camera (reflective) then
                                        //    0 will be A-illum A-cam 1st color
                                        //    1 will be B-illum B-cam 1st color
                                        //    2 will be A-illum A-cam 2nd color
                                        //    3 will be B-illum B-cam 2nd color
                                        //    4 will be A-illum A-cam 3rd color
                                        //    5 will be B-illum B-cam 3rd color
                                        //    6 will be A-illum B-cam 1st color
                                        //    7 will be B-illum A-cam 1st color
                                        //    8 will be A-illum B-cam 2nd color
                                        //    9 will be B-illum A-cam 2nd color
                                        //   10 will be A-illum B-cam 3rd color
                                        //   11 will be B-illum A-cam 3rd color
                                        String camera = (String) timg.tags.get("Camera");
                                        int cameraIndex = camera.equals(firstCamera) ? 0 : 1;
                                        int channelIndex_tmp;
                                        switch (acqSettings.channelMode) {
                                        case NONE:
                                        case VOLUME:
                                            channelIndex_tmp = channelNum;
                                            break;
                                        case VOLUME_HW:
                                            channelIndex_tmp = cameraImageNr[cameraIndex]
                                                    / acqSettings.numSlices; // want quotient only
                                            break;
                                        case SLICE_HW:
                                            channelIndex_tmp = cameraImageNr[cameraIndex]
                                                    % acqSettings.numChannels; // want modulo arithmetic
                                            break;
                                        default:
                                            // should never get here
                                            throw new Exception("Undefined channel mode");
                                        }
                                        if (acqBothCameras) {
                                            if (twoSided) { // 2-sided, both cameras
                                                channelIndex_tmp = channelIndex_tmp * 2 + cameraIndex;
                                                // determine whether first or second side by whether we've seen half the images yet
                                                if (cameraImageNr[cameraIndex] > nrSlicesSoftware / 2) {
                                                    // second illumination side => second half of channels
                                                    channelIndex_tmp += 2 * acqSettings.numChannels;
                                                }
                                            } else { // 1-sided, both cameras
                                                channelIndex_tmp += cameraIndex * acqSettings.numChannels;
                                            }
                                        } else { // normal situation, non-reflective imaging
                                            if (twoSided) {
                                                channelIndex_tmp *= 2;
                                            }
                                            channelIndex_tmp += cameraIndex;
                                        }
                                        final int channelIndex = channelIndex_tmp;

                                        int actualTimePoint = timePoint;
                                        if (acqSettings.hardwareTimepoints) {
                                            actualTimePoint = tpNumber[channelIndex];
                                        }
                                        if (acqSettings.separateTimepoints) {
                                            // if we are doing separate timepoints then frame is always 0
                                            actualTimePoint = 0;
                                        }
                                        // note that hardwareTimepoints and separateTimepoints can never both be true

                                        // add image to acquisition
                                        if (spimMode == AcquisitionModes.Keys.NO_SCAN
                                                && !acqSettings.separateTimepoints) {
                                            // create time series for no scan
                                            addImageToAcquisition(acq_, channelImageNr[channelIndex],
                                                    channelIndex, actualTimePoint, positionNum, now - acqStart,
                                                    timg, bq);
                                        } else { // standard, create Z-stacks
                                            addImageToAcquisition(acq_, actualTimePoint, channelIndex,
                                                    channelImageNr[channelIndex], positionNum, now - acqStart,
                                                    timg, bq);
                                        }

                                        // update our counters to be ready for next image
                                        channelImageNr[channelIndex]++;
                                        cameraImageNr[cameraIndex]++;

                                        // if hardware timepoints then we only send one trigger and
                                        //   manually keep track of which channel/timepoint comes next
                                        if (acqSettings.hardwareTimepoints
                                                && channelImageNr[channelIndex] >= acqSettings.numSlices) { // only do this if we are done with the slices in this MM channel

                                            // we just finished filling one MM channel with all its slices so go to next timepoint for this channel
                                            channelImageNr[channelIndex] = 0;
                                            tpNumber[channelIndex]++;

                                            // see if we are supposed to skip next image
                                            if (checkForSkips) {
                                                // one extra image per MM channel, this includes case of only 1 color (either multi-channel disabled or else only 1 channel selected)
                                                // if we are interleaving by slice then next nrChannel images will be from extra slice position
                                                // any other configuration we will just drop the next image
                                                if (acqSettings.useChannels
                                                        && acqSettings.channelMode == MultichannelModes.Keys.SLICE_HW) {
                                                    imagesToSkip = acqSettings.numChannels;
                                                } else {
                                                    imagesToSkip = 1;
                                                }
                                            }

                                            // update acquisition status message for hardware acquisition
                                            //   (for non-hardware acquisition message is updated elsewhere)
                                            //   Arbitrarily choose one possible channel to do this on.
                                            if (channelIndex == 0
                                                    && (numTimePointsDone_ < acqSettings.numTimepoints)) {
                                                numTimePointsDone_++;
                                                updateAcquisitionStatus(AcquisitionStatus.ACQUIRING);
                                            }
                                        }

                                        last = now; // keep track of last image timestamp

                                    } else { // no image ready yet
                                        done = cancelAcquisition_.get();
                                        Thread.sleep(1);
                                        if (now - last >= timeout2) {
                                            ReportingUtils
                                                    .logError("Camera did not send all expected images within"
                                                            + " a reasonable period for timepoint "
                                                            + numTimePointsDone_ + ".  Continuing anyway.");
                                            nonfatalError = true;
                                            done = true;
                                        }
                                    }
                                }

                                // update count if we stopped in the middle
                                if (cancelAcquisition_.get()) {
                                    numTimePointsDone_--;
                                }

                                // if we are using demo camera then add some extra time to let controller finish
                                // since we got images without waiting for controller to actually send triggers
                                if (usingDemoCam) {
                                    Thread.sleep(200); // for serial communication overhead
                                    Thread.sleep((long) volumeDuration / nrChannelsSoftware); // estimate the time per channel, not ideal in case of software channel switching
                                    if (acqSettings.isStageScanning) {
                                        Thread.sleep(1000 + extraStageScanTimeout); // extra 1 second plus ramp time for stage scanning 
                                    }
                                }

                            } catch (InterruptedException iex) {
                                MyDialogUtils.showError(iex);
                            }

                            if (acqSettings.hardwareTimepoints) {
                                break; // only trigger controller once
                            }

                        } catch (Exception ex) {
                            MyDialogUtils.showError(ex);
                        } finally {
                            // cleanup at the end of each time we trigger the controller

                            ASIdiSPIM.getFrame().setHardwareInUse(false);

                            // put shutter back to original state
                            core_.setShutterOpen(shutterOpen);
                            core_.setAutoShutter(autoShutter);

                            // make sure cameras aren't running anymore
                            if (core_.isSequenceRunning(firstCamera)) {
                                core_.stopSequenceAcquisition(firstCamera);
                            }
                            if ((twoSided || acqBothCameras) && core_.isSequenceRunning(secondCamera)) {
                                core_.stopSequenceAcquisition(secondCamera);
                            }

                            // make sure SPIM state machine on micromirror and SCAN of XY card are stopped (should normally be but sanity check)
                            if ((acqSettings.numSides > 1) || acqSettings.firstSideIsA) {
                                props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SPIM_STATE,
                                        Properties.Values.SPIM_IDLE, true);
                            }
                            if ((acqSettings.numSides > 1) || !acqSettings.firstSideIsA) {
                                props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SPIM_STATE,
                                        Properties.Values.SPIM_IDLE, true);
                            }
                            if (acqSettings.isStageScanning) {
                                props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_STATE,
                                        Properties.Values.SPIM_IDLE);
                            }
                        }
                    }

                    if (acqSettings.useMovementCorrection && (timePoint % correctMovementEachNFrames) == 0) {
                        if (movementDetectors[positionNum] == null) {
                            // Transform from camera space to stage space:
                            Rotation rotation = camBRotation;
                            if (firstSideA) {
                                rotation = camARotation;
                            }
                            movementDetectors[positionNum] = new MovementDetector(prefs_, acq_, cmChannelNumber,
                                    positionNum, rotation);
                        }

                        Vector3D movement = movementDetectors[positionNum]
                                .detectMovement(Method.PhaseCorrelation);

                        String msg1 = "TimePoint: " + timePoint + ", Detected movement.  X: " + movement.getX()
                                + ", Y: " + movement.getY() + ", Z: " + movement.getZ();
                        System.out.println(msg1);

                        if (!movement.equals(zeroPoint)) {
                            String msg = "ASIdiSPIM motion corrector moving stages: X: " + movement.getX()
                                    + ", Y: " + movement.getY() + ", Z: " + movement.getZ();
                            gui_.logMessage(msg);
                            System.out.println(msg);

                            // if we are using the position list, update the position in the list
                            if (acqSettings.useMultiPositions) {
                                MultiStagePosition position = positionList.getPosition(positionNum);
                                StagePosition pos = position.get(devices_.getMMDevice(Devices.Keys.XYSTAGE));
                                pos.x += movement.getX();
                                pos.y += movement.getY();
                                StagePosition zPos = position
                                        .get(devices_.getMMDevice(Devices.Keys.UPPERZDRIVE));
                                if (zPos != null) {
                                    zPos.x += movement.getZ();
                                }
                            } else {
                                // only a single position, move the stage now
                                core_.setRelativeXYPosition(devices_.getMMDevice(Devices.Keys.XYSTAGE),
                                        movement.getX(), movement.getY());
                                core_.setRelativePosition(devices_.getMMDevice(Devices.Keys.UPPERZDRIVE),
                                        movement.getZ());
                            }

                        }
                    }
                }
                if (acqSettings.hardwareTimepoints) {
                    break;
                }
            }
        } catch (IllegalMonitorStateException ex) {
            // do nothing, the acquisition was simply halted during its operation
            // will log error message during finally clause
        } catch (MMScriptException mex) {
            MyDialogUtils.showError(mex);
        } catch (Exception ex) {
            MyDialogUtils.showError(ex);
        } finally { // end of this acquisition (could be about to restart if separate viewers)
            try {
                // restore original window listeners
                try {
                    vad.getImagePlus().getWindow().removeWindowListener(wl_acq);
                    for (WindowListener l : wls_orig) {
                        vad.getImagePlus().getWindow().addWindowListener(l);
                    }
                } catch (Exception ex) {
                    // do nothing, window is probably gone
                }

                if (cancelAcquisition_.get()) {
                    ReportingUtils.logMessage("User stopped the acquisition");
                }

                bq.put(TaggedImageQueue.POISON);
                // TODO: evaluate closeAcquisition call
                // at the moment, the Micro-Manager api has a bug that causes 
                // a closed acquisition not be really closed, causing problems
                // when the user closes a window of the previous acquisition
                // changed r14705 (2014-11-24)
                // gui_.closeAcquisition(acqName);
                ReportingUtils.logMessage("diSPIM plugin acquisition " + acqName + " took: "
                        + (System.currentTimeMillis() - acqButtonStart) + "ms");

                //               while(gui_.isAcquisitionRunning()) {
                //                  Thread.sleep(10);
                //                  ReportingUtils.logMessage("waiting for acquisition to finish.");
                //               }

                // flag that we are done with acquisition
                acquisitionRunning_.set(false);

                // write acquisition settings if requested
                if (lastAcquisitionPath_ != null && prefs_.getBoolean(MyStrings.PanelNames.SETTINGS.toString(),
                        Properties.Keys.PLUGIN_WRITE_ACQ_SETTINGS_FILE, false)) {
                    String path = "";
                    try {
                        path = lastAcquisitionPath_ + File.separator + "AcqSettings.txt";
                        PrintWriter writer = new PrintWriter(path);
                        writer.println(acqSettingsJSON);
                        writer.flush();
                        writer.close();
                    } catch (Exception ex) {
                        MyDialogUtils.showError(ex,
                                "Could not save acquisition settings to file as requested to path " + path);
                    }
                }

            } catch (Exception ex) {
                // exception while stopping sequence acquisition, not sure what to do...
                MyDialogUtils.showError(ex, "Problem while finishing acquisition");
            }
        }

    } // for loop over acquisitions

    // cleanup after end of all acquisitions

    // TODO be more careful and always do these if we actually started acquisition, 
    // even if exception happened

    cameras_.setCameraForAcquisition(firstCameraKey, false);
    if (twoSided || acqBothCameras) {
        cameras_.setCameraForAcquisition(secondCameraKey, false);
    }

    // restore exposure times of SPIM cameras
    try {
        core_.setExposure(firstCamera, prefs_.getFloat(MyStrings.PanelNames.SETTINGS.toString(),
                Properties.Keys.PLUGIN_CAMERA_LIVE_EXPOSURE_FIRST.toString(), 10f));
        if (twoSided || acqBothCameras) {
            core_.setExposure(secondCamera, prefs_.getFloat(MyStrings.PanelNames.SETTINGS.toString(),
                    Properties.Keys.PLUGIN_CAMERA_LIVE_EXPOSURE_SECOND.toString(), 10f));
        }
        gui_.refreshGUIFromCache();
    } catch (Exception ex) {
        MyDialogUtils.showError("Could not restore exposure after acquisition");
    }

    // reset channel to original if we clobbered it
    if (acqSettings.useChannels) {
        multiChannelPanel_.setConfig(originalChannelConfig);
    }

    // clean up controller settings after acquisition
    // want to do this, even with demo cameras, so we can test everything else
    // TODO figure out if we really want to return piezos to 0 position (maybe center position,
    //   maybe not at all since we move when we switch to setup tab, something else??)
    controller_.cleanUpControllerAfterAcquisition(acqSettings.numSides, acqSettings.firstSideIsA, true);

    // if we did stage scanning restore its position and speed
    if (acqSettings.isStageScanning) {
        try {
            // make sure stage scanning state machine is stopped, otherwise setting speed/position won't take
            props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_STATE,
                    Properties.Values.SPIM_IDLE);
            props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_SPEED, origXSpeed);
            props_.setPropValue(Devices.Keys.XYSTAGE, Properties.Keys.STAGESCAN_MOTOR_ACCEL, origXAccel);
            core_.setXYPosition(devices_.getMMDevice(Devices.Keys.XYSTAGE), xyPosUm.x, xyPosUm.y);
        } catch (Exception ex) {
            MyDialogUtils.showError("Could not restore XY stage position after acquisition");
        }
    }

    updateAcquisitionStatus(AcquisitionStatus.DONE);
    posUpdater_.pauseUpdates(false);
    if (testAcq && prefs_.getBoolean(MyStrings.PanelNames.SETTINGS.toString(),
            Properties.Keys.PLUGIN_TESTACQ_SAVE, false)) {
        String path = "";
        try {
            path = prefs_.getString(MyStrings.PanelNames.SETTINGS.toString(),
                    Properties.Keys.PLUGIN_TESTACQ_PATH, "");
            IJ.saveAs(acq_.getAcquisitionWindow().getImagePlus(), "raw", path);
            // TODO consider generating a short metadata file to assist in interpretation
        } catch (Exception ex) {
            MyDialogUtils.showError("Could not save raw data from test acquisition to path " + path);
        }
    }

    if (separateImageFilesOriginally) {
        ImageUtils.setImageStorageClass(TaggedImageStorageDiskDefault.class);
    }

    // restore camera
    try {
        core_.setCameraDevice(originalCamera);
    } catch (Exception ex) {
        MyDialogUtils.showError("Could not restore camera after acquisition");
    }

    if (liveModeOriginally) {
        gui_.enableLiveMode(true);
    }

    if (nonfatalError) {
        MyDialogUtils.showError("Missed some images during acquisition, see core log for details");
    }

    return true;
}