List of usage examples for java.util.List.equals
boolean equals(Object o);
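By the java.util.List contract, equals returns true only when the argument is also a List of the same size whose elements are pairwise equal and in the same order; the concrete implementation class does not matter. Before the real-world examples below, here is a minimal, self-contained sketch of that contract:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;

public class ListEqualsDemo {
    public static void main(String[] args) {
        List<String> arrayList = new ArrayList<>(Arrays.asList("a", "b", "c"));
        List<String> linkedList = new LinkedList<>(Arrays.asList("a", "b", "c"));

        // true: same elements in the same order; the implementation class is irrelevant
        System.out.println(arrayList.equals(linkedList));

        // false: for lists, order is significant
        System.out.println(arrayList.equals(Arrays.asList("c", "b", "a")));

        // false: a List is never equal to a non-List collection, even with the same elements
        System.out.println(arrayList.equals(new HashSet<>(arrayList)));
    }
}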
From source file:org.dspace.content.BundleServiceImpl.java
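Here, List.equals compares the reordered bitstream list with the bundle's current list so the bundle is only rewritten when the requested order actually differs.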
@Override
public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds)
        throws AuthorizeException, SQLException {
    authorizeService.authorizeAction(context, bundle, Constants.WRITE);

    List<Bitstream> currentBitstreams = bundle.getBitstreams();
    List<Bitstream> updatedBitstreams = new ArrayList<Bitstream>();

    // Loop through and ensure these Bitstream IDs are all valid. Add them to list of updatedBitstreams.
    for (int i = 0; i < bitstreamIds.length; i++) {
        UUID bitstreamId = bitstreamIds[i];
        Bitstream bitstream = bitstreamService.find(context, bitstreamId);

        // If we have an invalid Bitstream ID, just ignore it, but log a warning
        if (bitstream == null) {
            // This should never occur but just in case
            log.warn(LogManager.getHeader(context, "Invalid bitstream id while changing bitstream order",
                    "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
            continue;
        }

        // If we have a Bitstream not in the current list, log a warning & exit immediately
        if (!currentBitstreams.contains(bitstream)) {
            log.warn(LogManager.getHeader(context,
                    "Encountered a bitstream not in this bundle while changing bitstream order. Bitstream order will not be changed.",
                    "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
            return;
        }
        updatedBitstreams.add(bitstream);
    }

    // If our lists are different sizes, exit immediately
    if (updatedBitstreams.size() != currentBitstreams.size()) {
        log.warn(LogManager.getHeader(context,
                "Size of old list and new list do not match. Bitstream order will not be changed.",
                "Bundle: " + bundle.getID()));
        return;
    }

    // As long as the order has changed, update it
    if (CollectionUtils.isNotEmpty(updatedBitstreams) && !updatedBitstreams.equals(currentBitstreams)) {
        // First clear out the existing list of bitstreams
        bundle.clearBitstreams();

        // Now add them back in the proper order
        for (Bitstream bitstream : updatedBitstreams) {
            bitstream.getBundles().remove(bundle);
            bundle.addBitstream(bitstream);
            bitstream.getBundles().add(bundle);
            bitstreamService.update(context, bitstream);
        }

        // The order of the bitstreams has changed, ensure that we update the last modified of our item
        Item owningItem = (Item) getParentObject(context, bundle);
        if (owningItem != null) {
            itemService.updateLastModified(context, owningItem);
            itemService.update(context, owningItem);
        }
    }
}
From source file:ubic.BAMSandAllen.MatrixPairs.MatrixPair.java
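In this example, rows.equals(getMatrixBDataRows()) guards a resampling loop: the resampled p-value is only computed when the requested row set differs from the rows actually present in the matrix.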
public Map<String, String> testDataRows(List<String> rows, int testSamples, boolean degree, boolean spearman)
        throws Exception {
    // result - probably should be an object
    Map<String, String> results = new HashMap<String, String>();

    // backup matrix
    ABAMSDataMatrix originalMatrixB = matrixB;

    if (Util.intersect(rows, matrixB.getRowNames()).isEmpty()) {
        throw new RuntimeException("Error, no rows remain");
    }
    setMatrixBDataRows(rows);

    // size
    int size = rows.size();
    results.put("size", size + "");

    // correlation
    String statisticString = "correlation";
    if (degree)
        statisticString = "degree correlation";
    if (spearman && degree)
        statisticString = "degree Spearman correlation";

    double statistic;
    if (degree) {
        statistic = getFlattenedCorrelation(spearman);
    } else {
        // if ( spearman ) throw new RuntimeException( "Spearman not supported" );
        statistic = getCorrelation();
    }
    results.put(statisticString, statistic + "");

    // p-value shuffle
    String spearmanString = "";
    if (spearman)
        spearmanString = " Spearman";
    if (!degree)
        results.put("p-value shuffle" + spearmanString, test(testSamples) + "");

    // p-value for resampling
    Random r = new Random(1);
    int greaterHits = 0;
    int lesserHits = 0;

    // slow
    File f = new File(SetupParameters.getDataFolder() + statisticString + ".distro." + size + ".txt");
    f.delete();
    DoubleArrayList sampleHistory = new DoubleArrayList();

    if (!rows.equals(getMatrixBDataRows())) {
        for (int i = 0; i < testSamples; i++) {
            matrixB = originalMatrixB;
            // randomly choose a set of the same size
            List<String> rownames = new LinkedList<String>(matrixB.getRowNames());
            Collections.shuffle(rownames, r);
            setMatrixBDataRows(rownames.subList(0, size));

            double sampleStatistic;
            if (!degree)
                sampleStatistic = getCorrelation();
            else
                sampleStatistic = getFlattenedCorrelation(spearman);

            FileTools.stringToFile(sampleStatistic + "\n", f, true);
            sampleHistory.add(sampleStatistic);

            if (sampleStatistic < statistic) {
                lesserHits++;
                log.info("Sample " + statisticString + ":" + sampleStatistic + " less than " + statistic);
            }
            if (sampleStatistic > statistic) {
                greaterHits++;
                log.info("Sample " + statisticString + ":" + sampleStatistic + " greater than " + statistic);
            }
        }
        double mean = DescriptiveWithMissing.mean(sampleHistory);
        log.info("Sample " + statisticString + " greater than " + statistic + ", " + greaterHits + " times");
        log.info("Sample " + statisticString + " less than " + statistic + ", " + lesserHits + " times");
        results.put("average resample" + spearmanString, mean + "");
        results.put("standard deviation resample" + spearmanString,
                Math.sqrt(DescriptiveWithMissing.sampleVariance(sampleHistory, mean)) + "");

        double finalHits = 0;
        boolean leftTail = mean > statistic;
        results.put("left tail" + spearmanString, leftTail + "");
        // Choose tail based on center
        if (!leftTail)
            finalHits = greaterHits;
        else
            finalHits = lesserHits;
        results.put("p-value resample" + spearmanString, finalHits / (double) testSamples + "");
        // put it back to the original matrix
    }
    matrixB = originalMatrixB;
    return results;
}
From source file:ugr.cristian.serverVideoApp.PacketHandler.java
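This OpenDaylight packet handler builds a one-element action list and uses List.equals to locate flows whose actions exactly match an output to the failed edge's tail connector.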
/**
 * Function that is called when an edge is down.
 *
 * @param edge The edge which is down.
 * @param node The node where the edge is detected.
 */
private boolean delFlow(Edge edge, Node node) {
    NodeConnector tempConnector = edge.getTailNodeConnector();
    Node tempNode = tempConnector.getNode();
    boolean result = false;
    List<FlowOnNode> flowsOnNode = new ArrayList<FlowOnNode>();

    if (tempNode.equals(node)) {
        try {
            flowsOnNode = statisticsManager.getFlows(tempNode);
        } catch (RuntimeException bad) {
            log.trace("No flows get, time to try in noCache flows");
            try {
                flowsOnNode = statisticsManager.getFlowsNoCache(tempNode);
            } catch (RuntimeException veryBad) {
                log.trace("Impossible to obtain the flows");
            }
        }

        for (int i = 0; i < flowsOnNode.size(); i++) {
            FlowOnNode tempFlowOnNode = flowsOnNode.get(i);
            Flow tempFlow = tempFlowOnNode.getFlow();
            if (tempFlow != null) {
                List<Action> oldActions = tempFlow.getActions();
                if (oldActions != null) {
                    List<Action> tempActions = new ArrayList<Action>();
                    tempActions.add(new Output(tempConnector));
                    if (tempActions.equals(oldActions)) {
                        log.trace("Deleting flow with outputAction " + tempConnector + " in the node " + node);
                        semaphore.tryAcquire();
                        try {
                            flowProgrammerService.removeFlow(tempNode, tempFlow);
                        } catch (RuntimeException e8) {
                            log.trace("Error removing flow");
                        }
                        log.trace("success removing flow");
                        semaphore.release();
                    }
                }
            }
        }
    }
    return result;
}
From source file:de.innovationgate.wgpublisher.hdb.HDBModel.java
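This example compares deserialized reader and editor lists against the page's stored lists with List.equals, saving the page only when the access lists have changed.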
private void initModel(Document modelDoc, WGContent parent, boolean completeReinit) throws WGAPIException {
    WGContent document;

    if (modelDoc instanceof Storage) {
        Storage storage = (Storage) modelDoc;
        HDBModelParams params = new HDBModelParams(TYPE_STORAGE);
        params.setContentClass(storage.getStorageId());
        if (parent != null) {
            if (_hdb.isStorage(parent)) {
                document = _hdb.getOrCreateStorage(parent, storage.getStorageId(), params);
            } else {
                document = _hdb.getOrCreateUIDContent(parent, storage.getStorageId(), params);
            }
        } else {
            document = _hdb.getOrCreateStorage(storage.getStorageId(), params);
        }

        // If the storage does not yet have a listener item we regard it as just created and initialize it
        if (!document.hasItem(WGHierarchicalDatabase.ITEMNAME_LISTENER_CLASS_OR_MODULE)) {
            document.setItemValue(WGHierarchicalDatabase.ITEMNAME_LISTENER_CLASS_OR_MODULE,
                    HDBModelListener.class.getName());
            document.setItemValue(ITEM_STORAGE_ID, storage.getStorageId());
            document.setContentClass(getStorageContentClass(storage));
            document.save();
        }

        // Ensure type is set. May be unset for: just created "real" HDB storages, storages from earlier HDBModel versions
        if (!document.hasItem(ITEM_TYPE)) {
            document.setItemValue(ITEM_TYPE, TYPE_STORAGE);
            document.save();
        }

        // Ensure correct position among sibling storages
        DocumentParent parentModel = _definition.getDocumentParent(storage);
        List<Storage> siblings = null;
        if (parentModel instanceof Content) {
            siblings = ((Content) parentModel).getChildStorages();
        } else if (parentModel instanceof Storage) {
            siblings = ((Storage) parentModel).getChildStorages();
        } else if (parentModel instanceof ModelDefinition) {
            siblings = ((ModelDefinition) parentModel).getRootStorages();
        }

        WGStructEntry page = document.getStructEntry();
        boolean pageEdited = false;
        if (siblings != null) {
            int idx = siblings.indexOf(storage);
            int position = (idx + 1) * 10;
            if (page.getPosition().intValue() != position) {
                page.setPosition(position);
                pageEdited = true;
            }
        }

        // Enforce accessibility rights
        if (storage.getReaders() != null) {
            List<String> readers = WGUtils.deserializeCollection(storage.getReaders(), ",", true);
            if (!readers.equals(page.getReaders())) {
                page.setReaders(readers);
                pageEdited = true;
            }
        }
        if (storage.getEditors() != null) {
            List<String> childEditors = WGUtils.deserializeCollection(storage.getEditors(), ",", true);
            if (!childEditors.equals(page.getChildEditors())) {
                page.setChildEditors(childEditors);
                pageEdited = true;
            }
        }
        if (pageEdited) {
            page.save();
        }

        // Initialize eventual children
        Iterator<SingletonContent> childSingletonContents = storage.getChildSingletonContents().iterator();
        while (childSingletonContents.hasNext()) {
            SingletonContent child = childSingletonContents.next();
            initModel(child, document, completeReinit);
        }
        Iterator<Storage> childStorages = storage.getChildStorages().iterator();
        while (childStorages.hasNext()) {
            Storage child = childStorages.next();
            initModel(child, document, completeReinit);
        }
        Iterator<Content> childContents = storage.getChildContents().iterator();
        while (childContents.hasNext()) {
            Content child = childContents.next();
            initModel(child, document, completeReinit);
        }
    } else if (modelDoc instanceof SingletonContent) {
        SingletonContent sContent = (SingletonContent) modelDoc;
        HDBModelParams params = new HDBModelParams(TYPE_CONTENT);
        params.setCreateContentID(sContent.getContentId());
        params.setContentClass(sContent.getContentId());
        document = _hdb.getOrCreateUIDContent(parent, sContent.getContentId(), params);

        // If the document does not yet have a type item we regard it as just created and initialize it
        if (!document.hasItem(WGHierarchicalDatabase.ITEMNAME_LISTENER_CLASS_OR_MODULE)) {
            document.setItemValue(WGHierarchicalDatabase.ITEMNAME_LISTENER_CLASS_OR_MODULE,
                    HDBModelListener.class.getName());
            document.save();
        }

        Map<String, Object> itemDefaultValues = fetchItemDefaultValues(sContent, sContent.getItems(), document);
        if (initContentItems(document, itemDefaultValues)) {
            LOG.debug("Initializing items on document '" + document.getDocumentKey() + "'");
            document.saveWithGivenTimestamps(document.getCreated(), document.getLastModified());
        }
    }
    // We only initialize beyond contents if the model version changed
    else if (modelDoc instanceof Content) {
        Content content = (Content) modelDoc;
        if (parent != null && completeReinit) {
            Iterator<WGContent> childContents = parent.getChildContentIterator(10);
            while (childContents.hasNext()) {
                WGContent childContent = childContents.next();
                if (content.getContentClass().equals(childContent.getContentClass())) {
                    Map<String, Object> itemDefaultValues = fetchItemDefaultValues(content, content.getItems(),
                            childContent);
                    boolean somethingDone = false;
                    if (initContentItems(childContent, itemDefaultValues)) {
                        somethingDone = true;
                    }
                    if (HDBModel.updateParentRelations(childContent)) {
                        somethingDone = true;
                    }
                    if (updateContentUniqueName(childContent)) {
                        somethingDone = true;
                    }
                    if (somethingDone) {
                        LOG.debug("Initializing data on document '" + childContent.getDocumentKey() + "'");
                        childContent.saveWithGivenTimestamps(childContent.getCreated(),
                                childContent.getLastModified());
                    }

                    // Initialize children
                    Iterator<Storage> childStorages = content.getChildStorages().iterator();
                    while (childStorages.hasNext()) {
                        Storage child = childStorages.next();
                        initModel(child, childContent, completeReinit);
                    }
                }
                performReinitSessionRefresh();
            }
        }
    }
}
From source file:squash.booking.lambdas.core.RuleManagerTest.java
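A JUnit test that verifies applyRules returns exactly the bookings it created by comparing the returned list with the expected list via equals.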
@Test
public void testApplyRulesReturnsBookingsItHasMade_MultipleBookings() throws Exception {
    // applyRules should return a list of the rule-based bookings it has made -
    // so that, e.g., they can be backed up.

    // ARRANGE
    initialiseRuleManager();
    List<BookingRule> existingRules = new ArrayList<>();
    existingRules.addAll(existingBookingRules);

    // Add second booking rule for the same day of the week as an existing one
    BookingRule sameDayRule = new BookingRule(existingFridayRecurringRuleWithoutExclusions);
    // Tweak so does not clash with existing rule
    sameDayRule.getBooking()
            .setCourt(sameDayRule.getBooking().getCourt() + sameDayRule.getBooking().getCourtSpan());
    sameDayRule.getBooking()
            .setSlot(sameDayRule.getBooking().getSlot() + sameDayRule.getBooking().getSlotSpan());
    existingRules.add(sameDayRule);

    expectOptimisticPersisterToReturnVersionedAttributes(2, existingRules);
    expectBookingManagerCall(existingFridayRecurringRuleWithoutExclusions.getBooking());
    expectBookingManagerCall(sameDayRule.getBooking());
    expectPurgeExpiredRulesAndRuleExclusions(42, existingRules);

    List<Booking> expectedBookings = new ArrayList<>();
    expectedBookings.add(existingFridayRecurringRuleWithoutExclusions.getBooking());
    expectedBookings.add(sameDayRule.getBooking());

    // ACT
    // This should create two bookings for the specified date
    List<Booking> bookings = ruleManager.applyRules(sameDayRule.getBooking().getDate(), false);

    // ASSERT
    assertTrue("Unexpected bookings returned by applyRules", bookings.equals(expectedBookings));
}
From source file:com.ciphertool.genetics.algorithms.mutation.GroupMutationAlgorithmTest.java
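Here, assertFalse(originalGenes.equals(...)) confirms that the group mutation actually changed the chromosome's gene list.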
@Test
public void testMutateRandomGeneGroupBoundaryConditions() {
    MockKeylessChromosome mockKeylessChromosome = new MockKeylessChromosome();
    List<MockGene> originalGenes = new ArrayList<>();

    MockGene mockGene1 = new MockGene();
    mockGene1.addSequence(new MockSequence("g"));
    mockGene1.addSequence(new MockSequence("e"));
    mockGene1.addSequence(new MockSequence("o"));
    mockGene1.addSequence(new MockSequence("r"));
    mockGene1.addSequence(new MockSequence("g"));
    mockGene1.addSequence(new MockSequence("e"));
    mockKeylessChromosome.addGene(mockGene1);
    originalGenes.add(mockGene1);

    MockGene mockGene2 = new MockGene();
    mockGene2.addSequence(new MockSequence("b"));
    mockGene2.addSequence(new MockSequence("e"));
    mockGene2.addSequence(new MockSequence("l"));
    mockGene2.addSequence(new MockSequence("d"));
    mockGene2.addSequence(new MockSequence("e"));
    mockGene2.addSequence(new MockSequence("n"));
    mockKeylessChromosome.addGene(mockGene2);
    originalGenes.add(mockGene2);

    MockGene mockGeneOfSize3 = new MockGene();
    mockGeneOfSize3.addSequence(new MockSequence("x"));
    mockGeneOfSize3.addSequence(new MockSequence("y"));
    mockGeneOfSize3.addSequence(new MockSequence("z"));

    when(geneDaoMock.findRandomGene(same(mockKeylessChromosome))).thenReturn(mockGeneOfSize3);

    List<Integer> availableIndices = new ArrayList<Integer>();
    availableIndices.add(0);
    availableIndices.add(1);
    groupMutationAlgorithm.mutateRandomGeneGroup(mockKeylessChromosome, availableIndices, 3);

    assertFalse(originalGenes.equals(mockKeylessChromosome.getGenes()));
    assertTrue(availableIndices.isEmpty());
    assertEquals(12, mockKeylessChromosome.actualSize().intValue());
    System.out.println(mockKeylessChromosome);
    assertEquals(4, mockKeylessChromosome.getGenes().size());
    assertEquals(mockGeneOfSize3, mockKeylessChromosome.getGenes().get(0));
    assertEquals(mockGeneOfSize3, mockKeylessChromosome.getGenes().get(1));
    assertEquals(mockGeneOfSize3, mockKeylessChromosome.getGenes().get(2));
    assertEquals(mockGeneOfSize3, mockKeylessChromosome.getGenes().get(3));
    verify(geneDaoMock, times(4)).findRandomGene(same(mockKeylessChromosome));
    verifyZeroInteractions(logMock);
}
From source file:com.rapid.core.Page.java
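In this example, controls.equals(_controls) tests whether the recursion has reached the page's root control list before emitting page-level event handlers.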
private void getEventHandlersJavaScript(RapidRequest rapidRequest, StringBuilder stringBuilder,
        Application application, List<Control> controls) throws JSONException {
    // check there are some controls
    if (controls != null) {
        // if we're at the root of the page
        if (controls.equals(_controls)) {
            // check for page events
            if (_events != null) {
                // loop page events and get js functions
                for (Event event : _events)
                    getEventJavaScriptFunction(rapidRequest, stringBuilder, application, null, event);
            }
        }
        for (Control control : controls) {
            // check event actions
            if (control.getEvents() != null) {
                // loop control events and get js functions
                for (Event event : control.getEvents())
                    getEventJavaScriptFunction(rapidRequest, stringBuilder, application, control, event);
            }
            // now call iteratively for child controls (of this [child] control, etc.)
            if (control.getChildControls() != null)
                getEventHandlersJavaScript(rapidRequest, stringBuilder, application,
                        control.getChildControls());
        }
    }
}
From source file:com.netflix.genie.agent.execution.services.impl.LaunchJobServiceImpl.java
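Here, List.equals detects whether variable expansion changed the command line, so the expanded form is only logged when it differs from the original.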
/**
 * {@inheritDoc}
 */
@Override
public void launchProcess(final File jobDirectory, final Map<String, String> environmentVariablesMap,
        final List<String> commandLine, final boolean interactive) throws JobLaunchException {

    if (!launched.compareAndSet(false, true)) {
        throw new IllegalStateException("Job already launched");
    }

    final ProcessBuilder processBuilder = new ProcessBuilder();

    // Validate job running directory
    if (jobDirectory == null) {
        throw new JobLaunchException("Job directory is null");
    } else if (!jobDirectory.exists()) {
        throw new JobLaunchException("Job directory does not exist: " + jobDirectory);
    } else if (!jobDirectory.isDirectory()) {
        throw new JobLaunchException("Job directory is not a directory: " + jobDirectory);
    } else if (!jobDirectory.canWrite()) {
        throw new JobLaunchException("Job directory is not writable: " + jobDirectory);
    }

    final Map<String, String> currentEnvironmentVariables = processBuilder.environment();

    if (environmentVariablesMap == null) {
        throw new JobLaunchException("Job environment variables map is null");
    }

    // Merge job environment variables into process inherited environment
    environmentVariablesMap.forEach((key, value) -> {
        final String replacedValue = currentEnvironmentVariables.put(key, value);
        if (StringUtils.isBlank(replacedValue)) {
            log.debug("Added job environment variable: {}={}", key, value);
        } else if (!replacedValue.equals(value)) {
            log.debug("Set job environment variable: {}={} (previous value: {})", key, value, replacedValue);
        }
    });

    // Validate arguments
    if (commandLine == null) {
        throw new JobLaunchException("Job command-line arguments is null");
    } else if (commandLine.isEmpty()) {
        throw new JobLaunchException("Job command-line arguments are empty");
    }

    // Configure arguments
    log.info("Job command-line: {}", Arrays.toString(commandLine.toArray()));

    final List<String> expandedCommandLine;
    try {
        expandedCommandLine = expandCommandLineVariables(commandLine,
                Collections.unmodifiableMap(currentEnvironmentVariables));
    } catch (final EnvUtils.VariableSubstitutionException e) {
        throw new JobLaunchException("Job command-line arguments variables could not be expanded");
    }

    if (!commandLine.equals(expandedCommandLine)) {
        log.info("Job command-line with variables expanded: {}",
                Arrays.toString(expandedCommandLine.toArray()));
    }

    processBuilder.command(expandedCommandLine);

    if (interactive) {
        processBuilder.inheritIO();
    } else {
        processBuilder.redirectError(PathUtils.jobStdErrPath(jobDirectory).toFile());
        processBuilder.redirectOutput(PathUtils.jobStdOutPath(jobDirectory).toFile());
    }

    if (killed.get()) {
        log.info("Job aborted, skipping launch");
    } else {
        log.info("Launching job");
        try {
            processReference.set(processBuilder.start());
        } catch (final IOException | SecurityException e) {
            throw new JobLaunchException("Failed to launch job: ", e);
        }
        log.info("Process launched (pid: {})", getPid(processReference.get()));
    }
}
From source file:com.ciphertool.genetics.algorithms.mutation.SingleSequenceMutationAlgorithmTest.java
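Another mutation test: the original gene list is compared against the mutated chromosome's genes to confirm the mutation took effect.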
@Test
public void testMutateChromosome() {
    singleSequenceMutationAlgorithm.setMaxMutationsPerChromosome(MAX_MUTATIONS);

    MockKeylessChromosome mockKeylessChromosome = new MockKeylessChromosome();
    List<Gene> originalGenes = new ArrayList<Gene>();

    MockGene mockGene1 = new MockGene();
    mockGene1.addSequence(new MockSequence("w"));
    mockGene1.addSequence(new MockSequence("e"));
    mockKeylessChromosome.addGene(mockGene1);
    originalGenes.add(mockGene1);

    MockGene mockGene2 = new MockGene();
    mockGene2.addSequence(new MockSequence("s"));
    mockGene2.addSequence(new MockSequence("m"));
    mockGene2.addSequence(new MockSequence("i"));
    mockGene2.addSequence(new MockSequence("l"));
    mockGene2.addSequence(new MockSequence("e"));
    mockKeylessChromosome.addGene(mockGene2);
    originalGenes.add(mockGene2);

    when(sequenceDaoMock.findRandomSequence(any(Gene.class), anyInt())).thenReturn(new MockSequence("x"));

    singleSequenceMutationAlgorithm.mutateChromosome(mockKeylessChromosome);

    MockGene originalMockGene1 = new MockGene();
    MockSequence mockGene1Sequence1 = new MockSequence("w");
    originalMockGene1.addSequence(mockGene1Sequence1);
    mockGene1Sequence1.setGene(mockGene1);
    MockSequence mockGene1Sequence2 = new MockSequence("e");
    originalMockGene1.addSequence(mockGene1Sequence2);
    mockGene1Sequence2.setGene(mockGene1);
    originalMockGene1.setChromosome(mockKeylessChromosome);

    MockGene originalMockGene2 = new MockGene();
    MockSequence mockGene2Sequence1 = new MockSequence("s");
    originalMockGene2.addSequence(mockGene2Sequence1);
    mockGene2Sequence1.setGene(mockGene2);
    MockSequence mockGene2Sequence2 = new MockSequence("m");
    originalMockGene2.addSequence(mockGene2Sequence2);
    mockGene2Sequence2.setGene(mockGene2);
    MockSequence mockGene2Sequence3 = new MockSequence("i");
    originalMockGene2.addSequence(mockGene2Sequence3);
    mockGene2Sequence3.setGene(mockGene2);
    MockSequence mockGene2Sequence4 = new MockSequence("l");
    originalMockGene2.addSequence(mockGene2Sequence4);
    mockGene2Sequence4.setGene(mockGene2);
    MockSequence mockGene2Sequence5 = new MockSequence("e");
    originalMockGene2.addSequence(mockGene2Sequence5);
    mockGene2Sequence5.setGene(mockGene2);
    originalMockGene2.setChromosome(mockKeylessChromosome);

    assertFalse(originalGenes.equals(mockKeylessChromosome.getGenes()));
    verify(sequenceDaoMock, atLeastOnce()).findRandomSequence(any(Gene.class), anyInt());
    verify(sequenceDaoMock, atMost(2)).findRandomSequence(any(Gene.class), anyInt());
    verifyZeroInteractions(logMock);
}
From source file:com.spotify.helios.master.ZooKeeperMasterModel.java
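This ZooKeeper-backed model compares the requested host list with the currently stored hosts and only starts the update transaction when the node is missing or the hosts differ.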
@Override
public void updateDeploymentGroupHosts(final String groupName, final List<String> hosts)
        throws DeploymentGroupDoesNotExistException {
    log.debug("updating deployment-group hosts: name={}", groupName);
    final ZooKeeperClient client = provider.get("updateDeploymentGroupHosts");
    try {
        Optional<Integer> curHostsVersion = Optional.absent();
        List<String> curHosts;
        try {
            // addDeploymentGroup creates Paths.statusDeploymentGroupHosts(name) so it should always
            // exist. If it doesn't, then the DG was (likely) deleted.
            final Node chn = client.getNode(Paths.statusDeploymentGroupHosts(groupName));
            curHostsVersion = Optional.of(chn.getStat().getVersion());
            curHosts = Json.read(chn.getBytes(), new TypeReference<List<String>>() {
            });
        } catch (JsonMappingException e) {
            curHosts = Collections.emptyList();
        }

        final DeploymentGroupStatus status = getDeploymentGroupStatus(groupName);
        if (!allowHostChange(status)) {
            return;
        }

        if (!curHostsVersion.isPresent() || !hosts.equals(curHosts)) {
            // Node not present or hosts have changed
            final List<ZooKeeperOperation> ops = Lists.newArrayList();
            ops.add(set(Paths.statusDeploymentGroupHosts(groupName), Json.asBytes(hosts)));

            final Node dgn = client.getNode(Paths.configDeploymentGroup(groupName));
            final Integer deploymentGroupVersion = dgn.getStat().getVersion();

            DeploymentGroup deploymentGroup = Json.read(dgn.getBytes(), DeploymentGroup.class);
            ImmutableList<Map<String, Object>> events = ImmutableList.of();

            if (deploymentGroup.getJobId() != null) {
                if (updateOnHostChange(deploymentGroup, status)) {
                    deploymentGroup = deploymentGroup.toBuilder().setRollingUpdateReason(HOSTS_CHANGED).build();

                    // Fail transaction if the deployment group has been updated elsewhere.
                    ops.add(check(Paths.configDeploymentGroup(groupName), deploymentGroupVersion));

                    // NOTE: If the DG was removed this set() causes the transaction to fail, because
                    // removing the DG removes this node. It's *important* that there's an operation that
                    // causes the transaction to fail if the DG was removed or we'll end up with
                    // inconsistent state.
                    ops.add(set(Paths.configDeploymentGroup(groupName), deploymentGroup));

                    final RollingUpdateOp op = getInitRollingUpdateOps(deploymentGroup, hosts, client);
                    ops.addAll(op.operations());
                    events = op.events();
                }
            }

            log.info("starting zookeeper transaction for updateDeploymentGroupHosts on "
                            + "deployment-group name {} jobId={}. List of operations: {}",
                    groupName, deploymentGroup.getJobId(), ops);

            client.transaction(ops);
            emitEvents(DEPLOYMENT_GROUP_EVENTS_KAFKA_TOPIC, events);
        }
    } catch (NoNodeException e) {
        throw new DeploymentGroupDoesNotExistException(groupName, e);
    } catch (KeeperException | IOException e) {
        throw new HeliosRuntimeException("updating deployment group hosts failed", e);
    }
}