List of usage examples for the java.util.Random.setSeed(long) method
public synchronized void setSeed(long seed)
From source file:com.evolveum.polygon.connector.ldap.schema.AbstractSchemaTranslator.java
/**
 * Hashes a cleartext password and formats it as an LDAP userPassword value of
 * the form {ALG}base64(digest [+ salt]).
 *
 * Supported schemes: SSHA/SHA (SHA-1) and SMD5/MD5. For the salted variants
 * (SSHA, SMD5) an 8-byte random salt is appended to the digest inside the
 * base64 payload (RFC 2307 style), so a verifier can recover the salt.
 *
 * @param clear cleartext password bytes to hash
 * @param alg   scheme name: SSHA, SHA, SMD5 or MD5 (case-insensitive)
 * @param seed  caller-supplied extra entropy mixed into the salt generator
 * @return LDAP-formatted hash string, e.g. {SSHA}base64...
 * @throws ConnectorException if the algorithm is unknown or unavailable
 */
private String hashBytes(byte[] clear, String alg, long seed) {
    MessageDigest md = null;
    try {
        if (alg.equalsIgnoreCase("SSHA") || alg.equalsIgnoreCase("SHA")) {
            md = MessageDigest.getInstance("SHA-1");
        } else if (alg.equalsIgnoreCase("SMD5") || alg.equalsIgnoreCase("MD5")) {
            md = MessageDigest.getInstance("MD5");
        }
    } catch (NoSuchAlgorithmException e) {
        // FIX: preserve the original exception as the cause instead of dropping it.
        throw new ConnectorException("Could not find MessageDigest algorithm: " + alg, e);
    }
    if (md == null) {
        throw new ConnectorException("Unsupported MessageDigest algorithm: " + alg);
    }
    byte[] salt = {};
    if (alg.equalsIgnoreCase("SSHA") || alg.equalsIgnoreCase("SMD5")) {
        // FIX: the salt is security-relevant, so use SecureRandom instead of
        // java.util.Random seeded from the wall clock — a time-seeded PRNG yields
        // predictable salts. SecureRandom.setSeed() SUPPLEMENTS the internal
        // entropy rather than replacing it, so the caller's seed is still honored
        // without weakening the generator. Fully qualified to keep the file's
        // import block untouched.
        java.security.SecureRandom rnd = new java.security.SecureRandom();
        rnd.setSeed(seed);
        salt = new byte[8];
        rnd.nextBytes(salt);
    }
    md.reset();
    md.update(clear);
    md.update(salt);
    byte[] hash = md.digest();
    // Payload layout: digest bytes followed by the salt bytes.
    byte[] hashAndSalt = new byte[hash.length + salt.length];
    System.arraycopy(hash, 0, hashAndSalt, 0, hash.length);
    System.arraycopy(salt, 0, hashAndSalt, hash.length, salt.length);
    StringBuilder resSb = new StringBuilder(alg.length() + hashAndSalt.length);
    resSb.append('{');
    resSb.append(alg);
    resSb.append('}');
    resSb.append(Base64.encode(hashAndSalt));
    return resSb.toString();
}
From source file:edu.illinois.enforcemop.examples.apache.pool.TestGenericObjectPool.java
/**
 * Verifies that the evictor's internal cursor cycles through idle instances in
 * the expected order, for both LIFO and FIFO pool configurations.
 *
 * Three phases: (1) fixed scenario with numTestsPerEvictionRun = 2,
 * (2) fixed scenario with numTestsPerEvictionRun = 3 where the cursor must
 * survive interleaved borrow/return traffic, (3) randomized pools where each
 * instance must have been visited either cycleCount or cycleCount + 1 times.
 *
 * @param lifo whether the pool under test returns instances LIFO or FIFO
 * @throws Exception on any pool failure (test method — propagated to the runner)
 */
private void checkEvictorVisiting(boolean lifo) throws Exception {
    VisitTrackerFactory factory = new VisitTrackerFactory();
    GenericObjectPool pool = new GenericObjectPool(factory);
    pool.setNumTestsPerEvictionRun(2);
    // Disable time-based eviction so only the visit bookkeeping is exercised.
    pool.setMinEvictableIdleTimeMillis(-1);
    pool.setTestWhileIdle(true);
    pool.setLifo(lifo);
    pool.setTestOnReturn(false);
    pool.setTestOnBorrow(false);
    for (int i = 0; i < 8; i++) {
        pool.addObject();
    }
    pool.evict(); // Visit oldest 2 - 0 and 1
    Object obj = pool.borrowObject();
    pool.returnObject(obj);
    obj = pool.borrowObject();
    pool.returnObject(obj);
    // borrow, return, borrow, return
    // FIFO will move 0 and 1 to end
    // LIFO, 7 out, then in, then out, then in
    pool.evict(); // Should visit 2 and 3 in either case
    for (int i = 0; i < 8; i++) {
        VisitTracker tracker = (VisitTracker) pool.borrowObject();
        if (tracker.getId() >= 4) {
            // Instances 4..7 must not have been validated by the evictor yet.
            assertEquals("Unexpected instance visited " + tracker.getId(), 0,
                    tracker.getValidateCount());
        } else {
            // Instances 0..3 were each visited exactly once across the two runs.
            assertEquals("Instance " + tracker.getId() + " visited wrong number of times.", 1,
                    tracker.getValidateCount());
        }
    }
    // Phase 2: fresh pool, 3 tests per eviction run.
    factory = new VisitTrackerFactory();
    pool = new GenericObjectPool(factory);
    pool.setNumTestsPerEvictionRun(3);
    pool.setMinEvictableIdleTimeMillis(-1);
    pool.setTestWhileIdle(true);
    pool.setLifo(lifo);
    pool.setTestOnReturn(false);
    pool.setTestOnBorrow(false);
    for (int i = 0; i < 8; i++) {
        pool.addObject();
    }
    pool.evict(); // 0, 1, 2
    pool.evict(); // 3, 4, 5
    obj = pool.borrowObject();
    pool.returnObject(obj);
    obj = pool.borrowObject();
    pool.returnObject(obj);
    obj = pool.borrowObject();
    pool.returnObject(obj);
    // borrow, return, borrow, return
    // FIFO 3,4,5,6,7,0,1,2
    // LIFO 7,6,5,4,3,2,1,0
    // In either case, pointer should be at 6
    pool.evict(); // Should hit 6,7,0 - 0 for second time
    for (int i = 0; i < 8; i++) {
        VisitTracker tracker = (VisitTracker) pool.borrowObject();
        if (tracker.getId() != 0) {
            assertEquals("Instance " + tracker.getId() + " visited wrong number of times.", 1,
                    tracker.getValidateCount());
        } else {
            // Instance 0 wrapped around and was visited a second time.
            assertEquals("Instance " + tracker.getId() + " visited wrong number of times.", 2,
                    tracker.getValidateCount());
        }
    }
    // Phase 3: randomly generate pools with random numTests
    // and make sure the evictor cycles through elements appropriately.
    int[] smallPrimes = { 2, 3, 5, 7 };
    Random random = new Random();
    random.setSeed(System.currentTimeMillis());
    for (int i = 0; i < 4; i++) {
        // NOTE(review): this configures the PREVIOUS pool, which is replaced on
        // the next line of the inner loop where numTestsPerEvictionRun is reset
        // to 3 — so smallPrimes[i] never takes effect on the pool actually
        // tested. The assertions below remain self-consistent because cycleCount
        // reads pool.getNumTestsPerEvictionRun(); confirm whether varying the
        // prime was the original intent.
        pool.setNumTestsPerEvictionRun(smallPrimes[i]);
        for (int j = 0; j < 5; j++) {
            pool = new GenericObjectPool(factory);
            pool.setNumTestsPerEvictionRun(3);
            pool.setMinEvictableIdleTimeMillis(-1);
            pool.setTestWhileIdle(true);
            pool.setLifo(lifo);
            pool.setTestOnReturn(false);
            pool.setTestOnBorrow(false);
            pool.setMaxIdle(-1);
            int instanceCount = 10 + random.nextInt(20);
            pool.setMaxActive(instanceCount);
            for (int k = 0; k < instanceCount; k++) {
                pool.addObject();
            }
            // Execute a random number of evictor runs.
            int runs = 10 + random.nextInt(50);
            for (int k = 0; k < runs; k++) {
                pool.evict();
            }
            // Number of times the evictor should have cycled through the pool.
            int cycleCount = (runs * pool.getNumTestsPerEvictionRun()) / instanceCount;
            // Look at elements and make sure they are visited cycleCount
            // or cycleCount + 1 times.
            VisitTracker tracker = null;
            int visitCount = 0;
            for (int k = 0; k < instanceCount; k++) {
                tracker = (VisitTracker) pool.borrowObject();
                assertTrue(pool.getNumActive() <= pool.getMaxActive());
                visitCount = tracker.getValidateCount();
                assertTrue(visitCount >= cycleCount && visitCount <= cycleCount + 1);
            }
        }
    }
}
From source file:com.hichinaschool.flashcards.libanki.Sched.java
private boolean _fillLrnDay() { if (mLrnCount == 0) { return false; }/*from ww w . ja va 2 s .com*/ if (!mLrnDayQueue.isEmpty()) { return true; } while (mLrnDids.size() > 0) { long did = mLrnDids.getFirst(); // fill the queue with the current did mLrnDayQueue.clear(); Cursor cur = null; try { cur = mCol.getDb().getDatabase().rawQuery("SELECT id FROM cards WHERE did = " + did + " AND queue = 3 AND due <= " + mToday + " LIMIT " + mQueueLimit, null); while (cur.moveToNext()) { mLrnDayQueue.add(cur.getLong(0)); } } finally { if (cur != null && !cur.isClosed()) { cur.close(); } } if (mLrnDayQueue.size() > 0) { // order Random r = new Random(); r.setSeed(mToday); Collections.shuffle(mLrnDayQueue, r); // is the current did empty? if (mLrnDayQueue.size() < mQueueLimit) { mLrnDids.remove(); } return true; } // nothing left in the deck; move to next mLrnDids.remove(); } return false; }
From source file:com.hichinaschool.flashcards.libanki.Sched.java
/**
 * Refills the review queue (cards with queue = 2) from the next deck in
 * {@code mRevDids} that still has a non-zero per-deck review limit and cards
 * due today. Non-dynamic decks get a deterministic per-day shuffle (seeded by
 * {@code mToday}); dynamic decks keep their due order.
 *
 * @return true when the review queue has at least one card, false when there
 *         is genuinely nothing left to review.
 */
private boolean _fillRev() {
    if (!mRevQueue.isEmpty()) {
        return true;
    }
    if (mRevCount == 0) {
        return false;
    }
    while (mRevDids.size() > 0) {
        long did = mRevDids.getFirst();
        // Effective limit: global queue limit capped by this deck's own limit.
        int lim = Math.min(mQueueLimit, _deckRevLimit(did));
        Cursor cur = null;
        if (lim != 0) {
            mRevQueue.clear();
            // fill the queue with the current did
            try {
                cur = mCol.getDb().getDatabase().rawQuery("SELECT id FROM cards WHERE did = " + did
                        + " AND queue = 2 AND due <= " + mToday + " LIMIT " + lim, null);
                while (cur.moveToNext()) {
                    mRevQueue.add(cur.getLong(0));
                }
            } finally {
                if (cur != null && !cur.isClosed()) {
                    cur.close();
                }
            }
            if (!mRevQueue.isEmpty()) {
                // ordering
                try {
                    if (mCol.getDecks().get(did).getInt("dyn") != 0) {
                        // dynamic decks need due order preserved
                        // Note: libanki reverses mRevQueue and returns the last element in _getRevCard().
                        // AnkiDroid differs by leaving the queue intact and returning the *first* element
                        // in _getRevCard().
                    } else {
                        // Deterministic per-day shuffle for regular decks.
                        Random r = new Random();
                        r.setSeed(mToday);
                        Collections.shuffle(mRevQueue, r);
                    }
                } catch (JSONException e) {
                    // Deck config is stored as JSON; a malformed "dyn" field is a
                    // programming/data error, so surface it unchecked with its cause.
                    throw new RuntimeException(e);
                }
                // is the current did empty?
                if (mRevQueue.size() < lim) {
                    mRevDids.remove();
                }
                return true;
            }
        }
        // nothing left in the deck; move to next
        mRevDids.remove();
    }
    if (mRevCount != 0) {
        // if we didn't get a card but the count is non-zero,
        // we need to check again for any cards that were
        // removed from the queue but not buried
        _resetRev();
        return _fillRev();
    }
    return false;
}
From source file:org.fhaes.jsea.JSEAStatsFunctions.java
/** * TODO/*w ww . ja v a 2 s.co m*/ * * @param titleForRun * @param outputFilePrefix * @param seedNumber * @param yearsPriorToEvent * @param yearsAfterTheEvent * @param numberOfSimulations * @param firstYearOfProcess * @param lastYearOfProcess * @param includeIncompleteEpochs * @param randomSampling * @param chronologyYears * @param chronologyActual * @param events * @param growth * @param save * @param usingSegmentation * @param segmentTable * @param chronologyFile * @param alphaLevel95 * @param alphaLevel99 * @param alphaLevel999 */ public JSEAStatsFunctions(String titleForRun, String outputFilePrefix, Integer seedNumber, Integer yearsPriorToEvent, Integer yearsAfterTheEvent, Integer numberOfSimulations, Integer firstYearOfProcess, Integer lastYearOfProcess, boolean includeIncompleteEpochs, boolean randomSampling, ArrayList<Integer> chronologyYears, ArrayList<Double> chronologyActual, ArrayList<Integer> events, boolean growth, boolean save, boolean usingSegmentation, SegmentTable segmentTable, String chronologyFile, boolean alphaLevel95, boolean alphaLevel99, boolean alphaLevel999, boolean doZScore) { long begintime = System.currentTimeMillis(); this.titleForRun = titleForRun; this.outputFilePrefix = outputFilePrefix; this.yearsPriorToEvent = yearsPriorToEvent; this.yearsAfterTheEvent = yearsAfterTheEvent; this.randomSampling = randomSampling; this.numberOfSimulations = numberOfSimulations; this.seedNumber = seedNumber; this.firstYearOfProcess = firstYearOfProcess; this.lastYearOfProcess = lastYearOfProcess; // this.excludeIncompleteEpochs = excludeIncompleteEpochs; this.includeIncompleteEpochs = includeIncompleteEpochs; this.chronologyYears = chronologyYears; this.chronologyActual = chronologyActual; this.events = events; this.isFirstIteration = true; this.save = save; this.growth = growth; this.usingSegmentation = usingSegmentation; this.segmentTable = segmentTable; this.chronologyFile = chronologyFile; this.alphaLevel95 = alphaLevel95; this.alphaLevel99 = 
alphaLevel99; this.alphaLevel999 = alphaLevel999; this.doZScore = doZScore; log.debug("this.titleForRun = " + titleForRun); log.debug("this.outputFilePrefix = " + outputFilePrefix); log.debug("this.yearsPriorToEvent = " + yearsPriorToEvent); log.debug("this.yearsAfterTheEvent = " + yearsAfterTheEvent); log.debug("this.randomSampling = " + randomSampling); log.debug("this.numberOfSimulations = " + numberOfSimulations); log.debug("this.seedNumber = " + seedNumber); log.debug("this.firstYearOfProcess = " + firstYearOfProcess); log.debug("this.lastYearOfProcess = " + lastYearOfProcess); // log.debug("this.excludeIncompleteEpochs = "+excludeIncompleteEpochs); log.debug("this.includeIncompleteEpochs = " + includeIncompleteEpochs); log.debug("this.chronologyYears = " + chronologyYears); log.debug("this.chronologyActual = " + chronologyActual); log.debug("this.events = " + events); log.debug("this.save = " + save); log.debug("this.growth = " + growth); log.debug("this.usingSegmentation = " + usingSegmentation); // log.debug("this.segmentTable = earliestYear " + segmentTable.getEarliestYear() + ", latestYear " + segmentTable.getLatestYear()); log.debug("this.chronologyFile = " + chronologyFile); log.debug("this.alphaLevel95 = " + alphaLevel95); log.debug("this.alphaLevel99 = " + alphaLevel99); log.debug("this.alphaLevel999 = " + alphaLevel999); /* * Setting the three decimal format */ DecimalFormat threePlacess = new DecimalFormat("0.000"); /* * Creating the date of the run of the program */ Date now = new Date(); /* * Creating the files necessary (two txt files) */ // File outputFile = new File(outputFilePrefix + ".out"); // Writer wr; // String bigbuffer = ""; report = new String(""); actualTable = new String(""); simulationTable = new String(""); cdbuffer = new String(""); pdfbufferA = new String(""); pdfbufferB = new String(""); pdfbufferpar1 = new String(""); pdfbufferpar2 = new String(""); /* * Converting Arraylists into arrays chronologyActual into chronoActual 
chronologyYears into yearsActual events into keyEvents */ chronoActual = new Double[chronologyActual.size()]; chronoActual = chronologyActual.toArray(chronoActual); yearsActual = new Integer[chronologyYears.size()]; yearsActual = chronologyYears.toArray(yearsActual); Collections.sort(events); /* * Setting default values for first yearofprocess, lastyearofprocess recall the firstYearchrono is set as the default on the * firtYearOfProcess. also firstYearchrono is set as the default for firstYearsegment lastYearchrono is set as the default of the * lastYearOfProcess */ if (firstYearOfProcess == 0) { firstYearOfProcess = yearsActual[0]; } if (lastYearOfProcess == 0) { lastYearOfProcess = yearsActual[yearsActual.length]; } if (numberOfSimulations == 0) { System.out.println("the number of simulations need to be set"); } /* * 1. statistical Analysis of the whole time series chronology 2. statistical Analysis of the adjusted time series chronologyAdj 3. * statistical Analysis of the whole Event list events 4. print using the method printReport */ // Statistical Analysis for the whole Climate Series DescriptiveStatistics stats = new DescriptiveStatistics(); dchronoActual = new double[chronologyActual.size()]; // Add the data from the array for (int i = 0; i < chronoActual.length; i++) { stats.addValue(chronoActual[i].doubleValue()); dchronoActual[i] = chronoActual[i].doubleValue(); } // Obtain the mean sensitivity meanSensitivity = 0; for (int i = 1; i < chronoActual.length; i++) { double senDenominator = Math.abs(dchronoActual[i]) + Math.abs(dchronoActual[i - 1]); if (senDenominator != 0) { meanSensitivity = meanSensitivity + Math.abs(2 * (dchronoActual[i] - dchronoActual[i - 1])) / senDenominator; } } meanSensitivity = meanSensitivity / (dchronoActual.length - 1); /* * Obtain and display the general statistical information on the whole climate series. 
*/ mean = stats.getMean(); std = stats.getStandardDeviation(); median = StatUtils.percentile(dchronoActual, 50); kurt = stats.getKurtosis(); skew = stats.getSkewness(); /* * is segmentlength is different than 0 find the beginning and end year for each segment */ firstYearsArray = new ArrayList<Integer>(); lastYearsArray = new ArrayList<Integer>(); // NO SEGMENTATION IS USED if (!usingSegmentation) { firstYearsArray.add(firstYearOfProcess); lastYearsArray.add(lastYearOfProcess); } // SEGMENTATION IS USED AND HAS BEEN DEFINED if (usingSegmentation) { for (int i = 0; i < segmentTable.tableModel.getSegments().size(); i++) { firstYearsArray.add(segmentTable.tableModel.getSegment(i).getFirstYear()); lastYearsArray.add(segmentTable.tableModel.getSegment(i).getLastYear()); } } /* * set up the loop for the typed of segmentation */ /* * set the adjusted time series 1. set up the loop for the typed of segmentation 3.find the index of the first event in the actual * array. 2. adjust the series by yearsActual[indexofthefirstevent]-yearsPriortToEvent 3. adjust the series by * yearsActual[indexofthelasteventinseries]+yearsAfterTheEvent */ for (int segmentIndex = 0; segmentIndex < firstYearsArray.size(); segmentIndex++) { beginingYearAdj = chronologyYears.get(0).intValue(); lastYearAdj = chronologyYears.get(chronologyYears.size() - 1).intValue(); firstYearOfProcess = firstYearsArray.get(segmentIndex); lastYearOfProcess = lastYearsArray.get(segmentIndex); if (firstYearOfProcess.intValue() > beginingYearAdj) { beginingYearAdj = firstYearOfProcess.intValue(); } if (lastYearOfProcess.intValue() < lastYearAdj) { lastYearAdj = lastYearOfProcess.intValue(); } /* * Obtain and display information on the Events actual Time span same as the adjusted. number of events. Events.size() and total * number of Events used. Mean years between events minimun differece between event years. 
* */ keventsinadj = new ArrayList<Integer>(); keventsinadjyeprior = new ArrayList<Integer>(); keventsinadjyeafter = new ArrayList<Integer>(); kevents = new ArrayList<Integer>(); numberOfEventsinAdj = 0; for (int i = 0; i < events.size(); i++) { if (chronologyYears.contains(events.get(i))) { // System.out.println("the chronologyYears contains event " + i + "\t" // + beginingYearAdj + "\t" + lastYearAdj); if ((beginingYearAdj <= events.get(i).intValue()) && (events.get(i).intValue() <= lastYearAdj)) { kevents.add(events.get(i)); } } if ((chronologyYears.contains(events.get(i))) && (!includeIncompleteEpochs)) { if (((events.get(i).intValue() - beginingYearAdj) >= yearsPriorToEvent.intValue()) && ((lastYearAdj - events.get(i).intValue()) >= yearsAfterTheEvent.intValue())) { numberOfEventsinAdj = numberOfEventsinAdj + 1; keventsinadj.add(events.get(i)); } ; } ;// end of exclude incomplete epochs if ((chronologyYears.contains(events.get(i))) && (includeIncompleteEpochs)) { if ((beginingYearAdj <= events.get(i).intValue()) && (events.get(i).intValue() <= lastYearAdj)) { numberOfEventsinAdj = numberOfEventsinAdj + 1; keventsinadj.add(events.get(i)); // if ((events.get(i).intValue() - beginingYearAdj) < yearsPriorToEvent.intValue()) { keventsinadjyeprior.add(events.get(i).intValue() - beginingYearAdj); } else { keventsinadjyeprior.add(yearsPriorToEvent); } if ((lastYearAdj - events.get(i).intValue()) < yearsAfterTheEvent.intValue()) { keventsinadjyeafter.add(lastYearAdj - events.get(i).intValue()); } else { keventsinadjyeafter.add(yearsAfterTheEvent.intValue()); } // } ; } ; // end of include incomplete } ;// end of the loop for all events /* * set up if statement so that if we have two or less key events in the chronology we do not do anything */ // System.out.println("size of kevents is " + kevents.size()); if (kevents.size() >= 2) { keyEvents = new int[kevents.size()]; for (int i = 0; i < kevents.size(); i++) { keyEvents[i] = kevents.get(i).intValue(); } ; /* * Sorting 
keyEvents */ Arrays.sort(keyEvents); if (keventsinadj.size() >= 2) { keyEventsAdj = new int[numberOfEventsinAdj]; keyEventsAdjBeYear = new int[numberOfEventsinAdj]; keyEventsAdjLaYear = new int[numberOfEventsinAdj]; for (int i = 0; i < keventsinadj.size(); i++) { keyEventsAdj[i] = keventsinadj.get(i).intValue(); keyEventsAdjBeYear[i] = keyEventsAdj[i] - yearsPriorToEvent.intValue(); keyEventsAdjLaYear[i] = keyEventsAdj[i] + yearsAfterTheEvent.intValue(); } ; Arrays.sort(keyEventsAdj); // Calculate the difference between events load in array diffBetweenEvents = new double[keyEvents.length - 1]; sumOfDiff = 0; for (int i = 1; i < keyEvents.length; i++) { diffBetweenEvents[i - 1] = keyEvents[i] - keyEvents[i - 1]; sumOfDiff = sumOfDiff + diffBetweenEvents[i - 1]; } ; // Calculate the mean difference between events = // sum(y(i)-y(i-1))/total number of differences meanDiffBetweenEvents = sumOfDiff / diffBetweenEvents.length; // adjusting the beginning year that that it account for the events // years // and the beginning year of the process etc beginingYearAdj = Math.max(beginingYearAdj, (keyEvents[0] - yearsPriorToEvent)); lastYearAdj = Math.min(lastYearAdj, (keyEvents[keyEvents.length - 1] + yearsAfterTheEvent)); DescriptiveStatistics statsAdj = new DescriptiveStatistics(); chronoAdj = new double[lastYearAdj - beginingYearAdj + 1]; // Add data from the array for (int i = beginingYearAdj; i < lastYearAdj + 1; i++) { statsAdj.addValue(chronoActual[chronologyYears.indexOf(i)].doubleValue()); chronoAdj[i - beginingYearAdj] = chronoActual[chronologyYears.indexOf(i)].doubleValue(); } ; // Obtain the mean sensativity meanSensitivityAdj = 0; for (int i = 1; i < chronoAdj.length; i++) { double senDenominatorAdj = Math.abs(chronoAdj[i]) + Math.abs(chronoAdj[i - 1]); if (senDenominatorAdj != 0) { meanSensitivityAdj = meanSensitivityAdj + Math.abs(2 * (chronoAdj[i] - chronoAdj[i - 1])) / senDenominatorAdj; } } meanSensitivityAdj = meanSensitivityAdj / (chronoAdj.length - 1); /* 
* Obtain and display the general statistical information on the whole time series data. */ meanAdj = statsAdj.getMean(); stdAdj = statsAdj.getStandardDeviation(); medianAdj = StatUtils.percentile(chronoAdj, 50); kurtAdj = statsAdj.getKurtosis(); skewAdj = statsAdj.getSkewness(); // new PearsonsCorrelation().correlation(chronoAdj, chronoAdj); double autoNumSum = 0.0; double autoDemSum = 0.0; System.out.println("the length of chronoAdj is " + chronoAdj.length); for (int j = 0; j < (chronoAdj.length - 1); j++) { // System.out.println("j is: "+j + "mean is "+ meanAdj + "chronoadj is "+chronoAdj[j] ); autoNumSum = autoNumSum + (chronoAdj[j] - meanAdj) * (chronoAdj[j + 1] - meanAdj); } for (int j = 0; j < chronoAdj.length; j++) { autoDemSum = autoDemSum + (chronoAdj[j] - meanAdj) * (chronoAdj[j] - meanAdj); } autocorrelationAdj = autoNumSum / autoDemSum; // autocorrelationAdj=new PearsonsCorrelation().correlation(chronoAdj, chronoAdj); System.out.println("the autocorrelation of the adjustchonology is: " + autocorrelationAdj); /* * Calculate the statistical information per window of the Actual Events. load the values of the choronoActual per * window in window into a two dimensional array calculate the mean per row calculate the standard deviation per row * calculate end values of the confidence interval for 95%,99%.99.9% per row */ // Definition of the length of the window of interest. 
lengthOfWindow = yearsPriorToEvent + yearsAfterTheEvent + 1; // define the two dimensional array for the calculations of the Actual // Event windows stats meanByWindow = new double[lengthOfWindow]; varianceByWindow = new double[lengthOfWindow]; standardDevByWindow = new double[lengthOfWindow]; maximunByWindow = new double[lengthOfWindow]; minimunByWindow = new double[lengthOfWindow]; eventWindowsAct = new double[lengthOfWindow][]; eventWindowPattern = new int[lengthOfWindow][]; Simnumdates = new int[lengthOfWindow]; test = new ArrayList<Double>(); for (int k = 0; k < lengthOfWindow; k++) { eventWindowPattern[k] = new int[keventsinadj.size()]; int kWindow = k - yearsPriorToEvent.intValue(); for (int i = 0; i < keventsinadj.size(); i++) { if ((beginingYearAdj <= (keventsinadj.get(i).intValue() + kWindow)) && ((keventsinadj.get(i).intValue() + kWindow) <= lastYearAdj)) { test.add(chronologyActual .get(chronologyYears.indexOf(keventsinadj.get(i).intValue() + kWindow))); eventWindowPattern[k][i] = 1; } else { eventWindowPattern[k][i] = 0; } } Simnumdates[k] = test.size(); eventWindowsAct[k] = new double[test.size()]; // new line for (int ij = 0; ij < test.size(); ij++) { eventWindowsAct[k][ij] = test.get(ij).doubleValue(); } test.clear(); meanByWindow[k] = StatUtils.mean(eventWindowsAct[k]); varianceByWindow[k] = StatUtils.variance(eventWindowsAct[k]); standardDevByWindow[k] = Math.sqrt(varianceByWindow[k]); maximunByWindow[k] = StatUtils.max(eventWindowsAct[k]); minimunByWindow[k] = StatUtils.min(eventWindowsAct[k]); } // end k loop Arrays.sort(Simnumdates); temp = Simnumdates[0]; leftEndPoint = new double[lengthOfWindow][3]; rightEndPoint = new double[lengthOfWindow][3]; for (int i = 0; i < lengthOfWindow; i++) { for (int j = 0; j < 3; j++) { leftEndPoint[i][j] = meanByWindow[i] - stdDevMultiplier[j] * standardDevByWindow[i]; rightEndPoint[i][j] = meanByWindow[i] + stdDevMultiplier[j] * standardDevByWindow[i]; } } /* * calculate the percentile Marks for simulation 
table */ percentileMark = new int[4]; percentileMark[1] = (int) Math.max(Math.round(this.numberOfSimulations / 40.0), 1) - 1; percentileMark[3] = (int) Math.max(Math.round(this.numberOfSimulations / 200.0), 1) - 1; percentileMark[0] = this.numberOfSimulations - percentileMark[1] - 1; percentileMark[2] = this.numberOfSimulations - percentileMark[3] - 1; // System.out.println("percentailmarks "+percentileMark[0]+" , " // +percentileMark[1]+" , " + percentileMark[2]+" , " + // percentileMark[3]); // start the simulations: by selecting events.size() number of random // years Random myrand = new Random(); myrand.setSeed(seedNumber); double[][] meanByWindowSim = new double[lengthOfWindow][this.numberOfSimulations]; int[] eventYearSimulation = new int[keventsinadj.size()];// changed // keventsinadj.size() // by temp double[][] eventWindowsSims = new double[lengthOfWindow][]; simulationtest = new ArrayList<Double>(); /* * Simulation Start */ System.out .println("Before Simulation Time " + (System.currentTimeMillis() - begintime) / 1000F); for (int ii = 0; ii < this.numberOfSimulations; ii++) { for (int i = 0; i < keventsinadj.size(); i++) { // Here add the two if statement for include and exclude so the // range of the selection of years if (includeIncompleteEpochs) { eventYearSimulation[i] = (beginingYearAdj + keventsinadjyeprior.get(i).intValue()) + myrand.nextInt((lastYearAdj - keventsinadjyeafter.get(i).intValue()) - (beginingYearAdj + keventsinadjyeprior.get(i).intValue()) + 1); } if (!includeIncompleteEpochs) { eventYearSimulation[i] = (beginingYearAdj + 6) + myrand.nextInt((lastYearAdj - 4) - (beginingYearAdj + 6) + 1); } } // end i loop Arrays.sort(eventYearSimulation); // System.out.println("after selection of key events in sim " + ii + " time " + (System.currentTimeMillis() - // start) / 1000F); /* * Once the events have been simulated build the two sised matrix (lengthOfWindow) by events.size() */ for (int k = 0; k < lengthOfWindow; k++) { eventWindowsSims[k] = 
new double[keventsinadj.size()];// new line int kWindow = k - yearsPriorToEvent.intValue(); for (int i = 0; i < keventsinadj.size(); i++) { if (eventWindowPattern[k][i] == 1) { simulationtest.add(chronologyActual .get(chronologyYears.indexOf(eventYearSimulation[i] + kWindow))); } } // i loop eventWindowsSims[k] = new double[simulationtest.size()]; // new // line for (int ij = 0; ij < simulationtest.size(); ij++) { eventWindowsSims[k][ij] = simulationtest.get(ij).doubleValue(); } // edn ij loop simulationtest.clear(); meanByWindowSim[k][ii] = StatUtils.mean(eventWindowsSims[k]); } // end k loop numberofsimulation loop } // end simulatrion loop System.out.println("I am done with simulation"); // calculate the mean of the means double sum = 0.0; meanMeanByWindow = new double[lengthOfWindow]; varianceMeanByWindow = new double[lengthOfWindow]; standardDevMeanByWindow = new double[lengthOfWindow]; maxMeanByWindow = new double[lengthOfWindow]; minMeanByWindow = new double[lengthOfWindow]; double[] tempMeanMean = new double[this.numberOfSimulations]; leftEndPointPer = new double[lengthOfWindow][2]; rightEndPointPer = new double[lengthOfWindow][2]; for (int i = 0; i < lengthOfWindow; i++) { // int kWindow = i - yearsPriorToEvent.intValue(); for (int k = 0; k < this.numberOfSimulations; k++) { // for(int k=0;k < (Integer)numberOfSimulations.intValue();k++){ if (k < 1) { // /eSystem.out.println("on the " +i+","+k+" the value is " + // meanByWindowSim[i][k]); } ; tempMeanMean[k] = meanByWindowSim[i][k]; sum = sum + tempMeanMean[k]; // System.out.println("tempMeanMean is " + tempMeanMean[k]); } meanMeanByWindow[i] = StatUtils.mean(tempMeanMean); varianceMeanByWindow[i] = StatUtils.variance(tempMeanMean); standardDevMeanByWindow[i] = Math.sqrt(varianceMeanByWindow[i]); Arrays.sort(tempMeanMean); maxMeanByWindow[i] = StatUtils.max(tempMeanMean); minMeanByWindow[i] = StatUtils.min(tempMeanMean); leftEndPointPer[i][0] = tempMeanMean[percentileMark[1]]; rightEndPointPer[i][0] = 
tempMeanMean[percentileMark[0]]; leftEndPointPer[i][1] = tempMeanMean[percentileMark[3]]; rightEndPointPer[i][1] = tempMeanMean[percentileMark[2]]; // System.out.println("[ "+ // Math.round(leftEndPoint[i][j]*1000.0)/1000.0 + " , " + // Math.round(rightEndPoint[i][j]*1000.0)/1000.0+"]"); // System.out.println("meanMeanByWindow is " + meanMeanByWindow[i]); if (i < 1) { // /eSystem.out.println("the window "+i+" has mean: " + // Math.round(meanMeanByWindow[i]*1000.0)/1000.0); } ; // System.out.println("the window "+i+" has variance: " + // Math.round(varianceMeanByWindow[i]*1000.0)/1000.0); // System.out.println("the window "+i+" has standard dev: " + // Math.round(standardDevMeanByWindow[i]*1000.0)/1000.0); } ;// end of i loop // }//end of ikj loop // Calculate the confidence interval for 95%,99%,99.9% leftEndPointSim = new double[lengthOfWindow][3]; rightEndPointSim = new double[lengthOfWindow][3]; for (int i = 0; i < lengthOfWindow; i++) { for (int j = 0; j < 3; j++) { leftEndPointSim[i][j] = meanMeanByWindow[i] - stdDevMultiplier[j] * standardDevMeanByWindow[i]; rightEndPointSim[i][j] = meanMeanByWindow[i] + stdDevMultiplier[j] * standardDevMeanByWindow[i]; // System.out.println("[ "+ // Math.round(leftEndPoint[i][j]*1000.0)/1000.0 + " , " + // Math.round(rightEndPoint[i][j]*1000.0)/1000.0+"]"); } } // }//end of ikj loop /* * detecting which p-level was selected in gui */ if (alphaLevel95) { alphaLevel = 0; } else if (alphaLevel99) { alphaLevel = 1; } else { alphaLevel = 2; } /* * adding the chart and the creation on the buffer here */ // BarChartParametersModel m = new BarChartParametersModel(titleForRun, meanByWindow, lengthOfWindow, yearsPriorToEvent, // yearsAfterTheEvent, leftEndPointSim, rightEndPointSim, outputFilePrefix, alphaLevel, segmentIndex, // firstYearsArray.size(), firstYearsArray.get(segmentIndex), lastYearsArray.get(segmentIndex)); BarChartParametersModel m = new BarChartParametersModel(titleForRun, meanByWindow, lengthOfWindow, 
yearsPriorToEvent, yearsAfterTheEvent, leftEndPointSim, rightEndPointSim, outputFilePrefix, alphaLevel, segmentIndex, firstYearsArray.size(), beginingYearAdj, lastYearAdj); // m.setChart(new JSEABarChart(m).getChart()); this.chartList.add(m); /* * try { // ChartUtilities.saveChartAsJPEG(new File(outputFilePrefix+"chart"+ikj+ ".jpg"), chart, 500, 300); * ChartUtilities.saveChartAsJPEG(new File(outputFilePrefix+"chart.jpg"), chart, 500, 300); } catch (IOException ex) { * System.err.println(ex.getLocalizedMessage()); } */ // Date now = new Date(); // System.out.println("the date today is: " + now); // adding the cdbuffer stuff log.debug("the value of the beginingyear of the adj crono is " + beginingYearAdj); String delim = ","; cdbuffer = cdbuffer + "Range:" + "\n"; cdbuffer = cdbuffer + beginingYearAdj + delim + lastYearAdj + "\n"; cdbuffer = cdbuffer + "Lags" + delim + "Events Mean" + delim + "95% CONF INT" + delim + "95% CONF INT" + delim + "99% CONF INT" + delim + "99% CONF INT" + delim + "99.9% CONF INT" + delim + "99.9% CONF INT" + delim + "\n"; for (int i = 0; i < lengthOfWindow; i++) { cdbuffer = cdbuffer + (i - yearsPriorToEvent.intValue()) + delim + threePlacess.format(meanByWindow[i]) + delim + threePlacess.format(leftEndPointSim[i][0]) + delim + threePlacess.format(rightEndPointSim[i][0]) + delim + threePlacess.format(leftEndPointSim[i][1]) + "," + threePlacess.format(rightEndPointSim[i][1]) + delim + threePlacess.format(leftEndPointSim[i][2]) + delim + threePlacess.format(rightEndPointSim[i][2]) + "\n"; } // adding the bigbuffer and pdfbufferpar1 stuff // Paragraph pdfbufferpar11 = new Paragraph( ); report = report + "\n"; report = report + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; report = report + "Date: " + now + "\n"; report = report + "Name of the time series file: " + chronologyFile; pdfbufferpar1 = pdfbufferpar1 + "\n"; pdfbufferpar1 = pdfbufferpar1 + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Date: " + now + 
"\n"; pdfbufferpar1 = pdfbufferpar1 + "Name of the time series file: " + chronologyFile; if (firstYearOfProcess.intValue() > chronologyYears.get(0).intValue()) { report = report + "\n" + "First Year= " + firstYearOfProcess; pdfbufferpar1 = pdfbufferpar1 + "\n" + "First Year= " + firstYearOfProcess; } else { report = report + "\n" + "First Year= " + chronologyYears.get(0); pdfbufferpar1 = pdfbufferpar1 + "\n" + "First Year= " + chronologyYears.get(0); } if (lastYearOfProcess.intValue() < chronologyYears.get(chronologyYears.size() - 1).intValue()) { report = report + "\n" + "Last Year= " + lastYearOfProcess; pdfbufferpar1 = pdfbufferpar1 + "\n" + "Last Year= " + lastYearOfProcess; } else { report = report + "\n" + "Last Year= " + chronologyYears.get(chronologyYears.size() - 1); pdfbufferpar1 = pdfbufferpar1 + "\n" + "Last Year= " + chronologyYears.get(chronologyYears.size() - 1); } /* * Display the general statistical information on the Adjusted time series data. */ report = report + "\n" + "DESCRIPTIVE STATISTICS INFORMATION ABOUT THE ADJUSTED CONTINUOUS TIME SERIES: " + "\n" + "\n"; report = report + "\t" + "The adjusted time series RANGES from " + beginingYearAdj + " to " + lastYearAdj + "\n"; report = report + "\t" + "The NUMBER OF YEARS in the adjusted time series is " + chronoAdj.length + "\n"; report = report + "\t" + "MEAN of the adjusted time series is " + threePlacess.format(meanAdj) + "\n"; report = report + "\t" + "MEDIAN of the adjusted time series is " + threePlacess.format(medianAdj) + "\n"; report = report + "\t" + "MEAN SENSITIVITY for the adjusted time series is " + threePlacess.format(meanSensitivityAdj) + "\n"; report = report + "\t" + "STANDARD DEVIATION of the adjusted time series is " + threePlacess.format(stdAdj) + "\n"; report = report + "\t" + "SKEWNESS of the adjusted time series is " + threePlacess.format(skewAdj) + "\n"; report = report + "\t" + "KURTOSIS of the adjusted time series is " + threePlacess.format(kurtAdj) + "\n"; report = 
report + "\t" + "First Order AUTOCORRELATION Index of the adjusted time series is " + threePlacess.format(autocorrelationAdj) + "\n"; /* * save the general statistical information on the Adjusted time series data in pdf fie. */ pdfbufferpar1 = pdfbufferpar1 + "\n" + "DESCRIPTIVE STATISTICS INFORMATION ABOUT THE ADJUSTED CONTINUOUS TIME SERIES: " + "\n" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "The adjusted time series RANGES from " + beginingYearAdj + " to " + lastYearAdj + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "The NUMBER OF YEARS in the adjusted time series is " + chronoAdj.length + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "MEAN of the adjusted time series is " + threePlacess.format(meanAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "MEDIAN of the adjusted time series is " + threePlacess.format(medianAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "MEAN SENSITIVITY for the adjusted time series is " + threePlacess.format(meanSensitivityAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "STANDARD DEVIATION of the adjusted time series is " + threePlacess.format(stdAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "SKEWNESS of the adjusted time series is " + threePlacess.format(skewAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "KURTOSIS of the adjusted time series is " + threePlacess.format(kurtAdj) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "First Order AUTOCORRELATION Index of the adjusted time series is " + threePlacess.format(autocorrelationAdj) + "\n"; /* * Display the general information on the Actual Event list. 
*/ report = report + "\n" + "THE INFORMATION ON THE ACTUAL KEY EVENTS IS" + "\n" + "\n"; report = report + "\t" + "Number of key events: " + keyEvents.length + "\n"; report = report + "\t" + "Number of key events used in analysis: " + numberOfEventsinAdj + "\n"; report = report + "\t" + "Mean years between events is " + threePlacess.format(meanDiffBetweenEvents) + "\n"; report = report + "\t" + "Minimum difference is " + StatUtils.min(diffBetweenEvents) + "\n"; /* * write the general information on the Actual Event list to pdf file. */ pdfbufferpar1 = pdfbufferpar1 + "\n" + "THE INFORMATION ON THE ACTUAL KEY EVENTS IS" + "\n" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Number of key events: " + keyEvents.length + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Number of key events used in analysis: " + numberOfEventsinAdj + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Mean years between events is " + threePlacess.format(meanDiffBetweenEvents) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\t" + "Minimum difference is " + StatUtils.min(diffBetweenEvents) + "\n"; pdfbufferpar11.add(pdfbufferpar1); para1.add(pdfbufferpar11); printTableActFlag.add(true); /* * Write out everything that goes into the actualTable. 
*/ PdfPTable tableAct = new PdfPTable(7); if (isFirstIteration) { String tempStrA = ""; if (alphaLevel95) { tempStrA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 95% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel99) { tempStrA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel999) { tempStrA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99.9% CONF INT ", " MIN ", " MAX "); } report = report + tempStrA + "\n"; actualTable = actualTable + tempStrA.substring(1) + "\n"; PdfPCell cell00A = new PdfPCell(new Paragraph(" ADJ SEG ")); tableAct.addCell(cell00A); PdfPCell cell01A = new PdfPCell(new Paragraph(" LAGS ")); tableAct.addCell(cell01A); PdfPCell cell02A = new PdfPCell(new Paragraph(" MEAN ")); tableAct.addCell(cell02A); PdfPCell cell03A = new PdfPCell(new Paragraph(" STA DEV ")); tableAct.addCell(cell03A); if (alphaLevel95) { PdfPCell cell04A = new PdfPCell(new Paragraph(" 95% CONF INT ")); tableAct.addCell(cell04A); } else if (alphaLevel99) { PdfPCell cell04A = new PdfPCell(new Paragraph(" 99% CONF INT ")); tableAct.addCell(cell04A); } else if (alphaLevel999) { PdfPCell cell04A = new PdfPCell(new Paragraph(" 99.9% CONF INT ")); tableAct.addCell(cell04A); } PdfPCell cell05A = new PdfPCell(new Paragraph(" MIN ")); tableAct.addCell(cell05A); PdfPCell cell06A = new PdfPCell(new Paragraph(" MAX ")); tableAct.addCell(cell06A); } for (int i = 0; i < lengthOfWindow; i++) { if (alphaLevel95) { pdfbufferA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + 
lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanByWindow[i]), threePlacess.format(standardDevByWindow[i]), "[" + threePlacess.format(leftEndPoint[i][0]) + "," + threePlacess.format(rightEndPoint[i][0]) + "]", threePlacess.format(minimunByWindow[i]), threePlacess.format(maximunByWindow[i])); } else if (alphaLevel99) { pdfbufferA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanByWindow[i]), threePlacess.format(standardDevByWindow[i]), "[" + threePlacess.format(leftEndPoint[i][1]) + "," + threePlacess.format(rightEndPoint[i][1]) + "]", threePlacess.format(minimunByWindow[i]), threePlacess.format(maximunByWindow[i])); } else if (alphaLevel999) { pdfbufferA = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanByWindow[i]), threePlacess.format(standardDevByWindow[i]), "[" + threePlacess.format(leftEndPoint[i][2]) + "," + threePlacess.format(rightEndPoint[i][2]) + "]", threePlacess.format(minimunByWindow[i]), threePlacess.format(maximunByWindow[i])); } report = report + pdfbufferA + "\n"; actualTable = actualTable + pdfbufferA.substring(1) + "\n"; PdfPCell cell00A = new PdfPCell(new Paragraph( firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex))); tableAct.addCell(cell00A); PdfPCell cell01A = new PdfPCell(new Paragraph((i - yearsPriorToEvent.intValue()))); tableAct.addCell(cell01A); PdfPCell cell02A = new PdfPCell(new Paragraph(threePlacess.format(meanByWindow[i]))); tableAct.addCell(cell02A); PdfPCell cell03A = new PdfPCell(new 
Paragraph(threePlacess.format(standardDevByWindow[i]))); tableAct.addCell(cell03A); if (alphaLevel95) { PdfPCell cell04A = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPoint[i][0]) + "," + threePlacess.format(rightEndPoint[i][0]) + "]")); tableAct.addCell(cell04A); } else if (alphaLevel99) { PdfPCell cell04A = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPoint[i][1]) + "," + threePlacess.format(rightEndPoint[i][1]) + "]")); tableAct.addCell(cell04A); } else if (alphaLevel999) { PdfPCell cell04A = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPoint[i][2]) + "," + threePlacess.format(rightEndPoint[i][2]) + "]")); tableAct.addCell(cell04A); } PdfPCell cell05A = new PdfPCell(new Paragraph(threePlacess.format(minimunByWindow[i]))); tableAct.addCell(cell05A); PdfPCell cell06A = new PdfPCell(new Paragraph(threePlacess.format(maximunByWindow[i]))); tableAct.addCell(cell06A); } printTableAct.add(tableAct); /* * Display the general information on the Simulations. (Normality is assumed) */ report = report + "\n" + "SIMULATIONS RESULTS: " + "\n" + "\n"; report = report + "\t" + "NUMBER OF SIMULATIONS is: " + this.numberOfSimulations + "\n"; report = report + "\t" + "RANDOM SEED: " + seedNumber + "\n"; /* * Save the general information on the Simulations. (Normality is assumed) for the pdf file */ pdfbufferpar2 = pdfbufferpar2 + "\n" + "SIMULATIONS RESULTS: " + "\n" + "\n"; pdfbufferpar2 = pdfbufferpar2 + "\t" + "NUMBER OF SIMULATIONS is: " + numberOfSimulations + "\n"; pdfbufferpar2 = pdfbufferpar2 + "\t" + "RANDOM SEED: " + seedNumber + "\n"; pdfbufferpar12.add(pdfbufferpar2); para2.add(pdfbufferpar12); /* * Write out everything that goes into the simulationTable. 
*/ PdfPTable tableSim = new PdfPTable(7); if (isFirstIteration) { String tempStrB = ""; if (alphaLevel95) { tempStrB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 95% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel99) { tempStrB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99% CONF INT ", " MIN ", " MAX "); } else if (alphaLevel999) { tempStrB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", " ADJ SEG ", " LAGS ", " MEAN ", "STA DEV", " 99.9% CONF INT ", " MIN ", " MAX "); } report = report + tempStrB + "\n"; simulationTable = simulationTable + tempStrB.substring(1) + "\n"; PdfPCell cell00B = new PdfPCell(new Paragraph(" ADJ SEG ")); tableSim.addCell(cell00B); PdfPCell cell01B = new PdfPCell(new Paragraph(" LAGS ")); tableSim.addCell(cell01B); PdfPCell cell02B = new PdfPCell(new Paragraph(" MEAN ")); tableSim.addCell(cell02B); PdfPCell cell03B = new PdfPCell(new Paragraph(" STA DEV ")); tableSim.addCell(cell03B); if (alphaLevel95) { PdfPCell cell04B = new PdfPCell(new Paragraph(" 95% CONF INT ")); tableSim.addCell(cell04B); } else if (alphaLevel99) { PdfPCell cell04B = new PdfPCell(new Paragraph(" 99% CONF INT ")); tableSim.addCell(cell04B); } else if (alphaLevel999) { PdfPCell cell04B = new PdfPCell(new Paragraph(" 99.9% CONF INT ")); tableSim.addCell(cell04B); } PdfPCell cell05B = new PdfPCell(new Paragraph(" MIN ")); tableSim.addCell(cell05B); PdfPCell cell06B = new PdfPCell(new Paragraph(" MAX ")); tableSim.addCell(cell06B); isFirstIteration = false; } for (int i = 0; i < lengthOfWindow; i++) { if (alphaLevel95) { pdfbufferB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + 
lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanMeanByWindow[i]), threePlacess.format(standardDevMeanByWindow[i]), "[" + threePlacess.format(leftEndPointSim[i][0]) + "," + threePlacess.format(rightEndPointSim[i][0]) + "]", threePlacess.format(minMeanByWindow[i]), threePlacess.format(maxMeanByWindow[i])); } else if (alphaLevel99) { pdfbufferB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanMeanByWindow[i]), threePlacess.format(standardDevMeanByWindow[i]), "[" + threePlacess.format(leftEndPointSim[i][1]) + "," + threePlacess.format(rightEndPointSim[i][1]) + "]", threePlacess.format(minMeanByWindow[i]), threePlacess.format(maxMeanByWindow[i])); } else if (alphaLevel999) { pdfbufferB = String.format( "\t %-12s" + "\t %-8s" + "\t %-8s" + "\t %-8s" + "\t %-20s" + "\t %-8s" + "\t %-8s", // (firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex)), beginingYearAdj + " - " + lastYearAdj, (i - yearsPriorToEvent.intValue()), threePlacess.format(meanMeanByWindow[i]), threePlacess.format(standardDevMeanByWindow[i]), "[" + threePlacess.format(leftEndPointSim[i][2]) + "," + threePlacess.format(rightEndPointSim[i][2]) + "]", threePlacess.format(minMeanByWindow[i]), threePlacess.format(maxMeanByWindow[i])); } report = report + pdfbufferB + "\n"; simulationTable = simulationTable + pdfbufferB.substring(1) + "\n"; PdfPCell cell00B = new PdfPCell(new Paragraph( firstYearsArray.get(segmentIndex) + " - " + lastYearsArray.get(segmentIndex))); tableSim.addCell(cell00B); PdfPCell cell01B = new PdfPCell(new Paragraph((i - yearsPriorToEvent.intValue()))); tableSim.addCell(cell01B); PdfPCell cell02B = new PdfPCell(new 
Paragraph(threePlacess.format(meanMeanByWindow[i]))); tableSim.addCell(cell02B); PdfPCell cell03B = new PdfPCell( new Paragraph(threePlacess.format(standardDevMeanByWindow[i]))); tableSim.addCell(cell03B); if (alphaLevel95) { PdfPCell cell04B = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPointSim[i][0]) + "," + threePlacess.format(rightEndPointSim[i][0]) + "]")); tableSim.addCell(cell04B); // PdfPCell cell05B = new PdfPCell(new Paragraph("[" + threePlacess.format(leftEndPointPer[i][0]) + "," // + threePlacess.format(rightEndPointPer[i][0]) + "]")); // tableSim.addCell(cell05B); } else if (alphaLevel99) { PdfPCell cell04B = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPointSim[i][1]) + "," + threePlacess.format(rightEndPointSim[i][1]) + "]")); tableSim.addCell(cell04B); // PdfPCell cell05B = new PdfPCell(new Paragraph("[" + threePlacess.format(leftEndPointPer[i][0]) + "," // + threePlacess.format(rightEndPointPer[i][0]) + "]")); // tableSim.addCell(cell05B); } else if (alphaLevel999) { PdfPCell cell04B = new PdfPCell( new Paragraph("[" + threePlacess.format(leftEndPointSim[i][2]) + "," + threePlacess.format(rightEndPointSim[i][2]) + "]")); tableSim.addCell(cell04B); // PdfPCell cell05B = new PdfPCell(new Paragraph("[" + threePlacess.format(leftEndPointPer[i][0]) + "," // + threePlacess.format(rightEndPointPer[i][0]) + "]")); // tableSim.addCell(cell05B); } PdfPCell cell06B = new PdfPCell(new Paragraph(threePlacess.format(minMeanByWindow[i]))); tableSim.addCell(cell06B); PdfPCell cell07B = new PdfPCell(new Paragraph(threePlacess.format(maxMeanByWindow[i]))); tableSim.addCell(cell07B); } printTableSim.add(tableSim); } // end of if keventsinadj >=2 else { cdbuffer = cdbuffer + "Range:" + "\n"; cdbuffer = cdbuffer + beginingYearAdj + "," + lastYearAdj + "\n"; cdbuffer = cdbuffer + "Segment: " + (segmentIndex + 1) + "has not enough events to run the analysis" + "\n"; // ADDED SO THAT BAD SEGMENTS CANNOT BE SELECTED FOR DISPLAY ON THE 
CHART segmentTable.tableModel.getSegment(segmentIndex).setBadSegmentFlag(true); printTableActFlag.add(false); pdfbufferpar1 = pdfbufferpar1 + "\n"; pdfbufferpar1 = pdfbufferpar1 + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Date: " + now + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Name of the time series file: " + chronologyFile + "\n"; if (firstYearOfProcess.intValue() > chronologyYears.get(0).intValue()) { report = report + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; } else { report = report + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; } if (lastYearOfProcess.intValue() < chronologyYears.get(chronologyYears.size() - 1).intValue()) { report = report + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; } else { report = report + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; } report = report + "Not enough events within the window in the time series (or segment of the time series) to proceed with the analysis " + keventsinadj.size() + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Not enough events within the window in the time series (or segment of the time series) to proceed with the analysis " + keventsinadj.size() + "\n"; } ;// end of else for if keventsinadd >=2 } // end of if kevents >=2 else { cdbuffer = cdbuffer + "Range:" + "\n"; cdbuffer = cdbuffer + beginingYearAdj + "," + lastYearAdj + "\n"; cdbuffer = cdbuffer + "Segement: " + (segmentIndex + 1) + "has not enough events to run the analysis" + 
"\n"; // ADDED SO THAT BAD SEGMENTS CANNOT BE SELECTED FOR DISPLAY ON THE CHART segmentTable.tableModel.getSegment(segmentIndex).setBadSegmentFlag(true); printTableActFlag.add(false); pdfbufferpar1 = pdfbufferpar1 + "\n"; pdfbufferpar1 = pdfbufferpar1 + "SUPERPOSED EPOCH ANALYSIS RESULTS" + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Date: " + now + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Name of the time series file: " + chronologyFile + "\n"; if (firstYearOfProcess.intValue() > chronologyYears.get(0).intValue()) { report = report + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed: " + firstYearOfProcess + "\n"; } else { report = report + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The First year processed " + chronologyYears.get(0) + "\n"; } if (lastYearOfProcess.intValue() < chronologyYears.get(chronologyYears.size() - 1).intValue()) { report = report + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + lastYearOfProcess + "\n"; } else { report = report + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; pdfbufferpar1 = pdfbufferpar1 + "\n" + "The last year of the process is " + chronologyYears.get(chronologyYears.size() - 1) + "\n"; } report = report + "Not enough events in the time series (or segment of the time series) to proceed with the analysis " + kevents.size() + "\n"; pdfbufferpar1 = pdfbufferpar1 + "Not enough events in the time series (or segment of the time series) to proceed with the analysis " + kevents.size() + "\n"; } pdfbufferpar1 = ""; pdfbufferpar2 = ""; } ; // ending the huge loop ikj // ending of additions }
From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.TestResourceLocalizationService.java
@Test @SuppressWarnings("unchecked") public void testPublicResourceAddResourceExceptions() throws Exception { List<Path> localDirs = new ArrayList<Path>(); String[] sDirs = new String[4]; for (int i = 0; i < 4; ++i) { localDirs.add(lfs.makeQualified(new Path(basedir, i + ""))); sDirs[i] = localDirs.get(i).toString(); }/* ww w .j a va 2 s.c o m*/ conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs); conf.setBoolean(Dispatcher.DISPATCHER_EXIT_ON_ERROR_KEY, true); DrainDispatcher dispatcher = new DrainDispatcher(); EventHandler<ApplicationEvent> applicationBus = mock(EventHandler.class); dispatcher.register(ApplicationEventType.class, applicationBus); EventHandler<ContainerEvent> containerBus = mock(EventHandler.class); dispatcher.register(ContainerEventType.class, containerBus); ContainerExecutor exec = mock(ContainerExecutor.class); DeletionService delService = mock(DeletionService.class); LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); LocalDirsHandlerService dirsHandlerSpy = spy(dirsHandler); dirsHandlerSpy.init(conf); dispatcher.init(conf); dispatcher.start(); try { ResourceLocalizationService rawService = new ResourceLocalizationService(dispatcher, exec, delService, dirsHandlerSpy, nmContext); ResourceLocalizationService spyService = spy(rawService); doReturn(mockServer).when(spyService).createServer(); doReturn(lfs).when(spyService).getLocalFileContext(isA(Configuration.class)); spyService.init(conf); spyService.start(); final String user = "user0"; final String userFolder = "user0Folder"; // init application final Application app = mock(Application.class); final ApplicationId appId = BuilderUtils.newApplicationId(314159265358979L, 3); when(app.getUser()).thenReturn(user); when(app.getUserFolder()).thenReturn(userFolder); when(app.getAppId()).thenReturn(appId); spyService.handle( new ApplicationLocalizationEvent(LocalizationEventType.INIT_APPLICATION_RESOURCES, app)); dispatcher.await(); // init resources Random r = new Random(); 
r.setSeed(r.nextLong()); // Queue localization request for the public resource final LocalResource pubResource = getPublicMockedResource(r); final LocalResourceRequest pubReq = new LocalResourceRequest(pubResource); Map<LocalResourceVisibility, Collection<LocalResourceRequest>> req = new HashMap<LocalResourceVisibility, Collection<LocalResourceRequest>>(); req.put(LocalResourceVisibility.PUBLIC, Collections.singletonList(pubReq)); // init container. final Container c = getMockContainer(appId, 42, user, userFolder); // first test ioexception Mockito.doThrow(new IOException()).when(dirsHandlerSpy).getLocalPathForWrite(isA(String.class), Mockito.anyLong(), Mockito.anyBoolean()); // send request spyService.handle(new ContainerLocalizationRequestEvent(c, req)); dispatcher.await(); LocalResourcesTracker tracker = spyService.getLocalResourcesTracker(LocalResourceVisibility.PUBLIC, user, appId); Assert.assertNull(tracker.getLocalizedResource(pubReq)); // test IllegalArgumentException String name = Long.toHexString(r.nextLong()); URL url = getPath("/local/PRIVATE/" + name + "/"); final LocalResource rsrc = BuilderUtils.newLocalResource(url, LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, r.nextInt(1024) + 1024L, r.nextInt(1024) + 2048L, false); final LocalResourceRequest pubReq1 = new LocalResourceRequest(rsrc); Map<LocalResourceVisibility, Collection<LocalResourceRequest>> req1 = new HashMap<LocalResourceVisibility, Collection<LocalResourceRequest>>(); req1.put(LocalResourceVisibility.PUBLIC, Collections.singletonList(pubReq1)); Mockito.doCallRealMethod().when(dirsHandlerSpy).getLocalPathForWrite(isA(String.class), Mockito.anyLong(), Mockito.anyBoolean()); // send request spyService.handle(new ContainerLocalizationRequestEvent(c, req1)); dispatcher.await(); tracker = spyService.getLocalResourcesTracker(LocalResourceVisibility.PUBLIC, user, appId); Assert.assertNull(tracker.getLocalizedResource(pubReq)); // test RejectedExecutionException by shutting down the thread 
pool PublicLocalizer publicLocalizer = spyService.getPublicLocalizer(); publicLocalizer.threadPool.shutdown(); spyService.handle(new ContainerLocalizationRequestEvent(c, req)); dispatcher.await(); tracker = spyService.getLocalResourcesTracker(LocalResourceVisibility.PUBLIC, user, appId); Assert.assertNull(tracker.getLocalizedResource(pubReq)); } finally { // if we call stop with events in the queue, an InterruptedException gets // thrown resulting in the dispatcher thread causing a system exit dispatcher.await(); dispatcher.stop(); } }
From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.TestResourceLocalizationService.java
@Test(timeout = 10000) @SuppressWarnings("unchecked") // mocked generics public void testLocalizerRunnerException() throws Exception { DrainDispatcher dispatcher = new DrainDispatcher(); dispatcher.init(conf);/*w ww . ja v a 2s. co m*/ dispatcher.start(); EventHandler<ApplicationEvent> applicationBus = mock(EventHandler.class); dispatcher.register(ApplicationEventType.class, applicationBus); EventHandler<ContainerEvent> containerBus = mock(EventHandler.class); dispatcher.register(ContainerEventType.class, containerBus); ContainerExecutor exec = mock(ContainerExecutor.class); LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); LocalDirsHandlerService dirsHandlerSpy = spy(dirsHandler); dirsHandlerSpy.init(conf); DeletionService delServiceReal = new DeletionService(exec); DeletionService delService = spy(delServiceReal); delService.init(new Configuration()); delService.start(); ResourceLocalizationService rawService = new ResourceLocalizationService(dispatcher, exec, delService, dirsHandlerSpy, nmContext); ResourceLocalizationService spyService = spy(rawService); doReturn(mockServer).when(spyService).createServer(); try { spyService.init(conf); spyService.start(); // init application final Application app = mock(Application.class); final ApplicationId appId = BuilderUtils.newApplicationId(314159265358979L, 3); when(app.getUser()).thenReturn("user0"); when(app.getAppId()).thenReturn(appId); spyService.handle( new ApplicationLocalizationEvent(LocalizationEventType.INIT_APPLICATION_RESOURCES, app)); dispatcher.await(); Random r = new Random(); long seed = r.nextLong(); System.out.println("SEED: " + seed); r.setSeed(seed); final Container c = getMockContainer(appId, 42, "user0", "user0Folder"); final LocalResource resource1 = getPrivateMockedResource(r); System.out.println("Here 4"); final LocalResourceRequest req1 = new LocalResourceRequest(resource1); Map<LocalResourceVisibility, Collection<LocalResourceRequest>> rsrcs = new 
HashMap<LocalResourceVisibility, Collection<LocalResourceRequest>>(); List<LocalResourceRequest> privateResourceList = new ArrayList<LocalResourceRequest>(); privateResourceList.add(req1); rsrcs.put(LocalResourceVisibility.PRIVATE, privateResourceList); final Constructor<?>[] constructors = FSError.class.getDeclaredConstructors(); constructors[0].setAccessible(true); FSError fsError = (FSError) constructors[0].newInstance(new IOException("Disk Error")); Mockito.doThrow(fsError).when(dirsHandlerSpy).getLocalPathForWrite(isA(String.class)); spyService.handle(new ContainerLocalizationRequestEvent(c, rsrcs)); Thread.sleep(1000); dispatcher.await(); // Verify if ContainerResourceFailedEvent is invoked on FSError verify(containerBus).handle(isA(ContainerResourceFailedEvent.class)); } finally { spyService.stop(); dispatcher.stop(); delService.stop(); } }
From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.TestResourceLocalizationService.java
/**
 * Verifies that when localization of a PUBLIC resource fails (a forced
 * chmod/setPermission IOException), every container that requested that
 * resource receives a {@link ContainerResourceFailedEvent} — here two
 * requests for the same resource are queued and both must be failed.
 * A CyclicBarrier holds the failure back until both requests are queued.
 */
@Test(timeout = 20000)
@SuppressWarnings("unchecked") // mocked generics
public void testFailedPublicResource() throws Exception {
    // Set up four NM local dirs under the test base directory.
    List<Path> localDirs = new ArrayList<Path>();
    String[] sDirs = new String[4];
    for (int i = 0; i < 4; ++i) {
        localDirs.add(lfs.makeQualified(new Path(basedir, i + "")));
        sDirs[i] = localDirs.get(i).toString();
    }
    conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs);
    DrainDispatcher dispatcher = new DrainDispatcher();
    EventHandler<ApplicationEvent> applicationBus = mock(EventHandler.class);
    dispatcher.register(ApplicationEventType.class, applicationBus);
    // containerBus is verified at the end for the two failure events.
    EventHandler<ContainerEvent> containerBus = mock(EventHandler.class);
    dispatcher.register(ContainerEventType.class, containerBus);
    ContainerExecutor exec = mock(ContainerExecutor.class);
    DeletionService delService = mock(DeletionService.class);
    LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService();
    dirsHandler.init(conf);
    dispatcher.init(conf);
    dispatcher.start();
    try {
        ResourceLocalizationService rawService = new ResourceLocalizationService(dispatcher, exec, delService,
                dirsHandler, nmContext);
        ResourceLocalizationService spyService = spy(rawService);
        doReturn(mockServer).when(spyService).createServer();
        doReturn(lfs).when(spyService).getLocalFileContext(isA(Configuration.class));
        spyService.init(conf);
        spyService.start();
        final String user = "user0";
        final String userFolder = "user0Folder";
        // init application
        final Application app = mock(Application.class);
        final ApplicationId appId = BuilderUtils.newApplicationId(314159265358979L, 3);
        when(app.getUser()).thenReturn(user);
        when(app.getUserFolder()).thenReturn(userFolder);
        when(app.getAppId()).thenReturn(appId);
        spyService.handle(
                new ApplicationLocalizationEvent(LocalizationEventType.INIT_APPLICATION_RESOURCES, app));
        dispatcher.await();
        // init container.
        final Container c = getMockContainer(appId, 42, user, userFolder);
        // init resources; the seed is printed so a failure with the randomly
        // generated resource can be reproduced.
        Random r = new Random();
        long seed = r.nextLong();
        System.out.println("SEED: " + seed);
        r.setSeed(seed);
        // cause chmod to fail after a delay: the barrier has 2 parties — the
        // localizer thread (inside answer()) and the test thread — so the
        // IOException is thrown only after the test has queued both requests.
        final CyclicBarrier barrier = new CyclicBarrier(2);
        doAnswer(new Answer<Void>() {
            public Void answer(InvocationOnMock invocation) throws IOException {
                try {
                    barrier.await();
                } catch (InterruptedException e) {
                } catch (BrokenBarrierException e) {
                }
                throw new IOException("forced failure");
            }
        }).when(spylfs).setPermission(isA(Path.class), isA(FsPermission.class));
        // Queue up two localization requests for the same public resource
        final LocalResource pubResource = getPublicMockedResource(r);
        final LocalResourceRequest pubReq = new LocalResourceRequest(pubResource);
        Map<LocalResourceVisibility, Collection<LocalResourceRequest>> req = new HashMap<LocalResourceVisibility, Collection<LocalResourceRequest>>();
        req.put(LocalResourceVisibility.PUBLIC, Collections.singletonList(pubReq));
        Set<LocalResourceRequest> pubRsrcs = new HashSet<LocalResourceRequest>();
        pubRsrcs.add(pubReq);
        spyService.handle(new ContainerLocalizationRequestEvent(c, req));
        spyService.handle(new ContainerLocalizationRequestEvent(c, req));
        dispatcher.await();
        // allow the chmod to fail now that both requests have been queued
        barrier.await();
        // Both queued requests must surface as container resource failures.
        verify(containerBus, timeout(5000).times(2)).handle(isA(ContainerResourceFailedEvent.class));
    } finally {
        dispatcher.stop();
    }
}
From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.TestResourceLocalizationService.java
@Test(timeout = 20000) @SuppressWarnings("unchecked") public void testLocalizerHeartbeatWhenAppCleaningUp() throws Exception { conf.set(YarnConfiguration.NM_LOCAL_DIRS, lfs.makeQualified(new Path(basedir, 0 + "")).toString()); // Start dispatcher. DrainDispatcher dispatcher = new DrainDispatcher(); dispatcher.init(conf);/*from w w w . j a v a 2 s. c o m*/ dispatcher.start(); dispatcher.register(ApplicationEventType.class, mock(EventHandler.class)); dispatcher.register(ContainerEventType.class, mock(EventHandler.class)); DummyExecutor exec = new DummyExecutor(); LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); dirsHandler.init(conf); // Start resource localization service. ResourceLocalizationService rawService = new ResourceLocalizationService(dispatcher, exec, mock(DeletionService.class), dirsHandler, nmContext); ResourceLocalizationService spyService = spy(rawService); doReturn(mockServer).when(spyService).createServer(); doReturn(lfs).when(spyService).getLocalFileContext(isA(Configuration.class)); try { spyService.init(conf); spyService.start(); // Init application resources. final Application app = mock(Application.class); final ApplicationId appId = BuilderUtils.newApplicationId(1234567890L, 3); when(app.getUser()).thenReturn("user0"); when(app.getUserFolder()).thenReturn("user0Folder"); when(app.getAppId()).thenReturn(appId); when(app.toString()).thenReturn(appId.toString()); spyService.handle( new ApplicationLocalizationEvent(LocalizationEventType.INIT_APPLICATION_RESOURCES, app)); dispatcher.await(); // Initialize localizer. 
Random r = new Random(); long seed = r.nextLong(); System.out.println("SEED: " + seed); r.setSeed(seed); final Container c = getMockContainer(appId, 46, "user0", "user0Folder"); FSDataOutputStream out = new FSDataOutputStream(new DataOutputBuffer(), null); doReturn(out).when(spylfs).createInternal(isA(Path.class), isA(EnumSet.class), isA(FsPermission.class), anyInt(), anyShort(), anyLong(), isA(Progressable.class), isA(ChecksumOpt.class), anyBoolean()); final LocalResource resource1 = getAppMockedResource(r); final LocalResource resource2 = getAppMockedResource(r); // Send localization requests for container. // 2 resources generated with APPLICATION visibility. final LocalResourceRequest req1 = new LocalResourceRequest(resource1); final LocalResourceRequest req2 = new LocalResourceRequest(resource2); Map<LocalResourceVisibility, Collection<LocalResourceRequest>> rsrcs = new HashMap<LocalResourceVisibility, Collection<LocalResourceRequest>>(); List<LocalResourceRequest> appResourceList = Arrays.asList(req1, req2); rsrcs.put(LocalResourceVisibility.APPLICATION, appResourceList); spyService.handle(new ContainerLocalizationRequestEvent(c, rsrcs)); dispatcher.await(); // Wait for localization to begin. exec.waitForLocalizers(1); final String containerIdStr = c.getContainerId().toString(); LocalizerRunner locRunnerForContainer = spyService.getLocalizerRunner(containerIdStr); // Heartbeats from container localizer LocalResourceStatus rsrcSuccess = mock(LocalResourceStatus.class); LocalizerStatus stat = mock(LocalizerStatus.class); when(stat.getLocalizerId()).thenReturn(containerIdStr); when(rsrcSuccess.getResource()).thenReturn(resource1); when(rsrcSuccess.getLocalSize()).thenReturn(4344L); when(rsrcSuccess.getLocalPath()).thenReturn(getPath("/some/path")); when(rsrcSuccess.getStatus()).thenReturn(ResourceStatusType.FETCH_SUCCESS); when(stat.getResources()).thenReturn(Collections.<LocalResourceStatus>emptyList()); // First heartbeat which schedules first resource. 
LocalizerHeartbeatResponse response = spyService.heartbeat(stat); assertEquals("NM should tell localizer to be LIVE in Heartbeat.", LocalizerAction.LIVE, response.getLocalizerAction()); // Cleanup application. spyService.handle(new ContainerLocalizationCleanupEvent(c, rsrcs)); spyService.handle( new ApplicationLocalizationEvent(LocalizationEventType.DESTROY_APPLICATION_RESOURCES, app)); dispatcher.await(); try { // Directly send heartbeat to introduce race as app is being cleaned up. locRunnerForContainer.processHeartbeat(Collections.singletonList(rsrcSuccess)); } catch (Exception e) { fail("Exception should not have been thrown on processing heartbeat"); } // Send another heartbeat. response = spyService.heartbeat(stat); assertEquals("NM should tell localizer to DIE in Heartbeat.", LocalizerAction.DIE, response.getLocalizerAction()); exec.setStopLocalization(); } finally { spyService.stop(); dispatcher.stop(); } }
From source file: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.TestResourceLocalizationService.java
@Test @SuppressWarnings("unchecked") public void testPublicResourceInitializesLocalDir() throws Exception { // Setup state to simulate restart NM with existing state meaning no // directory creation during initialization NMStateStoreService spyStateStore = spy(nmContext.getNMStateStore()); when(spyStateStore.canRecover()).thenReturn(true); NMContext spyContext = spy(nmContext); when(spyContext.getNMStateStore()).thenReturn(spyStateStore); List<Path> localDirs = new ArrayList<Path>(); String[] sDirs = new String[4]; for (int i = 0; i < 4; ++i) { localDirs.add(lfs.makeQualified(new Path(basedir, i + ""))); sDirs[i] = localDirs.get(i).toString(); }// w ww . jav a 2 s .c o m conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs); DrainDispatcher dispatcher = new DrainDispatcher(); EventHandler<ApplicationEvent> applicationBus = mock(EventHandler.class); dispatcher.register(ApplicationEventType.class, applicationBus); EventHandler<ContainerEvent> containerBus = mock(EventHandler.class); dispatcher.register(ContainerEventType.class, containerBus); ContainerExecutor exec = mock(ContainerExecutor.class); DeletionService delService = mock(DeletionService.class); LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService(); dirsHandler.init(conf); dispatcher.init(conf); dispatcher.start(); try { ResourceLocalizationService rawService = new ResourceLocalizationService(dispatcher, exec, delService, dirsHandler, spyContext); ResourceLocalizationService spyService = spy(rawService); doReturn(mockServer).when(spyService).createServer(); doReturn(lfs).when(spyService).getLocalFileContext(isA(Configuration.class)); spyService.init(conf); final FsPermission defaultPerm = new FsPermission((short) 0755); // verify directory is not created at initialization for (Path p : localDirs) { p = new Path((new URI(p.toString())).getPath()); Path publicCache = new Path(p, ContainerLocalizer.FILECACHE); verify(spylfs, never()).mkdir(eq(publicCache), eq(defaultPerm), eq(true)); } 
spyService.start(); final String user = "user0"; final String userFolder = "user0Folder"; // init application final Application app = mock(Application.class); final ApplicationId appId = BuilderUtils.newApplicationId(314159265358979L, 3); when(app.getUser()).thenReturn(user); when(app.getUserFolder()).thenReturn(userFolder); when(app.getAppId()).thenReturn(appId); spyService.handle( new ApplicationLocalizationEvent(LocalizationEventType.INIT_APPLICATION_RESOURCES, app)); dispatcher.await(); // init container. final Container c = getMockContainer(appId, 42, user, userFolder); // init resources Random r = new Random(); long seed = r.nextLong(); System.out.println("SEED: " + seed); r.setSeed(seed); // Queue up public resource localization final LocalResource pubResource1 = getPublicMockedResource(r); final LocalResourceRequest pubReq1 = new LocalResourceRequest(pubResource1); LocalResource pubResource2 = null; do { pubResource2 = getPublicMockedResource(r); } while (pubResource2 == null || pubResource2.equals(pubResource1)); // above call to make sure we don't get identical resources. final LocalResourceRequest pubReq2 = new LocalResourceRequest(pubResource2); Set<LocalResourceRequest> pubRsrcs = new HashSet<LocalResourceRequest>(); pubRsrcs.add(pubReq1); pubRsrcs.add(pubReq2); Map<LocalResourceVisibility, Collection<LocalResourceRequest>> req = new HashMap<LocalResourceVisibility, Collection<LocalResourceRequest>>(); req.put(LocalResourceVisibility.PUBLIC, pubRsrcs); spyService.handle(new ContainerLocalizationRequestEvent(c, req)); dispatcher.await(); verify(spyService, times(1)).checkAndInitializeLocalDirs(); // verify directory creation for (Path p : localDirs) { p = new Path((new URI(p.toString())).getPath()); Path publicCache = new Path(p, ContainerLocalizer.FILECACHE); verify(spylfs).mkdir(eq(publicCache), eq(defaultPerm), eq(true)); } } finally { dispatcher.stop(); } }