Example usage for java.util Random nextLong

Introduction

This page collects example usages of java.util.Random.nextLong() drawn from open-source projects.

Prototype

public long nextLong() 

Document

Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.
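
A minimal, self-contained sketch of calling the method (the class name and seed value below are illustrative):

public class NextLongDemo {
    public static void main(String[] args) {
        java.util.Random random = new java.util.Random();
        // Uniformly distributed, though Random's 48-bit seed means
        // not every possible long value can actually be returned.
        long value = random.nextLong();
        System.out.println("nextLong() returned: " + value);

        // A fixed seed makes the sequence reproducible across runs.
        java.util.Random seeded = new java.util.Random(12345L);
        System.out.println("seeded nextLong(): " + seeded.nextLong());
    }
}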

Usage

From source file:org.apache.bookkeeper.replication.AuditorLedgerCheckerTest.java

@Test
public void testTriggerAuditorWithNoPendingAuditTask() throws Exception {
    // wait for a second so that the initial periodic check finishes
    Thread.sleep(1000);
    int lostBookieRecoveryDelayConfValue = baseConf.getLostBookieRecoveryDelay();
    Auditor auditorBookiesAuditor = getAuditorBookiesAuditor();
    Future<?> auditTask = auditorBookiesAuditor.getAuditTask();
    int lostBookieRecoveryDelayBeforeChange = auditorBookiesAuditor.getLostBookieRecoveryDelayBeforeChange();
    Assert.assertEquals("auditTask is supposed to be null", null, auditTask);
    Assert.assertEquals(
            "lostBookieRecoveryDelayBeforeChange of Auditor should be equal to BaseConf's lostBookieRecoveryDelay",
            lostBookieRecoveryDelayConfValue, lostBookieRecoveryDelayBeforeChange);

    @Cleanup("shutdown")
    OrderedScheduler scheduler = OrderedScheduler.newSchedulerBuilder().name("test-scheduler").numThreads(1)
            .build();
    @Cleanup
    MetadataClientDriver driver = MetadataDrivers
            .getClientDriver(URI.create(baseClientConf.getMetadataServiceUri()));
    driver.initialize(baseClientConf, scheduler, NullStatsLogger.INSTANCE, Optional.of(zkc));

    // There is no easy way to validate that the Auditor has executed the audit process (Auditor.startAudit)
    // without shutting down a Bookie. To verify that resetting LostBookieRecoveryDelay triggers auditing
    // even when there is no pending audit task, the following approach is used.

    // Here we create a few ledgers whose metadata lists non-existent bookies as the ensemble.
    // When the Auditor runs its audit, it recognizes these ledgers as under-replicated
    // and marks them accordingly, since those bookies are not available.
    int numofledgers = 5;
    Random rand = new Random();
    for (int i = 0; i < numofledgers; i++) {
        LedgerMetadata metadata = new LedgerMetadata(3, 2, 2, DigestType.CRC32, "passwd".getBytes());
        ArrayList<BookieSocketAddress> ensemble = new ArrayList<BookieSocketAddress>();
        ensemble.add(new BookieSocketAddress("99.99.99.99:9999"));
        ensemble.add(new BookieSocketAddress("11.11.11.11:1111"));
        ensemble.add(new BookieSocketAddress("88.88.88.88:8888"));
        metadata.addEnsemble(0, ensemble);

        MutableInt ledgerCreateRC = new MutableInt(-1);
        CountDownLatch latch = new CountDownLatch(1);
        long ledgerId = (Math.abs(rand.nextLong())) % 100000000;

        try (LedgerManager lm = driver.getLedgerManagerFactory().newLedgerManager()) {
            lm.createLedgerMetadata(ledgerId, metadata, (rc, result) -> {
                ledgerCreateRC.setValue(rc);
                latch.countDown();
            });
        }

        Assert.assertTrue("Ledger creation should complete within 2 secs",
                latch.await(2000, TimeUnit.MILLISECONDS));
        Assert.assertEquals("LedgerCreate should succeed and return OK rc value", BKException.Code.OK,
                ledgerCreateRC.getValue());
        ledgerList.add(ledgerId);
    }

    final CountDownLatch underReplicaLatch = registerUrLedgerWatcher(ledgerList.size());
    urLedgerMgr.setLostBookieRecoveryDelay(lostBookieRecoveryDelayBeforeChange);
    assertTrue("Audit should be triggered and created ledgers should be marked as underreplicated",
            underReplicaLatch.await(2, TimeUnit.SECONDS));
    assertEquals("All the ledgers should be marked as underreplicated", ledgerList.size(), urLedgerList.size());

    auditTask = auditorBookiesAuditor.getAuditTask();
    Assert.assertEquals("auditTask is supposed to be null", null, auditTask);
    Assert.assertEquals(
            "lostBookieRecoveryDelayBeforeChange of Auditor should be equal to BaseConf's lostBookieRecoveryDelay",
            lostBookieRecoveryDelayBeforeChange,
            auditorBookiesAuditor.getLostBookieRecoveryDelayBeforeChange());
}
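
A note on the ledger-id line above: Math.abs(rand.nextLong()) % 100000000 is almost always non-negative, but Math.abs(Long.MIN_VALUE) overflows and stays negative, so the result can, very rarely, be negative. A hedged sketch of a safer equivalent using Math.floorMod (Java 8+), whose result always takes the sign of the divisor:

public class BoundedRandomLong {
    public static void main(String[] args) {
        java.util.Random rand = new java.util.Random();
        // Always in [0, 100000000), even when nextLong() returns Long.MIN_VALUE.
        long ledgerId = Math.floorMod(rand.nextLong(), 100000000L);
        System.out.println(ledgerId);
    }
}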

From source file:com.facebook.LinkBench.LinkBenchDriverInj.java

void load() throws IOException, InterruptedException, Throwable {

    if (!doLoad) {
        logger.info("Skipping load data per the cmdline arg");
        return;
    }
    // load data
    int nLinkLoaders = ConfigUtil.getInt(props, Config.NUM_LOADERS);

    boolean bulkLoad = true;
    BlockingQueue<LoadChunk> chunk_q = new LinkedBlockingQueue<LoadChunk>();

    // max id1 to generate
    long maxid1 = ConfigUtil.getLong(props, Config.MAX_ID);
    // id1 at which to start
    long startid1 = ConfigUtil.getLong(props, Config.MIN_ID);

    // Create loaders
    logger.info("Starting loaders " + nLinkLoaders);
    logger.debug("Bulk Load setting: " + bulkLoad);

    Random masterRandom = createMasterRNG(props, Config.LOAD_RANDOM_SEED);

    boolean genNodes = ConfigUtil.getBool(props, Config.GENERATE_NODES);
    int nTotalLoaders = genNodes ? nLinkLoaders + 1 : nLinkLoaders;

    LatencyStats latencyStats = new LatencyStats(nTotalLoaders);
    List<Runnable> loaders = new ArrayList<Runnable>(nTotalLoaders);

    LoadProgress loadTracker = LoadProgress.create(logger, props);
    for (int i = 0; i < nLinkLoaders; i++) {
        LinkStore linkStore = createLinkStore();

        bulkLoad = bulkLoad && linkStore.bulkLoadBatchSize() > 0;
        LinkBenchLoad l = new LinkBenchLoad(linkStore, props, latencyStats, csvStreamFile, i,
                maxid1 == startid1 + 1, chunk_q, loadTracker);
        loaders.add(l);
    }

    if (genNodes) {
        logger.info("Will generate graph nodes during loading");
        int loaderId = nTotalLoaders - 1;
        NodeStore nodeStore = createNodeStore(null);
        Random rng = new Random(masterRandom.nextLong());
        loaders.add(new NodeLoader(props, logger, nodeStore, rng, latencyStats, csvStreamFile, loaderId));
    }
    enqueueLoadWork(chunk_q, startid1, maxid1, nLinkLoaders, new Random(masterRandom.nextLong()));
    // run loaders
    loadTracker.startTimer();
    long loadTime = concurrentExec(loaders, false, new Random(masterRandom.nextLong()));

    long expectedNodes = maxid1 - startid1;
    long actualLinks = 0;
    long actualNodes = 0;
    for (final Runnable l : loaders) {
        if (l instanceof LinkBenchLoad) {
            actualLinks += ((LinkBenchLoad) l).getLinksLoaded();
        } else {
            assert (l instanceof NodeLoader);
            actualNodes += ((NodeLoader) l).getNodesLoaded();
        }
    }

    latencyStats.displayLatencyStats();

    if (csvStatsFile != null) {
        latencyStats.printCSVStats(csvStatsFile, true);
    }

    double loadTime_s = (loadTime / 1000.0);
    logger.info(String.format(
            "LOAD PHASE COMPLETED. " + " Loaded %d nodes (Expected %d)."
                    + " Loaded %d links (%.2f links per node). " + " Took %.1f seconds.  Links/second = %d",
            actualNodes, expectedNodes, actualLinks, actualLinks / (double) actualNodes, loadTime_s,
            (long) Math.round(actualLinks / loadTime_s)));
}
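
The load above derives each worker's generator from a single master RNG via nextLong(), so one seed determines every loader's stream. A minimal sketch of that seeding pattern (the class name and fixed seed are illustrative):

public class MasterSeedDemo {
    public static void main(String[] args) {
        // Fixing the master seed makes every derived generator, and hence
        // the whole run, reproducible.
        java.util.Random masterRandom = new java.util.Random(42L);
        java.util.Random nodeLoaderRng = new java.util.Random(masterRandom.nextLong());
        java.util.Random chunkRng = new java.util.Random(masterRandom.nextLong());
        // Each worker gets an independent generator: no shared state between
        // threads, and deterministic streams given the master seed.
        System.out.println(nodeLoaderRng.nextInt(100) + " " + chunkRng.nextInt(100));
    }
}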

From source file:energy.usef.core.service.business.CorePlanboardBusinessServiceTest.java

@Test
public void testStorePrognosisWithAplan() {
    // given
    Random random = new Random();
    Prognosis prognosis = new Prognosis();
    prognosis.getPTU().addAll(IntStream.rangeClosed(1, 96).mapToObj(elem -> {
        PTU ptuDto = new PTU();
        ptuDto.setPower(BigInteger.valueOf(random.nextInt(500)));
        ptuDto.setStart(BigInteger.valueOf(elem));
        return ptuDto;
    }).collect(Collectors.toList()));
    prognosis.setPeriod(new LocalDate());
    prognosis.setSequence(random.nextLong());
    prognosis.setType(PrognosisType.A_PLAN);

    PowerMockito.when(connectionGroupRepository.find(Matchers.anyString()))
            .thenReturn(PowerMockito.mock(ConnectionGroup.class));
    // when
    corePlanboardBusinessService.storePrognosis("agr.usef-example.com", prognosis, DocumentType.A_PLAN,
            DocumentStatus.PROCESSED, "agr.usef-example.com", null, false);

    // then
    Mockito.verify(ptuPrognosisRepository, Mockito.times(96)).persist(Matchers.any(PtuPrognosis.class));
}

From source file:com.wizecommerce.hecuba.HecubaCassandraManagerTestBase.java

/**
 * testUpdateRowWithSomeTimestampsSetButNoTTL
 *
 * @throws Exception
 */
@Test
public void testUpdateRowScenario4() throws Exception {
    Map<String, Long> timestamps = new HashMap<String, Long>();
    Random random = new Random();

    timestamps.put("Column_2", Math.abs(random.nextLong()));
    timestamps.put("Column_4", Math.abs(random.nextLong()));

    testUpdateRowResults(timestamps, null, "testUpdateRowScenario4");
}

From source file:org.sakaiproject.kernel.util.JcrUtilsT.java

@Test
public void multiThreadTest() throws Exception {
    Session session = jcrService.loginSystem();
    nodeFactory.createFile(randomFile1 + "xyz1", "text/plain");
    session.save();
    jcrService.logout();

    Thread[] threads = new Thread[10];
    running = 0;
    failed = 0;
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new Thread(new Runnable() {

            public void run() {
                running++;
                Random random = new Random();
                try {
                    for (int j = 0; j < 10; j++) {
                        try {
                            Session session = jcrService.loginSystem();
                            Node node = (Node) session.getItem(randomFile1);
                            Lock lock = jcrService.lock(node);
                            LOG.info("Locked +++++++++++++++++++++++++++++" + lock.getLocked());
                            locked++;
                            assertEquals(1, locked);
                            try {
                                node.getProperty("sakaijcr:test").remove();
                            } catch (Exception e) {
                                // the property may not exist yet; ignore
                            }
                            LOG.info("Unlocking ---------------------------" + lock.getLocked());
                            locked--;
                            assertEquals(0, locked);
                            session.save(); // save performs an unlock
                            Thread.sleep(100);
                            lock = jcrService.lock(node);
                            LOG.info("Locked +++++++++++++++++++++++++++++++2" + lock.getLocked());
                            locked++;
                            assertEquals(1, locked);
                            node.setProperty("sakaijcr:test", "new value" + random.nextLong());
                            LOG.info("Unlocking -----------------------------2" + lock.getLocked());
                            locked--;
                            assertEquals(0, locked);
                            session.save(); // save performs an unlock
                        } catch (Exception e) {
                            failed++;
                            e.printStackTrace();
                        } finally {
                            try {
                                jcrService.logout();
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                            cacheManagerService.unbind(CacheScope.REQUEST);
                        }
                    }
                } catch (Throwable t) {
                    failed++;
                    t.printStackTrace();
                } finally {
                    System.err.println("Exiting " + Thread.currentThread());
                    running--;
                }
            }
        });
    }
    for (Thread thread : threads) {
        thread.start();
        Thread.sleep(100);
    }

    while (running > 0) {
        Thread.sleep(100);
    }
    assertEquals(0, failed);
}

From source file:com.ephesoft.dcma.webservice.service.EphesoftWebService.java

/**
 * Prepares data and restarts the batch.
 *
 * @param batchId String
 * @param serverURL String
 * @param folderPath String
 * @param batchClassId String
 * @param moduleName String
 * @param newBatchInstanceIdentifier String
 * @param batchName String
 * @return String
 */
@RequestMapping(value = "/batchIdentifier/{batchId}/server/{serverURL}/folderLocation/{folderPath}/batchClassId/{batchClassId}/moduleName/{moduleName}/newBatchInstanceIdentifier/{newBatchInstanceIdentifier}/batchName/{batchName}", method = RequestMethod.GET)
@ResponseBody
public String prepareDataAndRestartBatch(@PathVariable("batchId") final String batchId,
        @PathVariable("serverURL") final String serverURL, @PathVariable("folderPath") final String folderPath,
        @PathVariable("batchClassId") final String batchClassId,
        @PathVariable("moduleName") final String moduleName,
        @PathVariable("newBatchInstanceIdentifier") final String newBatchInstanceIdentifier,
        @PathVariable("batchName") final String batchName) {
    String newBatchInstanceID = null;
    newBatchInstanceID = EphesoftStringUtil.getDecodedString(newBatchInstanceIdentifier);
    String folderPathLocalVariable = folderPath;
    String serverURLLocalVariable = EphesoftStringUtil.getDecodedString(serverURL);
    String moduleNameDecoded = EphesoftStringUtil.getDecodedString(moduleName);
    folderPathLocalVariable = EphesoftStringUtil.getDecodedString(folderPathLocalVariable)
            .replace(WorkFlowConstants.CARET_SYMBOL, WorkFlowConstants.BACK_SLASH_SYMBOL);
    serverURLLocalVariable = EphesoftStringUtil.getDecodedString(serverURLLocalVariable)
            .replace(WorkFlowConstants.CARET_SYMBOL, WorkFlowConstants.FORWARD_SLASH_SYMBOL);
    String batchNameLocalVariable = EphesoftStringUtil.getDecodedString(batchName);
    LOGGER.info("==========Inside EphesoftWebService=============");
    LOGGER.info("Folder path is " + folderPathLocalVariable);
    LOGGER.info("Server URL is" + serverURLLocalVariable);
    Random random = new Random();
    BatchClass batchClass = batchClassService.getBatchClassByIdentifier(batchClassId);

    boolean isZipSwitchOn = batchSchemaService.isZipSwitchOn();
    LOGGER.info("Zipped Batch XML switch is:" + isZipSwitchOn);

    String sourceDirectory = batchId;
    String oldBatchId = batchId.split(WorkFlowConstants.UNDERSCORE_SYMBOL)[WebserviceConstants.ZERO];
    LOGGER.info("Source URL Directory is" + sourceDirectory);
    long folderName = random.nextLong();
    if (folderName < WebserviceConstants.ZERO) {
        folderName = Math.abs(folderName);
    }
    String downloadDirectory = batchSchemaService.getLocalFolderLocation() + File.separator + folderName;
    LOGGER.info("Preparing to download data from the FTP server");
    try {
        ftpService.downloadDirectory(sourceDirectory, downloadDirectory, WebserviceConstants.ZERO, true);
        newBatchInstanceID = checkBatchInstanceIdentifier(newBatchInstanceIdentifier, serverURLLocalVariable,
                oldBatchId, batchClassId, batchNameLocalVariable, moduleNameDecoded);
        if (newBatchInstanceID != null) {
            boolean isPreparedData = preparedFiles(newBatchInstanceID, batchClass, oldBatchId, folderName,
                    batchNameLocalVariable, isZipSwitchOn);
            if (isPreparedData) {
                LOGGER.info("Restarting workflow batchInstanceIdentifier" + newBatchInstanceIdentifier
                        + "module name" + moduleNameDecoded);
                LOGGER.info("Starting to create serialize file");
                pluginPropertiesService.getPluginProperties(newBatchInstanceID);
                LOGGER.info("Created serialize file");
                String moduleWorkflowName = getModuleWorkflowNameForBatchClassId(batchClassId,
                        moduleNameDecoded);
                workflowService.startWorkflow(new BatchInstanceID(newBatchInstanceID), moduleWorkflowName);
            } else {
                LOGGER.info("Error in preparing data " + newBatchInstanceIdentifier + ".Returning null");
            }
            LOGGER.info("Returning New batch instance identifier" + newBatchInstanceID);
        }
    } catch (FTPDataDownloadException e) {
        LOGGER.error("Error in downloading data from FTP. Marking batch as error.... " + e.getMessage(), e);
    }
    return newBatchInstanceID;
}
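
The sign flip above (testing folderName < 0 and applying Math.abs) still leaves Long.MIN_VALUE negative, because Math.abs overflows on that one value. When the number only needs to be a unique directory name, a hedged alternative (Java 8+) is to render the raw long unsigned:

public class RandomFolderName {
    public static void main(String[] args) {
        java.util.Random random = new java.util.Random();
        // Long.toUnsignedString never emits a '-', even for Long.MIN_VALUE.
        String folderName = Long.toUnsignedString(random.nextLong());
        System.out.println("download directory suffix: " + folderName);
    }
}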

From source file:com.wizecommerce.hecuba.HecubaCassandraManagerTestBase.java

/**
 * testUpdateRowWithAllTimestampsSetButNoTTL
 *
 * @throws Exception
 */
@Test
public void testUpdateRowScenario5() throws Exception {
    Map<String, Long> timestamps = new HashMap<String, Long>();
    Random random = new Random();
    for (int i = 0; i < 5; i++) {
        timestamps.put("Column_" + i, Math.abs(random.nextLong()));
    }
    testUpdateRowResults(timestamps, null, "testUpdateRowScenario5");
}

From source file:com.wizecommerce.hecuba.HecubaCassandraManagerTestBase.java

/**
 * testUpdateRowWithAllTTLAndTimestampSet
 *
 * @throws Exception
 */
@Test
public void testUpdateRowScenario1() throws Exception {
    Map<String, Integer> ttls = new HashMap<String, Integer>();
    Map<String, Long> timestamps = new HashMap<String, Long>();
    Random random = new Random();
    for (int i = 0; i < 5; i++) {
        ttls.put("Column_" + i, Math.abs(random.nextInt(TEN_YEARS) + 20));
        timestamps.put("Column_" + i, Math.abs(random.nextLong()));
    }
    testUpdateRowResults(timestamps, ttls, "testUpdateRowScenario1");
}

From source file:org.shaman.terrain.polygonal.PolygonalMapGenerator.java

/**
 * Third step, assign coastline
 */
private void assignCoastline() {
    if (graph == null || coastline == null) {
        return;
    }
    Random rand = new Random(seed);
    //reset
    for (Graph.Center c : graph.centers) {
        c.water = false;
        c.border = false;
        c.ocean = false;
    }
    for (Graph.Corner c : graph.corners) {
        c.water = false;
        c.ocean = false;
    }
    //set water parameter of corners
    int waterCorners = 0;
    switch (coastline) {
    case PERLIN:
        //Fractal perlin noise
        Noise[] noise = new Noise[5];
        for (int i = 0; i < noise.length; ++i) {
            noise[i] = new Noise(rand.nextLong());
        }
        for (Graph.Corner c : graph.corners) {
            float val = 0;
            float octave = 6; //to be tuned
            float amplitude = 0.5f; //to be tuned
            for (int i = 0; i < noise.length; ++i) {
                val += noise[i].noise(c.point.x * octave, c.point.y * octave) * amplitude;
                octave *= 2;
                amplitude /= 2.5;
            }
            float dist = c.point.distanceSquared(0.5f, 0.5f);
            float distInfluence = 2.2f; //to be tuned
            float perlinOffset = -0.2f; //to be tuned
            if (val > perlinOffset + distInfluence * dist && !c.border) {
                c.water = false;
            } else {
                c.water = true;
                waterCorners++;
            }
        }
        break;

    case RADIAL:
        //radial sine waves
        double islandFactor = 1.07;
        int bumps = rand.nextInt(6) + 1;
        double startAngle = rand.nextDouble() * 2 * Math.PI;
        double dipAngle = rand.nextDouble() * 2 * Math.PI;
        double dipWidth = rand.nextDouble() * 0.5 + 0.2;
        for (Graph.Corner c : graph.corners) {
            double x = (c.point.x - 0.5) * 2.2;
            double y = (c.point.y - 0.5) * 2.2;
            double angle = Math.atan2(y, x);
            double length = 0.5 * (Math.max(Math.abs(x), Math.abs(y)) + new Vector2d(x, y).length());
            double r1 = 0.5 * 0.4 * Math.sin(startAngle + bumps * angle + Math.cos((bumps + 3) * angle));
            double r2 = 0.7 - 0.2 * Math.sin(startAngle + bumps * angle - Math.sin((bumps + 2) * angle));
            if (Math.abs(angle - dipAngle) < dipWidth || Math.abs(angle - dipAngle + 2 * Math.PI) < dipWidth
                    || Math.abs(angle - dipAngle - 2 * Math.PI) < dipWidth) {
                r1 = r2 = 0.2;
            }
            if ((length < r1 || (length > r1 * islandFactor && length < r2)) && !c.border) {
                c.water = false;
            } else {
                c.water = true;
                waterCorners++;
            }
        }
        break;
    }
    LOG.log(Level.INFO, "corners with water: {0}, without water: {1}",
            new Object[] { waterCorners, graph.corners.size() - waterCorners });

    findOceans();

    updateBiomesGeometry();
}
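
Because the generator above is constructed as new Random(seed), the per-octave sub-seeds drawn with nextLong() are fully determined by the single map seed, so the same seed always reproduces the same coastline. A small sketch of that property (the seed value is illustrative):

public class ReproducibleOctaveSeeds {
    public static void main(String[] args) {
        long seed = 1234L; // hypothetical map seed
        java.util.Random a = new java.util.Random(seed);
        java.util.Random b = new java.util.Random(seed);
        for (int i = 0; i < 5; i++) {
            // Identical seeds yield identical octave sub-seeds, hence identical noise.
            System.out.println(a.nextLong() == b.nextLong()); // prints true
        }
    }
}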

From source file:org.apache.pig.newplan.logical.relational.LogToPhyTranslationVisitor.java

/**
 * Transformation from the logical to the physical plan involves the following steps:
 * first, a random number is generated to link a POCounter with a PORank,
 * avoiding possible collisions between parallel rank operations.
 * Then, in row number mode:
 * <pre>
 * In case of a RANK operation (row number mode), two steps are used:
 *   1.- Each tuple is counted sequentially on each mapper, producing global counters
 *   2.- Global counters are gathered and summed; each tuple looks up the respective counter value
 *       in order to calculate the corresponding rank value.
 * </pre>
 * otherwise:
 * <pre>
 * In case of a RANK BY operation, five steps are necessary:
 *   1.- Group by the fields involved in the rank operation: POPackage
 *   2.- In case of multiple fields, the key (group field) is flattened: POForEach
 *   3.- Sort by the fields available after flattening: POSort
 *   4.- Each group is counted sequentially on each mapper through a global counter: POCounter
 *   5.- Global counters are summed and passed to the rank operation: PORank
 * </pre>
 * @param loRank describes whether the rank operation is in row number mode
 * or is a rank by (dense or not)
 */
@Override
public void visit(LORank loRank) throws FrontendException {
    String scope = DEFAULT_SCOPE;
    PORank poRank;
    POCounter poCounter;

    Random randomGenerator = new Random();
    Long operationID = Math.abs(randomGenerator.nextLong());

    try {
        // Physical operations for RANK operator:
        // In case of a RANK BY operation, five steps are necessary:
        //   1.- Group by the fields involved in the rank operation: POPackage
        //   2.- In case of multiple fields, the key (group field) is flattened: POForEach
        //   3.- Sort by the fields available after flattening: POSort
        //   4.- Each group is counted sequentially on each mapper through a global counter: POCounter
        //   5.- Global counters are summed and passed to the rank operation: PORank
        if (!loRank.isRowNumber()) {

            boolean[] flags = { false };

            MultiMap<Integer, LogicalExpressionPlan> expressionPlans = new MultiMap<Integer, LogicalExpressionPlan>();
            for (int i = 0; i < loRank.getRankColPlans().size(); i++)
                expressionPlans.put(i, loRank.getRankColPlans());

            POPackage poPackage = compileToLR_GR_PackTrio(loRank, null, flags, expressionPlans);
            poPackage.getPkgr().setPackageType(PackageType.GROUP);
            translateSoftLinks(loRank);

            List<Boolean> flattenLst = Arrays.asList(true, false);

            PhysicalPlan fep1 = new PhysicalPlan();
            POProject feproj1 = new POProject(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1);
            feproj1.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            feproj1.setColumn(0);
            feproj1.setResultType(poPackage.getPkgr().getKeyType());
            feproj1.setStar(false);
            feproj1.setOverloaded(false);
            fep1.add(feproj1);

            PhysicalPlan fep2 = new PhysicalPlan();
            POProject feproj2 = new POProject(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1);
            feproj2.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            feproj2.setColumn(1);
            feproj2.setResultType(DataType.BAG);
            feproj2.setStar(false);
            feproj2.setOverloaded(false);
            fep2.add(feproj2);
            List<PhysicalPlan> fePlans = Arrays.asList(fep1, fep2);

            POForEach poForEach = new POForEach(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1,
                    fePlans, flattenLst);

            List<LogicalExpressionPlan> rankPlans = loRank.getRankColPlans();
            byte[] newTypes = new byte[rankPlans.size()];

            for (int i = 0; i < rankPlans.size(); i++) {
                LogicalExpressionPlan loep = rankPlans.get(i);
                Iterator<Operator> inpOpers = loep.getOperators();

                while (inpOpers.hasNext()) {
                    Operator oper = inpOpers.next();
                    newTypes[i] = ((ProjectExpression) oper).getType();
                }
            }

            List<PhysicalPlan> newPhysicalPlan = new ArrayList<PhysicalPlan>();
            List<Boolean> newOrderPlan = new ArrayList<Boolean>();

            for (int i = 0; i < loRank.getRankColPlans().size(); i++) {
                PhysicalPlan fep3 = new PhysicalPlan();
                POProject feproj3 = new POProject(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1);
                feproj3.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
                feproj3.setColumn(i);
                feproj3.setResultType(newTypes[i]);
                feproj3.setStar(false);
                feproj3.setOverloaded(false);
                fep3.add(feproj3);

                newPhysicalPlan.add(fep3);
                newOrderPlan.add(loRank.getAscendingCol().get(i));
            }

            POSort poSort;
            poSort = new POSort(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1, null, newPhysicalPlan,
                    newOrderPlan, null);
            //poSort.setRequestedParallelism(loRank.getRequestedParallelism());
            poSort.addOriginalLocation(loRank.getAlias(), loRank.getLocation());

            poCounter = new POCounter(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1, null,
                    newPhysicalPlan, newOrderPlan);

            poCounter.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            poCounter.setResultType(DataType.TUPLE);
            poCounter.setIsRowNumber(loRank.isRowNumber());
            poCounter.setIsDenseRank(loRank.isDenseRank());
            poCounter.setOperationID(String.valueOf(operationID));

            poRank = new PORank(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1, null, newPhysicalPlan,
                    newOrderPlan);

            poRank.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            poRank.setResultType(DataType.TUPLE);
            poRank.setOperationID(String.valueOf(operationID));

            List<Boolean> flattenLst2 = Arrays.asList(false, true);

            PhysicalPlan fep12 = new PhysicalPlan();
            POProject feproj12 = new POProject(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1);
            feproj12.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            feproj12.setColumn(0);
            feproj12.setResultType(DataType.LONG);
            feproj12.setStar(false);
            feproj12.setOverloaded(false);
            fep12.add(feproj12);

            PhysicalPlan fep22 = new PhysicalPlan();
            POProject feproj22 = new POProject(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1);
            feproj22.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            feproj22.setColumn(loRank.getRankColPlans().size() + 1);
            feproj22.setResultType(DataType.BAG);
            feproj22.setStar(false);
            feproj22.setOverloaded(false);
            fep22.add(feproj22);
            List<PhysicalPlan> fePlans2 = Arrays.asList(fep12, fep22);

            POForEach poForEach2 = new POForEach(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1,
                    fePlans2, flattenLst2);

            currentPlan.add(poForEach);
            currentPlan.add(poSort);
            currentPlan.add(poCounter);
            currentPlan.add(poRank);
            currentPlan.add(poForEach2);

            try {
                currentPlan.connect(poPackage, poForEach);
                currentPlan.connect(poForEach, poSort);
                currentPlan.connect(poSort, poCounter);
                currentPlan.connect(poCounter, poRank);
                currentPlan.connect(poRank, poForEach2);
            } catch (PlanException e) {
                throw new LogicalToPhysicalTranslatorException(e.getMessage(), e.getErrorCode(),
                        e.getErrorSource(), e);
            }

            logToPhyMap.put(loRank, poForEach2);

            // In case of a RANK operation, two steps are used:
            //   1.- Each tuple is counted sequentially on each mapper, producing global counters
            //   2.- Global counters are gathered and summed; each tuple looks up the respective counter value
            //       in order to calculate the corresponding rank value.
        } else {

            List<LogicalExpressionPlan> logPlans = loRank.getRankColPlans();
            List<PhysicalPlan> rankPlans = new ArrayList<PhysicalPlan>(logPlans.size());

            // convert all the logical expression plans to physical expression plans
            currentPlans.push(currentPlan);
            for (LogicalExpressionPlan plan : logPlans) {
                currentPlan = new PhysicalPlan();
                PlanWalker childWalker = new ReverseDependencyOrderWalkerWOSeenChk(plan);
                pushWalker(childWalker);
                childWalker.walk(new ExpToPhyTranslationVisitor(currentWalker.getPlan(), childWalker, loRank,
                        currentPlan, logToPhyMap));
                rankPlans.add(currentPlan);
                popWalker();
            }
            currentPlan = currentPlans.pop();

            poCounter = new POCounter(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1, null, rankPlans,
                    loRank.getAscendingCol());

            poCounter.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            poCounter.setResultType(DataType.TUPLE);
            poCounter.setIsRowNumber(loRank.isRowNumber());
            poCounter.setIsDenseRank(loRank.isDenseRank());
            poCounter.setOperationID(String.valueOf(operationID));

            poRank = new PORank(new OperatorKey(scope, nodeGen.getNextNodeId(scope)), -1, null, rankPlans,
                    loRank.getAscendingCol());

            poRank.addOriginalLocation(loRank.getAlias(), loRank.getLocation());
            poRank.setResultType(DataType.TUPLE);
            poRank.setOperationID(String.valueOf(operationID));

            currentPlan.add(poCounter);
            currentPlan.add(poRank);

            List<Operator> op = loRank.getPlan().getPredecessors(loRank);
            PhysicalOperator from;

            if (op != null) {
                from = logToPhyMap.get(op.get(0));
            } else {
                int errCode = 2051;
                String msg = "Did not find a predecessor for Rank.";
                throw new LogicalToPhysicalTranslatorException(msg, errCode, PigException.BUG);
            }

            currentPlan.connect(from, poCounter);
            currentPlan.connect(poCounter, poRank);

            logToPhyMap.put(loRank, poRank);
        }

    } catch (PlanException e) {
        int errCode = 2015;
        String msg = "Invalid physical operators in the physical plan";
        throw new LogicalToPhysicalTranslatorException(msg, errCode, PigException.BUG, e);
    }

}
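
The translator above pairs each POCounter with its PORank through a shared operationID drawn once from Math.abs(randomGenerator.nextLong()), making collisions between concurrently translated rank pipelines unlikely. A minimal sketch of that pairing idea; the Op class is a hypothetical stand-in for the Pig operators:

public class OperationIdPairing {
    // Hypothetical stand-in for POCounter / PORank.
    static class Op {
        String operationID;
        void setOperationID(String id) { this.operationID = id; }
    }

    public static void main(String[] args) {
        java.util.Random randomGenerator = new java.util.Random();
        // One token per rank pipeline; 64 random bits make collisions
        // between parallel pipelines very unlikely.
        String operationID = String.valueOf(Math.abs(randomGenerator.nextLong()));
        Op counter = new Op();
        Op rank = new Op();
        counter.setOperationID(operationID);
        rank.setOperationID(operationID); // matching IDs pair the two stages
        System.out.println(counter.operationID.equals(rank.operationID)); // true
    }
}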