Example usage for java.util.Random.nextLong()

List of usage examples for java.util.Random.nextLong()

Introduction

On this page you can find example usages of java.util.Random.nextLong().

Prototype

public long nextLong() 

Document

Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.
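
To make the contract concrete, here is a minimal, self-contained sketch of a call (illustrative only, not taken from the examples below):

import java.util.Random;

public class NextLongDemo {
    public static void main(String[] args) {
        Random random = new Random();   // or new Random(seed) for a repeatable sequence
        long value = random.nextLong(); // uniformly distributed over the long range
        System.out.println(value);
        // note: Random uses a 48-bit seed, so not every possible
        // long value can actually be produced
    }
}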

Usage

From source file:org.apache.synapse.commons.vfs.VFSUtils.java

/**
 * Acquires a file item lock before processing the item, guaranteeing that
 * the file is not processed while it is still being uploaded and that it is
 * not processed by two listeners at once.
 * 
 * @param fsManager
 *            used to resolve the processing file
 * @param fo
 *            representing the processing file item
 * @param paramDTO
 *            carries the auto-lock-release settings applied when an existing lock is found
 * @param fso
 *            represents file system options used when resolving file from file system manager.
 * @return boolean true if the lock has been acquired or false if not
 */
public synchronized static boolean acquireLock(FileSystemManager fsManager, FileObject fo, VFSParamDTO paramDTO,
        FileSystemOptions fso) {

    // generate a random lock value to ensure that there are no two parties
    // processing the same file
    Random random = new Random();
    // Lock format random:hostname:hostip:time
    String strLockValue = String.valueOf(random.nextLong());
    try {
        strLockValue += STR_SPLITER + InetAddress.getLocalHost().getHostName();
        strLockValue += STR_SPLITER + InetAddress.getLocalHost().getHostAddress();
    } catch (UnknownHostException ue) {
        if (log.isDebugEnabled()) {
            log.debug("Unable to get the Hostname or IP.");
        }
    }
    strLockValue += STR_SPLITER + (new Date()).getTime();
    byte[] lockValue = strLockValue.getBytes();

    try {
        // check whether there is an existing lock for this item, if so it is assumed
        // to be processed by another listener (downloading) or a sender (uploading)
        // lock file is derived by attaching the ".lock" second extension to the file name
        String fullPath = fo.getName().getURI();
        int pos = fullPath.indexOf("?");
        if (pos != -1) {
            fullPath = fullPath.substring(0, pos);
        }
        FileObject lockObject = fsManager.resolveFile(fullPath + ".lock", fso);
        if (lockObject.exists()) {
            log.debug("There seems to be an external lock, aborting the processing of the file "
                    + maskURLPassword(fo.getName().getURI())
                    + ". This could possibly be due to some other party already "
                    + "processing this file or the file is still being uploaded");
            if (paramDTO != null && paramDTO.isAutoLockRelease()) {
                releaseLock(lockValue, strLockValue, lockObject, paramDTO.isAutoLockReleaseSameNode(),
                        paramDTO.getAutoLockReleaseInterval());
            }
        } else {
            // write a lock file before starting of the processing, to ensure that the
            // item is not processed by any other parties
            lockObject.createFile();
            OutputStream stream = lockObject.getContent().getOutputStream();
            try {
                stream.write(lockValue);
                stream.flush();
                stream.close();
            } catch (IOException e) {
                lockObject.delete();
                log.error(
                        "Couldn't create the lock file before processing the file " + maskURLPassword(fullPath),
                        e);
                return false;
            } finally {
                lockObject.close();
            }

            // check whether the lock is in place and is it me who holds the lock. This is
            // required because it is possible to write the lock file simultaneously by
            // two processing parties. It checks whether the lock file content is the same
            // as the written random lock value.
            // NOTE: this check-after-write is not fully atomic; it is a
            // best-effort safeguard rather than an optimal solution
            FileObject verifyingLockObject = fsManager.resolveFile(fullPath + ".lock", fso);
            if (verifyingLockObject.exists() && verifyLock(lockValue, verifyingLockObject)) {
                return true;
            }
        }
    } catch (FileSystemException fse) {
        log.error("Cannot get the lock for the file : " + maskURLPassword(fo.getName().getURI())
                + " before processing");
    }
    return false;
}
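
The pattern above (write a random token to a lock file, then read it back to confirm that this process won any write race) can be distilled into a few lines. Below is a minimal sketch of the same idea against a local filesystem using java.nio.file; the lockPath parameter is hypothetical, and this is a sketch of the technique, not the Synapse implementation:

import java.io.IOException;
import java.nio.file.*;
import java.util.Random;

public class LockTokenSketch {
    public static boolean tryLock(Path lockPath) throws IOException {
        String token = Long.toString(new Random().nextLong());
        try {
            // CREATE_NEW fails if the lock file already exists
            Files.write(lockPath, token.getBytes(), StandardOpenOption.CREATE_NEW);
        } catch (FileAlreadyExistsException e) {
            return false; // another party already holds the lock
        }
        // read the token back to verify this process is the one holding the lock
        return token.equals(new String(Files.readAllBytes(lockPath)));
    }
}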

From source file:org.catechis.Domartin.java

public static synchronized long getNewID() {
    Random rnd = new Random();
    // combine the high bits of the current time with a random long;
    // 'current' is a field defined elsewhere in the Domartin class
    long uniqueId = ((System.currentTimeMillis() >>> 16) << 16) + rnd.nextLong();
    return current + uniqueId;
}
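
Note that adding a full nextLong() overwhelms the timestamp bits prepared on the previous line, so the sum is effectively just a random long offset by current. A minimal alternative sketch (not the original author's method) that actually preserves the time component masks the random value into the low 16 bits only:

Random rnd = new Random();
long id = ((System.currentTimeMillis() >>> 16) << 16) // time, low 16 bits cleared
        | (rnd.nextLong() & 0xFFFFL);                 // 16 random low bits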

From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.TestContainerLocalizer.java

static ResourceLocalizationSpec getMockRsrc(Random r, LocalResourceVisibility vis, Path p) {
    ResourceLocalizationSpec resourceLocalizationSpec = mock(ResourceLocalizationSpec.class);

    LocalResource rsrc = mock(LocalResource.class);
    String name = Long.toHexString(r.nextLong());
    URL uri = mock(org.apache.hadoop.yarn.api.records.URL.class);
    when(uri.getScheme()).thenReturn("file");
    when(uri.getHost()).thenReturn(null);
    when(uri.getFile()).thenReturn("/local/" + vis + "/" + name);

    when(rsrc.getResource()).thenReturn(uri);
    when(rsrc.getSize()).thenReturn(r.nextInt(1024) + 1024L);
    when(rsrc.getTimestamp()).thenReturn(r.nextInt(1024) + 2048L);
    when(rsrc.getType()).thenReturn(LocalResourceType.FILE);
    when(rsrc.getVisibility()).thenReturn(vis);

    when(resourceLocalizationSpec.getResource()).thenReturn(rsrc);
    when(resourceLocalizationSpec.getDestinationDirectory()).thenReturn(URL.fromPath(p));
    return resourceLocalizationSpec;
}
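
As an aside, the Long.toHexString(r.nextLong()) idiom above is a cheap way to mint a short random identifier, which is handy well beyond tests. A standalone sketch:

Random r = new Random();
String name = Long.toHexString(r.nextLong()); // up to 16 hex digits, e.g. "9f3c2b41d07a5e88"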

From source file:org.sakaiproject.turnitin.util.TurnitinAPIUtil.java

public static InputStream callTurnitinReturnInputStream(String apiURL, Map<String, Object> parameters,
        String secretKey, int timeout, Proxy proxy, boolean isMultipart)
        throws TransientSubmissionException, SubmissionException {
    InputStream togo = null;

    StringBuilder apiDebugSB = new StringBuilder();

    if (!parameters.containsKey("fid")) {
        throw new IllegalArgumentException("You must include a fid in the parameters");
    }

    //if (!parameters.containsKey("gmttime")) {
    parameters.put("gmtime", getGMTime());
    //}

    /**
     * Some debug logging
     */
    if (log.isDebugEnabled()) {
        Set<Entry<String, Object>> ets = parameters.entrySet();
        Iterator<Entry<String, Object>> it = ets.iterator();
        while (it.hasNext()) {
            Entry<String, Object> entr = it.next();
            log.debug("Paramater entry: " + entr.getKey() + ": " + entr.getValue());
        }
    }

    List<String> sortedkeys = new ArrayList<String>();
    sortedkeys.addAll(parameters.keySet());

    String md5 = buildTurnitinMD5(parameters, secretKey, sortedkeys);

    HttpsURLConnection connection;
    String boundary = "";
    try {
        connection = fetchConnection(apiURL, timeout, proxy);

        if (isMultipart) {
            Random rand = new Random();
            //make up a boundary that should be unique
            boundary = Long.toString(rand.nextLong(), 26) + Long.toString(rand.nextLong(), 26)
                    + Long.toString(rand.nextLong(), 26);
            connection.setRequestMethod("POST");
            connection.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary);
        }

        log.debug("HTTPS Connection made to Turnitin");

        OutputStream outStream = connection.getOutputStream();

        if (isMultipart) {
            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("Starting Multipart TII CALL:\n");
            }
            for (int i = 0; i < sortedkeys.size(); i++) {
                if (parameters.get(sortedkeys.get(i)) instanceof ContentResource) {
                    ContentResource resource = (ContentResource) parameters.get(sortedkeys.get(i));
                    outStream.write(
                            ("--" + boundary + "\r\nContent-Disposition: form-data; name=\"pdata\"; filename=\""
                                    + resource.getId() + "\"\r\n" + "Content-Type: " + resource.getContentType()
                                    + "\r\ncontent-transfer-encoding: binary" + "\r\n\r\n").getBytes());
                    // TODO: this loads the document into memory; use the streaming method instead
                    byte[] content = resource.getContent();
                    if (content == null) {
                        throw new SubmissionException("zero length submission!");
                    }
                    outStream.write(content);
                    outStream.write("\r\n".getBytes("UTF-8"));
                    if (apiTraceLog.isDebugEnabled()) {
                        apiDebugSB.append(sortedkeys.get(i));
                        apiDebugSB.append(" = ContentHostingResource: ");
                        apiDebugSB.append(resource.getId());
                        apiDebugSB.append("\n");
                    }
                } else {
                    if (apiTraceLog.isDebugEnabled()) {
                        apiDebugSB.append(sortedkeys.get(i));
                        apiDebugSB.append(" = ");
                        apiDebugSB.append(parameters.get(sortedkeys.get(i)).toString());
                        apiDebugSB.append("\n");
                    }
                    outStream.write(encodeParam(sortedkeys.get(i), parameters.get(sortedkeys.get(i)).toString(),
                            boundary).getBytes());
                }
            }
            outStream.write(encodeParam("md5", md5, boundary).getBytes());
            outStream.write(("--" + boundary + "--").getBytes());

            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("md5 = ");
                apiDebugSB.append(md5);
                apiDebugSB.append("\n");
                apiTraceLog.debug(apiDebugSB.toString());
            }
        } else {
            writeBytesToOutputStream(outStream, sortedkeys.get(0), "=",
                    parameters.get(sortedkeys.get(0)).toString());
            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("Starting TII CALL:\n");
                apiDebugSB.append(sortedkeys.get(0));
                apiDebugSB.append(" = ");
                apiDebugSB.append(parameters.get(sortedkeys.get(0)).toString());
                apiDebugSB.append("\n");
            }

            for (int i = 1; i < sortedkeys.size(); i++) {
                writeBytesToOutputStream(outStream, "&", sortedkeys.get(i), "=",
                        parameters.get(sortedkeys.get(i)).toString());
                if (apiTraceLog.isDebugEnabled()) {
                    apiDebugSB.append(sortedkeys.get(i));
                    apiDebugSB.append(" = ");
                    apiDebugSB.append(parameters.get(sortedkeys.get(i)).toString());
                    apiDebugSB.append("\n");
                }
            }

            writeBytesToOutputStream(outStream, "&md5=", md5);
            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("md5 = ");
                apiDebugSB.append(md5);
                apiTraceLog.debug(apiDebugSB.toString());
            }
        }

        outStream.close();

        togo = connection.getInputStream();
    } catch (IOException t) {
        log.error("IOException making turnitin call.", t);
        throw new TransientSubmissionException("IOException making turnitin call.", t);
    } catch (ServerOverloadException t) {
        throw new TransientSubmissionException("Unable to submit the content data from ContentHosting", t);
    }

    return togo;

}
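
The boundary construction in this example deserves a note: Long.toString(rand.nextLong(), 26) renders a random long in base 26 (digits 0-9 and letters a-p), and concatenating three of them gives a boundary that is vanishingly unlikely to appear in the payload. A minimal standalone sketch of the trick, assuming nothing beyond java.util.Random:

Random rand = new Random();
String boundary = Long.toString(rand.nextLong(), 26) + Long.toString(rand.nextLong(), 26)
        + Long.toString(rand.nextLong(), 26);
// negative values contribute a '-', which is also a legal MIME boundary character
System.out.println("Content-Type: multipart/form-data; boundary=" + boundary);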

From source file:org.sakaiproject.contentreview.turnitin.util.TurnitinAPIUtil.java

public static InputStream callTurnitinReturnInputStream(String apiURL, Map<String, Object> parameters,
        String secretKey, int timeout, Proxy proxy, boolean isMultipart)
        throws TransientSubmissionException, SubmissionException {
    InputStream togo = null;

    StringBuilder apiDebugSB = new StringBuilder();

    if (!parameters.containsKey("fid")) {
        throw new IllegalArgumentException("You must include a fid in the parameters");
    }

    //if (!parameters.containsKey("gmttime")) {
    parameters.put("gmtime", getGMTime());
    //}

    /**
     * Some debug logging
     */
    if (log.isDebugEnabled()) {
        Set<Entry<String, Object>> ets = parameters.entrySet();
        Iterator<Entry<String, Object>> it = ets.iterator();
        while (it.hasNext()) {
            Entry<String, Object> entr = it.next();
            log.debug("Paramater entry: " + entr.getKey() + ": " + entr.getValue());
        }
    }

    List<String> sortedkeys = new ArrayList<String>();
    sortedkeys.addAll(parameters.keySet());

    String md5 = buildTurnitinMD5(parameters, secretKey, sortedkeys);

    HttpsURLConnection connection;
    String boundary = "";
    try {
        connection = fetchConnection(apiURL, timeout, proxy);
        connection.setHostnameVerifier(new HostnameVerifier() {

            // NOTE: accepting every hostname disables TLS hostname
            // verification for this connection
            @Override
            public boolean verify(String hostname, SSLSession session) {
                return true;
            }
        });

        if (isMultipart) {
            Random rand = new Random();
            //make up a boundary that should be unique
            boundary = Long.toString(rand.nextLong(), 26) + Long.toString(rand.nextLong(), 26)
                    + Long.toString(rand.nextLong(), 26);
            connection.setRequestMethod("POST");
            connection.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary);
        }

        log.debug("HTTPS Connection made to Turnitin");

        OutputStream outStream = connection.getOutputStream();

        if (isMultipart) {
            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("Starting Multipart TII CALL:\n");
            }
            for (int i = 0; i < sortedkeys.size(); i++) {
                if (parameters.get(sortedkeys.get(i)) instanceof ContentResource) {
                    ContentResource resource = (ContentResource) parameters.get(sortedkeys.get(i));
                    outStream.write(
                            ("--" + boundary + "\r\nContent-Disposition: form-data; name=\"pdata\"; filename=\""
                                    + resource.getId() + "\"\r\n" + "Content-Type: " + resource.getContentType()
                                    + "\r\ncontent-transfer-encoding: binary" + "\r\n\r\n").getBytes());
                    // TODO: this loads the document into memory; use the streaming method instead
                    byte[] content = resource.getContent();
                    if (content == null) {
                        throw new SubmissionException("zero length submission!");
                    }
                    outStream.write(content);
                    outStream.write("\r\n".getBytes("UTF-8"));
                    if (apiTraceLog.isDebugEnabled()) {
                        apiDebugSB.append(sortedkeys.get(i));
                        apiDebugSB.append(" = ContentHostingResource: ");
                        apiDebugSB.append(resource.getId());
                        apiDebugSB.append("\n");
                    }
                } else {
                    if (apiTraceLog.isDebugEnabled()) {
                        apiDebugSB.append(sortedkeys.get(i));
                        apiDebugSB.append(" = ");
                        apiDebugSB.append(parameters.get(sortedkeys.get(i)).toString());
                        apiDebugSB.append("\n");
                    }
                    outStream.write(encodeParam(sortedkeys.get(i), parameters.get(sortedkeys.get(i)).toString(),
                            boundary).getBytes());
                }
            }
            outStream.write(encodeParam("md5", md5, boundary).getBytes());
            outStream.write(("--" + boundary + "--").getBytes());

            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("md5 = ");
                apiDebugSB.append(md5);
                apiDebugSB.append("\n");
                apiTraceLog.debug(apiDebugSB.toString());
            }
        } else {
            writeBytesToOutputStream(outStream, sortedkeys.get(0), "=",
                    parameters.get(sortedkeys.get(0)).toString());
            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("Starting TII CALL:\n");
                apiDebugSB.append(sortedkeys.get(0));
                apiDebugSB.append(" = ");
                apiDebugSB.append(parameters.get(sortedkeys.get(0)).toString());
                apiDebugSB.append("\n");
            }

            for (int i = 1; i < sortedkeys.size(); i++) {
                writeBytesToOutputStream(outStream, "&", sortedkeys.get(i), "=",
                        parameters.get(sortedkeys.get(i)).toString());
                if (apiTraceLog.isDebugEnabled()) {
                    apiDebugSB.append(sortedkeys.get(i));
                    apiDebugSB.append(" = ");
                    apiDebugSB.append(parameters.get(sortedkeys.get(i)).toString());
                    apiDebugSB.append("\n");
                }
            }

            writeBytesToOutputStream(outStream, "&md5=", md5);
            if (apiTraceLog.isDebugEnabled()) {
                apiDebugSB.append("md5 = ");
                apiDebugSB.append(md5);
                apiTraceLog.debug(apiDebugSB.toString());
            }
        }

        outStream.close();

        togo = connection.getInputStream();
    } catch (IOException t) {
        log.error("IOException making turnitin call.", t);
        throw new TransientSubmissionException("IOException making turnitin call.", t);
    } catch (ServerOverloadException t) {
        throw new TransientSubmissionException("Unable to submit the content data from ContentHosting", t);
    }

    return togo;

}

From source file:com.ricemap.spateDB.operations.Sampler.java

/**
 * Reads a sample of the given files and returns the number of items read.
 * 
 * @param fs
 * @param files
 * @param count
 * @param seed
 * @param output
 * @param inObj
 * @param outObj
 * @return the number of records sampled
 * @throws IOException
 */
public static <T extends TextSerializable, O extends TextSerializable> int sampleLocalByCount(FileSystem fs,
        Path[] files, int count, long seed, ResultCollector<O> output, T inObj, O outObj) throws IOException {
    ArrayList<Path> data_files = new ArrayList<Path>();
    for (Path file : files) {
        if (fs.getFileStatus(file).isDir()) {
            // Directory, process all data files in this directory (visible files)
            FileStatus[] fileStatus = fs.listStatus(file, hiddenFileFilter);
            for (FileStatus f : fileStatus) {
                data_files.add(f.getPath());
            }
        } else {
            // File, process this file
            data_files.add(file);
        }
    }

    files = data_files.toArray(new Path[data_files.size()]);

    ResultCollector<T> converter = createConverter(output, inObj, outObj);
    long[] files_start_offset = new long[files.length + 1]; // Prefix sum of files sizes
    long total_length = 0;
    for (int i_file = 0; i_file < files.length; i_file++) {
        files_start_offset[i_file] = total_length;
        total_length += fs.getFileStatus(files[i_file]).getLen();
    }
    files_start_offset[files.length] = total_length;

    // Generate offsets to read from and make sure they are ordered to minimize
    // seeks between different HDFS blocks
    Random random = new Random(seed);
    long[] offsets = new long[count];
    for (int i = 0; i < offsets.length; i++) {
        if (total_length == 0)
            offsets[i] = 0;
        else
            offsets[i] = Math.abs(random.nextLong()) % total_length;
    }
    Arrays.sort(offsets);

    int record_i = 0; // Number of records read so far
    int records_returned = 0;

    int file_i = 0; // Index of the current file being sampled
    while (record_i < count) {
        // Skip to the file that contains the next sample
        while (offsets[record_i] > files_start_offset[file_i + 1])
            file_i++;

        // Open a stream to the current file and use it to read all samples
        // in this file
        FSDataInputStream current_file_in = fs.open(files[file_i]);
        long current_file_size = files_start_offset[file_i + 1] - files_start_offset[file_i];

        // The start and end offsets of data within this block
        // offsets are calculated relative to file start
        long data_start_offset = 0;
        if (current_file_in.readLong() == SpatialSite.RTreeFileMarker) {
            // This file is an RTree file. Update the start offset to point
            // to the first byte after the header
            data_start_offset = 8 + RTree.getHeaderSize(current_file_in);
        }
        // Get the end offset of data by searching for the beginning of the
        // last line
        long data_end_offset = current_file_size;
        // Skip the last line too, to ensure that the mapped position
        // will be before some line in the block
        current_file_in.seek(data_end_offset);
        data_end_offset = Tail.tail(current_file_in, 1, null, null);
        long file_data_size = data_end_offset - data_start_offset;

        // Keep sampling as long as records offsets are within this file
        while (record_i < count && (offsets[record_i] - files_start_offset[file_i]) < current_file_size) {
            offsets[record_i] -= files_start_offset[file_i];
            // Map file position to element index in this tree assuming fixed
            // size records
            long element_offset_in_file = offsets[record_i] * file_data_size / current_file_size
                    + data_start_offset;
            current_file_in.seek(element_offset_in_file);
            LineReader reader = new LineReader(current_file_in, 4096);
            // Read the first line after that offset
            Text line = new Text();
            reader.readLine(line); // Skip the rest of the current line
            reader.readLine(line); // Read next line

            // Report this element to output
            if (converter != null) {
                inObj.fromText(line);
                converter.collect(inObj);
            }
            record_i++;
            records_returned++;
        }
        current_file_in.close();
    }
    return records_returned;
}
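
One subtlety in the offset generation above: Math.abs(Long.MIN_VALUE) overflows and stays negative, so Math.abs(random.nextLong()) % total_length can, with probability 2^-64, yield a negative offset. A minimal sketch of a safer bounded draw (an aside, not part of the Sampler code):

// mask off the sign bit instead of calling Math.abs
static long nonNegativeBounded(Random random, long bound) {
    return (random.nextLong() & Long.MAX_VALUE) % bound; // a small modulo bias remains
}

On Java 17 and later, java.util.Random also inherits nextLong(long bound) from RandomGenerator, which avoids both problems directly.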

From source file:org.latticesoft.util.common.NumeralUtil.java

/**
 * Generates the next random long.
 * @return the generated long
 */
public static long getRandomLong() {
    Random rand = new Random(System.currentTimeMillis());
    return rand.nextLong();
}
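
Seeding a brand-new Random with System.currentTimeMillis() on every call is a known pitfall: two calls within the same millisecond receive the same seed and therefore return the same "random" long. A minimal alternative sketch that reuses a per-thread generator instead:

import java.util.concurrent.ThreadLocalRandom;

public static long getRandomLong() {
    // one lazily initialized generator per thread; no shared-seed collisions
    return ThreadLocalRandom.current().nextLong();
}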

From source file:org.apache.hadoop.hdfs.qjournal.client.TestQJMWithFaults.java

private static QuorumJournalManager createRandomFaultyQJM(MiniJournalCluster cluster,
        final Random seedGenerator) throws IOException, URISyntaxException {

    AsyncLogger.Factory spyFactory = new AsyncLogger.Factory() {
        @Override
        public AsyncLogger createLogger(Configuration conf, NamespaceInfo nsInfo, String journalId,
                InetSocketAddress addr) {
            return new RandomFaultyChannel(conf, nsInfo, journalId, addr, seedGenerator.nextLong());
        }
    };
    return new QuorumJournalManager(conf, cluster.getQuorumJournalURI(JID), FAKE_NSINFO, spyFactory);
}
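
The seedGenerator.nextLong() call above illustrates a useful testing pattern: a single seeded master Random hands a deterministic seed to each child generator, so an entire randomized run can be replayed from one master seed. A minimal sketch (variable names are hypothetical):

Random master = new Random(42L);                    // one seed controls the whole run
Random channelRng1 = new Random(master.nextLong());
Random channelRng2 = new Random(master.nextLong());
// rerunning with the same master seed reproduces every child sequence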

From source file:com.linkedin.pinot.query.aggregation.groupby.NoDictionaryGroupKeyGeneratorTest.java

/**
 * Helper method to build a segment as follows:
 * <ul>//w  ww .  jav  a 2 s  .com
 *   <li> One string column without dictionary. </li>
 *   <li> One integer column with dictionary. </li>
 * </ul>
 *
 * It also computes the unique group keys while it generates the index.
 *
 * @return Set containing unique group keys from the created segment.
 *
 * @throws Exception
 */
private static RecordReader buildSegment() throws Exception {
    Schema schema = new Schema();

    for (int i = 0; i < COLUMN_NAMES.length; i++) {
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(COLUMN_NAMES[i], DATA_TYPES[i], true);
        schema.addField(dimensionFieldSpec);
    }

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setRawIndexCreationColumns(Arrays.asList(NO_DICT_COLUMN_NAMES));

    config.setOutDir(INDEX_DIR_PATH);
    config.setSegmentName(SEGMENT_NAME);

    Random random = new Random();
    List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
    for (int i = 0; i < NUM_ROWS; i++) {
        Map<String, Object> map = new HashMap<>(NUM_COLUMNS);

        for (FieldSpec fieldSpec : schema.getAllFieldSpecs()) {
            String column = fieldSpec.getName();

            FieldSpec.DataType dataType = fieldSpec.getDataType();
            switch (dataType) {
            case INT:
                map.put(column, random.nextInt());
                break;

            case LONG:
                map.put(column, random.nextLong());
                break;

            case FLOAT:
                map.put(column, random.nextFloat());
                break;

            case DOUBLE:
                map.put(column, random.nextDouble());
                break;

            case STRING:
                map.put(column, "value_" + i);
                break;

            default:
                throw new IllegalArgumentException("Illegal data type specified: " + dataType);
            }
        }

        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        rows.add(genericRow);
    }

    RecordReader recordReader = new GenericRowRecordReader(rows, schema);
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    driver.init(config, recordReader);
    driver.build();

    return recordReader;
}

From source file:edu.umn.cs.spatialHadoop.operations.Sampler.java

private static <T extends TextSerializable> int sampleLocalByCount(Path[] files, ResultCollector<T> output,
        OperationsParams params) throws IOException {

    ArrayList<Path> data_files = new ArrayList<Path>();
    for (Path file : files) {
        FileSystem fs = file.getFileSystem(params);
        if (fs.getFileStatus(file).isDir()) {
            // Directory, process all data files in this directory (visible files)
            FileStatus[] fileStatus = fs.listStatus(file, hiddenFileFilter);
            for (FileStatus f : fileStatus) {
                data_files.add(f.getPath());
            }
        } else {
            // File, process this file
            data_files.add(file);
        }
    }

    files = data_files.toArray(new Path[data_files.size()]);

    TextSerializable inObj1, outObj1;
    inObj1 = OperationsParams.getTextSerializable(params, "shape", new Text2());
    outObj1 = OperationsParams.getTextSerializable(params, "outshape", new Text2());

    // Make the objects final to be able to use in the anonymous inner class
    final TextSerializable inObj = inObj1;
    final T outObj = (T) outObj1;

    ResultCollector<TextSerializable> converter = createConverter(output, inObj, outObj);
    long[] files_start_offset = new long[files.length + 1]; // Prefix sum of files sizes
    long total_length = 0;
    for (int i_file = 0; i_file < files.length; i_file++) {
        FileSystem fs = files[i_file].getFileSystem(params);
        files_start_offset[i_file] = total_length;
        total_length += fs.getFileStatus(files[i_file]).getLen();
    }
    files_start_offset[files.length] = total_length;

    // Generate offsets to read from and make sure they are ordered to minimize
    // seeks between different HDFS blocks
    Random random = new Random(params.getLong("seed", System.currentTimeMillis()));
    long[] offsets = new long[params.getInt("count", 0)];
    for (int i = 0; i < offsets.length; i++) {
        if (total_length == 0)
            offsets[i] = 0;
        else
            offsets[i] = Math.abs(random.nextLong()) % total_length;
    }
    Arrays.sort(offsets);

    int record_i = 0; // Number of records read so far
    int records_returned = 0;

    int file_i = 0; // Index of the current file being sampled
    while (record_i < offsets.length) {
        // Skip to the file that contains the next sample
        while (offsets[record_i] > files_start_offset[file_i + 1])
            file_i++;

        long current_file_size = files_start_offset[file_i + 1] - files_start_offset[file_i];
        FileSystem fs = files[file_i].getFileSystem(params);
        ShapeLineRecordReader reader = new ShapeLineRecordReader(fs.getConf(),
                new FileSplit(files[file_i], 0, current_file_size, new String[] {}));
        Rectangle key = reader.createKey();
        Text line = reader.createValue();
        long pos = files_start_offset[file_i];

        while (record_i < offsets.length && offsets[record_i] <= files_start_offset[file_i + 1]
                && reader.next(key, line)) {
            pos += line.getLength();
            if (pos > offsets[record_i]) {
                // Passed the offset of record_i
                // Report this element to output
                if (converter != null) {
                    inObj.fromText(line);
                    converter.collect(inObj);
                }
                record_i++;
                records_returned++;
            }
        }
        reader.close();

        // Skip any remaining records that were supposed to be read from this file
        // This case might happen if a generated random position was in the middle
        // of the last line.
        while (record_i < offsets.length && offsets[record_i] <= files_start_offset[file_i + 1])
            record_i++;
    }
    return records_returned;
}