Example usage for org.apache.hadoop.conf Configuration Configuration

Introduction

On this page you can find example usage for the org.apache.hadoop.conf.Configuration constructor, Configuration().

Prototype

public Configuration() 

Document

A new configuration.
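
A minimal sketch of the no-argument constructor in use is shown below. A Configuration created this way is populated from Hadoop's default resources (core-default.xml and core-site.xml, if present on the classpath). The class name and property key in the sketch are illustrative, not standard Hadoop names.

import org.apache.hadoop.conf.Configuration;

public class ConfigurationSketch {
    public static void main(String[] args) {
        // A new configuration, loaded from the default resources on the classpath.
        Configuration conf = new Configuration();

        // Set a property programmatically and read it back with a default fallback.
        // "example.buffer.size" is an illustrative key, not a standard Hadoop property.
        conf.setInt("example.buffer.size", 4096);
        int bufferSize = conf.getInt("example.buffer.size", 1024);
        System.out.println("example.buffer.size = " + bufferSize);
    }
}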

Usage

From source file:FDFRW1.java

License:Open Source License

static void read(String filename) throws Exception {
    FormatDataFile fdf = new FormatDataFile(new Configuration());
    fdf.open(filename);
    for (int i = 0; i < 5000; i++) {
        Record rec = fdf.getRecordByLine(i);
        rec.show();
    }
    fdf.close();

}

From source file:TestFS.java

License:Open Source License

public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());

    System.out.println(fs.toString());

}

From source file:TestIFileInfo.java

License:Open Source License

public void testIFileInfoWrite() throws IOException {
    IFileInfo fileInfo;
    try {
        Configuration conf = new Configuration();
        fileInfo = new IFileInfo(conf);
        String fileName = "test";
        IHead head = new IHead();
        IFieldMap fieldMap = new IFieldMap();
        fieldMap.addFieldType(new IFieldType.IFieldByteType());
        fieldMap.addFieldType(new IFieldType.IFieldShortType());
        fieldMap.addFieldType(new IFieldType.IFieldIntType());
        fieldMap.addFieldType(new IFieldType.IFieldLongType());
        fieldMap.addFieldType(new IFieldType.IFieldFloatType());
        fieldMap.addFieldType(new IFieldType.IFieldDoubleType());
        head.setFieldMap(fieldMap);
        head.setPrimaryIndex((short) 2);
        IUserDefinedHeadInfo udi = new IUserDefinedHeadInfo();
        udi.addInfo(0, fileName);
        head.setUdi(udi);

        fileInfo.initialize(fileName, head);

        assertEquals(fileName, fileInfo.filename());
        assertTrue(!fileInfo.compressed());
        assertTrue(!fileInfo.isVar());
        assertTrue(fileInfo.havekeyindex());
        assertTrue(fileInfo.havelineindex());
        assertEquals(ConstVar.WS_Write, fileInfo.workStatus());

        fileInfo.setcurrentline(10);
        assertEquals(10, fileInfo.currentline());

        fileInfo.increasecurrentline();
        assertEquals(11, fileInfo.currentline());

        assertEquals(28, fileInfo.recordlen());
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:q4.java

@Override
public void init() {
    try {
        TEAM = "DEADLINE,276906431060,152339165514,931814217121\n";
        conf = new Configuration();
        conf.set("hbase.master", publicDNS + ":60000");
        conf.set("hbase.zookeeper.quorum", publicDNS);
        conf.setInt("hbase.zookeeper.property.maxClientCnxns", 100);
        connection = HConnectionManager.createConnection(conf);
        tableName = "q4";
        System.out.println("try connecting");
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:Inlinks.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: inlinks <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "inlinks");
    job.setJarByClass(Inlinks.class);
    job.setMapperClass(TokenizerMapper.class);
    //job.setCombinerClass(IdentityReducer.class);
    job.setReducerClass(IdentityReducer.class);
    job.setNumReduceTasks(10);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(IntWritable.class);
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:CloseHangTest.java

License:Apache License

public static void main(String[] args) throws Exception {
    if (args.length < 1) {
        System.out.println("CloseHangTest: must supply the HDFS uri.");
        System.exit(1);
    }
    String hdfsUri = args[0];
    final Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI(hdfsUri), conf);
    UploadManager manager = new UploadManager(fs);
    manager.startWorkers();
    manager.monitor();
}

From source file:DisplayClustering.java

License:Apache License

/**
 * This method plots points and colors them according to their cluster
 * membership, rather than drawing ellipses.
 *
 * As of this commit, this method is used only by K-Means spectral clustering.
 * Since the cluster assignments are made within the eigenspace of the data,
 * there is no guarantee that the original data cluster as they would under
 * K-Means, that is, as symmetric Gaussian mixtures.
 *
 * Since Spectral K-Means uses K-Means to cluster the eigenspace data, the raw
 * output is not directly usable. Rather, the cluster assignments from the raw
 * output need to be transferred back to the original data. As such, this
 * method will read the SequenceFile cluster results of K-means and transfer
 * the cluster assignments to the original data, coloring them appropriately.
 *
 * @param g2 the graphics context to draw into
 * @param data the output directory containing the clusteredPoints results
 */
protected static void plotClusteredSampleData(Graphics2D g2, Path data) {
    double sx = (double) res / DS;
    g2.setTransform(AffineTransform.getScaleInstance(sx, sx));

    g2.setColor(Color.BLACK);
    Vector dv = new DenseVector(2).assign(SIZE / 2.0);
    plotRectangle(g2, new DenseVector(2).assign(2), dv);
    plotRectangle(g2, new DenseVector(2).assign(-2), dv);

    // plot the sample data, colored according to the cluster they belong to
    dv.assign(0.03);

    Path clusteredPointsPath = new Path(data, "clusteredPoints");
    Path inputPath = new Path(clusteredPointsPath, "part-m-00000");
    Map<Integer, Color> colors = new HashMap<Integer, Color>();
    int point = 0;
    for (Pair<IntWritable, WeightedVectorWritable> record : new SequenceFileIterable<IntWritable, WeightedVectorWritable>(
            inputPath, new Configuration())) {
        int clusterId = record.getFirst().get();
        VectorWritable v = SAMPLE_DATA.get(point++);
        Integer key = clusterId;
        if (!colors.containsKey(key)) {
            colors.put(key, COLORS[Math.min(COLORS.length - 1, colors.size())]);
        }
        plotClusteredRectangle(g2, v.get(), dv, colors.get(key));
    }
}

From source file:DisplayClustering.java

License:Apache License

protected static void writeSampleData(Path output) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(output.toUri(), conf);
    SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, output, Text.class, VectorWritable.class);
    try {
        int i = 0;
        for (VectorWritable vw : SAMPLE_DATA) {
            writer.append(new Text("sample_" + i++), vw);
        }
    } finally {
        Closeables.close(writer, false);
    }
}

From source file:DisplayClustering.java

License:Apache License

protected static List<Cluster> readClustersWritable(Path clustersIn) {
    List<Cluster> clusters = Lists.newArrayList();
    Configuration conf = new Configuration();
    for (ClusterWritable value : new SequenceFileDirValueIterable<ClusterWritable>(clustersIn, PathType.LIST,
            PathFilters.logsCRCFilter(), conf)) {
        Cluster cluster = value.getValue();
        log.info("Reading Cluster:{} center:{} numPoints:{} radius:{}", cluster.getId(),
                AbstractCluster.formatVector(cluster.getCenter(), null), cluster.getNumObservations(),
                AbstractCluster.formatVector(cluster.getRadius(), null));
        clusters.add(cluster);
    }
    return clusters;
}

From source file:DisplayClustering.java

License:Apache License

protected static void loadClustersWritable(Path output) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(output.toUri(), conf);
    for (FileStatus s : fs.listStatus(output, new ClustersFilter())) {
        List<Cluster> clusters = readClustersWritable(s.getPath());
        CLUSTERS.add(clusters);
    }
}