Example usage for org.apache.hadoop.conf Configuration setInt

Introduction

This page collects usage examples for org.apache.hadoop.conf.Configuration#setInt, drawn from open source projects.

Prototype

public void setInt(String name, int value) 

Document

Set the value of the name property to an int.
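For reference, a minimal sketch of the setInt/getInt round trip (the property name here is made up for illustration):

Configuration conf = new Configuration();
conf.setInt("example.max.retries", 3); // stored internally as the string "3"
int retries = conf.getInt("example.max.retries", 1); // 1 is the fallback default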

Usage

From source file:com.ricemap.spateDB.util.ImageOutputFormat.java

License:Apache License

public static void setImageHeight(Configuration conf, int height) {
    conf.setInt(ImageHeight, height);
}
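The class presumably exposes a matching getter; a hedged sketch of what reading the value back might look like (the fallback of 1000 is an assumption, not taken from the source):

public static int getImageHeight(Configuration conf) {
    // Falls back to an assumed default when the property was never set
    return conf.getInt(ImageHeight, 1000);
}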

From source file:com.rim.logdriver.sawmill.Sawmill.java

License:Apache License

public void run(String[] args) {
    if (args.length < 1) {
        System.out.println("Usage: " + this.getClass().getSimpleName() + " <config.properties>");
        System.exit(1);
    }

    LOG.info("Starting {}", Sawmill.class.getSimpleName());

    // First arg is the config
    String configFile = args[0];

    // Load configuration.
    Properties conf = new Properties();
    try {
        conf.load(new FileInputStream(configFile));
    } catch (FileNotFoundException e) {
        LOG.error("Config file not found.", e);
        System.exit(1);
    } catch (Throwable t) {
        LOG.error("Error reading config file.", t);
        System.exit(1);
    }

    // Parse the configuration.

    // Load in any Hadoop config files.
    Configuration hConf = new Configuration();
    {
        String[] hadoopConfs = Configs.hadoopConfigPaths.getArray(conf);
        for (String confPath : hadoopConfs) {
            hConf.addResource(new Path(confPath));
        }
        // Also, don't shut down my FileSystem automatically!!!
        hConf.setBoolean("fs.automatic.close", false);
        for (Entry<Object, Object> e : System.getProperties().entrySet()) {
            if (e.getValue() instanceof Integer) {
                hConf.setInt(e.getKey().toString(), (Integer) e.getValue());
            } else if (e.getValue() instanceof Long) {
                hConf.setLong(e.getKey().toString(), (Long) e.getValue());
            } else {
                hConf.set(e.getKey().toString(), e.getValue().toString());
            }
        }
    }

    // Ensure that UserGroupInformation is set up, and knows if security is
    // enabled.
    UserGroupInformation.setConfiguration(hConf);

    // Kerberos credentials. If these are not present, then it just won't try to
    // authenticate.
    String kerbConfPrincipal = Configs.kerberosPrincipal.get(conf);
    String kerbKeytab = Configs.kerberosKeytab.get(conf);
    Authenticator.getInstance().setKerbConfPrincipal(kerbConfPrincipal);
    Authenticator.getInstance().setKerbKeytab(kerbKeytab);

    // Check the number of threads for workers, and create the thread pools
    // for both workers and stats updates.
    int threadCount = Configs.threadpoolSize.getInteger(conf);
    final ScheduledExecutorService executor = Executors.newScheduledThreadPool(threadCount);

    // Get the MBean server
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();

    // Set up the Mina Exception Monitor
    ExceptionMonitor.setInstance(new ExceptionLoggerExceptionMonitor());

    // For each port->output mapping, create a path (listener, queue, worker).
    // List<DataPath> paths = new ArrayList<DataPath>();
    final List<IoAcceptor> acceptors = new ArrayList<IoAcceptor>();
    final List<Writer> writers = new ArrayList<Writer>();
    {
        String[] pathStrings = Configs.paths.getArray(conf);
        for (String p : pathStrings) {
            Properties pathConf = Util.subProperties(conf, "path." + p);

            String name = Configs.name.get(pathConf);
            if (name == null) {
                LOG.info("Path has no name.  Using {}", p);
                name = p;
            }
            LOG.info("[{}] Configuring path {}", name, name);

            // Check the properties for this specific instance
            Integer maxLineLength = Configs.tcpMaxLineLength.getInteger(pathConf);
            if (maxLineLength == null) {
                maxLineLength = Configs.defaultTcpMaxLineLength.getInteger(conf);
            }
            LOG.info("[{}] Maximum line length is {}", name, maxLineLength);

            InetAddress bindAddress = null;
            try {
                String address = Configs.bindAddress.get(pathConf);
                bindAddress = InetAddress.getByName(address);
            } catch (UnknownHostException e) {
                LOG.error("[{}] Error getting bindAddress from string {}",
                        new Object[] { name, pathConf.getProperty("bindAddress") }, e);
            }

            Integer port = Configs.port.getInteger(pathConf);
            if (port == null) {
                LOG.error("[{}] Port not set.  Skipping this path.", name);
                continue;
            }

            int queueLength = Configs.queueCapacity.getInteger(pathConf);

            // Set up the actual processing chain
            IoAcceptor acceptor = new NioSocketAcceptor();
            SocketSessionConfig sessionConfig = (SocketSessionConfig) acceptor.getSessionConfig();
            sessionConfig.setReuseAddress(true);
            acceptors.add(acceptor);

            String charsetName = Configs.charset.getString(pathConf);
            Charset charset = null;
            try {
                charset = Charset.forName(charsetName);
            } catch (UnsupportedCharsetException e) {
                LOG.error("[{}] Charset '{}' is not supported.  Defaulting to UTF-8.", name, charsetName);
                charset = Charset.forName("UTF-8");
            }
            LOG.info("[{}] Using character set {}", name, charset.displayName());
            TextLineCodecFactory textLineCodecFactory = new TextLineCodecFactory(charset, LineDelimiter.UNIX,
                    LineDelimiter.AUTO);
            textLineCodecFactory.setDecoderMaxLineLength(maxLineLength);
            acceptor.getFilterChain().addLast("textLineCodec", new ProtocolCodecFilter(textLineCodecFactory));

            int numBuckets = Configs.outputBuckets.getInteger(pathConf);
            if (numBuckets > 1) {
                // Set up multiple writers for one MultiEnqueueHandler
                @SuppressWarnings("unchecked")
                BlockingQueue<String>[] queues = new BlockingQueue[numBuckets];

                for (int i = 0; i < numBuckets; i++) {
                    BlockingQueue<String> queue = new ArrayBlockingQueue<String>(queueLength);
                    queues[i] = queue;

                    // Set up the processor on the other end.
                    Writer writer = new Writer();
                    writer.setName(name);
                    writer.setConfig(pathConf);
                    writer.setHadoopConf(hConf);
                    writer.setQueue(queue);
                    writer.init();

                    // Set up MBean for the Writer
                    {
                        ObjectName mbeanName = null;
                        try {
                            mbeanName = new ObjectName(Writer.class.getPackage().getName() + ":type="
                                    + Writer.class.getSimpleName() + " [" + i + "]" + ",name=" + name);
                        } catch (MalformedObjectNameException e) {
                            LOG.error("[{}] Error creating MBean name.", name, e);
                        } catch (NullPointerException e) {
                            LOG.error("[{}] Error creating MBean name.", name, e);
                        }
                        try {
                            mbs.registerMBean(writer, mbeanName);
                        } catch (InstanceAlreadyExistsException e) {
                            LOG.error("[{}] Error registering MBean name.", name, e);
                        } catch (MBeanRegistrationException e) {
                            LOG.error("[{}] Error registering MBean name.", name, e);
                        } catch (NotCompliantMBeanException e) {
                            LOG.error("[{}] Error registering MBean name.", name, e);
                        }
                    }

                    executor.scheduleWithFixedDelay(writer, 0, 100, TimeUnit.MILLISECONDS);
                    writers.add(writer);
                }

                MultiEnqueueHandler handler = new MultiEnqueueHandler(queues);
                acceptor.setHandler(handler);

                // Set up MBean for the MultiEnqueueHandler
                {
                    ObjectName mbeanName = null;
                    try {
                        mbeanName = new ObjectName(MultiEnqueueHandler.class.getPackage().getName() + ":type="
                                + MultiEnqueueHandler.class.getSimpleName() + ",name=" + name);
                    } catch (MalformedObjectNameException e) {
                        LOG.error("[{}] Error creating MBean name.", name, e);
                    } catch (NullPointerException e) {
                        LOG.error("[{}] Error creating MBean name.", name, e);
                    }
                    try {
                        mbs.registerMBean(handler, mbeanName);
                    } catch (InstanceAlreadyExistsException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    } catch (MBeanRegistrationException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    } catch (NotCompliantMBeanException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    }
                }
            } else {
                BlockingQueue<String> queue = new ArrayBlockingQueue<String>(queueLength);

                // Set up the processor on the other end.
                Writer writer = new Writer();
                writer.setName(name);
                writer.setConfig(pathConf);
                writer.setHadoopConf(hConf);
                writer.setQueue(queue);
                writer.init();

                // Set up MBean for the Writer
                {
                    ObjectName mbeanName = null;
                    try {
                        mbeanName = new ObjectName(Writer.class.getPackage().getName() + ":type="
                                + Writer.class.getSimpleName() + ",name=" + name);
                    } catch (MalformedObjectNameException e) {
                        LOG.error("[{}] Error creating MBean name.", name, e);
                    } catch (NullPointerException e) {
                        LOG.error("[{}] Error creating MBean name.", name, e);
                    }
                    try {
                        mbs.registerMBean(writer, mbeanName);
                    } catch (InstanceAlreadyExistsException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    } catch (MBeanRegistrationException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    } catch (NotCompliantMBeanException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    }
                }

                executor.scheduleWithFixedDelay(writer, 0, 100, TimeUnit.MILLISECONDS);
                writers.add(writer);

                EnqueueHandler handler = new EnqueueHandler(queue);
                acceptor.setHandler(handler);

                // Set up MBean for the EnqueueHandler
                {
                    ObjectName mbeanName = null;
                    try {
                        mbeanName = new ObjectName(EnqueueHandler.class.getPackage().getName() + ":type="
                                + EnqueueHandler.class.getSimpleName() + ",name=" + name);
                    } catch (MalformedObjectNameException e) {
                        LOG.error("[{}] Error creating MBean name.", name, e);
                    } catch (NullPointerException e) {
                        LOG.error("[{}] Error creating MBean name.", name, e);
                    }
                    try {
                        mbs.registerMBean(handler, mbeanName);
                    } catch (InstanceAlreadyExistsException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    } catch (MBeanRegistrationException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    } catch (NotCompliantMBeanException e) {
                        LOG.error("[{}] Error registering MBean name.", name, e);
                    }
                }
            }

            acceptor.getSessionConfig().setReadBufferSize(Configs.tcpReadBufferSize.getInteger(pathConf));
            acceptor.getSessionConfig().setIdleTime(IdleStatus.BOTH_IDLE, 5);

            while (true) {
                try {
                    acceptor.bind(new InetSocketAddress(bindAddress, port));
                } catch (IOException e) {
                    LOG.error("Error binding to {}:{}.  Retrying...", bindAddress, port);

                    try {
                        Thread.sleep(2000);
                    } catch (InterruptedException e1) {
                        // nothing
                    }

                    continue;
                }

                break;
            }

        }
    }

    // Register a shutdown hook..
    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {
            LOG.info("Shutting down");

            LOG.info("Unbinding and disposing of all IoAcceptors");
            for (IoAcceptor acceptor : acceptors) {
                acceptor.unbind();
                acceptor.dispose(true);
            }

            LOG.info("Shutting down worker threadpools.  This could take a little while.");
            executor.shutdown();
            try {
                executor.awaitTermination(10, TimeUnit.MINUTES);
            } catch (InterruptedException e) {
                LOG.error("Interrupted waiting for writer threadpool termination.", e);
            }
            if (!executor.isTerminated()) {
                LOG.error("Threadpool did not terminate cleanly.");
            }

            LOG.info("Cleaning out any remaining messages from the queues.");
            List<Thread> threads = new ArrayList<Thread>();
            for (final Writer writer : writers) {
                Runnable r = new Runnable() {
                    @Override
                    public void run() {
                        try {
                            writer.runAndClose();
                        } catch (Throwable t) {
                            LOG.error("Error shutting down writer [{}]", writer.getName(), t);
                        }
                    }
                };
                Thread t = new Thread(r);
                t.setDaemon(false);
                t.start();
                threads.add(t);
            }

            for (Thread t : threads) {
                try {
                    t.join();
                } catch (InterruptedException e) {
                    LOG.error("Interrupted waiting for thread to finish.");
                }
            }

            LOG.info("Closing filesystems.");
            try {
                FileSystem.closeAll();
            } catch (Throwable t) {
                LOG.error("Error closing filesystems.", t);
            }

            LOG.info("Finished shutting down cleanly.");
        }
    });
}

From source file:com.savy3.util.DBConfiguration.java

License:Apache License

/**
 * Sets the DB access related fields in the {@link Configuration}.
 * @param conf the configuration
 * @param driverClass JDBC Driver class name
 * @param dbUrl JDBC DB access URL
 * @param userName DB access username
 * @param passwd DB access passwd
 * @param fetchSize DB fetch size
 * @param connectionParams JDBC connection parameters
 */
public static void configureDB(Configuration conf, String driverClass, String dbUrl, String userName,
        String passwd, Integer fetchSize, Properties connectionParams) {

    conf.set(DRIVER_CLASS_PROPERTY, driverClass);
    conf.set(URL_PROPERTY, dbUrl);
    if (userName != null) {
        conf.set(USERNAME_PROPERTY, userName);
    }
    if (passwd != null) {
        setPassword((JobConf) conf, passwd);
    }
    if (fetchSize != null) {
        conf.setInt(FETCH_SIZE, fetchSize);
    }
    if (connectionParams != null) {
        conf.set(CONNECTION_PARAMS_PROPERTY, propertiesToString(connectionParams));
    }

}
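A hypothetical call site, with placeholder connection details, might look like the following. Note that configureDB casts conf to JobConf when a password is supplied, so a JobConf is passed here:

JobConf conf = new JobConf();
DBConfiguration.configureDB(conf,
        "com.mysql.jdbc.Driver",          // JDBC driver class
        "jdbc:mysql://localhost/mydb",    // placeholder URL
        "dbuser",                         // placeholder username
        "dbpass",                         // placeholder password
        1000,                             // fetch size, stored via conf.setInt(FETCH_SIZE, ...)
        null);                            // no extra connection parameters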

From source file:com.scaleoutsoftware.soss.hserver.GridOutputFormat.java

License:Apache License

/**
 * Sets the {@link NamedMap} to direct output to.
 *
 * @param job job to modify
 * @param map named map to be used for output
 */
public static void setNamedMap(Job job, NamedMap map) {
    Configuration configuration = job.getConfiguration();
    configuration.setBoolean(outputIsNamedMapProperty, true);
    configuration.setStrings(outputNamedMapProperty, map.getMapName());
    CustomSerializer keySerializer = map.getKeySerializer();
    CustomSerializer valueSerializer = map.getValueSerializer();
    SerializationMode serializationMode = map.getSerializationMode();
    AvailabilityMode availabilityMode = map.getAvailabilityMode();
    configuration.setInt(SERIALIZATION_MODE, serializationMode.ordinal());
    configuration.setInt(AVAILABILITY_MODE, availabilityMode.ordinal());
    configuration.setClass(outputNamedMapKeySerializerProperty, keySerializer.getClass(), Object.class);
    configuration.setClass(outputNamedMapValueSerializerProperty, valueSerializer.getClass(), Object.class);
    if (keySerializer.getObjectClass() != null) {
        configuration.setClass(outputNamedMapKeyProperty, keySerializer.getObjectClass(), Object.class);
    }
    if (valueSerializer.getObjectClass() != null) {
        configuration.setClass(outputNamedMapValueProperty, valueSerializer.getObjectClass(), Object.class);
    }
}
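Since setInt stores the enum's ordinal, the read side presumably reconstructs the enum from the stored int; a sketch under that assumption (the default of 0 is a guess, not taken from the source):

int ordinal = configuration.getInt(SERIALIZATION_MODE, 0);
SerializationMode serializationMode = SerializationMode.values()[ordinal];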

From source file:com.scaleoutsoftware.soss.hserver.NamedMapInputFormat.java

License:Apache License

/**
 * Sets {@link com.scaleoutsoftware.soss.client.map.NamedMap} as an input source for the job.
 *
 * @param job job to modify
 * @param map name of the map to be used as a job input
 * @param <K> the type of the key
 * @param <V> the type of the value
 */
public static <K, V> void setNamedMap(Job job, NamedMap<K, V> map) {
    Configuration configuration = job.getConfiguration();
    configuration.setInt(inputAppIdProperty, map.getMapId());
    CustomSerializer<K> keySerializer = map.getKeySerializer();
    CustomSerializer<V> valueSerializer = map.getValueSerializer();
    SerializationMode serializationMode = map.getSerializationMode();
    AvailabilityMode availabilityMode = map.getAvailabilityMode();
    configuration.setInt(SERIALIZATION_MODE, serializationMode.ordinal());
    configuration.setInt(AVAILABILITY_MODE, availabilityMode.ordinal());
    configuration.setClass(inputNamedMapKeySerializerProperty, keySerializer.getClass(), Object.class);
    configuration.setClass(inputNamedMapValueSerializerProperty, valueSerializer.getClass(), Object.class);
    if (keySerializer.getObjectClass() != null) {
        configuration.setClass(inputNamedMapKeyProperty, keySerializer.getObjectClass(), Object.class);
    }
    if (valueSerializer.getObjectClass() != null) {
        configuration.setClass(inputNamedMapValueProperty, valueSerializer.getObjectClass(), Object.class);
    }

}

From source file:com.scaleoutsoftware.soss.hserver.Test_MapToMapCopy.java

License:Apache License

public static void main(String argv[]) throws Exception {
    final NamedMap<IntWritable, Text> inputMap = NamedMapFactory.getMap("map-i",
            new WritableSerializer(IntWritable.class), new WritableSerializer(Text.class));
    final NamedMap<IntWritable, Text> outputMap = NamedMapFactory.getMap("map-o",
            new WritableSerializer(IntWritable.class), new WritableSerializer(Text.class));
    inputMap.clear();
    outputMap.clear();
    Thread.sleep(10000);
    BulkLoader<IntWritable, Text> put = inputMap.getBulkLoader();
    String content = "xcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
    Text contentW = new Text(content);

    IntWritable count = new IntWritable();
    for (int i = 0; i < 1000; i++) {
        count.set(i);
        put.put(count, contentW);
    }
    put.close();

    InvocationGrid grid = HServerJob.getInvocationGridBuilder("MyGrid" + System.currentTimeMillis())
            .addClass(Test_MapToMapCopy.class).load();

    HServerJob job;
    Configuration configuration;

    for (int i = 0; i < 100; i++) {
        // MMF
        configuration = new Configuration();
        configuration.setInt("mapred.hserver.setting.reducer.usememorymappedfiles", 1);
        configuration.setInt("mapred.hserver.setting.namedmap.usememorymappedfiles", 1);
        configuration.setInt("mapred.hserver.setting.map.maxtempmemorykb", 100000);
        job = new HServerJob(configuration, "Sample job");
        job.setGrid(grid);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);
        job.setInputFormatClass(NamedMapInputFormat.class);
        job.setOutputFormatClass(GridOutputFormat.class);
        NamedMapInputFormat.setNamedMap(job, inputMap);
        NamedMapInputFormat.setSuggestedNumberOfSplits(job, 64);
        GridOutputFormat.setNamedMap(job, outputMap);
        job.waitForCompletion(false);
        assertEquals(inputMap.size(), outputMap.size());
        outputMap.clear();
    }
    grid.unload();
}

From source file:com.scaleoutsoftware.soss.hserver.Test_NoReducer.java

License:Apache License

public static void main(String[] args) throws Exception {

    writeFile();
    DataAccessor.clearAllObjects();

    Configuration conf = new Configuration();
    conf.setInt("mapred.hserver.setting.reducer.usememorymappedfiles", 0);

    String in = args.length == 2 ? args[0] : "random.txt";
    String out = args.length == 2 ? args[1]
            : "c:\\development\\mapred_output\\dir" + System.currentTimeMillis();

    HServerJob job;

    job = new HServerJob(conf);
    job.setJarPath("/path/to/your/classes.jar");
    long time = System.currentTimeMillis();
    job.setJarByClass(Test_WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setNumReduceTasks(0);
    job.setMapOutputKeyClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(in));
    FileOutputFormat.setOutputPath(job, new Path(out + System.currentTimeMillis()));
    job.waitForCompletion(true);
    System.out.println("Job done in: " + (System.currentTimeMillis() - time));
}

From source file:com.scaleoutsoftware.soss.hserver.Test_WordCount.java

License:Apache License

public static void main(String[] args) throws Exception {
    writeFile();
    DataAccessor.clearAllObjects();

    Configuration conf = new Configuration();
    conf.setInt("mapred.hserver.setting.reducer.usememorymappedfiles", 0);

    String in = args.length == 2 ? args[0] : "random.txt";
    String out = args.length == 2 ? args[1]
            : "c:\\development\\mapred_output\\dir" + System.currentTimeMillis();

    HServerJob job;
    job = new HServerJob(conf, "overrides", true);

    Job job1 = job;
    // check overrides
    System.out.println("Check to ensure casting is correct..." + job.isSuccessful() + job1.isSuccessful());

    // With phase1, run several times to test recording and replaying
    long time = System.currentTimeMillis();
    // check runtime
    for (int i = 0; i < 3; i++) {
        job = new HServerJob(conf, "Job #" + i, true);
        // Need to manually edit this per deployment
        job.setJarPath("/path/to/your/classes.jar");
        job.setJarByClass(Test_WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setNumReduceTasks(8);
        FileInputFormat.addInputPath(job, new Path(in));
        FileOutputFormat.setOutputPath(job, new Path(out + System.currentTimeMillis()));
        job.waitForCompletion(true);
    }
    System.out.println("Job done in " + (System.currentTimeMillis() - time) / 10);

    //Without combiner
    job = new HServerJob(conf);
    job.setJarPath("/path/to/your/classes.jar");
    time = System.currentTimeMillis();
    job.setJarByClass(Test_WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setReducerClass(IntSumReducer.class);
    job.setMapOutputKeyClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setNumReduceTasks(8);
    FileInputFormat.addInputPath(job, new Path(in));
    FileOutputFormat.setOutputPath(job, new Path(out + System.currentTimeMillis()));
    job.waitForCompletion(true);
    System.out.println("Job done in " + (System.currentTimeMillis() - time));
}

From source file:com.skp.experiment.cf.als.hadoop.DistributedParallelALSFactorizationJob.java

License:Apache License

private void runSolver(Path ratings, Path output, Path pathToUorI, Path pathToTranspose, int rowNums)
        throws ClassNotFoundException, IOException, InterruptedException {

    @SuppressWarnings("rawtypes")
    Class<? extends Mapper> solverMapper = null;
    if (implicitFeedback) {
        solverMapper = SolveImplicitFeedbackMapper.class;
    } else {
        solverMapper = SolveExplicitFeedbackMapper.class;
    }

    Job solverForUorI = prepareJob(ratings, output, SequenceFileInputFormat.class, solverMapper,
            IntWritable.class, VectorWritable.class, SequenceFileOutputFormat.class);

    Configuration solverConf = solverForUorI.getConfiguration();
    solverConf.set(LAMBDA, String.valueOf(lambda));
    solverConf.set(ALPHA, String.valueOf(alpha));
    solverConf.setInt(NUM_FEATURES, numFeatures);
    solverConf.set(FEATURE_MATRIX, pathToUorI.toString());
    solverConf.set(FEATURE_MATRIX_TRANSPOSE, pathToTranspose.toString());
    solverConf.setInt("rowNums", rowNums);
    solverConf.set("mapred.child.java.opts", SMALL_MATRIX_MEMORY);
    solverConf.setBoolean("mapred.map.tasks.speculative.execution", false);
    solverConf.setInt("mapred.job.reuse.jvm.num.tasks", -1);
    solverConf.setBoolean("mapred.compress.map.output", true);
    solverConf.set("mapred.map.output.compression.codec", LZO_CODEC_CLASS);
    solverForUorI.waitForCompletion(true);
}
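Values stored with setInt on the job configuration are typically read back in the mapper's setup(); a minimal sketch, assuming the key name above and an invented default (this mapper skeleton is illustrative, not the project's actual solver mapper):

public static class SolverMapperSketch extends Mapper<IntWritable, VectorWritable, IntWritable, VectorWritable> {
    private int rowNums;

    @Override
    protected void setup(Context context) {
        Configuration conf = context.getConfiguration();
        // "rowNums" matches the key written by runSolver above; 0 is an assumed default
        rowNums = conf.getInt("rowNums", 0);
    }
}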

From source file:com.skp.experiment.cf.als.hadoop.DistributedParallelALSFactorizationJob.java

License:Apache License

private void runDistributedImplicitSolver(Path ratings, Path output, Path pathToUorI, Path pathToTranspose,
        int rowNums) throws IOException, InterruptedException, ClassNotFoundException {
    @SuppressWarnings("rawtypes")
    Class<? extends Mapper> solverMapper = DistributedSolveImplicitFeedbackMapper.class;
    Job solverForUorI = prepareJob(ratings, output, SequenceFileInputFormat.class, solverMapper,
            IntWritable.class, VectorWritable.class, SequenceFileOutputFormat.class);

    Configuration solverConf = solverForUorI.getConfiguration();

    solverConf.setLong("mapred.min.split.size", dfsBlockSize);
    solverConf.setLong("mapred.max.split.size", dfsBlockSize);
    solverConf.setBoolean("mapred.map.tasks.speculative.execution", false);
    solverConf.setInt("mapred.map.tasks", LARGE_MATRIX_MAP_TASKS_NUM);
    solverConf.setLong("mapred.task.timeout", 600000 * 5);
    solverConf.setInt("mapred.job.reuse.jvm.num.tasks", -1);
    solverConf.set("mapred.child.java.opts", SMALL_MATRIX_MEMORY);

    solverConf.set(LAMBDA, String.valueOf(lambda));
    solverConf.set(ALPHA, String.valueOf(alpha));
    solverConf.setInt(NUM_FEATURES, numFeatures);
    solverConf.set(FEATURE_MATRIX, pathToUorI.toString());
    solverConf.set(FEATURE_MATRIX_TRANSPOSE, pathToTranspose.toString());
    solverConf.setInt("rowNums", rowNums);
    solverConf.setBoolean("mapred.compress.map.output", true);
    solverConf.set("mapred.map.output.compression.codec", LZO_CODEC_CLASS);
    solverForUorI.waitForCompletion(true);
}