com.bah.applefox.main.plugins.imageindex.ImageLoader.java Source code

Introduction

Here is the source code for com.bah.applefox.main.plugins.imageindex.ImageLoader.java, a Hadoop MapReduce Tool that scans crawled pages for images and loads image hashes and alt-text tags into Accumulo tables.

Source

/**
 * Copyright 2012 Booz Allen Hamilton. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Booz Allen Hamilton licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.bah.applefox.main.plugins.imageindex;

import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Collections;
import java.util.Set;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
import org.apache.accumulo.core.client.mapreduce.InputFormatBase;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.Tool;

import com.bah.applefox.main.plugins.imageindex.utilites.ImageHasher;
import com.bah.applefox.main.plugins.utilities.AccumuloUtils;
import com.bah.applefox.main.plugins.webcrawler.utilities.PageCrawlException;
import com.bah.applefox.main.plugins.webcrawler.utilities.WebPageCrawl;

/**
 * This class is used to load image hashes and tags into their respective
 * tables. The image hash is generated by the ImageHasher class in utilities.
 * The format for the hash table is RowID: hash | Column Family: image
 * location | Column Qualifier: parent URL | Value: hash. The format for the
 * tag table is RowID: tag | Column Family: image location | Column Qualifier:
 * parent URL | Value: hash.
 */
public class ImageLoader extends Configured implements Tool {
    // The error log
    private static Log log = LogFactory.getLog(ImageLoader.class);

    private static String checkedImages;
    private static String hashTable;
    private static String tagTable;
    private static String UserAgent;
    private static String divsFile;

    /**
     * MapperClass extends the Mapper class. It performs the map functionality
     * of MapReduce.
     * 
     */
    public static class MapperClass extends Mapper<Key, Value, Key, Value> {

        /**
         * Receives a URL (the row ID) from the URLs table in Accumulo and
         * emits it to the reducer only if it does not already appear in the
         * checked-images table.
         */
        @Override
        public void map(Key key, Value value, Context context) throws IOException, InterruptedException {

            Value v = new Value("0".getBytes());
            try {
                // Ensure the page has not yet been checked for images
                Scanner scan = AccumuloUtils.connectRead(checkedImages);
                scan.setRange(new Range(key.getRow()));
                if (!scan.iterator().hasNext()) {
                    context.write(new Key(key.getRow()), v);
                }
            } catch (AccumuloException e) {
                if (e.getMessage() != null) {
                    log.error(e.getMessage());
                } else {
                    log.error(e.getStackTrace());
                }
            } catch (AccumuloSecurityException e) {
                if (e.getMessage() != null) {
                    log.error(e.getMessage());
                } else {
                    log.error(e.getStackTrace());
                }
            } catch (TableNotFoundException e) {
                if (e.getMessage() != null) {
                    log.error(e.getMessage());
                } else {
                    log.error(e.getStackTrace());
                }
            }

        }
    }

    /**
     * ReducerClass extends Reducer and performs the reduce functionality of
     * MapReduce: for each unchecked URL it extracts image hashes and tags
     * via addImageHashes, then marks the URL as checked.
     */
    public static class ReducerClass extends Reducer<Key, Value, Key, Value> {
        @Override
        public void reduce(Key key, Iterable<Value> values, Context context)
                throws IOException, InterruptedException {
            //TODO use actual accumulo input and output formats.
            try {

                BatchWriter w = AccumuloUtils.connectBatchWrite(checkedImages);
                // Check the page for images and add them to the tables with
                // this method
                addImageHashes(key.getRow().toString(), UserAgent);

                // Write to the table that the page has been checked
                Mutation m = new Mutation(key.getRow().toString());
                m.put("0", "0", new Value("0".getBytes()));
                w.addMutation(m);
                w.close();

            } catch (AccumuloException e) {
                if (e.getMessage() != null) {
                    log.error(e.getMessage());
                } else {
                    log.error(e.getStackTrace());
                }
            } catch (AccumuloSecurityException e) {
                if (e.getMessage() != null) {
                    log.error(e.getMessage());
                } else {
                    log.error(e.getStackTrace());
                }
            } catch (TableNotFoundException e) {
                if (e.getMessage() != null) {
                    log.error(e.getMessage());
                } else {
                    log.error(e.getStackTrace());
                }
            } catch (TableExistsException e) {
                if (e.getMessage() != null) {
                    log.error(e.getMessage());
                } else {
                    log.error(e.getStackTrace());
                }
            } catch (PageCrawlException e) {
                log.error("Error crawling page", e);
            }
        }
    }

    public static void addImageHashes(String url, String UserAgent)
            throws MalformedURLException, IOException, AccumuloException, AccumuloSecurityException,
            TableNotFoundException, TableExistsException, PageCrawlException {

        // Use web page parser to find images
        System.out.println("Checking URL: " + url + " for images");
        WebPageCrawl iExtract = new WebPageCrawl(url, UserAgent, Collections.<String>emptySet());

        // Connect to the hash table
        BatchWriter hashTableWriter = AccumuloUtils.connectBatchWrite(hashTable);

        // Connect to the tag table
        BatchWriter tagTableWriter = AccumuloUtils.connectBatchWrite(tagTable);
        Value v = new Value("0".getBytes());

        // Loop through all of the images found
        for (String image : iExtract.getChildImages()) {
            try {
                // getChildImages returns strings of the form
                // "<image URL> <alt text>"; up to the first space is the
                // location of the image
                int firstSpace = image.indexOf(" ");
                System.out.println(image);

                String imURL = image.substring(0, firstSpace);

                // Everything after the first space is the alternative text
                String altText = image.toLowerCase().substring(firstSpace);
                // Set the hash
                String hash = ImageHasher.hash(imURL);

                v = new Value(hash.getBytes());

                Mutation m = new Mutation(hash);
                m.put(imURL, url, v);

                // Write to the hash table
                hashTableWriter.addMutation(m);

                // Split on the spaces in altText and write them to the table
                for (String oneTag : altText.split(" ")) {
                    if (oneTag.length() > 2) {
                        m = new Mutation(oneTag);
                        m.put(imURL, url, v);
                        tagTableWriter.addMutation(m);
                    }
                }

            } catch (Exception e) {
                log.error("Error adding " + image, e);
            }
        }

        // Flush and close all of the batch writers
        hashTableWriter.flush();
        tagTableWriter.flush();
        hashTableWriter.close();
        tagTableWriter.close();
    }
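
    /**
     * Illustrative sketch, not part of the original class: given the table
     * layout documented above, a lookup in the tag table is a scan over the
     * single row keyed by the tag, where each entry's column family is an
     * image location and its column qualifier is the parent URL. The method
     * name is hypothetical; it reuses AccumuloUtils.connectRead as seen in
     * MapperClass above.
     */
    private static void printImagesForTag(String tag) throws Exception {
        Scanner scan = AccumuloUtils.connectRead(tagTable);
        // Restrict the scan to the single row keyed by the tag
        scan.setRange(new Range(tag));
        for (java.util.Map.Entry<Key, Value> entry : scan) {
            System.out.println("image: " + entry.getKey().getColumnFamily()
                    + " found on: " + entry.getKey().getColumnQualifier());
        }
    }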

    /**
     * run takes the command-line args as arguments (in this case from a
     * configuration file), creates a new job, configures it, submits it,
     * waits for completion, and returns 0 if it succeeds (1 if it does not)
     * 
     * @param args
     *            the command-line arguments (in this case from a configuration
     *            file)
     * 
     * @return 0 if the job completed successfully, 1 otherwise
     */
    public int run(String[] args) throws Exception {

        checkedImages = args[18];
        hashTable = args[17];
        tagTable = args[19];
        divsFile = args[20];
        UserAgent = args[6];

        // Create the table
        AccumuloUtils.setSplitSize(args[23]);
        AccumuloUtils.connectBatchWrite(checkedImages).close();

        // Give the job a name
        String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

        // Create the job and set its jar
        Job job = new Job(getConf(), jobName);
        job.setJarByClass(this.getClass());

        // Set the url table to read from
        String urlTable = args[5];

        job.setInputFormatClass(AccumuloInputFormat.class);
        InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), urlTable,
                new Authorizations());

        job.setMapperClass(MapperClass.class);
        job.setMapOutputKeyClass(Key.class);
        job.setMapOutputValueClass(Value.class);

        job.setNumReduceTasks(Integer.parseInt(args[4]));

        job.setReducerClass(ReducerClass.class);

        job.setOutputFormatClass(AccumuloOutputFormat.class);
        job.setOutputKeyClass(Key.class);
        job.setOutputValueClass(Value.class);
        AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, urlTable);

        AccumuloUtils.setSplitSize(args[22]);

        job.waitForCompletion(true);

        return job.isSuccessful() ? 0 : 1;
    }
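
    /**
     * Illustrative sketch, not part of the original class: a Tool
     * implementation like this one is normally launched through Hadoop's
     * ToolRunner, which strips the generic Hadoop options before handing the
     * remaining arguments to run(). Fully qualified names are used so the
     * import list above can stay untouched.
     */
    public static void main(String[] args) throws Exception {
        int exitCode = org.apache.hadoop.util.ToolRunner.run(
                new org.apache.hadoop.conf.Configuration(), new ImageLoader(), args);
        System.exit(exitCode);
    }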

}
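
Notes

The listing depends on the project's ImageHasher class, whose implementation is not shown here. As a rough illustration only (an assumption, not the project's actual algorithm), a hasher of this shape could download the image's bytes and return a hex-encoded digest; the class and method names below are hypothetical stand-ins:

import java.io.InputStream;
import java.net.URL;
import java.security.MessageDigest;

// Hypothetical stand-in for the project's ImageHasher utility
public class SimpleImageHasher {

    // Downloads the image at the given URL and returns a lowercase
    // hex MD5 digest of its raw bytes
    public static String hash(String imageUrl) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        try (InputStream in = new URL(imageUrl).openStream()) {
            byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                md5.update(buffer, 0, read);
            }
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : md5.digest()) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }
}

A byte-level digest like this changes whenever the file's bytes change, so a perceptual hash may be closer to what an image index actually wants; the sketch only shows the plumbing.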