Example usage for com.amazonaws AmazonServiceException getErrorType

List of usage examples for com.amazonaws AmazonServiceException getErrorType

Introduction

On this page you can find example usage for com.amazonaws AmazonServiceException getErrorType.

Prototype

public ErrorType getErrorType() 

Document

Indicates who is responsible for this exception (caller, service, or unknown).
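
The returned value is the nested enum com.amazonaws.AmazonServiceException.ErrorType, whose constants are Client, Service and Unknown, so callers usually branch on the result. A minimal sketch of that pattern (the fault-classification logic shown is illustrative only, not something the SDK prescribes):

import com.amazonaws.AmazonServiceException;

public class ErrorTypeCheck {

    /** Illustrative helper: true when the request itself was at fault. */
    static boolean callerIsAtFault(AmazonServiceException ase) {
        switch (ase.getErrorType()) {
        case Client:
            return true;   // the request was rejected as invalid; resending it unchanged will fail again
        case Service:
        case Unknown:
        default:
            return false;  // the service (or something in between) failed; a retry may succeed
        }
    }
}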

Usage

From source file:org.apache.flink.streaming.connectors.kinesis.proxy.KinesisProxy.java

License:Apache License

/**
 * Determines whether the exception is recoverable using exponential-backoff.
 *
 * @param ex Exception to inspect
 * @return <code>true</code> if the exception can be recovered from, else
 *         <code>false</code>
 */
protected static boolean isRecoverableException(AmazonServiceException ex) {
    if (ex.getErrorType() == null) {
        return false;
    }

    switch (ex.getErrorType()) {
    case Client:
        return ex instanceof ProvisionedThroughputExceededException;
    case Service:
    case Unknown:
        return true;
    default:
        return false;
    }
}
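
A predicate like this is normally consulted from a backoff loop. Below is a hypothetical sketch of such a loop, assumed to live in the same class so it can call isRecoverableException; the getRecords call, the attempt limit and the sleep constants are illustrative assumptions, not Flink's actual retry code:

private static GetRecordsResult getRecordsWithBackoff(AmazonKinesis kinesis, GetRecordsRequest request)
        throws InterruptedException {
    int attempt = 0;
    while (true) {
        try {
            return kinesis.getRecords(request);
        } catch (AmazonServiceException ex) {
            if (!isRecoverableException(ex) || ++attempt > 3) {
                throw ex; // caller-side error or retries exhausted
            }
            Thread.sleep(100L << attempt); // exponential backoff: 200ms, 400ms, 800ms (assumed constants)
        }
    }
}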

From source file:org.apache.hadoop.fs.s3a.S3AFileSystem.java

License:Apache License

private void printAmazonServiceException(AmazonServiceException ase) {
    LOG.info("Caught an AmazonServiceException, which means your request made it "
            + "to Amazon S3, but was rejected with an error response for some reason.");
    LOG.info("Error Message: " + ase.getMessage());
    LOG.info("HTTP Status Code: " + ase.getStatusCode());
    LOG.info("AWS Error Code: " + ase.getErrorCode());
    LOG.info("Error Type: " + ase.getErrorType());
    LOG.info("Request ID: " + ase.getRequestId());
    LOG.info("Class Name: " + ase.getClass().getName());
}

From source file:org.apache.hadoop.fs.s3a.S3AUtils.java

License:Apache License

/**
 * Get low level details of an amazon exception for logging; multi-line.
 * @param e exception
 * @return string details
 */
public static String stringify(AmazonServiceException e) {
    StringBuilder builder = new StringBuilder(String.format("%s: %s error %d: %s; %s%s%n", e.getErrorType(),
            e.getServiceName(), e.getStatusCode(), e.getErrorCode(), e.getErrorMessage(),
            (e.isRetryable() ? " (retryable)" : "")));
    String rawResponseContent = e.getRawResponseContent();
    if (rawResponseContent != null) {
        builder.append(rawResponseContent);
    }
    return builder.toString();
}
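
A caller would typically feed the result straight to a logger when an S3 call fails; a hypothetical usage line (LOG stands for whatever SLF4J logger the calling class already has):

LOG.error("S3 operation failed: {}", S3AUtils.stringify(e));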

From source file:org.apache.nifi.processors.aws.dynamodb.AbstractDynamoDBProcessor.java

License:Apache License

protected List<FlowFile> processServiceException(final ProcessSession session, List<FlowFile> flowFiles,
        AmazonServiceException exception) {
    List<FlowFile> failedFlowFiles = new ArrayList<>();
    for (FlowFile flowFile : flowFiles) {
        Map<String, String> attributes = new HashMap<>();
        attributes.put(DYNAMODB_ERROR_EXCEPTION_MESSAGE, exception.getMessage());
        attributes.put(DYNAMODB_ERROR_CODE, exception.getErrorCode());
        attributes.put(DYNAMODB_ERROR_MESSAGE, exception.getErrorMessage());
        attributes.put(DYNAMODB_ERROR_TYPE, exception.getErrorType().name());
        attributes.put(DYNAMODB_ERROR_SERVICE, exception.getServiceName());
        attributes.put(DYNAMODB_ERROR_RETRYABLE, Boolean.toString(exception.isRetryable()));
        attributes.put(DYNAMODB_ERROR_REQUEST_ID, exception.getRequestId());
        attributes.put(DYNAMODB_ERROR_STATUS_CODE, Integer.toString(exception.getStatusCode()));
        flowFile = session.putAllAttributes(flowFile, attributes);
        failedFlowFiles.add(flowFile);
    }
    return failedFlowFiles;
}
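
The returned list of failed flow files is then typically transferred to a failure relationship by the calling processor; a hypothetical follow-up (REL_FAILURE names a relationship assumed to be defined by the concrete processor):

List<FlowFile> failed = processServiceException(session, flowFiles, exception);
session.transfer(failed, REL_FAILURE);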

From source file:org.apache.nifi.processors.aws.lambda.PutLambda.java

License:Apache License

/**
 * Populate exception attributes in the flow file
 * @param session process session
 * @param flowFile the flow file
 * @param exception exception thrown during invocation
 * @return FlowFile the updated flow file
 */
private FlowFile populateExceptionAttributes(final ProcessSession session, FlowFile flowFile,
        final AmazonServiceException exception) {
    Map<String, String> attributes = new HashMap<>();
    attributes.put(AWS_LAMBDA_EXCEPTION_MESSAGE, exception.getErrorMessage());
    attributes.put(AWS_LAMBDA_EXCEPTION_ERROR_CODE, exception.getErrorCode());
    attributes.put(AWS_LAMBDA_EXCEPTION_REQUEST_ID, exception.getRequestId());
    attributes.put(AWS_LAMBDA_EXCEPTION_STATUS_CODE, Integer.toString(exception.getStatusCode()));
    if (exception.getCause() != null)
        attributes.put(AWS_LAMBDA_EXCEPTION_CAUSE, exception.getCause().getMessage());
    attributes.put(AWS_LAMBDA_EXCEPTION_ERROR_TYPE, exception.getErrorType().toString());
    flowFile = session.putAllAttributes(flowFile, attributes);
    return flowFile;
}

From source file:org.apache.s4.serializer.dynamodb.EventCountAndReportPE.java

License:Apache License

public void onEvent(TopicEvent event) {
    if (firstEvent) {
        logger.info("Handling new Event [{}]", getId());
        firstEvent = false;
        firstInsert = true;
    }
    count += event.getCount();
    //        countUsedEvents++; // SB
    //        logger.info("Used Data Events counter [{}]", countUsedEvents); // SB

    if (false) { // BEGINNING OF THE BLOCK!!!!!!!!!!!

        if (firstInsert) {

            firstInsert = false;

            try {

                // Data fusion config file:
                try {
                    //              File fusionPropsFile = new File(System.getProperty("user.home") + "/DataFusion.properties");
                    File fusionPropsFile = new File("/home/ec2-user/DataFusion.properties");
                    if (!fusionPropsFile.exists()) {

                        fusionPropsFile = new File(System.getProperty("user.home") + "/DataFusion.properties");
                        if (!fusionPropsFile.exists()) {
                            logger.error(
                                    "Cannot find Data fusion properties file in this location :[{}]. Make sure it is available at this place and includes AWS credentials (accessKey, secretKey)",
                                    fusionPropsFile.getAbsolutePath());
                        }
                    }
                    fusionProperties.load(new FileInputStream(fusionPropsFile));
                    accuracy = Double.parseDouble(fusionProperties.getProperty("accuracy"));
                    confidence = Double.parseDouble(fusionProperties.getProperty("confidence"));

                } catch (Exception e) {
                    logger.error("Cannot find Data fusion config file", e);
                }

                // Create and configure DynamoDB client
                AWSCredentials credentials = new BasicAWSCredentials(awsProperties.getProperty("accessKey"),
                        awsProperties.getProperty("secretKey"));

                AmazonDynamoDBClient dynamoDBClient = new AmazonDynamoDBClient(credentials);
                logger.info("Create DynamoDB client");
                dynamoDBClient.setEndpoint("dynamodb.eu-west-1.amazonaws.com");
                logger.info("DynamoDB client credentials are accepted and endpoint selected");

                //                try {

                // Extracted context, e.g query, activity
                String searchQueryAPI = "Test KnowledgeDiscovery API Query";
                String object = "Object detected";

                Map<String, AttributeValue> itemRT = new HashMap<String, AttributeValue>();
                Map<String, AttributeValue> itemDQ = new HashMap<String, AttributeValue>();

                Iterable<String> dataSplit = Splitter.on(' ').omitEmptyStrings().trimResults().split(getId());
                // List<String> dataList = Lists.newArrayList(Elements.getElements(dataSplit));
                // String receivedMsgs = dataList.get(dataList.size()-1);
                // countReceivedMsgs = Integer.parseInt(receivedMsgs);;

                int i = 0;
                for (String token : dataSplit) {
                    i++;
                    receivedMsgs = token;
                }
                int k = 0;
                for (String token : dataSplit) {
                    k++;
                    if (k == (i - 2)) {
                        receivedAppID = token;
                    } else if (k == (i - 1)) {
                        receivedUserID = token;
                    }
                }

                appID = Double.parseDouble(receivedAppID);
                userID = Double.parseDouble(receivedUserID);

                // STUPID HARDCODE but fast for prototype, should change to class later :)
                if (appID == 0 && userID > 0) {
                    // CV app and serialization table
                    rtEventsTableName = "TableEventVector_CV";
                    tableDataQuality = "EventVectorQuality_CV";
                    db_orig = db_base_dir + "/cv.db";
                    countReceivedMsgs_CV = Integer.parseInt(receivedMsgs) - countReceivedMsgsPrev_CV;
                    countReceivedMsgsPrev_CV = Integer.parseInt(receivedMsgs);
                    countUsedMsgs_CV++;
                    countReceivedMsgs = countReceivedMsgs_CV;
                    countUsedMsgs = countUsedMsgs_CV;
                } else if (appID == 1 && userID > 0) {
                    // NLP
                    rtEventsTableName = "TableEventVector_NLP";
                    tableDataQuality = "EventVectorSetQuality_NLP";
                    db_orig = db_base_dir + "/nlp.db";
                    countReceivedMsgs_NLP = Integer.parseInt(receivedMsgs) - countReceivedMsgsPrev_NLP;
                    countReceivedMsgsPrev_NLP = Integer.parseInt(receivedMsgs);
                    countUsedMsgs_NLP++;
                    countReceivedMsgs = countReceivedMsgs_NLP;
                    countUsedMsgs = countUsedMsgs_NLP;
                } else if (appID == 2 && userID > 0) {
                    // Audio
                    rtEventsTableName = "TableEventVector_Audio";
                    tableDataQuality = "EventVectorQuality_Audio";
                    db_orig = db_base_dir + "/audio.db";
                    countReceivedMsgs_Audio = Integer.parseInt(receivedMsgs) - countReceivedMsgsPrev_Audio;
                    countReceivedMsgsPrev_Audio = Integer.parseInt(receivedMsgs);
                    countUsedMsgs_Audio++;
                    countReceivedMsgs = countReceivedMsgs_Audio;
                    countUsedMsgs = countUsedMsgs_Audio;
                } else {
                    // all others Events available in DB
                    rtEventsTableName = "TableEventVector";
                    tableDataQuality = "EventVectorQuality";
                    countReceivedMsgs = Integer.parseInt(receivedMsgs) - countReceivedMsgsPrev;
                    countReceivedMsgsPrev = Integer.parseInt(receivedMsgs);
                    countUsedMsgs++;
                }

                try {
                    // Users database connection
                    db_conn = DriverManager.getConnection("jdbc:sqlite:" + db_orig);

                    //Actual invocation of Users DB without "rating" field
                    db_stmt = db_conn.prepareStatement(
                            "SELECT id, title, country, name, surname FROM user WHERE appID = ? AND userID = ?");
                    db_stmt.setDouble(1, userID);
                    db_stmt.setDouble(2, appID);
                    rs = db_stmt.executeQuery();

                    // Index updates/inserts
                    String ID = rs.getString(1);
                    String location = rs.getString(2);
                    String country = rs.getString(3);
                    String name = rs.getString(4);
                    String surname = rs.getString(5);

                    // resultSet adjustment according to the Accuracy and Confidence levels (1 / number of results and multiplied by 100%)
                    accuracyRT = (1 / rs.getFetchSize()) * 100;
                    confidence = sqrt(accuracyRT * accuracyRT + accuracy * accuracy);

                    // Collect to DynamoDB items (CandidateSet and CandidateSetQuality)

                    itemRT.put("id", new AttributeValue().withS(placesID));
                    itemRT.put("country", new AttributeValue().withS(country));
                    itemRT.put("name", new AttributeValue().withS(String.valueOf(lat)));
                    itemRT.put("surname", new AttributeValue().withS(String.valueOf(lon)));
                    itemRT.put("query", new AttributeValue().withS(searchQueryAPI));
                    itemRT.put("rating", new AttributeValue().withN(String.valueOf(count)));
                    itemRT.put("title", new AttributeValue().withS(location));
                    itemRT.put("topic", new AttributeValue().withS(getId()));
                    itemRT.put("event", new AttributeValue().withS(activity));
                    itemRT.put("ts", new AttributeValue().withS(dateFormatter.format(new Date())));

                    itemDQ.put("TimeStamp", new AttributeValue().withS(dateFormatter.format(new Date())));
                    itemDQ.put("ReceivedMsgs", new AttributeValue().withN(String.valueOf(countReceivedMsgs)));
                    itemDQ.put("UsedMsgs", new AttributeValue().withN(String.valueOf(countUsedMsgs)));
                    itemDQ.put("Accuracy", new AttributeValue().withN(String.valueOf(count)));
                    itemDQ.put("Timeliness", new AttributeValue().withS(dateFormatter.format(new Date())));
                    itemDQ.put("Completeness", new AttributeValue().withN(String.valueOf(count)));
                    itemDQ.put("Consistency", new AttributeValue().withN(String.valueOf(count)));
                    itemDQ.put("Confidence", new AttributeValue().withN(String.valueOf(count)));
                    itemDQ.put("Privacy", new AttributeValue().withS("anonymised"));

                    PutItemRequest itemRequestRT = new PutItemRequest().withTableName(rtEventsTableName)
                            .withItem(itemRT);
                    PutItemRequest itemRequestDQ = new PutItemRequest().withTableName(tableDataQuality)
                            .withItem(itemDQ);
                    dynamoDBClient.putItem(itemRequestRT);
                    dynamoDBClient.putItem(itemRequestDQ);
                    itemRT.clear();
                    itemDQ.clear();

                    logger.info("TableEvent set size [{}], last known size [{}] ", countReceivedMsgs,
                            countReceivedMsgsPrev);
                    logger.info("Wrote EventVector to DynamoDB [{}] ", rtEventsTableName);
                    logger.info("Wrote EventVector Quality measurements to DynamoDB [{}] ", tableDataQuality);

                    // Closing second "try"
                } catch (Exception e) {
                    //                logger.error("Cannot close DB file", e);
                } finally {
                    try {
                        rs.close();
                    } catch (SQLException e) {
                        logger.error("Cannot close ResultSet", e);
                    }
                    try {
                        db_stmt.close();
                    } catch (SQLException e) {
                        logger.error("Cannot close Statement", e);
                    }
                    try {
                        db_conn.close();
                    } catch (SQLException e) {
                        logger.error("Cannot close DB file", e);
                    }
                }
                // Closing first "try"
            } catch (AmazonServiceException ase) {
                logger.error(
                        "Caught an AmazonServiceException, which means your request made it to AWS, but was rejected with an error response for some reason.");
                logger.error("Error Message: " + ase.getMessage());
                logger.error("HTTP Status Code: " + ase.getStatusCode());
                logger.error("AWS Error Code: " + ase.getErrorCode());
                logger.error("Error Type: " + ase.getErrorType());
                logger.error("Request ID: " + ase.getRequestId());

            }

        } // end of if (count == 1)

    } // END OF THE BLOCK !!!!!!!!!!!!!!!

}

From source file:org.apereo.portal.portlets.dynamicskin.storage.s3.AwsS3DynamicSkinService.java

License:Apache License

private void logAmazonServiceException(final AmazonServiceException exception,
        final AmazonWebServiceRequest request) {
    log.info("Caught an AmazonServiceException, which means your request made it "
            + "to Amazon S3, but was rejected with an error response for some reason.");
    log.info("Error Message:    {}", exception.getMessage());
    log.info("HTTP Status Code: {}", exception.getStatusCode());
    log.info("AWS Error Code:   {}", exception.getErrorCode());
    log.info("Error Type:       {}", exception.getErrorType());
    log.info("Request ID:       {}", exception.getRequestId());
}

From source file:org.applicationMigrator.serverAgent.ServerAgentFileTransferClient.java

License:Apache License

private void uploadFile(AWSCredentials awsCredentials, String sourcePathString, String destinationPathString,
        boolean forceUpload) throws FileNotFoundException {
    // TODO Think about one file being used by many apps (e.g HP1.pdf read
    // through Adobe reader and OpenOffice)
    AmazonS3 s3client = new AmazonS3Client(awsCredentials);
    boolean fileIsPresentOnServer = checkIfFileIsPresentOnServer(s3client, BUCKET_NAME, destinationPathString);
    if (fileIsPresentOnServer && !forceUpload)
        return;
    try {
        File file = new File(sourcePathString);
        if (!file.exists())
            throw new FileNotFoundException();
        s3client.putObject(new PutObjectRequest(BUCKET_NAME, destinationPathString, file));
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which " + "means your request made it "
                + "to Amazon S3, but was rejected with an error response" + " for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
        throw ase;
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which " + "means the client encountered "
                + "an internal error while trying to " + "communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
        throw ace;
    }
    // TODO:verify completion of upload operation

}

From source file:org.boriken.s3fileuploader.S3SampleRefactored.java

License:Open Source License

public static void main(String[] args) throws IOException {
    /*
     * Important: Be sure to fill in your AWS access credentials in the
     *            AwsCredentials.properties file before you try to run this
     *            sample.
     * http://aws.amazon.com/security-credentials
     */
    AmazonS3 s3 = new AmazonS3Client(new PropertiesCredentials(
            S3SampleRefactored.class.getResourceAsStream("../conf/AwsCredentials.properties")));

    //        String bucketName = "chamakits-my-first-s3-bucket-" + UUID.randomUUID();
    String bucketName = "chamakits-HelloS3";
    String key = "somekey";

    System.out.println("===========================================");
    System.out.println("Getting Started with Amazon S3");
    System.out.println("===========================================\n");

    try {
        //           createBucket(s3,bucketName);
        //           listBuckets(s3);
        //           createFile(s3,bucketName,key);
        //           downloadFile(s3,bucketName,key);
        //           listFiles(s3, bucketName,"");
        //            deleteFile(s3, bucketName, key);
        //           deleteBucket(s3, bucketName);

        listFiles(s3, bucketName, "");

    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to Amazon S3, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with S3, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}

From source file:org.crypto.sse.IEX2LevAMAZON.java

License:Open Source License

/**
 * @param args
 * @throws Exception
 */
@SuppressWarnings("null")
public static void main(String[] args) throws Exception {

    //First Job
    Configuration conf = new Configuration();

    Job job = Job.getInstance(conf, "IEX-2Lev");

    job.setJarByClass(IEX2LevAMAZON.class);

    job.setMapperClass(MLK1.class);

    job.setReducerClass(RLK1.class);

    job.setMapOutputKeyClass(Text.class);

    job.setMapOutputValueClass(Text.class);

    job.setOutputKeyClass(Text.class);

    job.setNumReduceTasks(1);

    job.setOutputValueClass(ArrayListWritable.class);

    job.setInputFormatClass(FileNameKeyInputFormat.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    //Second Job
    Configuration conf2 = new Configuration();

    Job job2 = Job.getInstance(conf2, "IEX-2Lev");

    job2.setJarByClass(IEX2LevAMAZON.class);

    job2.setMapperClass(MLK2.class);

    job2.setReducerClass(RLK2.class);

    job2.setNumReduceTasks(1);

    job2.setMapOutputKeyClass(Text.class);

    job2.setMapOutputValueClass(Text.class);

    job2.setOutputKeyClass(Text.class);

    job2.setOutputValueClass(ArrayListWritable.class);

    job2.setInputFormatClass(FileNameKeyInputFormat.class);

    FileInputFormat.addInputPath(job2, new Path(args[0]));
    FileOutputFormat.setOutputPath(job2, new Path(args[2]));

    job.waitForCompletion(true);
    job2.waitForCompletion(true);

    //Here add your Amazon Credentials

    AWSCredentials credentials = new BasicAWSCredentials("XXXXXXXXXXXXXXXX", "XXXXXXXXXXXXXXXX");
    // create a client connection based on credentials
    AmazonS3 s3client = new AmazonS3Client(credentials);

    // create bucket - name must be unique for all S3 users
    String bucketName = "iexmaptest";

    S3Object s3object = s3client.getObject(new GetObjectRequest(bucketName, args[4]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());
    List<String> lines = new ArrayList<String>();

    String folderName = "2";

    BufferedReader reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    String line;
    int counter = 0;
    while ((line = reader.readLine()) != null) {
        // can copy the content locally as well
        // using a buffered writer
        lines.add(line);
        System.out.println(line);
        // upload file to folder 
        String fileName = folderName + "/" + Integer.toString(counter);
        ByteArrayInputStream input = new ByteArrayInputStream(line.getBytes());
        s3client.putObject(bucketName, fileName, input, new ObjectMetadata());
        counter++;
    }

    Multimap<String, String> lookup = ArrayListMultimap.create();

    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup.put(tokens[0], tokens[j]);
        }
    }

    // Loading inverted index that associates files identifiers to keywords
    lines = new ArrayList<String>();
    s3object = s3client.getObject(new GetObjectRequest(bucketName, args[5]));
    System.out.println(s3object.getObjectMetadata().getContentType());
    System.out.println(s3object.getObjectMetadata().getContentLength());

    // Loading inverted index that associates keywords to identifiers

    reader = new BufferedReader(new InputStreamReader(s3object.getObjectContent()));
    while ((line = reader.readLine()) != null) {
        lines.add(line);
    }
    Multimap<String, String> lookup2 = ArrayListMultimap.create();
    for (int i = 0; i < lines.size(); i++) {
        String[] tokens = lines.get(i).split("\\s+");
        for (int j = 1; j < tokens.length; j++) {
            lookup2.put(tokens[0], tokens[j]);
        }
    }

    // Delete File
    try {
        s3client.deleteObject(new DeleteObjectRequest(bucketName, args[4]));
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException.");
        System.out.println("Error Message: " + ace.getMessage());
    }

    /*
     * Start of IEX-2Lev construction
     */

    // Generation of keys for IEX-2Lev
    BufferedReader keyRead = new BufferedReader(new InputStreamReader(System.in));
    System.out.println("Enter your password :");
    String pass = keyRead.readLine();

    // You can change the size of the key; Here we set it to 128

    List<byte[]> listSK = IEX2Lev.keyGen(128, pass, "salt/salt", 100);

    // Generation of Local Multi-maps with Mapper job only without reducer

    Configuration conf3 = new Configuration();

    String testSerialization1 = new String(Base64.encodeBase64(Serializer.serialize(lookup)));
    String testSerialization2 = new String(Base64.encodeBase64(Serializer.serialize(lookup2)));

    String testSerialization3 = new String(Base64.encodeBase64(Serializer.serialize(listSK)));

    //String testSerialization2 = gson.toJson(lookup2);
    conf3.set("lookup", testSerialization1);
    conf3.set("lookup2", testSerialization2);
    conf3.set("setKeys", testSerialization3);

    Job job3 = Job.getInstance(conf3, "Local MM");

    job3.setJarByClass(IEX2LevAMAZON.class);

    job3.setMapperClass(LocalMM.class);

    job3.setNumReduceTasks(0);

    FileInputFormat.addInputPath(job3, new Path(args[2]));
    FileOutputFormat.setOutputPath(job3, new Path(args[3]));

    job3.waitForCompletion(true);

}