Example usage for java.util LinkedList size

List of usage examples for java.util LinkedList size

Introduction

On this page you can find example usage of java.util.LinkedList.size().

Prototype

int size()
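
A minimal, self-contained sketch of the method in isolation is shown below (the class name and list contents are illustrative only; the project examples that follow show real-world usage).

import java.util.LinkedList;

public class LinkedListSizeExample {
    public static void main(String[] args) {
        LinkedList<String> items = new LinkedList<String>();
        System.out.println(items.size()); // 0 for an empty list

        items.add("first");
        items.add("second");
        System.out.println(items.size()); // 2 after two additions

        // size() is often used to pre-size the destination array for toArray()
        String[] copy = items.toArray(new String[items.size()]);
        System.out.println(copy.length); // also 2

        items.removeFirst();
        System.out.println(items.size()); // 1 after removing an element
    }
}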


Usage

From source file:edu.umn.msi.tropix.common.jobqueue.impl.JobProcessorQueueImpl.java

public Status getStatus(final Ticket ticket) {
    initializationTracker.waitForInitialization(ticket);
    final LinkedList<StatusEntry> entries = new LinkedList<StatusEntry>();
    final JobInfo jobInfo = jobMap.get(ticket);
    final QueueStage queueStage = jobInfo == null ? QueueStage.ABSENT : jobInfo.queueStage;
    final Status status = new Status();
    status.setStage(new Stage(queueStage.getStageEnumerationValue()));
    if (queueStage.equals(QueueStage.RUNNING)) {
        final Double percentComplete = jobInfo.percentComplete;
        if (percentComplete != null) {
            final PercentComplete percentCompleteObject = new PercentComplete();
            percentCompleteObject.setValue(percentComplete);
            entries.add(percentCompleteObject);
        }
    }
    if (jobInfo != null && jobInfo.cancelled) {
        entries.add(new WasCancelled(true));
    }
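    // entries.size() pre-sizes the destination array passed to toArray()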
    status.setStatusEntry(entries.toArray(new StatusEntry[entries.size()]));
    // If the execution queue might have more information to add to the
    // status, allow it to.
    if (queueStage.equals(QueueStage.PENDING) || queueStage.equals(QueueStage.RUNNING)
            || queueStage.equals(QueueStage.COMPLETED) || queueStage.equals(QueueStage.FAILED)) {
        if (statusModifier != null) {
            statusModifier.extendStatus(ticket.getValue(), status);
        }
    }
    return status;
}

From source file:com.erudika.para.persistence.MongoDBDAO.java

@Override
public <P extends ParaObject> List<P> readPage(String appid, Pager pager) {
    LinkedList<P> results = new LinkedList<P>();
    if (StringUtils.isBlank(appid)) {
        return results;
    }
    if (pager == null) {
        pager = new Pager();
    }
    try {
        String lastKey = pager.getLastKey();
        MongoCursor<Document> cursor;
        Bson filter = Filters.gt(_OBJECT_ID, lastKey);
        if (lastKey == null) {
            cursor = getTable(appid).find().batchSize(pager.getLimit()).limit(pager.getLimit()).iterator();
        } else {
            cursor = getTable(appid).find(filter).batchSize(pager.getLimit()).limit(pager.getLimit())
                    .iterator();
        }
        while (cursor.hasNext()) {
            Map<String, Object> row = documentToMap(cursor.next());
            P obj = fromRow(row);
            if (obj != null) {
                results.add(obj);
                pager.setLastKey((String) row.get(_OBJECT_ID));
            }
        }
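        // advance the pager count by the number of objects read in this page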
        if (!results.isEmpty()) {
            pager.setCount(pager.getCount() + results.size());
        }
    } catch (Exception e) {
        logger.error(null, e);
    }
    logger.debug("readPage() page: {}, results:", pager.getPage(), results.size());
    return results;
}

From source file:com.trsst.Command.java

public int doBegin(String[] argv, PrintStream out, InputStream in) {

    buildOptions(argv, out, in);

    int result = 0;
    Server server = null;
    try {

        CommandLineParser argParser = new GnuParser();
        CommandLine commands;
        try {
            commands = argParser.parse(mergedOptions, argv);
        } catch (Throwable t) {
            log.error("Unexpected error parsing arguments: " + Arrays.asList(argv), t);
            return 127;
        }
        LinkedList<String> arguments = new LinkedList<String>();
        for (Object o : commands.getArgList()) {
            arguments.add(o.toString()); // dodge untyped param warning
        }
        if (commands.hasOption("?")) {
            printAllUsage();
            return 0;
        }
        if (arguments.size() < 1) {
            printAllUsage();
            return 127; // "command not found"
        }
        if (!commands.hasOption("strict")) {
            // most trsst nodes run with self-signed certificates,
            // so by default we accept them
            Common.enableAnonymousSSL();
        } else {
            System.err.println("Requiring signed SSL");
        }
        // System.out.println("Commands: " + arguments );
        String mode = arguments.removeFirst().toString();

        // for port requests
        if ("serve".equals(mode)) {
            // start a server and exit
            result = doServe(commands, arguments);
            return 0;
        }

        // attempt to parse next argument as a server url
        Client client = null;
        if (commands.hasOption("h")) {
            String host = commands.getOptionValue("h");
            try {
                URL url = new URL(host);
                // this argument is a server url
                client = new Client(url);
                System.err.println("Using service: " + host);
            } catch (MalformedURLException e) {
                // otherwise: ignore and continue
                System.err.println("Bad hostname: " + host);
            }
        }

        // if a server url wasn't specified
        if (client == null) {
            // start a client with a local server
            server = new Server();
            client = new Client(server.getServiceURL());
            System.err.println("Starting temporary service at: " + server.getServiceURL());
        }

        if ("pull".equals(mode)) {
            // pull feeds from server
            result = doPull(client, commands, arguments, out);
        } else if ("push".equals(mode)) {
            // push feeds to server
            result = doPush(client, commands, arguments, out);
        } else if ("post".equals(mode)) {
            // post (and push) entries
            result = doPost(client, commands, arguments, out, in);
        } else {
            printAllUsage();
            result = 127; // "command not found"
        }
    } catch (Throwable t) {
        log.error("Unexpected error: " + t, t);
        result = 1; // "catchall for general errors"
    }
    if (server != null) {
        server.stop();
    }
    return result;
}

From source file:dk.dma.ais.decode.DecodeTest.java

/**
 * Decode all messages in a file. Tries to handle proprietary messages.
 *
 * Demonstrates and tests the process of decoding lines into Vdm messages, and the decoding into AIS messages
 * 
 * @throws IOException
 */
@Test
public void readLoopTest() throws IOException {
    // Make a list of proprietary handlers

    // Open file
    URL url = ClassLoader.getSystemResource("stream_example.txt");
    Assert.assertNotNull(url);
    try (BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()))) {
        Assert.assertNotNull(in);
        String line;

        // Prepare message classes
        AisMessage message;
        Vdm vdm = new Vdm();
        LinkedList<IProprietaryTag> tags = new LinkedList<>();

        while ((line = in.readLine()) != null) {

            // Ignore everything else than sentences
            if (!line.startsWith("$") && !line.startsWith("!")) {
                continue;
            }

            // Check if proprietary line
            if (ProprietaryFactory.isProprietaryTag(line)) {
                // Try to parse with one the registered factories in
                // META-INF/services/dk.dma.ais.proprietary.ProprietaryFactory
                IProprietaryTag tag = ProprietaryFactory.parseTag(new SentenceLine(line));
                if (tag != null) {
                    tags.add(tag);
                }
                continue;
            }

            // Handle VDM/VDO line
            try {
                int result = vdm.parse(new SentenceLine(line));
                // LOG.info("result = " + result);
                if (result == 0) {
                    message = AisMessage.getInstance(vdm);
                    Assert.assertNotNull(message);
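                    // attach any proprietary tags collected before this VDM sentence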
                    if (tags.size() > 0) {
                        message.setTags(tags);
                    }

                    // Message ready for handling

                } else if (result == 1) {
                    // Wait for more data
                    continue;
                } else {
                    LOG.error("Failed to parse line: " + line + " result = " + result);
                    Assert.assertTrue(false);
                }

            } catch (Exception e) {
                LOG.info("VDM failed: " + e.getMessage() + " line: " + line + " tag: "
                        + (tags.size() > 0 ? tags.peekLast() : "null"));
                Assert.assertTrue(false);
            }

            // Create new VDM
            vdm = new Vdm();
            tags.clear();
        }
    }
}

From source file:fi.ni.IFC_ClassModel.java

/**
 * Creates the object tree.
 */
private void createObjectTree() {
    for (Map.Entry<Long, IFC_X3_VO> entry : linemap.entrySet()) {
        IFC_X3_VO vo = entry.getValue();
        fillJavaClassInstanceValues("root", vo, vo, 0);
    }

    try {
        for (Map.Entry<Long, IFC_X3_VO> entry : linemap.entrySet()) {
            IFC_X3_VO vo = entry.getValue();
            if (vo.inverse_pointer_sets.size() > 0) {
                for (Map.Entry<String, LinkedList<IFC_X3_VO>> inverse_set : vo.inverse_pointer_sets
                        .entrySet()) {
                    LinkedList<IFC_X3_VO> li = inverse_set.getValue();
                    String subject = filter_illegal_chars(":" + ifc_filename + "_i" + vo.getLine_num());
                    if (vo.getGid() != null) {
                        subject = ":guid" + GuidCompressor.uncompressGuidString(filter_extras(vo.getGid()));
                    }
                    for (int i = 0; i < li.size(); i++) {
                        IFC_X3_VO ivo = li.get(i);
                        addLiteralValue(vo.getLine_num(), ivo.getLine_num(), subject, inverse_set.getKey());

                    }

                } // for map inverse_set

            } // if
        } // for map linemap
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.trsst.Command.java

public int doPost(Client client, CommandLine commands, LinkedList<String> arguments, PrintStream out,
        InputStream in) {

    String id = null;

    if (arguments.size() == 0 && commands.getArgList().size() == 0) {
        printPostUsage();
        return 127; // "command not found"
    }

    if (arguments.size() > 0) {
        id = arguments.removeFirst();
        System.err.println("Obtaining keys for feed id: " + id);
    } else {
        System.err.println("Generating new feed id... ");
    }

    // read input text
    String subject = commands.getOptionValue("s");
    String verb = commands.getOptionValue("v");
    String base = commands.getOptionValue("b");
    String body = commands.getOptionValue("c");
    String name = commands.getOptionValue("n");
    String email = commands.getOptionValue("m");
    String uri = commands.getOptionValue("uri");
    String title = commands.getOptionValue("t");
    String subtitle = commands.getOptionValue("subtitle");
    String icon = commands.getOptionValue("i");
    if (icon == null && commands.hasOption("i")) {
        icon = "-";
    }
    String logo = commands.getOptionValue("l");
    if (logo == null && commands.hasOption("l")) {
        logo = "-";
    }
    String attach = commands.getOptionValue("a");
    if (attach == null && commands.hasOption("a")) {
        attach = "-";
    }
    String[] recipients = commands.getOptionValues("e");
    String[] mentions = commands.getOptionValues("r");
    String[] tags = commands.getOptionValues("g");
    String url = commands.getOptionValue("u");
    String vanity = commands.getOptionValue("vanity");

    // obtain password
    char[] password = null;
    String pass = commands.getOptionValue("p");
    if (pass != null) {
        password = pass.toCharArray();
    } else {
        try {
            Console console = System.console();
            if (console != null) {
                password = console.readPassword("Password: ");
            } else {
                log.info("No console detected for password input.");
            }
        } catch (Throwable t) {
            log.error("Unexpected error while reading password", t);
        }
    }
    if (password == null) {
        log.error("Password is required to post.");
        return 127; // "command not found"
    }
    if (password.length < 6) {
        System.err.println("Password must be at least six characters in length.");
        return 127; // "command not found"
    }

    // obtain keys
    KeyPair signingKeys = null;
    KeyPair encryptionKeys = null;
    String keyPath = commands.getOptionValue("k");

    // if id was not specified from the command line
    if (id == null) {

        // if password was not specified from command line
        if (pass == null) {
            try {
                // verify password
                char[] verify = null;
                Console console = System.console();
                if (console != null) {
                    verify = console.readPassword("Re-type Password: ");
                } else {
                    log.info("No console detected for password verification.");
                }
                if (verify == null || verify.length != password.length) {
                    System.err.println("Passwords do not match.");
                    return 127; // "command not found"
                }
                for (int i = 0; i < verify.length; i++) {
                    if (verify[i] != password[i]) {
                        System.err.println("Passwords do not match.");
                        return 127; // "command not found"
                    }
                    verify[i] = 0;
                }
            } catch (Throwable t) {
                log.error("Unexpected error while verifying password: " + t.getMessage(), t);
            }
        }

        // create new account
        if (base == null) {
            // default to trsst hub
            base = "https://home.trsst.com/feed";
        }

        // generate vanity id if required
        if (vanity != null) {
            System.err.println("Searching for vanity feed id prefix: " + vanity);
            switch (vanity.length()) {
            case 0:
            case 1:
                break;
            case 2:
                System.err.println("This may take several minutes.");
                break;
            case 3:
                System.err.println("This may take several hours.");
                break;
            case 4:
                System.err.println("This may take several days.");
                break;
            case 5:
                System.err.println("This may take several months.");
                break;
            default:
                System.err.println("This may take several years.");
                break;
            }
            System.err.println("Started: " + new Date());
            System.err.println("^C to exit");
        }
        do {
            signingKeys = Common.generateSigningKeyPair();
            id = Common.toFeedId(signingKeys.getPublic());
        } while (vanity != null && !id.startsWith(vanity));
        if (vanity != null) {
            System.err.println("Finished: " + new Date());
        }

        encryptionKeys = Common.generateEncryptionKeyPair();
        System.err.println("New feed id created: " + id);

        File keyFile;
        if (keyPath != null) {
            keyFile = new File(keyPath, id + Common.KEY_EXTENSION);
        } else {
            keyFile = new File(Common.getClientRoot(), id + Common.KEY_EXTENSION);
        }

        // persist to keystore
        writeSigningKeyPair(signingKeys, id, keyFile, password);
        writeEncryptionKeyPair(encryptionKeys, id, keyFile, password);

    } else {

        File keyFile;
        if (keyPath != null) {
            keyFile = new File(Common.getClientRoot(), keyPath);
        } else {
            keyFile = new File(Common.getClientRoot(), id + Common.KEY_EXTENSION);
        }

        if (keyFile.exists()) {
            System.err.println("Using existing account id: " + id);

        } else {
            System.err.println("Cannot locate keys for account id: " + id);
            return 78; // "configuration error"
        }

        signingKeys = readSigningKeyPair(id, keyFile, password);
        if (signingKeys != null) {
            encryptionKeys = readEncryptionKeyPair(id, keyFile, password);
            if (encryptionKeys == null) {
                encryptionKeys = signingKeys;
            }
        }
    }

    // clear password chars
    for (int i = 0; i < password.length; i++) {
        password[i] = 0;
    }
    if (signingKeys == null) {
        System.err.println("Could not obtain keys for signing.");
        return 73; // "can't create output error"
    }

    String[] recipientIds = null;
    if (recipients != null) {
        LinkedList<String> keys = new LinkedList<String>();
        for (int i = 0; i < recipients.length; i++) {
            if ("-".equals(recipients[i])) {
                // "-" is shorthand for encrypt for mentioned ids
                if (mentions != null) {
                    for (String mention : mentions) {
                        if (Common.isFeedId(mention)) {
                            keys.add(mention);
                        }
                    }
                }
            } else if (Common.isFeedId(recipients[i])) {
                keys.add(recipients[i]);
            } else {
                log.warn("Could not parse recipient id: " + recipients[i]);
            }
        }
        recipientIds = keys.toArray(new String[0]);
    }

    // handle binary attachment
    String mimetype = null;
    byte[] attachment = null;
    if (attach != null) {
        InputStream input = null;
        try {
            if ("-".equals(attach)) {
                input = new BufferedInputStream(in);
            } else {
                File file = new File(attach);
                input = new BufferedInputStream(new FileInputStream(file));
                System.err.println("Attaching: " + file.getCanonicalPath());
            }
            attachment = Common.readFully(input);
            mimetype = new Tika().detect(attachment);
            System.err.println("Detected type: " + mimetype);
        } catch (Throwable t) {
            log.error("Could not read attachment: " + attach, t);
            return 73; // "can't create output error"
        } finally {
            try {
                if (input != null) {
                    input.close();
                }
            } catch (IOException ioe) {
                // suppress any further error on closing
            }
        }
    }

    Object result;
    try {
        EntryOptions options = new EntryOptions();
        options.setStatus(subject);
        options.setVerb(verb);
        if (mentions != null) {
            options.setMentions(mentions);
        }
        if (tags != null) {
            options.setTags(tags);
        }
        options.setBody(body);
        if (attachment != null) {
            options.addContentData(attachment, mimetype);
        } else if (url != null) {
            options.setContentUrl(url);
        }
        FeedOptions feedOptions = new FeedOptions();
        feedOptions.setAuthorEmail(email);
        feedOptions.setAuthorName(name);
        feedOptions.setAuthorUri(uri);
        feedOptions.setTitle(title);
        feedOptions.setSubtitle(subtitle);
        feedOptions.setBase(base);
        if (icon != null) {
            if ("-".equals(icon)) {
                feedOptions.setAsIcon(true);
            } else {
                feedOptions.setIconURL(icon);
            }
        }
        if (logo != null) {
            if ("-".equals(logo)) {
                feedOptions.setAsLogo(true);
            } else {
                feedOptions.setLogoURL(logo);
            }
        }
        if (recipientIds != null) {
            EntryOptions publicEntry = new EntryOptions().setStatus("Encrypted content").setVerb("encrypt");
            // TODO: add duplicate mentions to outside of envelope
            options.encryptFor(recipientIds, publicEntry);
        }
        result = client.post(signingKeys, encryptionKeys, options, feedOptions);
    } catch (IllegalArgumentException e) {
        log.error("Invalid request: " + id + " : " + e.getMessage(), e);
        return 76; // "remote error"
    } catch (IOException e) {
        log.error("Error connecting to service for id: " + id, e);
        return 76; // "remote error"
    } catch (org.apache.abdera.security.SecurityException e) {
        log.error("Error generating signatures for id: " + id, e);
        return 73; // "can't create output error"
    } catch (Exception e) {
        log.error("General security error for id: " + id, e);
        return 74; // "general io error"
    }

    if (result != null) {
        if (format) {
            out.println(Common.formatXML(result.toString()));
        } else {
            out.println(result.toString());
        }
    }

    return 0; // "OK"
}

From source file:com.projity.script.object.TimeIntervals.java

public TimeIntervals translate(int winCount) { //TODO case winCount<0

    //      for (TimeWindow w : history) System.out.println("history0: "+w);
    //      for (TimeWindow w : win) System.out.println("win0: "+w);

    //for (TimeWindow w : history) System.out.println("id="+w.getId());
    TimeIntervals t = new TimeIntervals();
    t.setScale(scale);
    LinkedList<TimeWindow> twin = t.getWin();
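    // nothing to translate when no shift is requested or there are no current windows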
    if (winCount == 0 || win.size() == 0)
        return t; //or null
    if (winCount > 0) {
        t.winId = winId + win.size();
        int lastId = t.winId - 1 + winCount;
        int maxHistoryId = Math.min(history.getLast().getId(), lastId);
        int i = t.winId;
        if (i <= maxHistoryId) {
            ListIterator<TimeWindow> it = history.listIterator();
            TimeWindow w;
            while (it.hasNext()) {
                w = it.next();
                if (w.getId() == t.winId) {
                    it.previous();
                    break;
                }
            }
            for (; i <= maxHistoryId && it.hasNext(); i++) {
                w = it.next();
                twin.add(w);
                //               System.out.println("Found in history: "+w);
            }
        }
        LinkedList<TimeWindow> newWin = new LinkedList<TimeWindow>();
        generateWindows(scale, (twin.size() > 0 ? twin : win).getLast().getE(), start, end, lastId - i + 1,
                newWin);
        t.indexWindows(t.winId + t.getWin().size(), newWin);
        //         for (TimeWindow w : newWin) System.out.println("New window: "+w);
        t.getWin().addAll(newWin);
        history.addAll(newWin);
    } else {
        t.winId = winId - 1;
        int lastId = t.winId + 1 + winCount;
        int minHistoryId = Math.max(history.getFirst().getId(), lastId);
        int i = t.winId;
        if (i >= minHistoryId) {
            ListIterator<TimeWindow> it = history.listIterator(history.size() - 1);
            TimeWindow w;
            while (it.hasPrevious()) {
                w = it.previous();
                if (w.getId() == t.winId) {
                    it.next();
                    break;
                }
            }
            for (; i >= minHistoryId; i--) {
                w = it.previous();
                twin.addFirst(w);
                //               System.out.println("Found in history: "+w);
            }
        }
        //         System.out.println("winId="+winId+", t.winId="+t.winId+", lastId="+lastId+", i="+i+" minHistoryId="+minHistoryId);
        LinkedList<TimeWindow> newWin = new LinkedList<TimeWindow>();
        generateWindows(scale, (twin.size() > 0 ? twin : win).getFirst().getS(), start, end, lastId - i - 1,
                newWin);
        t.indexWindows(lastId, newWin);
        //         for (TimeWindow w : newWin) System.out.println("New window: "+w);
        t.getWin().addAll(0, newWin);
        history.addAll(0, newWin);
    }

    int translation = 0;
    for (TimeWindow w : t.getWin()) {
        if (winCount > 0) {
            win.removeFirst();
            win.addLast(w);
            translation++;
        } else {
            win.removeLast();
            win.addFirst(w);
            translation--;
        }
    }
    winId = winId + translation;
    t.setTranslation(translation);

    //      for (TimeWindow w : history) System.out.println("history1: "+w);
    //      for (TimeWindow w : win) System.out.println("win1: "+w);
    //      for (TimeWindow w : twin) System.out.println("t.win1: "+w);

    return t;
}

From source file:com.att.nsa.cambria.service.impl.EventsServiceImpl.java

/**
 *
 * @param ctx
 * @param topic
 * @param msg
 * @param defaultPartition
 * @param chunked
 * @param mediaType
 * @throws ConfigDbException
 * @throws AccessDeniedException
 * @throws TopicExistsException
 * @throws CambriaApiException
 * @throws IOException
 */
private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
        boolean chunked, String mediaType) throws ConfigDbException, AccessDeniedException,
        TopicExistsException, CambriaApiException, IOException {
    final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();

    // setup the event set
    final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);

    // start processing, building a batch to push to the backend
    final long startMs = System.currentTimeMillis();
    long count = 0;

    long maxEventBatch = 1024 * 16;
    String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
    if (null != batchlen)
        maxEventBatch = Long.parseLong(batchlen);

    // long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
    final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
    final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();

    try {
        // for each message...
        Publisher.message m = null;
        while ((m = events.next()) != null) {
            // add the message to the batch
            batch.add(m);
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
                    m.getMessage());
            kms.add(data);
            // check if the batch is full
            final int sizeNow = batch.size();
            if (sizeNow > maxEventBatch) {
                ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                kms.clear();
                batch.clear();
                metricsSet.publishTick(sizeNow);
                count += sizeNow;
            }
        }

        // send the pending batch
        final int sizeNow = batch.size();
        if (sizeNow > 0) {
            ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
            kms.clear();
            batch.clear();
            metricsSet.publishTick(sizeNow);
            count += sizeNow;
        }

        final long endMs = System.currentTimeMillis();
        final long totalMs = endMs - startMs;

        LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);

        // build a response
        final JSONObject response = new JSONObject();
        response.put("count", count);
        response.put("serverTimeMs", totalMs);
        DMaaPResponseBuilder.respondOk(ctx, response);

    } catch (Exception excp) {
        int status = HttpStatus.SC_NOT_FOUND;
        String errorMsg = null;
        if (excp instanceof CambriaApiException) {
            status = ((CambriaApiException) excp).getStatus();
            JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
            JSONObject errObject = new JSONObject(jsonTokener);
            errorMsg = (String) errObject.get("message");

        }
        ErrorResponse errRes = new ErrorResponse(status,
                DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount()
                        + count + "." + errorMsg,
                null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
                null);
        LOG.info(errRes.toString());
        throw new CambriaApiException(errRes);

    }
}

From source file:com.linkedpipes.plugin.loader.dcatAp11ToDkanBatch.DcatAp11ToDkanBatch.java

@Override
public void execute() throws LpException {

    apiURI = configuration.getApiUri();

    //for HTTP request failing on "failed to respond"
    boolean responded = false;

    if (apiURI == null || apiURI.isEmpty() || configuration.getUsername() == null
            || configuration.getUsername().isEmpty() || configuration.getPassword() == null
            || configuration.getPassword().isEmpty()) {
        throw exceptionFactory.failure("Missing required settings.");
    }

    Map<String, String> groups = getGroups();

    LOG.debug("Querying metadata for datasets");

    LinkedList<String> datasets = new LinkedList<>();
    for (Map<String, Value> map : executeSelectQuery(
            "SELECT ?d WHERE {?d a <" + DcatAp11ToDkanBatchVocabulary.DCAT_DATASET_CLASS + ">}")) {
        datasets.add(map.get("d").stringValue());
    }

    int current = 0;
    int total = datasets.size();
    LOG.info("Found " + total + " datasets");
    progressReport.start(total);

    token = getToken(configuration.getUsername(), configuration.getPassword());

    for (String datasetURI : datasets) {
        current++;

        CloseableHttpResponse queryResponse = null;

        LOG.info("Processing dataset " + current + "/" + total + ": " + datasetURI);

        String publisher_uri = executeSimpleSelectQuery("SELECT ?publisher_uri WHERE {<" + datasetURI + "> <"
                + DCTERMS.PUBLISHER + "> ?publisher_uri }", "publisher_uri");
        String publisher_name = executeSimpleSelectQuery(
                "SELECT ?publisher_name WHERE {<" + datasetURI + "> <" + DCTERMS.PUBLISHER + ">/<" + FOAF.NAME
                        + "> ?publisher_name FILTER(LANGMATCHES(LANG(?publisher_name), \""
                        + configuration.getLoadLanguage() + "\"))}",
                "publisher_name");

        if (!groups.containsKey(publisher_uri)) {
            LOG.debug("Creating group " + publisher_uri);

            if (publisher_name == null || publisher_name.isEmpty()) {
                throw exceptionFactory.failure("Publisher has no name: " + publisher_uri);
            }

            HttpPost httpPost = new HttpPost(apiURI + "/node");
            httpPost.addHeader(new BasicHeader("Accept", "application/json"));
            httpPost.addHeader(new BasicHeader("X-CSRF-Token", token));

            ArrayList<NameValuePair> postParameters = new ArrayList<>();
            postParameters.add(new BasicNameValuePair("type", "group"));
            postParameters.add(new BasicNameValuePair("title", publisher_name));
            postParameters.add(new BasicNameValuePair("body[und][0][value]", publisher_uri));

            try {
                UrlEncodedFormEntity form = new UrlEncodedFormEntity(postParameters, "UTF-8");
                httpPost.setEntity(form);
            } catch (UnsupportedEncodingException e) {
                LOG.error("Unexpected encoding issue");
            }

            CloseableHttpResponse response = null;

            responded = false;
            do {
                try {
                    response = postClient.execute(httpPost);
                    if (response.getStatusLine().getStatusCode() == 200) {
                        LOG.debug("Group created OK");
                        String orgID = new JSONObject(EntityUtils.toString(response.getEntity()))
                                .getString("nid");
                        groups.put(publisher_uri, orgID);
                    } else {
                        String ent = EntityUtils.toString(response.getEntity());
                        LOG.error("Group:" + ent);
                        //throw exceptionFactory.failed("Error creating group: " + ent);
                    }
                    responded = true;
                } catch (Exception e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } finally {
                    if (response != null) {
                        try {
                            response.close();
                        } catch (IOException e) {
                            LOG.error(e.getLocalizedMessage(), e);
                            throw exceptionFactory.failure("Error creating group");
                        }
                    }
                }
            } while (!responded);
        }

        ArrayList<NameValuePair> datasetFields = new ArrayList<>();
        datasetFields.add(new BasicNameValuePair("type", "dataset"));

        LinkedList<String> keywords = new LinkedList<>();
        for (Map<String, Value> map : executeSelectQuery(
                "SELECT ?keyword WHERE {<" + datasetURI + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_KEYWORD
                        + "> ?keyword FILTER(LANGMATCHES(LANG(?keyword), \"" + configuration.getLoadLanguage()
                        + "\"))}")) {
            keywords.add(map.get("keyword").stringValue());
        }
        String concatTags = "";
        for (String keyword : keywords) {
            String safekeyword = fixKeyword(keyword);
            if (safekeyword.length() >= 2) {
                concatTags += "\"\"" + safekeyword + "\"\" ";
            }
        }
        if (!concatTags.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("field_tags[und][value_field]", concatTags));
        }

        String title = executeSimpleSelectQuery("SELECT ?title WHERE {<" + datasetURI + "> <" + DCTERMS.TITLE
                + "> ?title FILTER(LANGMATCHES(LANG(?title), \"" + configuration.getLoadLanguage() + "\"))}",
                "title");
        if (!title.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("title", title));
        }
        String description = executeSimpleSelectQuery("SELECT ?description WHERE {<" + datasetURI + "> <"
                + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \""
                + configuration.getLoadLanguage() + "\"))}", "description");
        if (!description.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("body[und][0][value]", description));
        } else if (configuration.getProfile()
                .equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
            //Description is mandatory in NKOD. If missing, add at least title.
            datasetFields.add(new BasicNameValuePair("body[und][0][value]", title));
        }
        String issued = executeSimpleSelectQuery(
                "SELECT ?issued WHERE {<" + datasetURI + "> <" + DCTERMS.ISSUED + "> ?issued }", "issued");
        if (!issued.isEmpty()) {
            //long unixTime = System.currentTimeMillis() / 1000L;
            datasetFields.add(new BasicNameValuePair("created", issued));
        }
        String modified = executeSimpleSelectQuery(
                "SELECT ?modified WHERE {<" + datasetURI + "> <" + DCTERMS.MODIFIED + "> ?modified }",
                "modified");
        if (!modified.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("changed", modified));
        }

        if (!publisher_uri.isEmpty()) {
            datasetFields
                    .add(new BasicNameValuePair("og_group_ref[und][target_id]", groups.get(publisher_uri)));
        }

        if (configuration.getProfile().equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
            String contactPoint = executeSimpleSelectQuery("SELECT ?contact WHERE {<" + datasetURI + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_CONTACT_POINT + ">/<"
                    + DcatAp11ToDkanBatchVocabulary.VCARD_HAS_EMAIL + "> ?contact }", "contact");
            if (!contactPoint.isEmpty()) {
                datasetFields
                        .add(new BasicNameValuePair("field_maintainer_email[und][0][value]", contactPoint));
            }
            String curatorName = executeSimpleSelectQuery("SELECT ?name WHERE {<" + datasetURI + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_CONTACT_POINT + ">/<"
                    + DcatAp11ToDkanBatchVocabulary.VCARD_FN + "> ?name }", "name");
            if (!curatorName.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_maintainer[und][0][value]", curatorName));
            }
            if (!publisher_uri.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_publisher_uri[und][0][value]", publisher_uri));
            }
            if (!publisher_name.isEmpty()) {
                datasetFields
                        .add(new BasicNameValuePair("field_publisher_name[und][0][value]", publisher_name));
            }

            String periodicity = executeSimpleSelectQuery("SELECT ?periodicity WHERE {<" + datasetURI + "> <"
                    + DCTERMS.ACCRUAL_PERIODICITY + "> ?periodicity }", "periodicity");
            if (!periodicity.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_frequency_ods[und][0][value]", periodicity));
            } else {
                //Mandatory in NKOD
                datasetFields.add(new BasicNameValuePair("field_frequency_ods[und][0][value]",
                        "http://publications.europa.eu/resource/authority/frequency/UNKNOWN"));
            }
            String temporalStart = executeSimpleSelectQuery(
                    "SELECT ?temporalStart WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<"
                            + DcatAp11ToDkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }",
                    "temporalStart");
            if (!temporalStart.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_temporal_start[und][0][value]", temporalStart));
            }
            String temporalEnd = executeSimpleSelectQuery(
                    "SELECT ?temporalEnd WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<"
                            + DcatAp11ToDkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }",
                    "temporalEnd");
            if (!temporalEnd.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_temporal_end[und][0][value]", temporalEnd));
            }
            String schemaURL = executeSimpleSelectQuery(
                    "SELECT ?schema WHERE {<" + datasetURI + "> <" + FOAF.PAGE + "> ?schema }", "schema");
            if (!schemaURL.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_schema[und][0][value]", schemaURL));
            }
            String spatial = executeSimpleSelectQuery(
                    "SELECT ?spatial WHERE {<" + datasetURI + "> <" + DCTERMS.SPATIAL + "> ?spatial }",
                    "spatial");
            if (!spatial.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_spatial[und][0][value]", spatial));
                if (spatial.matches("http:\\/\\/ruian.linked.opendata.cz\\/resource\\/.*")) {
                    String type = spatial.replaceAll(
                            "http:\\/\\/ruian.linked.opendata.cz\\/resource\\/([^\\/]+)\\/(.*)", "$1");
                    String code = spatial.replaceAll(
                            "http:\\/\\/ruian.linked.opendata.cz\\/resource\\/([^\\/]+)\\/(.*)", "$2");
                    String typ;
                    //We should not parse IRIs, however, here we have no choice.
                    switch (type) {
                    case "vusc":
                        typ = "VC";
                        break;
                    case "obce":
                        typ = "OB";
                        break;
                    case "kraje":
                        typ = "KR";
                        break;
                    case "orp":
                        typ = "OP";
                        break;
                    case "momc":
                        typ = "MC";
                        break;
                    case "pou":
                        typ = "PU";
                        break;
                    default:
                        typ = "ST";
                    }
                    datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", typ));
                    datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", code));
                } else {
                    // RUIAN type and code are mandatory in NKOD
                    datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", "ST"));
                    datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", "1"));
                }
            } else {
                // RUIAN type and code are mandatory in NKOD
                datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", "ST"));
                datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", "1"));
            }
            //DCAT-AP v1.1: has to be an IRI from http://publications.europa.eu/mdr/authority/file-type/index.html
            LinkedList<String> themes = new LinkedList<>();
            for (Map<String, Value> map : executeSelectQuery("SELECT ?theme WHERE {<" + datasetURI + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_THEME + "> ?theme }")) {
                themes.add(map.get("theme").stringValue());
            }
            String concatThemes = "";
            for (String theme : themes) {
                concatThemes += theme + " ";
            }
            if (!concatThemes.isEmpty())
                datasetFields.add(new BasicNameValuePair("field_theme[und][0][value]", concatThemes));
        }

        //Distributions

        LinkedList<String> distributions = new LinkedList<>();
        for (Map<String, Value> map : executeSelectQuery("SELECT ?distribution WHERE {<" + datasetURI + "> <"
                + DcatAp11ToDkanBatchVocabulary.DCAT_DISTRIBUTION + "> ?distribution }")) {
            distributions.add(map.get("distribution").stringValue());
        }

        for (int d = 0; d < distributions.size(); d++) {
            String distribution = distributions.get(d);
            ArrayList<NameValuePair> distroFields = new ArrayList<>();
            distroFields.add(new BasicNameValuePair("type", "resource"));

            String dtitle = executeSimpleSelectQuery("SELECT ?title WHERE {<" + distribution + "> <"
                    + DCTERMS.TITLE + "> ?title FILTER(LANGMATCHES(LANG(?title), \""
                    + configuration.getLoadLanguage() + "\"))}", "title");
            if (dtitle.isEmpty()) {
                //Distribution title is mandatory in DKAN
                dtitle = title.isEmpty() ? "Resource" : title;
            }
            distroFields.add(new BasicNameValuePair("title", dtitle));

            String ddescription = executeSimpleSelectQuery("SELECT ?description WHERE {<" + distribution + "> <"
                    + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \""
                    + configuration.getLoadLanguage() + "\"))}", "description");
            if (!ddescription.isEmpty()) {
                distroFields.add(new BasicNameValuePair("body[und][0][value]", ddescription));
            }
            /*String dformat = executeSimpleSelectQuery("SELECT ?format WHERE {<" + distribution + "> <"+ DCTERMS.FORMAT + "> ?format }", "format");
            if (!dformat.isEmpty() && codelists != null) {
            String formatlabel = executeSimpleCodelistSelectQuery("SELECT ?formatlabel WHERE {<" + dformat + "> <"+ SKOS.PREF_LABEL + "> ?formatlabel FILTER(LANGMATCHES(LANG(?formatlabel), \"en\"))}", "formatlabel");
            if (!formatlabel.isEmpty()) {
                distroFields.add(new BasicNameValuePair("field_format[und][0][value]", formatlabel));
            }
            }*/
            String dmimetype = executeSimpleSelectQuery("SELECT ?format WHERE {<" + distribution + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_MEDIATYPE + "> ?format }", "format");
            if (!dmimetype.isEmpty()) {
                distroFields.add(new BasicNameValuePair("field_link_remote_file[und][0][filemime]",
                        dmimetype.replaceAll(".*\\/([^\\/]+\\/[^\\/]+)", "$1")));
            }

            String dwnld = executeSimpleSelectQuery("SELECT ?dwnld WHERE {<" + distribution + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_DOWNLOADURL + "> ?dwnld }", "dwnld");
            String access = executeSimpleSelectQuery("SELECT ?acc WHERE {<" + distribution + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_ACCESSURL + "> ?acc }", "acc");

            //we prefer downloadURL, but only accessURL is mandatory
            if (dwnld == null || dwnld.isEmpty()) {
                dwnld = access;
                if (dwnld == null || dwnld.isEmpty()) {
                    LOG.warn("Empty download and access URLs: " + datasetURI);
                    continue;
                }
            }

            if (!dwnld.isEmpty()) {
                distroFields.add(new BasicNameValuePair(
                        "field_link_remote_file[und][0][filefield_remotefile][url]", dwnld));
            }

            /*if (!distribution.isEmpty()) {
            distro.put("distro_url", distribution);
            }*/

            String dissued = executeSimpleSelectQuery(
                    "SELECT ?issued WHERE {<" + distribution + "> <" + DCTERMS.ISSUED + "> ?issued }",
                    "issued");
            if (!dissued.isEmpty()) {
                distroFields.add(new BasicNameValuePair("created", dissued));
            }
            String dmodified = executeSimpleSelectQuery(
                    "SELECT ?modified WHERE {<" + distribution + "> <" + DCTERMS.MODIFIED + "> ?modified }",
                    "modified");
            if (!dmodified.isEmpty()) {
                distroFields.add(new BasicNameValuePair("changed", dmodified));
            }

            if (configuration.getProfile().equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
                String dtemporalStart = executeSimpleSelectQuery(
                        "SELECT ?temporalStart WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<"
                                + DcatAp11ToDkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }",
                        "temporalStart");
                if (!dtemporalStart.isEmpty()) {
                    distroFields
                            .add(new BasicNameValuePair("field_temporal_start[und][0][value]", dtemporalStart));
                }
                String dtemporalEnd = executeSimpleSelectQuery(
                        "SELECT ?temporalEnd WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<"
                                + DcatAp11ToDkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }",
                        "temporalEnd");
                if (!dtemporalEnd.isEmpty()) {
                    distroFields.add(new BasicNameValuePair("field_temporal_end[und][0][value]", dtemporalEnd));
                }
                String dschemaURL = executeSimpleSelectQuery(
                        "SELECT ?schema WHERE {<" + distribution + "> <" + DCTERMS.CONFORMS_TO + "> ?schema }",
                        "schema");
                if (!dschemaURL.isEmpty()) {
                    distroFields.add(new BasicNameValuePair("field_described_by[und][0][value]", dschemaURL));
                }
                String dlicense = executeSimpleSelectQuery(
                        "SELECT ?license WHERE {<" + distribution + "> <" + DCTERMS.LICENSE + "> ?license }",
                        "license");
                if (dlicense.isEmpty()) {
                    //This is mandatory in NKOD and DKAN extension
                    dlicense = "http://joinup.ec.europa.eu/category/licence/unknown-licence";
                }
                distroFields.add(new BasicNameValuePair("field_licence[und][0][value]", dlicense));
                if (dmimetype.isEmpty()) {
                    //! field_format => mimetype
                    //This is mandatory in NKOD and DKAN extension
                    dmimetype = "http://www.iana.org/assignments/media-types/application/octet-stream";
                }
                distroFields.add(new BasicNameValuePair("field_mimetype[und][0][value]",
                        dmimetype.replaceAll(".*\\/([^\\/]+\\/[^\\/]+)", "$1")));
            }

            //POST DISTRIBUTION

            LOG.debug("Creating resource " + distribution);

            HttpPost httpPost = new HttpPost(apiURI + "/node");
            httpPost.addHeader(new BasicHeader("Accept", "application/json"));
            httpPost.addHeader(new BasicHeader("X-CSRF-Token", token));

            try {
                UrlEncodedFormEntity form = new UrlEncodedFormEntity(distroFields, "UTF-8");
                httpPost.setEntity(form);
            } catch (UnsupportedEncodingException e) {
                LOG.error("Unexpected encoding issue");
            }

            CloseableHttpResponse response = null;

            String resID = null;
            responded = false;
            do {
                try {
                    LOG.debug("POSTing resource " + distribution);
                    response = postClient.execute(httpPost);
                    if (response.getStatusLine().getStatusCode() == 200) {
                        String resp = EntityUtils.toString(response.getEntity());
                        LOG.debug("Resource created OK: " + resp);
                        try {
                            resID = new JSONObject(resp).getString("nid");
                            datasetFields.add(new BasicNameValuePair(
                                    "field_resources[und][" + d + "][target_id]", dtitle + " (" + resID + ")"));
                        } catch (JSONException e) {
                            LOG.error(e.getLocalizedMessage(), e);
                            LOG.error("Request: " + distroFields.toString());
                            LOG.error("Response: " + resp);
                        }
                    } else {
                        String ent = EntityUtils.toString(response.getEntity());
                        LOG.error("Resource:" + ent);
                        //throw exceptionFactory.failed("Error creating resource: " + ent);
                    }
                    responded = true;
                } catch (NoHttpResponseException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } catch (IOException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } finally {
                    if (response != null) {
                        try {
                            response.close();
                        } catch (IOException e) {
                            LOG.error(e.getLocalizedMessage(), e);
                            //throw exceptionFactory.failed("Error creating resource");
                        }
                    }
                }
            } while (!responded);

        }

        LOG.debug("Creating dataset " + datasetURI);

        HttpPost httpPost = new HttpPost(apiURI + "/node");
        httpPost.addHeader(new BasicHeader("Accept", "application/json"));
        httpPost.addHeader(new BasicHeader("X-CSRF-Token", token));

        try {
            UrlEncodedFormEntity form = new UrlEncodedFormEntity(datasetFields, "UTF-8");
            httpPost.setEntity(form);
        } catch (UnsupportedEncodingException e) {
            LOG.error("Unexpected encoding issue");
        }

        CloseableHttpResponse response = null;

        responded = false;
        do {
            try {
                LOG.debug("POSTing dataset " + datasetURI);
                response = postClient.execute(httpPost);
                if (response.getStatusLine().getStatusCode() == 200) {
                    LOG.debug("Dataset created OK");
                } else {
                    String ent = EntityUtils.toString(response.getEntity());
                    LOG.error("Dataset:" + ent);
                    //throw exceptionFactory.failed("Error creating dataset: " + ent);
                }
                responded = true;
            } catch (NoHttpResponseException e) {
                LOG.error(e.getLocalizedMessage(), e);
            } catch (IOException e) {
                LOG.error(e.getLocalizedMessage(), e);
            } finally {
                if (response != null) {
                    try {
                        response.close();
                    } catch (IOException e) {
                        LOG.error(e.getLocalizedMessage(), e);
                        throw exceptionFactory.failure("Error creating dataset");
                    }
                }
            }
        } while (!responded);

        progressReport.entryProcessed();
    }

    try {
        queryClient.close();
        createClient.close();
        postClient.close();
    } catch (IOException e) {
        LOG.error(e.getLocalizedMessage(), e);
    }

    progressReport.done();

}

From source file:eu.stratosphere.nephele.instance.ec2.EC2CloudManager.java

/**
 * {@inheritDoc}
 */
@Override
public void requestInstance(final JobID jobID, Configuration conf, final InstanceRequestMap instanceRequestMap,
        final List<String> splitAffinityList) throws InstanceException {

    if (conf == null) {
        throw new IllegalArgumentException("No job configuration provided, unable to acquire credentials");
    }

    // First check if all required configuration entries are available

    final String awsAccessId = conf.getString(AWS_ACCESS_ID_KEY, null);
    if (awsAccessId == null) {
        throw new InstanceException("Unable to allocate cloud instance: Cannot find AWS access ID");
    }

    final String awsSecretKey = conf.getString(AWS_SECRET_KEY_KEY, null);
    if (awsSecretKey == null) {
        throw new InstanceException("Unable to allocate cloud instance: Cannot find AWS secret key");
    }

    if (conf.getString(AWS_AMI_KEY, null) == null) {
        throw new InstanceException("Unable to allocate cloud instance: Cannot find AMI image ID");
    }

    // First we check if there are any orphaned instances that are accessible with the provided configuration
    checkAndConvertOrphanedInstances(conf);

    // Check if there already exists a mapping for this job
    JobToInstancesMapping jobToInstanceMapping = null;
    synchronized (this.jobToInstancesAssignmentMap) {
        jobToInstanceMapping = this.jobToInstancesAssignmentMap.get(jobID);

        // Create new mapping if it does not yet exist
        if (jobToInstanceMapping == null) {
            LOG.debug("Creating new mapping for job " + jobID);
            jobToInstanceMapping = new JobToInstancesMapping(awsAccessId, awsSecretKey);
            this.jobToInstancesAssignmentMap.put(jobID, jobToInstanceMapping);
        }
    }

    // Check if there already exists a network topology for this job
    NetworkTopology networkTopology = null;
    synchronized (this.networkTopologies) {
        networkTopology = this.networkTopologies.get(jobID);
        if (networkTopology == null) {
            networkTopology = NetworkTopology.createEmptyTopology();
            this.networkTopologies.put(jobID, networkTopology);
        }
    }

    // Our bill with all instances that we will provide...
    final LinkedList<FloatingInstance> floatingInstances = new LinkedList<FloatingInstance>();
    final LinkedList<String> requestedInstances = new LinkedList<String>();

    // We iterate over the maximum of requested Instances...
    final Iterator<Map.Entry<InstanceType, Integer>> it = instanceRequestMap.getMaximumIterator();

    while (it.hasNext()) {

        final Map.Entry<InstanceType, Integer> e = it.next();

        // This is our actual type...
        final InstanceType actualtype = e.getKey();
        final int maxcount = e.getValue();
        final int mincount = maxcount;
        LOG.info("Requesting " + maxcount + " instances of type " + actualtype + " for job " + jobID);

        // And this is the list of instances we will have...
        LinkedList<FloatingInstance> actualFloatingInstances = null;
        LinkedList<String> actualRequestedInstances = null;

        // Check if floating instances available...
        actualFloatingInstances = anyFloatingInstancesAvailable(awsAccessId, awsSecretKey, actualtype,
                maxcount);

        // Do we need more instances?
        if (actualFloatingInstances.size() < maxcount) {
            int minimumrequestcount = Math.max(mincount - actualFloatingInstances.size(), 1);
            int maximumrequestcount = maxcount - actualFloatingInstances.size();

            actualRequestedInstances = allocateCloudInstance(conf, actualtype, minimumrequestcount,
                    maximumrequestcount);
        } else {
            actualRequestedInstances = new LinkedList<String>();
        }

        // Add provided Instances to overall bill...
        floatingInstances.addAll(actualFloatingInstances);
        requestedInstances.addAll(actualRequestedInstances);

        // Are we outside the limits?
        if (actualRequestedInstances.size() + actualFloatingInstances.size() < mincount) {
            LOG.error("Requested: " + mincount + " to " + maxcount + " instances of type "
                    + actualtype.getIdentifier() + ", but could only provide "
                    + (actualRequestedInstances.size() + actualFloatingInstances.size()) + ".");

            // something went wrong.. give the floating instances back!
            synchronized (this.floatingInstances) {
                for (FloatingInstance i : floatingInstances) {
                    this.floatingInstances.put(i.getInstanceConnectionInfo(), i);
                }
            }
            throw new InstanceException("Could not allocate enough cloud instances. See logs for details.");
        } // End outer limits

    } // End iterating over instance types..

    // Convert and allocate Floating Instances...
    final List<AllocatedResource> allocatedResources = new ArrayList<AllocatedResource>();

    for (final FloatingInstance fi : floatingInstances) {
        final EC2CloudInstance ci = fi.asCloudInstance(networkTopology.getRootNode());
        jobToInstanceMapping.assignInstanceToJob(ci);
        allocatedResources.add(ci.asAllocatedResource());
    }

    // Finally, inform the scheduler about the instances which have been floating before
    if (!allocatedResources.isEmpty()) {
        final EC2CloudInstanceNotifier notifier = new EC2CloudInstanceNotifier(this.instanceListener, jobID,
                allocatedResources);
        notifier.start();
    }

    // Add reserved Instances to Job Mapping...
    for (final String i : requestedInstances) {
        this.reservedInstancesToJobMapping.put(i, jobID);
    }
}