Example usage for java.util LinkedList get

List of usage examples for java.util LinkedList get

Introduction

On this page you can find example usage for java.util LinkedList get.

Prototype

public E get(int index) 

Document

Returns the element at the specified position in this list.
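
A minimal, self-contained sketch of the call (class and variable names here are illustrative):

import java.util.LinkedList;

public class GetExample {
    public static void main(String[] args) {
        LinkedList<String> names = new LinkedList<>();
        names.add("alpha");
        names.add("beta");
        names.add("gamma");
        System.out.println(names.get(1)); // prints "beta"
    }
}

Note that LinkedList.get(int) traverses the list from whichever end is closer to the index, so each call is O(n); when scanning a whole list, prefer an Iterator or an enhanced for loop.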

Usage

From source file:mase.spec.StochasticHybridExchanger.java

protected Individual[] selectIndividuals(Individual[] pool, int num, MergeSelection mode,
        EvolutionState state) {
    Individual[] picked = new Individual[num];
    if (mode == MergeSelection.truncate) {
        Individual[] sorted = sortedCopy(pool);
        System.arraycopy(sorted, 0, picked, 0, num);
    } else if (mode == MergeSelection.fitnessproportionate) {
        double total = 0;
        LinkedList<Individual> poolList = new LinkedList<>();
        for (Individual ind : pool) {
            poolList.add(ind);
            total += ((SimpleFitness) ind.fitness).fitness();
        }
        int index = 0;
        while (index < num) {
            double accum = 0;
            double rand = state.random[0].nextDouble() * total;
            Iterator<Individual> iter = poolList.iterator();
            while (iter.hasNext()) {
                Individual ind = iter.next();
                accum += ((SimpleFitness) ind.fitness).fitness();
                if (accum >= rand) {
                    picked[index++] = ind;
                    iter.remove();
                    total -= ((SimpleFitness) ind.fitness).fitness();
                    break;
                }
            }
        }
    } else if (mode == MergeSelection.random) {
        LinkedList<Individual> poolList = new LinkedList<>(Arrays.asList(pool));
        int index = 0;
        while (index < num) {
            int rand = state.random[0].nextInt(poolList.size());
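            // get(rand) and remove(rand) each traverse the LinkedList to the chosen index (O(n) per call)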
            picked[index++] = poolList.get(rand);
            poolList.remove(rand);
        }
    } else {
        state.output.fatal("Unknown picking mode: " + mode);
    }
    return picked;
}

From source file:mase.spec.BasicHybridExchanger.java

protected Individual[] pickIndividuals(Individual[] pool, int num, PickMode mode, EvolutionState state) {
    Individual[] picked = new Individual[num];
    if (mode == PickMode.first) {
        System.arraycopy(pool, 0, picked, 0, num);
    } else if (mode == PickMode.elite) {
        Individual[] sorted = sortedCopy(pool);
        System.arraycopy(sorted, 0, picked, 0, num);
    } else if (mode == PickMode.probabilistic) {
        double total = 0;
        LinkedList<Individual> poolList = new LinkedList<>();
        for (Individual ind : pool) {
            poolList.add(ind);
            total += ((SimpleFitness) ind.fitness).fitness();
        }
        int index = 0;
        while (index < num) {
            double accum = 0;
            double rand = state.random[0].nextDouble() * total;
            Iterator<Individual> iter = poolList.iterator();
            while (iter.hasNext()) {
                Individual ind = iter.next();
                accum += ((SimpleFitness) ind.fitness).fitness();
                if (accum >= rand) {
                    picked[index++] = ind;
                    iter.remove();
                    total -= ((SimpleFitness) ind.fitness).fitness();
                    break;
                }
            }
        }
    } else if (mode == PickMode.random) {
        LinkedList<Individual> poolList = new LinkedList<>(Arrays.asList(pool));
        int index = 0;
        while (index < num) {
            int rand = state.random[0].nextInt(poolList.size());
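            // as above: get(rand) and remove(rand) are linear-time on a LinkedList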
            picked[index++] = poolList.get(rand);
            poolList.remove(rand);
        }
    } else {
        state.output.fatal("Unknown picking mode: " + mode);
    }
    return picked;
}

From source file:com.ibm.bi.dml.runtime.controlprogram.parfor.opt.PerfTestTool.java

/**
 * @throws DMLRuntimeException
 * @throws DMLUnsupportedOperationException
 * @throws IOException
 */
private static void executeTest() throws DMLRuntimeException, DMLUnsupportedOperationException, IOException {
    System.out.println("SystemML PERFORMANCE TEST TOOL:");

    //foreach registered instruction   
    for (Entry<Integer, Instruction> inst : _regInst.entrySet()) {
        int instID = inst.getKey();
        System.out.println("Running INSTRUCTION " + _regInst_IDNames.get(instID));

        Integer[] testDefIDs = _regInst_IDTestDef.get(instID);
        boolean vectors = _regInst_IDVectors.get(instID);
        IOSchema schema = _regInst_IDIOSchema.get(instID);

        //create tmp program block and set instruction
        Program prog = new Program();
        ProgramBlock pb = new ProgramBlock(prog);
        ArrayList<Instruction> ainst = new ArrayList<Instruction>();
        ainst.add(inst.getValue());
        pb.setInstructions(ainst);

        ExecutionContext ec = ExecutionContextFactory.createContext();

        //foreach registered test configuration
        for (Integer defID : testDefIDs) {
            PerfTestDef def = _regTestDef.get(defID);
            TestMeasure m = def.getMeasure();
            TestVariable lv = def.getVariable();
            DataFormat df = def.getDataformat();
            InternalTestVariable[] pv = def.getInternalVariables();
            double min = def.getMin();
            double max = def.getMax();
            double samples = def.getNumSamples();

            System.out.println("Running TESTDEF(measure=" + m + ", variable=" + String.valueOf(lv) + " "
                    + pv.length + ", format=" + String.valueOf(df) + ")");

            //vary input variable
            LinkedList<Double> dmeasure = new LinkedList<Double>();
            LinkedList<Double> dvariable = generateSequence(min, max, samples);
            int plen = pv.length;

            if (plen == 1) //1D function 
            {
                for (Double var : dvariable) {
                    dmeasure.add(executeTestCase1D(m, pv[0], df, var, pb, vectors, schema, ec));
                }
            } else //multi-dim function
            {
                //init index stack
                int[] index = new int[plen];
                for (int i = 0; i < plen; i++)
                    index[i] = 0;

                //execute test 
                int dlen = dvariable.size();
                double[] buff = new double[plen];
                while (index[0] < dlen) {
                    //set buffer values
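                    //note: each dvariable.get(index[i]) call traverses the LinkedList from the nearer end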
                    for (int i = 0; i < plen; i++)
                        buff[i] = dvariable.get(index[i]);

                    //core execution
                    dmeasure.add(executeTestCaseMD(m, pv, df, buff, pb, schema, ec)); //not applicable for vector flag

                    //increment indexes
                    for (int i = plen - 1; i >= 0; i--) {
                        if (i == plen - 1)
                            index[i]++;
                        else if (index[i + 1] >= dlen) {
                            index[i]++;
                            index[i + 1] = 0;
                        }
                    }
                }
            }

            //append values to results
            if (!_results.containsKey(instID))
                _results.put(instID, new HashMap<Integer, LinkedList<Double>>());
            _results.get(instID).put(defID, dmeasure);

        }
    }
}

From source file:com.mirth.connect.plugins.dashboardstatus.DashboardConnectorStatusMonitor.java

public synchronized Object invoke(String method, Object object, String sessionId) {
    if (method.equals(METHOD_GET_STATES)) {
        return connectorStateMap;
    } else if (method.equals(METHOD_GET_CONNECTION_INFO_LOGS)) {
        String channelName;
        LinkedList<String[]> channelLog;

        if (object == null) {
            /*
             * object is null - no channel is selected. return the latest
             * entire log entries of all channels combined. ONLY new
             * entries.
             */
            channelName = STATE_NO_SELECTION;
            channelLog = entireConnectorInfoLogs;
        } else {
            // object is not null - a channel is selected. return the latest
            // (up to LOG_SIZE) log entries of that particular channel.
            channelName = object.toString();
            // return only the newly added log entries for the client with
            // matching sessionId.
            channelLog = connectorInfoLogs.get(channelName);

            if (channelLog == null) {
                channelLog = new LinkedList<String[]>();
                connectorInfoLogs.put(channelName, channelLog);
            }
        }

        HashMap<String, Long> lastDisplayedLogIdByChannel;

        if (lastDisplayedLogIndexBySessionId.containsKey(sessionId)) {
            // a client exists with this sessionId.
            lastDisplayedLogIdByChannel = lastDisplayedLogIndexBySessionId.get(sessionId);

            if (lastDisplayedLogIdByChannel.containsKey(channelName)) {
                // existing channel on an already open client.
                // -> only display new log entries.
                long lastDisplayedLogId = lastDisplayedLogIdByChannel.get(channelName);
                LinkedList<String[]> newChannelLogEntries = new LinkedList<String[]>();

                // FYI, channelLog.size() will never be larger than LOG_SIZE
                // = 1000.
                for (String[] aChannelLog : channelLog) {
                    if (lastDisplayedLogId < Long.parseLong(aChannelLog[0])) {
                        newChannelLogEntries.addLast(aChannelLog);
                    }
                }

                if (newChannelLogEntries.size() > 0) {
                    /*
                     * put the lastDisplayedLogId into the HashMap. Index 0
                     * is the most recent entry, and index 0 of that entry
                     * contains the logId.
                     */
                    lastDisplayedLogIdByChannel.put(channelName,
                            Long.parseLong(newChannelLogEntries.get(0)[0]));
                    lastDisplayedLogIndexBySessionId.put(sessionId, lastDisplayedLogIdByChannel);
                }

                try {
                    return SerializationUtils.clone(newChannelLogEntries);
                } catch (SerializationException e) {
                    logger.error(e);
                }
            } else {
                /*
                 * a new channel viewed on an already open client -> all log
                 * entries are new; display them all. put the
                 * lastDisplayedLogId into the HashMap. Index 0 is the most
                 * recent entry, and index 0 of that entry object contains
                 * the logId.
                 */
                if (channelLog.size() > 0) {
                    lastDisplayedLogIdByChannel.put(channelName, Long.parseLong(channelLog.get(0)[0]));
                    lastDisplayedLogIndexBySessionId.put(sessionId, lastDisplayedLogIdByChannel);
                }

                try {
                    return SerializationUtils.clone(channelLog);
                } catch (SerializationException e) {
                    logger.error(e);
                }
            }

        } else {
            // brand new client, and thus also a new channel view.
            // -> all log entries are new; display them all.
            lastDisplayedLogIdByChannel = new HashMap<String, Long>();

            if (channelLog.size() > 0) {
                lastDisplayedLogIdByChannel.put(channelName, Long.parseLong(channelLog.get(0)[0]));
            } else {
                // no logs exist at all. put currentLogId - 1, which is the
                // most recent logId.
                lastDisplayedLogIdByChannel.put(channelName, logId - 1);
            }

            lastDisplayedLogIndexBySessionId.put(sessionId, lastDisplayedLogIdByChannel);

            try {
                return SerializationUtils.clone(channelLog);
            } catch (SerializationException e) {
                logger.error(e);
            }
        }

    } else if (method.equals(METHOD_CHANNELS_DEPLOYED)) {
        if (channelsDeployedFlagForEachClient.containsKey(sessionId)) {
            // sessionId found. no (re)deploy occurred.
            return false;
        } else {
            // no sessionId found, which means channels have just been
            // (re)deployed - clear out all clients' Dashboard Connector
            // Logs.
            channelsDeployedFlagForEachClient.put(sessionId, true);
            return true;
        }
    } else if (method.equals(METHOD_REMOVE_SESSIONID)) {
        // client shut down, or user logged out -> remove everything
        // involving this sessionId.
        if (lastDisplayedLogIndexBySessionId.containsKey(sessionId)) {
            lastDisplayedLogIndexBySessionId.remove(sessionId);
        }

        if (channelsDeployedFlagForEachClient.containsKey(sessionId)) {
            channelsDeployedFlagForEachClient.remove(sessionId);
        }

        return null;
    }

    return null;
}

From source file:fi.ni.IFC_ClassModel.java

/**
 * Creates the object tree.
 */
private void createObjectTree() {
    for (Map.Entry<Long, IFC_X3_VO> entry : linemap.entrySet()) {
        IFC_X3_VO vo = entry.getValue();
        fillJavaClassInstanceValues("root", vo, vo, 0);
    }

    try {
        for (Map.Entry<Long, IFC_X3_VO> entry : linemap.entrySet()) {
            IFC_X3_VO vo = entry.getValue();
            if (vo.inverse_pointer_sets.size() > 0) {
                for (Map.Entry<String, LinkedList<IFC_X3_VO>> inverse_set : vo.inverse_pointer_sets
                        .entrySet()) {
                    LinkedList<IFC_X3_VO> li = inverse_set.getValue();
                    String subject = filter_illegal_chars(":" + ifc_filename + "_i" + vo.getLine_num());
                    if (vo.getGid() != null) {
                        subject = ":guid" + GuidCompressor.uncompressGuidString(filter_extras(vo.getGid()));
                    }
                    for (int i = 0; i < li.size(); i++) {
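                        // li.get(i) walks from the head on every pass, making this loop O(n^2); an Iterator would traverse once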
                        IFC_X3_VO ivo = li.get(i);
                        addLiteralValue(vo.getLine_num(), ivo.getLine_num(), subject, inverse_set.getKey());

                    }

                } // for map inverse_set

            } // if
        } // for map linemap
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:org.agnitas.web.CampaignAction.java

private void setSortedMailingList(Campaign.Stats stat, HttpServletRequest req, CampaignForm aForm) {
    LinkedList<Number> resultList = new LinkedList<Number>();
    MailingDao mailDao = (MailingDao) getBean("MailingDao");

    // this hashmap contains the mapping from a Date back to the Mail-ID.
    HashMap<Date, Number> tmpDate2MailIDMapping = new HashMap<Date, Number>();
    LinkedList<Date> sortedMailingList = new LinkedList<Date>();

    Hashtable map = stat.getMailingData(); // holds the complete mailing data; keys are the mailing IDs

    Number tmpMailID = null;
    MaildropEntry tmpEntry = null;
    Mailing tmpMailing = null;

    // loop over all keys.
    Iterator it = map.keySet().iterator();
    while (it.hasNext()) {
        LinkedList<Date> sortDates = new LinkedList<Date>();
        tmpMailID = (Number) it.next(); // get the mailID   
        // get one Mailing with tmpMailID
        tmpMailing = (Mailing) mailDao.getMailing(tmpMailID.intValue(), getCompanyID(req));
        // check if it is a world mailing; test mailings are ignored.
        if (tmpMailing.isWorldMailingSend()) {
            // loop over all maildrop entries and take the first mailing send time.
            // unfortunately the set is not sorted, so we have to sort it ourselves.
            Iterator it2 = tmpMailing.getMaildropStatus().iterator();
            while (it2.hasNext()) {
                tmpEntry = (MaildropEntry) it2.next();
                sortDates.add(tmpEntry.getSendDate());
            }
            // if sortDates has entries, put the earliest one into the hashmap.
            if (sortDates.size() != 0) {
                Collections.sort(sortDates);
                tmpDate2MailIDMapping.put(sortDates.get(0), tmpMailID);
                sortedMailingList.add(sortDates.get(0));
            }
        }
    }
    // at this point, we have a hashmap mapping dates to mailing IDs and a list of all dates.
    // now we sort this list in reverse order and put the result into the form.
    Collections.sort(sortedMailingList, Collections.reverseOrder());
    // loop over the sorted dates and add the corresponding mailing ID to the result list.
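    // note: sortedMailingList.get(i) traverses the LinkedList on each iteration; an Iterator would make this loop linear.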
    for (int i = 0; i < sortedMailingList.size(); i++) {
        resultList.add(tmpDate2MailIDMapping.get(sortedMailingList.get(i)));
    }
    aForm.setSortedKeys(resultList);
}

From source file:it.doqui.index.ecmengine.business.personalization.multirepository.index.lucene.RepositoryAwareADMLuceneSearcherImpl.java

public List<Pair<String, Integer>> getTopTerms(String field, int count) {
    RepositoryAwareClosingIndexSearcher searcher = null;
    try {
        LinkedList<Pair<String, Integer>> answer = new LinkedList<Pair<String, Integer>>();
        searcher = getSearcher(indexer);
        IndexReader reader = searcher.getIndexReader();
        TermEnum terms = reader.terms(new Term(field, ""));
        do {
            Term term = terms.term();
            if (term != null) {
                if (!term.field().equals(field)) {
                    break;
                }
                int freq = terms.docFreq();
                Pair<String, Integer> pair = new Pair<String, Integer>(term.text(), Integer.valueOf(freq));
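                // the get(answer.size() - 1) and get(count - 1) calls below read the tail of the
                // list; LinkedList traverses from the nearer end, so these lookups stay cheap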
                if (answer.size() < count) {
                    if (answer.size() == 0) {
                        answer.add(pair);
                    } else if (answer.get(answer.size() - 1).getSecond().compareTo(pair.getSecond()) >= 0) {
                        answer.add(pair);
                    } else {
                        for (ListIterator<Pair<String, Integer>> it = answer.listIterator(); it
                                .hasNext(); /**/) {
                            Pair<String, Integer> test = it.next();
                            if (test.getSecond().compareTo(pair.getSecond()) < 0) {
                                it.previous();
                                it.add(pair);
                                break;
                            }
                        }
                    }
                } else if (answer.get(count - 1).getSecond().compareTo(pair.getSecond()) < 0) {
                    for (ListIterator<Pair<String, Integer>> it = answer.listIterator(); it.hasNext(); /**/) {
                        Pair<String, Integer> test = it.next();
                        if (test.getSecond().compareTo(pair.getSecond()) < 0) {
                            it.previous();
                            it.add(pair);
                            break;
                        }
                    }
                    answer.removeLast();
                } else {
                    // off the end
                }
            }
        } while (terms.next());
        terms.close();
        return answer;

    } catch (IOException e) {
        throw new SearcherException(e);
    } finally {
        if (searcher != null) {
            try {
                searcher.close();
            } catch (IOException e) {
                throw new SearcherException(e);
            }
        }
    }

}

From source file:appeng.items.tools.powered.ToolColorApplicator.java

private ItemStack findNextColor(final ItemStack is, final ItemStack anchor, final int scrollOffset) {
    ItemStack newColor = null;

    final IMEInventory<IAEItemStack> inv = AEApi.instance().registries().cell().getCellInventory(is, null,
            StorageChannel.ITEMS);
    if (inv != null) {
        final IItemList<IAEItemStack> itemList = inv
                .getAvailableItems(AEApi.instance().storage().createItemList());
        if (anchor == null) {
            final IAEItemStack firstItem = itemList.getFirstItem();
            if (firstItem != null) {
                newColor = firstItem.getItemStack();
            }
        } else {
            final LinkedList<IAEItemStack> list = new LinkedList<IAEItemStack>();

            for (final IAEItemStack i : itemList) {
                list.add(i);
            }

            Collections.sort(list, new Comparator<IAEItemStack>() {

                @Override
                public int compare(final IAEItemStack a, final IAEItemStack b) {
                    return ItemSorters.compareInt(a.getItemDamage(), b.getItemDamage());
                }
            });

            if (list.size() <= 0) {
                return null;
            }

            IAEItemStack where = list.getFirst();
            int cycles = 1 + list.size();

            while (cycles > 0 && !where.equals(anchor)) {
                list.addLast(list.removeFirst());
                cycles--;
                where = list.getFirst();
            }

            if (scrollOffset > 0) {
                list.addLast(list.removeFirst());
            }

            if (scrollOffset < 0) {
                list.addFirst(list.removeLast());
            }

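            // after the rotation, get(0) (equivalent to getFirst()) is the newly selected color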
            return list.get(0).getItemStack();
        }
    }

    if (newColor != null) {
        this.setColor(is, newColor);
    }

    return newColor;
}

From source file:com.ibm.jaggr.service.impl.deps.DepTree.java

/**
 * Object constructor. Attempts to de-serialize the cached dependency lists
 * from disk and then validates the dependency lists based on last-modified
 * dates, looking for any new or removed files. If the cached dependency
 * list data cannot be de-serialized, new lists are constructed. Once the
 * dependency lists have been validated, the list data is serialized back
 * out to disk.
 * 
 * @param paths
 *            Collection of URIs which specify the target resources
 *            to be scanned for javascript files.
 * @param aggregator
 *            The servlet instance for this object
 * @param stamp
 *            timestamp associated with external override/customization
 *            resources that are checked on every server restart
 * @param clean
 *            If true, then the dependency lists are generated from scratch
 *            rather than by de-serializing and then validating the cached
 *            dependency lists.
 * @param validateDeps
 *            If true, then validate existing cached dependencies using
 *            file last-modified times.
 * @throws IOException
 */
public DepTree(Collection<URI> paths, IAggregator aggregator, long stamp, boolean clean, boolean validateDeps)
        throws IOException {
    this.stamp = stamp;
    IConfig config = aggregator.getConfig();
    rawConfig = config.toString();

    File cacheDir = new File(aggregator.getWorkingDirectory(), DEPCACHE_DIRNAME);
    File cacheFile = new File(cacheDir, CACHE_FILE);

    /*
     * The de-serialized dependency map. If we have a cached dependency map,
     * then it will be validated against the last-modified dates of the
     * current files and only the files that have changed will need to be
     * re-parsed to update the dependency lists.
     */
    DepTree cached = null;

    if (!clean) {
        // If we're not starting clean, try to de-serialize the map from
        // cache
        try {
            ObjectInputStream is = new ObjectInputStream(new FileInputStream(cacheFile));
            try {
                cached = (DepTree) is.readObject();
            } finally {
                try {
                    is.close();
                } catch (Exception ignore) {
                }
            }
        } catch (FileNotFoundException e) {
            /*
             * Not an error. Just means that the cache file hasn't been
             * written yet or else it's been deleted.
             */
            if (log.isLoggable(Level.INFO))
                log.log(Level.INFO, Messages.DepTree_1);
        } catch (Exception e) {
            if (log.isLoggable(Level.SEVERE))
                log.log(Level.SEVERE, e.getMessage(), e);
        }
    }

    // If the cacheBust config param has changed, then do a clean build
    // of the dependencies.
    if (cached != null) {
        if (stamp == 0) {
            // no init stamp provided.  Preserve the cached one.
            stamp = cached.stamp;
        }
        if (stamp > cached.stamp) {
            // init stamp has been updated.  Validate dependencies.
            validateDeps = true;
        }
        cacheBust = aggregator.getOptions().getCacheBust();
        if (!StringUtils.equals(cacheBust, cached.cacheBust)) {
            if (log.isLoggable(Level.INFO)) {
                log.info(Messages.DepTree_2);
            }
            cached = null;
        }
    }

    /*
     * If we de-serialized a previously saved dependency map, then go with
     * that.
     */
    if (cached != null && rawConfig.equals(cached.rawConfig) && !validateDeps && !clean) {
        depMap = cached.depMap;
        return;
    }

    // Initialize the dependency map
    depMap = new ConcurrentHashMap<URI, DepTreeNode>();

    // This can take a while, so print something to the console
    String msg = MessageFormat.format(Messages.DepTree_3, new Object[] { aggregator.getName() });

    ConsoleService cs = new ConsoleService();
    cs.println(msg);

    if (log.isLoggable(Level.INFO)) {
        log.info(msg);
    }
    // Make sure that all the paths are unique and orthogonal
    paths = DepUtils.removeRedundantPaths(paths);

    /*
     * Create the thread pools, one for the tree builders and one for the
     * parsers. Since a tree builder thread will wait for all the outstanding
     * parser threads started by that builder to complete, we need to use two
     * independent thread pools to guard against the possibility of deadlock
     * caused by all the threads in the pool being consumed by tree builders
     * and leaving none available to service the parsers.
     */
    final ThreadGroup treeBuilderTG = new ThreadGroup(TREEBUILDER_TGNAME),
            parserTG = new ThreadGroup(JSPARSER_TGNAME);
    ExecutorService treeBuilderExc = Executors.newFixedThreadPool(10, new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(treeBuilderTG, r, MessageFormat.format(THREADNAME,
                    new Object[] { treeBuilderTG.getName(), treeBuilderTG.activeCount() }));
        }
    }), parserExc = Executors.newFixedThreadPool(20, new ThreadFactory() {
        public Thread newThread(Runnable r) {
            return new Thread(parserTG, r, MessageFormat.format(THREADNAME,
                    new Object[] { parserTG.getName(), parserTG.activeCount() }));
        }
    });

    // Counter to keep track of number of tree builder threads started
    AtomicInteger treeBuilderCount = new AtomicInteger(0);

    // The completion services for the thread pools
    final CompletionService<URI> parserCs = new ExecutorCompletionService<URI>(parserExc);
    CompletionService<DepTreeBuilder.Result> treeBuilderCs = new ExecutorCompletionService<DepTreeBuilder.Result>(
            treeBuilderExc);

    // Start the tree builder threads to process the paths
    for (final URI path : paths) {
        /*
         * Create or get from cache the root node for this path and
         * add it to the new map.
         */
        DepTreeNode root = new DepTreeNode(PathUtil.getModuleName(path));
        DepTreeNode cachedNode = null;
        if (cached != null) {
            cachedNode = cached.depMap.get(path);
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_4, new Object[] { path }));
            }
        } else {
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_5, new Object[] { path }));
            }
        }
        depMap.put(path, root);

        treeBuilderCount.incrementAndGet();
        treeBuilderCs.submit(new DepTreeBuilder(aggregator, parserCs, path, root, cachedNode));
    }

    // List of parser exceptions
    LinkedList<Exception> parserExceptions = new LinkedList<Exception>();

    /*
     * Pull the completed tree builder tasks from the completion queue until
     * all the paths have been processed
     */
    while (treeBuilderCount.decrementAndGet() >= 0) {
        try {
            DepTreeBuilder.Result result = treeBuilderCs.take().get();
            if (log.isLoggable(Level.INFO)) {
                log.info(MessageFormat.format(Messages.DepTree_6,
                        new Object[] { result.parseCount, result.dirName }));
            }
        } catch (Exception e) {
            if (log.isLoggable(Level.SEVERE))
                log.log(Level.SEVERE, e.getMessage(), e);
            parserExceptions.add(e);
        }
    }

    // shutdown the thread pools now that we're done with them
    parserExc.shutdown();
    treeBuilderExc.shutdown();

    // If parser exceptions occurred, then rethrow the first one 
    if (parserExceptions.size() > 0) {
        throw new RuntimeException(parserExceptions.get(0));
    }

    // Prune dead nodes (nodes with no children or dependency lists)
    for (Map.Entry<URI, DepTreeNode> entry : depMap.entrySet()) {
        entry.getValue().prune();
    }

    /*
     * Make sure the cache directory exists before we try to serialize the
     * dependency map.
     */
    if (!cacheDir.exists())
        if (!cacheDir.mkdirs()) {
            throw new IOException(
                    MessageFormat.format(Messages.DepTree_0, new Object[] { cacheDir.getAbsolutePath() }));
        }

    // Serialize the map to the cache directory
    ObjectOutputStream os;
    os = new ObjectOutputStream(new FileOutputStream(cacheFile));
    try {
        os.writeObject(this);
    } finally {
        try {
            os.close();
        } catch (Exception ignore) {
        }
    }
    msg = MessageFormat.format(Messages.DepTree_7, new Object[] { aggregator.getName() });

    // Output that we're done.
    cs.println(msg);
    if (log.isLoggable(Level.INFO)) {
        log.info(msg);
    }
}

From source file:com.linkedpipes.plugin.loader.dcatAp11ToDkanBatch.DcatAp11ToDkanBatch.java

@Override
public void execute() throws LpException {

    apiURI = configuration.getApiUri();

    // flag for HTTP requests that fail with "failed to respond"
    boolean responded = false;

    if (apiURI == null || apiURI.isEmpty() || configuration.getUsername() == null
            || configuration.getUsername().isEmpty() || configuration.getPassword() == null
            || configuration.getPassword().isEmpty()) {
        throw exceptionFactory.failure("Missing required settings.");
    }

    Map<String, String> groups = getGroups();

    LOG.debug("Querying metadata for datasets");

    LinkedList<String> datasets = new LinkedList<>();
    for (Map<String, Value> map : executeSelectQuery(
            "SELECT ?d WHERE {?d a <" + DcatAp11ToDkanBatchVocabulary.DCAT_DATASET_CLASS + ">}")) {
        datasets.add(map.get("d").stringValue());
    }

    int current = 0;
    int total = datasets.size();
    LOG.info("Found " + total + " datasets");
    progressReport.start(total);

    token = getToken(configuration.getUsername(), configuration.getPassword());

    for (String datasetURI : datasets) {
        current++;

        CloseableHttpResponse queryResponse = null;

        LOG.info("Processing dataset " + current + "/" + total + ": " + datasetURI);

        String publisher_uri = executeSimpleSelectQuery("SELECT ?publisher_uri WHERE {<" + datasetURI + "> <"
                + DCTERMS.PUBLISHER + "> ?publisher_uri }", "publisher_uri");
        String publisher_name = executeSimpleSelectQuery(
                "SELECT ?publisher_name WHERE {<" + datasetURI + "> <" + DCTERMS.PUBLISHER + ">/<" + FOAF.NAME
                        + "> ?publisher_name FILTER(LANGMATCHES(LANG(?publisher_name), \""
                        + configuration.getLoadLanguage() + "\"))}",
                "publisher_name");

        if (!groups.containsKey(publisher_uri)) {
            LOG.debug("Creating group " + publisher_uri);

            if (publisher_name == null || publisher_name.isEmpty()) {
                throw exceptionFactory.failure("Publisher has no name: " + publisher_uri);
            }

            HttpPost httpPost = new HttpPost(apiURI + "/node");
            httpPost.addHeader(new BasicHeader("Accept", "application/json"));
            httpPost.addHeader(new BasicHeader("X-CSRF-Token", token));

            ArrayList<NameValuePair> postParameters = new ArrayList<>();
            postParameters.add(new BasicNameValuePair("type", "group"));
            postParameters.add(new BasicNameValuePair("title", publisher_name));
            postParameters.add(new BasicNameValuePair("body[und][0][value]", publisher_uri));

            try {
                UrlEncodedFormEntity form = new UrlEncodedFormEntity(postParameters, "UTF-8");
                httpPost.setEntity(form);
            } catch (UnsupportedEncodingException e) {
                LOG.error("Unexpected encoding issue");
            }

            CloseableHttpResponse response = null;

            responded = false;
            do {
                try {
                    response = postClient.execute(httpPost);
                    if (response.getStatusLine().getStatusCode() == 200) {
                        LOG.debug("Group created OK");
                        String orgID = new JSONObject(EntityUtils.toString(response.getEntity()))
                                .getString("nid");
                        groups.put(publisher_uri, orgID);
                    } else {
                        String ent = EntityUtils.toString(response.getEntity());
                        LOG.error("Group:" + ent);
                        //throw exceptionFactory.failed("Error creating group: " + ent);
                    }
                    responded = true;
                } catch (Exception e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } finally {
                    if (response != null) {
                        try {
                            response.close();
                        } catch (IOException e) {
                            LOG.error(e.getLocalizedMessage(), e);
                            throw exceptionFactory.failure("Error creating group");
                        }
                    }
                }
            } while (!responded);
        }

        ArrayList<NameValuePair> datasetFields = new ArrayList<>();
        datasetFields.add(new BasicNameValuePair("type", "dataset"));

        LinkedList<String> keywords = new LinkedList<>();
        for (Map<String, Value> map : executeSelectQuery(
                "SELECT ?keyword WHERE {<" + datasetURI + "> <" + DcatAp11ToDkanBatchVocabulary.DCAT_KEYWORD
                        + "> ?keyword FILTER(LANGMATCHES(LANG(?keyword), \"" + configuration.getLoadLanguage()
                        + "\"))}")) {
            keywords.add(map.get("keyword").stringValue());
        }
        String concatTags = "";
        for (String keyword : keywords) {
            String safekeyword = fixKeyword(keyword);
            if (safekeyword.length() >= 2) {
                concatTags += "\"\"" + safekeyword + "\"\" ";
            }
        }
        if (!concatTags.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("field_tags[und][value_field]", concatTags));
        }

        String title = executeSimpleSelectQuery("SELECT ?title WHERE {<" + datasetURI + "> <" + DCTERMS.TITLE
                + "> ?title FILTER(LANGMATCHES(LANG(?title), \"" + configuration.getLoadLanguage() + "\"))}",
                "title");
        if (!title.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("title", title));
        }
        String description = executeSimpleSelectQuery("SELECT ?description WHERE {<" + datasetURI + "> <"
                + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \""
                + configuration.getLoadLanguage() + "\"))}", "description");
        if (!description.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("body[und][0][value]", description));
        } else if (configuration.getProfile()
                .equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
            //Description is mandatory in NKOD. If missing, add at least title.
            datasetFields.add(new BasicNameValuePair("body[und][0][value]", title));
        }
        String issued = executeSimpleSelectQuery(
                "SELECT ?issued WHERE {<" + datasetURI + "> <" + DCTERMS.ISSUED + "> ?issued }", "issued");
        if (!issued.isEmpty()) {
            //long unixTime = System.currentTimeMillis() / 1000L;
            datasetFields.add(new BasicNameValuePair("created", issued));
        }
        String modified = executeSimpleSelectQuery(
                "SELECT ?modified WHERE {<" + datasetURI + "> <" + DCTERMS.MODIFIED + "> ?modified }",
                "modified");
        if (!modified.isEmpty()) {
            datasetFields.add(new BasicNameValuePair("changed", modified));
        }

        if (!publisher_uri.isEmpty()) {
            datasetFields
                    .add(new BasicNameValuePair("og_group_ref[und][target_id]", groups.get(publisher_uri)));
        }

        if (configuration.getProfile().equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
            String contactPoint = executeSimpleSelectQuery("SELECT ?contact WHERE {<" + datasetURI + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_CONTACT_POINT + ">/<"
                    + DcatAp11ToDkanBatchVocabulary.VCARD_HAS_EMAIL + "> ?contact }", "contact");
            if (!contactPoint.isEmpty()) {
                datasetFields
                        .add(new BasicNameValuePair("field_maintainer_email[und][0][value]", contactPoint));
            }
            String curatorName = executeSimpleSelectQuery("SELECT ?name WHERE {<" + datasetURI + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_CONTACT_POINT + ">/<"
                    + DcatAp11ToDkanBatchVocabulary.VCARD_FN + "> ?name }", "name");
            if (!curatorName.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_maintainer[und][0][value]", curatorName));
            }
            if (!publisher_uri.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_publisher_uri[und][0][value]", publisher_uri));
            }
            if (!publisher_name.isEmpty()) {
                datasetFields
                        .add(new BasicNameValuePair("field_publisher_name[und][0][value]", publisher_name));
            }

            String periodicity = executeSimpleSelectQuery("SELECT ?periodicity WHERE {<" + datasetURI + "> <"
                    + DCTERMS.ACCRUAL_PERIODICITY + "> ?periodicity }", "periodicity");
            if (!periodicity.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_frequency_ods[und][0][value]", periodicity));
            } else {
                //Mandatory in NKOD
                datasetFields.add(new BasicNameValuePair("field_frequency_ods[und][0][value]",
                        "http://publications.europa.eu/resource/authority/frequency/UNKNOWN"));
            }
            String temporalStart = executeSimpleSelectQuery(
                    "SELECT ?temporalStart WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<"
                            + DcatAp11ToDkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }",
                    "temporalStart");
            if (!temporalStart.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_temporal_start[und][0][value]", temporalStart));
            }
            String temporalEnd = executeSimpleSelectQuery(
                    "SELECT ?temporalEnd WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<"
                            + DcatAp11ToDkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }",
                    "temporalEnd");
            if (!temporalEnd.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_temporal_end[und][0][value]", temporalEnd));
            }
            String schemaURL = executeSimpleSelectQuery(
                    "SELECT ?schema WHERE {<" + datasetURI + "> <" + FOAF.PAGE + "> ?schema }", "schema");
            if (!schemaURL.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_schema[und][0][value]", schemaURL));
            }
            String spatial = executeSimpleSelectQuery(
                    "SELECT ?spatial WHERE {<" + datasetURI + "> <" + DCTERMS.SPATIAL + "> ?spatial }",
                    "spatial");
            if (!spatial.isEmpty()) {
                datasetFields.add(new BasicNameValuePair("field_spatial[und][0][value]", spatial));
                if (spatial.matches("http:\\/\\/ruian.linked.opendata.cz\\/resource\\/.*")) {
                    String type = spatial.replaceAll(
                            "http:\\/\\/ruian.linked.opendata.cz\\/resource\\/([^\\/]+)\\/(.*)", "$1");
                    String code = spatial.replaceAll(
                            "http:\\/\\/ruian.linked.opendata.cz\\/resource\\/([^\\/]+)\\/(.*)", "$2");
                    String typ;
                    //We should not parse IRIs, however, here we have no choice.
                    switch (type) {
                    case "vusc":
                        typ = "VC";
                        break;
                    case "obce":
                        typ = "OB";
                        break;
                    case "kraje":
                        typ = "KR";
                        break;
                    case "orp":
                        typ = "OP";
                        break;
                    case "momc":
                        typ = "MC";
                        break;
                    case "pou":
                        typ = "PU";
                        break;
                    default:
                        typ = "ST";
                    }
                    datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", typ));
                    datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", code));
                } else {
                    //RUIAN type and code are mandatory in NKOD
                    datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", "ST"));
                    datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", "1"));
                }
            } else {
                //RUIAN type and code are mandatory in NKOD
                datasetFields.add(new BasicNameValuePair("field_ruian_type[und][0][value]", "ST"));
                datasetFields.add(new BasicNameValuePair("field_ruian_code[und][0][value]", "1"));
            }
            //DCAT-AP v1.1: has to be an IRI from http://publications.europa.eu/mdr/authority/file-type/index.html
            LinkedList<String> themes = new LinkedList<>();
            for (Map<String, Value> map : executeSelectQuery("SELECT ?theme WHERE {<" + datasetURI + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_THEME + "> ?theme }")) {
                themes.add(map.get("theme").stringValue());
            }
            String concatThemes = "";
            for (String theme : themes) {
                concatThemes += theme + " ";
            }
            if (!concatThemes.isEmpty())
                datasetFields.add(new BasicNameValuePair("field_theme[und][0][value]", concatThemes));
        }

        //Distributions

        LinkedList<String> distributions = new LinkedList<>();
        for (Map<String, Value> map : executeSelectQuery("SELECT ?distribution WHERE {<" + datasetURI + "> <"
                + DcatAp11ToDkanBatchVocabulary.DCAT_DISTRIBUTION + "> ?distribution }")) {
            distributions.add(map.get("distribution").stringValue());
        }

        for (int d = 0; d < distributions.size(); d++) {
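            // distributions.get(d) traverses the LinkedList on each iteration (O(n^2) overall); an Iterator with a separate counter would be linear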
            String distribution = distributions.get(d);
            ArrayList<NameValuePair> distroFields = new ArrayList<>();
            distroFields.add(new BasicNameValuePair("type", "resource"));

            String dtitle = executeSimpleSelectQuery("SELECT ?title WHERE {<" + distribution + "> <"
                    + DCTERMS.TITLE + "> ?title FILTER(LANGMATCHES(LANG(?title), \""
                    + configuration.getLoadLanguage() + "\"))}", "title");
            if (dtitle.isEmpty()) {
                //Distribution title is mandatory in DKAN
                dtitle = title.isEmpty() ? "Resource" : title;
            }
            distroFields.add(new BasicNameValuePair("title", dtitle));

            String ddescription = executeSimpleSelectQuery("SELECT ?description WHERE {<" + distribution + "> <"
                    + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \""
                    + configuration.getLoadLanguage() + "\"))}", "description");
            if (!ddescription.isEmpty()) {
                distroFields.add(new BasicNameValuePair("body[und][0][value]", ddescription));
            }
            /*String dformat = executeSimpleSelectQuery("SELECT ?format WHERE {<" + distribution + "> <"+ DCTERMS.FORMAT + "> ?format }", "format");
            if (!dformat.isEmpty() && codelists != null) {
            String formatlabel = executeSimpleCodelistSelectQuery("SELECT ?formatlabel WHERE {<" + dformat + "> <"+ SKOS.PREF_LABEL + "> ?formatlabel FILTER(LANGMATCHES(LANG(?formatlabel), \"en\"))}", "formatlabel");
            if (!formatlabel.isEmpty()) {
                distroFields.add(new BasicNameValuePair("field_format[und][0][value]", formatlabel));
            }
            }*/
            String dmimetype = executeSimpleSelectQuery("SELECT ?format WHERE {<" + distribution + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_MEDIATYPE + "> ?format }", "format");
            if (!dmimetype.isEmpty()) {
                distroFields.add(new BasicNameValuePair("field_link_remote_file[und][0][filemime]",
                        dmimetype.replaceAll(".*\\/([^\\/]+\\/[^\\/]+)", "$1")));
            }

            String dwnld = executeSimpleSelectQuery("SELECT ?dwnld WHERE {<" + distribution + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_DOWNLOADURL + "> ?dwnld }", "dwnld");
            String access = executeSimpleSelectQuery("SELECT ?acc WHERE {<" + distribution + "> <"
                    + DcatAp11ToDkanBatchVocabulary.DCAT_ACCESSURL + "> ?acc }", "acc");

            //we prefer downloadURL, but only accessURL is mandatory
            if (dwnld == null || dwnld.isEmpty()) {
                dwnld = access;
                if (dwnld == null || dwnld.isEmpty()) {
                    LOG.warn("Empty download and access URLs: " + datasetURI);
                    continue;
                }
            }

            if (!dwnld.isEmpty()) {
                distroFields.add(new BasicNameValuePair(
                        "field_link_remote_file[und][0][filefield_remotefile][url]", dwnld));
            }

            /*if (!distribution.isEmpty()) {
            distro.put("distro_url", distribution);
            }*/

            String dissued = executeSimpleSelectQuery(
                    "SELECT ?issued WHERE {<" + distribution + "> <" + DCTERMS.ISSUED + "> ?issued }",
                    "issued");
            if (!dissued.isEmpty()) {
                distroFields.add(new BasicNameValuePair("created", dissued));
            }
            String dmodified = executeSimpleSelectQuery(
                    "SELECT ?modified WHERE {<" + distribution + "> <" + DCTERMS.MODIFIED + "> ?modified }",
                    "modified");
            if (!dmodified.isEmpty()) {
                distroFields.add(new BasicNameValuePair("changed", dmodified));
            }

            if (configuration.getProfile().equals(DcatAp11ToDkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
                String dtemporalStart = executeSimpleSelectQuery(
                        "SELECT ?temporalStart WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<"
                                + DcatAp11ToDkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }",
                        "temporalStart");
                if (!dtemporalStart.isEmpty()) {
                    distroFields
                            .add(new BasicNameValuePair("field_temporal_start[und][0][value]", dtemporalStart));
                }
                String dtemporalEnd = executeSimpleSelectQuery(
                        "SELECT ?temporalEnd WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<"
                                + DcatAp11ToDkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }",
                        "temporalEnd");
                if (!dtemporalEnd.isEmpty()) {
                    distroFields.add(new BasicNameValuePair("field_temporal_end[und][0][value]", dtemporalEnd));
                }
                String dschemaURL = executeSimpleSelectQuery(
                        "SELECT ?schema WHERE {<" + distribution + "> <" + DCTERMS.CONFORMS_TO + "> ?schema }",
                        "schema");
                if (!dschemaURL.isEmpty()) {
                    distroFields.add(new BasicNameValuePair("field_described_by[und][0][value]", dschemaURL));
                }
                String dlicense = executeSimpleSelectQuery(
                        "SELECT ?license WHERE {<" + distribution + "> <" + DCTERMS.LICENSE + "> ?license }",
                        "license");
                if (dlicense.isEmpty()) {
                    //This is mandatory in NKOD and DKAN extension
                    dlicense = "http://joinup.ec.europa.eu/category/licence/unknown-licence";
                }
                distroFields.add(new BasicNameValuePair("field_licence[und][0][value]", dlicense));
                if (dmimetype.isEmpty()) {
                    //! field_format => mimetype
                    //This is mandatory in NKOD and DKAN extension
                    dmimetype = "http://www.iana.org/assignments/media-types/application/octet-stream";
                }
                distroFields.add(new BasicNameValuePair("field_mimetype[und][0][value]",
                        dmimetype.replaceAll(".*\\/([^\\/]+\\/[^\\/]+)", "$1")));
            }

            //POST DISTRIBUTION

            LOG.debug("Creating resource " + distribution);

            HttpPost httpPost = new HttpPost(apiURI + "/node");
            httpPost.addHeader(new BasicHeader("Accept", "application/json"));
            httpPost.addHeader(new BasicHeader("X-CSRF-Token", token));

            try {
                UrlEncodedFormEntity form = new UrlEncodedFormEntity(distroFields, "UTF-8");
                httpPost.setEntity(form);
            } catch (UnsupportedEncodingException e) {
                LOG.error("Unexpected encoding issue");
            }

            CloseableHttpResponse response = null;

            String resID = null;
            responded = false;
            do {
                try {
                    LOG.debug("POSTing resource " + distribution);
                    response = postClient.execute(httpPost);
                    if (response.getStatusLine().getStatusCode() == 200) {
                        String resp = EntityUtils.toString(response.getEntity());
                        LOG.debug("Resource created OK: " + resp);
                        try {
                            resID = new JSONObject(resp).getString("nid");
                            datasetFields.add(new BasicNameValuePair(
                                    "field_resources[und][" + d + "][target_id]", dtitle + " (" + resID + ")"));
                        } catch (JSONException e) {
                            LOG.error(e.getLocalizedMessage(), e);
                            LOG.error("Request: " + distroFields.toString());
                            LOG.error("Response: " + resp);
                        }
                    } else {
                        String ent = EntityUtils.toString(response.getEntity());
                        LOG.error("Resource:" + ent);
                        //throw exceptionFactory.failed("Error creating resource: " + ent);
                    }
                    responded = true;
                } catch (NoHttpResponseException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } catch (IOException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } finally {
                    if (response != null) {
                        try {
                            response.close();
                        } catch (IOException e) {
                            LOG.error(e.getLocalizedMessage(), e);
                            //throw exceptionFactory.failed("Error creating resource");
                        }
                    }
                }
            } while (!responded);

        }

        LOG.debug("Creating dataset " + datasetURI);

        HttpPost httpPost = new HttpPost(apiURI + "/node");
        httpPost.addHeader(new BasicHeader("Accept", "application/json"));
        httpPost.addHeader(new BasicHeader("X-CSRF-Token", token));

        try {
            UrlEncodedFormEntity form = new UrlEncodedFormEntity(datasetFields, "UTF-8");
            httpPost.setEntity(form);
        } catch (UnsupportedEncodingException e) {
            LOG.error("Unexpected encoding issue");
        }

        CloseableHttpResponse response = null;

        responded = false;
        do {
            try {
                LOG.debug("POSTing dataset " + datasetURI);
                response = postClient.execute(httpPost);
                if (response.getStatusLine().getStatusCode() == 200) {
                    LOG.debug("Dataset created OK");
                } else {
                    String ent = EntityUtils.toString(response.getEntity());
                    LOG.error("Dataset:" + ent);
                    //throw exceptionFactory.failed("Error creating dataset: " + ent);
                }
                responded = true;
            } catch (NoHttpResponseException e) {
                LOG.error(e.getLocalizedMessage(), e);
            } catch (IOException e) {
                LOG.error(e.getLocalizedMessage(), e);
            } finally {
                if (response != null) {
                    try {
                        response.close();
                    } catch (IOException e) {
                        LOG.error(e.getLocalizedMessage(), e);
                        throw exceptionFactory.failure("Error creating dataset");
                    }
                }
            }
        } while (!responded);

        progressReport.entryProcessed();
    }

    try {
        queryClient.close();
        createClient.close();
        postClient.close();
    } catch (IOException e) {
        LOG.error(e.getLocalizedMessage(), e);
    }

    progressReport.done();

}