Example usage for java.util LinkedList size

List of usage examples for java.util LinkedList size

Introduction

In this page you can find the example usage for java.util LinkedList size.

Prototype

int size()

To view the source code for java.util LinkedList size, click the Source Link below.

Click Source Link

Usage

From source file:edu.cuny.cat.stat.HistoricalReport.java

/**
 * /*from w  ww  .  j av  a2 s  .co m*/
 * @param shouts
 * @param counts
 * @return a string that prints out the shouts in the list and grouped
 *         according to the counts array in a pretty way.
 */
protected String prettyString(final LinkedList<Shout> shouts, final int counts[]) {
    String s = "";
    int size = shouts.size();
    for (int i = 0; i < memorySize; i++) {
        final int index = (currentMemoryCell + memorySize - i) % memorySize;
        s = prettyString(shouts, size - counts[index], counts[index]) + "\n" + s;
        size -= counts[index];
    }

    return s;
}

From source file:edu.cuny.cat.stat.HistoricalReport.java

/**
 * /*w ww  .j av  a 2 s .  c  o  m*/
 * @param shouts
 * @param index
 * @param length
 * @return a string that prints out the specified shouts in the list in a
 *         pretty way.
 */
protected String prettyString(final LinkedList<Shout> shouts, final int index, final int length) {
    int last = shouts.size();
    if (length >= 0) {
        last = index + length;
    }

    String s = "";

    for (int i = index; i < last; i++) {
        if (s.length() == 0) {
            s += "[";
        } else {
            s += ", ";
        }
        s += prettyString(shouts.get(i));
    }

    if (s.length() == 0) {
        s += "[";
    }

    s += "]";

    return s;
}

From source file:acromusashi.stream.component.rabbitmq.AbstractContextBuilder.java

/**
 * Builds, for each queue, the ordered list of RabbitMQ processes to try,
 * rotated so that the process this client previously connected to (if any)
 * comes first.
 *
 * @param contextList cluster contexts used to initialize the context map on
 *        first call
 * @return map from queue name to its rotated process list
 * @throws RabbitmqCommunicateException if the context map cannot be built
 */
protected Map<String, List<String>> initProcessLists(List<RabbitmqClusterContext> contextList)
        throws RabbitmqCommunicateException {
    if (this.contextMap == null) {
        this.contextMap = initContextMap(contextList);
    }

    Map<String, List<String>> processLists = new HashMap<String, List<String>>();

    for (String queueName : this.contextMap.keySet()) {
        // Start from the configured process list for this queue's cluster.
        RabbitmqClusterContext context = this.contextMap.get(queueName);
        LinkedList<String> processList = new LinkedList<String>(context.getMqProcessList());

        // Find the rotation point: the process used for the last connection.
        // Default to 0 (no rotation) when there is no recorded connection.
        int processIndex = 0;
        String connectionProcess = context.getConnectionProcessMap().get(getClientId(queueName));
        if (connectionProcess != null) {
            int foundIndex = processList.indexOf(connectionProcess);
            // Guard: indexOf returns -1 when the recorded process is no
            // longer in the list; the original code then called
            // subList(-1, ...) / subList(0, -1), which throws.
            if (foundIndex >= 0) {
                processIndex = foundIndex;
            }
        }

        // Rotate left by processIndex: [index..end) followed by [0..index).
        LinkedList<String> rotated = new LinkedList<String>(
                processList.subList(processIndex, processList.size()));
        rotated.addAll(processList.subList(0, processIndex));

        processLists.put(queueName, rotated);
    }

    return processLists;
}

From source file:net.semanticmetadata.lire.imageanalysis.bovw.LocalFeatureHistogramBuilderFromCodeBook.java

/**
 * Randomly selects up to {@code numDocsForVocabulary} document ids from the
 * index to serve as the sample for vocabulary building.
 *
 * @return the set of selected document ids
 * @throws IOException if reading a document from the index fails
 */
private HashSet<Integer> selectVocabularyDocs() throws IOException {
    // need to make sure that this is not running forever ...
    int loopCount = 0;
    float maxDocs = reader.maxDoc();
    int capacity = (int) Math.min(numDocsForVocabulary, maxDocs);
    if (capacity < 0)
        capacity = (int) (maxDocs / 2);
    HashSet<Integer> result = new HashSet<Integer>(capacity);
    int tmpDocNumber, tmpIndex;
    LinkedList<Integer> docCandidates = new LinkedList<Integer>();
    // three cases:
    if (numDocsForVocabulary >= maxDocs) {
        // (1) we want at least as many docs as exist: take them all.
        for (int i = 0; i < maxDocs; i++) {
            result.add(i);
        }
        return result;
    } else if (numDocsForVocabulary >= maxDocs - 100) { // or it's slightly less:
        // (2) slightly fewer than all: take everything, then evict random
        // SURPLUS entries. The original called HashSet.remove with a random
        // int, which removes by VALUE, not by position - once every value
        // below the current size had been evicted the loop could never
        // terminate. Evicting by index from a list fixes that.
        for (int i = 0; i < maxDocs; i++) {
            result.add(i);
        }
        LinkedList<Integer> surplus = new LinkedList<Integer>(result);
        while (surplus.size() > numDocsForVocabulary) {
            surplus.remove((int) Math.floor(Math.random() * surplus.size()));
        }
        return new HashSet<Integer>(surplus);
    } else {
        // (3) sample without replacement from the candidate pool.
        for (int i = 0; i < maxDocs; i++) {
            docCandidates.add(i);
        }
        for (int r = 0; r < capacity; r++) {
            boolean worksFine = false;
            do {
                // Safety guard INSIDE the retry loop: the original placement
                // (once per outer iteration) could never stop a spinning
                // inner loop, and exhausting the candidate list would have
                // thrown an IndexOutOfBoundsException instead of the
                // intended diagnostic below.
                if (docCandidates.isEmpty() || loopCount++ > capacity * 100)
                    throw new UnsupportedOperationException(
                            "Could not get the documents, maybe there are not enough documents in the index?");
                tmpIndex = (int) Math.floor(Math.random() * (double) docCandidates.size());
                tmpDocNumber = docCandidates.get(tmpIndex);
                docCandidates.remove(tmpIndex);
                // check if the selected doc number is valid: not null, not deleted and not already chosen.
                worksFine = (reader.document(tmpDocNumber) != null) && !result.contains(tmpDocNumber);
            } while (!worksFine);
            result.add(tmpDocNumber);
        }
        return result;
    }
}

From source file:uk.ac.horizon.ubihelper.service.PeerManager.java

/**
 * Handles completion of a DNS SRV lookup: for every peer request in SRV
 * discovery against the given source address, records the discovered port
 * (or marks the request failed) and notifies listeners via updatePeer().
 *
 * @param dc  the DNS client holding the answers
 * @param src the address the lookup was issued against
 */
private synchronized void srvDiscoveryComplete(DnsClient dc, InetAddress src) {
    // Iterate over a snapshot so updatePeer() may safely remove entries
    // from peerRequests while we walk the list.
    final ArrayList<PeerRequestInfo> snapshot = new ArrayList<PeerRequestInfo>(peerRequests);
    for (PeerRequestInfo pi : snapshot) {
        if (pi.state != PeerRequestState.STATE_SRV_DISCOVERY || !pi.src.equals(src)) {
            continue;
        }
        LinkedList<DnsProtocol.RR> answers = dc.getAnswers();
        if (answers.size() == 0) {
            // No SRV record came back for this peer.
            pi.state = PeerRequestState.STATE_SRV_DISCOVERY_FAILED;
            updatePeer(pi);
            continue;
        }
        try {
            DnsProtocol.SrvData srv = DnsProtocol.srvFromData(answers.get(0).rdata);
            pi.state = PeerRequestState.STATE_SRV_FOUND;
            pi.port = srv.port;
            if (!srv.target.equals(src.getHostAddress())) {
                Log.w(TAG, "SRV returned different IP: " + srv.target + " vs " + src.getHostAddress());
            }
            updatePeer(pi);
        } catch (IOException e) {
            Log.w(TAG, "Error parsing SRV data: " + e.getMessage());
            pi.state = PeerRequestState.STATE_SRV_DISCOVERY_FAILED;
            updatePeer(pi);
        }
    }
}

From source file:mase.spec.StochasticHybridExchanger.java

/**
 * Selects {@code num} individuals from the pool according to the given
 * merge-selection mode.
 *
 * @param pool  candidate individuals
 * @param num   number of individuals to select
 * @param mode  truncate (best-first), fitnessproportionate (roulette wheel)
 *              or random (uniform, without replacement)
 * @param state evolution state supplying the RNG and error output
 * @return the selected individuals
 */
protected Individual[] selectIndividuals(Individual[] pool, int num, MergeSelection mode,
        EvolutionState state) {
    Individual[] chosen = new Individual[num];
    if (mode == MergeSelection.truncate) {
        // Best-first: copy the top 'num' entries of a fitness-sorted copy.
        System.arraycopy(sortedCopy(pool), 0, chosen, 0, num);
    } else if (mode == MergeSelection.fitnessproportionate) {
        // Roulette-wheel sampling without replacement.
        LinkedList<Individual> remaining = new LinkedList<>();
        double fitnessSum = 0;
        for (Individual ind : pool) {
            remaining.add(ind);
            fitnessSum += ((SimpleFitness) ind.fitness).fitness();
        }
        // 'slot' only advances when a pick succeeds, so a spin that makes no
        // pick simply retries with a fresh random threshold.
        for (int slot = 0; slot < num; ) {
            double threshold = state.random[0].nextDouble() * fitnessSum;
            double running = 0;
            Iterator<Individual> iter = remaining.iterator();
            while (iter.hasNext()) {
                Individual candidate = iter.next();
                running += ((SimpleFitness) candidate.fitness).fitness();
                if (running >= threshold) {
                    chosen[slot++] = candidate;
                    iter.remove();
                    fitnessSum -= ((SimpleFitness) candidate.fitness).fitness();
                    break;
                }
            }
        }
    } else if (mode == MergeSelection.random) {
        // Uniform sampling without replacement.
        LinkedList<Individual> remaining = new LinkedList<>(Arrays.asList(pool));
        for (int slot = 0; slot < num; slot++) {
            chosen[slot] = remaining.remove(state.random[0].nextInt(remaining.size()));
        }
    } else {
        state.output.fatal("Unknown picking mode: " + mode);
    }
    return chosen;
}

From source file:it.doqui.index.ecmengine.business.personalization.multirepository.index.lucene.RepositoryAwareADMLuceneSearcherImpl.java

/**
 * Computes the {@code count} terms of the given field with the highest
 * document frequency.
 *
 * @param field the index field to scan
 * @param count maximum number of term/frequency pairs to return
 * @return pairs of (term text, document frequency) ordered by descending
 *         frequency
 * @throws SearcherException if the index cannot be read or the searcher
 *         cannot be closed
 */
public List<Pair<String, Integer>> getTopTerms(String field, int count) {
    RepositoryAwareClosingIndexSearcher searcher = null;
    try {
        // Running top-N list, kept ordered by descending frequency.
        LinkedList<Pair<String, Integer>> answer = new LinkedList<Pair<String, Integer>>();
        searcher = getSearcher(indexer);
        IndexReader reader = searcher.getIndexReader();
        // Enumerate terms starting at the first term of 'field'.
        TermEnum terms = reader.terms(new Term(field, ""));
        do {
            Term term = terms.term();
            if (term != null) {
                // Term enumeration is ordered by field; once we leave the
                // requested field there is nothing more to count.
                if (!term.field().equals(field)) {
                    break;
                }
                int freq = terms.docFreq();
                Pair<String, Integer> pair = new Pair<String, Integer>(term.text(), Integer.valueOf(freq));
                if (answer.size() < count) {
                    // List not yet full: append when that keeps the ordering,
                    // otherwise insert before the first smaller entry.
                    if (answer.size() == 0) {
                        answer.add(pair);
                    } else if (answer.get(answer.size() - 1).getSecond().compareTo(pair.getSecond()) >= 0) {
                        answer.add(pair);
                    } else {
                        for (ListIterator<Pair<String, Integer>> it = answer.listIterator(); it
                                .hasNext(); /**/) {
                            Pair<String, Integer> test = it.next();
                            if (test.getSecond().compareTo(pair.getSecond()) < 0) {
                                it.previous();
                                it.add(pair);
                                break;
                            }
                        }
                    }
                } else if (answer.get(count - 1).getSecond().compareTo(pair.getSecond()) < 0) {
                    // List full but this term beats the current minimum:
                    // insert in order, then drop the smallest entry.
                    for (ListIterator<Pair<String, Integer>> it = answer.listIterator(); it.hasNext(); /**/) {
                        Pair<String, Integer> test = it.next();
                        if (test.getSecond().compareTo(pair.getSecond()) < 0) {
                            it.previous();
                            it.add(pair);
                            break;
                        }
                    }
                    answer.removeLast();
                } else {
                    // off the end
                }
            }
        } while (terms.next());
        // NOTE(review): terms.close() is skipped when an exception is thrown
        // above - consider moving it into a finally block.
        terms.close();
        return answer;

    } catch (IOException e) {
        throw new SearcherException(e);
    } finally {
        if (searcher != null) {
            try {
                searcher.close();
            } catch (IOException e) {
                throw new SearcherException(e);
            }
        }
    }

}

From source file:mase.spec.BasicHybridExchanger.java

/**
 * Picks {@code num} individuals from the pool according to the given mode.
 *
 * @param pool  candidate individuals
 * @param num   number of individuals to pick
 * @param mode  first (leading entries), elite (best by fitness),
 *              probabilistic (roulette wheel) or random (uniform,
 *              without replacement)
 * @param state evolution state supplying the RNG and error output
 * @return the picked individuals
 */
protected Individual[] pickIndividuals(Individual[] pool, int num, PickMode mode, EvolutionState state) {
    final Individual[] result = new Individual[num];
    if (mode == PickMode.first) {
        // Take the first 'num' individuals as they appear in the pool.
        System.arraycopy(pool, 0, result, 0, num);
    } else if (mode == PickMode.elite) {
        // Take the best 'num' from a fitness-sorted copy.
        Individual[] ranked = sortedCopy(pool);
        System.arraycopy(ranked, 0, result, 0, num);
    } else if (mode == PickMode.probabilistic) {
        // Fitness-proportionate (roulette-wheel) sampling without
        // replacement; the running total shrinks as individuals are removed.
        LinkedList<Individual> candidates = new LinkedList<>();
        double totalFitness = 0;
        for (Individual ind : pool) {
            candidates.add(ind);
            totalFitness += ((SimpleFitness) ind.fitness).fitness();
        }
        int filled = 0;
        while (filled < num) {
            final double target = state.random[0].nextDouble() * totalFitness;
            double cumulative = 0;
            for (Iterator<Individual> it = candidates.iterator(); it.hasNext(); ) {
                Individual candidate = it.next();
                cumulative += ((SimpleFitness) candidate.fitness).fitness();
                if (cumulative >= target) {
                    result[filled++] = candidate;
                    it.remove();
                    totalFitness -= ((SimpleFitness) candidate.fitness).fitness();
                    break;
                }
            }
        }
    } else if (mode == PickMode.random) {
        // Uniform sampling without replacement.
        LinkedList<Individual> candidates = new LinkedList<>(Arrays.asList(pool));
        int filled = 0;
        while (filled < num) {
            result[filled++] = candidates.remove(state.random[0].nextInt(candidates.size()));
        }
    } else {
        state.output.fatal("Unknown picking mode: " + mode);
    }
    return result;
}

From source file:com.linkedpipes.plugin.loader.dcatAp11ToCkanBatch.DcatAp11ToCkanBatch.java

/**
 * Loads DCAT-AP v1.1 dataset metadata from the input RDF and pushes it to a
 * CKAN catalog: ensures the publishing organization exists, creates missing
 * datasets, and updates every dataset (including its distributions mapped to
 * CKAN resources) through the CKAN API.
 *
 * @throws LpException if required API settings are missing or organization
 *         handling fails
 */
@Override
public void execute() throws LpException {

    apiURI = configuration.getApiUri();

    // Both the API endpoint and the API key are mandatory.
    if (apiURI == null || apiURI.isEmpty() || configuration.getApiKey() == null
            || configuration.getApiKey().isEmpty()) {
        throw exceptionFactory.failure("Missing required settings.");
    }

    Map<String, String> organizations = getOrganizations();

    LOG.debug("Querying metadata for datasets");

    // Collect the URIs of all dcat:Dataset instances in the input data.
    LinkedList<String> datasets = new LinkedList<>();
    for (Map<String, Value> map : executeSelectQuery(
            "SELECT ?d WHERE {?d a <" + DcatAp11ToCkanBatchVocabulary.DCAT_DATASET_CLASS + ">}")) {
        datasets.add(map.get("d").stringValue());
    }

    int current = 0;
    int total = datasets.size();

    LOG.info("Found " + total + " datasets");

    progressReport.start(total);

    for (String datasetURI : datasets) {
        current++;

        CloseableHttpResponse queryResponse = null;

        LOG.info("Processing dataset " + current + "/" + total + ": " + datasetURI);

        // The CKAN dataset id must be present in the metadata; skip otherwise.
        String datasetID = executeSimpleSelectQuery("SELECT ?did WHERE {<" + datasetURI + "> <"
                + DcatAp11ToCkanBatchVocabulary.LODCZCKAN_DATASET_ID + "> ?did }", "did");
        if (datasetID.isEmpty()) {
            LOG.warn("Dataset " + datasetURI + " has missing CKAN ID");
            continue;
        }

        boolean datasetExists = false;

        // Lookup tables for resources already present in CKAN:
        // by plain URL, by distribution URL, and by resource id.
        Map<String, String> resUrlIdMap = new HashMap<>();
        Map<String, String> resDistroIdMap = new HashMap<>();
        Map<String, JSONObject> resourceList = new HashMap<>();

        LOG.debug("Querying for the dataset " + datasetID + " in CKAN");
        HttpGet httpGet = new HttpGet(apiURI + "/package_show?id=" + datasetID);
        try {
            queryResponse = queryClient.execute(httpGet);
            if (queryResponse.getStatusLine().getStatusCode() == 200) {
                LOG.debug("Dataset found");
                datasetExists = true;

                // Index the dataset's existing resources so updates can be
                // matched to them later.
                JSONObject response = new JSONObject(EntityUtils.toString(queryResponse.getEntity()))
                        .getJSONObject("result");
                JSONArray resourcesArray = response.getJSONArray("resources");
                for (int i = 0; i < resourcesArray.length(); i++) {
                    String id = resourcesArray.getJSONObject(i).getString("id");
                    resourceList.put(id, resourcesArray.getJSONObject(i));

                    String url = resourcesArray.getJSONObject(i).getString("url");
                    resUrlIdMap.put(url, id);

                    if (resourcesArray.getJSONObject(i).has("distro_url")) {
                        String distro = resourcesArray.getJSONObject(i).getString("distro_url");
                        resDistroIdMap.put(distro, id);
                    }
                }
            } else {
                String ent = EntityUtils.toString(queryResponse.getEntity());
                LOG.debug("Dataset not found: " + ent);
            }
        } catch (Exception e) {
            LOG.error(e.getLocalizedMessage(), e);
        } finally {
            if (queryResponse != null) {
                try {
                    queryResponse.close();
                } catch (IOException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                }
            }
        }

        // Keywords restricted to the configured load language.
        LinkedList<String> keywords = new LinkedList<>();
        for (Map<String, Value> map : executeSelectQuery(
                "SELECT ?keyword WHERE {<" + datasetURI + "> <" + DcatAp11ToCkanBatchVocabulary.DCAT_KEYWORD
                        + "> ?keyword FILTER(LANGMATCHES(LANG(?keyword), \"" + configuration.getLoadLanguage()
                        + "\"))}")) {
            keywords.add(map.get("keyword").stringValue());
        }

        String publisher_uri = executeSimpleSelectQuery("SELECT ?publisher_uri WHERE {<" + datasetURI + "> <"
                + DCTERMS.PUBLISHER + "> ?publisher_uri }", "publisher_uri");
        String publisher_name = executeSimpleSelectQuery(
                "SELECT ?publisher_name WHERE {<" + datasetURI + "> <" + DCTERMS.PUBLISHER + ">/<" + FOAF.NAME
                        + "> ?publisher_name FILTER(LANGMATCHES(LANG(?publisher_name), \""
                        + configuration.getLoadLanguage() + "\"))}",
                "publisher_name");

        // Create the publishing organization in CKAN if it is not known yet.
        if (!organizations.containsKey(publisher_uri)) {
            LOG.debug("Creating organization " + publisher_uri);
            JSONObject root = new JSONObject();

            if (publisher_name == null || publisher_name.isEmpty()) {
                throw exceptionFactory.failure("Organization has no name: " + publisher_uri);
            }

            root.put("title", publisher_name);
            // CKAN organization names must be ASCII and lowercase; strip
            // diacritics and replace separators.
            String orgname = Normalizer.normalize(publisher_name, Normalizer.Form.NFD)
                    .replaceAll("\\P{InBasic_Latin}", "").replace(' ', '-').replace('.', '-').toLowerCase();
            root.put("name", orgname);
            JSONArray org_extras = new JSONArray();
            org_extras.put(new JSONObject().put("key", "uri").put("value", publisher_uri));
            root.put("extras", org_extras);

            HttpPost httpPost = new HttpPost(apiURI + "/organization_create");
            httpPost.addHeader(new BasicHeader("Authorization", configuration.getApiKey()));

            String json = root.toString();

            httpPost.setEntity(new StringEntity(json, Charset.forName("utf-8")));

            CloseableHttpResponse response = null;

            try {
                response = postClient.execute(httpPost);
                if (response.getStatusLine().getStatusCode() == 200) {
                    LOG.debug("Organization created OK");
                    //LOG.info("Response: " + EntityUtils.toString(response.getEntity()));
                    organizations.put(publisher_uri, orgname);
                } else if (response.getStatusLine().getStatusCode() == 409) {
                    String ent = EntityUtils.toString(response.getEntity());
                    LOG.error("Organization conflict: " + ent);
                    throw exceptionFactory.failure("Organization conflict: " + ent);
                } else {
                    String ent = EntityUtils.toString(response.getEntity());
                    LOG.error("Response:" + ent);
                    throw exceptionFactory.failure("Error creating organization: " + ent);
                }
            } catch (Exception e) {
                LOG.error(e.getLocalizedMessage(), e);
            } finally {
                if (response != null) {
                    try {
                        response.close();
                    } catch (IOException e) {
                        LOG.error(e.getLocalizedMessage(), e);
                        throw exceptionFactory.failure("Error creating dataset");
                    }
                }
            }
        }

        LOG.debug("Creating JSON");

        // Assemble the package_update payload for this dataset.
        JSONObject root = new JSONObject();

        JSONArray tags = new JSONArray();
        for (String keyword : keywords) {
            String safekeyword = fixKeyword(keyword);
            if (safekeyword.length() >= 2) {
                tags.put(new JSONObject().put("name", safekeyword));
            }
        }
        root.put("tags", tags);

        JSONArray resources = new JSONArray();

        if (!datasetID.isEmpty()) {
            root.put("name", datasetID);
        }

        String title = executeSimpleSelectQuery("SELECT ?title WHERE {<" + datasetURI + "> <" + DCTERMS.TITLE
                + "> ?title FILTER(LANGMATCHES(LANG(?title), \"" + configuration.getLoadLanguage() + "\"))}",
                "title");
        if (!title.isEmpty()) {
            root.put("title", title);
        }
        String description = executeSimpleSelectQuery("SELECT ?description WHERE {<" + datasetURI + "> <"
                + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \""
                + configuration.getLoadLanguage() + "\"))}", "description");
        if (!description.isEmpty()) {
            root.put("notes", description);
        }
        String contactPoint = executeSimpleSelectQuery("SELECT ?contact WHERE {<" + datasetURI + "> <"
                + DcatAp11ToCkanBatchVocabulary.DCAT_CONTACT_POINT + ">/<"
                + DcatAp11ToCkanBatchVocabulary.VCARD_HAS_EMAIL + "> ?contact }", "contact");
        if (!contactPoint.isEmpty()) {
            root.put("maintainer_email", contactPoint);
        }
        String curatorName = executeSimpleSelectQuery(
                "SELECT ?name WHERE {<" + datasetURI + "> <" + DcatAp11ToCkanBatchVocabulary.DCAT_CONTACT_POINT
                        + ">/<" + DcatAp11ToCkanBatchVocabulary.VCARD_FN + "> ?name }",
                "name");
        if (!curatorName.isEmpty()) {
            root.put("maintainer", curatorName);
        }
        String issued = executeSimpleSelectQuery(
                "SELECT ?issued WHERE {<" + datasetURI + "> <" + DCTERMS.ISSUED + "> ?issued }", "issued");
        if (!issued.isEmpty()) {
            root.put("metadata_created", issued);
        }
        String modified = executeSimpleSelectQuery(
                "SELECT ?modified WHERE {<" + datasetURI + "> <" + DCTERMS.MODIFIED + "> ?modified }",
                "modified");
        if (!modified.isEmpty()) {
            root.put("metadata_modified", modified);
        }

        // Additional properties only used by the NKOD (Czech national open
        // data catalog) profile.
        if (configuration.getProfile().equals(DcatAp11ToCkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
            if (!publisher_uri.isEmpty()) {
                root.put("publisher_uri", publisher_uri);
            }
            if (!publisher_name.isEmpty()) {
                root.put("publisher_name", publisher_name);
            }

            String periodicity = executeSimpleSelectQuery("SELECT ?periodicity WHERE {<" + datasetURI + "> <"
                    + DCTERMS.ACCRUAL_PERIODICITY + "> ?periodicity }", "periodicity");
            if (!periodicity.isEmpty()) {
                root.put("frequency", periodicity);
            }
            String temporalStart = executeSimpleSelectQuery(
                    "SELECT ?temporalStart WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<"
                            + DcatAp11ToCkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }",
                    "temporalStart");
            if (!temporalStart.isEmpty()) {
                root.put("temporal_start", temporalStart);
            }
            String temporalEnd = executeSimpleSelectQuery(
                    "SELECT ?temporalEnd WHERE {<" + datasetURI + "> <" + DCTERMS.TEMPORAL + ">/<"
                            + DcatAp11ToCkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }",
                    "temporalEnd");
            if (!temporalEnd.isEmpty()) {
                root.put("temporal_end", temporalEnd);
            }
            String schemaURL = executeSimpleSelectQuery(
                    "SELECT ?schema WHERE {<" + datasetURI + "> <" + FOAF.PAGE + "> ?schema }", "schema");
            if (!schemaURL.isEmpty()) {
                root.put("schema", schemaURL);
            }
            String spatial = executeSimpleSelectQuery(
                    "SELECT ?spatial WHERE {<" + datasetURI + "> <" + DCTERMS.SPATIAL + "> ?spatial }",
                    "spatial");
            if (!spatial.isEmpty()) {
                root.put("spatial_uri", spatial);
            }
            // Themes are concatenated into one space-separated string.
            LinkedList<String> themes = new LinkedList<>();
            for (Map<String, Value> map : executeSelectQuery("SELECT ?theme WHERE {<" + datasetURI + "> <"
                    + DcatAp11ToCkanBatchVocabulary.DCAT_THEME + "> ?theme }")) {
                themes.add(map.get("theme").stringValue());
            }
            String concatThemes = "";
            for (String theme : themes) {
                concatThemes += theme + " ";
            }
            if (!concatThemes.isEmpty())
                root.put("theme", concatThemes);

        }

        //Distributions

        LinkedList<String> distributions = new LinkedList<>();
        for (Map<String, Value> map : executeSelectQuery("SELECT ?distribution WHERE {<" + datasetURI + "> <"
                + DcatAp11ToCkanBatchVocabulary.DCAT_DISTRIBUTION + "> ?distribution }")) {
            distributions.add(map.get("distribution").stringValue());
        }

        // Map each DCAT distribution onto a CKAN resource.
        for (String distribution : distributions) {
            JSONObject distro = new JSONObject();

            String dtitle = executeSimpleSelectQuery("SELECT ?title WHERE {<" + distribution + "> <"
                    + DCTERMS.TITLE + "> ?title FILTER(LANGMATCHES(LANG(?title), \""
                    + configuration.getLoadLanguage() + "\"))}", "title");
            if (!dtitle.isEmpty()) {
                distro.put("name", dtitle);
            }
            String ddescription = executeSimpleSelectQuery("SELECT ?description WHERE {<" + distribution + "> <"
                    + DCTERMS.DESCRIPTION + "> ?description FILTER(LANGMATCHES(LANG(?description), \""
                    + configuration.getLoadLanguage() + "\"))}", "description");
            if (!ddescription.isEmpty()) {
                distro.put("description", ddescription);
            }
            //DCAT-AP v1.1: has to be an IRI from http://publications.europa.eu/mdr/authority/file-type/index.html
            String dformat = executeSimpleSelectQuery(
                    "SELECT ?format WHERE {<" + distribution + "> <" + DCTERMS.FORMAT + "> ?format }",
                    "format");
            if (!dformat.isEmpty() && codelists != null) {
                // Resolve the file-type IRI to its English label via the codelist.
                String formatlabel = executeSimpleCodelistSelectQuery(
                        "SELECT ?formatlabel WHERE {<" + dformat + "> <" + SKOS.PREF_LABEL
                                + "> ?formatlabel FILTER(LANGMATCHES(LANG(?formatlabel), \"en\"))}",
                        "formatlabel");
                if (!formatlabel.isEmpty()) {
                    distro.put("format", formatlabel);
                }
            }

            String dwnld = executeSimpleSelectQuery("SELECT ?dwnld WHERE {<" + distribution + "> <"
                    + DcatAp11ToCkanBatchVocabulary.DCAT_DOWNLOADURL + "> ?dwnld }", "dwnld");
            String access = executeSimpleSelectQuery("SELECT ?acc WHERE {<" + distribution + "> <"
                    + DcatAp11ToCkanBatchVocabulary.DCAT_ACCESSURL + "> ?acc }", "acc");

            //we prefer downloadURL, but only accessURL is mandatory
            if (dwnld == null || dwnld.isEmpty()) {
                dwnld = access;
                if (dwnld == null || dwnld.isEmpty()) {
                    LOG.warn("Empty download and access URLs: " + datasetURI);
                    continue;
                }
            }

            if (!dwnld.isEmpty()) {
                distro.put("url", dwnld);
            }
            if (!distribution.isEmpty()) {
                distro.put("distro_url", distribution);
            }

            distro.put("resource_type", "file");

            // Reuse the CKAN resource id when this distribution (or its URL)
            // was seen in CKAN before; remove it from the leftovers list.
            if (resDistroIdMap.containsKey(distribution)) {
                String id = resDistroIdMap.get(distribution);
                distro.put("id", id);
                resourceList.remove(id);
            } else if (resUrlIdMap.containsKey(dwnld)) {
                String id = resUrlIdMap.get(dwnld);
                distro.put("id", id);
                resourceList.remove(id);
            }

            String dissued = executeSimpleSelectQuery(
                    "SELECT ?issued WHERE {<" + distribution + "> <" + DCTERMS.ISSUED + "> ?issued }",
                    "issued");
            if (!dissued.isEmpty()) {
                distro.put("created", dissued);
            }
            String dmodified = executeSimpleSelectQuery(
                    "SELECT ?modified WHERE {<" + distribution + "> <" + DCTERMS.MODIFIED + "> ?modified }",
                    "modified");
            if (!dmodified.isEmpty()) {
                distro.put("last_modified", dmodified);
            }

            if (configuration.getProfile().equals(DcatAp11ToCkanBatchVocabulary.PROFILES_NKOD.stringValue())) {
                String dtemporalStart = executeSimpleSelectQuery(
                        "SELECT ?temporalStart WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<"
                                + DcatAp11ToCkanBatchVocabulary.SCHEMA_STARTDATE + "> ?temporalStart }",
                        "temporalStart");
                if (!dtemporalStart.isEmpty()) {
                    distro.put("temporal_start", dtemporalStart);
                }
                String dtemporalEnd = executeSimpleSelectQuery(
                        "SELECT ?temporalEnd WHERE {<" + distribution + "> <" + DCTERMS.TEMPORAL + ">/<"
                                + DcatAp11ToCkanBatchVocabulary.SCHEMA_ENDDATE + "> ?temporalEnd }",
                        "temporalEnd");
                if (!dtemporalEnd.isEmpty()) {
                    distro.put("temporal_end", dtemporalEnd);
                }
                String dspatial = executeSimpleSelectQuery(
                        "SELECT ?spatial WHERE {<" + distribution + "> <" + DCTERMS.SPATIAL + "> ?spatial }",
                        "spatial");
                // NOTE(review): this writes to 'root' (the dataset), not
                // 'distro' - possibly intentional, but worth confirming.
                if (!dspatial.isEmpty()) {
                    root.put("spatial_uri", dspatial);
                }
                String dschemaURL = executeSimpleSelectQuery(
                        "SELECT ?schema WHERE {<" + distribution + "> <" + DCTERMS.CONFORMS_TO + "> ?schema }",
                        "schema");
                if (!dschemaURL.isEmpty()) {
                    distro.put("describedBy", dschemaURL);
                }
                String dlicense = executeSimpleSelectQuery(
                        "SELECT ?license WHERE {<" + distribution + "> <" + DCTERMS.LICENSE + "> ?license }",
                        "license");
                if (!dlicense.isEmpty()) {
                    distro.put("license_link", dlicense);
                }
                String dmimetype = executeSimpleSelectQuery("SELECT ?format WHERE {<" + distribution + "> <"
                        + DcatAp11ToCkanBatchVocabulary.DCAT_MEDIATYPE + "> ?format }", "format");
                if (!dmimetype.isEmpty()) {
                    distro.put("mimetype", dmimetype.replaceAll(".*\\/([^\\/]+\\/[^\\/]+)", "$1"));
                }
            }

            resources.put(distro);
        }

        //Add the remaining distributions that were not updated but existed in the original dataset
        for (Entry<String, JSONObject> resource : resourceList.entrySet()) {
            resources.put(resource.getValue());
        }

        root.put("resources", resources);

        //Create new dataset
        if (!datasetExists) {
            JSONObject createRoot = new JSONObject();
            CloseableHttpResponse response = null;

            createRoot.put("name", datasetID);
            createRoot.put("title", title);
            createRoot.put("owner_org", organizations.get(publisher_uri));

            LOG.debug("Creating dataset in CKAN");
            HttpPost httpPost = new HttpPost(apiURI + "/package_create?id=" + datasetID);
            httpPost.addHeader(new BasicHeader("Authorization", configuration.getApiKey()));

            String json = createRoot.toString();

            LOG.debug("Creating dataset with: " + json);

            httpPost.setEntity(new StringEntity(json, Charset.forName("utf-8")));

            try {
                response = createClient.execute(httpPost);
                if (response.getStatusLine().getStatusCode() == 200) {
                    LOG.debug("Dataset created OK");
                    //LOG.info("Response: " + EntityUtils.toString(response.getEntity()));
                } else if (response.getStatusLine().getStatusCode() == 409) {
                    String ent = EntityUtils.toString(response.getEntity());
                    LOG.error("Dataset already exists: " + ent);
                    throw exceptionFactory.failure("Dataset already exists");
                } else {
                    String ent = EntityUtils.toString(response.getEntity());
                    LOG.error("Response:" + ent);
                    throw exceptionFactory.failure("Error creating dataset");
                }
            } catch (Exception e) {
                LOG.error(e.getLocalizedMessage(), e);
            } finally {
                if (response != null) {
                    try {
                        response.close();
                    } catch (IOException e) {
                        LOG.error(e.getLocalizedMessage(), e);
                        throw exceptionFactory.failure("Error creating dataset");
                    }
                }
            }
        }

        //Update existing dataset
        String json = root.toString();
        LOG.debug("Posting to CKAN");
        HttpPost httpPost = new HttpPost(apiURI + "/package_update?id=" + datasetID);
        httpPost.addHeader(new BasicHeader("Authorization", configuration.getApiKey()));

        LOG.debug(json);

        httpPost.setEntity(new StringEntity(json, Charset.forName("utf-8")));
        CloseableHttpResponse response = null;

        try {
            response = postClient.execute(httpPost);
            if (response.getStatusLine().getStatusCode() == 200) {
                //LOG.info("Response:" + EntityUtils.toString(response.getEntity()));
            } else {
                String ent = EntityUtils.toString(response.getEntity());
                LOG.error("Response:" + ent);
                throw exceptionFactory.failure("Error updating dataset");
            }
        } catch (Exception e) {
            LOG.error(e.getLocalizedMessage(), e);
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                    throw exceptionFactory.failure("Error updating dataset");
                }
            }
        }

        progressReport.entryProcessed();
    }

    // Release the shared HTTP clients once all datasets are processed.
    try {
        queryClient.close();
        createClient.close();
        postClient.close();
    } catch (IOException e) {
        LOG.error(e.getLocalizedMessage(), e);
    }

    progressReport.done();

}

From source file:com.googlecode.psiprobe.controllers.logs.FollowController.java

/**
 * Renders the newly appended tail of a log file for the "follow" view.
 *
 * Reads the file backwards from {@code currentLength} until
 * {@code lastKnownLength} is reached or {@code maxReadLines} lines have been
 * collected, so only the portion appended since the last poll is returned.
 *
 * @param request current HTTP request; supplies the {@code lastKnownLength},
 *        {@code currentLength} and {@code maxReadLines} parameters
 * @param response current HTTP response (unused here)
 * @param logDest descriptor of the log file to read
 * @return model holding the collected lines under the "lines" key
 * @throws Exception if reading the file fails
 */
protected ModelAndView handleLogFile(HttpServletRequest request, HttpServletResponse response,
        LogDestination logDest) throws Exception {

    ModelAndView mv = new ModelAndView(getViewName());
    File file = logDest.getFile();

    if (file.exists()) {
        // Parameterized list instead of the original raw LinkedList.
        LinkedList<String> lines = new LinkedList<String>();
        long actualLength = file.length();
        long lastKnownLength = ServletRequestUtils.getLongParameter(request, "lastKnownLength", 0);
        long currentLength = ServletRequestUtils.getLongParameter(request, "currentLength", actualLength);
        long maxReadLines = ServletRequestUtils.getLongParameter(request, "maxReadLines", 0);

        if (lastKnownLength > currentLength || lastKnownLength > actualLength || currentLength > actualLength) {
            //
            // file length got reset (rotation/truncation): start over and
            // tell the user the file was truncated.
            //
            lastKnownLength = 0;
            lines.add(" ------------- THE FILE HAS BEEN TRUNCATED --------------");
        }

        BackwardsFileStream bfs = new BackwardsFileStream(file, currentLength);
        try {
            BackwardsLineReader br = new BackwardsLineReader(bfs);
            long readSize = 0;
            long totalReadSize = currentLength - lastKnownLength;
            String s;
            // Read backwards until the previously seen offset is reached.
            while (readSize < totalReadSize && (s = br.readLine()) != null) {
                if (!s.equals("")) {
                    lines.addFirst(s);
                    readSize += s.length();
                } else {
                    // Empty line: only a line terminator was consumed.
                    readSize++;
                }
                if (maxReadLines != 0 && lines.size() >= maxReadLines) {
                    break;
                }
            }

            if (lastKnownLength != 0 && readSize > totalReadSize) {
                // The oldest line straddles the window boundary; drop it so
                // a torn (partial) line is never shown.
                lines.removeFirst();
            }
        } finally {
            bfs.close();
        }

        mv.addObject("lines", lines);
    }
    return mv;
}