Example usage for org.apache.solr.client.solrj.util ClientUtils escapeQueryChars

List of usage examples for org.apache.solr.client.solrj.util ClientUtils escapeQueryChars

Introduction

On this page you can find example usages of org.apache.solr.client.solrj.util ClientUtils escapeQueryChars.

Prototype


public static String escapeQueryChars(String s) 

Document

See: Lucene query parser syntax (https://www.google.com/?gws_rd=ssl#q=lucene+query+parser+syntax) for more information on escaping special characters.
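Before the usage examples below, here is a minimal sketch of how the method is typically called when building a query from user input. The field name "title" and the sample search term are made up for illustration; only ClientUtils.escapeQueryChars and SolrQuery are taken from SolrJ.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.util.ClientUtils;

public class EscapeQueryCharsExample {
    public static void main(String[] args) {
        // A user-supplied term containing Lucene special characters (+, (, ), :, ?)
        String rawTerm = "C++ (2nd edition): chapter 1?";

        // Escape the special characters so Solr treats them as literal text
        String escaped = ClientUtils.escapeQueryChars(rawTerm);

        // Use the escaped value in a field query; "title" is an illustrative field name
        SolrQuery query = new SolrQuery("title:" + escaped);
        System.out.println(query.getQuery());
    }
}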

Usage

From source file: org.dspace.app.xmlui.aspect.discovery.SearchFacetFilter.java

License: BSD License

private void renderFacetField(SearchFilterParam browseParams, DSpaceObject dso, FacetField field,
        Table singleTable, List<String> filterQueries, FacetField.Count value)
        throws SQLException, WingException, UnsupportedEncodingException {
    String displayedValue = value.getName();
    String filterQuery = value.getAsFilterQuery();
    if (field.getName().equals("location.comm") || field.getName().equals("location.coll")) {
        //We have a community/collection, resolve it to a dspaceObject
        displayedValue = SolrServiceImpl.locationToName(context, field.getName(), displayedValue);
    }
    if (field.getGap() != null) {
        //We have a date; get the year so we can display it
        DateFormat simpleDateformat = new SimpleDateFormat("yyyy");
        displayedValue = simpleDateformat.format(SolrServiceImpl.toDate(displayedValue));
        filterQuery = ClientUtils.escapeQueryChars(value.getFacetField().getName()) + ":" + displayedValue
                + "*";
    }

    Cell cell = singleTable.addRow().addCell();

    //No use in selecting the same filter twice
    if (filterQueries.contains(filterQuery)) {
        cell.addContent(SearchUtils.getFilterQueryDisplay(displayedValue) + " (" + value.getCount() + ")");
    } else {
        //Add the basics
        Map<String, String> urlParams = new HashMap<String, String>();
        urlParams.putAll(browseParams.getCommonBrowseParams());
        String url = generateURL(contextPath + (dso == null ? "" : "/handle/" + dso.getHandle()) + "/discover",
                urlParams);
        //Add already existing filter queries
        url = addFilterQueriesToUrl(url);
        //Last add the current filter query
        url += "&fq=" + filterQuery;
        cell.addXref(url, SearchUtils.getFilterQueryDisplay(displayedValue) + " (" + value.getCount() + ")");
    }
}

From source file: org.dspace.discovery.SolrServiceImpl.java

License: BSD License

public DiscoverFilterQuery toFilterQuery(Context context, String field, String operator, String value)
        throws SQLException {
    DiscoverFilterQuery result = new DiscoverFilterQuery();

    StringBuilder filterQuery = new StringBuilder();
    if (StringUtils.isNotBlank(field)) {
        filterQuery.append(field);
        if ("equals".equals(operator)) {
            //Query the keyword indexed field !
            filterQuery.append("_keyword");
        } else if ("authority".equals(operator)) {
            //Query the authority indexed field !
            filterQuery.append("_authority");
        } else if ("notequals".equals(operator) || "notcontains".equals(operator)
                || "notauthority".equals(operator)) {
            filterQuery.insert(0, "-");
        }
        filterQuery.append(":");
        if ("equals".equals(operator) || "notequals".equals(operator)) {
            //DO NOT ESCAPE RANGE QUERIES !
            if (!value.matches("\\[.*TO.*\\]")) {
                value = ClientUtils.escapeQueryChars(value);
                filterQuery.append(value);
            } else {
                if (value.matches("\\[\\d{1,4} TO \\d{1,4}\\]")) {
                    int minRange = Integer.parseInt(value.substring(1, value.length() - 1).split(" TO ")[0]);
                    int maxRange = Integer.parseInt(value.substring(1, value.length() - 1).split(" TO ")[1]);
                    value = "[" + String.format("%04d", minRange) + " TO " + String.format("%04d", maxRange)
                            + "]";
                }
                filterQuery.append(value);
            }
        } else {
            //DO NOT ESCAPE RANGE QUERIES !
            if (!value.matches("\\[.*TO.*\\]")) {
                value = ClientUtils.escapeQueryChars(value);
                filterQuery.append("(").append(value).append(")");
            } else {
                filterQuery.append(value);
            }
        }

    }

    result.setDisplayedValue(transformDisplayedValue(context, field, value));
    result.setFilterQuery(filterQuery.toString());
    return result;
}

From source file: org.dspace.statistics.content.StatisticsDataSearches.java

License: BSD License

@Override
public Dataset createDataset(Context context)
        throws SQLException, SolrServerException, IOException, ParseException {
    // Check if we already have one.
    // If we do then give it back.
    if (getDataset() != null) {
        return getDataset();
    }

    List<StatisticsFilter> filters = getFilters();
    List<String> defaultFilters = new ArrayList<String>();
    for (StatisticsFilter statisticsFilter : filters) {
        defaultFilters.add(statisticsFilter.toQuery());
    }

    String defaultFilterQuery = StringUtils.join(defaultFilters.iterator(), " AND ");

    String query = getQuery();

    Dataset dataset = new Dataset(0, 0);
    List<DatasetGenerator> datasetGenerators = getDatasetGenerators();
    if (0 < datasetGenerators.size()) {
        //At the moment we can only have one dataset generator
        DatasetGenerator datasetGenerator = datasetGenerators.get(0);
        if (datasetGenerator instanceof DatasetSearchGenerator) {
            DatasetSearchGenerator typeGenerator = (DatasetSearchGenerator) datasetGenerator;

            if (typeGenerator.getMode() == DatasetSearchGenerator.Mode.SEARCH_OVERVIEW) {
                StringBuilder fqBuffer = new StringBuilder(defaultFilterQuery);
                if (0 < fqBuffer.length()) {
                    fqBuffer.append(" AND ");
                }
                fqBuffer.append(getSearchFilterQuery());

                ObjectCount[] topCounts = SolrLogger.queryFacetField(query, fqBuffer.toString(),
                        typeGenerator.getType(), typeGenerator.getMax(),
                        (typeGenerator.isPercentage() || typeGenerator.isIncludeTotal()), null);
                long totalCount = -1;
                if (typeGenerator.isPercentage() && 0 < topCounts.length) {
                    //Retrieve the total required to calculate the percentage
                    totalCount = topCounts[topCounts.length - 1].getCount();
                    //Remove the total count from view !
                    topCounts = (ObjectCount[]) ArrayUtils.subarray(topCounts, 0, topCounts.length - 1);
                }

                int nrColumns = 2;
                if (typeGenerator.isPercentage()) {
                    nrColumns++;
                }
                if (typeGenerator.isRetrievePageViews()) {
                    nrColumns++;
                }

                dataset = new Dataset(topCounts.length, nrColumns);
                dataset.setColLabel(0, "search-terms");
                dataset.setColLabel(1, "searches");
                if (typeGenerator.isPercentage()) {
                    dataset.setColLabel(2, "percent-total");
                }
                if (typeGenerator.isRetrievePageViews()) {
                    dataset.setColLabel(3, "views-search");
                }
                for (int i = 0; i < topCounts.length; i++) {
                    ObjectCount queryCount = topCounts[i];

                    dataset.setRowLabel(i, String.valueOf(i + 1));
                    String displayedValue = queryCount.getValue();
                    if (new DSpace().getConfigurationService().getPropertyAsType(
                            "usage-statistics.search.statistics.unescape.queries", Boolean.TRUE)) {
                        displayedValue = displayedValue.replace("\\", "");
                    }
                    dataset.addValueToMatrix(i, 0, displayedValue);
                    dataset.addValueToMatrix(i, 1, queryCount.getCount());
                    if (typeGenerator.isPercentage()) {
                        //Calculate our percentage from the total !
                        dataset.addValueToMatrix(i, 2,
                                percentageFormat.format(((float) queryCount.getCount() / totalCount)));
                    }
                    if (typeGenerator.isRetrievePageViews()) {
                        String queryString = ClientUtils.escapeQueryChars(queryCount.getValue());
                        if (queryString.equals("")) {
                            queryString = "\"\"";
                        }

                        ObjectCount totalPageViews = getTotalPageViews("query:" + queryString,
                                defaultFilterQuery);
                        dataset.addValueToMatrix(i, 3, pageViewFormat
                                .format((float) totalPageViews.getCount() / queryCount.getCount()));
                    }
                }
            } else if (typeGenerator.getMode() == DatasetSearchGenerator.Mode.SEARCH_OVERVIEW_TOTAL) {
                //Retrieve the total counts !
                ObjectCount totalCount = SolrLogger.queryTotal(query, getSearchFilterQuery());

                //Retrieve the filtered count by using the default filter query
                StringBuilder fqBuffer = new StringBuilder(defaultFilterQuery);
                if (0 < fqBuffer.length()) {
                    fqBuffer.append(" AND ");
                }
                fqBuffer.append(getSearchFilterQuery());

                ObjectCount totalFiltered = SolrLogger.queryTotal(query, fqBuffer.toString());

                fqBuffer = new StringBuilder(defaultFilterQuery);
                if (0 < fqBuffer.length()) {
                    fqBuffer.append(" AND ");
                }
                fqBuffer.append("statistics_type:").append(SolrLogger.StatisticsType.SEARCH_RESULT.text());

                ObjectCount totalPageViews = getTotalPageViews(query, defaultFilterQuery);

                dataset = new Dataset(1, 3);
                dataset.setRowLabel(0, "");

                dataset.setColLabel(0, "searches");
                dataset.addValueToMatrix(0, 0, totalFiltered.getCount());
                dataset.setColLabel(1, "percent-total");
                //Ensure that we do NOT divide by 0
                float percentTotal;
                if (totalCount.getCount() == 0) {
                    percentTotal = 0;
                } else {
                    percentTotal = (float) totalFiltered.getCount() / totalCount.getCount();
                }

                dataset.addValueToMatrix(0, 1, percentageFormat.format(percentTotal));
                dataset.setColLabel(2, "views-search");
                //Ensure that we do NOT divide by 0
                float pageViews;
                if (totalFiltered.getCount() == 0) {
                    pageViews = 0;
                } else {
                    pageViews = (float) totalPageViews.getCount() / totalFiltered.getCount();
                }

                dataset.addValueToMatrix(0, 2, pageViewFormat.format(pageViews));
            }
        } else {
            throw new IllegalArgumentException(
                    "Data generator with class" + datasetGenerator.getClass().getName()
                            + " is not supported by the statistics search engine !");
        }
    }

    return dataset;
}

From source file: org.dspace.statistics.content.StatisticsDataVisits.java

License: BSD License

public Dataset createDataset(Context context) throws SQLException, SolrServerException, ParseException {
    // Check if we already have one.
    // If we do then give it back.
    if (getDataset() != null) {
        return getDataset();
    }

    ///////////////////////////
    // 1. DETERMINE OUR AXIS //
    ///////////////////////////
    ArrayList<DatasetQuery> datasetQueries = new ArrayList<DatasetQuery>();
    for (int i = 0; i < getDatasetGenerators().size(); i++) {
        DatasetGenerator dataSet = getDatasetGenerators().get(i);
        processAxis(dataSet, datasetQueries);
    }

    // Now let's determine our values.
    // First check if we have a date facet & if so find it.
    DatasetTimeGenerator dateFacet = null;
    if (getDatasetGenerators().get(0) instanceof DatasetTimeGenerator || (1 < getDatasetGenerators().size()
            && getDatasetGenerators().get(1) instanceof DatasetTimeGenerator)) {
        if (getDatasetGenerators().get(0) instanceof DatasetTimeGenerator) {
            dateFacet = (DatasetTimeGenerator) getDatasetGenerators().get(0);
        } else {
            dateFacet = (DatasetTimeGenerator) getDatasetGenerators().get(1);
        }
    }

    /////////////////////////
    // 2. DETERMINE VALUES //
    /////////////////////////
    boolean showTotal = false;
    // Check if we need our total
    if ((getDatasetGenerators().get(0) != null && getDatasetGenerators().get(0).isIncludeTotal())
            || (1 < getDatasetGenerators().size() && getDatasetGenerators().get(1) != null
                    && getDatasetGenerators().get(1).isIncludeTotal())) {
        showTotal = true;
    }

    if (dateFacet != null && dateFacet.getActualStartDate() != null && dateFacet.getActualEndDate() != null) {
        StatisticsSolrDateFilter dateFilter = new StatisticsSolrDateFilter();
        dateFilter.setStartDate(dateFacet.getActualStartDate());
        dateFilter.setEndDate(dateFacet.getActualEndDate());
        dateFilter.setTypeStr(dateFacet.getDateType());
        addFilters(dateFilter);
    } else if (dateFacet != null && dateFacet.getStartDate() != null && dateFacet.getEndDate() != null) {
        StatisticsSolrDateFilter dateFilter = new StatisticsSolrDateFilter();
        dateFilter.setStartStr(dateFacet.getStartDate());
        dateFilter.setEndStr(dateFacet.getEndDate());
        dateFilter.setTypeStr(dateFacet.getDateType());
        addFilters(dateFilter);
    }

    // Determine our filterQuery
    String filterQuery = "";
    for (int i = 0; i < getFilters().size(); i++) {
        StatisticsFilter filter = getFilters().get(i);

        filterQuery += "(" + filter.toQuery() + ")";
        if (i != (getFilters().size() - 1)) {
            filterQuery += " AND ";
        }
    }
    if (StringUtils.isNotBlank(filterQuery)) {
        filterQuery += " AND ";
    }
    //Only use the view type and make sure old data (where no view type is present) is also supported
    //Solr doesn't explicitly apply boolean logic, so this query cannot be simplified to an OR query
    filterQuery += "-(statistics_type:[* TO *] AND -statistics_type:" + SolrLogger.StatisticsType.VIEW.text()
            + ")";

    //        System.out.println("FILTERQUERY: " + filterQuery);

    // We determine our values on the queries resolved above
    Dataset dataset = null;

    // Run over our queries.
    // First how many queries do we have ?
    if (dateFacet != null) {
        // So do all the queries and THEN do the date facet
        for (int i = 0; i < datasetQueries.size(); i++) {
            DatasetQuery dataSetQuery = datasetQueries.get(i);
            if (dataSetQuery.getQueries().size() != 1) {
                // TODO: do this
            } else {
                String query = dataSetQuery.getQueries().get(0).getQuery();
                if (dataSetQuery.getMax() == -1) {
                    // We are asking from our current query all the visits faceted by date
                    ObjectCount[] results = SolrLogger.queryFacetDate(query, filterQuery, dataSetQuery.getMax(),
                            dateFacet.getDateType(), dateFacet.getStartDate(), dateFacet.getEndDate(),
                            showTotal, context);
                    dataset = new Dataset(1, results.length);
                    // Now that we have our results put em in a matrix
                    for (int j = 0; j < results.length; j++) {
                        dataset.setColLabel(j, results[j].getValue());
                        dataset.addValueToMatrix(0, j, results[j].getCount());
                    }
                    // TODO: change this !
                    // Now add the column label
                    dataset.setRowLabel(0, getResultName(dataSetQuery.getName(), dataSetQuery, context));
                    dataset.setRowLabelAttr(0, getAttributes(dataSetQuery.getName(), dataSetQuery, context));
                } else {
                    // We need to get the max objects and run the next part of the query on them (the next part being the dataset time query)
                    ObjectCount[] maxObjectCounts = SolrLogger.queryFacetField(query, filterQuery,
                            dataSetQuery.getFacetField(), dataSetQuery.getMax(), false, null);
                    for (int j = 0; j < maxObjectCounts.length; j++) {
                        ObjectCount firstCount = maxObjectCounts[j];
                        String newQuery = dataSetQuery.getFacetField() + ": "
                                + ClientUtils.escapeQueryChars(firstCount.getValue()) + " AND " + query;
                        ObjectCount[] maxDateFacetCounts = SolrLogger.queryFacetDate(newQuery, filterQuery,
                                dataSetQuery.getMax(), dateFacet.getDateType(), dateFacet.getStartDate(),
                                dateFacet.getEndDate(), showTotal, context);

                        // Make sure we have a dataSet
                        if (dataset == null) {
                            dataset = new Dataset(maxObjectCounts.length, maxDateFacetCounts.length);
                        }

                        // TODO: this is a very dirty fix change this ! ! ! ! ! !
                        dataset.setRowLabel(j, getResultName(firstCount.getValue(), dataSetQuery, context));
                        dataset.setRowLabelAttr(j, getAttributes(firstCount.getValue(), dataSetQuery, context));

                        for (int k = 0; k < maxDateFacetCounts.length; k++) {
                            ObjectCount objectCount = maxDateFacetCounts[k];
                            // No need to add this many times
                            if (j == 0) {
                                dataset.setColLabel(k, objectCount.getValue());
                            }
                            dataset.addValueToMatrix(j, k, objectCount.getCount());
                        }
                    }
                    if (dataset != null && !(getDatasetGenerators().get(0) instanceof DatasetTimeGenerator)) {
                        dataset.flipRowCols();
                    }
                }
            }
        }
    } else {
        // We do NOT have a date facet so just do queries after each other
        /*
        for (int i = 0; i < datasetQueries.size(); i++) {
        DatasetQuery datasetQuery = datasetQueries.get(i);
        if(datasetQuery.getQueries().size() != 1){
            // TODO: do this
        }else{
            String query = datasetQuery.getQueries().get(0);
            // Loop over the queries & do em
        //                    ObjectCount[] topCounts = SolrLogger.queryFacetField(query, );
        }
        }
        */
        DatasetQuery firsDataset = datasetQueries.get(0);
        //Do the first query

        ObjectCount[] topCounts1 = null;
        //            if(firsDataset.getQueries().size() == 1){
        topCounts1 = queryFacetField(firsDataset, firsDataset.getQueries().get(0).getQuery(), filterQuery);
        //            }else{
        //                TODO: do this
        //            }
        // Check if we have more queries that need to be done
        if (datasetQueries.size() == 2) {
            DatasetQuery secondDataSet = datasetQueries.get(1);
            // Now do the second one
            ObjectCount[] topCounts2 = queryFacetField(secondDataSet,
                    secondDataSet.getQueries().get(0).getQuery(), filterQuery);
            // Now that we have results for both of them, let's do x.y queries
            List<String> facetQueries = new ArrayList<String>();
            for (ObjectCount count2 : topCounts2) {
                String facetQuery = secondDataSet.getFacetField() + ":"
                        + ClientUtils.escapeQueryChars(count2.getValue());
                // Check if we also have a type present (if so this should be put into the query)
                if ("id".equals(secondDataSet.getFacetField())
                        && secondDataSet.getQueries().get(0).getDsoType() != -1) {
                    facetQuery += " AND type:" + secondDataSet.getQueries().get(0).getDsoType();
                }

                facetQueries.add(facetQuery);
            }
            for (int i = 0; i < topCounts1.length; i++) {
                ObjectCount count1 = topCounts1[i];
                ObjectCount[] currentResult = new ObjectCount[topCounts2.length];

                // Make sure we have a dataSet
                if (dataset == null) {
                    dataset = new Dataset(topCounts2.length, topCounts1.length);
                }
                dataset.setColLabel(i, getResultName(count1.getValue(), firsDataset, context));
                dataset.setColLabelAttr(i, getAttributes(count1.getValue(), firsDataset, context));

                String query = firsDataset.getFacetField() + ":"
                        + ClientUtils.escapeQueryChars(count1.getValue());
                // Check if we also have a type present (if so this should be put into the query)
                if ("id".equals(firsDataset.getFacetField())
                        && firsDataset.getQueries().get(0).getDsoType() != -1) {
                    query += " AND type:" + firsDataset.getQueries().get(0).getDsoType();
                }

                Map<String, Integer> facetResult = SolrLogger.queryFacetQuery(query, filterQuery, facetQueries);

                // TODO: the show total
                // No need to add this many times
                // TODO: replace this with the value to display
                for (int j = 0; j < topCounts2.length; j++) {
                    ObjectCount count2 = topCounts2[j];
                    if (i == 0) {
                        dataset.setRowLabel(j, getResultName(count2.getValue(), secondDataSet, context));
                        dataset.setRowLabelAttr(j, getAttributes(count2.getValue(), secondDataSet, context));

                    }
                    // Get our value the value is the same as the query
                    String facetQuery = secondDataSet.getFacetField() + ":"
                            + ClientUtils.escapeQueryChars(count2.getValue());
                    // Check if we also have a type present (if so this should be put into the query)
                    if ("id".equals(secondDataSet.getFacetField())
                            && secondDataSet.getQueries().get(0).getDsoType() != -1) {
                        facetQuery += " AND type:" + secondDataSet.getQueries().get(0).getDsoType();
                    }

                    // We got our query so now get the value
                    dataset.addValueToMatrix(j, i, facetResult.get(facetQuery));
                }

                /*
                for (int j = 0; j < topCounts2.length; j++) {
                ObjectCount count2 = topCounts2[j];
                String query = firsDataset.getFacetField() + ":" + count1.getValue();
                // Check if we also have a type present (if so this should be put into the query)
                if("id".equals(firsDataset.getFacetField()) && firsDataset.getQueries().get(0).getDsoType() != -1)
                    query += " AND type:" + firsDataset.getQueries().get(0).getDsoType();
                        
                query += " AND " + secondDataSet.getFacetField() + ":" + count2.getValue();
                // Check if we also have a type present (if so this should be put into the query)
                if("id".equals(secondDataSet.getFacetField()) && secondDataSet.getQueries().get(0).getDsoType() != -1)
                    query += " AND type:" + secondDataSet.getQueries().get(0).getDsoType();
                        
                long count = SolrLogger.queryFacetQuery(query, filterQuery);
                        
                // TODO: the show total
                // No need to add this many times
                // TODO: replace this with the value to display
                if(i == 0) {
                    dataset.setRowLabel(j, getResultName(count2.getValue(), secondDataSet, context));
                    dataset.setRowLabelAttr(j, getAttributes(count2.getValue(), secondDataSet, context));
                        
                }
                        
                dataset.addValueToMatrix(j, i, count);
                }
                */
            }

            //                System.out.println("BOTH");

        } else {
            // Make sure we have a dataSet
            dataset = new Dataset(1, topCounts1.length);
            for (int i = 0; i < topCounts1.length; i++) {
                ObjectCount count = topCounts1[i];
                dataset.setColLabel(i, getResultName(count.getValue(), firsDataset, context));
                dataset.setColLabelAttr(i, getAttributes(count.getValue(), firsDataset, context));
                dataset.addValueToMatrix(0, i, count.getCount());
            }
        }

    }
    if (dataset != null) {
        dataset.setRowTitle("Dataset 1");
        dataset.setColTitle("Dataset 2");
    } else {
        dataset = new Dataset(0, 0);
    }
    return dataset;
}

From source file: org.dspace.statistics.SolrLogger.java

License: BSD License

public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file: org.dspace.statistics.SolrLoggerServiceImpl.java

License: BSD License

@Override
public void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setParam("skip", "_version_");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file: org.dspace.xoai.filter.DateFromFilter.java

License: BSD License

@Override
public SolrFilterResult buildSolrQuery() {
    String format = dateProvider.format(date).replace("Z", ".000Z"); // Tweak to set the milliseconds
    return new SolrFilterResult("item.lastmodified:[" + ClientUtils.escapeQueryChars(format) + " TO *]");
}

From source file: org.dspace.xoai.filter.DateUntilFilter.java

License: BSD License

@Override
public SolrFilterResult buildSolrQuery() {
    String format = dateProvider.format(date).replace("Z", ".999Z"); // Tweak to set the milliseconds
    return new SolrFilterResult("item.lastmodified:[* TO " + ClientUtils.escapeQueryChars(format) + "]");
}

From source file: org.dspace.xoai.filter.DSpaceAtLeastOneMetadataFilter.java

License: BSD License

@Override
public SolrFilterResult buildSolrQuery() {
    String field = this.getField();
    List<String> parts = new ArrayList<String>();
    if (this.getField() != null) {
        for (String v : this.getValues())
            this.buildQuery("metadata." + field, ClientUtils.escapeQueryChars(v), parts);
        if (parts.size() > 0) {
            return new SolrFilterResult(StringUtils.join(parts.iterator(), " OR "));
        }
    }
    return new SolrFilterResult();
}

From source file: org.dspace.xoai.filter.DSpaceSetSpecFilter.java

License: BSD License

@Override
public SolrFilterResult buildSolrQuery() {
    if (setSpec.startsWith("col_")) {
        try {
            return new SolrFilterResult("item.collections:" + ClientUtils.escapeQueryChars(setSpec));
        } catch (Exception ex) {
            log.error(ex.getMessage(), ex);
        }
    } else if (setSpec.startsWith("com_")) {
        try {
            return new SolrFilterResult("item.communities:" + ClientUtils.escapeQueryChars(setSpec));
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }
    return new SolrFilterResult();
}