Java tutorial
/**
 * Copyright 2015 OpenSearchServer Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.opensearchserver.client.v1;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;

import javax.xml.bind.annotation.XmlTransient;

import org.apache.http.client.fluent.Request;
import org.apache.http.client.utils.URIBuilder;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.type.TypeReference;
import com.opensearchserver.client.JsonClient1;
import com.opensearchserver.client.common.AbstractApi;
import com.opensearchserver.client.common.CommonListResult;
import com.opensearchserver.client.common.CommonResult;
import com.opensearchserver.client.v1.search.FieldValueList1;

public class WebCrawlerApi1 extends AbstractApi<JsonClient1> {

    public WebCrawlerApi1(JsonClient1 client) {
        super(client);
    }

    @XmlTransient
    @JsonIgnore
    public final static TypeReference<CommonListResult<List<FieldValueList1>>> LISTCRAWL_TYPEREF =
            new TypeReference<CommonListResult<List<FieldValueList1>>>() {
            };

    /**
     * Crawl a URL and return the extracted data
     *
     * @param indexName
     *            The name of the index
     * @param url
     *            The URL to crawl
     * @param msTimeOut
     *            The timeout in milliseconds
     * @return the result of the crawl
     * @throws IOException
     *             if any IO error occurs
     * @throws URISyntaxException
     *             if the URI is not valid
     */
    public CommonListResult<List<FieldValueList1>> crawlWithData(String indexName, String url, Integer msTimeOut)
            throws IOException, URISyntaxException {
        URIBuilder uriBuilder = client.getBaseUrl("index/", indexName, "/crawler/web/crawl")
                .addParameter("url", url)
                .addParameter("returnData", "true");
        Request request = Request.Get(uriBuilder.build());
        return client.execute(request, null, msTimeOut, LISTCRAWL_TYPEREF, 200);
    }

    /**
     * Crawl a URL
     *
     * @param indexName
     *            The name of the index
     * @param url
     *            The URL to crawl
     * @param msTimeOut
     *            The timeout in milliseconds
     * @return the status of the crawl
     * @throws IOException
     *             if any IO error occurs
     * @throws URISyntaxException
     *             if the URI is not valid
     */
    public CommonResult crawl(String indexName, String url, Integer msTimeOut)
            throws IOException, URISyntaxException {
        URIBuilder uriBuilder = client.getBaseUrl("index/", indexName, "/crawler/web/crawl")
                .addParameter("url", url)
                .addParameter("returnData", "false");
        Request request = Request.Get(uriBuilder.build());
        return client.execute(request, null, msTimeOut, CommonResult.class, 200);
    }

    /**
     * Enable or disable pattern inclusion and exclusion
     *
     * @param indexName
     *            The name of the index
     * @param inclusionStatus
     *            Enable or disable the inclusion list
     * @param exclusionStatus
     *            Enable or disable the exclusion list
     * @return the result of the call
     * @throws IOException
     *             if any IO error occurs
     * @throws URISyntaxException
     *             if the URI is not valid
     */
    public CommonResult setPatternStatus(String indexName, Boolean inclusionStatus, Boolean exclusionStatus)
            throws IOException, URISyntaxException {
        URIBuilder uriBuilder = client.getBaseUrl("index/", indexName, "/crawler/web/patterns/status");
        if (inclusionStatus != null)
            uriBuilder.addParameter("inclusion", inclusionStatus.toString());
        if (exclusionStatus != null)
            uriBuilder.addParameter("exclusion", exclusionStatus.toString());
        Request request = Request.Put(uriBuilder.build());
        return client.execute(request, null, null, CommonResult.class, 200);
    }
}