Get URL content with a maximum number of retries
import java.io.IOException;
import java.net.SocketException;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpParams;
import org.apache.http.util.EntityUtils;
class Main {
    // Shared parameter set for every client instance created by getUrl.
    private static HttpParams defaultHttpParams = new BasicHttpParams();

    /**
     * Fetches the body of {@code url} via HTTP GET and returns it as a
     * trimmed string, retrying on transient {@link SocketException}s.
     *
     * <p>One initial attempt is made plus up to {@code maxRetries} retries;
     * if the final attempt also fails with a {@code SocketException}, that
     * exception is rethrown. Other {@link IOException}s propagate immediately
     * without retry.
     *
     * @param url        the URL to fetch
     * @param maxRetries number of additional attempts after the first failure
     * @return the trimmed response body, or {@code null} if every successful
     *         response carried no entity
     * @throws IOException if a non-socket I/O error occurs, or the socket
     *         error persists past the last retry
     */
    public static String getUrl(String url, int maxRetries) throws IOException {
        String result = null;
        int retries = 0;
        DefaultHttpClient httpclient = new DefaultHttpClient(defaultHttpParams);
        httpclient.setCookieStore(null); // no cookie persistence across calls
        HttpGet httpget = new HttpGet(url);
        try {
            while (retries <= maxRetries && result == null) {
                try {
                    retries++;
                    HttpEntity entity = httpclient.execute(httpget).getEntity();
                    if (entity != null) {
                        // toString() fully consumes the entity, which also
                        // releases the underlying connection back to the pool.
                        // NOTE(review): no explicit charset is given, so the
                        // entity's declared charset (or ISO-8859-1) is used —
                        // confirm whether UTF-8 should be forced.
                        result = EntityUtils.toString(entity).trim();
                    }
                } catch (SocketException se) {
                    if (retries > maxRetries) {
                        // Retries exhausted — surface the last failure.
                        throw se;
                    }
                    // Transient socket failure: deliberately swallowed so the
                    // loop can retry (best-effort semantics).
                    //Log.v(TAG, "SocketException, retrying " + retries);
                }
            }
        } finally {
            // Fix: the original leaked the client. Each call builds a fresh
            // DefaultHttpClient, so its connection manager (and any open
            // sockets) must be shut down before returning or throwing.
            httpclient.getConnectionManager().shutdown();
        }
        return result;
    }
}
Related examples in the same category