Unifying external page requests to a single method

4pr0n 2014-06-20 04:09:36 -07:00
parent d215beb7e3
commit 1b20e98f8f
47 changed files with 280 additions and 397 deletions

View File: AbstractRipper.java

@ -12,7 +12,12 @@ import java.util.Observable;
import org.apache.log4j.FileAppender; import org.apache.log4j.FileAppender;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.jsoup.Connection;
import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response;
import org.jsoup.HttpStatusException; import org.jsoup.HttpStatusException;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import com.rarchives.ripme.ui.RipStatusHandler; import com.rarchives.ripme.ui.RipStatusHandler;
import com.rarchives.ripme.ui.RipStatusMessage; import com.rarchives.ripme.ui.RipStatusMessage;
@ -28,6 +33,8 @@ public abstract class AbstractRipper
public static final String USER_AGENT = public static final String USER_AGENT =
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:29.0) Gecko/20100101 Firefox/29.0"; "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:29.0) Gecko/20100101 Firefox/29.0";
public static final int TIMEOUT = Utils.getConfigInteger("page.timeout", 5 * 1000);
protected URL url; protected URL url;
protected File workingDir; protected File workingDir;
protected DownloadThreadPool threadPool; protected DownloadThreadPool threadPool;
@ -54,6 +61,10 @@ public abstract class AbstractRipper
} }
} }
protected int getTimeout() {
return TIMEOUT;
}
/** /**
* Ensures inheriting ripper can rip this URL, raises exception if not. * Ensures inheriting ripper can rip this URL, raises exception if not.
* Otherwise initializes working directory and thread pool. * Otherwise initializes working directory and thread pool.
@ -335,4 +346,84 @@ public abstract class AbstractRipper
public void setBytesCompleted(int bytes) { public void setBytesCompleted(int bytes) {
// Do nothing // Do nothing
} }
// Thar be overloaded methods afoot
public Document getDocument(URL url) throws IOException {
return getDocument(url.toExternalForm());
}
public Document getDocument(String url) throws IOException {
return getResponse(url).parse();
}
public Document getDocument(String url, boolean ignoreContentType) throws IOException {
return getResponse(url, ignoreContentType).parse();
}
public Document getDocument(String url, Map<String,String> cookies) throws IOException {
return getResponse(url, cookies).parse();
}
public Document getDocument(String url, String referrer, Map<String,String> cookies) throws IOException {
return getResponse(url, Method.GET, referrer, cookies).parse();
}
public Response getResponse(String url) throws IOException {
return getResponse(url, Method.GET, USER_AGENT, null, null, false);
}
public Response getResponse(URL url) throws IOException {
return getResponse(url.toExternalForm());
}
public Response getResponse(String url, String referrer) throws IOException {
return getResponse(url, Method.GET, USER_AGENT, referrer, null, false);
}
public Response getResponse(URL url, boolean ignoreContentType) throws IOException {
return getResponse(url.toExternalForm(), Method.GET, USER_AGENT, null, null, ignoreContentType);
}
public Response getResponse(String url, Map<String, String> cookies) throws IOException {
return getResponse(url, Method.GET, USER_AGENT, cookies);
}
public Response getResponse(String url, String referrer, Map<String, String> cookies) throws IOException {
return getResponse(url, Method.GET, referrer, cookies);
}
public Response getResponse(String url, Method method) throws IOException {
return getResponse(url, method, USER_AGENT, null, null, false);
}
public Response getResponse(String url, Method method, String referrer, Map<String,String> cookies) throws IOException {
return getResponse(url, method, USER_AGENT, referrer, cookies, false);
}
public Response getResponse(String url, boolean ignoreContentType) throws IOException {
return getResponse(url, Method.GET, USER_AGENT, null, null, ignoreContentType);
}
public Response getResponse(String url, Method method, boolean ignoreContentType) throws IOException {
return getResponse(url, method, USER_AGENT, null, null, ignoreContentType);
}
public Response getResponse(String url,
Method method,
String userAgent,
String referrer,
Map<String,String> cookies,
boolean ignoreContentType)
throws IOException {
Connection connection = Jsoup.connect(url);
connection.method( (method == null) ? Method.GET : method);
connection.userAgent( (userAgent == null) ? USER_AGENT : userAgent);
connection.ignoreContentType(ignoreContentType);
connection.timeout(getTimeout());
connection.maxBodySize(0);
if (cookies != null) { connection.cookies(cookies); }
if (referrer != null) { connection.referrer(referrer); }
Response response = null;
int retries = Utils.getConfigInteger("download.retries", 1);
while (retries >= 0) {
retries--;
try {
response = connection.execute();
break; // got a response; stop retrying
} catch (IOException e) {
logger.warn("Error while loading " + url, e);
continue;
}
}
return response;
}
} }
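All of the per-ripper Jsoup.connect chains below now funnel through this one configurable method. Here is a minimal standalone sketch of the same pattern, assuming only jsoup on the classpath; PageFetcher and its method names are illustrative, the config lookups (page.timeout, download.retries) are replaced by constants, and the retry loop is omitted for brevity.

```java
import java.io.IOException;
import java.util.Map;

import org.jsoup.Connection;
import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

// Hypothetical standalone equivalent of the unified getResponse()/getDocument() helpers.
public class PageFetcher {
    private static final String USER_AGENT =
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:29.0) Gecko/20100101 Firefox/29.0";
    private static final int TIMEOUT_MS = 5 * 1000; // stands in for the page.timeout config value

    // Single low-level entry point; every convenience overload delegates here.
    public static Response fetch(String url, Method method, String referrer,
                                 Map<String, String> cookies, boolean ignoreContentType)
            throws IOException {
        Connection connection = Jsoup.connect(url)
                .method(method == null ? Method.GET : method)
                .userAgent(USER_AGENT)
                .ignoreContentType(ignoreContentType)
                .timeout(TIMEOUT_MS)
                .maxBodySize(0); // 0 = do not truncate large pages
        if (cookies != null)  { connection.cookies(cookies); }
        if (referrer != null) { connection.referrer(referrer); }
        return connection.execute();
    }

    // Convenience overload mirroring getDocument(String url).
    public static Document fetchDocument(String url) throws IOException {
        return fetch(url, Method.GET, null, null, false).parse();
    }

    public static void main(String[] args) throws IOException {
        Document doc = fetchDocument("https://example.com/");
        System.out.println(doc.title());
    }
}
```

With one choke point, the user agent, timeout, and maxBodySize(0) settings apply to every ripper automatically, which is the point of this commit.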

View File: AlbumRipper.java

@ -8,11 +8,6 @@ import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.jsoup.Connection;
import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import com.rarchives.ripme.ui.RipStatusMessage; import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS; import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils; import com.rarchives.ripme.utils.Utils;
@ -175,47 +170,5 @@ public abstract class AlbumRipper extends AbstractRipper {
.append(", Errored: " ).append(itemsErrored.size()); .append(", Errored: " ).append(itemsErrored.size());
return sb.toString(); return sb.toString();
} }
public Response getResponse(String url,
Method method,
String userAgent,
String referrer,
Map<String,String> cookies,
boolean ignoreContentType)
throws IOException {
Connection connection = Jsoup.connect(url);
if (method == null) {
method = Method.GET;
}
connection.method(method);
if (userAgent == null) {
userAgent = USER_AGENT;
}
connection.userAgent(userAgent);
if (cookies != null) {
connection.cookies(cookies);
}
if (referrer != null) {
connection.referrer(referrer);
}
connection.ignoreContentType(ignoreContentType);
connection.maxBodySize(0);
Response response = null;
int retries = Utils.getConfigInteger("download.retries", 1);;
while (retries >= 0) {
retries--;
try {
response = connection.execute();
} catch (IOException e) {
logger.warn("Error while loading " + url, e);
continue;
}
}
return response;
}
} }

View File: ButttoucherRipper.java

@ -46,9 +46,9 @@ public class ButttoucherRipper extends AlbumRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info("Retrieving " + this.url);
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(this.url.toExternalForm()).get(); albumDoc = getDocument(this.url);
} }
int index = 0; int index = 0;
for (Element thumb : albumDoc.select("div.image-gallery > a > img")) { for (Element thumb : albumDoc.select("div.image-gallery > a > img")) {

View File: ChanRipper.java

@ -8,7 +8,6 @@ import java.util.Set;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
@ -79,10 +78,8 @@ public class ChanRipper extends AlbumRipper {
Set<String> attempted = new HashSet<String>(); Set<String> attempted = new HashSet<String>();
int index = 0; int index = 0;
Pattern p; Matcher m; Pattern p; Matcher m;
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info("Retrieving " + this.url);
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
for (Element link : doc.select("a")) { for (Element link : doc.select("a")) {
if (!link.hasAttr("href")) { if (!link.hasAttr("href")) {
continue; continue;

View File: DeviantartRipper.java

@ -65,10 +65,7 @@ public class DeviantartRipper extends AlbumRipper {
logger.info(" Retrieving " + nextURL); logger.info(" Retrieving " + nextURL);
sendUpdate(STATUS.LOADING_RESOURCE, "Retrieving " + nextURL); sendUpdate(STATUS.LOADING_RESOURCE, "Retrieving " + nextURL);
Document doc = Jsoup.connect(nextURL) Document doc = getDocument(nextURL, cookies);
.cookies(cookies)
.userAgent(USER_AGENT)
.get();
// Iterate over all thumbnails // Iterate over all thumbnails
for (Element thumb : doc.select("div.zones-container a.thumb")) { for (Element thumb : doc.select("div.zones-container a.thumb")) {
@ -190,12 +187,7 @@ public class DeviantartRipper extends AlbumRipper {
public String smallToFull(String thumb, String page) { public String smallToFull(String thumb, String page) {
try { try {
// Fetch the image page // Fetch the image page
Response resp = Jsoup.connect(page) Response resp = getResponse(page, Method.GET, USER_AGENT, this.url.toExternalForm(), cookies, false);
.userAgent(USER_AGENT)
.cookies(cookies)
.referrer(this.url.toExternalForm())
.method(Method.GET)
.execute();
Map<String,String> cookies = resp.cookies(); Map<String,String> cookies = resp.cookies();
cookies.putAll(this.cookies); cookies.putAll(this.cookies);
@ -262,10 +254,7 @@ public class DeviantartRipper extends AlbumRipper {
if (username == null || password == null) { if (username == null || password == null) {
throw new IOException("could not find username or password in config"); throw new IOException("could not find username or password in config");
} }
Response resp = Jsoup.connect("http://www.deviantart.com/") Response resp = getResponse("http://www.deviantart.com/");
.userAgent(USER_AGENT)
.method(Method.GET)
.execute();
for (Element input : resp.parse().select("form#form-login input[type=hidden]")) { for (Element input : resp.parse().select("form#form-login input[type=hidden]")) {
postData.put(input.attr("name"), input.attr("value")); postData.put(input.attr("name"), input.attr("value"));
} }
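The DeviantART changes rely on the new cookie- and referrer-aware overloads: fetch once to capture session cookies, then replay them on later requests. Below is a standalone Jsoup sketch of that flow; the class name and URLs are placeholders, not part of RipMe.

```java
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class CookieSessionSketch {
    public static void main(String[] args) throws IOException {
        // First request: capture whatever cookies the site sets.
        Response resp = Jsoup.connect("https://example.com/")
                .method(Method.GET)
                .execute();
        Map<String, String> cookies = new HashMap<String, String>(resp.cookies());

        // Later requests replay the cookies and advertise the first page as referrer,
        // mirroring getResponse(page, Method.GET, USER_AGENT, referrer, cookies, false).
        Document gallery = Jsoup.connect("https://example.com/gallery")
                .cookies(cookies)
                .referrer("https://example.com/")
                .get();
        System.out.println(gallery.title());
    }
}
```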

View File: DrawcrowdRipper.java

@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -62,8 +61,8 @@ public class DrawcrowdRipper extends AlbumRipper {
public void rip() throws IOException { public void rip() throws IOException {
int index = 0; int index = 0;
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm()); sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info("Retrieving " + this.url);
Document albumDoc = Jsoup.connect(this.url.toExternalForm()).get(); Document albumDoc = getDocument(this.url);
while (true) { while (true) {
if (isStopped()) { if (isStopped()) {
break; break;
@ -92,7 +91,7 @@ public class DrawcrowdRipper extends AlbumRipper {
throw new IOException(e); throw new IOException(e);
} }
sendUpdate(STATUS.LOADING_RESOURCE, nextURL); sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
albumDoc = Jsoup.connect(nextURL).get(); albumDoc = getDocument(nextURL);
} }
waitForThreads(); waitForThreads();
} }

View File: EHentaiRipper.java

@ -4,10 +4,11 @@ import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -22,7 +23,6 @@ public class EHentaiRipper extends AlbumRipper {
private static final int PAGE_SLEEP_TIME = 3 * 1000; private static final int PAGE_SLEEP_TIME = 3 * 1000;
private static final int IMAGE_SLEEP_TIME = 1 * 1000; private static final int IMAGE_SLEEP_TIME = 1 * 1000;
private static final int IP_BLOCK_SLEEP_TIME = 60 * 1000; private static final int IP_BLOCK_SLEEP_TIME = 60 * 1000;
private static final int TIMEOUT = 5 * 1000;
private static final String DOMAIN = "g.e-hentai.org", HOST = "e-hentai"; private static final String DOMAIN = "g.e-hentai.org", HOST = "e-hentai";
@ -31,6 +31,12 @@ public class EHentaiRipper extends AlbumRipper {
// Current HTML document // Current HTML document
private Document albumDoc = null; private Document albumDoc = null;
private static final Map<String,String> cookies = new HashMap<String,String>();
static {
cookies.put("nw", "1");
cookies.put("tip", "1");
}
public EHentaiRipper(URL url) throws IOException { public EHentaiRipper(URL url) throws IOException {
super(url); super(url);
@ -49,14 +55,9 @@ public class EHentaiRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving " + url.toExternalForm());
sendUpdate(STATUS.LOADING_RESOURCE, url.toString()); sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
albumDoc = Jsoup.connect(url.toExternalForm()) logger.info("Retrieving " + url);
.userAgent(USER_AGENT) albumDoc = getDocument(url.toExternalForm(), cookies);
.cookie("nw", "1")
.cookie("tip", "1")
.timeout(TIMEOUT)
.get();
} }
Elements elems = albumDoc.select("#gn"); Elements elems = albumDoc.select("#gn");
return HOST + "_" + elems.get(0).text(); return HOST + "_" + elems.get(0).text();
@ -95,12 +96,7 @@ public class EHentaiRipper extends AlbumRipper {
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving album page " + nextUrl); logger.info(" Retrieving album page " + nextUrl);
sendUpdate(STATUS.LOADING_RESOURCE, nextUrl); sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
albumDoc = Jsoup.connect(nextUrl) albumDoc = getDocument(nextUrl, this.url.toExternalForm(), cookies);
.userAgent(USER_AGENT)
.cookie("nw", "1")
.timeout(TIMEOUT)
.referrer(this.url.toExternalForm())
.get();
} }
// Check for rate limiting // Check for rate limiting
if (albumDoc.toString().contains("IP address will be automatically banned")) { if (albumDoc.toString().contains("IP address will be automatically banned")) {
@ -201,12 +197,8 @@ public class EHentaiRipper extends AlbumRipper {
private void fetchImage() { private void fetchImage() {
try { try {
Document doc = Jsoup.connect(this.url.toExternalForm()) String u = this.url.toExternalForm();
.userAgent(USER_AGENT) Document doc = getDocument(u, u, cookies);
.cookie("nw", "1")
.timeout(TIMEOUT)
.referrer(this.url.toExternalForm())
.get();
// Check for rate limit // Check for rate limit
if (doc.toString().contains("IP address will be automatically banned")) { if (doc.toString().contains("IP address will be automatically banned")) {
if (this.retries == 0) { if (this.retries == 0) {
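The repeated .cookie("nw", "1").cookie("tip", "1") chains become a single statically initialized map passed to getDocument(url, cookies). A sketch of the same idiom in isolation; only the two cookie values visible in the diff are used, and the class name is hypothetical.

```java
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class StaticCookieSketch {
    // Built once, shared by every request this class makes (values taken from the diff).
    private static final Map<String, String> COOKIES = new HashMap<String, String>();
    static {
        COOKIES.put("nw", "1");
        COOKIES.put("tip", "1");
    }

    static Document load(String url) throws IOException {
        return Jsoup.connect(url).cookies(COOKIES).get();
    }

    public static void main(String[] args) throws IOException {
        System.out.println(load("https://example.com/gallery/1").title());
    }
}
```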

View File: EightmusesRipper.java

@ -7,9 +7,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
@ -43,12 +40,7 @@ public class EightmusesRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.method(Method.GET)
.timeout(Utils.getConfigInteger("download.timeout", 5000))
.execute()
.parse();
} }
Element titleElement = albumDoc.select("meta[name=description]").first(); Element titleElement = albumDoc.select("meta[name=description]").first();
String title = titleElement.attr("content"); String title = titleElement.attr("content");
@ -71,11 +63,7 @@ public class EightmusesRipper extends AlbumRipper {
logger.info(" Retrieving " + url); logger.info(" Retrieving " + url);
sendUpdate(STATUS.LOADING_RESOURCE, url); sendUpdate(STATUS.LOADING_RESOURCE, url);
if (albumDoc == null) { if (albumDoc == null) {
Response resp = Jsoup.connect(url) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.timeout(Utils.getConfigInteger("download.timeout", 5000))
.execute();
albumDoc = resp.parse();
} }
int index = 0; // Both album index and image index int index = 0; // Both album index and image index

View File: FapprovedRipper.java

@ -6,12 +6,12 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS; import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
public class FapprovedRipper extends AlbumRipper { public class FapprovedRipper extends AlbumRipper {
@ -38,7 +38,7 @@ public class FapprovedRipper extends AlbumRipper {
} }
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
int page = 0; int index = 0, page = 0;
String url, user = getGID(this.url); String url, user = getGID(this.url);
boolean hasNextPage = true; boolean hasNextPage = true;
while (hasNextPage) { while (hasNextPage) {
@ -46,15 +46,18 @@ public class FapprovedRipper extends AlbumRipper {
url = "http://fapproved.com/users/" + user + "/images?page=" + page; url = "http://fapproved.com/users/" + user + "/images?page=" + page;
this.sendUpdate(STATUS.LOADING_RESOURCE, url); this.sendUpdate(STATUS.LOADING_RESOURCE, url);
logger.info(" Retrieving " + url); logger.info(" Retrieving " + url);
Document doc = Jsoup.connect(url) Document doc = getDocument(url, true);
.ignoreContentType(true)
.get();
for (Element image : doc.select("div.actual-image img")) { for (Element image : doc.select("div.actual-image img")) {
String imageUrl = image.attr("src"); String imageUrl = image.attr("src");
if (imageUrl.startsWith("//")) { if (imageUrl.startsWith("//")) {
imageUrl = "http:" + imageUrl; imageUrl = "http:" + imageUrl;
} }
addURLToDownload(new URL(imageUrl)); index++;
String prefix = "";
if (Utils.getConfigBoolean("download.save_order", true)) {
prefix = String.format("%03d_", index);
}
addURLToDownload(new URL(imageUrl), prefix);
} }
if ( (doc.select("div.pagination li.next.disabled").size() != 0) if ( (doc.select("div.pagination li.next.disabled").size() != 0)
|| (doc.select("div.pagination").size() == 0) ) { || (doc.select("div.pagination").size() == 0) ) {
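Besides switching to getDocument(url, true), this ripper starts honoring download.save_order by prefixing each file with a zero-padded index so files sort in page order. A tiny self-contained sketch of that numbering; the boolean constant stands in for Utils.getConfigBoolean("download.save_order", true).

```java
public class PrefixSketch {
    // Stand-in for Utils.getConfigBoolean("download.save_order", true).
    private static final boolean SAVE_ORDER = true;

    public static void main(String[] args) {
        String[] imageUrls = { "http://example.com/a.jpg", "http://example.com/b.jpg" };
        int index = 0;
        for (String imageUrl : imageUrls) {
            index++;
            String prefix = SAVE_ORDER ? String.format("%03d_", index) : "";
            // The ripper would call addURLToDownload(new URL(imageUrl), prefix) here.
            System.out.println(prefix + imageUrl.substring(imageUrl.lastIndexOf('/') + 1));
        }
    }
}
```

Run as-is this prints 001_a.jpg and 002_b.jpg, the same ordering the downloaded files get.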

View File: FlickrRipper.java

@ -61,7 +61,7 @@ public class FlickrRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(url.toExternalForm()).get(); albumDoc = getDocument(url);
} }
String user = url.toExternalForm(); String user = url.toExternalForm();
user = user.substring(user.indexOf("/photos/") + "/photos/".length()); user = user.substring(user.indexOf("/photos/") + "/photos/".length());
@ -124,8 +124,7 @@ public class FlickrRipper extends AlbumRipper {
} }
logger.info(" Retrieving " + nextURL); logger.info(" Retrieving " + nextURL);
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(nextURL) albumDoc = getDocument(nextURL);
.get();
} }
for (Element thumb : albumDoc.select("a[data-track=photo-click]")) { for (Element thumb : albumDoc.select("a[data-track=photo-click]")) {
String imageTitle = null; String imageTitle = null;
@ -212,10 +211,10 @@ public class FlickrRipper extends AlbumRipper {
postData.put("passwd", new String(Base64.decode("MUZha2V5ZmFrZQ=="))); postData.put("passwd", new String(Base64.decode("MUZha2V5ZmFrZQ==")));
String action = doc.select("form[method=post]").get(0).attr("action"); String action = doc.select("form[method=post]").get(0).attr("action");
resp = Jsoup.connect(action) resp = Jsoup.connect(action)
.cookies(resp.cookies()) .cookies(resp.cookies())
.data(postData) .data(postData)
.method(Method.POST) .method(Method.POST)
.execute(); .execute();
return resp.cookies(); return resp.cookies();
} }
@ -260,9 +259,7 @@ public class FlickrRipper extends AlbumRipper {
private Document getLargestImagePageDocument(URL url) throws IOException { private Document getLargestImagePageDocument(URL url) throws IOException {
// Get current page // Get current page
Document doc = Jsoup.connect(url.toExternalForm()) Document doc = getDocument(url);
.userAgent(USER_AGENT)
.get();
// Look for larger image page // Look for larger image page
String largestImagePage = this.url.toExternalForm(); String largestImagePage = this.url.toExternalForm();
for (Element olSize : doc.select("ol.sizes-list > li > ol > li")) { for (Element olSize : doc.select("ol.sizes-list > li > ol > li")) {
@ -280,9 +277,7 @@ public class FlickrRipper extends AlbumRipper {
} }
if (!largestImagePage.equals(this.url.toExternalForm())) { if (!largestImagePage.equals(this.url.toExternalForm())) {
// Found larger image page, get it. // Found larger image page, get it.
doc = Jsoup.connect(largestImagePage) doc = getDocument(largestImagePage);
.userAgent(USER_AGENT)
.get();
} }
return doc; return doc;
} }

View File: GifyoRipper.java

@ -50,11 +50,7 @@ public class GifyoRipper extends AlbumRipper {
logger.info(" Retrieving " + this.url + "(page #" + page + ")"); logger.info(" Retrieving " + this.url + "(page #" + page + ")");
Response resp = null; Response resp = null;
if (page == 0) { if (page == 0) {
resp = Jsoup.connect(this.url.toExternalForm()) resp = getResponse(this.url, true);
.ignoreContentType(true)
.userAgent(USER_AGENT)
.method(Method.GET)
.execute();
cookies = resp.cookies(); cookies = resp.cookies();
} }
else { else {

View File: GirlsOfDesireRipper.java

@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -17,7 +16,6 @@ import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
public class GirlsOfDesireRipper extends AlbumRipper { public class GirlsOfDesireRipper extends AlbumRipper {
// All sleep times are in milliseconds // All sleep times are in milliseconds
private static final int IMAGE_SLEEP_TIME = 100; private static final int IMAGE_SLEEP_TIME = 100;
private static final int TIMEOUT = 5 * 1000;
private static final String DOMAIN = "girlsofdesire.org", HOST = "GirlsOfDesire"; private static final String DOMAIN = "girlsofdesire.org", HOST = "GirlsOfDesire";
@ -43,10 +41,7 @@ public class GirlsOfDesireRipper extends AlbumRipper {
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving " + url.toExternalForm()); logger.info(" Retrieving " + url.toExternalForm());
sendUpdate(STATUS.LOADING_RESOURCE, url.toString()); sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.timeout(TIMEOUT)
.get();
} }
Elements elems = albumDoc.select(".albumName"); Elements elems = albumDoc.select(".albumName");
return HOST + "_" + elems.first().text(); return HOST + "_" + elems.first().text();
@ -81,11 +76,7 @@ public class GirlsOfDesireRipper extends AlbumRipper {
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving album page " + nextUrl); logger.info(" Retrieving album page " + nextUrl);
sendUpdate(STATUS.LOADING_RESOURCE, nextUrl); sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
albumDoc = Jsoup.connect(nextUrl) albumDoc = getDocument(nextUrl);
.userAgent(USER_AGENT)
.timeout(TIMEOUT)
.referrer(this.url.toExternalForm())
.get();
} }
// Find thumbnails // Find thumbnails

View File: GonewildRipper.java

@ -57,10 +57,7 @@ public class GonewildRipper extends AlbumRipper {
gwURL = baseGwURL gwURL = baseGwURL
+ "&start=" + start; + "&start=" + start;
start += count; start += count;
jsonString = Jsoup.connect(gwURL) jsonString = getResponse(gwURL, true).body();
.ignoreContentType(true)
.execute()
.body();
json = new JSONObject(jsonString); json = new JSONObject(jsonString);
if (json.has("error")) { if (json.has("error")) {
logger.error("Error while retrieving user posts:" + json.getString("error")); logger.error("Error while retrieving user posts:" + json.getString("error"));

View File: HentaifoundryRipper.java

@ -7,9 +7,7 @@ import java.util.Map;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response; import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -42,14 +40,11 @@ public class HentaifoundryRipper extends AlbumRipper {
int index = 0; int index = 0;
// Get cookies // Get cookies
Response resp = Jsoup.connect("http://www.hentai-foundry.com/") Response resp = getResponse("http://www.hentai-foundry.com/");
.execute();
Map<String,String> cookies = resp.cookies(); Map<String,String> cookies = resp.cookies();
resp = Jsoup.connect("http://www.hentai-foundry.com/?enterAgree=1&size=1500") resp = getResponse("http://www.hentai-foundry.com/?enterAgree=1&size=1500",
.referrer("http://www.hentai-foundry.com/") "http://www.hentai-foundry.com/",
.cookies(cookies) cookies);
.method(Method.GET)
.execute();
cookies = resp.cookies(); cookies = resp.cookies();
logger.info("cookies: " + cookies); logger.info("cookies: " + cookies);
@ -59,12 +54,7 @@ public class HentaifoundryRipper extends AlbumRipper {
break; break;
} }
sendUpdate(STATUS.LOADING_RESOURCE, nextURL); sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
Document doc = Jsoup.connect(nextURL) Document doc = getDocument(nextURL, this.url.toExternalForm(), cookies);
.userAgent(USER_AGENT)
.timeout(5000)
.cookies(cookies)
.referrer(this.url.toExternalForm())
.get();
for (Element thumb : doc.select("td > a:first-child")) { for (Element thumb : doc.select("td > a:first-child")) {
if (isStopped()) { if (isStopped()) {
break; break;

View File: ImagearnRipper.java

@ -6,11 +6,11 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils; import com.rarchives.ripme.utils.Utils;
public class ImagearnRipper extends AlbumRipper { public class ImagearnRipper extends AlbumRipper {
@ -41,7 +41,7 @@ public class ImagearnRipper extends AlbumRipper {
} }
private URL getGalleryFromImage(URL url) throws IOException { private URL getGalleryFromImage(URL url) throws IOException {
Document doc = Jsoup.connect(url.toExternalForm()).get(); Document doc = getDocument(url);
for (Element link : doc.select("a[href~=^gallery\\.php.*$]")) { for (Element link : doc.select("a[href~=^gallery\\.php.*$]")) {
logger.info("LINK: " + link.toString()); logger.info("LINK: " + link.toString());
if (link.hasAttr("href") if (link.hasAttr("href")
@ -57,9 +57,13 @@ public class ImagearnRipper extends AlbumRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
int index = 0; int index = 0;
logger.info("[ ] Retrieving " + this.url.toExternalForm()); logger.info("Retrieving " + this.url.toExternalForm());
Document doc = Jsoup.connect(url.toExternalForm()).get(); sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
Document doc = getDocument(this.url);
for (Element thumb : doc.select("img.border")) { for (Element thumb : doc.select("img.border")) {
if (isStopped()) {
break;
}
String image = thumb.attr("src"); String image = thumb.attr("src");
image = image.replaceAll("thumbs[0-9]*\\.imagearn\\.com/", "img.imagearn.com/imags/"); image = image.replaceAll("thumbs[0-9]*\\.imagearn\\.com/", "img.imagearn.com/imags/");
index += 1; index += 1;

View File: ImagebamRipper.java

@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -48,10 +47,7 @@ public class ImagebamRipper extends AlbumRipper {
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving " + url.toExternalForm()); logger.info(" Retrieving " + url.toExternalForm());
sendUpdate(STATUS.LOADING_RESOURCE, url.toString()); sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.timeout(5000)
.get();
} }
Elements elems = albumDoc.select("legend"); Elements elems = albumDoc.select("legend");
String title = elems.first().text(); String title = elems.first().text();
@ -98,11 +94,7 @@ public class ImagebamRipper extends AlbumRipper {
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving album page " + nextUrl); logger.info(" Retrieving album page " + nextUrl);
sendUpdate(STATUS.LOADING_RESOURCE, nextUrl); sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
albumDoc = Jsoup.connect(nextUrl) albumDoc = getDocument(nextUrl, this.url.toExternalForm(), null);
.userAgent(USER_AGENT)
.timeout(5000)
.referrer(this.url.toExternalForm())
.get();
} }
// Find thumbnails // Find thumbnails
Elements thumbs = albumDoc.select("div > a[target=_blank]:not(.footera)"); Elements thumbs = albumDoc.select("div > a[target=_blank]:not(.footera)");
@ -149,6 +141,7 @@ public class ImagebamRipper extends AlbumRipper {
} }
} }
imagebamThreadPool.waitForThreads();
waitForThreads(); waitForThreads();
} }
@ -178,12 +171,7 @@ public class ImagebamRipper extends AlbumRipper {
private void fetchImage() { private void fetchImage() {
try { try {
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(url);
.userAgent(USER_AGENT)
.cookie("nw", "1")
.timeout(5000)
.referrer(this.url.toExternalForm())
.get();
// Find image // Find image
Elements images = doc.select("td > img"); Elements images = doc.select("td > img");
if (images.size() == 0) { if (images.size() == 0) {
@ -192,7 +180,7 @@ public class ImagebamRipper extends AlbumRipper {
} }
Element image = images.first(); Element image = images.first();
String imgsrc = image.attr("src"); String imgsrc = image.attr("src");
logger.info("Found URL " + imgsrc + " via " + images.get(0)); logger.info("Found URL " + imgsrc);
// Provide prefix and let the AbstractRipper "guess" the filename // Provide prefix and let the AbstractRipper "guess" the filename
String prefix = ""; String prefix = "";
if (Utils.getConfigBoolean("download.save_order", true)) { if (Utils.getConfigBoolean("download.save_order", true)) {
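The added imagebamThreadPool.waitForThreads() drains the ripper's own image-page pool before the shared waitForThreads() call, so queued page fetches finish before the rip is considered complete. A rough sketch of that drain-then-finish idea using a plain ExecutorService, since DownloadThreadPool's internals are not part of this diff.

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class PoolDrainSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pagePool = Executors.newFixedThreadPool(3);
        for (int i = 0; i < 10; i++) {
            final int n = i;
            pagePool.submit(new Runnable() {
                public void run() { System.out.println("fetched image page " + n); }
            });
        }
        // Equivalent in spirit to imagebamThreadPool.waitForThreads(): stop accepting
        // new work and block until every queued page fetch has run.
        pagePool.shutdown();
        pagePool.awaitTermination(10, TimeUnit.MINUTES);
        System.out.println("all image pages processed; safe to wait for downloads");
    }
}
```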

View File: ImagefapRipper.java

@ -45,7 +45,7 @@ public class ImagefapRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(url.toExternalForm()).get(); albumDoc = getDocument(url);
} }
String title = albumDoc.title(); String title = albumDoc.title();
Pattern p = Pattern.compile("^Porn pics of (.*) \\(Page 1\\)$"); Pattern p = Pattern.compile("^Porn pics of (.*) \\(Page 1\\)$");
@ -92,9 +92,9 @@ public class ImagefapRipper extends AlbumRipper {
public void rip() throws IOException { public void rip() throws IOException {
int index = 0; int index = 0;
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm()); sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info("Retrieving " + this.url);
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(this.url.toExternalForm()).get(); albumDoc = getDocument(this.url);
} }
while (true) { while (true) {
if (isStopped()) { if (isStopped()) {

View File: ImagestashRipper.java

@ -8,8 +8,6 @@ import java.util.regex.Pattern;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Connection.Method;
import org.jsoup.Jsoup;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS; import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
@ -40,20 +38,21 @@ public class ImagestashRipper extends AlbumRipper {
String baseURL = "https://imagestash.org/images?tags=" + getGID(this.url); String baseURL = "https://imagestash.org/images?tags=" + getGID(this.url);
int page = 0, index = 0; int page = 0, index = 0;
while (true) { while (true) {
if (isStopped()) {
break;
}
page++; page++;
String nextURL = baseURL + "&page=" + page; String nextURL = baseURL + "&page=" + page;
logger.info("[ ] Retrieving " + nextURL); logger.info("[ ] Retrieving " + nextURL);
sendUpdate(STATUS.LOADING_RESOURCE, nextURL); sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
String jsonText = Jsoup.connect(nextURL) String jsonText = getResponse(nextURL, true).body();
.ignoreContentType(true)
.userAgent(USER_AGENT)
.method(Method.GET)
.execute()
.body();
logger.info(jsonText); logger.info(jsonText);
JSONObject json = new JSONObject(jsonText); JSONObject json = new JSONObject(jsonText);
JSONArray images = json.getJSONArray("images"); JSONArray images = json.getJSONArray("images");
for (int i = 0; i < images.length(); i++) { for (int i = 0; i < images.length(); i++) {
if (isStopped()) {
break;
}
JSONObject image = images.getJSONObject(i); JSONObject image = images.getJSONObject(i);
String imageURL = image.getString("src"); String imageURL = image.getString("src");
if (imageURL.startsWith("/")) { if (imageURL.startsWith("/")) {

View File: ImagevenueRipper.java

@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -61,11 +60,7 @@ public class ImagevenueRipper extends AlbumRipper {
String nextUrl = this.url.toExternalForm(); String nextUrl = this.url.toExternalForm();
logger.info(" Retrieving album page " + nextUrl); logger.info(" Retrieving album page " + nextUrl);
sendUpdate(STATUS.LOADING_RESOURCE, nextUrl); sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
Document albumDoc = Jsoup.connect(nextUrl) Document albumDoc = getDocument(nextUrl);
.userAgent(USER_AGENT)
.timeout(5000)
.referrer(this.url.toExternalForm())
.get();
// Find thumbnails // Find thumbnails
Elements thumbs = albumDoc.select("a[target=_blank]"); Elements thumbs = albumDoc.select("a[target=_blank]");
if (thumbs.size() == 0) { if (thumbs.size() == 0) {
@ -119,11 +114,8 @@ public class ImagevenueRipper extends AlbumRipper {
private void fetchImage() { private void fetchImage() {
try { try {
Document doc = Jsoup.connect(this.url.toExternalForm()) sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
.userAgent(USER_AGENT) Document doc = getDocument(this.url);
.timeout(5000)
.referrer(this.url.toExternalForm())
.get();
// Find image // Find image
Elements images = doc.select("a > img"); Elements images = doc.select("a > img");
if (images.size() == 0) { if (images.size() == 0) {

View File: ImgboxRipper.java

@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -36,11 +35,8 @@ public class ImgboxRipper extends AlbumRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url);
sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm()); sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm());
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Elements images = doc.select("div.boxed-content > a > img"); Elements images = doc.select("div.boxed-content > a > img");
if (images.size() == 0) { if (images.size() == 0) {
logger.error("No images found at " + this.url); logger.error("No images found at " + this.url);
@ -48,6 +44,9 @@ public class ImgboxRipper extends AlbumRipper {
} }
int index = 0; int index = 0;
for (Element image : images) { for (Element image : images) {
if (isStopped()) {
break;
}
index++; index++;
String imageUrl = image.attr("src").replace("s.imgbox.com", "i.imgbox.com"); String imageUrl = image.attr("src").replace("s.imgbox.com", "i.imgbox.com");
String prefix = ""; String prefix = "";
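Several rippers in this commit (Imgbox, Imagestash, Motherless, Nfsfw) gain if (isStopped()) break; guards inside their scraping loops so a stop request takes effect between items rather than after the whole page. A standalone sketch of that cooperative-cancellation shape; the volatile flag stands in for the ripper's isStopped() state and would be flipped from the UI thread.

```java
import java.util.Arrays;
import java.util.List;

public class StopCheckSketch {
    // Stand-in for the ripper's stop flag, set from another thread.
    private static volatile boolean stopped = false;

    public static void main(String[] args) {
        List<String> thumbs = Arrays.asList("a.jpg", "b.jpg", "c.jpg");
        int index = 0;
        for (String thumb : thumbs) {
            if (stopped) {
                break; // honor a stop request between items
            }
            index++;
            System.out.println(String.format("%03d_%s", index, thumb));
        }
    }
}
```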

View File: ImgurRipper.java

@ -83,11 +83,7 @@ public class ImgurRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.timeout(10 * 1000)
.maxBodySize(0)
.get();
} }
String title = albumDoc.title(); String title = albumDoc.title();
if (!title.contains(" - Imgur") if (!title.contains(" - Imgur")
@ -261,8 +257,9 @@ public class ImgurRipper extends AlbumRipper {
* @throws IOException * @throws IOException
*/ */
private void ripUserAccount(URL url) throws IOException { private void ripUserAccount(URL url) throws IOException {
logger.info("[ ] Retrieving " + url.toExternalForm()); logger.info("Retrieving " + url);
Document doc = Jsoup.connect(url.toExternalForm()).get(); sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm());
Document doc = getDocument(url);
for (Element album : doc.select("div.cover a")) { for (Element album : doc.select("div.cover a")) {
stopCheck(); stopCheck();
if (!album.hasAttr("href") if (!album.hasAttr("href")
@ -292,10 +289,7 @@ public class ImgurRipper extends AlbumRipper {
try { try {
page++; page++;
String jsonUrlWithParams = jsonUrl + "?sort=0&order=1&album=0&page=" + page + "&perPage=60"; String jsonUrlWithParams = jsonUrl + "?sort=0&order=1&album=0&page=" + page + "&perPage=60";
String jsonString = Jsoup.connect(jsonUrlWithParams) String jsonString = getResponse(jsonUrlWithParams, true).body();
.ignoreContentType(true)
.execute()
.body();
JSONObject json = new JSONObject(jsonString); JSONObject json = new JSONObject(jsonString);
JSONObject jsonData = json.getJSONObject("data"); JSONObject jsonData = json.getJSONObject("data");
if (jsonData.has("count")) { if (jsonData.has("count")) {
@ -333,10 +327,7 @@ public class ImgurRipper extends AlbumRipper {
} }
pageURL += "page/" + page + "/miss?scrolled"; pageURL += "page/" + page + "/miss?scrolled";
logger.info(" Retrieving " + pageURL); logger.info(" Retrieving " + pageURL);
Document doc = Jsoup.connect(pageURL) Document doc = getDocument(pageURL);
.userAgent(USER_AGENT)
.timeout(10 * 1000)
.get();
Elements imgs = doc.select(".post img"); Elements imgs = doc.select(".post img");
for (Element img : imgs) { for (Element img : imgs) {
String image = img.attr("src"); String image = img.attr("src");

View File: InstagramRipper.java

@ -8,7 +8,6 @@ import java.util.regex.Pattern;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
@ -63,7 +62,7 @@ public class InstagramRipper extends AlbumRipper {
} }
private URL getUserPageFromImage(URL url) throws IOException { private URL getUserPageFromImage(URL url) throws IOException {
Document doc = Jsoup.connect(url.toExternalForm()).get(); Document doc = getDocument(url);
for (Element element : doc.select("meta[property='og:description']")) { for (Element element : doc.select("meta[property='og:description']")) {
String content = element.attr("content"); String content = element.attr("content");
if (content.endsWith("'s photo on Instagram")) { if (content.endsWith("'s photo on Instagram")) {
@ -74,9 +73,9 @@ public class InstagramRipper extends AlbumRipper {
} }
private String getUserID(URL url) throws IOException { private String getUserID(URL url) throws IOException {
logger.info(" Retrieving " + url); logger.info("Retrieving " + url);
this.sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm()); this.sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm());
Document doc = Jsoup.connect(this.url.toExternalForm()).get(); Document doc = getDocument(url);
for (Element element : doc.select("input[id=user_public]")) { for (Element element : doc.select("input[id=user_public]")) {
return element.attr("value"); return element.attr("value");
} }
@ -92,12 +91,7 @@ public class InstagramRipper extends AlbumRipper {
String url = baseURL + params; String url = baseURL + params;
this.sendUpdate(STATUS.LOADING_RESOURCE, url); this.sendUpdate(STATUS.LOADING_RESOURCE, url);
logger.info(" Retrieving " + url); logger.info(" Retrieving " + url);
String jsonString = Jsoup.connect(url) String jsonString = getResponse(url, true).body();
.userAgent(USER_AGENT)
.timeout(10000)
.ignoreContentType(true)
.execute()
.body();
JSONObject json = new JSONObject(jsonString); JSONObject json = new JSONObject(jsonString);
JSONArray datas = json.getJSONArray("data"); JSONArray datas = json.getJSONArray("data");
String nextMaxID = ""; String nextMaxID = "";

View File: IrarchivesRipper.java

@ -6,7 +6,6 @@ import java.net.URL;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Jsoup;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ripper.rippers.ImgurRipper.ImgurAlbum; import com.rarchives.ripme.ripper.rippers.ImgurRipper.ImgurAlbum;
@ -16,8 +15,6 @@ import com.rarchives.ripme.utils.Utils;
public class IrarchivesRipper extends AlbumRipper { public class IrarchivesRipper extends AlbumRipper {
private static final int TIMEOUT = 60000; // Long timeout for this poorly-optimized site.
private static final String DOMAIN = "i.rarchives.com", private static final String DOMAIN = "i.rarchives.com",
HOST = "irarchives"; HOST = "irarchives";
@ -25,6 +22,11 @@ public class IrarchivesRipper extends AlbumRipper {
super(url); super(url);
} }
@Override
public int getTimeout() {
return 60 * 1000;
}
@Override @Override
public boolean canRip(URL url) { public boolean canRip(URL url) {
return url.getHost().endsWith(DOMAIN); return url.getHost().endsWith(DOMAIN);
@ -46,11 +48,7 @@ public class IrarchivesRipper extends AlbumRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url); logger.info(" Retrieving " + this.url);
String jsonString = Jsoup.connect(this.url.toExternalForm()) String jsonString = getResponse(url, true).body();
.ignoreContentType(true)
.timeout(TIMEOUT)
.execute()
.body();
JSONObject json = new JSONObject(jsonString); JSONObject json = new JSONObject(jsonString);
JSONArray posts = json.getJSONArray("posts"); JSONArray posts = json.getJSONArray("posts");
if (posts.length() == 0) { if (posts.length() == 0) {
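Rather than a per-class TIMEOUT constant, a slow site now overrides the getTimeout() hook added to AbstractRipper, and the base class feeds that value into connection.timeout(...). A sketch of the hook pattern in isolation; the class names are hypothetical, not the actual RipMe classes.

```java
// Hypothetical reduction of the getTimeout() hook.
abstract class BaseFetcher {
    protected static final int DEFAULT_TIMEOUT_MS = 5 * 1000;

    // Subclasses override this to stretch the limit for slow endpoints.
    protected int getTimeout() {
        return DEFAULT_TIMEOUT_MS;
    }

    void describe() {
        System.out.println(getClass().getSimpleName() + " uses timeout " + getTimeout() + " ms");
    }
}

class SlowSiteFetcher extends BaseFetcher {
    @Override
    protected int getTimeout() {
        return 60 * 1000; // same value IrarchivesRipper picks for its slow API
    }
}

public class TimeoutHookSketch {
    public static void main(String[] args) {
        new SlowSiteFetcher().describe(); // prints 60000 ms
    }
}
```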

View File: MediacrushRipper.java

@ -15,7 +15,6 @@ import javax.swing.JOptionPane;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Jsoup;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS; import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
@ -23,8 +22,6 @@ import com.rarchives.ripme.utils.Utils;
public class MediacrushRipper extends AlbumRipper { public class MediacrushRipper extends AlbumRipper {
private static final int TIMEOUT = 60000; // Long timeout for this poorly-optimized site.
private static final String DOMAIN = "mediacru.sh", private static final String DOMAIN = "mediacru.sh",
HOST = "mediacrush"; HOST = "mediacrush";
@ -72,12 +69,7 @@ public class MediacrushRipper extends AlbumRipper {
sendUpdate(STATUS.LOADING_RESOURCE, url); sendUpdate(STATUS.LOADING_RESOURCE, url);
String jsonString = null; String jsonString = null;
try { try {
jsonString = Jsoup.connect(url) jsonString = getResponse(url, true).body();
.ignoreContentType(true)
.userAgent(USER_AGENT)
.timeout(TIMEOUT)
.execute()
.body();
} catch (Exception re) { } catch (Exception re) {
// Check for >1024 bit encryption but in older versions of Java // Check for >1024 bit encryption but in older versions of Java
if (re.getCause().getCause() instanceof InvalidAlgorithmParameterException) { if (re.getCause().getCause() instanceof InvalidAlgorithmParameterException) {
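The MediaCrush ripper wraps the JSON fetch in a broad catch and looks two getCause() levels down for InvalidAlgorithmParameterException, the symptom of the old-JRE TLS key-size limit. Below is a sketch of a null-safe variant of that check that walks the whole cause chain instead of assuming its depth; the class name is hypothetical.

```java
import java.security.InvalidAlgorithmParameterException;

public class CauseChainSketch {
    // Walk the entire cause chain instead of assuming it is exactly two levels deep.
    static boolean hasCause(Throwable t, Class<? extends Throwable> type) {
        for (Throwable cur = t; cur != null; cur = cur.getCause()) {
            if (type.isInstance(cur)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Exception re = new RuntimeException(
                new RuntimeException(new InvalidAlgorithmParameterException("weak DH key")));
        if (hasCause(re, InvalidAlgorithmParameterException.class)) {
            System.out.println("old-Java TLS limitation detected; report it to the user");
        }
    }
}
```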

View File: MinusRipper.java

@ -9,7 +9,6 @@ import java.util.regex.Pattern;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Connection.Response; import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -48,9 +47,7 @@ public class MinusRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.get();
} }
Elements titles = albumDoc.select("meta[property=og:title]"); Elements titles = albumDoc.select("meta[property=og:title]");
if (titles.size() > 0) { if (titles.size() > 0) {
@ -132,11 +129,7 @@ public class MinusRipper extends AlbumRipper {
+ user + "/shares.json/" + user + "/shares.json/"
+ page; + page;
logger.info(" Retrieving " + jsonUrl); logger.info(" Retrieving " + jsonUrl);
Response resp = Jsoup.connect(jsonUrl) Response resp = getResponse(jsonUrl, true);
.userAgent(USER_AGENT)
.ignoreContentType(true)
.execute();
System.err.println(resp.body());
JSONObject json = new JSONObject(resp.body()); JSONObject json = new JSONObject(resp.body());
JSONArray galleries = json.getJSONArray("galleries"); JSONArray galleries = json.getJSONArray("galleries");
for (int i = 0; i < galleries.length(); i++) { for (int i = 0; i < galleries.length(); i++) {
@ -158,9 +151,7 @@ public class MinusRipper extends AlbumRipper {
private void ripAlbum(URL url, String subdir) throws IOException { private void ripAlbum(URL url, String subdir) throws IOException {
logger.info(" Retrieving " + url.toExternalForm()); logger.info(" Retrieving " + url.toExternalForm());
if (albumDoc == null || !subdir.equals("")) { if (albumDoc == null || !subdir.equals("")) {
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.get();
} }
Pattern p = Pattern.compile("^.*var gallerydata = (\\{.*\\});.*$", Pattern.DOTALL); Pattern p = Pattern.compile("^.*var gallerydata = (\\{.*\\});.*$", Pattern.DOTALL);
Matcher m = p.matcher(albumDoc.data()); Matcher m = p.matcher(albumDoc.data());

View File: MotherlessRipper.java

@ -6,12 +6,12 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool; import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils; import com.rarchives.ripme.utils.Utils;
public class MotherlessRipper extends AlbumRipper { public class MotherlessRipper extends AlbumRipper {
@ -61,13 +61,16 @@ public class MotherlessRipper extends AlbumRipper {
int index = 0, page = 1; int index = 0, page = 1;
String nextURL = this.url.toExternalForm(); String nextURL = this.url.toExternalForm();
while (nextURL != null) { while (nextURL != null) {
logger.info(" Retrieving " + nextURL); if (isStopped()) {
Document doc = Jsoup.connect(nextURL) break;
.userAgent(USER_AGENT) }
.timeout(5000) logger.info("Retrieving " + nextURL);
.referrer("http://motherless.com") sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
.get(); Document doc = getDocument(nextURL, "http://motherless.com", null);
for (Element thumb : doc.select("div.thumb a.img-container")) { for (Element thumb : doc.select("div.thumb a.img-container")) {
if (isStopped()) {
break;
}
String thumbURL = thumb.attr("href"); String thumbURL = thumb.attr("href");
if (thumbURL.contains("pornmd.com")) { if (thumbURL.contains("pornmd.com")) {
continue; continue;
@ -111,11 +114,11 @@ public class MotherlessRipper extends AlbumRipper {
@Override @Override
public void run() { public void run() {
try { try {
Document doc = Jsoup.connect(this.url.toExternalForm()) if (isStopped()) {
.userAgent(USER_AGENT) return;
.timeout(5000) }
.referrer(this.url.toExternalForm()) String u = this.url.toExternalForm();
.get(); Document doc = getDocument(u, u, null);
Pattern p = Pattern.compile("^.*__fileurl = '([^']{1,})';.*$", Pattern.DOTALL); Pattern p = Pattern.compile("^.*__fileurl = '([^']{1,})';.*$", Pattern.DOTALL);
Matcher m = p.matcher(doc.outerHtml()); Matcher m = p.matcher(doc.outerHtml());
if (m.matches()) { if (m.matches()) {
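Each Motherless image thread now calls getDocument(u, u, null): the image page is fetched with itself as the referrer and no cookies, then the direct file URL is pulled out of the inline script. A standalone sketch of that step; the page URL is a placeholder, while the regex is the one shown in the diff.

```java
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class ImagePageSketch {
    public static void main(String[] args) throws IOException {
        String pageUrl = "https://example.com/image/123"; // placeholder
        // Same shape as getDocument(u, u, null): a GET with the page as its own referrer.
        Document doc = Jsoup.connect(pageUrl)
                .referrer(pageUrl)
                .get();
        // Pull the direct file URL out of the page's inline script, as the ripper does.
        Pattern p = Pattern.compile("^.*__fileurl = '([^']{1,})';.*$", Pattern.DOTALL);
        Matcher m = p.matcher(doc.outerHtml());
        if (m.matches()) {
            System.out.println("file URL: " + m.group(1));
        }
    }
}
```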

View File: NfsfwRipper.java

@ -8,7 +8,6 @@ import java.util.List;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -47,9 +46,7 @@ public class NfsfwRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.get();
} }
String title = albumDoc.select("h2").first().text().trim(); String title = albumDoc.select("h2").first().text().trim();
return "nfsfw_" + Utils.filesystemSafe(title); return "nfsfw_" + Utils.filesystemSafe(title);
@ -90,12 +87,13 @@ public class NfsfwRipper extends AlbumRipper {
sendUpdate(STATUS.LOADING_RESOURCE, nextURL); sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
logger.info(" Retrieving " + nextURL); logger.info(" Retrieving " + nextURL);
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(nextURL) albumDoc = getDocument(nextURL);
.userAgent(USER_AGENT)
.get();
} }
// Subalbums // Subalbums
for (Element suba : albumDoc.select("td.IMG > a")) { for (Element suba : albumDoc.select("td.IMG > a")) {
if (isStopped()) {
break;
}
String subURL = "http://nfsfw.com" + suba.attr("href"); String subURL = "http://nfsfw.com" + suba.attr("href");
String subdir = subURL; String subdir = subURL;
while (subdir.endsWith("/")) { while (subdir.endsWith("/")) {
@ -106,6 +104,9 @@ public class NfsfwRipper extends AlbumRipper {
} }
// Images // Images
for (Element thumb : albumDoc.select("td.giItemCell > div > a")) { for (Element thumb : albumDoc.select("td.giItemCell > div > a")) {
if (isStopped()) {
break;
}
String imagePage = "http://nfsfw.com" + thumb.attr("href"); String imagePage = "http://nfsfw.com" + thumb.attr("href");
try { try {
NfsfwImageThread t = new NfsfwImageThread(new URL(imagePage), nextSubalbum, ++index); NfsfwImageThread t = new NfsfwImageThread(new URL(imagePage), nextSubalbum, ++index);
@ -155,11 +156,8 @@ public class NfsfwRipper extends AlbumRipper {
@Override @Override
public void run() { public void run() {
try { try {
Document doc = Jsoup.connect(this.url.toExternalForm()) String u = this.url.toExternalForm();
.userAgent(USER_AGENT) Document doc = getDocument(u, u, null);
.timeout(5000)
.referrer(this.url.toExternalForm())
.get();
Elements images = doc.select(".gbBlock img"); Elements images = doc.select(".gbBlock img");
if (images.size() == 0) { if (images.size() == 0) {
logger.error("Failed to find image at " + this.url); logger.error("Failed to find image at " + this.url);

View File: PhotobucketRipper.java

@ -11,7 +11,6 @@ import java.util.regex.Pattern;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Connection.Response; import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@ -50,7 +49,7 @@ public class PhotobucketRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (pageResponse == null) { if (pageResponse == null) {
pageResponse = Jsoup.connect(url.toExternalForm()).execute(); pageResponse = getResponse(url);
} }
Document albumDoc = pageResponse.parse(); Document albumDoc = pageResponse.parse();
Elements els = albumDoc.select("div.libraryTitle > h1"); Elements els = albumDoc.select("div.libraryTitle > h1");
@ -132,7 +131,7 @@ public class PhotobucketRipper extends AlbumRipper {
if (pageIndex > 1 || pageResponse == null) { if (pageIndex > 1 || pageResponse == null) {
url = theUrl + String.format("?sort=3&page=", pageIndex); url = theUrl + String.format("?sort=3&page=", pageIndex);
logger.info(" Retrieving " + url); logger.info(" Retrieving " + url);
pageResponse = Jsoup.connect(url).execute(); pageResponse = getResponse(url);
} }
Document albumDoc = pageResponse.parse(); Document albumDoc = pageResponse.parse();
// Retrieve JSON from request // Retrieve JSON from request
@ -190,10 +189,7 @@ public class PhotobucketRipper extends AlbumRipper {
+ "&json=1"; + "&json=1";
try { try {
logger.info("Loading " + apiUrl); logger.info("Loading " + apiUrl);
Document doc = Jsoup.connect(apiUrl) Document doc = getDocument(apiUrl, true);
.ignoreContentType(true)
.referrer(url)
.get();
String jsonString = doc.body().html().replace("&quot;", "\""); String jsonString = doc.body().html().replace("&quot;", "\"");
JSONObject json = new JSONObject(jsonString); JSONObject json = new JSONObject(jsonString);
JSONArray subalbums = json.getJSONObject("body").getJSONArray("subAlbums"); JSONArray subalbums = json.getJSONObject("body").getJSONArray("subAlbums");
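Photobucket keeps a Response around and parses it on demand, which the plain getResponse(url) overload makes trivial. A standalone sketch of why a Response is worth caching: a single execute() yields both the parsed Document and response metadata such as cookies; the URL and class name are placeholders.

```java
import java.io.IOException;

import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class ResponseReuseSketch {
    public static void main(String[] args) throws IOException {
        // One request gives access to the page body and to the response metadata.
        Response pageResponse = Jsoup.connect("https://example.com/library").execute();
        Document albumDoc = pageResponse.parse(); // parse the buffered body
        System.out.println("title:   " + albumDoc.title());
        System.out.println("cookies: " + pageResponse.cookies());
    }
}
```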

View File: PornhubRipper.java

@ -49,10 +49,7 @@ public class PornhubRipper extends AlbumRipper {
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving " + url.toExternalForm()); logger.info(" Retrieving " + url.toExternalForm());
sendUpdate(STATUS.LOADING_RESOURCE, url.toString()); sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
albumDoc = Jsoup.connect(url.toExternalForm()) albumDoc = getDocument(url);
.userAgent(USER_AGENT)
.timeout(TIMEOUT)
.get();
} }
Elements elems = albumDoc.select(".photoAlbumTitleV2"); Elements elems = albumDoc.select(".photoAlbumTitleV2");
return HOST + "_" + elems.get(0).text(); return HOST + "_" + elems.get(0).text();
@ -82,7 +79,7 @@ public class PornhubRipper extends AlbumRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
int index = 0, retries = 3; int index = 0;
String nextUrl = this.url.toExternalForm(); String nextUrl = this.url.toExternalForm();
if (isStopped()) { if (isStopped()) {
@ -92,11 +89,7 @@ public class PornhubRipper extends AlbumRipper {
if (albumDoc == null) { if (albumDoc == null) {
logger.info(" Retrieving album page " + nextUrl); logger.info(" Retrieving album page " + nextUrl);
sendUpdate(STATUS.LOADING_RESOURCE, nextUrl); sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
albumDoc = Jsoup.connect(nextUrl) albumDoc = getDocument(nextUrl, this.url.toExternalForm(), null);
.userAgent(USER_AGENT)
.timeout(TIMEOUT)
.referrer(this.url.toExternalForm())
.get();
} }
// Find thumbnails // Find thumbnails
@@ -139,14 +132,11 @@ public class PornhubRipper extends AlbumRipper {
private class PornhubImageThread extends Thread { private class PornhubImageThread extends Thread {
private URL url; private URL url;
private int index; private int index;
private File workingDir;
private int retries = 3;
public PornhubImageThread(URL url, int index, File workingDir) { public PornhubImageThread(URL url, int index, File workingDir) {
super(); super();
this.url = url; this.url = url;
this.index = index; this.index = index;
this.workingDir = workingDir;
} }
@Override @Override
@@ -156,11 +146,8 @@ public class PornhubRipper extends AlbumRipper {
private void fetchImage() { private void fetchImage() {
try { try {
Document doc = Jsoup.connect(this.url.toExternalForm()) String u = this.url.toExternalForm();
.userAgent(USER_AGENT) Document doc = getDocument(u, u, null);
.timeout(TIMEOUT)
.referrer(this.url.toExternalForm())
.get();
// Find image // Find image
Elements images = doc.select("#photoImageSection img"); Elements images = doc.select("#photoImageSection img");

View File

@@ -11,7 +11,6 @@ import java.util.regex.Pattern;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.json.JSONTokener; import org.json.JSONTokener;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
@@ -118,10 +117,7 @@ public class RedditRipper extends AlbumRipper {
logger.info(" Retrieving " + url); logger.info(" Retrieving " + url);
while(doc == null && attempts++ < 3) { while(doc == null && attempts++ < 3) {
try { try {
doc= Jsoup.connect(url.toExternalForm()) doc = getResponse(url, true).parse();
.ignoreContentType(true)
.userAgent(USER_AGENT)
.get();
} catch(SocketTimeoutException ex) { } catch(SocketTimeoutException ex) {
if(attempts >= 3) throw ex; if(attempts >= 3) throw ex;
logger.warn(String.format("[!] Connection timed out (attempt %d)", attempts)); logger.warn(String.format("[!] Connection timed out (attempt %d)", attempts));

View File

@@ -7,12 +7,14 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Connection.Method;
import org.jsoup.Jsoup; import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool; import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
public class SeeniveRipper extends AlbumRipper { public class SeeniveRipper extends AlbumRipper {
@@ -40,14 +42,16 @@ public class SeeniveRipper extends AlbumRipper {
public void rip() throws IOException { public void rip() throws IOException {
String baseURL = this.url.toExternalForm(); String baseURL = this.url.toExternalForm();
logger.info(" Retrieving " + baseURL); logger.info(" Retrieving " + baseURL);
Document doc = Jsoup.connect(baseURL) Document doc = getDocument(baseURL, baseURL, null);
.header("Referer", baseURL)
.userAgent(USER_AGENT)
.timeout(5000)
.get();
while (true) { while (true) {
if (isStopped()) {
break;
}
String lastID = null; String lastID = null;
for (Element element : doc.select("a.facebox")) { for (Element element : doc.select("a.facebox")) {
if (isStopped()) {
break;
}
String card = element.attr("href"); // "/v/<video_id>" String card = element.attr("href"); // "/v/<video_id>"
URL videoURL = new URL("https://seenive.com" + card); URL videoURL = new URL("https://seenive.com" + card);
SeeniveImageThread vit = new SeeniveImageThread(videoURL); SeeniveImageThread vit = new SeeniveImageThread(videoURL);
@@ -66,11 +70,7 @@ public class SeeniveRipper extends AlbumRipper {
} }
logger.info("[ ] Retrieving " + baseURL + "/next/" + lastID); logger.info("[ ] Retrieving " + baseURL + "/next/" + lastID);
String jsonString = Jsoup.connect(baseURL + "/next/" + lastID) String jsonString = getResponse(baseURL + "/next/" + lastID, Method.GET, USER_AGENT, baseURL, null, true).body();
.header("Referer", baseURL)
.userAgent(USER_AGENT)
.ignoreContentType(true)
.execute().body();
JSONObject json = new JSONObject(jsonString); JSONObject json = new JSONObject(jsonString);
String html = json.getString("Html"); String html = json.getString("Html");
if (html.equals("")) { if (html.equals("")) {
@@ -111,10 +111,9 @@ public class SeeniveRipper extends AlbumRipper {
@Override @Override
public void run() { public void run() {
try { try {
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
logger.info("[ ] Retreiving video page " + this.url); logger.info("[ ] Retreiving video page " + this.url);
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
for (Element element : doc.select("source")) { for (Element element : doc.select("source")) {
String video = element.attr("src"); String video = element.attr("src");
synchronized (threadPool) { synchronized (threadPool) {

View File

@@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
@@ -38,16 +37,16 @@ public class SmuttyRipper extends AlbumRipper {
String url, tag = getGID(this.url); String url, tag = getGID(this.url);
boolean hasNextPage = true; boolean hasNextPage = true;
while (hasNextPage) { while (hasNextPage) {
if (isStopped()) {
break;
}
page++; page++;
url = "http://smutty.com/h/" + tag + "/?q=%23" + tag + "&page=" + page + "&sort=date&lazy=1"; url = "http://smutty.com/h/" + tag + "/?q=%23" + tag + "&page=" + page + "&sort=date&lazy=1";
this.sendUpdate(STATUS.LOADING_RESOURCE, url); this.sendUpdate(STATUS.LOADING_RESOURCE, url);
logger.info(" Retrieving " + url); logger.info(" Retrieving " + url);
Document doc; Document doc;
try { try {
doc = Jsoup.connect(url) doc = getResponse(url, true).parse();
.userAgent(USER_AGENT)
.ignoreContentType(true)
.get();
} catch (IOException e) { } catch (IOException e) {
if (e.toString().contains("Status=404")) { if (e.toString().contains("Status=404")) {
logger.info("No more pages to load"); logger.info("No more pages to load");
@@ -57,6 +56,9 @@ public class SmuttyRipper extends AlbumRipper {
break; break;
} }
for (Element image : doc.select("a.l > img")) { for (Element image : doc.select("a.l > img")) {
if (isStopped()) {
break;
}
String imageUrl = image.attr("src"); String imageUrl = image.attr("src");
// Construct direct link to image based on thumbnail // Construct direct link to image based on thumbnail

View File

@@ -7,7 +7,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.HttpStatusException; import org.jsoup.HttpStatusException;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@@ -48,10 +47,7 @@ public class SupertangasRipper extends AlbumRipper {
try { try {
logger.info(" Retrieving " + theURL); logger.info(" Retrieving " + theURL);
sendUpdate(STATUS.LOADING_RESOURCE, theURL); sendUpdate(STATUS.LOADING_RESOURCE, theURL);
doc = Jsoup.connect(theURL) doc = getDocument(theURL);
.userAgent(USER_AGENT)
.timeout(5 * 1000)
.get();
} catch (HttpStatusException e) { } catch (HttpStatusException e) {
logger.debug("Hit end of pages at page " + page, e); logger.debug("Hit end of pages at page " + page, e);
break; break;

View File

@@ -6,12 +6,12 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils; import com.rarchives.ripme.utils.Utils;
public class TeenplanetRipper extends AlbumRipper { public class TeenplanetRipper extends AlbumRipper {
@@ -38,7 +38,7 @@ public class TeenplanetRipper extends AlbumRipper {
try { try {
// Attempt to use album title as GID // Attempt to use album title as GID
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(url.toExternalForm()).get(); albumDoc = getDocument(url);
} }
Elements elems = albumDoc.select("div.header > h2"); Elements elems = albumDoc.select("div.header > h2");
return HOST + "_" + elems.get(0).text(); return HOST + "_" + elems.get(0).text();
@@ -68,9 +68,10 @@ public class TeenplanetRipper extends AlbumRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
int index = 0; int index = 0;
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info("Retrieving " + this.url);
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(this.url.toExternalForm()).get(); albumDoc = getDocument(url);
} }
for (Element thumb : albumDoc.select("#galleryImages > a > img")) { for (Element thumb : albumDoc.select("#galleryImages > a > img")) {
if (!thumb.hasAttr("src")) { if (!thumb.hasAttr("src")) {

View File

@@ -9,10 +9,10 @@ import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils; import com.rarchives.ripme.utils.Utils;
public class TumblrRipper extends AlbumRipper { public class TumblrRipper extends AlbumRipper {
@@ -66,10 +66,7 @@ public class TumblrRipper extends AlbumRipper {
checkURL += url.getHost(); checkURL += url.getHost();
checkURL += "/info?api_key=" + API_KEY; checkURL += "/info?api_key=" + API_KEY;
try { try {
Document doc = Jsoup.connect(checkURL) Document doc = getResponse(checkURL, true).parse();
.ignoreContentType(true)
.userAgent(USER_AGENT)
.get();
String jsonString = doc.body().html().replaceAll("&quot;", "\""); String jsonString = doc.body().html().replaceAll("&quot;", "\"");
JSONObject json = new JSONObject(jsonString); JSONObject json = new JSONObject(jsonString);
int status = json.getJSONObject("meta").getInt("status"); int status = json.getJSONObject("meta").getInt("status");
@@ -90,18 +87,18 @@ public class TumblrRipper extends AlbumRipper {
} }
int offset; int offset;
for (String mediaType : mediaTypes) { for (String mediaType : mediaTypes) {
if (isStopped()) {
break;
}
offset = 0; offset = 0;
while (true) { while (true) {
if (isStopped()) { if (isStopped()) {
break; break;
} }
String apiURL = getTumblrApiURL(mediaType, offset); String apiURL = getTumblrApiURL(mediaType, offset);
logger.info(" Retrieving " + apiURL); logger.info("Retrieving " + apiURL);
Document doc = Jsoup.connect(apiURL) sendUpdate(STATUS.LOADING_RESOURCE, apiURL);
.ignoreContentType(true) Document doc = getResponse(apiURL, true).parse();
.timeout(10 * 1000)
.header("User-agent", USER_AGENT)
.get();
try { try {
Thread.sleep(1000); Thread.sleep(1000);
} catch (InterruptedException e) { } catch (InterruptedException e) {
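
For context, the API paging around the getResponse(apiURL, true) call above follows the usual offset pattern. A condensed sketch, assuming it runs inside TumblrRipper; the "response"/"posts" keys follow Tumblr's v2 API and the page size of 20 is assumed rather than taken from this diff:

    int offset = 0;
    boolean morePosts = true;
    while (morePosts && !isStopped()) {
        String apiURL = getTumblrApiURL(mediaType, offset); // built elsewhere in this ripper
        sendUpdate(STATUS.LOADING_RESOURCE, apiURL);
        Document doc = getResponse(apiURL, true).parse();   // JSON body, so skip the content-type check
        JSONObject json = new JSONObject(doc.body().html().replaceAll("&quot;", "\""));
        JSONArray posts = json.getJSONObject("response").getJSONArray("posts");
        morePosts = posts.length() > 0;
        // ... queue each post's photo/video URLs for download ...
        try {
            Thread.sleep(1000); // stay clear of Tumblr's rate limit
        } catch (InterruptedException e) {
            break;
        }
        offset += 20; // assumed page size
    }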

View File

@@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@@ -47,10 +46,10 @@ public class VidbleRipper extends AlbumRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info("Retrieving " + this.url);
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm()); sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
if (albumDoc == null) { if (albumDoc == null) {
albumDoc = Jsoup.connect(this.url.toExternalForm()).get(); albumDoc = getDocument(this.url);
} }
Elements els = albumDoc.select("#ContentPlaceHolder1_thumbs"); Elements els = albumDoc.select("#ContentPlaceHolder1_thumbs");
if (els.size() == 0) { if (els.size() == 0) {

View File

@@ -9,7 +9,6 @@ import java.util.regex.Pattern;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.HttpStatusException; import org.jsoup.HttpStatusException;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
@@ -48,10 +47,7 @@ public class VineRipper extends AlbumRipper {
try { try {
logger.info(" Retrieving " + theURL); logger.info(" Retrieving " + theURL);
sendUpdate(STATUS.LOADING_RESOURCE, theURL); sendUpdate(STATUS.LOADING_RESOURCE, theURL);
doc = Jsoup.connect(theURL) doc = getResponse(theURL, true).parse();
.ignoreContentType(true)
.timeout(5 * 1000)
.get();
} catch (HttpStatusException e) { } catch (HttpStatusException e) {
logger.debug("Hit end of pages at page " + page, e); logger.debug("Hit end of pages at page " + page, e);
break; break;

View File

@@ -7,11 +7,11 @@ import java.util.regex.Pattern;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.HttpStatusException; import org.jsoup.HttpStatusException;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AlbumRipper; import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
public class VineboxRipper extends AlbumRipper { public class VineboxRipper extends AlbumRipper {
@@ -39,9 +39,10 @@ public class VineboxRipper extends AlbumRipper {
while (true) { while (true) {
page++; page++;
String urlPaged = this.url.toExternalForm() + "?page=" + page; String urlPaged = this.url.toExternalForm() + "?page=" + page;
logger.info(" Retrieving " + urlPaged); logger.info("Retrieving " + urlPaged);
sendUpdate(STATUS.LOADING_RESOURCE, urlPaged);
try { try {
doc = Jsoup.connect(urlPaged).get(); doc = getDocument(urlPaged);
} catch (HttpStatusException e) { } catch (HttpStatusException e) {
logger.debug("Hit end of pages at page " + page, e); logger.debug("Hit end of pages at page " + page, e);
break; break;

View File

@@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
@@ -38,7 +37,7 @@ public class XhamsterRipper extends AlbumRipper {
String nextURL = this.url.toExternalForm(); String nextURL = this.url.toExternalForm();
while (nextURL != null) { while (nextURL != null) {
logger.info(" Retrieving " + nextURL); logger.info(" Retrieving " + nextURL);
Document doc = Jsoup.connect(nextURL).get(); Document doc = getDocument(nextURL);
for (Element thumb : doc.select("table.iListing div.img img")) { for (Element thumb : doc.select("table.iListing div.img img")) {
if (!thumb.hasAttr("src")) { if (!thumb.hasAttr("src")) {
continue; continue;

View File

@@ -8,7 +8,6 @@ import java.util.regex.Pattern;
import org.json.JSONException; import org.json.JSONException;
import org.json.JSONObject; import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import com.rarchives.ripme.ripper.VideoRipper; import com.rarchives.ripme.ripper.VideoRipper;
@@ -54,10 +53,8 @@ public class BeegRipper extends VideoRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info(" Retrieving " + this.url);
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Pattern p = Pattern.compile("^.*var qualityArr = (.*});.*$", Pattern.DOTALL); Pattern p = Pattern.compile("^.*var qualityArr = (.*});.*$", Pattern.DOTALL);
Matcher m = p.matcher(doc.html()); Matcher m = p.matcher(doc.html());
if (m.matches()) { if (m.matches()) {

View File

@@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@@ -52,9 +51,7 @@ public class GfycatRipper extends VideoRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info(" Retrieving " + this.url.toExternalForm());
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Elements videos = doc.select("source#mp4source"); Elements videos = doc.select("source#mp4source");
if (videos.size() == 0) { if (videos.size() == 0) {
throw new IOException("Could not find source#mp4source at " + url); throw new IOException("Could not find source#mp4source at " + url);

View File

@@ -57,9 +57,7 @@ public class PornhubRipper extends VideoRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info(" Retrieving " + this.url.toExternalForm());
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Pattern p = Pattern.compile("^.*var flashvars = (.*});.*$", Pattern.DOTALL); Pattern p = Pattern.compile("^.*var flashvars = (.*});.*$", Pattern.DOTALL);
Matcher m = p.matcher(doc.body().html()); Matcher m = p.matcher(doc.body().html());
if (m.matches()) { if (m.matches()) {

View File

@@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@@ -54,9 +53,7 @@ public class ViddmeRipper extends VideoRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info(" Retrieving " + this.url.toExternalForm());
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Elements videos = doc.select("meta[name=twitter:player:stream]"); Elements videos = doc.select("meta[name=twitter:player:stream]");
if (videos.size() == 0) { if (videos.size() == 0) {
throw new IOException("Could not find twitter:player:stream at " + url); throw new IOException("Could not find twitter:player:stream at " + url);

View File

@@ -6,7 +6,6 @@ import java.net.URL;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@@ -55,9 +54,7 @@ public class VineRipper extends VideoRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info(" Retrieving " + this.url.toExternalForm());
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Elements props = doc.select("meta[property=twitter:player:stream]"); Elements props = doc.select("meta[property=twitter:player:stream]");
if (props.size() == 0) { if (props.size() == 0) {
throw new IOException("Could not find meta property 'twitter:player:stream' at " + url); throw new IOException("Could not find meta property 'twitter:player:stream' at " + url);

View File

@@ -7,7 +7,6 @@ import java.net.URLDecoder;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document; import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.select.Elements; import org.jsoup.select.Elements;
@@ -55,10 +54,8 @@ public class XvideosRipper extends VideoRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info(" Retrieving " + this.url);
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Elements embeds = doc.select("embed"); Elements embeds = doc.select("embed");
if (embeds.size() == 0) { if (embeds.size() == 0) {
throw new IOException("Could not find Embed code at " + url); throw new IOException("Could not find Embed code at " + url);

View File

@@ -54,10 +54,8 @@ public class YoupornRipper extends VideoRipper {
@Override @Override
public void rip() throws IOException { public void rip() throws IOException {
logger.info(" Retrieving " + this.url.toExternalForm()); logger.info(" Retrieving " + this.url);
Document doc = Jsoup.connect(this.url.toExternalForm()) Document doc = getDocument(this.url);
.userAgent(USER_AGENT)
.get();
Elements videos = doc.select("video"); Elements videos = doc.select("video");
if (videos.size() == 0) { if (videos.size() == 0) {
throw new IOException("Could not find Embed code at " + url); throw new IOException("Could not find Embed code at " + url);

View File

@@ -11,6 +11,9 @@ download.retries = 1
# File download timeout (in milliseconds) # File download timeout (in milliseconds)
download.timeout = 60000 download.timeout = 60000
# Page download timeout (in milliseconds)
page.timeout = 5000
# Maximum size of downloaded files in bytes (required) # Maximum size of downloaded files in bytes (required)
download.max_size = 104857600 download.max_size = 104857600