Using new wrapper for HTTP requests, started abstract classes to simplify rippers

parent 1b20e98f8f
commit b7397cd31e
@@ -0,0 +1,80 @@
+package com.rarchives.ripme.ripper;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+
+import org.jsoup.nodes.Document;
+
+import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Utils;
+
+public abstract class AbstractMultiPageRipper extends AlbumRipper {
+
+    public AbstractMultiPageRipper(URL url) throws IOException {
+        super(url);
+    }
+
+    public abstract String getDomain();
+    public abstract String getHost();
+
+    public abstract Document getFirstPage() throws IOException;
+    public abstract Document getNextPage(Document doc) throws IOException;
+    public abstract List<String> getURLsFromPage(Document page);
+    public abstract void downloadURL(URL url, int index);
+
+    public boolean keepSortOrder() {
+        return true;
+    }
+
+    @Override
+    public boolean canRip(URL url) {
+        return url.getHost().endsWith(getDomain());
+    }
+
+    @Override
+    public URL sanitizeURL(URL url) throws MalformedURLException {
+        return url;
+    }
+
+    @Override
+    public void rip() throws IOException {
+        int index = 0;
+        logger.info("Retrieving " + this.url);
+        sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
+        Document doc = getFirstPage();
+
+        while (doc != null) {
+            List<String> imageURLs = getURLsFromPage(doc);
+
+            if (imageURLs.size() == 0) {
+                throw new IOException("No images found at " + this.url);
+            }
+
+            for (String imageURL : imageURLs) {
+                if (isStopped()) {
+                    logger.info("Interrupted");
+                    break;
+                }
+                index += 1;
+                downloadURL(new URL(imageURL), index);
+            }
+            try {
+                doc = getNextPage(doc);
+            } catch (IOException e) {
+                logger.info("Can't get next page: " + e.getMessage());
+                break;
+            }
+        }
+        waitForThreads();
+    }
+
+    public String getPrefix(int index) {
+        String prefix = "";
+        if (keepSortOrder() && Utils.getConfigBoolean("download.save_order", true)) {
+            prefix = String.format("%03d_", index);
+        }
+        return prefix;
+    }
+}
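
Annotation: the new AbstractMultiPageRipper is a template method. rip() owns the whole download loop (status updates, stop checks, indexing, waiting on the thread pool) and subclasses only describe the site. A minimal sketch of what a subclass might look like; ExampleRipper, example.com, and the img.full selector are invented for illustration and are not part of this commit:

    // Hypothetical subclass of the new abstract class; site and selector are invented.
    package com.rarchives.ripme.ripper.rippers;

    import java.io.IOException;
    import java.net.URL;
    import java.util.ArrayList;
    import java.util.List;

    import org.jsoup.nodes.Document;
    import org.jsoup.nodes.Element;

    import com.rarchives.ripme.ripper.AbstractMultiPageRipper;
    import com.rarchives.ripme.utils.Http;

    public class ExampleRipper extends AbstractMultiPageRipper {

        public ExampleRipper(URL url) throws IOException {
            super(url);
        }

        @Override
        public String getHost()   { return "example"; }
        @Override
        public String getDomain() { return "example.com"; }

        @Override
        public Document getFirstPage() throws IOException {
            return Http.url(this.url).get();
        }

        @Override
        public Document getNextPage(Document doc) throws IOException {
            // Throwing IOException is the end-of-pagination signal;
            // the rip() loop in the base class catches it and stops.
            throw new IOException("No next page");
        }

        @Override
        public List<String> getURLsFromPage(Document page) {
            List<String> urls = new ArrayList<String>();
            for (Element img : page.select("img.full")) {
                urls.add(img.attr("src"));
            }
            return urls;
        }

        @Override
        public void downloadURL(URL url, int index) {
            addURLToDownload(url, getPrefix(index));
        }
    }
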
@@ -12,12 +12,7 @@ import java.util.Observable;
 
 import org.apache.log4j.FileAppender;
 import org.apache.log4j.Logger;
-import org.jsoup.Connection;
-import org.jsoup.Connection.Method;
-import org.jsoup.Connection.Response;
 import org.jsoup.HttpStatusException;
-import org.jsoup.Jsoup;
-import org.jsoup.nodes.Document;
 
 import com.rarchives.ripme.ui.RipStatusHandler;
 import com.rarchives.ripme.ui.RipStatusMessage;
@@ -33,8 +28,6 @@ public abstract class AbstractRipper
     public static final String USER_AGENT =
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:29.0) Gecko/20100101 Firefox/29.0";
 
-    public static final int TIMEOUT = Utils.getConfigInteger("page.timeout", 5 * 1000);
-
     protected URL url;
     protected File workingDir;
     protected DownloadThreadPool threadPool;
@@ -61,10 +54,6 @@ public abstract class AbstractRipper
         }
     }
 
-    protected int getTimeout() {
-        return TIMEOUT;
-    }
-
    /**
     * Ensures inheriting ripper can rip this URL, raises exception if not.
     * Otherwise initializes working directory and thread pool.
@@ -339,6 +328,16 @@ public abstract class AbstractRipper
             }
         }
     }
 
+    public boolean sleep(int milliseconds) {
+        try {
+            Thread.sleep(milliseconds);
+            return true;
+        } catch (InterruptedException e) {
+            logger.error("Interrupted while waiting to load next page", e);
+            return false;
+        }
+    }
+
     public void setBytesTotal(int bytes) {
         // Do nothing
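
Annotation: the new sleep(int) helper returns false when interrupted instead of forcing every ripper to wrap Thread.sleep in its own try/catch. Callers can turn an interruption into an end-of-rip condition in one line, exactly as DeviantartRipper.getNextPage does later in this commit:

    if (!sleep(SLEEP_TIME)) {
        throw new IOException("Interrupted while waiting to load next page: " + nextPage);
    }
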
@@ -348,82 +347,5 @@ public abstract class AbstractRipper
     }
 
     // Thar be overloaded methods afoot
-    public Document getDocument(URL url) throws IOException {
-        return getDocument(url.toExternalForm());
-    }
-    public Document getDocument(String url) throws IOException {
-        return getResponse(url).parse();
-    }
-    public Document getDocument(String url, boolean ignoreContentType) throws IOException {
-        return getResponse(url, ignoreContentType).parse();
-    }
-    public Document getDocument(String url, Map<String,String> cookies) throws IOException {
-        return getResponse(url, cookies).parse();
-    }
-    public Document getDocument(String url, String referrer, Map<String,String> cookies) throws IOException {
-        return getResponse(url, Method.GET, referrer, cookies).parse();
-    }
-    public Response getResponse(String url) throws IOException {
-        return getResponse(url, Method.GET, USER_AGENT, null, null, false);
-    }
-    public Response getResponse(URL url) throws IOException {
-        return getResponse(url.toExternalForm());
-    }
-    public Response getResponse(String url, String referrer) throws IOException {
-        return getResponse(url, Method.GET, USER_AGENT, referrer, null, false);
-    }
-    public Response getResponse(URL url, boolean ignoreContentType) throws IOException {
-        return getResponse(url.toExternalForm(), Method.GET, USER_AGENT, null, null, ignoreContentType);
-    }
-    public Response getResponse(String url, Map<String, String> cookies) throws IOException {
-        return getResponse(url, Method.GET, USER_AGENT, cookies);
-    }
-    public Response getResponse(String url, String referrer, Map<String, String> cookies) throws IOException {
-        return getResponse(url, Method.GET, referrer, cookies);
-    }
-    public Response getResponse(String url, Method method) throws IOException {
-        return getResponse(url, method, USER_AGENT, null, null, false);
-    }
-    public Response getResponse(String url, Method method, String referrer, Map<String,String> cookies) throws IOException {
-        return getResponse(url, method, USER_AGENT, referrer, cookies, false);
-    }
-    public Response getResponse(String url, boolean ignoreContentType) throws IOException {
-        return getResponse(url, Method.GET, USER_AGENT, null, null, ignoreContentType);
-    }
-    public Response getResponse(String url, Method method, boolean ignoreContentType) throws IOException {
-        return getResponse(url, method, USER_AGENT, null, null, false);
-    }
-
-    public Response getResponse(String url,
-                                Method method,
-                                String userAgent,
-                                String referrer,
-                                Map<String,String> cookies,
-                                boolean ignoreContentType)
-                                throws IOException {
-        Connection connection = Jsoup.connect(url);
-
-        connection.method( (method == null) ? Method.GET : method);
-        connection.userAgent( (userAgent == null) ? USER_AGENT : userAgent);
-        connection.ignoreContentType(ignoreContentType);
-        connection.timeout(getTimeout());
-        connection.maxBodySize(0);
-
-        if (cookies != null)  { connection.cookies(cookies);   }
-        if (referrer != null) { connection.referrer(referrer); }
-
-        Response response = null;
-        int retries = Utils.getConfigInteger("download.retries", 1);;
-        while (retries >= 0) {
-            retries--;
-            try {
-                response = connection.execute();
-            } catch (IOException e) {
-                logger.warn("Error while loading " + url, e);
-                continue;
-            }
-        }
-        return response;
-    }
-
 }
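
Annotation: all of the removed getDocument/getResponse overloads are superseded by the fluent Http wrapper in com.rarchives.ripme.utils. The wrapper's internals are not part of this diff, but from the call sites converted below its usage pattern is:

    // Builder-style requests, as used by the converted rippers in this commit.
    Document doc = Http.url(url)
                       .referrer(this.url)
                       .cookies(cookies)
                       .get();              // execute GET and parse into a jsoup Document

    Response resp = Http.url(url)
                        .ignoreContentType()
                        .data(postData)
                        .method(Method.POST)
                        .response();        // execute and return the raw jsoup Response

    JSONObject json = Http.url(gwURL).getJSON();  // fetch and parse a JSON body
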
@@ -0,0 +1,70 @@
+package com.rarchives.ripme.ripper;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+
+import org.jsoup.nodes.Document;
+
+import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Utils;
+
+public abstract class AbstractSinglePageRipper extends AlbumRipper {
+
+    public AbstractSinglePageRipper(URL url) throws IOException {
+        super(url);
+    }
+
+    public abstract String getDomain();
+    public abstract String getHost();
+
+    public abstract Document getFirstPage() throws IOException;
+    public abstract List<String> getURLsFromPage(Document page);
+    public abstract void downloadURL(URL url, int index);
+
+    public boolean keepSortOrder() {
+        return false;
+    }
+
+    @Override
+    public boolean canRip(URL url) {
+        return url.getHost().endsWith(getDomain());
+    }
+
+    @Override
+    public URL sanitizeURL(URL url) throws MalformedURLException {
+        return url;
+    }
+
+    @Override
+    public void rip() throws IOException {
+        int index = 0;
+        logger.info("Retrieving " + this.url);
+        sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
+        Document doc = getFirstPage();
+        List<String> imageURLs = getURLsFromPage(doc);
+
+        if (imageURLs.size() == 0) {
+            throw new IOException("No images found at " + this.url);
+        }
+
+        for (String imageURL : imageURLs) {
+            if (isStopped()) {
+                logger.info("Interrupted");
+                break;
+            }
+            index += 1;
+            downloadURL(new URL(imageURL), index);
+        }
+        waitForThreads();
+    }
+
+    public String getPrefix(int index) {
+        String prefix = "";
+        if (keepSortOrder() && Utils.getConfigBoolean("download.save_order", true)) {
+            prefix = String.format("%03d_", index);
+        }
+        return prefix;
+    }
+}
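
Annotation: AbstractSinglePageRipper is the one-pass variant of the same template: no getNextPage(), a single sweep over getURLsFromPage(), and keepSortOrder() defaulting to false (versus true in the multi-page class). When sort order is kept and download.save_order is enabled, getPrefix zero-pads the index so files sort in album order regardless of which download finishes first:

    String.format("%03d_", 7);    // -> "007_"
    String.format("%03d_", 123);  // -> "123_"
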
@@ -3,22 +3,18 @@ package com.rarchives.ripme.ripper.rippers;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 
-import com.rarchives.ripme.ripper.AlbumRipper;
-import com.rarchives.ripme.utils.Utils;
+import com.rarchives.ripme.ripper.AbstractSinglePageRipper;
+import com.rarchives.ripme.utils.Http;
 
-public class ButttoucherRipper extends AlbumRipper {
-
-    private static final String DOMAIN = "butttoucher.com",
-                                HOST   = "butttoucher";
-
-    private Document albumDoc = null;
+public class ButttoucherRipper extends AbstractSinglePageRipper {
 
     public ButttoucherRipper(URL url) throws IOException {
         super(url);
@@ -26,9 +22,13 @@ public class ButttoucherRipper extends AlbumRipper {
 
     @Override
     public String getHost() {
-        return HOST;
+        return "butttoucher";
     }
+    @Override
+    public String getDomain() {
+        return "butttoucher.com";
+    }
 
     @Override
     public String getGID(URL url) throws MalformedURLException {
         Pattern p; Matcher m;
@@ -45,38 +45,26 @@ public class ButttoucherRipper extends AlbumRipper {
     }
 
     @Override
-    public void rip() throws IOException {
-        logger.info("Retrieving " + this.url);
-        if (albumDoc == null) {
-            albumDoc = getDocument(this.url);
-        }
-        int index = 0;
-        for (Element thumb : albumDoc.select("div.image-gallery > a > img")) {
+    public Document getFirstPage() throws IOException {
+        return Http.url(this.url).get();
+    }
+
+    @Override
+    public List<String> getURLsFromPage(Document page) {
+        List<String> thumbs = new ArrayList<String>();
+        for (Element thumb : page.select("div.image-gallery > a > img")) {
             if (!thumb.hasAttr("src")) {
                 continue;
             }
             String smallImage = thumb.attr("src");
-            String image = smallImage.replace("m.", ".");
-            index += 1;
-            String prefix = "";
-            if (Utils.getConfigBoolean("download.save_order", true)) {
-                prefix = String.format("%03d_", index);
-            }
-            addURLToDownload(new URL(image), prefix);
+            thumbs.add(smallImage.replace("m.", "."));
         }
-        waitForThreads();
-    }
-
-    public boolean canRip(URL url) {
-        if (!url.getHost().endsWith(DOMAIN)) {
-            return false;
-        }
-        return true;
-    }
+        return thumbs;
+    }
 
     @Override
-    public URL sanitizeURL(URL url) throws MalformedURLException {
-        return url;
+    public void downloadURL(URL url, int index) {
+        addURLToDownload(url, getPrefix(index));
     }
 
 }
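
Annotation: after conversion, ButttoucherRipper is reduced to page fetching plus URL extraction; indexing, prefixing, canRip/sanitizeURL defaults, and waitForThreads() all live in the base class. The same three-method pattern (getFirstPage / getURLsFromPage / downloadURL) repeats in every ripper converted below.
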
@@ -3,18 +3,18 @@ package com.rarchives.ripme.ripper.rippers;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
-import java.util.HashSet;
-import java.util.Set;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 
-import com.rarchives.ripme.ripper.AlbumRipper;
-import com.rarchives.ripme.utils.Utils;
+import com.rarchives.ripme.ripper.AbstractSinglePageRipper;
+import com.rarchives.ripme.utils.Http;
 
-public class ChanRipper extends AlbumRipper {
+public class ChanRipper extends AbstractSinglePageRipper {
 
     public ChanRipper(URL url) throws IOException {
         super(url);
@@ -43,10 +43,6 @@ public class ChanRipper extends AlbumRipper {
                 || url.toExternalForm().contains("/thread/")); // 4chan
     }
 
-    public URL sanitizeURL(URL url) throws MalformedURLException {
-        return url;
-    }
-
     @Override
     public String getGID(URL url) throws MalformedURLException {
         Pattern p; Matcher m;
@@ -74,13 +70,20 @@ public class ChanRipper extends AlbumRipper {
     }
 
     @Override
-    public void rip() throws IOException {
-        Set<String> attempted = new HashSet<String>();
-        int index = 0;
+    public String getDomain() {
+        return this.url.getHost();
+    }
+
+    @Override
+    public Document getFirstPage() throws IOException {
+        return Http.url(this.url).get();
+    }
+
+    @Override
+    public List<String> getURLsFromPage(Document page) {
+        List<String> imageURLs = new ArrayList<String>();
         Pattern p; Matcher m;
-        logger.info("Retrieving " + this.url);
-        Document doc = getDocument(this.url);
-        for (Element link : doc.select("a")) {
+        for (Element link : page.select("a")) {
             if (!link.hasAttr("href")) {
                 continue;
             }
@@ -104,20 +107,20 @@ public class ChanRipper extends AlbumRipper {
             if (image.startsWith("/")) {
                 image = "http://" + this.url.getHost() + image;
             }
-            if (attempted.contains(image)) {
+            // Don't download the same URL twice
+            if (imageURLs.contains(image)) {
                 logger.debug("Already attempted: " + image);
                 continue;
             }
-            index += 1;
-            String prefix = "";
-            if (Utils.getConfigBoolean("download.save_order", true)) {
-                prefix = String.format("%03d_", index);
-            }
-            addURLToDownload(new URL(image), prefix);
-            attempted.add(image);
+            imageURLs.add(image);
             }
         }
-        waitForThreads();
+        return imageURLs;
+    }
+
+    @Override
+    public void downloadURL(URL url, int index) {
+        addURLToDownload(url, getPrefix(index));
     }
 
 }
@@ -6,148 +6,152 @@ import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.jsoup.Connection.Method;
 import org.jsoup.Connection.Response;
-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
-import com.rarchives.ripme.ripper.AlbumRipper;
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.ripper.AbstractMultiPageRipper;
 import com.rarchives.ripme.utils.Base64;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
-public class DeviantartRipper extends AlbumRipper {
+public class DeviantartRipper extends AbstractMultiPageRipper {
 
-    private static final String DOMAIN = "deviantart.com",
-                                HOST   = "deviantart";
-
     private static final int SLEEP_TIME = 2000;
 
     private Map<String,String> cookies = new HashMap<String,String>();
+    private Set<String> triedURLs = new HashSet<String>();
 
     public DeviantartRipper(URL url) throws IOException {
         super(url);
     }
 
     @Override
-    public boolean canRip(URL url) {
-        return url.getHost().endsWith(DOMAIN);
+    public String getHost() {
+        return "deviantart";
+    }
+    @Override
+    public String getDomain() {
+        return "deviantart.com";
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         String u = url.toExternalForm();
-        u = u.replaceAll("\\?.*", "");
+        String subdir = "/";
+        if (u.contains("catpath=scraps")) {
+            subdir = "scraps";
+        }
+        u = u.replaceAll("\\?.*", "?catpath=" + subdir);
         return new URL(u);
     }
 
     @Override
-    public void rip() throws IOException {
-        int index = 0;
-        String nextURL = this.url.toExternalForm();
+    public String getGID(URL url) throws MalformedURLException {
+        Pattern p = Pattern.compile("^https?://([a-zA-Z0-9\\-]+)\\.deviantart\\.com(/gallery)?/?(\\?.*)?$");
+        Matcher m = p.matcher(url.toExternalForm());
+        if (m.matches()) {
+            // Root gallery
+            if (url.toExternalForm().contains("catpath=scraps")) {
+                return m.group(1) + "_scraps";
+            }
+            else {
+                return m.group(1);
+            }
+        }
+        p = Pattern.compile("^https?://([a-zA-Z0-9\\-]{1,})\\.deviantart\\.com/gallery/([0-9]{1,}).*$");
+        m = p.matcher(url.toExternalForm());
+        if (m.matches()) {
+            // Subgallery
+            return m.group(1) + "_" + m.group(2);
+        }
+        throw new MalformedURLException("Expected URL format: http://username.deviantart.com/[/gallery/#####], got: " + url);
+    }
+
+    @Override
+    public Document getFirstPage() throws IOException {
         // Login
         try {
             cookies = loginToDeviantart();
         } catch (Exception e) {
             logger.warn("Failed to login: ", e);
         }
+        return Http.url(this.url)
+                   .cookies(cookies)
+                   .get();
+    }
 
-        // Iterate over every page
-        while (nextURL != null) {
-            logger.info("    Retrieving " + nextURL);
-            sendUpdate(STATUS.LOADING_RESOURCE, "Retrieving " + nextURL);
-            Document doc = getDocument(nextURL, cookies);
-
-            // Iterate over all thumbnails
-            for (Element thumb : doc.select("div.zones-container a.thumb")) {
-                if (isStopped()) {
-                    break;
-                }
-                Element img = thumb.select("img").get(0);
-                if (img.attr("transparent").equals("false")) {
-                    continue; // a.thumbs to other albums are invisible
-                }
-
-                index++;
-
-                String fullSize = null;
-                try {
-                    fullSize = thumbToFull(img.attr("src"), true);
-                } catch (Exception e) {
-                    logger.info("Attempting to get full size image from " + thumb.attr("href"));
-                    fullSize = smallToFull(img.attr("src"), thumb.attr("href"));
-                    if (fullSize == null) {
-                        continue;
-                    }
-                }
-
-                try {
-                    URL fullsizeURL = new URL(fullSize);
-                    String imageId = fullSize.substring(fullSize.lastIndexOf('-') + 1);
-                    imageId = imageId.substring(0, imageId.indexOf('.'));
-                    long imageIdLong = alphaToLong(imageId);
-                    addURLToDownload(fullsizeURL, String.format("%010d_", imageIdLong));
-                } catch (MalformedURLException e) {
-                    logger.error("[!] Invalid thumbnail image: " + fullSize);
-                    continue;
-                }
-            }
-
-            try {
-                Thread.sleep(SLEEP_TIME);
-            } catch (InterruptedException e) {
-                logger.error("[!] Interrupted while waiting for page to load", e);
+    @Override
+    public List<String> getURLsFromPage(Document page) {
+        List<String> imageURLs = new ArrayList<String>();
+
+        // Iterate over all thumbnails
+        for (Element thumb : page.select("div.zones-container a.thumb")) {
+            if (isStopped()) {
                 break;
             }
+            Element img = thumb.select("img").get(0);
+            if (img.attr("transparent").equals("false")) {
+                continue; // a.thumbs to other albums are invisible
+            }
 
-            // Find the next page
-            nextURL = null;
-            for (Element nextButton : doc.select("a.away")) {
-                if (nextButton.attr("href").contains("offset=" + index)) {
-                    nextURL = this.url.toExternalForm() + "?offset=" + index;
-                }
-            }
-            if (nextURL == null) {
-                logger.info("No next button found");
+            // Get full-sized image via helper methods
+            String fullSize = null;
+            try {
+                fullSize = thumbToFull(img.attr("src"), true);
+            } catch (Exception e) {
+                logger.info("Attempting to get full size image from " + thumb.attr("href"));
+                fullSize = smallToFull(img.attr("src"), thumb.attr("href"));
             }
+
+            if (fullSize == null) {
+                continue;
+            }
+            if (triedURLs.contains(fullSize)) {
+                logger.warn("Already tried to download " + fullSize);
+                continue;
+            }
+            triedURLs.add(fullSize);
+            imageURLs.add(fullSize);
         }
-        waitForThreads();
+        return imageURLs;
+    }
+
+    @Override
+    public Document getNextPage(Document page) throws IOException {
+        Elements nextButtons = page.select("li.next > a");
+        if (nextButtons.size() == 0) {
+            throw new IOException("No next page found");
+        }
+        Element a = nextButtons.first();
+        if (a.hasClass("disabled")) {
+            throw new IOException("Hit end of pages");
+        }
+        String nextPage = a.attr("href");
+        if (nextPage.startsWith("/")) {
+            nextPage = "http://" + this.url.getHost() + nextPage;
+        }
+        if (!sleep(SLEEP_TIME)) {
+            throw new IOException("Interrupted while waiting to load next page: " + nextPage);
+        }
+        logger.info("Found next page: " + nextPage);
+        return Http.url(nextPage)
+                   .cookies(cookies)
+                   .get();
     }
 
-    /**
-     * Convert alpha-numeric string into a corresponding number
-     * @param alpha String to convert
-     * @return Numeric representation of 'alpha'
-     */
-    public static long alphaToLong(String alpha) {
-        long result = 0;
-        for (int i = 0; i < alpha.length(); i++) {
-            result += charToInt(alpha, i);
-        }
-        return result;
-    }
-
-    /**
-     * Convert character at index in a string 'text' to numeric form (base-36)
-     * @param text Text to retrieve the character from
-     * @param index Index of the desired character
-     * @return Number representing character at text[index]
-     */
-    private static int charToInt(String text, int index) {
-        char c = text.charAt(text.length() - index - 1);
-        c = Character.toLowerCase(c);
-        int number = "0123456789abcdefghijklmnopqrstuvwxyz".indexOf(c);
-        number *= Math.pow(36, index);
-        return number;
-    }
+    @Override
+    public void downloadURL(URL url, int index) {
+        addURLToDownload(url, getPrefix(index), "", this.url.toExternalForm(), cookies);
+    }
 
     /**
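
Annotation: DeviantartRipper's pagination now flows through the base class. getNextPage() either returns the next Document or throws, and the rip() loop in AbstractMultiPageRipper treats the exception as a normal stop:

    try {
        doc = getNextPage(doc);
    } catch (IOException e) {
        logger.info("Can't get next page: " + e.getMessage());
        break;
    }

Note the next-page selector also changed here, from the old a.away/offset bookkeeping to li.next > a, and the alphaToLong/charToInt base-36 prefix helpers were dropped along with the old rip() loop.
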
@@ -163,7 +167,6 @@ public class DeviantartRipper extends AlbumRipper {
         fields.remove(4);
         if (!fields.get(4).equals("f") && throwException) {
             // Not a full-size image
-            logger.warn("Can't get full size image from " + thumb);
             throw new Exception("Can't get full size image from " + thumb);
         }
         StringBuilder result = new StringBuilder();
@@ -187,27 +190,20 @@ public class DeviantartRipper extends AlbumRipper {
     public String smallToFull(String thumb, String page) {
         try {
             // Fetch the image page
-            Response resp = getResponse(page, Method.GET, USER_AGENT, this.url.toExternalForm(), cookies, false);
-            Map<String,String> cookies = resp.cookies();
-            cookies.putAll(this.cookies);
+            Response resp = Http.url(page)
+                                .referrer(this.url)
+                                .cookies(cookies)
+                                .response();
+            cookies.putAll(resp.cookies());
 
             // Try to find the "Download" box
             Elements els = resp.parse().select("a.dev-page-download");
             if (els.size() == 0) {
-                throw new IOException("no download page found");
+                throw new IOException("No download page found");
             }
             // Full-size image
             String fsimage = els.get(0).attr("href");
-            String prefix = "";
-            if (Utils.getConfigBoolean("download.save_order", true)) {
-                String imageId = fsimage.substring(fsimage.lastIndexOf('-') + 1);
-                imageId = imageId.substring(0, imageId.indexOf('.'));
-                prefix = String.format("%010d_", alphaToLong(imageId));
-            }
-            // Download it
-            addURLToDownload(new URL(fsimage), prefix, "", page, cookies);
-            return null;
+            return fsimage;
         } catch (IOException ioe) {
             try {
                 logger.info("Failed to get full size download image at " + page + " : '" + ioe.getMessage() + "'");
@@ -220,28 +216,6 @@ public class DeviantartRipper extends AlbumRipper {
         }
     }
 
-    @Override
-    public String getHost() {
-        return HOST;
-    }
-
-    @Override
-    public String getGID(URL url) throws MalformedURLException {
-        Pattern p = Pattern.compile("^https?://([a-zA-Z0-9\\-]{1,})\\.deviantart\\.com(/gallery)?/?$");
-        Matcher m = p.matcher(url.toExternalForm());
-        if (m.matches()) {
-            // Root gallery
-            return m.group(1);
-        }
-        p = Pattern.compile("^https?://([a-zA-Z0-9\\-]{1,})\\.deviantart\\.com/gallery/([0-9]{1,}).*$");
-        m = p.matcher(url.toExternalForm());
-        if (m.matches()) {
-            // Subgallery
-            return m.group(1) + "_" + m.group(2);
-        }
-        throw new MalformedURLException("Expected URL format: http://username.deviantart.com/[/gallery/#####], got: " + url);
-    }
-
     /**
      * Logs into deviant art. Required to rip full-size NSFW content.
      * @return Map of cookies containing session data.
@@ -254,7 +228,8 @@ public class DeviantartRipper extends AlbumRipper {
         if (username == null || password == null) {
             throw new IOException("could not find username or password in config");
         }
-        Response resp = getResponse("http://www.deviantart.com/");
+        Response resp = Http.url("http://www.deviantart.com/")
+                            .response();
         for (Element input : resp.parse().select("form#form-login input[type=hidden]")) {
             postData.put(input.attr("name"), input.attr("value"));
         }
@@ -263,17 +238,17 @@ public class DeviantartRipper extends AlbumRipper {
         postData.put("remember_me", "1");
 
         // Send login request
-        resp = Jsoup.connect("https://www.deviantart.com/users/login")
+        resp = Http.url("https://www.deviantart.com/users/login")
                     .userAgent(USER_AGENT)
                     .data(postData)
                     .cookies(resp.cookies())
                     .method(Method.POST)
-                    .execute();
+                    .response();
 
         // Assert we are logged in
         if (resp.hasHeader("Location") && resp.header("Location").contains("password")) {
             // Wrong password
-            throw new IOException("Wrong pasword");
+            throw new IOException("Wrong password");
         }
         if (resp.url().toExternalForm().contains("bad_form")) {
             throw new IOException("Login form was incorrectly submitted");
@@ -285,5 +260,4 @@ public class DeviantartRipper extends AlbumRipper {
         // We are logged in, save the cookies
         return resp.cookies();
     }
-
 }
@@ -3,6 +3,8 @@ package com.rarchives.ripme.ripper.rippers;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -10,14 +12,10 @@ import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
-import com.rarchives.ripme.ripper.AlbumRipper;
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
-import com.rarchives.ripme.utils.Utils;
+import com.rarchives.ripme.ripper.AbstractMultiPageRipper;
+import com.rarchives.ripme.utils.Http;
 
-public class DrawcrowdRipper extends AlbumRipper {
-
-    private static final String DOMAIN = "drawcrowd.com",
-                                HOST   = "drawcrowd";
+public class DrawcrowdRipper extends AbstractMultiPageRipper {
 
     public DrawcrowdRipper(URL url) throws IOException {
         super(url);
@@ -25,14 +23,11 @@ public class DrawcrowdRipper extends AlbumRipper {
 
     @Override
     public String getHost() {
-        return HOST;
+        return "drawcrowd";
     }
-
-    /**
-     * Reformat given URL into the desired format (all images on single page)
-     */
-    public URL sanitizeURL(URL url) throws MalformedURLException {
-        return url;
+    @Override
+    public String getDomain() {
+        return "drawcrowd.com";
     }
 
     @Override
@@ -58,46 +53,39 @@ public class DrawcrowdRipper extends AlbumRipper {
     }
 
     @Override
-    public void rip() throws IOException {
-        int index = 0;
-        sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
-        logger.info("Retrieving " + this.url);
-        Document albumDoc = getDocument(this.url);
-        while (true) {
-            if (isStopped()) {
-                break;
-            }
-            for (Element thumb : albumDoc.select("div.item.asset img")) {
-                String image = thumb.attr("src");
-                image = image
-                        .replaceAll("/medium/", "/large/")
-                        .replaceAll("/small/", "/large/");
-                index++;
-                String prefix = "";
-                if (Utils.getConfigBoolean("download.save_order", true)) {
-                    prefix = String.format("%03d_", index);
-                }
-                addURLToDownload(new URL(image), prefix);
-            }
-            Elements loadMore = albumDoc.select("a#load-more");
-            if (loadMore.size() == 0) {
-                break;
-            }
-            String nextURL = "http://drawcrowd.com" + loadMore.get(0).attr("href");
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException e) {
-                logger.error("Interrupted while waiting to load next page", e);
-                throw new IOException(e);
-            }
-            sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
-            albumDoc = getDocument(nextURL);
-        }
-        waitForThreads();
+    public Document getFirstPage() throws IOException {
+        return Http.url(this.url).get();
     }
 
-    public boolean canRip(URL url) {
-        return url.getHost().endsWith(DOMAIN);
+    @Override
+    public Document getNextPage(Document doc) throws IOException {
+        Elements loadMore = doc.select("a#load-more");
+        if (loadMore.size() == 0) {
+            throw new IOException("No next page found");
+        }
+        if (!sleep(1000)) {
+            throw new IOException("Interrupted while waiting for next page");
+        }
+        String nextPage = "http://drawcrowd.com" + loadMore.get(0).attr("href");
+        return Http.url(nextPage).get();
+    }
+
+    @Override
+    public List<String> getURLsFromPage(Document page) {
+        List<String> imageURLs = new ArrayList<String>();
+        for (Element thumb : page.select("div.item.asset img")) {
+            String image = thumb.attr("src");
+            image = image
+                    .replaceAll("/medium/", "/large/")
+                    .replaceAll("/small/", "/large/");
+            imageURLs.add(image);
+        }
+        return imageURLs;
+    }
+
+    @Override
+    public void downloadURL(URL url, int index) {
+        addURLToDownload(url, getPrefix(index));
     }
 
 }
@@ -16,6 +16,7 @@ import org.jsoup.select.Elements;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class EHentaiRipper extends AlbumRipper {
@@ -57,7 +58,9 @@ public class EHentaiRipper extends AlbumRipper {
         if (albumDoc == null) {
             sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
             logger.info("Retrieving " + url);
-            albumDoc = getDocument(url.toExternalForm(), cookies);
+            albumDoc = Http.url(url)
+                           .cookies(cookies)
+                           .get();
         }
         Elements elems = albumDoc.select("#gn");
         return HOST + "_" + elems.get(0).text();
@@ -96,7 +99,10 @@ public class EHentaiRipper extends AlbumRipper {
         if (albumDoc == null) {
             logger.info("    Retrieving album page " + nextUrl);
             sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
-            albumDoc = getDocument(nextUrl, this.url.toExternalForm(), cookies);
+            albumDoc = Http.url(nextUrl)
+                           .referrer(this.url)
+                           .cookies(cookies)
+                           .get();
         }
         // Check for rate limiting
         if (albumDoc.toString().contains("IP address will be automatically banned")) {
@@ -197,8 +203,10 @@ public class EHentaiRipper extends AlbumRipper {
 
         private void fetchImage() {
             try {
-                String u = this.url.toExternalForm();
-                Document doc = getDocument(u, u, cookies);
+                Document doc = Http.url(this.url)
+                                   .referrer(this.url)
+                                   .cookies(cookies)
+                                   .get();
                 // Check for rate limit
                 if (doc.toString().contains("IP address will be automatically banned")) {
                     if (this.retries == 0) {
@@ -12,6 +12,7 @@ import org.jsoup.nodes.Element;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class EightmusesRipper extends AlbumRipper {
@@ -40,7 +41,7 @@ public class EightmusesRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (albumDoc == null) {
-                albumDoc = getDocument(url);
+                albumDoc = Http.url(url).get();
             }
             Element titleElement = albumDoc.select("meta[name=description]").first();
             String title = titleElement.attr("content");
@@ -63,7 +64,7 @@ public class EightmusesRipper extends AlbumRipper {
         logger.info("    Retrieving " + url);
         sendUpdate(STATUS.LOADING_RESOURCE, url);
         if (albumDoc == null) {
-            albumDoc = getDocument(url);
+            albumDoc = Http.url(url).get();
         }
 
         int index = 0; // Both album index and image index
@@ -11,6 +11,7 @@ import org.jsoup.nodes.Element;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class FapprovedRipper extends AlbumRipper {
@@ -46,7 +47,9 @@ public class FapprovedRipper extends AlbumRipper {
             url = "http://fapproved.com/users/" + user + "/images?page=" + page;
             this.sendUpdate(STATUS.LOADING_RESOURCE, url);
             logger.info("    Retrieving " + url);
-            Document doc = getDocument(url, true);
+            Document doc = Http.url(url)
+                               .ignoreContentType()
+                               .get();
             for (Element image : doc.select("div.actual-image img")) {
                 String imageUrl = image.attr("src");
                 if (imageUrl.startsWith("//")) {
@@ -20,6 +20,7 @@ import org.jsoup.select.Elements;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.utils.Base64;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class FlickrRipper extends AlbumRipper {
@@ -61,7 +62,7 @@ public class FlickrRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (albumDoc == null) {
-                albumDoc = getDocument(url);
+                albumDoc = Http.url(url).get();
             }
             String user = url.toExternalForm();
             user = user.substring(user.indexOf("/photos/") + "/photos/".length());
|
|||||||
}
|
}
|
||||||
logger.info(" Retrieving " + nextURL);
|
logger.info(" Retrieving " + nextURL);
|
||||||
if (albumDoc == null) {
|
if (albumDoc == null) {
|
||||||
albumDoc = getDocument(nextURL);
|
albumDoc = Http.url(nextURL).get();
|
||||||
}
|
}
|
||||||
for (Element thumb : albumDoc.select("a[data-track=photo-click]")) {
|
for (Element thumb : albumDoc.select("a[data-track=photo-click]")) {
|
||||||
String imageTitle = null;
|
String imageTitle = null;
|
||||||
@ -259,7 +260,7 @@ public class FlickrRipper extends AlbumRipper {
|
|||||||
|
|
||||||
private Document getLargestImagePageDocument(URL url) throws IOException {
|
private Document getLargestImagePageDocument(URL url) throws IOException {
|
||||||
// Get current page
|
// Get current page
|
||||||
Document doc = getDocument(url);
|
Document doc = Http.url(url).get();
|
||||||
// Look for larger image page
|
// Look for larger image page
|
||||||
String largestImagePage = this.url.toExternalForm();
|
String largestImagePage = this.url.toExternalForm();
|
||||||
for (Element olSize : doc.select("ol.sizes-list > li > ol > li")) {
|
for (Element olSize : doc.select("ol.sizes-list > li > ol > li")) {
|
||||||
@@ -277,7 +278,7 @@ public class FlickrRipper extends AlbumRipper {
         }
         if (!largestImagePage.equals(this.url.toExternalForm())) {
             // Found larger image page, get it.
-            doc = getDocument(largestImagePage);
+            doc = Http.url(largestImagePage).get();
         }
         return doc;
     }
@@ -10,13 +10,13 @@ import java.util.regex.Pattern;
 
 import org.jsoup.Connection.Method;
 import org.jsoup.Connection.Response;
-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class GifyoRipper extends AlbumRipper {
 
@@ -50,7 +50,9 @@ public class GifyoRipper extends AlbumRipper {
         logger.info("    Retrieving " + this.url + "(page #" + page + ")");
         Response resp = null;
         if (page == 0) {
-            resp = getResponse(this.url, true);
+            resp = Http.url(this.url)
+                       .ignoreContentType()
+                       .response();
             cookies = resp.cookies();
         }
         else {
@@ -59,13 +61,12 @@ public class GifyoRipper extends AlbumRipper {
             postData.put("view", "gif");
             postData.put("layout", "grid");
             postData.put("page", Integer.toString(page));
-            resp = Jsoup.connect(this.url.toExternalForm())
-                        .ignoreContentType(true)
-                        .userAgent(USER_AGENT)
+            resp = Http.url(this.url)
+                       .ignoreContentType()
                        .data(postData)
                        .cookies(cookies)
                        .method(Method.POST)
-                        .execute();
+                       .response();
             cookies.putAll(resp.cookies());
         }
         Document doc = resp.parse();
@@ -12,6 +12,7 @@ import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class GirlsOfDesireRipper extends AlbumRipper {
     // All sleep times are in milliseconds
@@ -41,7 +42,7 @@ public class GirlsOfDesireRipper extends AlbumRipper {
         if (albumDoc == null) {
             logger.info("    Retrieving " + url.toExternalForm());
             sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
-            albumDoc = getDocument(url);
+            albumDoc = Http.url(url).get();
         }
         Elements elems = albumDoc.select(".albumName");
         return HOST + "_" + elems.first().text();
@@ -76,7 +77,7 @@ public class GirlsOfDesireRipper extends AlbumRipper {
         if (albumDoc == null) {
             logger.info("    Retrieving album page " + nextUrl);
             sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
-            albumDoc = getDocument(nextUrl);
+            albumDoc = Http.url(nextUrl).get();
         }
 
         // Find thumbnails
@@ -8,9 +8,9 @@ import java.util.regex.Pattern;
 
 import org.json.JSONArray;
 import org.json.JSONObject;
-import org.jsoup.Jsoup;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class GonewildRipper extends AlbumRipper {
@@ -49,7 +49,7 @@ public class GonewildRipper extends AlbumRipper {
                 + "?method=get_user"
                 + "&user=" + username
                 + "&count=" + count;
-        String gwURL, jsonString, imagePath;
+        String gwURL, imagePath;
         JSONArray posts, images;
         JSONObject json, post, image;
         while (true) {
@@ -57,8 +57,8 @@ public class GonewildRipper extends AlbumRipper {
             gwURL = baseGwURL
                   + "&start=" + start;
             start += count;
-            jsonString = getResponse(gwURL, true).body();
-            json = new JSONObject(jsonString);
+            json = Http.url(gwURL)
+                       .getJSON();
             if (json.has("error")) {
                 logger.error("Error while retrieving user posts:" + json.getString("error"));
                 break;
@@ -14,6 +14,7 @@ import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class HentaifoundryRipper extends AlbumRipper {
@@ -40,11 +41,12 @@ public class HentaifoundryRipper extends AlbumRipper {
         int index = 0;
 
         // Get cookies
-        Response resp = getResponse("http://www.hentai-foundry.com/");
+        Response resp = Http.url("http://www.hentai-foundry.com/").response();
         Map<String,String> cookies = resp.cookies();
-        resp = getResponse("http://www.hentai-foundry.com/?enterAgree=1&size=1500",
-                           "http://www.hentai-foundry.com/",
-                           cookies);
+        resp = Http.url("http://www.hentai-foundry.com/?enterAgree=1&size=1500")
+                   .referrer("http://www.hentai-foundry.com/")
+                   .cookies(cookies)
+                   .response();
         cookies = resp.cookies();
         logger.info("cookies: " + cookies);
 
@ -54,7 +56,10 @@ public class HentaifoundryRipper extends AlbumRipper {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
|
sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
|
||||||
Document doc = getDocument(nextURL, this.url.toExternalForm(), cookies);
|
Document doc = Http.url(nextURL)
|
||||||
|
.referrer(this.url)
|
||||||
|
.cookies(cookies)
|
||||||
|
.get();
|
||||||
for (Element thumb : doc.select("td > a:first-child")) {
|
for (Element thumb : doc.select("td > a:first-child")) {
|
||||||
if (isStopped()) {
|
if (isStopped()) {
|
||||||
break;
|
break;
|
||||||
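
[Note] The chained form makes the session handshake explicit where the old positional getResponse(url, referrer, cookies) overload hid it. The same pattern reduced to its shape; entryUrl and agreeUrl are placeholder names, not identifiers from this commit:

Response resp = Http.url(entryUrl).response();        // 1. get anonymous session cookies
resp = Http.url(agreeUrl)
           .referrer(entryUrl)
           .cookies(resp.cookies())
           .response();                               // 2. pass the age gate with them
Map<String, String> cookies = resp.cookies();         // 3. reuse for every page fetch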

@@ -11,6 +11,7 @@ import org.jsoup.nodes.Element;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class ImagearnRipper extends AlbumRipper {
@@ -41,7 +42,7 @@ public class ImagearnRipper extends AlbumRipper {
     }
 
     private URL getGalleryFromImage(URL url) throws IOException {
-        Document doc = getDocument(url);
+        Document doc = Http.url(url).get();
         for (Element link : doc.select("a[href~=^gallery\\.php.*$]")) {
             logger.info("LINK: " + link.toString());
             if (link.hasAttr("href")
@@ -59,7 +60,7 @@ public class ImagearnRipper extends AlbumRipper {
         int index = 0;
         logger.info("Retrieving " + this.url.toExternalForm());
         sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         for (Element thumb : doc.select("img.border")) {
             if (isStopped()) {
                 break;

@@ -13,6 +13,7 @@ import org.jsoup.select.Elements;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class ImagebamRipper extends AlbumRipper {
@@ -47,7 +48,7 @@ public class ImagebamRipper extends AlbumRipper {
         if (albumDoc == null) {
             logger.info(" Retrieving " + url.toExternalForm());
             sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
-            albumDoc = getDocument(url);
+            albumDoc = Http.url(url).get();
         }
         Elements elems = albumDoc.select("legend");
         String title = elems.first().text();
@@ -94,7 +95,9 @@ public class ImagebamRipper extends AlbumRipper {
             if (albumDoc == null) {
                 logger.info(" Retrieving album page " + nextUrl);
                 sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
-                albumDoc = getDocument(nextUrl, this.url.toExternalForm(), null);
+                albumDoc = Http.url(nextUrl)
+                               .referrer(this.url)
+                               .get();
             }
             // Find thumbnails
             Elements thumbs = albumDoc.select("div > a[target=_blank]:not(.footera)");
@@ -171,7 +174,7 @@ public class ImagebamRipper extends AlbumRipper {
 
         private void fetchImage() {
             try {
-                Document doc = getDocument(url);
+                Document doc = Http.url(url).get();
                 // Find image
                 Elements images = doc.select("td > img");
                 if (images.size() == 0) {

@@ -12,6 +12,7 @@ import org.jsoup.nodes.Element;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class ImagefapRipper extends AlbumRipper {
@@ -45,7 +46,7 @@ public class ImagefapRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (albumDoc == null) {
-                albumDoc = getDocument(url);
+                albumDoc = Http.url(url).get();
             }
             String title = albumDoc.title();
             Pattern p = Pattern.compile("^Porn pics of (.*) \\(Page 1\\)$");
@@ -94,7 +95,7 @@ public class ImagefapRipper extends AlbumRipper {
         sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
         logger.info("Retrieving " + this.url);
         if (albumDoc == null) {
-            albumDoc = getDocument(this.url);
+            albumDoc = Http.url(this.url).get();
         }
         while (true) {
             if (isStopped()) {

@@ -11,6 +11,7 @@ import org.json.JSONObject;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class ImagestashRipper extends AlbumRipper {
@@ -45,9 +46,7 @@ public class ImagestashRipper extends AlbumRipper {
             String nextURL = baseURL + "&page=" + page;
             logger.info("[ ] Retrieving " + nextURL);
             sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
-            String jsonText = getResponse(nextURL, true).body();
-            logger.info(jsonText);
-            JSONObject json = new JSONObject(jsonText);
+            JSONObject json = Http.url(nextURL).getJSON();
             JSONArray images = json.getJSONArray("images");
             for (int i = 0; i < images.length(); i++) {
                 if (isStopped()) {

@@ -13,6 +13,7 @@ import org.jsoup.select.Elements;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class ImagevenueRipper extends AlbumRipper {
@@ -60,7 +61,7 @@ public class ImagevenueRipper extends AlbumRipper {
         String nextUrl = this.url.toExternalForm();
         logger.info(" Retrieving album page " + nextUrl);
         sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
-        Document albumDoc = getDocument(nextUrl);
+        Document albumDoc = Http.url(nextUrl).get();
         // Find thumbnails
         Elements thumbs = albumDoc.select("a[target=_blank]");
         if (thumbs.size() == 0) {
@@ -115,7 +116,7 @@ public class ImagevenueRipper extends AlbumRipper {
         private void fetchImage() {
             try {
                 sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
-                Document doc = getDocument(this.url);
+                Document doc = Http.url(this.url).get();
                 // Find image
                 Elements images = doc.select("a > img");
                 if (images.size() == 0) {

@@ -12,6 +12,7 @@ import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class ImgboxRipper extends AlbumRipper {
@@ -36,7 +37,7 @@ public class ImgboxRipper extends AlbumRipper {
     @Override
     public void rip() throws IOException {
         sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm());
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Elements images = doc.select("div.boxed-content > a > img");
         if (images.size() == 0) {
             logger.error("No images found at " + this.url);

@@ -19,6 +19,7 @@ import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class ImgurRipper extends AlbumRipper {
@@ -83,7 +84,7 @@ public class ImgurRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (albumDoc == null) {
-                albumDoc = getDocument(url);
+                albumDoc = Http.url(url).get();
             }
             String title = albumDoc.title();
             if (!title.contains(" - Imgur")
@@ -259,7 +260,7 @@ public class ImgurRipper extends AlbumRipper {
     private void ripUserAccount(URL url) throws IOException {
         logger.info("Retrieving " + url);
         sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm());
-        Document doc = getDocument(url);
+        Document doc = Http.url(url).get();
         for (Element album : doc.select("div.cover a")) {
             stopCheck();
             if (!album.hasAttr("href")
@@ -289,8 +290,7 @@ public class ImgurRipper extends AlbumRipper {
             try {
                 page++;
                 String jsonUrlWithParams = jsonUrl + "?sort=0&order=1&album=0&page=" + page + "&perPage=60";
-                String jsonString = getResponse(jsonUrlWithParams, true).body();
-                JSONObject json = new JSONObject(jsonString);
+                JSONObject json = Http.url(jsonUrlWithParams).getJSON();
                 JSONObject jsonData = json.getJSONObject("data");
                 if (jsonData.has("count")) {
                     imagesTotal = jsonData.getInt("count");
@@ -327,7 +327,7 @@ public class ImgurRipper extends AlbumRipper {
             }
             pageURL += "page/" + page + "/miss?scrolled";
             logger.info(" Retrieving " + pageURL);
-            Document doc = getDocument(pageURL);
+            Document doc = Http.url(pageURL).get();
             Elements imgs = doc.select(".post img");
             for (Element img : imgs) {
                 String image = img.attr("src");

@@ -13,6 +13,7 @@ import org.jsoup.nodes.Element;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class InstagramRipper extends AlbumRipper {
 
@@ -62,7 +63,7 @@ public class InstagramRipper extends AlbumRipper {
     }
 
     private URL getUserPageFromImage(URL url) throws IOException {
-        Document doc = getDocument(url);
+        Document doc = Http.url(url).get();
        for (Element element : doc.select("meta[property='og:description']")) {
            String content = element.attr("content");
            if (content.endsWith("'s photo on Instagram")) {
@@ -75,7 +76,7 @@ public class InstagramRipper extends AlbumRipper {
     private String getUserID(URL url) throws IOException {
         logger.info("Retrieving " + url);
         this.sendUpdate(STATUS.LOADING_RESOURCE, url.toExternalForm());
-        Document doc = getDocument(url);
+        Document doc = Http.url(url).get();
         for (Element element : doc.select("input[id=user_public]")) {
             return element.attr("value");
         }
@@ -91,8 +92,7 @@ public class InstagramRipper extends AlbumRipper {
             String url = baseURL + params;
             this.sendUpdate(STATUS.LOADING_RESOURCE, url);
             logger.info(" Retrieving " + url);
-            String jsonString = getResponse(url, true).body();
-            JSONObject json = new JSONObject(jsonString);
+            JSONObject json = Http.url(url).getJSON();
             JSONArray datas = json.getJSONArray("data");
             String nextMaxID = "";
             if (datas.length() == 0) {

@@ -11,6 +11,7 @@ import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.rippers.ImgurRipper.ImgurAlbum;
 import com.rarchives.ripme.ripper.rippers.ImgurRipper.ImgurImage;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class IrarchivesRipper extends AlbumRipper {
@@ -22,11 +23,6 @@ public class IrarchivesRipper extends AlbumRipper {
         super(url);
     }
 
-    @Override
-    public int getTimeout() {
-        return 60 * 1000;
-    }
-
     @Override
     public boolean canRip(URL url) {
         return url.getHost().endsWith(DOMAIN);
@@ -48,8 +44,9 @@ public class IrarchivesRipper extends AlbumRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url);
-        String jsonString = getResponse(url, true).body();
-        JSONObject json = new JSONObject(jsonString);
+        JSONObject json = Http.url(url)
+                              .timeout(60 * 1000)
+                              .getJSON();
         JSONArray posts = json.getJSONArray("posts");
         if (posts.length() == 0) {
             logger.error("No posts found at " + this.url);
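
[Note] The removed getTimeout() override applied sixty seconds to every request this ripper made; the replacement scopes it to the one slow search call. The equivalent shape, with the value taken straight from the removed override:

JSONObject json = Http.url(url)
                      .timeout(60 * 1000) // was: getTimeout() { return 60 * 1000; }
                      .getJSON();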

@@ -18,6 +18,7 @@ import org.json.JSONObject;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class MediacrushRipper extends AlbumRipper {
@@ -67,9 +68,9 @@ public class MediacrushRipper extends AlbumRipper {
         String url = this.url.toExternalForm();
         logger.info(" Retrieving " + url);
         sendUpdate(STATUS.LOADING_RESOURCE, url);
-        String jsonString = null;
+        JSONObject json = null;
         try {
-            jsonString = getResponse(url, true).body();
+            json = Http.url(url).getJSON();
         } catch (Exception re) {
             // Check for >1024 bit encryption but in older versions of Java
             if (re.getCause().getCause() instanceof InvalidAlgorithmParameterException) {
@@ -96,7 +97,6 @@ public class MediacrushRipper extends AlbumRipper {
         }
 
         // Convert to JSON
-        JSONObject json = new JSONObject(jsonString);
         if (!json.has("files")) {
             sendUpdate(STATUS.RIP_ERRORED, "No files found at " + url);
             throw new IOException("Could not find any files at " + url);

@@ -8,11 +8,11 @@ import java.util.regex.Pattern;
 
 import org.json.JSONArray;
 import org.json.JSONObject;
-import org.jsoup.Connection.Response;
 import org.jsoup.nodes.Document;
 import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class MinusRipper extends AlbumRipper {
@@ -47,7 +47,7 @@ public class MinusRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (albumDoc == null) {
-                albumDoc = getDocument(url);
+                albumDoc = Http.url(url).get();
             }
             Elements titles = albumDoc.select("meta[property=og:title]");
             if (titles.size() > 0) {
@@ -129,8 +129,7 @@ public class MinusRipper extends AlbumRipper {
                     + user + "/shares.json/"
                     + page;
             logger.info(" Retrieving " + jsonUrl);
-            Response resp = getResponse(jsonUrl, true);
-            JSONObject json = new JSONObject(resp.body());
+            JSONObject json = Http.url(jsonUrl).getJSON();
             JSONArray galleries = json.getJSONArray("galleries");
             for (int i = 0; i < galleries.length(); i++) {
                 JSONObject gallery = galleries.getJSONObject(i);
@@ -151,7 +150,7 @@ public class MinusRipper extends AlbumRipper {
     private void ripAlbum(URL url, String subdir) throws IOException {
         logger.info(" Retrieving " + url.toExternalForm());
         if (albumDoc == null || !subdir.equals("")) {
-            albumDoc = getDocument(url);
+            albumDoc = Http.url(url).get();
         }
         Pattern p = Pattern.compile("^.*var gallerydata = (\\{.*\\});.*$", Pattern.DOTALL);
         Matcher m = p.matcher(albumDoc.data());

@@ -12,6 +12,7 @@ import org.jsoup.nodes.Element;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class MotherlessRipper extends AlbumRipper {
@@ -66,7 +67,9 @@ public class MotherlessRipper extends AlbumRipper {
             }
             logger.info("Retrieving " + nextURL);
             sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
-            Document doc = getDocument(nextURL, "http://motherless.com", null);
+            Document doc = Http.url(nextURL)
+                               .referrer("http://motherless.com")
+                               .get();
             for (Element thumb : doc.select("div.thumb a.img-container")) {
                 if (isStopped()) {
                     break;
@@ -118,7 +121,9 @@ public class MotherlessRipper extends AlbumRipper {
                 return;
             }
             String u = this.url.toExternalForm();
-            Document doc = getDocument(u, u, null);
+            Document doc = Http.url(u)
+                               .referrer(u)
+                               .get();
             Pattern p = Pattern.compile("^.*__fileurl = '([^']{1,})';.*$", Pattern.DOTALL);
             Matcher m = p.matcher(doc.outerHtml());
             if (m.matches()) {

@@ -15,6 +15,7 @@ import org.jsoup.select.Elements;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class NfsfwRipper extends AlbumRipper {
@@ -46,7 +47,7 @@ public class NfsfwRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (albumDoc == null) {
-                albumDoc = getDocument(url);
+                albumDoc = Http.url(url).get();
             }
             String title = albumDoc.select("h2").first().text().trim();
             return "nfsfw_" + Utils.filesystemSafe(title);
@@ -87,7 +88,7 @@ public class NfsfwRipper extends AlbumRipper {
             sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
             logger.info(" Retrieving " + nextURL);
             if (albumDoc == null) {
-                albumDoc = getDocument(nextURL);
+                albumDoc = Http.url(nextURL).get();
             }
             // Subalbums
             for (Element suba : albumDoc.select("td.IMG > a")) {
@@ -156,8 +157,9 @@ public class NfsfwRipper extends AlbumRipper {
         @Override
         public void run() {
             try {
-                String u = this.url.toExternalForm();
-                Document doc = getDocument(u, u, null);
+                Document doc = Http.url(this.url)
+                                   .referrer(this.url)
+                                   .get();
                 Elements images = doc.select(".gbBlock img");
                 if (images.size() == 0) {
                     logger.error("Failed to find image at " + this.url);

@@ -16,6 +16,7 @@ import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
+import com.rarchives.ripme.utils.Http;
 
 public class PhotobucketRipper extends AlbumRipper {
 
@@ -49,7 +50,7 @@ public class PhotobucketRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (pageResponse == null) {
-                pageResponse = getResponse(url);
+                pageResponse = Http.url(url).response();
             }
             Document albumDoc = pageResponse.parse();
             Elements els = albumDoc.select("div.libraryTitle > h1");
@@ -131,7 +132,7 @@ public class PhotobucketRipper extends AlbumRipper {
             if (pageIndex > 1 || pageResponse == null) {
                 url = theUrl + String.format("?sort=3&page=", pageIndex);
                 logger.info(" Retrieving " + url);
-                pageResponse = getResponse(url);
+                pageResponse = Http.url(url).response();
             }
             Document albumDoc = pageResponse.parse();
             // Retrieve JSON from request
@@ -189,9 +190,7 @@ public class PhotobucketRipper extends AlbumRipper {
                     + "&json=1";
             try {
                 logger.info("Loading " + apiUrl);
-                Document doc = getDocument(apiUrl, true);
-                String jsonString = doc.body().html().replace("&quot;", "\"");
-                JSONObject json = new JSONObject(jsonString);
+                JSONObject json = Http.url(apiUrl).getJSON();
                 JSONArray subalbums = json.getJSONObject("body").getJSONArray("subAlbums");
                 for (int i = 0; i < subalbums.length(); i++) {
                     String suburl =
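
[Note] Several hunks in this commit delete a replace("&quot;", "\"") cleanup line. The old code fetched JSON through jsoup's HTML pipeline, and serializing the parsed Document back out entity-escapes the quote characters with the jsoup version in use here. A hypothetical demonstration of that failure mode:

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

Document doc = Jsoup.parse("{\"images\": []}");
String body = doc.body().html();               // yields {&quot;images&quot;: []} - not valid JSON
String json = body.replace("&quot;", "\"");    // the old workaround
// getJSON() reads the raw response body instead, so no unescaping is needed.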

@@ -7,7 +7,6 @@ import java.net.URL;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
@@ -15,12 +14,12 @@ import org.jsoup.select.Elements;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class PornhubRipper extends AlbumRipper {
     // All sleep times are in milliseconds
     private static final int IMAGE_SLEEP_TIME = 1 * 1000;
-    private static final int TIMEOUT = 5 * 1000;
 
     private static final String DOMAIN = "pornhub.com", HOST = "Pornhub";
 
@@ -49,7 +48,7 @@ public class PornhubRipper extends AlbumRipper {
         if (albumDoc == null) {
             logger.info(" Retrieving " + url.toExternalForm());
             sendUpdate(STATUS.LOADING_RESOURCE, url.toString());
-            albumDoc = getDocument(url);
+            albumDoc = Http.url(url).get();
         }
         Elements elems = albumDoc.select(".photoAlbumTitleV2");
         return HOST + "_" + elems.get(0).text();
@@ -89,7 +88,9 @@ public class PornhubRipper extends AlbumRipper {
             if (albumDoc == null) {
                 logger.info(" Retrieving album page " + nextUrl);
                 sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
-                albumDoc = getDocument(nextUrl, this.url.toExternalForm(), null);
+                albumDoc = Http.url(nextUrl)
+                               .referrer(this.url)
+                               .get();
             }
 
             // Find thumbnails
@@ -146,8 +147,9 @@ public class PornhubRipper extends AlbumRipper {
 
         private void fetchImage() {
             try {
-                String u = this.url.toExternalForm();
-                Document doc = getDocument(u, u, null);
+                Document doc = Http.url(this.url)
+                                   .referrer(this.url)
+                                   .get();
 
                 // Find image
                 Elements images = doc.select("#photoImageSection img");

@@ -11,9 +11,9 @@ import java.util.regex.Pattern;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.json.JSONTokener;
-import org.jsoup.nodes.Document;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.RipUtils;
 import com.rarchives.ripme.utils.Utils;
 
@@ -113,27 +113,25 @@ public class RedditRipper extends AlbumRipper {
         lastRequestTime = System.currentTimeMillis();
 
         int attempts = 0;
-        Document doc = null;
         logger.info(" Retrieving " + url);
-        while(doc == null && attempts++ < 3) {
+        JSONObject json = null;
+        while(json == null && attempts++ < 3) {
             try {
-                doc = getResponse(url, true).parse();
+                json = Http.url(url).getJSON();
             } catch(SocketTimeoutException ex) {
                 if(attempts >= 3) throw ex;
                 logger.warn(String.format("[!] Connection timed out (attempt %d)", attempts));
             }
         }
 
-        String jsonString = doc.body().html().replaceAll("&quot;", "\"");
-
-        Object jsonObj = new JSONTokener(jsonString).nextValue();
+        Object jsonObj = new JSONTokener(json.toString()).nextValue();
         JSONArray jsonArray = new JSONArray();
         if (jsonObj instanceof JSONObject) {
             jsonArray.put( (JSONObject) jsonObj);
         } else if (jsonObj instanceof JSONArray){
             jsonArray = (JSONArray) jsonObj;
         } else {
-            logger.warn("[!] Unable to parse child: " + jsonString);
+            logger.warn("[!] Unable to parse child: " + json.toString());
         }
         return jsonArray;
     }

@@ -7,7 +7,6 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.json.JSONObject;
-import org.jsoup.Connection.Method;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
@@ -15,6 +14,7 @@ import org.jsoup.nodes.Element;
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ripper.DownloadThreadPool;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class SeeniveRipper extends AlbumRipper {
 
@@ -42,7 +42,9 @@ public class SeeniveRipper extends AlbumRipper {
     public void rip() throws IOException {
         String baseURL = this.url.toExternalForm();
         logger.info(" Retrieving " + baseURL);
-        Document doc = getDocument(baseURL, baseURL, null);
+        Document doc = Http.url(baseURL)
+                           .referrer(baseURL)
+                           .get();
         while (true) {
             if (isStopped()) {
                 break;
@@ -70,8 +72,9 @@ public class SeeniveRipper extends AlbumRipper {
             }
 
             logger.info("[ ] Retrieving " + baseURL + "/next/" + lastID);
-            String jsonString = getResponse(baseURL + "/next/" + lastID, Method.GET, USER_AGENT, baseURL, null, true).body();
-            JSONObject json = new JSONObject(jsonString);
+            JSONObject json = Http.url(baseURL + "/next/" + lastID)
+                                  .referrer(baseURL)
+                                  .getJSON();
             String html = json.getString("Html");
             if (html.equals("")) {
                 break;
@@ -111,7 +114,7 @@ public class SeeniveRipper extends AlbumRipper {
         @Override
         public void run() {
             try {
-                Document doc = getDocument(this.url);
+                Document doc = Http.url(this.url).get();
                 logger.info("[ ] Retreiving video page " + this.url);
                 sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
                 for (Element element : doc.select("source")) {

@@ -11,6 +11,7 @@ import org.jsoup.nodes.Element;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class SmuttyRipper extends AlbumRipper {
 
@@ -46,7 +47,9 @@ public class SmuttyRipper extends AlbumRipper {
             logger.info(" Retrieving " + url);
             Document doc;
             try {
-                doc = getResponse(url, true).parse();
+                doc = Http.url(url)
+                          .ignoreContentType()
+                          .get();
             } catch (IOException e) {
                 if (e.toString().contains("Status=404")) {
                     logger.info("No more pages to load");

@@ -13,6 +13,7 @@ import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class SupertangasRipper extends AlbumRipper {
 
@@ -47,7 +48,7 @@ public class SupertangasRipper extends AlbumRipper {
             try {
                 logger.info(" Retrieving " + theURL);
                 sendUpdate(STATUS.LOADING_RESOURCE, theURL);
-                doc = getDocument(theURL);
+                doc = Http.url(theURL).get();
             } catch (HttpStatusException e) {
                 logger.debug("Hit end of pages at page " + page, e);
                 break;

@@ -12,6 +12,7 @@ import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class TeenplanetRipper extends AlbumRipper {
@@ -38,7 +39,7 @@ public class TeenplanetRipper extends AlbumRipper {
         try {
             // Attempt to use album title as GID
             if (albumDoc == null) {
-                albumDoc = getDocument(url);
+                albumDoc = Http.url(url).get();
             }
             Elements elems = albumDoc.select("div.header > h2");
             return HOST + "_" + elems.get(0).text();
@@ -71,7 +72,7 @@ public class TeenplanetRipper extends AlbumRipper {
         logger.info("Retrieving " + this.url);
         sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
         if (albumDoc == null) {
-            albumDoc = getDocument(url);
+            albumDoc = Http.url(url).get();
         }
         for (Element thumb : albumDoc.select("#galleryImages > a > img")) {
             if (!thumb.hasAttr("src")) {

@@ -9,10 +9,10 @@ import java.util.regex.Pattern;
 import org.apache.commons.lang.StringUtils;
 import org.json.JSONArray;
 import org.json.JSONObject;
-import org.jsoup.nodes.Document;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class TumblrRipper extends AlbumRipper {
@@ -66,9 +66,8 @@ public class TumblrRipper extends AlbumRipper {
         checkURL += url.getHost();
         checkURL += "/info?api_key=" + API_KEY;
         try {
-            Document doc = getResponse(checkURL, true).parse();
-            String jsonString = doc.body().html().replaceAll("&quot;", "\"");
-            JSONObject json = new JSONObject(jsonString);
+            JSONObject json = Http.url(checkURL)
+                                  .getJSON();
             int status = json.getJSONObject("meta").getInt("status");
             return status == 200;
         } catch (IOException e) {
@@ -98,15 +97,14 @@ public class TumblrRipper extends AlbumRipper {
             String apiURL = getTumblrApiURL(mediaType, offset);
             logger.info("Retrieving " + apiURL);
             sendUpdate(STATUS.LOADING_RESOURCE, apiURL);
-            Document doc = getResponse(apiURL, true).parse();
+            JSONObject json = Http.url(apiURL).getJSON();
             try {
                 Thread.sleep(1000);
             } catch (InterruptedException e) {
                 logger.error("[!] Interrupted while waiting to load next album:", e);
                 break;
             }
-            String jsonString = doc.body().html().replaceAll("&quot;", "\"");
-            if (!handleJSON(jsonString)) {
+            if (!handleJSON(json)) {
                 // Returns false if an error occurs and we should stop.
                 break;
             }
@@ -119,12 +117,7 @@ public class TumblrRipper extends AlbumRipper {
         waitForThreads();
     }
 
-    private boolean handleJSON(String jsonString) {
-        JSONObject json = new JSONObject(jsonString);
-        if (json == null || !json.has("response")) {
-            logger.error("[!] JSON response from tumblr was invalid: " + jsonString);
-            return false;
-        }
+    private boolean handleJSON(JSONObject json) {
         JSONArray posts, photos;
         JSONObject post, photo;
         URL fileURL;
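
[Note] With handleJSON now taking a parsed JSONObject, the parse and the old null/"response" guard no longer happen inside the method; this hunk does not show the guard being re-added at the call site. If it is not restored elsewhere in the commit, a caller-side equivalent would look like this (illustrative only, not part of the commit):

JSONObject json = Http.url(apiURL).getJSON();
if (!json.has("response")) {
    logger.error("[!] JSON response from tumblr was invalid: " + json);
    break;  // mirrors the old handleJSON returning false
}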

@@ -12,10 +12,10 @@ import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
 import org.json.JSONTokener;
-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class TwitterRipper extends AlbumRipper {
@@ -71,13 +71,13 @@ public class TwitterRipper extends AlbumRipper {
     }
 
     private void getAccessToken() throws IOException {
-        Document doc = Jsoup.connect("https://api.twitter.com/oauth2/token")
-                .ignoreContentType(true)
+        Document doc = Http.url("https://api.twitter.com/oauth2/token")
+                .ignoreContentType()
                 .header("Authorization", "Basic " + authKey)
                 .header("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8")
                 .header("User-agent", "ripe and zipe")
                 .data("grant_type", "client_credentials")
                 .post();
         String body = doc.body().html().replaceAll("&quot;", "\"");
         try {
             JSONObject json = new JSONObject(body);
@@ -90,8 +90,8 @@ public class TwitterRipper extends AlbumRipper {
     }
 
     private void checkRateLimits(String resource, String api) throws IOException {
-        Document doc = Jsoup.connect("https://api.twitter.com/1.1/application/rate_limit_status.json?resources=" + resource)
-                .ignoreContentType(true)
+        Document doc = Http.url("https://api.twitter.com/1.1/application/rate_limit_status.json?resources=" + resource)
+                .ignoreContentType()
                 .header("Authorization", "Bearer " + accessToken)
                 .header("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8")
                 .header("User-agent", "ripe and zipe")
@@ -143,12 +143,12 @@ public class TwitterRipper extends AlbumRipper {
     private List<JSONObject> getTweets(String url) throws IOException {
         List<JSONObject> tweets = new ArrayList<JSONObject>();
         logger.info(" Retrieving " + url);
-        Document doc = Jsoup.connect(url)
-                .ignoreContentType(true)
+        Document doc = Http.url(url)
+                .ignoreContentType()
                 .header("Authorization", "Bearer " + accessToken)
                 .header("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8")
                 .header("User-agent", "ripe and zipe")
                 .get();
         String body = doc.body().html().replaceAll("&quot;", "\"");
         Object jsonObj = new JSONTokener(body).nextValue();
         JSONArray statuses;

@@ -11,6 +11,7 @@ import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;
 
 public class VidbleRipper extends AlbumRipper {
@@ -49,7 +50,7 @@ public class VidbleRipper extends AlbumRipper {
         logger.info("Retrieving " + this.url);
         sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
         if (albumDoc == null) {
-            albumDoc = getDocument(this.url);
+            albumDoc = Http.url(this.url).get();
         }
         Elements els = albumDoc.select("#ContentPlaceHolder1_thumbs");
         if (els.size() == 0) {

@@ -9,10 +9,10 @@ import java.util.regex.Pattern;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.jsoup.HttpStatusException;
-import org.jsoup.nodes.Document;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class VineRipper extends AlbumRipper {
 
@@ -37,7 +37,7 @@ public class VineRipper extends AlbumRipper {
     public void rip() throws IOException {
         int page = 0;
         String baseURL = "https://vine.co/api/timelines/users/" + getGID(this.url);
-        Document doc;
+        JSONObject json = null;
         while (true) {
             page++;
             String theURL = baseURL;
@@ -47,14 +47,11 @@ public class VineRipper extends AlbumRipper {
             try {
                 logger.info(" Retrieving " + theURL);
                 sendUpdate(STATUS.LOADING_RESOURCE, theURL);
-                doc = getResponse(theURL, true).parse();
+                json = Http.url(theURL).getJSON();
             } catch (HttpStatusException e) {
                 logger.debug("Hit end of pages at page " + page, e);
                 break;
             }
-            String jsonString = doc.body().html();
-            jsonString = jsonString.replace("&quot;", "\"");
-            JSONObject json = new JSONObject(jsonString);
             JSONArray records = json.getJSONObject("data").getJSONArray("records");
             for (int i = 0; i < records.length(); i++) {
                 String videoURL = records.getJSONObject(i).getString("videoUrl");

@@ -12,6 +12,7 @@ import org.jsoup.nodes.Element;
 
 import com.rarchives.ripme.ripper.AlbumRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Http;
 
 public class VineboxRipper extends AlbumRipper {
 
@@ -42,7 +43,7 @@ public class VineboxRipper extends AlbumRipper {
             logger.info("Retrieving " + urlPaged);
             sendUpdate(STATUS.LOADING_RESOURCE, urlPaged);
             try {
-                doc = getDocument(this.url);;
+                doc = Http.url(this.url).get();
             } catch (HttpStatusException e) {
                 logger.debug("Hit end of pages at page " + page, e);
                 break;
@@ -18,6 +18,7 @@ import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;

 import com.rarchives.ripme.ripper.AlbumRipper;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;

 public class VkRipper extends AlbumRipper {
@@ -66,13 +67,11 @@ public class VkRipper extends AlbumRipper {
         postData.put("act", "load_videos_silent");
         postData.put("offset", "0");
         postData.put("oid", oid);
-        Document doc = Jsoup.connect(u)
-                            .header("Referer", this.url.toExternalForm())
-                            .ignoreContentType(true)
-                            .userAgent(USER_AGENT)
-                            .timeout(5000)
-                            .data(postData)
-                            .post();
+        Document doc = Http.url(u)
+                           .referrer(this.url)
+                           .ignoreContentType()
+                           .data(postData)
+                           .post();
         String[] jsonStrings = doc.toString().split("<!>");
         JSONObject json = new JSONObject(jsonStrings[jsonStrings.length - 1]);
         JSONArray videos = json.getJSONArray("all");
@@ -108,13 +107,11 @@ public class VkRipper extends AlbumRipper {
         postData.put("al", "1");
         postData.put("offset", Integer.toString(offset));
         postData.put("part", "1");
-        Document doc = Jsoup.connect(this.url.toExternalForm())
-                            .header("Referer", this.url.toExternalForm())
-                            .ignoreContentType(true)
-                            .userAgent(USER_AGENT)
-                            .timeout(5000)
-                            .data(postData)
-                            .post();
+        Document doc = Http.url(this.url)
+                           .referrer(this.url)
+                           .ignoreContentType()
+                           .data(postData)
+                           .post();

         String body = doc.toString();
         if (!body.contains("<div")) {
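For POSTs the migration maps the raw Jsoup chain onto the wrapper almost one-to-one; the userAgent and timeout arguments drop out because the wrapper sets both in its defaults. A sketch of the correspondence, using the same names as the VkRipper hunks above:

    // Jsoup (old)                              -> Http wrapper (new)
    // Jsoup.connect(u)                         -> Http.url(u)
    // .header("Referer", url.toExternalForm()) -> .referrer(this.url)   (URL overload)
    // .ignoreContentType(true)                 -> .ignoreContentType()
    // .userAgent(USER_AGENT)                   -> (wrapper default)
    // .timeout(5000)                           -> (wrapper default, "page.timeout" config)
    // .data(postData).post()                   -> .data(postData).post()
    Document doc = Http.url(u)
                       .referrer(this.url)
                       .ignoreContentType()
                       .data(postData)
                       .post();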
@@ -10,6 +10,7 @@ import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;

 import com.rarchives.ripme.ripper.AlbumRipper;
+import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.Utils;

 public class XhamsterRipper extends AlbumRipper {
@@ -37,7 +38,7 @@ public class XhamsterRipper extends AlbumRipper {
         String nextURL = this.url.toExternalForm();
         while (nextURL != null) {
             logger.info(" Retrieving " + nextURL);
-            Document doc = getDocument(nextURL);
+            Document doc = Http.url(nextURL).get();
             for (Element thumb : doc.select("table.iListing div.img img")) {
                 if (!thumb.hasAttr("src")) {
                     continue;
@@ -11,6 +11,7 @@ import org.json.JSONObject;
 import org.jsoup.nodes.Document;

 import com.rarchives.ripme.ripper.VideoRipper;
+import com.rarchives.ripme.utils.Http;

 public class BeegRipper extends VideoRipper {

@@ -54,7 +55,7 @@ public class BeegRipper extends VideoRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url);
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Pattern p = Pattern.compile("^.*var qualityArr = (.*});.*$", Pattern.DOTALL);
         Matcher m = p.matcher(doc.html());
         if (m.matches()) {
@@ -10,6 +10,7 @@ import org.jsoup.nodes.Document;
 import org.jsoup.select.Elements;

 import com.rarchives.ripme.ripper.VideoRipper;
+import com.rarchives.ripme.utils.Http;

 public class GfycatRipper extends VideoRipper {

@@ -51,7 +52,7 @@ public class GfycatRipper extends VideoRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url.toExternalForm());
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Elements videos = doc.select("source#mp4source");
         if (videos.size() == 0) {
             throw new IOException("Could not find source#mp4source at " + url);
@@ -9,11 +9,11 @@ import java.util.regex.Pattern;

 import org.json.JSONException;
 import org.json.JSONObject;
-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;

 import com.rarchives.ripme.ripper.VideoRipper;
 import com.rarchives.ripme.utils.AES;
+import com.rarchives.ripme.utils.Http;

 public class PornhubRipper extends VideoRipper {

@@ -57,7 +57,7 @@ public class PornhubRipper extends VideoRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url.toExternalForm());
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Pattern p = Pattern.compile("^.*var flashvars = (.*});.*$", Pattern.DOTALL);
         Matcher m = p.matcher(doc.body().html());
         if (m.matches()) {
@@ -10,6 +10,7 @@ import org.jsoup.nodes.Document;
 import org.jsoup.select.Elements;

 import com.rarchives.ripme.ripper.VideoRipper;
+import com.rarchives.ripme.utils.Http;

 public class ViddmeRipper extends VideoRipper {

@@ -53,7 +54,7 @@ public class ViddmeRipper extends VideoRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url.toExternalForm());
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Elements videos = doc.select("meta[name=twitter:player:stream]");
         if (videos.size() == 0) {
             throw new IOException("Could not find twitter:player:stream at " + url);
@@ -10,6 +10,7 @@ import org.jsoup.nodes.Document;
 import org.jsoup.select.Elements;

 import com.rarchives.ripme.ripper.VideoRipper;
+import com.rarchives.ripme.utils.Http;

 public class VineRipper extends VideoRipper {

@@ -54,7 +55,7 @@ public class VineRipper extends VideoRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url.toExternalForm());
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Elements props = doc.select("meta[property=twitter:player:stream]");
         if (props.size() == 0) {
             throw new IOException("Could not find meta property 'twitter:player:stream' at " + url);
@@ -6,10 +6,10 @@ import java.net.URL;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;

 import com.rarchives.ripme.ripper.VideoRipper;
+import com.rarchives.ripme.utils.Http;

 public class VkRipper extends VideoRipper {

@@ -59,9 +59,9 @@ public class VkRipper extends VideoRipper {
     }

     public static String getVideoURLAtPage(String url) throws IOException {
-        Document doc = Jsoup.connect(url)
+        Document doc = Http.url(url)
                            .userAgent(USER_AGENT)
                            .get();
         String html = doc.outerHtml();
         String videoURL = null;
         for (String quality : new String[] {"1080", "720", "480", "240"}) {
@@ -12,6 +12,7 @@ import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;

 import com.rarchives.ripme.ripper.VideoRipper;
+import com.rarchives.ripme.utils.Http;

 public class XvideosRipper extends VideoRipper {

@@ -55,7 +56,7 @@ public class XvideosRipper extends VideoRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url);
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Elements embeds = doc.select("embed");
         if (embeds.size() == 0) {
             throw new IOException("Could not find Embed code at " + url);
@@ -6,12 +6,12 @@ import java.net.URL;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

-import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;

 import com.rarchives.ripme.ripper.VideoRipper;
+import com.rarchives.ripme.utils.Http;

 public class YoupornRipper extends VideoRipper {

@@ -55,7 +55,7 @@ public class YoupornRipper extends VideoRipper {
     @Override
     public void rip() throws IOException {
         logger.info(" Retrieving " + this.url);
-        Document doc = getDocument(this.url);
+        Document doc = Http.url(this.url).get();
         Elements videos = doc.select("video");
         if (videos.size() == 0) {
             throw new IOException("Could not find Embed code at " + url);
src/main/java/com/rarchives/ripme/utils/Http.java (new file, 136 lines)
@@ -0,0 +1,136 @@
+package com.rarchives.ripme.utils;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.json.JSONObject;
+import org.jsoup.Connection;
+import org.jsoup.Connection.Method;
+import org.jsoup.Connection.Response;
+import org.jsoup.Jsoup;
+import org.jsoup.nodes.Document;
+
+import com.rarchives.ripme.ripper.AbstractRipper;
+
+/**
+ * Wrapper around the Jsoup connection methods.
+ *
+ * Benefit is retry logic.
+ */
+public class Http {
+
+    public static final int TIMEOUT = Utils.getConfigInteger("page.timeout", 5 * 1000);
+    private static final Logger logger = Logger.getLogger(AbstractRipper.class);
+
+    private int retries;
+    private String url;
+    private Connection connection;
+
+    // Constructors
+    public Http(String url) {
+        this.url = url;
+        defaultSettings();
+    }
+    public Http(URL url) {
+        this.url = url.toExternalForm();
+        defaultSettings();
+    }
+
+    public static Http url(String url) {
+        return new Http(url);
+    }
+    public static Http url(URL url) {
+        return new Http(url);
+    }
+
+    private void defaultSettings() {
+        this.retries = Utils.getConfigInteger("download.retries", 1);
+        connection = Jsoup.connect(this.url);
+        connection.userAgent(AbstractRipper.USER_AGENT);
+        connection.method(Method.GET);
+        connection.timeout(TIMEOUT);
+        connection.maxBodySize(0);
+
+    }
+
+    // Setters
+    public Http timeout(int timeout) {
+        connection.timeout(timeout);
+        return this;
+    }
+    public Http ignoreContentType() {
+        connection.ignoreContentType(true);
+        return this;
+    }
+    public Http referrer(String ref) {
+        connection.referrer(ref);
+        return this;
+    }
+    public Http referrer(URL ref) {
+        return referrer(ref.toExternalForm());
+    }
+    public Http userAgent(String ua) {
+        connection.userAgent(ua);
+        return this;
+    }
+    public Http retries(int tries) {
+        this.retries = tries;
+        return this;
+    }
+    public Http header(String name, String value) {
+        connection.header(name, value);
+        return this;
+    }
+    public Http cookies(Map<String,String> cookies) {
+        connection.cookies(cookies);
+        return this;
+    }
+    public Http data(Map<String,String> data) {
+        connection.data(data);
+        return this;
+    }
+    public Http data(String name, String value) {
+        Map<String,String> data = new HashMap<String,String>();
+        data.put(name, value);
+        return data(data);
+    }
+    public Http method(Method method) {
+        connection.method(method);
+        return this;
+    }
+
+    // Getters
+    public Document get() throws IOException {
+        connection.method(Method.GET);
+        return response().parse();
+    }
+
+    public Document post() throws IOException {
+        connection.method(Method.POST);
+        return response().parse();
+    }
+
+    public JSONObject getJSON() throws IOException {
+        ignoreContentType();
+        String jsonString = response().body().replace("&quot;", "\"");
+        return new JSONObject(jsonString);
+    }
+
+    public Response response() throws IOException {
+        Response response = null;
+        int retries = this.retries;
+        while (--retries >= 0) {
+            try {
+                response = connection.execute();
+                return response;
+            } catch (IOException e) {
+                logger.warn("Error while loading " + url, e);
+                continue;
+            }
+        }
+        throw new IOException("Failed to load " + url + " after " + this.retries + " attempts");
+    }
+}
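Taken together, typical call sites after this commit look like the sketch below (illustrative only; the URLs are hypothetical). One subtlety in response(): the counter is pre-decremented before the first try, so the "download.retries" value is the total number of attempts rather than the number of retries after a first failure; retries(1) means a single attempt.

    // Illustrative usage of the new wrapper; URLs are hypothetical.
    Document page = Http.url("http://example.com/album")
                        .retries(3)            // total attempts, per the pre-decrement above
                        .get();

    JSONObject json = Http.url("http://example.com/api")
                          .data("offset", "0") // sent as query params on a GET
                          .getJSON();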
@@ -8,13 +8,6 @@ import java.util.List;
 import com.rarchives.ripme.ripper.rippers.DeviantartRipper;

 public class DeviantartRipperTest extends RippersTest {

-    public void testAlphaSorting() {
-        String[] strings = new String[]{"a", "aa", "aaa", "d6hg2dz", "d6fspba", "d6fcvvr"};
-        for (String string : strings) {
-            System.err.println(string + ": " + DeviantartRipper.alphaToLong(string));
-        }
-    }
-
     public void testDeviantartAlbums() throws IOException {
         if (!DOWNLOAD_CONTENT) {