Removed SinglePage ripper

parent c166f93d57
commit b9e3d77449

This commit deletes AbstractSinglePageRipper, moves its former subclasses over to AbstractHTMLRipper, and gives AbstractHTMLRipper a default getNextPage() that throws IOException, so rippers for single-page sites simply never advance to a second page.
AbstractHTMLRipper.java
@@ -10,6 +10,9 @@ import org.jsoup.nodes.Document;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Utils;
 
+/**
+ * Simplified ripper, designed for ripping from sites by parsing HTML.
+ */
 public abstract class AbstractHTMLRipper extends AlbumRipper {
 
     public AbstractHTMLRipper(URL url) throws IOException {
@@ -20,7 +23,9 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
     public abstract String getHost();
 
     public abstract Document getFirstPage() throws IOException;
-    public abstract Document getNextPage(Document doc) throws IOException;
+    public Document getNextPage(Document doc) throws IOException {
+        throw new IOException("getNextPage not implemented");
+    }
     public abstract List<String> getURLsFromPage(Document page);
     public abstract void downloadURL(URL url, int index);
     public DownloadThreadPool getThreadPool() {
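With getNextPage() now concrete, a ripper for a single-page site implements only the remaining abstract methods and inherits the throwing default. A minimal sketch of a post-migration subclass, assuming the method set visible in this diff plus getDomain() and getPrefix() from the deleted base class (which AbstractHTMLRipper presumably mirrors); the class name, host, and CSS selector are hypothetical:

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;

public class ExampleRipper extends AbstractHTMLRipper {

    public ExampleRipper(URL url) throws IOException {
        super(url);
    }

    @Override
    public String getHost() {
        return "example"; // hypothetical
    }

    @Override
    public String getDomain() {
        return "example.com"; // hypothetical; abstract in the deleted base class
    }

    @Override
    public Document getFirstPage() throws IOException {
        // Http is RipMe's jsoup wrapper, as used by the rippers in this diff
        return Http.url(url).get();
    }

    // No getNextPage() override: the inherited default throws IOException,
    // so ripping ends after this single page.

    @Override
    public List<String> getURLsFromPage(Document page) {
        List<String> urls = new ArrayList<String>();
        for (Element img : page.select("img.full")) { // hypothetical selector
            urls.add(img.attr("src"));
        }
        return urls;
    }

    @Override
    public void downloadURL(URL url, int index) {
        addURLToDownload(url, getPrefix(index));
    }
}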
AbstractJSONRipper.java
@@ -10,6 +10,9 @@ import org.json.JSONObject;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Utils;
 
+/**
+ * Simplified ripper, designed for ripping from sites by parsing JSON.
+ */
 public abstract class AbstractJSONRipper extends AlbumRipper {
 
     public AbstractJSONRipper(URL url) throws IOException {
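AbstractJSONRipper's abstract methods sit outside the hunks of this commit. Assuming it mirrors AbstractHTMLRipper with org.json types in place of jsoup's Document, a subclass might look roughly like this; every method name, the Http#getJSON helper, and the JSON shape are assumptions, not confirmed by this diff:

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;

import com.rarchives.ripme.ripper.AbstractJSONRipper;
import com.rarchives.ripme.utils.Http;

public class ExampleJSONRipper extends AbstractJSONRipper {

    public ExampleJSONRipper(URL url) throws IOException {
        super(url);
    }

    public String getHost() {
        return "example"; // hypothetical
    }

    // Assumed JSON counterpart of getFirstPage()
    public JSONObject getFirstPage() throws IOException {
        return Http.url(url).getJSON(); // Http#getJSON is an assumption
    }

    // Assumed counterpart of getURLsFromPage(), reading {"images":[{"url":...}]}
    public List<String> getURLsFromJSON(JSONObject json) {
        List<String> urls = new ArrayList<String>();
        JSONArray images = json.getJSONArray("images"); // hypothetical shape
        for (int i = 0; i < images.length(); i++) {
            urls.add(images.getJSONObject(i).getString("url"));
        }
        return urls;
    }

    public void downloadURL(URL url, int index) {
        addURLToDownload(url, getPrefix(index));
    }
}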
AbstractSinglePageRipper.java (deleted)
@@ -1,70 +0,0 @@
-package com.rarchives.ripme.ripper;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.List;
-
-import org.jsoup.nodes.Document;
-
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
-import com.rarchives.ripme.utils.Utils;
-
-public abstract class AbstractSinglePageRipper extends AlbumRipper {
-
-    public AbstractSinglePageRipper(URL url) throws IOException {
-        super(url);
-    }
-
-    public abstract String getDomain();
-    public abstract String getHost();
-
-    public abstract Document getFirstPage() throws IOException;
-    public abstract List<String> getURLsFromPage(Document page);
-    public abstract void downloadURL(URL url, int index);
-
-    public boolean keepSortOrder() {
-        return true;
-    }
-
-    @Override
-    public boolean canRip(URL url) {
-        return url.getHost().endsWith(getDomain());
-    }
-
-    @Override
-    public URL sanitizeURL(URL url) throws MalformedURLException {
-        return url;
-    }
-
-    @Override
-    public void rip() throws IOException {
-        int index = 0;
-        logger.info("Retrieving " + this.url);
-        sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
-        Document doc = getFirstPage();
-        List<String> imageURLs = getURLsFromPage(doc);
-
-        if (imageURLs.size() == 0) {
-            throw new IOException("No images found at " + this.url);
-        }
-
-        for (String imageURL : imageURLs) {
-            if (isStopped()) {
-                logger.info("Interrupted");
-                break;
-            }
-            index += 1;
-            downloadURL(new URL(imageURL), index);
-        }
-        waitForThreads();
-    }
-
-    public String getPrefix(int index) {
-        String prefix = "";
-        if (keepSortOrder() && Utils.getConfigBoolean("download.save_order", true)) {
-            prefix = String.format("%03d_", index);
-        }
-        return prefix;
-    }
-}
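The deleted rip() above handles exactly one page; its replacement lives in AbstractHTMLRipper.rip(), which this commit does not touch and the diff therefore does not show. A sketch of how such a paging loop plausibly chains the methods, using only names that appear in this diff; the loop structure itself is an assumption:

// Presumed shape of AbstractHTMLRipper.rip(); not part of this commit's hunks.
public void rip() throws IOException {
    int index = 0;
    logger.info("Retrieving " + this.url);
    sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
    Document doc = getFirstPage();
    while (doc != null) {
        List<String> imageURLs = getURLsFromPage(doc);
        if (imageURLs.size() == 0) {
            throw new IOException("No images found at " + this.url);
        }
        for (String imageURL : imageURLs) {
            if (isStopped()) {
                logger.info("Interrupted");
                break;
            }
            index += 1;
            downloadURL(new URL(imageURL), index);
        }
        try {
            // Single-page rippers inherit the default getNextPage(), which
            // throws IOException and thereby ends the loop after one page.
            doc = getNextPage(doc);
        } catch (IOException e) {
            doc = null;
        }
    }
    waitForThreads();
}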
ButttoucherRipper.java
@@ -11,10 +11,10 @@ import java.util.regex.Pattern;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 
-import com.rarchives.ripme.ripper.AbstractSinglePageRipper;
+import com.rarchives.ripme.ripper.AbstractHTMLRipper;
 import com.rarchives.ripme.utils.Http;
 
-public class ButttoucherRipper extends AbstractSinglePageRipper {
+public class ButttoucherRipper extends AbstractHTMLRipper {
 
     public ButttoucherRipper(URL url) throws IOException {
         super(url);
ChanRipper.java
@@ -11,10 +11,10 @@ import java.util.regex.Pattern;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 
-import com.rarchives.ripme.ripper.AbstractSinglePageRipper;
+import com.rarchives.ripme.ripper.AbstractHTMLRipper;
 import com.rarchives.ripme.utils.Http;
 
-public class ChanRipper extends AbstractSinglePageRipper {
+public class ChanRipper extends AbstractHTMLRipper {
 
     public ChanRipper(URL url) throws IOException {
         super(url);
EightmusesRipper.java
@@ -16,11 +16,11 @@ import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
-import com.rarchives.ripme.ripper.AbstractSinglePageRipper;
+import com.rarchives.ripme.ripper.AbstractHTMLRipper;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Http;
 
-public class EightmusesRipper extends AbstractSinglePageRipper {
+public class EightmusesRipper extends AbstractHTMLRipper {
 
     private Document albumDoc = null;
     private Map<String,String> cookies = new HashMap<String,String>();
GirlsOfDesireRipper.java
@@ -12,14 +12,10 @@ import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
-import com.rarchives.ripme.ripper.AbstractSinglePageRipper;
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.ripper.AbstractHTMLRipper;
 import com.rarchives.ripme.utils.Http;
 
-public class GirlsOfDesireRipper extends AbstractSinglePageRipper {
+public class GirlsOfDesireRipper extends AbstractHTMLRipper {
-    // All sleep times are in milliseconds
-    private static final int IMAGE_SLEEP_TIME = 100;
-
     // Current HTML document
     private Document albumDoc = null;
@@ -65,7 +61,7 @@ public class GirlsOfDesireRipper extends AbstractSinglePageRipper {
                     + "http://www.girlsofdesire.org/galleries/<name>/"
                     + " Got: " + url);
     }
 
     @Override
     public Document getFirstPage() throws IOException {
         if (albumDoc == null) {
@@ -90,44 +86,7 @@ public class GirlsOfDesireRipper extends AbstractSinglePageRipper {
 
     @Override
     public void downloadURL(URL url, int index) {
-        addURLToDownload(url, getPrefix(index));
+        // Send referrer when downloading images
+        addURLToDownload(url, getPrefix(index), "", this.url.toExternalForm(), null);
     }
-
-    @Override
-    public void rip() throws IOException {
-        String nextUrl = this.url.toExternalForm();
-
-        if (albumDoc == null) {
-            logger.info(" Retrieving album page " + nextUrl);
-            sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
-            albumDoc = Http.url(nextUrl).get();
-        }
-
-        // Find thumbnails
-        Elements thumbs = albumDoc.select("td.vtop > a > img");
-        if (thumbs.size() == 0) {
-            logger.info("No images found at " + nextUrl);
-        }
-
-        // Iterate over images on page
-        for (Element thumb : thumbs) {
-            if (isStopped()) {
-                break;
-            }
-            // Convert thumbnail to full-size image
-            String imgSrc = thumb.attr("src");
-            imgSrc = imgSrc.replaceAll("_thumb\\.", ".");
-            URL imgUrl = new URL(url, imgSrc);
-
-            addURLToDownload(imgUrl, "", "", this.url.toExternalForm(), null);
-
-            try {
-                Thread.sleep(IMAGE_SLEEP_TIME);
-            } catch (InterruptedException e) {
-                logger.warn("Interrupted while waiting to load next image", e);
-            }
-        }
-
-        waitForThreads();
-    }
 }
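The downloadURL() change above makes GirlsOfDesireRipper send the album page as the HTTP referrer when downloading images; judging from the two call sites in this diff, the five-argument addURLToDownload overload takes the URL, a filename prefix, a subdirectory, the referrer, and a cookie map. Hosts with hotlink protection often reject requests that lack a matching Referer; a standalone jsoup sketch of the same idea, with hypothetical URLs and file name:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;

public class ReferrerDownloadExample {
    public static void main(String[] args) throws IOException {
        String imageUrl = "http://www.girlsofdesire.org/img_0001.jpg";       // hypothetical
        String albumUrl = "http://www.girlsofdesire.org/galleries/example/"; // hypothetical

        // Send the album page as Referer, the way the patched downloadURL()
        // passes this.url.toExternalForm(); ignoreContentType allows a
        // non-HTML (image) response body.
        Response resp = Jsoup.connect(imageUrl)
                             .referrer(albumUrl)
                             .ignoreContentType(true)
                             .execute();

        try (OutputStream out = new FileOutputStream("img_0001.jpg")) {
            out.write(resp.bodyAsBytes());
        }
    }
}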