Convert tabs to spaces, remove some unused imports.
commit d2a4412a85 (parent 65b11d1e00)
@@ -30,7 +30,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
    }
    public abstract List<String> getURLsFromPage(Document page);
    public List<String> getDescriptionsFromPage(Document doc) throws IOException {
        throw new IOException("getDescriptionsFromPage not implemented"); // Do I do this or make an abstract function?
    }
    public abstract void downloadURL(URL url, int index);
    public DownloadThreadPool getThreadPool() {
@@ -51,10 +51,10 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
        return url;
    }
    public boolean hasDescriptionSupport() {
        return false;
    }
    public String getDescription(String page) throws IOException {
        throw new IOException("getDescription not implemented"); // Do I do this or make an abstract function?
    }
    public int descSleepTime() {
        return 0;
@@ -90,23 +90,23 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
            }
            if (hasDescriptionSupport() && Utils.getConfigBoolean("descriptions.save", false)) {
                logger.debug("Fetching description(s) from " + doc.location());
                List<String> textURLs = getDescriptionsFromPage(doc);
                if (textURLs.size() > 0) {
                    logger.debug("Found description link(s) from " + doc.location());
                    for (String textURL : textURLs) {
                        if (isStopped()) {
                            break;
                        }
                        textindex += 1;
                        logger.debug("Getting description from " + textURL);
                        sleep(descSleepTime());
                        String tempDesc = getDescription(textURL);
                        if (tempDesc != null) {
                            logger.debug("Got description: " + tempDesc);
                            saveText(new URL(textURL), "", tempDesc, textindex);
                        }
                    }
                }
            }

            if (isStopped() || isThisATest()) {
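Note: the "Do I do this or make an abstract function?" comments above mark a real design choice. Throwing defaults keep description support optional, while abstract methods would force every ripper to implement it. A minimal standalone sketch of that tradeoff (hypothetical BaseRipper/DescriptionRipper classes, not RipMe's real types):

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    // The base class ships a capability flag plus a throwing default,
    // mirroring the diff's choice, instead of an abstract method.
    abstract class BaseRipper {
        public boolean hasDescriptionSupport() {
            return false;
        }

        public List<String> getDescriptionsFromPage(String page) throws IOException {
            throw new IOException("getDescriptionsFromPage not implemented");
        }
    }

    // Only rippers that actually support descriptions override the hooks.
    class DescriptionRipper extends BaseRipper {
        @Override
        public boolean hasDescriptionSupport() {
            return true; // opt in
        }

        @Override
        public List<String> getDescriptionsFromPage(String page) {
            return Arrays.asList(page + "/description");
        }
    }

Callers such as the loop above check hasDescriptionSupport() first, so the throwing default should only be reached by mistake.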
@@ -195,7 +195,7 @@ public abstract class AbstractRipper
    public void retrievingSource(String url) {
        RipStatusMessage msg = new RipStatusMessage(STATUS.LOADING_RESOURCE, url);
        if (observer != null) {
            observer.update(this, msg);
        }
    }

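Note: retrievingSource() only notifies when an observer (such as a UI) is attached, so rippers stay usable headless. A hypothetical listener sketch, assuming the observer follows java.util.Observer's update(Observable, Object) contract; RipMe's actual observer type may differ:

    import java.util.Observable;
    import java.util.Observer;

    // Hypothetical status listener; the Object argument would be the
    // RipStatusMessage posted by retrievingSource().
    public class StatusLogger implements Observer {
        @Override
        public void update(Observable source, Object message) {
            System.out.println("rip status: " + message);
        }
    }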
@@ -48,7 +48,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
    }
    @Override
    public boolean hasDescriptionSupport() {
        return true;
    }
    @Override
    public URL sanitizeURL(URL url) throws MalformedURLException {
@@ -1,4 +1,3 @@

package com.rarchives.ripme.ripper.rippers;

import com.rarchives.ripme.ripper.AbstractHTMLRipper;
@@ -8,174 +7,165 @@ import com.rarchives.ripme.utils.Utils;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 *
 * @author
 */
public class E621Ripper extends AbstractHTMLRipper {
    public static final int POOL_IMAGES_PER_PAGE = 24;

    private DownloadThreadPool e621ThreadPool = new DownloadThreadPool("e621");

    public E621Ripper(URL url) throws IOException {
        super(url);
    }

    @Override
    public DownloadThreadPool getThreadPool() {
        return e621ThreadPool;
    }

    @Override
    public String getDomain() {
        return "e621.net";
    }

    @Override
    public String getHost() {
        return "e621";
    }

    @Override
    public Document getFirstPage() throws IOException {
        if (url.getPath().startsWith("/pool/show/")) {
            return Http.url("https://e621.net/pool/show/" + getTerm(url)).get();
        } else {
            return Http.url("https://e621.net/post/index/1/" + getTerm(url)).get();
        }
    }

    @Override
    public List<String> getURLsFromPage(Document page) {
        Elements elements = page.select("#post-list .thumb a,#pool-show .thumb a");
        List<String> res = new ArrayList<String>(elements.size());

        if (page.getElementById("pool-show") != null) {
            int index = 0;

            Element e = page.getElementById("paginator");
            if (e != null) {
                e = e.getElementsByClass("current").first();
                if (e != null) {
                    index = (Integer.parseInt(e.text()) - 1) * POOL_IMAGES_PER_PAGE;
                }
            }

            for (Element e_ : elements) {
                res.add(e_.absUrl("href") + "#" + ++index);
            }

        } else {
            for (Element e : elements) {
                res.add(e.absUrl("href") + "#" + e.child(0).attr("id").substring(1));
            }
        }

        return res;
    }

    @Override
    public Document getNextPage(Document page) throws IOException {
        for (Element e : page.select("#paginator a")) {
            if (e.attr("rel").equals("next")) {
                return Http.url(e.absUrl("href")).get();
            }
        }

        return null;
    }

    @Override
    public void downloadURL(final URL url, int index) {
        e621ThreadPool.addThread(new Thread(new Runnable() {
            public void run() {
                try {
                    Document page = Http.url(url).get();
                    Element e = page.getElementById("image");

                    if (e != null) {
                        addURLToDownload(new URL(e.absUrl("src")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
                    } else if ((e = page.select(".content object>param[name=\"movie\"]").first()) != null) {
                        addURLToDownload(new URL(e.absUrl("value")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
                    } else {
                        Logger.getLogger(E621Ripper.class.getName()).log(Level.WARNING, "Unsupported media type - please report to program author: " + url.toString());
                    }

                } catch (IOException ex) {
                    Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }));
    }

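Note (an aside, not part of the commit): the anonymous Runnable above pushes each page fetch onto the dedicated e621 pool. On Java 8+ the same submission could be a lambda; a sketch of the equivalent, eliding the unchanged body:

    e621ThreadPool.addThread(new Thread(() -> {
        try {
            Document page = Http.url(url).get();
            // ... same element lookup and addURLToDownload calls as above
        } catch (IOException ex) {
            Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
        }
    }));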
    private String getTerm(URL url) throws MalformedURLException {
        String query = url.getQuery();

        if (query != null) {
            return Utils.parseUrlQuery(query, "tags");
        }

        if (query == null) {
            if ((query = url.getPath()).startsWith("/post/index/")) {
                query = query.substring(12);

                int pos = query.indexOf('/');
                if (pos == -1) {
                    return null;
                }

                // skip page number
                query = query.substring(pos + 1);

                if (query.endsWith("/")) {
                    query = query.substring(0, query.length() - 1);
                }

                try {
                    return URLDecoder.decode(query, "UTF-8");
                } catch (UnsupportedEncodingException e) {
                    // Shouldn't happen since UTF-8 is required to be supported
                    throw new RuntimeException(e);
                }

            } else if (query.startsWith("/pool/show/")) {
                query = query.substring(11);

                if (query.endsWith("/")) {
                    query = query.substring(0, query.length() - 1);
                }

                return query;
            }
        }

        return null;
    }

    @Override
    public String getGID(URL url) throws MalformedURLException {
        String prefix = "";
        if (url.getPath().startsWith("/pool/show/")) {
            prefix = "pool_";
        } else {
            prefix = "term_";
        }

        return Utils.filesystemSafe(prefix + getTerm(url));
    }

}
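Note: getTerm() recognizes three URL shapes: an explicit ?tags= query, /post/index/<page>/<tags>, and /pool/show/<id>. A standalone trace of the path handling, using the same substring arithmetic as above (tag and pool values are hypothetical):

    import java.net.MalformedURLException;
    import java.net.URL;

    public class GetTermTrace {
        public static void main(String[] args) throws MalformedURLException {
            String postPath = new URL("https://e621.net/post/index/1/sometag").getPath();
            // "/post/index/".length() == 12; drop the prefix, then skip the page number.
            String q = postPath.substring(12);           // "1/sometag"
            q = q.substring(q.indexOf('/') + 1);         // "sometag"
            System.out.println(q);                       // -> sometag

            String poolPath = new URL("https://e621.net/pool/show/123").getPath();
            // "/pool/show/".length() == 11; drop the prefix.
            System.out.println(poolPath.substring(11));  // -> 123
        }
    }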
@@ -259,50 +259,50 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
        List<String> imageURLs = new ArrayList<String>();
        JSONArray photos = json.getJSONArray("photos");
        for (int i = 0; i < photos.length(); i++) {
            if (super.isStopped()) {
                break;
            }
            JSONObject photo = photos.getJSONObject(i);
            String imageURL = null;
            String rawUrl = "https://500px.com" + photo.getString("url");
            Document doc;
            Elements images = new Elements();
            try {
                logger.debug("Loading " + rawUrl);
                super.retrievingSource(rawUrl);
                doc = Http.url(rawUrl).get();
                images = doc.select("div#preload img");
            }
            catch (IOException e) {
                logger.error("Error fetching full-size image from " + rawUrl, e);
            }
            if (images.size() > 0) {
                imageURL = images.first().attr("src");
                logger.debug("Found full-size non-watermarked image: " + imageURL);
            }
            else {
                logger.debug("Falling back to image_url from API response");
                imageURL = photo.getString("image_url");
                imageURL = imageURL.replaceAll("/4\\.", "/5.");
                // See if there's larger images
                for (String imageSize : new String[] { "2048" } ) {
                    String fsURL = imageURL.replaceAll("/5\\.", "/" + imageSize + ".");
                    sleep(10);
                    if (urlExists(fsURL)) {
                        logger.info("Found larger image at " + fsURL);
                        imageURL = fsURL;
                        break;
                    }
                }
            }
            if (imageURL == null) {
                logger.error("Failed to find image for photo " + photo.toString());
            }
            else {
                imageURLs.add(imageURL);
                if (isThisATest()) {
                    break;
                }
            }
        }
        return imageURLs;
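Note: the fallback path above bumps the API's image_url from size 4 to size 5, then probes whether a 2048-pixel rendition exists before settling. A standalone trace of the rewriting (the CDN URL is hypothetical):

    public class SizeRewriteTrace {
        public static void main(String[] args) {
            String imageURL = "https://pcdn.500px.org/photo/123/4.jpg";
            imageURL = imageURL.replaceAll("/4\\.", "/5.");
            System.out.println(imageURL); // https://pcdn.500px.org/photo/123/5.jpg

            // Probe candidate for a larger rendition, as in the loop above:
            String fsURL = imageURL.replaceAll("/5\\.", "/2048.");
            System.out.println(fsURL);    // https://pcdn.500px.org/photo/123/2048.jpg
        }
    }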
@@ -246,7 +246,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
        if (donwloadLink.size() == 0) {
            logger.warn("Could not download " + this.url);
            return;
        }
        String link = "http:" + donwloadLink.first().attr("href");
        logger.info("Found URL " + link);
        String[] fileNameSplit = link.split("/");
@@ -266,7 +266,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
            addURLToDownload(new URL(link),saveAS,"",cookies);
        } catch (IOException e) {
            logger.error("[!] Exception while loading/parsing " + this.url, e);
        }
    }
}
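Note (an aside, not part of the commit): the hand-built "http:" prefix above suggests the download href is protocol-relative. jsoup's absUrl(), which the E621 ripper already uses, resolves such links against the page's base URI instead. A sketch; the selector and URLs are examples only:

    import java.io.IOException;
    import org.jsoup.Jsoup;
    import org.jsoup.nodes.Document;
    import org.jsoup.nodes.Element;

    public class AbsUrlSketch {
        public static void main(String[] args) throws IOException {
            Document doc = Jsoup.connect("http://www.furaffinity.net/view/1/").get();
            Element a = doc.select("a.download").first();
            if (a != null) {
                // absUrl resolves "//cdn.example.net/art.png" -> "http://cdn.example.net/art.png"
                System.out.println(a.absUrl("href"));
            }
        }
    }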