Convert tabs to spaces, remove some unused imports.
parent 65b11d1e00
commit d2a4412a85
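A tab-to-space migration like this one is normally scripted rather than hand-edited. Below is a minimal sketch of such a script; the source root, the four-space width, and the leading-whitespace-only scope are assumptions for illustration, not details taken from this commit.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class TabsToSpaces {
    public static void main(String[] args) throws IOException {
        // Source root is an assumption; pass a different one as the first argument.
        Path root = Paths.get(args.length > 0 ? args[0] : "src");
        try (Stream<Path> files = Files.walk(root)) {
            List<Path> javaFiles = files
                    .filter(p -> p.toString().endsWith(".java"))
                    .collect(Collectors.toList());
            for (Path p : javaFiles) {
                List<String> converted = Files.readAllLines(p, StandardCharsets.UTF_8)
                        .stream()
                        .map(TabsToSpaces::expandLeadingTabs)
                        .collect(Collectors.toList());
                Files.write(p, converted, StandardCharsets.UTF_8);
            }
        }
    }

    // Replace each tab in a line's leading whitespace with four spaces,
    // leaving tabs after the first non-whitespace character untouched.
    private static String expandLeadingTabs(String line) {
        StringBuilder out = new StringBuilder();
        int i = 0;
        while (i < line.length() && (line.charAt(i) == '\t' || line.charAt(i) == ' ')) {
            out.append(line.charAt(i) == '\t' ? "    " : " ");
            i++;
        }
        return out.append(line, i, line.length()).toString();
    }
}

The hunks below are whitespace-only unless a line carries a `-` (removed) or `+` (added) marker.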
@@ -30,7 +30,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
    }
    public abstract List<String> getURLsFromPage(Document page);
    public List<String> getDescriptionsFromPage(Document doc) throws IOException {
        throw new IOException("getDescriptionsFromPage not implemented"); // Do I do this or make an abstract function?
    }
    public abstract void downloadURL(URL url, int index);
    public DownloadThreadPool getThreadPool() {
@@ -51,10 +51,10 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
        return url;
    }
    public boolean hasDescriptionSupport() {
        return false;
    }
    public String getDescription(String page) throws IOException {
        throw new IOException("getDescription not implemented"); // Do I do this or make an abstract function?
    }
    public int descSleepTime() {
        return 0;
@@ -90,23 +90,23 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
            }
            if (hasDescriptionSupport() && Utils.getConfigBoolean("descriptions.save", false)) {
                logger.debug("Fetching description(s) from " + doc.location());
                List<String> textURLs = getDescriptionsFromPage(doc);
                if (textURLs.size() > 0) {
                    logger.debug("Found description link(s) from " + doc.location());
                    for (String textURL : textURLs) {
                        if (isStopped()) {
                            break;
                        }
                        textindex += 1;
                        logger.debug("Getting description from " + textURL);
                        sleep(descSleepTime());
                        String tempDesc = getDescription(textURL);
                        if (tempDesc != null) {
                            logger.debug("Got description: " + tempDesc);
                            saveText(new URL(textURL), "", tempDesc, textindex);
                        }
                    }
                }
            }

            if (isStopped() || isThisATest()) {
@@ -195,7 +195,7 @@ public abstract class AbstractRipper
    public void retrievingSource(String url) {
        RipStatusMessage msg = new RipStatusMessage(STATUS.LOADING_RESOURCE, url);
        if (observer != null) {
            observer.update(this, msg);
        }
    }

@@ -48,7 +48,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
    }
    @Override
    public boolean hasDescriptionSupport() {
        return true;
    }
    @Override
    public URL sanitizeURL(URL url) throws MalformedURLException {
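For context, the description hooks touched in these hunks form an opt-in extension point on AbstractHTMLRipper: a ripper that returns true from hasDescriptionSupport() gets the description pass shown in the @@ -90,23 hunk above. A partial sketch of a hypothetical subclass follows; the class name and CSS selector are invented for illustration, and the other required overrides are omitted.

// Hypothetical subclass; only the description-related overrides are shown.
public class ExampleRipper extends AbstractHTMLRipper {
    @Override
    public boolean hasDescriptionSupport() {
        return true; // opts in to the description pass in rip()
    }

    @Override
    public List<String> getDescriptionsFromPage(Document doc) {
        List<String> urls = new ArrayList<String>();
        for (Element link : doc.select("a.description")) { // assumed selector
            urls.add(link.absUrl("href"));
        }
        return urls;
    }

    @Override
    public String getDescription(String page) throws IOException {
        // Fetch the linked page and flatten it to plain text.
        return Http.url(page).get().text();
    }
}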
@@ -1,4 +1,3 @@
-
package com.rarchives.ripme.ripper.rippers;

import com.rarchives.ripme.ripper.AbstractHTMLRipper;
@@ -8,174 +7,165 @@ import com.rarchives.ripme.utils.Utils;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

-/**
- *
- * @author
- */
public class E621Ripper extends AbstractHTMLRipper {
    public static final int POOL_IMAGES_PER_PAGE = 24;

    private DownloadThreadPool e621ThreadPool = new DownloadThreadPool("e621");

    public E621Ripper(URL url) throws IOException {
        super(url);
    }

    @Override
    public DownloadThreadPool getThreadPool() {
        return e621ThreadPool;
    }

    @Override
    public String getDomain() {
        return "e621.net";
    }

    @Override
    public String getHost() {
        return "e621";
    }

    @Override
    public Document getFirstPage() throws IOException {
        if (url.getPath().startsWith("/pool/show/")) {
            return Http.url("https://e621.net/pool/show/" + getTerm(url)).get();
        } else {
            return Http.url("https://e621.net/post/index/1/" + getTerm(url)).get();
        }
    }

    @Override
    public List<String> getURLsFromPage(Document page) {
        Elements elements = page.select("#post-list .thumb a,#pool-show .thumb a");
        List<String> res = new ArrayList<String>(elements.size());

        if (page.getElementById("pool-show") != null) {
            int index = 0;

            Element e = page.getElementById("paginator");
            if (e != null) {
                e = e.getElementsByClass("current").first();
                if (e != null) {
                    index = (Integer.parseInt(e.text()) - 1) * POOL_IMAGES_PER_PAGE;
                }
            }

            for (Element e_ : elements) {
                res.add(e_.absUrl("href") + "#" + ++index);
            }

        } else {
            for (Element e : elements) {
                res.add(e.absUrl("href") + "#" + e.child(0).attr("id").substring(1));
            }
        }

        return res;
    }

    @Override
    public Document getNextPage(Document page) throws IOException {
        for (Element e : page.select("#paginator a")) {
            if (e.attr("rel").equals("next")) {
                return Http.url(e.absUrl("href")).get();
            }
        }

        return null;
    }

    @Override
    public void downloadURL(final URL url, int index) {
        e621ThreadPool.addThread(new Thread(new Runnable() {
            public void run() {
                try {
                    Document page = Http.url(url).get();
                    Element e = page.getElementById("image");

                    if (e != null) {
                        addURLToDownload(new URL(e.absUrl("src")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
                    } else if ((e = page.select(".content object>param[name=\"movie\"]").first()) != null) {
                        addURLToDownload(new URL(e.absUrl("value")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
                    } else {
                        Logger.getLogger(E621Ripper.class.getName()).log(Level.WARNING, "Unsupported media type - please report to program author: " + url.toString());
                    }

                } catch (IOException ex) {
                    Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }));
    }

    private String getTerm(URL url) throws MalformedURLException {
        String query = url.getQuery();

        if (query != null) {
            return Utils.parseUrlQuery(query, "tags");
        }

        if (query == null) {
            if ((query = url.getPath()).startsWith("/post/index/")) {
                query = query.substring(12);

                int pos = query.indexOf('/');
                if (pos == -1) {
                    return null;
                }

                // skip page number
                query = query.substring(pos + 1);

                if (query.endsWith("/")) {
                    query = query.substring(0, query.length() - 1);
                }

                try {
                    return URLDecoder.decode(query, "UTF-8");
                } catch (UnsupportedEncodingException e) {
                    // Shouldn't happen since UTF-8 is required to be supported
                    throw new RuntimeException(e);
                }

            } else if (query.startsWith("/pool/show/")) {
                query = query.substring(11);

                if (query.endsWith("/")) {
                    query = query.substring(0, query.length() - 1);
                }

                return query;
            }
        }

        return null;
    }

    @Override
    public String getGID(URL url) throws MalformedURLException {
        String prefix = "";
        if (url.getPath().startsWith("/pool/show/")) {
            prefix = "pool_";
        } else {
            prefix = "term_";
        }

        return Utils.filesystemSafe(prefix + getTerm(url));
    }
}
@@ -259,50 +259,50 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
        List<String> imageURLs = new ArrayList<String>();
        JSONArray photos = json.getJSONArray("photos");
        for (int i = 0; i < photos.length(); i++) {
            if (super.isStopped()) {
                break;
            }
            JSONObject photo = photos.getJSONObject(i);
            String imageURL = null;
            String rawUrl = "https://500px.com" + photo.getString("url");
            Document doc;
            Elements images = new Elements();
            try {
                logger.debug("Loading " + rawUrl);
                super.retrievingSource(rawUrl);
                doc = Http.url(rawUrl).get();
                images = doc.select("div#preload img");
            }
            catch (IOException e) {
                logger.error("Error fetching full-size image from " + rawUrl, e);
            }
            if (images.size() > 0) {
                imageURL = images.first().attr("src");
                logger.debug("Found full-size non-watermarked image: " + imageURL);
            }
            else {
                logger.debug("Falling back to image_url from API response");
                imageURL = photo.getString("image_url");
                imageURL = imageURL.replaceAll("/4\\.", "/5.");
                // See if there's larger images
                for (String imageSize : new String[] { "2048" } ) {
                    String fsURL = imageURL.replaceAll("/5\\.", "/" + imageSize + ".");
                    sleep(10);
                    if (urlExists(fsURL)) {
                        logger.info("Found larger image at " + fsURL);
                        imageURL = fsURL;
                        break;
                    }
                }
            }
            if (imageURL == null) {
                logger.error("Failed to find image for photo " + photo.toString());
            }
            else {
                imageURLs.add(imageURL);
                if (isThisATest()) {
                    break;
                }
            }
        }
        return imageURLs;
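The fallback branch above upgrades the API's image_url by rewriting the size segment of the URL and probing whether the larger rendition exists. On an illustrative URL, invented for this sketch:

// image_url from the API (illustrative): https://pcdn.500px.org/12345/abcd/4.jpg
// replaceAll("/4\\.", "/5.")    -> https://pcdn.500px.org/12345/abcd/5.jpg
// replaceAll("/5\\.", "/2048.") -> https://pcdn.500px.org/12345/abcd/2048.jpg
// The 2048 variant is kept only if urlExists(fsURL) reports it reachable.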
@@ -246,7 +246,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
        if (donwloadLink.size() == 0) {
            logger.warn("Could not download " + this.url);
            return;
        }
        String link = "http:" + donwloadLink.first().attr("href");
        logger.info("Found URL " + link);
        String[] fileNameSplit = link.split("/");
@@ -266,7 +266,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
                addURLToDownload(new URL(link),saveAS,"",cookies);
            } catch (IOException e) {
                logger.error("[!] Exception while loading/parsing " + this.url, e);
            }
        }
    }

|