Merge branch 'formatting': Various code cleanup and formatting.
commit dc2c100d9d
@@ -146,7 +146,7 @@ public class App {
 try {
 String url;
 BufferedReader br = new BufferedReader(new FileReader(filename));
-while((url = br.readLine()) != null) {
+while ((url = br.readLine()) != null) {
 // loop through each url in the file and proces each url individually.
 ripURL(url.trim(), cl.hasOption("n"));
 }
@@ -171,7 +171,7 @@ public class App {
 if (!history.contains(url.toExternalForm())) {
 history.add(url.toExternalForm());
 Utils.setConfigList("download.history", Arrays.asList(history.toArray()));
-if(saveConfig) {
+if (saveConfig) {
 Utils.saveConfig();
 }
 }
@@ -16,7 +16,7 @@ import com.rarchives.ripme.utils.Utils;
 * Simplified ripper, designed for ripping from sites by parsing HTML.
 */
 public abstract class AbstractHTMLRipper extends AlbumRipper {

 public AbstractHTMLRipper(URL url) throws IOException {
 super(url);
 }
@@ -30,7 +30,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
 }
 public abstract List<String> getURLsFromPage(Document page);
 public List<String> getDescriptionsFromPage(Document doc) throws IOException {
 throw new IOException("getDescriptionsFromPage not implemented"); // Do I do this or make an abstract function?
 }
 public abstract void downloadURL(URL url, int index);
 public DownloadThreadPool getThreadPool() {
@@ -45,16 +45,16 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
 public boolean canRip(URL url) {
 return url.getHost().endsWith(getDomain());
 }

 @Override
 public URL sanitizeURL(URL url) throws MalformedURLException {
 return url;
 }
 public boolean hasDescriptionSupport() {
 return false;
 }
 public String getDescription(String page) throws IOException {
 throw new IOException("getDescription not implemented"); // Do I do this or make an abstract function?
 }
 public int descSleepTime() {
 return 0;
@@ -66,7 +66,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
 logger.info("Retrieving " + this.url);
 sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
 Document doc = getFirstPage();

 while (doc != null) {
 List<String> imageURLs = getURLsFromPage(doc);
 // Remove all but 1 image
@@ -79,7 +79,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
 if (imageURLs.size() == 0) {
 throw new IOException("No images found at " + doc.location());
 }

 for (String imageURL : imageURLs) {
 index += 1;
 logger.debug("Found image url #" + index + ": " + imageURL);
@@ -90,23 +90,23 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
 }
 if (hasDescriptionSupport() && Utils.getConfigBoolean("descriptions.save", false)) {
 logger.debug("Fetching description(s) from " + doc.location());
 List<String> textURLs = getDescriptionsFromPage(doc);
 if (textURLs.size() > 0) {
 logger.debug("Found description link(s) from " + doc.location());
 for (String textURL : textURLs) {
 if (isStopped()) {
 break;
 }
 textindex += 1;
 logger.debug("Getting description from " + textURL);
 sleep(descSleepTime());
 String tempDesc = getDescription(textURL);
 if (tempDesc != null) {
 logger.debug("Got description: " + tempDesc);
 saveText(new URL(textURL), "", tempDesc, textindex);
 }
 }
 }
 }

 if (isStopped() || isThisATest()) {
@@ -195,7 +195,7 @@ public abstract class AbstractRipper
 public void retrievingSource(String url) {
 RipStatusMessage msg = new RipStatusMessage(STATUS.LOADING_RESOURCE, url);
 if (observer != null) {
 observer.update(this, msg);
 }
 }

@@ -19,7 +19,7 @@ public class DownloadThreadPool {
 public DownloadThreadPool() {
 initialize("Main");
 }

 public DownloadThreadPool(String threadPoolName) {
 initialize(threadPoolName);
 }
@@ -46,12 +46,12 @@ public class BcfakesRipper extends AbstractHTMLRipper {
 + "http://www.bcfakes.com/celebritylist/name"
 + " Got: " + url);
 }

 @Override
 public Document getFirstPage() throws IOException {
 return Http.url(url).get();
 }

 @Override
 public Document getNextPage(Document doc) throws IOException {
 // Find next page
@@ -63,7 +63,7 @@ public class BcfakesRipper extends AbstractHTMLRipper {
 sleep(500);
 return Http.url(nextUrl).get();
 }

 @Override
 public List<String> getURLsFromPage(Document doc) {
 List<String> imageURLs = new ArrayList<String>();
@@ -158,13 +158,13 @@ public class ChanRipper extends AbstractHTMLRipper {
 Boolean self_hosted = false;
 if (!generalChanSite) {
 for (String cdnDomain : chanSite.cdnDomains) {
-if (href.contains(cdnDomain)){
+if (href.contains(cdnDomain)) {
 self_hosted = true;
 }
 }
 }

-if (self_hosted || generalChanSite){
+if (self_hosted || generalChanSite) {
 p = Pattern.compile("^.*\\.(jpg|jpeg|png|gif|apng|webp|tif|tiff|webm)$", Pattern.CASE_INSENSITIVE);
 m = p.matcher(href);
 if (m.matches()) {
@@ -194,7 +194,7 @@ public class ChanRipper extends AbstractHTMLRipper {
 }

 List<URL> urls = RipUtils.getFilesFromURL(originalURL);
-for(URL imageurl : urls){
+for (URL imageurl : urls) {
 imageURLs.add(imageurl.toString());
 }
 }
@@ -46,7 +46,7 @@ public class DatwinRipper extends AbstractHTMLRipper {
 public Document getFirstPage() throws IOException {
 return Http.url(url).get();
 }

 @Override
 public List<String> getURLsFromPage(Document doc) {
 List<String> imageURLs = new ArrayList<String>();
@@ -57,7 +57,7 @@ public class DatwinRipper extends AbstractHTMLRipper {
 }
 return imageURLs;
 }

 @Override
 public void downloadURL(URL url, int index) {
 addURLToDownload(url, getPrefix(index));
@@ -48,7 +48,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
 }
 @Override
 public boolean hasDescriptionSupport() {
 return true;
 }
 @Override
 public URL sanitizeURL(URL url) throws MalformedURLException {
@@ -239,7 +239,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
 }
 return result.toString();
 }

 /**
 * Attempts to download description for image.
 * Comes in handy when people put entire stories in their description.
@@ -277,7 +277,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
 return null;
 }
 }

 /**
 * If largest resolution for image at 'thumb' is found, starts downloading
 * and returns null.
@@ -1,4 +1,3 @@
-
 package com.rarchives.ripme.ripper.rippers;

 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
@@ -8,174 +7,165 @@ import com.rarchives.ripme.utils.Utils;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;

-/**
- *
- * @author
- */
 public class E621Ripper extends AbstractHTMLRipper {
 public static final int POOL_IMAGES_PER_PAGE = 24;

 private DownloadThreadPool e621ThreadPool = new DownloadThreadPool("e621");

 public E621Ripper(URL url) throws IOException {
 super(url);
 }

 @Override
 public DownloadThreadPool getThreadPool() {
 return e621ThreadPool;
 }

 @Override
 public String getDomain() {
 return "e621.net";
 }

 @Override
 public String getHost() {
 return "e621";
 }

 @Override
 public Document getFirstPage() throws IOException {
 if (url.getPath().startsWith("/pool/show/")) {
 return Http.url("https://e621.net/pool/show/" + getTerm(url)).get();
 } else {
 return Http.url("https://e621.net/post/index/1/" + getTerm(url)).get();
 }
 }

 @Override
 public List<String> getURLsFromPage(Document page) {
 Elements elements = page.select("#post-list .thumb a,#pool-show .thumb a");
 List<String> res = new ArrayList<String>(elements.size());

 if (page.getElementById("pool-show") != null) {
 int index = 0;

 Element e = page.getElementById("paginator");
 if (e != null) {
 e = e.getElementsByClass("current").first();
 if (e != null) {
 index = (Integer.parseInt(e.text()) - 1) * POOL_IMAGES_PER_PAGE;
 }
 }

 for (Element e_ : elements) {
 res.add(e_.absUrl("href") + "#" + ++index);
 }

 } else {
 for (Element e : elements) {
 res.add(e.absUrl("href") + "#" + e.child(0).attr("id").substring(1));
 }
 }

 return res;
 }

 @Override
 public Document getNextPage(Document page) throws IOException {
 for (Element e : page.select("#paginator a")) {
 if (e.attr("rel").equals("next")) {
 return Http.url(e.absUrl("href")).get();
 }
 }

 return null;
 }

 @Override
 public void downloadURL(final URL url, int index) {
 e621ThreadPool.addThread(new Thread(new Runnable() {
 public void run() {
 try {
 Document page = Http.url(url).get();
 Element e = page.getElementById("image");

 if (e != null) {
 addURLToDownload(new URL(e.absUrl("src")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
 } else if ((e = page.select(".content object>param[name=\"movie\"]").first()) != null) {
 addURLToDownload(new URL(e.absUrl("value")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
 } else {
 Logger.getLogger(E621Ripper.class.getName()).log(Level.WARNING, "Unsupported media type - please report to program author: " + url.toString());
 }

 } catch (IOException ex) {
 Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
 }
 }
 }));
 }

 private String getTerm(URL url) throws MalformedURLException {
 String query = url.getQuery();

 if (query != null) {
 return Utils.parseUrlQuery(query, "tags");
 }

 if (query == null) {
 if ((query = url.getPath()).startsWith("/post/index/")) {
 query = query.substring(12);

 int pos = query.indexOf('/');
 if (pos == -1) {
 return null;
 }

 // skip page number
 query = query.substring(pos + 1);

 if (query.endsWith("/")) {
 query = query.substring(0, query.length() - 1);
 }

 try {
 return URLDecoder.decode(query, "UTF-8");
 } catch (UnsupportedEncodingException e) {
 // Shouldn't happen since UTF-8 is required to be supported
 throw new RuntimeException(e);
 }

 } else if (query.startsWith("/pool/show/")) {
 query = query.substring(11);

 if (query.endsWith("/")) {
 query = query.substring(0, query.length() - 1);
 }

 return query;
 }
 }

 return null;
 }

 @Override
 public String getGID(URL url) throws MalformedURLException {
 String prefix = "";
 if (url.getPath().startsWith("/pool/show/")) {
 prefix = "pool_";
 } else {
 prefix = "term_";
 }

 return Utils.filesystemSafe(prefix + getTerm(url));
 }
-
-}
+}
@@ -9,7 +9,6 @@ import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -18,10 +17,8 @@ import org.jsoup.Connection.Response;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
-import org.jsoup.Connection.Method;

 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Http;

 /**
@@ -45,7 +42,7 @@ public class EroShareRipper extends AbstractHTMLRipper {
 }

 @Override
-public void downloadURL(URL url, int index){
+public void downloadURL(URL url, int index) {
 addURLToDownload(url);
 }

@@ -66,12 +63,12 @@ public class EroShareRipper extends AbstractHTMLRipper {


 @Override
-public List<String> getURLsFromPage(Document doc){
+public List<String> getURLsFromPage(Document doc) {
 List<String> URLs = new ArrayList<String>();
 //Pictures
 Elements imgs = doc.getElementsByTag("img");
-for (Element img : imgs){
-if (img.hasClass("album-image")){
+for (Element img : imgs) {
+if (img.hasClass("album-image")) {
 String imageURL = img.attr("src");
 imageURL = "https:" + imageURL;
 URLs.add(imageURL);
@@ -79,8 +76,8 @@ public class EroShareRipper extends AbstractHTMLRipper {
 }
 //Videos
 Elements vids = doc.getElementsByTag("video");
-for (Element vid : vids){
-if (vid.hasClass("album-video")){
+for (Element vid : vids) {
+if (vid.hasClass("album-video")) {
 Elements source = vid.getElementsByTag("source");
 String videoURL = source.first().attr("src");
 URLs.add(videoURL);
@@ -122,8 +119,8 @@ public class EroShareRipper extends AbstractHTMLRipper {
 List<URL> URLs = new ArrayList<URL>();
 //Pictures
 Elements imgs = doc.getElementsByTag("img");
-for (Element img : imgs){
-if (img.hasClass("album-image")){
+for (Element img : imgs) {
+if (img.hasClass("album-image")) {
 String imageURL = img.attr("src");
 imageURL = "https:" + imageURL;
 URLs.add(new URL(imageURL));
@@ -131,8 +128,8 @@ public class EroShareRipper extends AbstractHTMLRipper {
 }
 //Videos
 Elements vids = doc.getElementsByTag("video");
-for (Element vid : vids){
-if (vid.hasClass("album-video")){
+for (Element vid : vids) {
+if (vid.hasClass("album-video")) {
 Elements source = vid.getElementsByTag("source");
 String videoURL = source.first().attr("src");
 URLs.add(new URL(videoURL));
@@ -1,101 +1,101 @@
 package com.rarchives.ripme.ripper.rippers;

 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;

 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
 import com.rarchives.ripme.utils.Http;

 public class FapprovedRipper extends AbstractHTMLRipper {

 private int pageIndex = 1;
 private String username = null;

 public FapprovedRipper(URL url) throws IOException {
 super(url);
 }

 @Override
 public String getHost() {
 return "fapproved";
 }
 @Override
 public String getDomain() {
 return "fapproved.com";
 }

 @Override
 public String getGID(URL url) throws MalformedURLException {
 Pattern p = Pattern.compile("^https?://[w.]*fapproved.com/users/([a-zA-Z0-9\\-_]{3,}).*$");
 Matcher m = p.matcher(url.toExternalForm());
 if (m.matches()) {
 username = m.group(1);
 return username;
 }
 throw new MalformedURLException("Fapproved user not found in " + url + ", expected http://fapproved.com/users/username/images");
 }

 @Override
 public URL sanitizeURL(URL url) throws MalformedURLException {
 return new URL("http://fapproved.com/users/" + getGID(url));
 }

 @Override
 public Document getFirstPage() throws IOException {
 pageIndex = 1;
 String pageURL = getPageURL(pageIndex);
 return Http.url(pageURL)
 .ignoreContentType()
 .get();
 }

 @Override
 public Document getNextPage(Document doc) throws IOException {
-if ( (doc.select("div.pagination li.next.disabled").size() != 0)
-|| (doc.select("div.pagination").size() == 0) ) {
+if ((doc.select("div.pagination li.next.disabled").size() != 0)
+|| (doc.select("div.pagination").size() == 0)) {
 throw new IOException("No more pages found");
 }
 sleep(1000);
 pageIndex++;
 String pageURL = getPageURL(pageIndex);
 return Http.url(pageURL)
 .ignoreContentType()
 .get();
 }

 private String getPageURL(int index) throws IOException {
 if (username == null) {
 username = getGID(this.url);
 }
 return "http://fapproved.com/users/" + username + "/images?page=" + pageIndex;
 }

 @Override
 public List<String> getURLsFromPage(Document page) {
 List<String> imageURLs = new ArrayList<String>();
 for (Element image : page.select("div.actual-image img")) {
 String imageURL = image.attr("src");
 if (imageURL.startsWith("//")) {
 imageURL = "http:" + imageURL;
 }
 else if (imageURL.startsWith("/")) {
 imageURL = "http://fapproved.com" + imageURL;
 }
 imageURLs.add(imageURL);
 }
 return imageURLs;
 }

 @Override
 public void downloadURL(URL url, int index) {
 addURLToDownload(url, getPrefix(index));
 }
 }
@@ -155,7 +155,7 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
 private String getUserID(String username) throws IOException {
 logger.info("Fetching user ID for " + username);
 JSONObject json = new Http("https://api.500px.com/v1/" +
 "users/show" +
 "?username=" + username +
 "&consumer_key=" + CONSUMER_KEY)
 .getJSON();
@@ -259,55 +259,55 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
 List<String> imageURLs = new ArrayList<String>();
 JSONArray photos = json.getJSONArray("photos");
 for (int i = 0; i < photos.length(); i++) {
 if (super.isStopped()) {
 break;
 }
 JSONObject photo = photos.getJSONObject(i);
 String imageURL = null;
 String rawUrl = "https://500px.com" + photo.getString("url");
 Document doc;
 Elements images = new Elements();
 try {
 logger.debug("Loading " + rawUrl);
 super.retrievingSource(rawUrl);
 doc = Http.url(rawUrl).get();
 images = doc.select("div#preload img");
 }
 catch (IOException e) {
 logger.error("Error fetching full-size image from " + rawUrl, e);
 }
 if (images.size() > 0) {
 imageURL = images.first().attr("src");
 logger.debug("Found full-size non-watermarked image: " + imageURL);
 }
 else {
 logger.debug("Falling back to image_url from API response");
 imageURL = photo.getString("image_url");
 imageURL = imageURL.replaceAll("/4\\.", "/5.");
 // See if there's larger images
 for (String imageSize : new String[] { "2048" } ) {
 String fsURL = imageURL.replaceAll("/5\\.", "/" + imageSize + ".");
 sleep(10);
 if (urlExists(fsURL)) {
 logger.info("Found larger image at " + fsURL);
 imageURL = fsURL;
 break;
 }
 }
 }
 if (imageURL == null) {
 logger.error("Failed to find image for photo " + photo.toString());
 }
 else {
 imageURLs.add(imageURL);
 if (isThisATest()) {
 break;
 }
 }
 }
 return imageURLs;
 }

 private boolean urlExists(String url) {
 try {
 HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
@@ -246,7 +246,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
 if (donwloadLink.size() == 0) {
 logger.warn("Could not download " + this.url);
 return;
 }
 String link = "http:" + donwloadLink.first().attr("href");
 logger.info("Found URL " + link);
 String[] fileNameSplit = link.split("/");
@@ -266,7 +266,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
 addURLToDownload(new URL(link),saveAS,"",cookies);
 } catch (IOException e) {
 logger.error("[!] Exception while loading/parsing " + this.url, e);
 }
 }
 }

@@ -69,7 +69,7 @@ public class GirlsOfDesireRipper extends AbstractHTMLRipper {
 }
 return albumDoc;
 }

 @Override
 public List<String> getURLsFromPage(Document doc) {
 List<String> imageURLs = new ArrayList<String>();
@@ -83,7 +83,7 @@ public class GirlsOfDesireRipper extends AbstractHTMLRipper {
 }
 return imageURLs;
 }

 @Override
 public void downloadURL(URL url, int index) {
 // Send referrer when downloading images
@@ -69,12 +69,12 @@ public class ImagearnRipper extends AbstractHTMLRipper {
 }
 throw new IOException("Failed to find gallery at URL " + url);
 }

 @Override
 public Document getFirstPage() throws IOException {
 return Http.url(url).get();
 }

 @Override
 public List<String> getURLsFromPage(Document doc) {
 List<String> imageURLs = new ArrayList<String>();
@@ -85,7 +85,7 @@ public class ImagearnRipper extends AbstractHTMLRipper {
 }
 return imageURLs;
 }

 @Override
 public void downloadURL(URL url, int index) {
 addURLToDownload(url, getPrefix(index));
@@ -58,7 +58,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {
 + "http://www.imagebam.com/gallery/galleryid"
 + " Got: " + url);
 }

 @Override
 public Document getFirstPage() throws IOException {
 if (albumDoc == null) {
@@ -66,7 +66,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {
 }
 return albumDoc;
 }

 @Override
 public Document getNextPage(Document doc) throws IOException {
 // Find next page
@@ -78,7 +78,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {
 sleep(500);
 return Http.url(nextUrl).get();
 }

 @Override
 public List<String> getURLsFromPage(Document doc) {
 List<String> imageURLs = new ArrayList<String>();
@@ -117,7 +117,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {

 /**
 * Helper class to find and download images found on "image" pages
 *
 * Handles case when site has IP-banned the user.
 */
 private class ImagebamImageThread extends Thread {
@@ -134,7 +134,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {
 public void run() {
 fetchImage();
 }

 private void fetchImage() {
 try {
 Document doc = Http.url(url).get();
@@ -93,7 +93,7 @@ public class ImagefapRipper extends AbstractHTMLRipper {
 + "imagefap.com/pictures/####..."
 + " Got: " + url);
 }

 @Override
 public Document getFirstPage() throws IOException {
 if (albumDoc == null) {
@@ -101,7 +101,7 @@ public class ImagefapRipper extends AbstractHTMLRipper {
 }
 return albumDoc;
 }

 @Override
 public Document getNextPage(Document doc) throws IOException {
 String nextURL = null;
@@ -117,7 +117,7 @@ public class ImagefapRipper extends AbstractHTMLRipper {
 sleep(1000);
 return Http.url(nextURL).get();
 }

 @Override
 public List<String> getURLsFromPage(Document doc) {
 List<String> imageURLs = new ArrayList<String>();
@@ -137,7 +137,7 @@ public class ImagefapRipper extends AbstractHTMLRipper {
 }
 return imageURLs;
 }

 @Override
 public void downloadURL(URL url, int index) {
 // Send referrer for image downloads
@@ -55,12 +55,12 @@ public class ImagevenueRipper extends AbstractHTMLRipper {
 + "http://...imagevenue.com/galshow.php?gal=gallery_...."
 + " Got: " + url);
 }

 @Override
 public Document getFirstPage() throws IOException {
 return Http.url(url).get();
 }

 public List<String> getURLsFromPage(Document doc) {
 List<String> imageURLs = new ArrayList<String>();
 for (Element thumb : doc.select("a[target=_blank]")) {
@@ -68,7 +68,7 @@ public class ImagevenueRipper extends AbstractHTMLRipper {
 }
 return imageURLs;
 }

 public void downloadURL(URL url, int index) {
 ImagevenueImageThread t = new ImagevenueImageThread(url, index);
 imagevenueThreadPool.addThread(t);
@@ -76,7 +76,7 @@ public class ImagevenueRipper extends AbstractHTMLRipper {

 /**
 * Helper class to find and download images found on "image" pages
 *
 * Handles case when site has IP-banned the user.
 */
 private class ImagevenueImageThread extends Thread {
@@ -93,7 +93,7 @@ public class ImagevenueRipper extends AbstractHTMLRipper {
 public void run() {
 fetchImage();
 }

 private void fetchImage() {
 try {
 Document doc = Http.url(url)
@@ -39,7 +39,7 @@ public class ImgboxRipper extends AbstractHTMLRipper {
 throw new MalformedURLException("Expected imgbox.com URL format: " +
 "imgbox.com/g/albumid - got " + url + "instead");
 }

 @Override
 public Document getFirstPage() throws IOException {
 return Http.url(url).get();
@@ -11,11 +11,8 @@ import java.util.regex.Pattern;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
-import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;

 import com.rarchives.ripme.ripper.AbstractJSONRipper;
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Http;

 public class InstagramRipper extends AbstractJSONRipper {
@@ -60,18 +57,18 @@ public class InstagramRipper extends AbstractJSONRipper {

 throw new MalformedURLException("Expected username in URL (instagram.com/username and not " + url);
 }

 private String getUserID(URL url) throws IOException {

 Pattern p = Pattern.compile("^https?://instagram\\.com/([^/]+)");
 Matcher m = p.matcher(url.toExternalForm());
-if(m.matches()) {
+if (m.matches()) {
 return m.group(1);
 }

 throw new IOException("Unable to find userID at " + this.url);
 }

 @Override
 public JSONObject getFirstPage() throws IOException {
 userID = getUserID(url);
@@ -95,7 +92,7 @@ public class InstagramRipper extends AbstractJSONRipper {
 throw new IOException("No additional pages found");
 }

-if(nextPageAvailable) {
+if (nextPageAvailable) {
 JSONArray items = json.getJSONArray("items");
 JSONObject last_item = items.getJSONObject(items.length() - 1);
 String nextMaxID = last_item.getString("id");
@@ -111,7 +108,7 @@ public class InstagramRipper extends AbstractJSONRipper {
 throw new IOException("No more images found");
 }
 }

 @Override
 public List<String> getURLsFromJSON(JSONObject json) {
 List<String> imageURLs = new ArrayList<String>();
@@ -142,7 +139,7 @@ public class InstagramRipper extends AbstractJSONRipper {
 }
 return imageURLs;
 }

 @Override
 public void downloadURL(URL url, int index) {
 addURLToDownload(url);
@@ -138,9 +138,9 @@ public class MediacrushRipper extends AbstractJSONRipper {
 }

 /**
 * Iterates over list if "file" objects and returns the preferred
 * image format.
 * @param subfiles Array of "files" (JSONObjects) which contain
 * @return Preferred media format.
 */
 private String getPreferredUrl(JSONArray subfiles) {
@@ -2,24 +2,15 @@ package com.rarchives.ripme.ripper.rippers;

 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
 import com.rarchives.ripme.utils.Http;
-import com.rarchives.ripme.utils.Utils;
-import java.io.File;
 import java.io.IOException;
 import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
-import org.jsoup.select.Elements;

 public class MyhentaicomicsRipper extends AbstractHTMLRipper {
 public static boolean isTag;
@@ -95,13 +86,13 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
 List<String> albumPagesList = new ArrayList<String>();
 int pageNumber = 1;
 albumPagesList.add("http://myhentaicomics.com/index.php/" + pageUrl.split("\\?")[0] + "?page=" + Integer.toString(pageNumber));
-while(true) {
+while (true) {
 String urlToGet = "http://myhentaicomics.com/index.php/" + pageUrl.split("\\?")[0] + "?page=" + Integer.toString(pageNumber);
 Document nextAlbumPage;
 try {
 logger.info("Grabbing " + urlToGet);
 nextAlbumPage = Http.url(urlToGet).get();
-} catch(IOException e){
+} catch(IOException e) {
 logger.warn("Failed to log link in Jsoup");
 nextAlbumPage = null;
 e.printStackTrace();
@@ -109,7 +100,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
 Element elem = nextAlbumPage.select("a.ui-icon-right").first();
 String nextPage = elem.attr("href");
 pageNumber = pageNumber + 1;
-if(nextPage == ""){
+if (nextPage == "") {
 logger.info("Got " + pageNumber + " pages");
 break;
 }
@@ -145,7 +136,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
 else {
 album_doc = Http.url(element).get();
 }
-} catch(IOException e){
+} catch(IOException e) {
 logger.warn("Failed to log link in Jsoup");
 album_doc = null;
 e.printStackTrace();
@@ -4,7 +4,6 @@ import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -13,15 +12,14 @@ import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;

 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
-import com.rarchives.ripme.ripper.rippers.ripperhelpers.ChanSite;
 import com.rarchives.ripme.utils.Http;

 public class NatalieMuRipper extends AbstractHTMLRipper {

 public int news_id = 0;

 public NatalieMuRipper(URL url) throws IOException {
 super(url);
 }

 @Override
@@ -37,12 +35,12 @@ public class NatalieMuRipper extends AbstractHTMLRipper {
 }

 @Override
 public boolean canRip(URL url) {
 //urls like:
 // http://cdn2.natalie.mu/music/gallery/show/news_id/xxxxxx/image_id/xxxxxx
 // http://cdn2.natalie.mu/music/news/140411
 return url.toExternalForm().contains("natalie.mu") // Most chans
 && (url.toExternalForm().contains("/news_id/")
 || url.toExternalForm().contains("/news/")); // 4chan, archive.moe
 }

@ -61,13 +59,13 @@ public class NatalieMuRipper extends AbstractHTMLRipper {
|
|||||||
m = p.matcher(u);
|
m = p.matcher(u);
|
||||||
if (m.find()) {
|
if (m.find()) {
|
||||||
return m.group(1);
|
return m.group(1);
|
||||||
}
|
}
|
||||||
} else if (u.contains("/news/")) {
|
} else if (u.contains("/news/")) {
|
||||||
p = Pattern.compile("/news/([0-9]+)/?");
|
p = Pattern.compile("/news/([0-9]+)/?");
|
||||||
m = p.matcher(u);
|
m = p.matcher(u);
|
||||||
if (m.find()) {
|
if (m.find()) {
|
||||||
return m.group(1);
|
return m.group(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new MalformedURLException(
|
throw new MalformedURLException(
|
||||||
@ -85,18 +83,18 @@ public class NatalieMuRipper extends AbstractHTMLRipper {
|
|||||||
public Document getFirstPage() throws IOException {
|
public Document getFirstPage() throws IOException {
|
||||||
return Http.url(this.url).get();
|
return Http.url(this.url).get();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public List<String> getURLsFromPage(Document page) {
|
public List<String> getURLsFromPage(Document page) {
|
||||||
List<String> imageURLs = new ArrayList<String>();
|
List<String> imageURLs = new ArrayList<String>();
|
||||||
Pattern p; Matcher m;
|
Pattern p; Matcher m;
|
||||||
//select all album thumbnails
|
//select all album thumbnails
|
||||||
for (Element span : page.select(".NA_articleGallery span")) {
|
for (Element span : page.select(".NA_articleGallery span")) {
|
||||||
if (!span.hasAttr("style")) {
|
if (!span.hasAttr("style")) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
String style = span.attr("style").trim();
|
String style = span.attr("style").trim();
|
||||||
|
|
||||||
p = Pattern.compile("background-image: url\\((.*list_thumb_inbox.*)\\);", Pattern.CASE_INSENSITIVE);
|
p = Pattern.compile("background-image: url\\((.*list_thumb_inbox.*)\\);", Pattern.CASE_INSENSITIVE);
|
||||||
m = p.matcher(style);
|
m = p.matcher(style);
|
||||||
if (m.find()) {
|
if (m.find()) {
|
||||||
@ -118,7 +116,7 @@ public class NatalieMuRipper extends AbstractHTMLRipper {
|
|||||||
if (isThisATest()) {
|
if (isThisATest()) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (isStopped()) {
|
if (isStopped()) {
|
||||||
break;
|
break;
|
||||||
@ -130,5 +128,5 @@ public class NatalieMuRipper extends AbstractHTMLRipper {
|
|||||||
@Override
|
@Override
|
||||||
public void downloadURL(URL url, int index) {
|
public void downloadURL(URL url, int index) {
|
||||||
addURLToDownload(url, getPrefix(index), "", this.url.toString(), null);
|
addURLToDownload(url, getPrefix(index), "", this.url.toString(), null);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
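For reference, the thumbnail scan in getURLsFromPage() pulls image URLs out of inline background-image styles. A small self-contained sketch of that extraction (the style string here is invented):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class StyleUrlDemo {
    public static void main(String[] args) {
        // Hypothetical span style shaped like the ones the ripper scans.
        String style = "background-image: url(http://cdn2.natalie.mu/media/list_thumb_inbox/123.jpg);";
        Pattern p = Pattern.compile("background-image: url\\((.*list_thumb_inbox.*)\\);", Pattern.CASE_INSENSITIVE);
        Matcher m = p.matcher(style);
        if (m.find()) {
            System.out.println(m.group(1)); // the captured thumbnail URL
        }
    }
}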
@@ -21,102 +21,105 @@ import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
-/**
- *
- * @author
- */
-public class PahealRipper extends AbstractHTMLRipper{
-    private static Map<String,String> cookies=null;
-    private static Pattern gidPattern=null;
+public class PahealRipper extends AbstractHTMLRipper {
+    private static Map<String, String> cookies = null;
+    private static Pattern gidPattern = null;
 
     private static Map<String, String> getCookies() {
-        if(cookies==null){
-            cookies=new HashMap<String, String>(1);
-            cookies.put("ui-tnc-agreed","true");
+        if (cookies == null) {
+            cookies = new HashMap<String, String>(1);
+            cookies.put("ui-tnc-agreed", "true");
         }
         return cookies;
     }
 
     public PahealRipper(URL url) throws IOException {
         super(url);
     }
 
     @Override
     public String getDomain() {
         return "rule34.paheal.net";
     }
 
     @Override
     public String getHost() {
         return "paheal";
     }
 
     @Override
     public Document getFirstPage() throws IOException {
-        return Http.url("http://rule34.paheal.net/post/list/"+getTerm(url)+"/1").cookies(getCookies()).get();
+        return Http.url("http://rule34.paheal.net/post/list/" + getTerm(url) + "/1").cookies(getCookies()).get();
     }
 
     @Override
     public Document getNextPage(Document page) throws IOException {
-        for(Element e:page.select("#paginator a")){
-            if(e.text().toLowerCase().equals("next"))
+        for (Element e : page.select("#paginator a")) {
+            if (e.text().toLowerCase().equals("next")) {
                 return Http.url(e.absUrl("href")).cookies(getCookies()).get();
+            }
         }
+
         return null;
     }
 
-    @Override
-    public List<String> getURLsFromPage(Document page) {
-        Elements elements=page.select(".shm-thumb.thumb>a").not(".shm-thumb-link");
-        List<String> res=new ArrayList<String>(elements.size());
-
-        for(Element e:elements)
-            res.add(e.absUrl("href"));
-
-        return res;
-    }
-
-    @Override
-    public void downloadURL(URL url, int index) {
-        try {
-            String name=url.getPath();
-            String ext=".png";
-
-            name=name.substring(name.lastIndexOf('/')+1);
-            if(name.indexOf('.')>=0){
-                ext=name.substring(name.lastIndexOf('.'));
-                name=name.substring(0,name.length()-ext.length());
-            }
-
-            addURLToDownload(url,new File(workingDir.getCanonicalPath()+File.separator+Utils.filesystemSafe(new URI(name).getPath())+ext));
-        } catch (IOException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
-        } catch (URISyntaxException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
-        }
-    }
-
-    private String getTerm(URL url) throws MalformedURLException{
-        if(gidPattern==null)
-            gidPattern=Pattern.compile("^https?://(www\\.)?rule34\\.paheal\\.net/post/list/([a-zA-Z0-9$_.+!*'(),%-]+)(/.*)?(#.*)?$");
-
-        Matcher m = gidPattern.matcher(url.toExternalForm());
-        if(m.matches())
-            return m.group(2);
-
-        throw new MalformedURLException("Expected paheal.net URL format: rule34.paheal.net/post/list/searchterm - got "+url+" instead");
-    }
-
-    @Override
-    public String getGID(URL url) throws MalformedURLException {
-        try {
-            return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
-        } catch (URISyntaxException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
-        }
-
-        throw new MalformedURLException("Expected paheal.net URL format: rule34.paheal.net/post/list/searchterm - got "+url+" instead");
-    }
+    @Override
+    public List<String> getURLsFromPage(Document page) {
+        Elements elements = page.select(".shm-thumb.thumb>a").not(".shm-thumb-link");
+        List<String> res = new ArrayList<String>(elements.size());
+
+        for (Element e : elements) {
+            res.add(e.absUrl("href"));
+        }
+
+        return res;
+    }
+
+    @Override
+    public void downloadURL(URL url, int index) {
+        try {
+            String name = url.getPath();
+            String ext = ".png";
+
+            name = name.substring(name.lastIndexOf('/') + 1);
+            if (name.indexOf('.') >= 0) {
+                ext = name.substring(name.lastIndexOf('.'));
+                name = name.substring(0, name.length() - ext.length());
+            }
+
+            File outFile = new File(workingDir.getCanonicalPath()
+                    + File.separator
+                    + Utils.filesystemSafe(new URI(name).getPath())
+                    + ext);
+            addURLToDownload(url, outFile);
+        } catch (IOException ex) {
+            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+        } catch (URISyntaxException ex) {
+            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+        }
+    }
+
+    private String getTerm(URL url) throws MalformedURLException {
+        if (gidPattern == null) {
+            gidPattern = Pattern.compile("^https?://(www\\.)?rule34\\.paheal\\.net/post/list/([a-zA-Z0-9$_.+!*'(),%-]+)(/.*)?(#.*)?$");
+        }
+
+        Matcher m = gidPattern.matcher(url.toExternalForm());
+        if (m.matches()) {
+            return m.group(2);
+        }
+
+        throw new MalformedURLException("Expected paheal.net URL format: rule34.paheal.net/post/list/searchterm - got " + url + " instead");
+    }
+
+    @Override
+    public String getGID(URL url) throws MalformedURLException {
+        try {
+            return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
+        } catch (URISyntaxException ex) {
+            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+        }
+
+        throw new MalformedURLException("Expected paheal.net URL format: rule34.paheal.net/post/list/searchterm - got " + url + " instead");
+    }
 }
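The reformatted getTerm() is the interesting bit of this file: group 2 of gidPattern is the search term embedded in the list URL. A quick standalone check of that capture (the tag is made up):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PahealTermDemo {
    public static void main(String[] args) {
        String url = "http://rule34.paheal.net/post/list/example_tag/1"; // hypothetical
        Matcher m = Pattern.compile("^https?://(www\\.)?rule34\\.paheal\\.net/post/list/([a-zA-Z0-9$_.+!*'(),%-]+)(/.*)?(#.*)?$")
                           .matcher(url);
        if (m.matches()) {
            System.out.println(m.group(2)); // prints: example_tag
        }
    }
}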
@@ -80,7 +80,7 @@ public class PornhubRipper extends AlbumRipper {
     public void rip() throws IOException {
         int index = 0;
         String nextUrl = this.url.toExternalForm();
 
         if (albumDoc == null) {
             logger.info(" Retrieving album page " + nextUrl);
             sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
@@ -127,7 +127,7 @@ public class PornhubRipper extends AlbumRipper {
 
     /**
      * Helper class to find and download images found on "image" pages
      *
      * Handles case when site has IP-banned the user.
      */
     private class PornhubImageThread extends Thread {
@@ -144,28 +144,28 @@ public class PornhubRipper extends AlbumRipper {
         public void run() {
             fetchImage();
         }
 
         private void fetchImage() {
             try {
                 Document doc = Http.url(this.url)
                         .referrer(this.url)
                         .get();
 
                 // Find image
                 Elements images = doc.select("#photoImageSection img");
                 Element image = images.first();
                 String imgsrc = image.attr("src");
                 logger.info("Found URL " + imgsrc + " via " + images.get(0));
 
                 // Provide prefix and let the AbstractRipper "guess" the filename
                 String prefix = "";
                 if (Utils.getConfigBoolean("download.save_order", true)) {
                     prefix = String.format("%03d_", index);
                 }
 
                 URL imgurl = new URL(url, imgsrc);
                 addURLToDownload(imgurl, prefix);
 
             } catch (IOException e) {
                 logger.error("[!] Exception while loading/parsing " + this.url, e);
             }
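As a side note on the prefix logic in fetchImage(): with download.save_order enabled, each file gets a fixed-width ordinal so downloads sort in album order. A trivial illustration:

public class PrefixDemo {
    public static void main(String[] args) {
        int index = 7;
        String prefix = String.format("%03d_", index); // zero-padded to three digits
        System.out.println(prefix + "image.jpg");      // 007_image.jpg
    }
}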
@@ -26,13 +26,13 @@ public class RedditRipper extends AlbumRipper {
 
     private static final String HOST = "reddit";
     private static final String DOMAIN = "reddit.com";
 
     private static final String REDDIT_USER_AGENT = "RipMe:github/4pr0n/ripme:" + UpdateUtils.getThisJarVersion() + " (by /u/4_pr0n)";
 
     private static final int SLEEP_TIME = 2000;
 
     //private static final String USER_AGENT = "ripme by /u/4_pr0n github.com/4pr0n/ripme";
 
     private long lastRequestTime = 0;
 
     @Override
@@ -68,9 +68,9 @@ public class RedditRipper extends AlbumRipper {
         }
         waitForThreads();
     }
 
 
 
     private URL getAndParseAndReturnNext(URL url) throws IOException {
         JSONArray jsonArray = getJsonArrayFromURL(url), children;
         JSONObject json, data;
@@ -108,7 +108,7 @@ public class RedditRipper extends AlbumRipper {
         }
         return nextURL;
     }
 
     private JSONArray getJsonArrayFromURL(URL url) throws IOException {
         // Wait 2 seconds before the next request
         long timeDiff = System.currentTimeMillis() - lastRequestTime;
@@ -132,7 +132,7 @@ public class RedditRipper extends AlbumRipper {
         JSONArray jsonArray = new JSONArray();
         if (jsonObj instanceof JSONObject) {
             jsonArray.put( (JSONObject) jsonObj);
-        } else if (jsonObj instanceof JSONArray){
+        } else if (jsonObj instanceof JSONArray) {
             jsonArray = (JSONArray) jsonObj;
         } else {
             logger.warn("[!] Unable to parse JSON: " + jsonString);
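getJsonArrayFromURL() opens with the "wait 2 seconds" throttle against reddit's API. A minimal sketch of that idea in isolation (the class and method names are made up, not RipMe's):

public class RateLimiter {
    private static final int SLEEP_TIME = 2000; // same two-second window as above
    private long lastRequestTime = 0;

    // Sleep off whatever remains of the window, then stamp the request time.
    public synchronized void await() throws InterruptedException {
        long elapsed = System.currentTimeMillis() - lastRequestTime;
        if (elapsed < SLEEP_TIME) {
            Thread.sleep(SLEEP_TIME - elapsed);
        }
        lastRequestTime = System.currentTimeMillis();
    }
}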
@@ -194,9 +194,9 @@ public class TumblrRipper extends AlbumRipper {
                     try {
                         fileURL = new URL(photo.getJSONObject("original_size").getString("url"));
                         m = p.matcher(fileURL.toString());
-                        if(m.matches()) {
+                        if (m.matches()) {
                             addURLToDownload(fileURL);
-                        } else{
+                        } else {
                             URL redirectedURL = Http.url(fileURL).ignoreContentType().response().url();
                             addURLToDownload(redirectedURL);
                         }
@@ -36,7 +36,7 @@ public class TwodgalleriesRipper extends AbstractHTMLRipper {
     public String getDomain() {
         return "2dgalleries.com";
     }
 
     @Override
     public String getGID(URL url) throws MalformedURLException {
         Pattern p; Matcher m;
@@ -108,7 +108,7 @@ public class TwodgalleriesRipper extends AbstractHTMLRipper {
     public void downloadURL(URL url, int index) {
         addURLToDownload(url, getPrefix(index));
     }
 
     private void login() throws IOException {
         Response resp = Http.url(this.url).response();
         cookies = resp.cookies();
@@ -29,7 +29,7 @@ public class VidbleRipper extends AbstractHTMLRipper {
     public String getDomain() {
         return "vidble.com";
     }
 
     @Override
     public String getGID(URL url) throws MalformedURLException {
         Pattern p; Matcher m;
@@ -62,7 +62,7 @@ public class VidbleRipper extends AbstractHTMLRipper {
         for (Element img : imgs) {
             String src = img.absUrl("src");
             src = src.replaceAll("_[a-zA-Z]{3,5}", "");
 
             if (!src.equals("")) {
                 imageURLs.add(src);
             }
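The src rewrite above strips Vidble's size suffix to reach the original image. In isolation (the URL and its _med suffix are invented for the demo):

public class VidbleSrcDemo {
    public static void main(String[] args) {
        String src = "https://vidble.com/abc123_med.jpg"; // hypothetical thumbnail URL
        // Same rewrite as above: drop the size suffix to get the original.
        System.out.println(src.replaceAll("_[a-zA-Z]{3,5}", "")); // https://vidble.com/abc123.jpg
    }
}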
@@ -11,155 +11,151 @@ import java.util.regex.Pattern;
 
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
-import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
 import com.rarchives.ripme.utils.Http;
 
 public class WordpressComicRipper extends AbstractHTMLRipper {
 
     public WordpressComicRipper(URL url) throws IOException {
         super(url);
     }
 
     public static List<String> explicit_domains = Arrays.asList("www.totempole666.com",
         "buttsmithy.com", "themonsterunderthebed.net", "prismblush.com");
-    @Override
-    public String getHost() {
-        String host = url.toExternalForm().split("/")[2];
-        return host;
-    }
-
-    @Override
-    public String getDomain() {
-        String host = url.toExternalForm().split("/")[2];
-        return host;
-    }
-
-    @Override
-    public boolean canRip(URL url) {
-        String url_name = url.toExternalForm();
-        if (explicit_domains.contains(url_name.split("/")[2]) == true) {
-            Pattern totempole666Pat = Pattern.compile("https?://www\\.totempole666.com/comic/([a-zA-Z0-9_-]*)/?$");
-            Matcher totempole666Mat = totempole666Pat.matcher(url.toExternalForm());
-            if (totempole666Mat.matches()) {
-                return true;
-            }
-
-            Pattern buttsmithyPat = Pattern.compile("https?://buttsmithy.com/archives/comic/([a-zA-Z0-9_-]*)/?$");
-            Matcher buttsmithyMat = buttsmithyPat.matcher(url.toExternalForm());
-            if (buttsmithyMat.matches()) {
-                return true;
-            }
-
-            Pattern theMonsterUnderTheBedPat = Pattern.compile("https?://themonsterunderthebed.net/\\?comic=([a-zA-Z0-9_-]*)/?$");
-            Matcher theMonsterUnderTheBedMat = theMonsterUnderTheBedPat.matcher(url.toExternalForm());
-            if (theMonsterUnderTheBedMat.matches()) {
-                return true;
-            }
-
-            Pattern prismblushPat = Pattern.compile("https?://prismblush.com/comic/([a-zA-Z0-9_-]*)/?$");
-            Matcher prismblushMat = prismblushPat.matcher(url.toExternalForm());
-            if (prismblushMat.matches()) {
-                return true;
-            }
-
-        }
-        return false;
-    }
-
-    @Override
-    public String getAlbumTitle(URL url) throws MalformedURLException {
-        Pattern totempole666Pat = Pattern.compile("(?:https?://)?(?:www\\.)?totempole666.com\\/comic/([a-zA-Z0-9_-]*)/?$");
-        Matcher totempole666Mat = totempole666Pat.matcher(url.toExternalForm());
-        if (totempole666Mat.matches()) {
-            return "totempole666.com" + "_" + "The_cummoner";
-        }
-
-        Pattern buttsmithyPat = Pattern.compile("https?://buttsmithy.com/archives/comic/([a-zA-Z0-9_-]*)/?$");
-        Matcher buttsmithyMat = buttsmithyPat.matcher(url.toExternalForm());
-        if (buttsmithyMat.matches()) {
-            return "buttsmithy.com" + "_" + "Alfie";
-        }
-
-        Pattern theMonsterUnderTheBedPat = Pattern.compile("https?://themonsterunderthebed.net/?comic=([a-zA-Z0-9_-]*)/?$");
-        Matcher theMonsterUnderTheBedMat = theMonsterUnderTheBedPat.matcher(url.toExternalForm());
-        if (theMonsterUnderTheBedMat.matches()) {
-            return "themonsterunderthebed.net_TheMonsterUnderTheBed";
-        }
-
-        Pattern prismblushPat = Pattern.compile("https?://prismblush.com/comic/([a-zA-Z0-9_-]*)/?$");
-        Matcher prismblushMat = prismblushPat.matcher(url.toExternalForm());
-        if (prismblushMat.matches()) {
-            return "prismblush.com_" + prismblushMat.group(1).replaceAll("-pg-\\d+", "");
-        }
-
-        return super.getAlbumTitle(url);
-    }
-
-    @Override
-    public String getGID(URL url) throws MalformedURLException {
-        String url_name = url.toExternalForm();
-        // We shouldn't need to return any GID
-        if (explicit_domains.contains(url_name.split("/")[2]) == true) {
-            return "";
-        }
-        throw new MalformedURLException("You should never see this error message");
-    }
-
-    @Override
-    public Document getFirstPage() throws IOException {
-        // "url" is an instance field of the superclass
-        return Http.url(url).get();
-    }
-
-    @Override
-    public Document getNextPage(Document doc) throws IOException {
-        // Find next page
-        String nextPage = "";
-        Element elem = null;
-        if (explicit_domains.contains("www.totempole666.com") == true
-            || explicit_domains.contains("buttsmithy.com") == true
-            || explicit_domains.contains("themonsterunderthebed.net")
-            || explicit_domains.contains("prismblush.com")) {
-            elem = doc.select("a.comic-nav-next").first();
-            if (elem == null) {
-                throw new IOException("No more pages");
-            }
-            nextPage = elem.attr("href");
-        }
-        if (nextPage == "") {
-            throw new IOException("No more pages");
-        }
-        else {
-            return Http.url(nextPage).get();
-        }
-    }
-
-    @Override
-    public List<String> getURLsFromPage(Document doc) {
-        List<String> result = new ArrayList<String>();
-        if (explicit_domains.contains("www.totempole666.com") == true
-            || explicit_domains.contains("buttsmithy.com") == true
-            || explicit_domains.contains("themonsterunderthebed.net")
-            || explicit_domains.contains("prismblush.com")) {
-            Element elem = doc.select("div.comic-table > div#comic > a > img").first();
-            // If doc is the last page in the comic then elem.attr("src") returns null
-            // because there is no link <a> to the next page
-            if (elem == null) {
-                logger.debug("Got last page in totempole666 comic");
-                elem = doc.select("div.comic-table > div#comic > img").first();
-            }
-            result.add(elem.attr("src"));
-        }
-        return result;
-    }
-
-    @Override
-    public void downloadURL(URL url, int index) {
-        addURLToDownload(url, getPrefix(index));
-    }
-
-}
+
+    @Override
+    public String getHost() {
+        String host = url.toExternalForm().split("/")[2];
+        return host;
+    }
+
+    @Override
+    public String getDomain() {
+        String host = url.toExternalForm().split("/")[2];
+        return host;
+    }
+
+    @Override
+    public boolean canRip(URL url) {
+        String url_name = url.toExternalForm();
+        if (explicit_domains.contains(url_name.split("/")[2]) == true) {
+            Pattern totempole666Pat = Pattern.compile("https?://www\\.totempole666.com/comic/([a-zA-Z0-9_-]*)/?$");
+            Matcher totempole666Mat = totempole666Pat.matcher(url.toExternalForm());
+            if (totempole666Mat.matches()) {
+                return true;
+            }
+
+            Pattern buttsmithyPat = Pattern.compile("https?://buttsmithy.com/archives/comic/([a-zA-Z0-9_-]*)/?$");
+            Matcher buttsmithyMat = buttsmithyPat.matcher(url.toExternalForm());
+            if (buttsmithyMat.matches()) {
+                return true;
+            }
+
+            Pattern theMonsterUnderTheBedPat = Pattern.compile("https?://themonsterunderthebed.net/\\?comic=([a-zA-Z0-9_-]*)/?$");
+            Matcher theMonsterUnderTheBedMat = theMonsterUnderTheBedPat.matcher(url.toExternalForm());
+            if (theMonsterUnderTheBedMat.matches()) {
+                return true;
+            }
+
+            Pattern prismblushPat = Pattern.compile("https?://prismblush.com/comic/([a-zA-Z0-9_-]*)/?$");
+            Matcher prismblushMat = prismblushPat.matcher(url.toExternalForm());
+            if (prismblushMat.matches()) {
+                return true;
+            }
+
+        }
+
+        return false;
+    }
+
+    @Override
+    public String getAlbumTitle(URL url) throws MalformedURLException {
+        Pattern totempole666Pat = Pattern.compile("(?:https?://)?(?:www\\.)?totempole666.com\\/comic/([a-zA-Z0-9_-]*)/?$");
+        Matcher totempole666Mat = totempole666Pat.matcher(url.toExternalForm());
+        if (totempole666Mat.matches()) {
+            return "totempole666.com" + "_" + "The_cummoner";
+        }
+
+        Pattern buttsmithyPat = Pattern.compile("https?://buttsmithy.com/archives/comic/([a-zA-Z0-9_-]*)/?$");
+        Matcher buttsmithyMat = buttsmithyPat.matcher(url.toExternalForm());
+        if (buttsmithyMat.matches()) {
+            return "buttsmithy.com" + "_" + "Alfie";
+        }
+
+        Pattern theMonsterUnderTheBedPat = Pattern.compile("https?://themonsterunderthebed.net/?comic=([a-zA-Z0-9_-]*)/?$");
+        Matcher theMonsterUnderTheBedMat = theMonsterUnderTheBedPat.matcher(url.toExternalForm());
+        if (theMonsterUnderTheBedMat.matches()) {
+            return "themonsterunderthebed.net_TheMonsterUnderTheBed";
+        }
+
+        Pattern prismblushPat = Pattern.compile("https?://prismblush.com/comic/([a-zA-Z0-9_-]*)/?$");
+        Matcher prismblushMat = prismblushPat.matcher(url.toExternalForm());
+        if (prismblushMat.matches()) {
+            return "prismblush.com_" + prismblushMat.group(1).replaceAll("-pg-\\d+", "");
+        }
+
+        return super.getAlbumTitle(url);
+    }
+
+    @Override
+    public String getGID(URL url) throws MalformedURLException {
+        String url_name = url.toExternalForm();
+        // We shouldn't need to return any GID
+        if (explicit_domains.contains(url_name.split("/")[2]) == true) {
+            return "";
+        }
+        throw new MalformedURLException("You should never see this error message");
+    }
+
+    @Override
+    public Document getFirstPage() throws IOException {
+        // "url" is an instance field of the superclass
+        return Http.url(url).get();
+    }
+
+    @Override
+    public Document getNextPage(Document doc) throws IOException {
+        // Find next page
+        String nextPage = "";
+        Element elem = null;
+        if (explicit_domains.contains("www.totempole666.com") == true
+            || explicit_domains.contains("buttsmithy.com") == true
+            || explicit_domains.contains("themonsterunderthebed.net")
+            || explicit_domains.contains("prismblush.com")) {
+            elem = doc.select("a.comic-nav-next").first();
+            if (elem == null) {
+                throw new IOException("No more pages");
+            }
+            nextPage = elem.attr("href");
+        }
+
+        if (nextPage == "") {
+            throw new IOException("No more pages");
+        } else {
+            return Http.url(nextPage).get();
+        }
+    }
+
+    @Override
+    public List<String> getURLsFromPage(Document doc) {
+        List<String> result = new ArrayList<String>();
+        if (explicit_domains.contains("www.totempole666.com") == true
+            || explicit_domains.contains("buttsmithy.com") == true
+            || explicit_domains.contains("themonsterunderthebed.net")
+            || explicit_domains.contains("prismblush.com")) {
+            Element elem = doc.select("div.comic-table > div#comic > a > img").first();
+            // If doc is the last page in the comic then elem.attr("src") returns null
+            // because there is no link <a> to the next page
+            if (elem == null) {
+                logger.debug("Got last page in totempole666 comic");
+                elem = doc.select("div.comic-table > div#comic > img").first();
+            }
+            result.add(elem.attr("src"));
+        }
+        return result;
+    }
+
+    @Override
+    public void downloadURL(URL url, int index) {
+        addURLToDownload(url, getPrefix(index));
+    }
+}
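Each site in explicit_domains gets its own canRip pattern above. A standalone probe of one of them (the comic slug is invented):

import java.util.regex.Pattern;

public class ComicUrlDemo {
    public static void main(String[] args) {
        String url = "http://prismblush.com/comic/example-pg-01/"; // hypothetical
        Pattern p = Pattern.compile("https?://prismblush.com/comic/([a-zA-Z0-9_-]*)/?$");
        System.out.println(p.matcher(url).matches()); // true
    }
}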
@@ -1,4 +1,3 @@
-
 package com.rarchives.ripme.ripper.rippers;
 
 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
@@ -18,80 +17,79 @@ import java.util.regex.Pattern;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 
-/**
- *
- * @author
- */
-public class XbooruRipper extends AbstractHTMLRipper{
-    private static Pattern gidPattern=null;
+public class XbooruRipper extends AbstractHTMLRipper {
+    private static Pattern gidPattern = null;
 
     public XbooruRipper(URL url) throws IOException {
         super(url);
     }
 
     @Override
     public String getDomain() {
         return "xbooru.com";
     }
 
     @Override
     public String getHost() {
         return "xbooru";
     }
 
-    private String getPage(int num) throws MalformedURLException{
-        return "http://xbooru.com/index.php?page=dapi&s=post&q=index&pid="+num+"&tags="+getTerm(url);
-    }
-
-    @Override
-    public Document getFirstPage() throws IOException {
-        return Http.url(getPage(0)).get();
-    }
-
-    @Override
-    public Document getNextPage(Document doc) throws IOException {
-        int offset=Integer.parseInt(doc.getElementsByTag("posts").first().attr("offset"));
-        int num=Integer.parseInt(doc.getElementsByTag("posts").first().attr("count"));
-
-        if(offset+100>num)
-            return null;
-
-        return Http.url(getPage(offset/100+1)).get();
-    }
-
-    @Override
-    public List<String> getURLsFromPage(Document page) {
-        List<String> res=new ArrayList<String>(100);
-        for(Element e:page.getElementsByTag("post"))
-            res.add(e.absUrl("file_url")+"#"+e.attr("id"));
-        return res;
-    }
-
-    @Override
-    public void downloadURL(URL url, int index) {
-        addURLToDownload(url,Utils.getConfigBoolean("download.save_order",true)?url.getRef()+"-":"");
-    }
-
-    private String getTerm(URL url) throws MalformedURLException{
-        if(gidPattern==null)
-            gidPattern=Pattern.compile("^https?://(www\\.)?xbooru\\.com/(index.php)?.*([?&]tags=([a-zA-Z0-9$_.+!*'(),%-]+))(\\&|(#.*)?$)");
-
-        Matcher m = gidPattern.matcher(url.toExternalForm());
-        if(m.matches())
-            return m.group(4);
-
-        throw new MalformedURLException("Expected xbooru.com URL format: xbooru.com/index.php?tags=searchterm - got "+url+" instead");
-    }
-
-    @Override
-    public String getGID(URL url) throws MalformedURLException {
-        try {
-            return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
-        } catch (URISyntaxException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
-        }
-
-        throw new MalformedURLException("Expected xbooru.com URL format: xbooru.com/index.php?tags=searchterm - got "+url+" instead");
-    }
+    private String getPage(int num) throws MalformedURLException {
+        return "http://xbooru.com/index.php?page=dapi&s=post&q=index&pid=" + num + "&tags=" + getTerm(url);
+    }
+
+    @Override
+    public Document getFirstPage() throws IOException {
+        return Http.url(getPage(0)).get();
+    }
+
+    @Override
+    public Document getNextPage(Document doc) throws IOException {
+        int offset = Integer.parseInt(doc.getElementsByTag("posts").first().attr("offset"));
+        int num = Integer.parseInt(doc.getElementsByTag("posts").first().attr("count"));
+
+        if (offset + 100 > num) {
+            return null;
+        }
+
+        return Http.url(getPage(offset / 100 + 1)).get();
+    }
+
+    @Override
+    public List<String> getURLsFromPage(Document page) {
+        List<String> res = new ArrayList<String>(100);
+        for (Element e : page.getElementsByTag("post")) {
+            res.add(e.absUrl("file_url") + "#" + e.attr("id"));
+        }
+        return res;
+    }
+
+    @Override
+    public void downloadURL(URL url, int index) {
+        addURLToDownload(url, Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
+    }
+
+    private String getTerm(URL url) throws MalformedURLException {
+        if (gidPattern == null) {
+            gidPattern = Pattern.compile("^https?://(www\\.)?xbooru\\.com/(index.php)?.*([?&]tags=([a-zA-Z0-9$_.+!*'(),%-]+))(\\&|(#.*)?$)");
+        }
+
+        Matcher m = gidPattern.matcher(url.toExternalForm());
+        if (m.matches()) {
+            return m.group(4);
+        }
+
+        throw new MalformedURLException("Expected xbooru.com URL format: xbooru.com/index.php?tags=searchterm - got " + url + " instead");
+    }
+
+    @Override
+    public String getGID(URL url) throws MalformedURLException {
+        try {
+            return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
+        } catch (URISyntaxException ex) {
+            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+        }
+
+        throw new MalformedURLException("Expected xbooru.com URL format: xbooru.com/index.php?tags=searchterm - got " + url + " instead");
+    }
 }
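getPage() above targets the booru-style dapi endpoint, and getNextPage() assumes 100 posts per response (hence offset / 100 + 1). A sketch of the URLs that produces (the tag is made up):

public class XbooruPageDemo {
    public static void main(String[] args) {
        String term = "example_tag"; // hypothetical search term
        for (int pid = 0; pid < 3; pid++) {
            System.out.println("http://xbooru.com/index.php?page=dapi&s=post&q=index&pid=" + pid + "&tags=" + term);
        }
    }
}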
@@ -4,7 +4,6 @@ import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -14,10 +13,8 @@ import java.util.regex.Pattern;
 import org.jsoup.Connection.Response;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
-import org.jsoup.select.Elements;
 
 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Http;
 
 public class ZizkiRipper extends AbstractHTMLRipper {
@@ -3,23 +3,27 @@ package com.rarchives.ripme.ripper.rippers.ripperhelpers;
 import java.util.List;
 
 public class ChanSite {
-    //The domains where the threads are hosted.
+    // The domains where the threads are hosted.
     public List<String> domains;
-    //The domains where the images are hosted.
+    // The domains where the images are hosted.
     public List<String> cdnDomains;
 
-    public ChanSite(List<String> Domains, List<String> CdnDomains){
-        if(Domains.isEmpty())
+    public ChanSite(List<String> Domains, List<String> CdnDomains) {
+        if (Domains.isEmpty()) {
             throw new IllegalArgumentException("Domains");
-        if(CdnDomains.isEmpty())
+        }
+        if (CdnDomains.isEmpty()) {
             throw new IllegalArgumentException("CdnDomains");
+        }
         domains = Domains;
         cdnDomains = CdnDomains;
     }
-    public ChanSite(List<String> Domains){
-        if(Domains.isEmpty())
+
+    public ChanSite(List<String> Domains) {
+        if (Domains.isEmpty()) {
             throw new IllegalArgumentException("Domains");
+        }
         domains = Domains;
         cdnDomains = Domains;
     }
 }
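With the braces normalized, ChanSite's two constructors read cleanly: one takes separate thread and CDN domains, the other reuses the thread domains for both. A usage sketch, assuming the class above is on the classpath (the domains are invented):

import java.util.Arrays;

public class ChanSiteDemo {
    public static void main(String[] args) {
        ChanSite split = new ChanSite(Arrays.asList("boards.example.net"),
                                      Arrays.asList("cdn.example.net"));
        ChanSite shared = new ChanSite(Arrays.asList("boards.example.net"));
        System.out.println(split.cdnDomains);  // [cdn.example.net]
        System.out.println(shared.cdnDomains); // [boards.example.net]
    }
}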
@@ -39,13 +39,13 @@ public class tamindirmp3 extends AbstractHTMLRipper {
         throw new MalformedURLException("Expected tamindir.com URL format: " +
                         "tamindir.com/files/albumid - got " + url + "instead");
     }
 
     @Override
     public Document getFirstPage() throws IOException {
         return Http.url(url).get();
 
     }
 
     @Override
     public List<String> getURLsFromPage(Document doc) {
         List<String> music = new ArrayList<String>();
@@ -33,7 +33,7 @@ public class CliphunterRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
@@ -29,7 +29,7 @@ public class GfycatRipper extends VideoRipper {
     public boolean canRip(URL url) {
         return url.getHost().endsWith(HOST);
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
@@ -69,9 +69,9 @@ public class PornhubRipper extends VideoRipper {
 
         vidUrl = null;
         for (String quality : new String[] {"quality_1080p", "quality_720p", "quality_480p", "quality_240p"}) {
             Pattern pv = Pattern.compile("^.*var player_" + quality + " = '([^']*)'.*$", Pattern.DOTALL);
             Matcher mv = pv.matcher(html);
             if (mv.matches()) {
                 vidUrl = mv.group(1);
                 break;
             }
@@ -32,7 +32,7 @@ public class VidearnRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
@@ -32,7 +32,7 @@ public class VineRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
@@ -30,7 +30,7 @@ public class VkRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
@@ -57,7 +57,7 @@ public class VkRipper extends VideoRipper {
         addURLToDownload(new URL(videoURL), HOST + "_" + getGID(this.url));
         waitForThreads();
     }
 
     public static String getVideoURLAtPage(String url) throws IOException {
         Document doc = Http.url(url)
                           .userAgent(USER_AGENT)
@@ -31,7 +31,7 @@ public class XhamsterRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
@@ -33,7 +33,7 @@ public class XvideosRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
@@ -32,7 +32,7 @@ public class YoupornRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
 
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
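The PornhubRipper hunk above walks the quality list highest-first and keeps the first player_ variable that matches. The same scan in isolation (the page source is invented):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class QualityScanDemo {
    public static void main(String[] args) {
        String html = "var player_quality_720p = 'http://cdn.example.com/v.mp4';"; // hypothetical page source
        for (String quality : new String[] {"quality_1080p", "quality_720p", "quality_480p", "quality_240p"}) {
            Matcher mv = Pattern.compile("^.*var player_" + quality + " = '([^']*)'.*$", Pattern.DOTALL).matcher(html);
            if (mv.matches()) {
                System.out.println(quality + " -> " + mv.group(1));
                break; // first hit is the best available quality
            }
        }
    }
}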
@@ -14,7 +14,7 @@ import static com.rarchives.ripme.App.logger;
 
 public class ClipboardUtils {
     private static AutoripThread autoripThread = new AutoripThread();
 
     public static void setClipboardAutoRip(boolean enabled) {
         if (enabled) {
             autoripThread.kill();
@@ -44,7 +44,7 @@ public class ClipboardUtils {
             e.printStackTrace();
         } catch (IOException e) {
             e.printStackTrace();
         }
         return null;
     }
 }
@@ -61,15 +61,15 @@ class AutoripThread extends Thread {
                 String clipboard = ClipboardUtils.getClipboardString();
                 if (clipboard != null) {
                     Pattern p = Pattern.compile(
                             "\\b(((ht|f)tp(s?)\\:\\/\\/|~\\/|\\/)|www.)" +
                             "(\\w+:\\w+@)?(([-\\w]+\\.)+(com|org|net|gov" +
                             "|mil|biz|info|mobi|name|aero|jobs|museum" +
                             "|travel|[a-z]{2}))(:[\\d]{1,5})?" +
                             "(((\\/([-\\w~!$+|.,=]|%[a-f\\d]{2})+)+|\\/)+|\\?|#)?" +
                             "((\\?([-\\w~!$+|.,*:]|%[a-f\\d{2}])+=?" +
                             "([-\\w~!$+|.,*:=]|%[a-f\\d]{2})*)" +
                             "(&(?:[-\\w~!$+|.,*:]|%[a-f\\d{2}])+=?" +
                             "([-\\w~!$+|.,*:=]|%[a-f\\d]{2})*)*)*" +
                             "(#([-\\w~!$+|.,*:=]|%[a-f\\d]{2})*)?\\b");
                     Matcher m = p.matcher(clipboard);
                     while (m.find()) {
@@ -87,7 +87,7 @@ class AutoripThread extends Thread {
             e.printStackTrace();
         }
     }
 
     public void kill() {
         isRunning = false;
     }
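AutoripThread's big pattern above scans clipboard text for anything URL-shaped. The find() loop it drives looks like this in miniature (with a deliberately simplified pattern, not the one above):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ClipboardScanDemo {
    public static void main(String[] args) {
        String clipboard = "see http://example.com/a and https://example.org/b"; // stand-in text
        Matcher m = Pattern.compile("https?://\\S+").matcher(clipboard);
        while (m.find()) {
            System.out.println("Would queue: " + m.group());
        }
    }
}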
@@ -27,12 +27,12 @@ public class QueueMenuMouseListener extends MouseAdapter {
         removeSelected = new AbstractAction("Remove Selected") {
             @Override
             public void actionPerformed(ActionEvent ae) {
                 Object o = queueList.getSelectedValue();
                 while (o != null) {
                     queueListModel.removeElement(o);
                     o = queueList.getSelectedValue();
                 }
                 updateUI();
             }
         };
         popup.add(removeSelected);
@@ -40,8 +40,8 @@ public class QueueMenuMouseListener extends MouseAdapter {
         clearQueue = new AbstractAction("Remove All") {
             @Override
             public void actionPerformed(ActionEvent ae) {
                 queueListModel.removeAllElements();
                 updateUI();
             }
         };
         popup.add(clearQueue);
@@ -10,7 +10,7 @@ public class RipStatusMessage {
         DOWNLOAD_STARTED("Download Started"),
         DOWNLOAD_COMPLETE("Download Complete"),
         DOWNLOAD_ERRORED("Download Errored"),
         RIP_COMPLETE("Rip Complete"),
         DOWNLOAD_WARN("Download problem"),
         TOTAL_BYTES("Total bytes"),
         COMPLETED_BYTES("Completed bytes"),
@@ -29,11 +29,11 @@ public class RipStatusMessage {
         this.status = status;
         this.object = object;
     }
 
     public STATUS getStatus() {
         return status;
     }
 
     public Object getObject() {
         return object;
     }
@@ -3,16 +3,15 @@ package com.rarchives.ripme.utils;
 /**
  * Base64 encoder/decoder
  * From http://stackoverflow.com/a/4265472
- *
  */
 public class Base64 {
     private final static char[] ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".toCharArray();
 
     private static int[] toInt = new int[128];
 
     static {
-        for(int i=0; i< ALPHABET.length; i++){
-            toInt[ALPHABET[i]]= i;
+        for (int i = 0; i < ALPHABET.length; i++) {
+            toInt[ALPHABET[i]] = i;
         }
     }
 
@@ -22,12 +21,12 @@ public class Base64 {
      * @param buf the byte array (not null)
      * @return the translated Base64 string (not null)
      */
-    public static String encode(byte[] buf){
+    public static String encode(byte[] buf) {
         int size = buf.length;
         char[] ar = new char[((size + 2) / 3) * 4];
         int a = 0;
-        int i=0;
-        while(i < size){
+        int i = 0;
+        while (i < size) {
             byte b0 = buf[i++];
             byte b1 = (i < size) ? buf[i++] : 0;
             byte b2 = (i < size) ? buf[i++] : 0;
@@ -38,9 +37,9 @@ public class Base64 {
             ar[a++] = ALPHABET[((b1 << 2) | ((b2 & 0xFF) >> 6)) & mask];
             ar[a++] = ALPHABET[b2 & mask];
         }
-        switch(size % 3){
+        switch (size % 3) {
             case 1: ar[--a] = '=';
             case 2: ar[--a] = '=';
         }
         return new String(ar);
     }
@@ -51,26 +50,26 @@ public class Base64 {
      * @param s the Base64 string (not null)
      * @return the byte array (not null)
      */
-    public static byte[] decode(String s){
-        int delta = s.endsWith( "==" ) ? 2 : s.endsWith( "=" ) ? 1 : 0;
-        byte[] buffer = new byte[s.length()*3/4 - delta];
+    public static byte[] decode(String s) {
+        int delta = s.endsWith("==") ? 2 : s.endsWith("=") ? 1 : 0;
+        byte[] buffer = new byte[s.length() * 3 / 4 - delta];
         int mask = 0xFF;
         int index = 0;
-        for(int i=0; i< s.length(); i+=4){
-            int c0 = toInt[s.charAt( i )];
-            int c1 = toInt[s.charAt( i + 1)];
-            buffer[index++]= (byte)(((c0 << 2) | (c1 >> 4)) & mask);
-            if(index >= buffer.length){
+        for (int i = 0; i < s.length(); i += 4) {
+            int c0 = toInt[s.charAt(i)];
+            int c1 = toInt[s.charAt(i + 1)];
+            buffer[index++] = (byte) (((c0 << 2) | (c1 >> 4)) & mask);
+            if (index >= buffer.length) {
                 return buffer;
             }
-            int c2 = toInt[s.charAt( i + 2)];
-            buffer[index++]= (byte)(((c1 << 4) | (c2 >> 2)) & mask);
-            if(index >= buffer.length){
+            int c2 = toInt[s.charAt(i + 2)];
+            buffer[index++] = (byte) (((c1 << 4) | (c2 >> 2)) & mask);
+            if (index >= buffer.length) {
                 return buffer;
             }
-            int c3 = toInt[s.charAt( i + 3 )];
-            buffer[index++]= (byte)(((c2 << 6) | c3) & mask);
+            int c3 = toInt[s.charAt(i + 3)];
+            buffer[index++] = (byte) (((c2 << 6) | c3) & mask);
         }
         return buffer;
     }
 }
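A round trip through the reformatted encoder/decoder, assuming the Base64 class above is in scope (note it shadows java.util.Base64 when referenced unqualified):

import java.nio.charset.StandardCharsets;

public class Base64Demo {
    public static void main(String[] args) {
        byte[] data = "ripme".getBytes(StandardCharsets.UTF_8);
        String encoded = Base64.encode(data);   // the class above, not java.util's
        byte[] decoded = Base64.decode(encoded);
        System.out.println(encoded + " -> " + new String(decoded, StandardCharsets.UTF_8));
    }
}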
@ -23,7 +23,6 @@ import com.rarchives.ripme.ripper.rippers.VidbleRipper;
|
|||||||
import com.rarchives.ripme.ripper.rippers.video.GfycatRipper;
|
import com.rarchives.ripme.ripper.rippers.video.GfycatRipper;
|
||||||
import com.rarchives.ripme.ripper.rippers.EroShareRipper;
|
import com.rarchives.ripme.ripper.rippers.EroShareRipper;
|
||||||
|
|
||||||
|
|
||||||
public class RipUtils {
|
public class RipUtils {
|
||||||
private static final Logger logger = Logger.getLogger(RipUtils.class);
|
private static final Logger logger = Logger.getLogger(RipUtils.class);
|
||||||
|
|
||||||
@ -32,7 +31,7 @@ public class RipUtils {
|
|||||||
|
|
||||||
logger.debug("Checking " + url);
|
logger.debug("Checking " + url);
|
||||||
// Imgur album
|
// Imgur album
|
||||||
if ((url.getHost().endsWith("imgur.com"))
|
if ((url.getHost().endsWith("imgur.com"))
|
||||||
&& url.toExternalForm().contains("imgur.com/a/")) {
|
&& url.toExternalForm().contains("imgur.com/a/")) {
|
||||||
try {
|
try {
|
||||||
logger.debug("Fetching imgur album at " + url);
|
logger.debug("Fetching imgur album at " + url);
|
||||||
@ -47,7 +46,7 @@ public class RipUtils {
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
else if (url.getHost().endsWith("imgur.com") && url.toExternalForm().contains(",")) {
|
else if (url.getHost().endsWith("imgur.com") && url.toExternalForm().contains(",")) {
|
||||||
// Imgur image series.
|
// Imgur image series.
|
||||||
try {
|
try {
|
||||||
logger.debug("Fetching imgur series at " + url);
|
logger.debug("Fetching imgur series at " + url);
|
||||||
ImgurAlbum imgurAlbum = ImgurRipper.getImgurSeries(url);
|
ImgurAlbum imgurAlbum = ImgurRipper.getImgurSeries(url);
|
||||||
@ -91,7 +90,7 @@ public class RipUtils {
|
|||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
Pattern p = Pattern.compile("https?://i.reddituploads.com/([a-zA-Z0-9]+)\\?.*");
|
Pattern p = Pattern.compile("https?://i.reddituploads.com/([a-zA-Z0-9]+)\\?.*");
|
||||||
Matcher m = p.matcher(url.toExternalForm());
|
Matcher m = p.matcher(url.toExternalForm());
|
||||||
if (m.matches()) {
|
if (m.matches()) {
|
||||||
@ -117,9 +116,9 @@ public class RipUtils {
|
|||||||
logger.error("[!] Not a valid URL: '" + url + "'", e);
|
logger.error("[!] Not a valid URL: '" + url + "'", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (url.getHost().equals("imgur.com") ||
|
if (url.getHost().equals("imgur.com") ||
|
||||||
url.getHost().equals("m.imgur.com")){
|
url.getHost().equals("m.imgur.com")) {
|
||||||
try {
|
try {
|
||||||
// Fetch the page
|
// Fetch the page
|
||||||
Document doc = Jsoup.connect(url.toExternalForm())
|
Document doc = Jsoup.connect(url.toExternalForm())
|
||||||
@ -138,13 +137,13 @@ public class RipUtils {
|
|||||||
} catch (IOException ex) {
|
} catch (IOException ex) {
|
||||||
logger.error("[!] Error", ex);
|
logger.error("[!] Error", ex);
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.error("[!] Unable to rip URL: " + url);
|
logger.error("[!] Unable to rip URL: " + url);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static Pattern getURLRegex() {
|
public static Pattern getURLRegex() {
|
||||||
return Pattern.compile("(https?://[a-zA-Z0-9\\-\\.]+\\.[a-zA-Z]{2,3}(/\\S*))");
|
return Pattern.compile("(https?://[a-zA-Z0-9\\-\\.]+\\.[a-zA-Z]{2,3}(/\\S*))");
|
||||||
}
|
}
|
||||||
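As an illustration (not part of the commit), the pattern returned by getURLRegex() can be used to pull candidate URLs out of free text. Note that the {2,3} quantifier limits full matches to two- or three-letter TLDs, and the final group requires a path component. The sample text below is made up.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class UrlRegexDemo {
    public static void main(String[] args) {
        Pattern p = Pattern.compile("(https?://[a-zA-Z0-9\\-\\.]+\\.[a-zA-Z]{2,3}(/\\S*))");
        Matcher m = p.matcher("see http://imgur.com/a/abc123 and https://vine.co/u/123");
        while (m.find()) {
            System.out.println(m.group(1)); // prints each matched URL
        }
    }
}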
@@ -165,18 +164,6 @@ public class RipUtils {
         if (url == null) url = urlFromSiteDirectoryName(dir, "vinebox", "http://finebox.co/u/", "");
         if (url == null) url = urlFromSiteDirectoryName(dir, "imgbox", "http://imgbox.com/g/", "");
         if (url == null) url = urlFromSiteDirectoryName(dir, "modelmayhem", "http://www.modelmayhem.com/", "");
-        /*
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
-        */
         //if (url == null) url = urlFromSiteDirectoryName(dir, "8muses", "http://www.8muses.com/index/category/", "");
         return url;
     }
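The helper behind these calls is not shown in this diff; judging from the call sites, it appears to match a rip directory name against a site tag and rebuild the source URL from a prefix, the rest of the directory name, and a suffix. A hypothetical sketch, where the method name, return type, and the sample directory "imgbox_g4BX2" are all invented for illustration:

public class DirNameSketch {
    static String urlFromSiteDirectoryNameSketch(String dir, String site, String urlPrefix, String urlSuffix) {
        if (!dir.startsWith(site + "_")) {
            return null; // directory was not ripped from this site
        }
        String id = dir.substring(site.length() + 1); // "imgbox_g4BX2" -> "g4BX2"
        return urlPrefix + id + urlSuffix;            // -> "http://imgbox.com/g/g4BX2"
    }

    public static void main(String[] args) {
        System.out.println(urlFromSiteDirectoryNameSketch("imgbox_g4BX2", "imgbox", "http://imgbox.com/g/", ""));
    }
}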
@@ -248,9 +235,8 @@ public class RipUtils {
         List<String> fields = Arrays.asList(dir.split("_"));
         String album = fields.get(1);
         String url = "http://";
-        if ( (fields.contains("top") || fields.contains("new"))
-                && (fields.contains("year") || fields.contains("month") || fields.contains("week") || fields.contains("all"))
-        ) {
+        if ((fields.contains("top") || fields.contains("new"))
+                && (fields.contains("year") || fields.contains("month") || fields.contains("week") || fields.contains("all"))) {
             // Subreddit
             fields.remove(0); // "imgur"
             String sub = "";
@@ -34,7 +34,6 @@ import com.rarchives.ripme.ripper.AbstractRipper;
  * Common utility functions used in various places throughout the project.
  */
 public class Utils {
-
     public static final String RIP_DIRECTORY = "rips";
     private static final String configFile = "rip.properties";
     private static final Logger logger = Logger.getLogger(Utils.class);
@@ -50,7 +49,7 @@ public class Utils {
         }
         config = new PropertiesConfiguration(configPath);
         logger.info("Loaded " + config.getPath());
-        if (f.exists()){
+        if (f.exists()) {
             // Config was loaded from file
             if ( !config.containsKey("twitter.auth")
                     || !config.containsKey("twitter.max_requests")
@@ -171,16 +170,18 @@ public class Utils {
     public static String stripURLParameter(String url, String parameter) {
         int paramIndex = url.indexOf("?" + parameter);
         boolean wasFirstParam = true;
-        if(paramIndex < 0) {
+        if (paramIndex < 0) {
             wasFirstParam = false;
             paramIndex = url.indexOf("&" + parameter);
         }
 
-        if(paramIndex > 0) {
+        if (paramIndex > 0) {
             int nextParam = url.indexOf("&", paramIndex+1);
-            if(nextParam != -1) {
+            if (nextParam != -1) {
                 String c = "&";
-                if(wasFirstParam) c = "?";
+                if (wasFirstParam) {
+                    c = "?";
+                }
                 url = url.substring(0, paramIndex) + c + url.substring(nextParam+1, url.length());
             } else {
                 url = url.substring(0, paramIndex);
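A brief usage sketch for stripURLParameter as reformatted above; it assumes the ripme Utils class is on the classpath, the URLs are invented, and the expected outputs are traced from the method body rather than taken from documentation.

import com.rarchives.ripme.utils.Utils;

public class StripParamDemo {
    public static void main(String[] args) {
        // the stripped parameter came first, so "?" is restored for the next one
        System.out.println(Utils.stripURLParameter("http://x.com/p?size=large&id=7", "size"));
        // -> http://x.com/p?id=7

        // a trailing parameter is simply cut off
        System.out.println(Utils.stripURLParameter("http://x.com/p?id=7&size=large", "size"));
        // -> http://x.com/p?id=7
    }
}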
@@ -250,10 +251,10 @@ public class Utils {
             jarPath = URLDecoder.decode(jarPath, "UTF-8");
             JarFile jarFile = new JarFile(jarPath);
             Enumeration<JarEntry> entries = jarFile.entries();
-            while(entries.hasMoreElements()) {
+            while (entries.hasMoreElements()) {
                 JarEntry nextElement = entries.nextElement();
                 String entryName = nextElement.getName();
-                if(entryName.startsWith(relPath)
+                if (entryName.startsWith(relPath)
                         && entryName.length() > (relPath.length() + "/".length())
                         && !nextElement.isDirectory()) {
                     String className = entryName.replace('/', '.').replace('\\', '.').replace(".class", "");
@@ -393,7 +394,7 @@ public class Utils {
 
     /**
      * Parses an URL query
      *
      * @param query
      *            The query part of an URL
      * @return The map of all query parameters
@@ -401,7 +402,7 @@ public class Utils {
     public static Map<String,String> parseUrlQuery(String query) {
         Map<String,String> res = new HashMap<String, String>();
 
-        if (query.equals("")){
+        if (query.equals("")) {
             return res;
         }
 
@@ -410,9 +411,9 @@ public class Utils {
 
         try {
             for (String part : parts) {
-                if ((pos = part.indexOf('=')) >= 0){
+                if ((pos = part.indexOf('=')) >= 0) {
                     res.put(URLDecoder.decode(part.substring(0, pos), "UTF-8"), URLDecoder.decode(part.substring(pos + 1), "UTF-8"));
-                }else{
+                } else {
                     res.put(URLDecoder.decode(part, "UTF-8"), "");
                 }
             }
@@ -426,7 +427,7 @@ public class Utils {
 
     /**
      * Parses an URL query and returns the requested parameter's value
      *
      * @param query
      *            The query part of an URL
      * @param key
@@ -434,7 +435,7 @@ public class Utils {
      * @return The associated value or null if key wasn't found
      */
     public static String parseUrlQuery(String query, String key) {
-        if (query.equals("")){
+        if (query.equals("")) {
             return null;
         }
 
@@ -444,7 +445,7 @@ public class Utils {
         try {
             for (String part : parts) {
                 if ((pos = part.indexOf('=')) >= 0) {
-                    if (URLDecoder.decode(part.substring(0, pos), "UTF-8").equals(key)){
+                    if (URLDecoder.decode(part.substring(0, pos), "UTF-8").equals(key)) {
                         return URLDecoder.decode(part.substring(pos + 1), "UTF-8");
                     }
 
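For context, the two parseUrlQuery overloads above URL-decode each key=value pair of the query. A usage sketch, assuming the ripme Utils class is on the classpath and with made-up query strings; a key without '=' maps to the empty string (the else branch above), and the keyed overload returns null when the key is absent (per its javadoc).

import java.util.Map;
import com.rarchives.ripme.utils.Utils;

public class ParseQueryDemo {
    public static void main(String[] args) {
        Map<String, String> params = Utils.parseUrlQuery("viewkey=993166542&page=2&flag");
        System.out.println(params.get("viewkey")); // 993166542
        System.out.println(params.get("flag"));    // "" (empty string)

        System.out.println(Utils.parseUrlQuery("viewkey=993166542&page=2", "page")); // 2
        System.out.println(Utils.parseUrlQuery("viewkey=993166542", "missing"));     // null
    }
}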
@@ -106,7 +106,7 @@ public class BasicRippersTest extends RippersTest {
         testRipper(ripper);
     }
     */
 
     public void test500pxAlbum() throws IOException {
         FivehundredpxRipper ripper = new FivehundredpxRipper(new URL("https://marketplace.500px.com/alexander_hurman"));
         testRipper(ripper);
@@ -261,7 +261,7 @@ public class BasicRippersTest extends RippersTest {
         AbstractRipper ripper = new VineRipper(new URL("https://vine.co/u/954440445776334848"));
         testRipper(ripper);
     }
 
     public void testVkSubalbumRip() throws IOException {
         VkRipper ripper = new VkRipper(new URL("http://vk.com/album45506334_0"));
         testRipper(ripper);
@@ -10,7 +10,7 @@ import java.util.Map;
 import com.rarchives.ripme.ripper.rippers.InstagramRipper;
 
 public class InstagramRipperTest extends RippersTest {
 
     public void testInstagramGID() throws IOException {
         Map<URL, String> testURLs = new HashMap<URL, String>();
         testURLs.put(new URL("http://instagram.com/Test_User"), "Test_User");
@@ -12,7 +12,7 @@ import com.rarchives.ripme.ripper.rippers.video.XvideosRipper;
 import com.rarchives.ripme.ripper.rippers.video.YoupornRipper;
 
 public class VideoRippersTest extends RippersTest {
 
     /**
      * Helper method for testing a video ripper
      * @param ripper The video ripper
@@ -44,7 +44,7 @@ public class VideoRippersTest extends RippersTest {
             videoTestHelper(ripper);
         }
     }
 
     public void testPornhubRipper() throws IOException {
         List<URL> contentURLs = new ArrayList<URL>();
         contentURLs.add(new URL("http://www.pornhub.com/view_video.php?viewkey=993166542"));
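The last hunk cuts off inside testPornhubRipper. Based on the pattern the surrounding video tests follow (build a ripper per URL, hand it to videoTestHelper), the body presumably continues along these lines; this is a sketch, not the verbatim source, and PornhubRipper is inferred from the URL rather than shown in this diff.

    public void testPornhubRipper() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("http://www.pornhub.com/view_video.php?viewkey=993166542"));
        for (URL url : contentURLs) {
            PornhubRipper ripper = new PornhubRipper(url); // assumed ripper class for this URL
            videoTestHelper(ripper);
        }
    }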