Various fixes to tests:
- Ability to set log level, lots of debugging messages
- Turn on debug logging during tests, simplified test cases for HTML ripper
- Fixed Fuskator ripper, added test
- Fixed Gifyo ripper, added test
- Added tests for *all* rippers
- Added a few album-guessing URLs
parent 41842c0850
commit c5ea044f79
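The recurring change in the diff below is a test-mode hook on AbstractRipper: the test harness calls markAsTest(), and each ripper's loops consult isThisATest() to trim the rip to a single image and a single page. A minimal sketch of that pattern follows; only markAsTest() and isThisATest() mirror the real AbstractRipper, the surrounding class and helper names are illustrative stand-ins.

import java.io.IOException;
import java.util.List;

// Illustrative sketch of the test-mode pattern applied throughout this commit.
public abstract class SketchRipper {
    private boolean thisIsATest = false;

    /** Called by the test harness before rip() so test rips stay small. */
    public void markAsTest() {
        thisIsATest = true;
    }

    public boolean isThisATest() {
        return thisIsATest;
    }

    public void rip() throws IOException {
        List<String> imageURLs = getURLsFromPage();
        // Test rips keep only the first image...
        if (isThisATest()) {
            while (imageURLs.size() > 1) {
                imageURLs.remove(1);
            }
        }
        for (String imageURL : imageURLs) {
            downloadURL(imageURL);
            // ...and stop after the first download / first page.
            if (isThisATest()) {
                break;
            }
        }
    }

    protected abstract List<String> getURLsFromPage() throws IOException;
    protected abstract void downloadURL(String imageURL);
}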
@@ -66,6 +66,12 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {

        while (doc != null) {
            List<String> imageURLs = getURLsFromPage(doc);
            // Remove all but 1 image
            if (isThisATest()) {
                while (imageURLs.size() > 1) {
                    imageURLs.remove(1);
                }
            }

            if (imageURLs.size() == 0) {
                throw new IOException("No images found at " + doc.location());
@@ -73,12 +79,14 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {

            for (String imageURL : imageURLs) {
                index += 1;
                logger.debug("Found image url #" + index + ": " + imageURL);
                downloadURL(new URL(imageURL), index);
                if (isStopped()) {
                    break;
                }
            }
            if (hasDescriptionSupport()) {
                logger.debug("Fetching description(s) from " + doc.location());
                List<String> textURLs = getDescriptionsFromPage(doc);
                if (textURLs.size() > 0) {
                    for (String textURL : textURLs) {
@@ -86,15 +94,17 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
                            break;
                        }
                        textindex += 1;
                        logger.debug("Getting decription from " + textURL);
                        String tempDesc = getDescription(textURL);
                        if (tempDesc != null) {
                            logger.debug("Got description: " + tempDesc);
                            saveText(new URL(textURL), "", tempDesc, textindex);
                        }
                    }
                }
            }

            if (isStopped()) {
            if (isStopped() || isThisATest()) {
                break;
            }

@@ -109,6 +119,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {

        // If they're using a thread pool, wait for it.
        if (getThreadPool() != null) {
            logger.debug("Waiting for threadpool " + getThreadPool().getClass().getName());
            getThreadPool().waitForThreads();
        }
        waitForThreads();
@@ -55,6 +55,12 @@ public abstract class AbstractJSONRipper extends AlbumRipper {

        while (json != null) {
            List<String> imageURLs = getURLsFromJSON(json);
            // Remove all but 1 image
            if (isThisATest()) {
                while (imageURLs.size() > 1) {
                    imageURLs.remove(1);
                }
            }

            if (imageURLs.size() == 0) {
                throw new IOException("No images found at " + this.url);
@@ -65,10 +71,11 @@ public abstract class AbstractJSONRipper extends AlbumRipper {
                    break;
                }
                index += 1;
                logger.debug("Found image url #" + index+ ": " + imageURL);
                downloadURL(new URL(imageURL), index);
            }

            if (isStopped()) {
            if (isStopped() || isThisATest()) {
                break;
            }

@@ -83,6 +90,7 @@ public abstract class AbstractJSONRipper extends AlbumRipper {

        // If they're using a thread pool, wait for it.
        if (getThreadPool() != null) {
            logger.debug("Waiting for threadpool " + getThreadPool().getClass().getName());
            getThreadPool().waitForThreads();
        }
        waitForThreads();
@@ -102,8 +102,10 @@ public abstract class AbstractRipper
        try {
            stopCheck();
        } catch (IOException e) {
            logger.debug("Ripper has been stopped");
            return false;
        }
        logger.debug("url: " + url + ", prefix: " + prefix + ", subdirectory" + subdirectory + ", referrer: " + referrer + ", cookies: " + cookies);
        String saveAs = url.toExternalForm();
        saveAs = saveAs.substring(saveAs.lastIndexOf('/')+1);
        if (saveAs.indexOf('?') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf('?')); }
@@ -163,6 +165,7 @@ public abstract class AbstractRipper
     * Waits for downloading threads to complete.
     */
    protected void waitForThreads() {
        logger.debug("Waiting for threads to finish");
        completed = false;
        threadPool.waitForThreads();
        checkIfComplete();
@@ -212,6 +215,7 @@ public abstract class AbstractRipper
     */
    protected void checkIfComplete() {
        if (observer == null) {
            logger.debug("observer is null");
            return;
        }

@@ -226,6 +230,7 @@ public abstract class AbstractRipper
            Logger rootLogger = Logger.getRootLogger();
            FileAppender fa = (FileAppender) rootLogger.getAppender("FILE");
            if (fa != null) {
                logger.debug("Changing log file back to 'ripme.log'");
                fa.setFile("ripme.log");
                fa.activateOptions();
            }
@@ -272,6 +277,7 @@ public abstract class AbstractRipper
        for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers")) {
            try {
                AlbumRipper ripper = (AlbumRipper) constructor.newInstance(url);
                logger.debug("Found album ripper: " + ripper.getClass().getName());
                return ripper;
            } catch (Exception e) {
                // Incompatible rippers *will* throw exceptions during instantiation.
@@ -280,6 +286,7 @@ public abstract class AbstractRipper
        for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers.video")) {
            try {
                VideoRipper ripper = (VideoRipper) constructor.newInstance(url);
                logger.debug("Found video ripper: " + ripper.getClass().getName());
                return ripper;
            } catch (Exception e) {
                // Incompatible rippers *will* throw exceptions during instantiation.
@@ -355,6 +362,7 @@ public abstract class AbstractRipper

    public boolean sleep(int milliseconds) {
        try {
            logger.debug("Sleeping " + milliseconds + "ms");
            Thread.sleep(milliseconds);
            return true;
        } catch (InterruptedException e) {
@@ -372,6 +380,7 @@ public abstract class AbstractRipper

    /** Methods for detecting when we're running a test. */
    public void markAsTest() {
        logger.debug("THIS IS A TEST RIP");
        thisIsATest = true;
    }
    public boolean isThisATest() {
@@ -175,6 +175,7 @@ public abstract class AlbumRipper extends AbstractRipper {
        } else {
            title = super.getAlbumTitle(this.url);
        }
        logger.debug("Using album title '" + title + "'");
        title = Utils.filesystemSafe(title);
        path += title + File.separator;
        this.workingDir = new File(path);
@@ -111,9 +111,11 @@ public class DownloadFileThread extends Thread {
                    cookie += key + "=" + cookies.get(key);
                }
                huc.setRequestProperty("Cookie", cookie);
                logger.debug("Request properties: " + huc.getRequestProperties());
                huc.connect();

                int statusCode = huc.getResponseCode();
                logger.debug("Status code: " + statusCode);
                if (statusCode / 100 == 3) { // 3xx Redirect
                    if (!redirected) {
                        // Don't increment retries on the first redirect
@@ -148,12 +150,14 @@ public class DownloadFileThread extends Thread {
                IOUtils.copy(bis, fos);
                break; // Download successful: break out of infinite loop
            } catch (HttpStatusException hse) {
                logger.debug("HTTP status exception", hse);
                logger.error("[!] HTTP status " + hse.getStatusCode() + " while downloading from " + urlToDownload);
                if (hse.getStatusCode() == 404 && Utils.getConfigBoolean("errors.skip404", false)) {
                    observer.downloadErrored(url, "HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
                    return;
                }
            } catch (IOException e) {
                logger.debug("IOException", e);
                logger.error("[!] Exception while downloading file: " + url + " - " + e.getMessage());
            } finally {
                // Close any open streams
@@ -71,7 +71,7 @@ public class DownloadVideoThread extends Thread {
        }
        observer.setBytesTotal(bytesTotal);
        observer.sendUpdate(STATUS.TOTAL_BYTES, bytesTotal);
        logger.info("Size of file at " + this.url + " = " + bytesTotal + "b");
        logger.debug("Size of file at " + this.url + " = " + bytesTotal + "b");

        int tries = 0; // Number of attempts to download
        do {
@@ -95,6 +95,7 @@ public class DownloadVideoThread extends Thread {
                huc.setRequestProperty("Referer", this.url.toExternalForm()); // Sic
                huc.setRequestProperty("User-agent", AbstractRipper.USER_AGENT);
                tries += 1;
                logger.debug("Request properties: " + huc.getRequestProperties().toString());
                huc.connect();
                // Check status code
                bis = new BufferedInputStream(huc.getInputStream());
@@ -58,6 +58,9 @@ public abstract class VideoRipper extends AbstractRipper {
        }
        else {
            if (isThisATest()) {
                // Tests shouldn't download the whole video
                // Just change this.url to the download URL so the test knows we found it.
                logger.debug("Test rip, found URL: " + url);
                this.url = url;
                return true;
            }
@@ -1,5 +1,6 @@
package com.rarchives.ripme.ripper.rippers;

import java.io.File;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
@@ -117,13 +118,15 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
    @Override
    public JSONObject getFirstPage() throws IOException {
        URL apiURL = new URL(baseURL + "&consumer_key=" + CONSUMER_KEY);
        logger.debug("apiURL: " + apiURL);
        JSONObject json = Http.url(apiURL).getJSON();
        if (baseURL.contains("/blogs?")) {
            // List of stories
            // List of stories to return
            JSONObject result = new JSONObject();
            result.put("photos", new JSONArray());
            JSONArray jsonBlogs = json.getJSONArray("blog_posts");

            // Iterate over every story
            JSONArray jsonBlogs = json.getJSONArray("blog_posts");
            for (int i = 0; i < jsonBlogs.length(); i++) {
                if (i > 0) {
                    sleep(500);
@@ -153,6 +156,9 @@ public class FivehundredpxRipper extends AbstractJSONRipper {

    @Override
    public JSONObject getNextPage(JSONObject json) throws IOException {
        if (isThisATest()) {
            return null;
        }
        // Check previous JSON to see if we hit the last page
        if (!json.has("current_page")
                || !json.has("total_pages")) {
@@ -191,6 +197,9 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
                }
            }
            imageURLs.add(imageURL);
            if (isThisATest()) {
                break;
            }
        }
        return imageURLs;
    }
@@ -208,12 +217,18 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
        }
    }

    @Override
    public boolean keepSortOrder() {
        return false;
    }

    @Override
    public void downloadURL(URL url, int index) {
        String u = url.toExternalForm();
        String[] fields = u.split("/");
        String prefix = getPrefix(index) + fields[fields.length - 2] + "-";
        addURLToDownload(url, prefix);
        String prefix = getPrefix(index) + fields[fields.length - 3];
        File saveAs = new File(getWorkingDir() + File.separator + prefix + ".jpg");
        addURLToDownload(url, saveAs, "", null);
    }

}
@@ -130,6 +130,9 @@ public class FlickrRipper extends AbstractHTMLRipper {

    @Override
    public Document getNextPage(Document doc) throws IOException {
        if (isThisATest()) {
            return null;
        }
        // Find how many pages there are
        int lastPage = 0;
        for (Element apage : doc.select("a[data-track^=page-]")) {
@@ -185,6 +188,9 @@ public class FlickrRipper extends AbstractHTMLRipper {
            }
            attempted.add(imagePage);
            imageURLs.add(imagePage);
            if (isThisATest()) {
                break;
            }
        }
        return imageURLs;
    }
@@ -63,7 +63,7 @@ public class FuskatorRipper extends AbstractHTMLRipper {
        List<String> imageURLs = new ArrayList<String>();
        String html = doc.html();
        // Get "baseUrl"
        String baseUrl = Utils.between(html, "var baseUrl = unescape('", "'").get(0);
        String baseUrl = Utils.between(html, "unescape('", "'").get(0);
        try {
            baseUrl = URLDecoder.decode(baseUrl, "UTF-8");
        } catch (UnsupportedEncodingException e) {
@@ -73,7 +73,7 @@ public class FuskatorRipper extends AbstractHTMLRipper {
            baseUrl = "http:" + baseUrl;
        }
        // Iterate over images
        for (String filename : Utils.between(html, ".src=baseUrl+'", "'")) {
        for (String filename : Utils.between(html, "+'", "'")) {
            imageURLs.add(baseUrl + filename);
        }
        return imageURLs;
@@ -93,8 +93,8 @@ public class GifyoRipper extends AbstractHTMLRipper {
    @Override
    public List<String> getURLsFromPage(Document doc) {
        List<String> imageURLs = new ArrayList<String>();
        for (Element image : doc.select("div.gif img")) {
            String imageUrl = image.attr("src");
        for (Element image : doc.select("img.profile_gif")) {
            String imageUrl = image.attr("data-animated");
            if (imageUrl.startsWith("//")) {
                imageUrl = "http:" + imageUrl;
            }
@@ -102,7 +102,7 @@ public class GifyoRipper extends AbstractHTMLRipper {
            imageUrl = imageUrl.replace("_s.gif", ".gif");
            imageURLs.add(imageUrl);
        }
        logger.info("Found " + imageURLs.size() + " images");
        logger.debug("Found " + imageURLs.size() + " images");
        return imageURLs;
    }

@@ -136,8 +136,12 @@ public class MinusRipper extends AlbumRipper {
                String title = gallery.getString("name");
                String albumUrl = "http://" + user + ".minus.com/m" + gallery.getString("reader_id");
                ripAlbum(new URL(albumUrl), Utils.filesystemSafe(title));

                if (isThisATest()) {
                    break;
                }
            if (page >= json.getInt("total_pages")) {
            }
            if (page >= json.getInt("total_pages") || isThisATest()) {
                break;
            }
            page++;
@@ -169,6 +173,9 @@ public class MinusRipper extends AlbumRipper {
                prefix = String.format("%03d_", i + 1);
            }
            addURLToDownload(new URL(image), prefix, subdir);
            if (isThisATest()) {
                break;
            }
        }
    }
}
@@ -110,6 +110,10 @@ public class ModelmayhemRipper extends AlbumRipper {
                prefix = String.format("%03d_", i + 1);
            }
            addURLToDownload(new URL(bigImage), prefix);

            if (isThisATest()) {
                break;
            }
        }
        waitForThreads();
    }
@@ -92,7 +92,7 @@ public class NfsfwRipper extends AlbumRipper {
            }
            // Subalbums
            for (Element suba : albumDoc.select("td.IMG > a")) {
                if (isStopped()) {
                if (isStopped() || isThisATest()) {
                    break;
                }
                String subURL = "http://nfsfw.com" + suba.attr("href");
@@ -112,10 +112,16 @@ public class NfsfwRipper extends AlbumRipper {
                try {
                    NfsfwImageThread t = new NfsfwImageThread(new URL(imagePage), nextSubalbum, ++index);
                    nfsfwThreadPool.addThread(t);
                    if (isThisATest()) {
                        break;
                    }
                } catch (MalformedURLException mue) {
                    logger.warn("Invalid URL: " + imagePage);
                }
            }
            if (isThisATest()) {
                break;
            }
            // Get next page
            for (Element a : albumDoc.select("a.next")) {
                subAlbums.add(0, new Pair("http://nfsfw.com" + a.attr("href"), ""));
@@ -167,7 +167,7 @@ public class PhotobucketRipper extends AlbumRipper {
                filesIndex += 1;
                addURLToDownload(new URL(image),
                        "",
                        object.getString("location"),
                        object.getString("location").replaceAll(" ", "_"),
                        albumDoc.location(),
                        pageResponse.cookies());
            }
@@ -81,10 +81,6 @@ public class PornhubRipper extends AlbumRipper {
        int index = 0;
        String nextUrl = this.url.toExternalForm();

        if (isStopped()) {
            return;
        }

        if (albumDoc == null) {
            logger.info(" Retrieving album page " + nextUrl);
            sendUpdate(STATUS.LOADING_RESOURCE, nextUrl);
@@ -96,8 +92,8 @@ public class PornhubRipper extends AlbumRipper {
        // Find thumbnails
        Elements thumbs = albumDoc.select(".photoBlockBox li");
        if (thumbs.size() == 0) {
            logger.info("albumDoc: " + albumDoc);
            logger.info("No images found at " + nextUrl);
            logger.debug("albumDoc: " + albumDoc);
            logger.debug("No images found at " + nextUrl);
            return;
        }

@@ -111,6 +107,9 @@ public class PornhubRipper extends AlbumRipper {
                URL imagePage = new URL(url, imagePageUrl);
                PornhubImageThread t = new PornhubImageThread(imagePage, index, this.workingDir);
                pornhubThreadPool.addThread(t);
                if (isThisATest()) {
                    break;
                }
                try {
                    Thread.sleep(IMAGE_SLEEP_TIME);
                } catch (InterruptedException e) {
@@ -118,6 +117,7 @@ public class PornhubRipper extends AlbumRipper {
            }
        }

        pornhubThreadPool.waitForThreads();
        waitForThreads();
    }

@@ -34,12 +34,12 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {

    @Override
    public String getDomain() {
        return "idol.sankakucomplex.com";
        return "sankakucomplex.com";
    }

    @Override
    public String getGID(URL url) throws MalformedURLException {
        Pattern p = Pattern.compile("^https?://idol\\.sankakucomplex\\.com/.*tags=([^&]+).*$");
        Pattern p = Pattern.compile("^https?://([a-zA-Z0-9]+\\.)?sankakucomplex\\.com/.*tags=([^&]+).*$");
        Matcher m = p.matcher(url.toExternalForm());
        if (m.matches()) {
            try {
@@ -72,8 +72,8 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
            String postId = thumbSpan.attr("id").replaceAll("p", "");
            Element thumb = thumbSpan.getElementsByTag("img").first();
            String image = thumb.attr("abs:src")
                    .replace("i.sankakucomplex.com/data/preview",
                             "is.sankakucomplex.com/data") + "?" + postId;
                    .replace(".sankakucomplex.com/data/preview",
                             "s.sankakucomplex.com/data") + "?" + postId;
            imageURLs.add(image);
        }
        return imageURLs;
@@ -13,6 +13,13 @@ import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Http;

/**
 * Appears to be broken as of 2015-02-11.
 * Generating large image from thumbnail requires replacing "/m/" with something else:
 *  -> Sometimes "/b/"
 *  -> Sometimes "/p/"
 * No way to know without loading the image page.
 */
public class SmuttyRipper extends AlbumRipper {

    private static final String DOMAIN = "smutty.com",

@@ -15,6 +15,10 @@ import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Http;

/**
 * Appears to be broken as of 2015-02-11.
 * Looks like supertangas changed their site completely.
 */
public class SupertangasRipper extends AlbumRipper {

    private static final String DOMAIN = "supertangas.com",
@@ -88,6 +88,9 @@ public class TapasticRipper extends AbstractHTMLRipper {
                prefix.append(episode.filename.replace(" ", "-"));
                prefix.append("-");
                addURLToDownload(new URL(link), prefix.toString());
                if (isThisATest()) {
                    break;
                }
            }
        } catch (IOException e) {
            logger.error("[!] Exception while downloading " + url, e);

@@ -88,6 +88,9 @@ public class TeenplanetRipper extends AlbumRipper {
                prefix = String.format("%03d_", index);
            }
            addURLToDownload(new URL(image), prefix);
            if (isThisATest()) {
                break;
            }
        }
        waitForThreads();
    }
@@ -59,7 +59,7 @@ public class VidbleRipper extends AbstractHTMLRipper {
        Elements els = doc.select("#ContentPlaceHolder1_thumbs");
        String thumbs = els.first().attr("value");
        for (String thumb : thumbs.split(",")) {
            if (thumb.trim().equals("")) {
            if (thumb.trim().equals("") || thumb.contains("reddit.com")) {
                continue;
            }
            thumb = thumb.replaceAll("_[a-zA-Z]{3,5}", "");
@@ -1,66 +0,0 @@
package com.rarchives.ripme.ripper.rippers;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.lang.StringEscapeUtils;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;

public class VidmeRipper extends AbstractHTMLRipper {

    public VidmeRipper(URL url) throws IOException {
        super(url);
    }

    @Override
    public String getDomain() {
        return "vid.me";
    }

    @Override
    public String getHost() {
        return "vid";
    }

    @Override
    public Document getFirstPage() throws IOException {
        return Http.url(url).get();
    }

    @Override
    public List<String> getURLsFromPage(Document page) {
        List<String> result = new LinkedList<String>();
        for(Element elem : page.select("a.js-download-video-link")){
            String link = StringEscapeUtils.unescapeHtml(elem.attr("data-href").toString());
            result.add(link);
        }
        return result;
    }

    @Override
    public void downloadURL(URL url, int index) {
        addURLToDownload(url, getPrefix(index));
    }

    @Override
    public String getGID(URL url) throws MalformedURLException {
        Pattern p = Pattern.compile("^https?://vid\\.me/([a-zA-Z0-9]+).*$");
        Matcher m = p.matcher(url.toExternalForm());
        if (m.matches()) {
            // Return the text contained between () in the regex
            return m.group(1);
        }
        throw new MalformedURLException("Expected imgur.com URL format: "
                + "imgur.com/a/albumid - got " + url + " instead");
    }

}
@@ -56,6 +56,12 @@ public class VineRipper extends AlbumRipper {
                for (int i = 0; i < records.length(); i++) {
                    String videoURL = records.getJSONObject(i).getString("videoUrl");
                    addURLToDownload(new URL(videoURL));
                    if (isThisATest()) {
                        break;
                    }
                }
                if (isThisATest()) {
                    break;
                }
                if (records.length() == 0) {
                    logger.info("Zero records returned");

@@ -57,6 +57,12 @@ public class XhamsterRipper extends AlbumRipper {
                    prefix = String.format("%03d_", index);
                }
                addURLToDownload(new URL(image), prefix);
                if (isThisATest()) {
                    break;
                }
            }
            if (isThisATest()) {
                break;
            }
            nextURL = null;
            for (Element element : doc.select("a.last")) {
@@ -14,7 +14,7 @@ import com.rarchives.ripme.utils.Http;

public class ViddmeRipper extends VideoRipper {

    private static final String HOST = "vidd";
    private static final String HOST = "vid";

    public ViddmeRipper(URL url) throws IOException {
        super(url);
@@ -27,7 +27,7 @@ public class ViddmeRipper extends VideoRipper {

    @Override
    public boolean canRip(URL url) {
        Pattern p = Pattern.compile("^https?://[wm.]*vidd\\.me/[a-zA-Z0-9]+.*$");
        Pattern p = Pattern.compile("^https?://[wm.]*vid\\.me/[a-zA-Z0-9]+.*$");
        Matcher m = p.matcher(url.toExternalForm());
        return m.matches();
    }
@@ -39,15 +39,15 @@ public class ViddmeRipper extends VideoRipper {

    @Override
    public String getGID(URL url) throws MalformedURLException {
        Pattern p = Pattern.compile("^https?://[wm.]*vidd\\.me/([a-zA-Z0-9]+).*$");
        Pattern p = Pattern.compile("^https?://[wm.]*vid\\.me/([a-zA-Z0-9]+).*$");
        Matcher m = p.matcher(url.toExternalForm());
        if (m.matches()) {
            return m.group(1);
        }

        throw new MalformedURLException(
                "Expected vidd.me format:"
                + "vidd.me/id"
                "Expected vid.me format:"
                + "vid.me/id"
                + " Got: " + url);
    }

@@ -37,6 +37,7 @@ import javax.swing.DefaultListModel;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
@@ -62,6 +63,8 @@ import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyledDocument;

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

@@ -124,6 +127,7 @@ public class MainWindow implements Runnable, RipStatusHandler {
    private static JButton configSaveDirButton;
    private static JTextField configRetriesText;
    private static JCheckBox configAutoupdateCheckbox;
    private static JComboBox configLogLevelCombobox;
    private static JCheckBox configPlaySound;
    private static JCheckBox configSaveOrderCheckbox;
    private static JCheckBox configShowPopup;
@@ -143,7 +147,6 @@ public class MainWindow implements Runnable, RipStatusHandler {
    private static AbstractRipper ripper;

    public MainWindow() {
        Logger.getRootLogger().setLevel(Level.ERROR);
        mainFrame = new JFrame("RipMe v" + UpdateUtils.getThisJarVersion());
        mainFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        mainFrame.setResizable(false);
@@ -196,6 +199,7 @@ public class MainWindow implements Runnable, RipStatusHandler {
        Utils.setConfigInteger("download.timeout", Integer.parseInt(configTimeoutText.getText()));
        Utils.setConfigBoolean("clipboard.autorip", ClipboardUtils.getClipboardAutoRip());
        Utils.setConfigBoolean("auto.update", configAutoupdateCheckbox.isSelected());
        Utils.setConfigString("log.level", configLogLevelCombobox.getSelectedItem().toString());
        Utils.setConfigBoolean("play.sound", configPlaySound.isSelected());
        Utils.setConfigBoolean("download.save_order", configSaveOrderCheckbox.isSelected());
        Utils.setConfigBoolean("download.show_popup", configShowPopup.isSelected());
@@ -419,6 +423,9 @@ public class MainWindow implements Runnable, RipStatusHandler {
        configAutoupdateCheckbox = new JCheckBox("Auto-update?", Utils.getConfigBoolean("auto.update", true));
        configAutoupdateCheckbox.setHorizontalAlignment(JCheckBox.RIGHT);
        configAutoupdateCheckbox.setHorizontalTextPosition(JCheckBox.LEFT);
        configLogLevelCombobox = new JComboBox(new String[] {"Log level: Error", "Log level: Warn", "Log level: Info", "Log level: Debug"});
        configLogLevelCombobox.setSelectedItem(Utils.getConfigString("log.level", "Log level: Debug"));
        setLogLevel(configLogLevelCombobox.getSelectedItem().toString());
        configPlaySound = new JCheckBox("Sound when rip completes", Utils.getConfigBoolean("play.sound", false));
        configPlaySound.setHorizontalAlignment(JCheckBox.RIGHT);
        configPlaySound.setHorizontalTextPosition(JCheckBox.LEFT);
@@ -451,6 +458,7 @@ public class MainWindow implements Runnable, RipStatusHandler {
        gbc.gridy = 0; gbc.gridx = 0; configurationPanel.add(configUpdateLabel, gbc);
        gbc.gridx = 1; configurationPanel.add(configUpdateButton, gbc);
        gbc.gridy = 1; gbc.gridx = 0; configurationPanel.add(configAutoupdateCheckbox, gbc);
        gbc.gridx = 1; configurationPanel.add(configLogLevelCombobox, gbc);
        gbc.gridy = 2; gbc.gridx = 0; configurationPanel.add(configThreadsLabel, gbc);
        gbc.gridx = 1; configurationPanel.add(configThreadsText, gbc);
        gbc.gridy = 3; gbc.gridx = 0; configurationPanel.add(configTimeoutLabel, gbc);
@@ -648,6 +656,14 @@ public class MainWindow implements Runnable, RipStatusHandler {
                t.start();
            }
        });
        configLogLevelCombobox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent arg0) {
                String level = ((JComboBox) arg0.getSource()).getSelectedItem().toString();
                setLogLevel(level);
            }
        });

        configSaveDirButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent arg0) {
@@ -730,6 +746,27 @@ public class MainWindow implements Runnable, RipStatusHandler {
        });
    }

    private void setLogLevel(String level) {
        Level newLevel = Level.ERROR;
        level = level.substring(level.lastIndexOf(' ') + 1);
        if (level.equals("Debug")) {
            newLevel = Level.DEBUG;
        }
        else if (level.equals("Info")) {
            newLevel = Level.INFO;
        }
        else if (level.equals("Warn")) {
            newLevel = Level.WARN;
        }
        else if (level.equals("Error")) {
            newLevel = Level.ERROR;
        }
        Logger.getRootLogger().setLevel(newLevel);
        logger.setLevel(newLevel);
        ((ConsoleAppender)Logger.getRootLogger().getAppender("stdout")).setThreshold(newLevel);
        ((FileAppender)Logger.getRootLogger().getAppender("FILE")).setThreshold(newLevel);
    }

    private void setupTrayIcon() {
        mainFrame.addWindowListener(new WindowAdapter() {
            public void windowActivated(WindowEvent e) { trayMenuMain.setLabel("Hide"); }
@@ -41,6 +41,7 @@ public class UpdateUtils {

        Document doc = null;
        try {
            logger.debug("Retrieving " + UpdateUtils.updateJsonURL);
            doc = Jsoup.connect(UpdateUtils.updateJsonURL)
                    .timeout(10 * 1000)
                    .ignoreContentType(true)
@@ -69,6 +70,7 @@ public class UpdateUtils {

        String latestVersion = json.getString("latestVersion");
        if (UpdateUtils.isNewerVersion(latestVersion)) {
            logger.info("Found newer version: " + latestVersion);
            int result = JOptionPane.showConfirmDialog(
                    null,
                    "<html><font color=\"green\">New version (" + latestVersion + ") is available!</font>"
@@ -95,8 +97,10 @@ public class UpdateUtils {
                return;
            }
        } else {
            logger.debug("This version (" + UpdateUtils.getThisJarVersion() +
                    ") is the same or newer than the website's version (" + latestVersion + ")");
            configUpdateLabel.setText("<html><font color=\"green\">v" + UpdateUtils.getThisJarVersion() + " is the latest version</font></html>");
            logger.info("Running latest version: " + UpdateUtils.getThisJarVersion());
            logger.debug("Running latest version: " + UpdateUtils.getThisJarVersion());
        }
    }

@@ -33,8 +33,10 @@ public class RipUtils {
        if ((url.getHost().endsWith("imgur.com"))
                && url.toExternalForm().contains("imgur.com/a/")) {
            try {
                logger.debug("Fetching imgur album at " + url);
                ImgurAlbum imgurAlbum = ImgurRipper.getImgurAlbum(url);
                for (ImgurImage imgurImage : imgurAlbum.images) {
                    logger.debug("Got imgur image: " + imgurImage.url);
                    result.add(imgurImage.url);
                }
            } catch (IOException e) {
@@ -44,7 +46,9 @@ public class RipUtils {
        }
        else if (url.getHost().endsWith("gfycat.com")) {
            try {
                logger.debug("Fetching gfycat page " + url);
                String videoURL = GfycatRipper.getVideoURL(url);
                logger.debug("Got gfycat URL: " + videoURL);
                result.add(new URL(videoURL));
            } catch (IOException e) {
                // Do nothing
@@ -54,6 +58,7 @@ public class RipUtils {
        }
        else if (url.toExternalForm().contains("vidble.com/album/")) {
            try {
                logger.info("Getting vidble album " + url);
                result.addAll(VidbleRipper.getURLsFromPage(url));
            } catch (IOException e) {
                // Do nothing
@@ -68,6 +73,7 @@ public class RipUtils {
        if (m.matches()) {
            try {
                URL singleURL = new URL(m.group(1));
                logger.debug("Found single URL: " + singleURL);
                result.add(singleURL);
                return result;
            } catch (MalformedURLException e) {
@@ -114,9 +120,10 @@ public class RipUtils {
        if (url == null) url = urlFromSiteDirectoryName(dir, "datwin", "http://datw.in/", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "drawcrowd", "http://drawcrowd.com/", "");
        if (url == null) url = urlFromSiteDirectoryName(dir.replace("-", "/"), "ehentai", "http://g.e-hentai.org/g/", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "8muses", "http://www.8muses.com/index/category/", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "fapproved", "http://fapproved.com/users/", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "vinebox", "http://finebox.co/u/", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "imgbox", "http://imgbox.com/g/", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "modelmayhem", "http://www.modelmayhem.com/", "");
        /*
        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
@@ -129,6 +136,7 @@ public class RipUtils {
        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
        if (url == null) url = urlFromSiteDirectoryName(dir, "", "", "");
        */
        //if (url == null) url = urlFromSiteDirectoryName(dir, "8muses", "http://www.8muses.com/index/category/", "");
        return url;
    }

@@ -5,7 +5,7 @@ log4j.appender.FILE.ImmediateFlush = true
log4j.appender.FILE.Threshold = warn
log4j.appender.FILE.maxFileSize = 20MB
log4j.appender.FILE.layout = org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.ConversionPattern = %d %-5p %c{2} %x - %m%n
log4j.appender.FILE.layout.ConversionPattern = %d %-5p %c{2} %x.%M() %m%n

# define the console appender
log4j.appender.stdout = org.apache.log4j.ConsoleAppender
@@ -2,21 +2,42 @@ package com.rarchives.ripme.tst.ripper.rippers;

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;

import com.rarchives.ripme.ripper.AbstractRipper;
import com.rarchives.ripme.ripper.rippers.DeviantartRipper;
import com.rarchives.ripme.ripper.rippers.EightmusesRipper;
import com.rarchives.ripme.ripper.rippers.FineboxRipper;
import com.rarchives.ripme.ripper.rippers.FivehundredpxRipper;
import com.rarchives.ripme.ripper.rippers.FlickrRipper;
import com.rarchives.ripme.ripper.rippers.FuraffinityRipper;
import com.rarchives.ripme.ripper.rippers.FuskatorRipper;
import com.rarchives.ripme.ripper.rippers.GifyoRipper;
import com.rarchives.ripme.ripper.rippers.GirlsOfDesireRipper;
import com.rarchives.ripme.ripper.rippers.GonewildRipper;
import com.rarchives.ripme.ripper.rippers.HentaifoundryRipper;
import com.rarchives.ripme.ripper.rippers.ImagearnRipper;
import com.rarchives.ripme.ripper.rippers.ImagebamRipper;
import com.rarchives.ripme.ripper.rippers.ImagestashRipper;
import com.rarchives.ripme.ripper.rippers.ImagevenueRipper;
import com.rarchives.ripme.ripper.rippers.ImgboxRipper;
import com.rarchives.ripme.ripper.rippers.IrarchivesRipper;
import com.rarchives.ripme.ripper.rippers.MinusRipper;
import com.rarchives.ripme.ripper.rippers.ModelmayhemRipper;
import com.rarchives.ripme.ripper.rippers.MotherlessRipper;
import com.rarchives.ripme.ripper.rippers.NfsfwRipper;
import com.rarchives.ripme.ripper.rippers.PhotobucketRipper;
import com.rarchives.ripme.ripper.rippers.PornhubRipper;
import com.rarchives.ripme.ripper.rippers.RedditRipper;
import com.rarchives.ripme.ripper.rippers.SankakuComplexRipper;
import com.rarchives.ripme.ripper.rippers.SeeniveRipper;
import com.rarchives.ripme.ripper.rippers.ShesFreakyRipper;
import com.rarchives.ripme.ripper.rippers.TapasticRipper;
import com.rarchives.ripme.ripper.rippers.TeenplanetRipper;
import com.rarchives.ripme.ripper.rippers.TumblrRipper;
import com.rarchives.ripme.ripper.rippers.TwitterRipper;
import com.rarchives.ripme.ripper.rippers.TwodgalleriesRipper;
import com.rarchives.ripme.ripper.rippers.VidbleRipper;
import com.rarchives.ripme.ripper.rippers.VineRipper;
import com.rarchives.ripme.ripper.rippers.VkRipper;
import com.rarchives.ripme.ripper.rippers.XhamsterRipper;

@@ -26,127 +47,240 @@ import com.rarchives.ripme.ripper.rippers.XhamsterRipper;
 */
public class BasicRippersTest extends RippersTest {

    public void testMotherlessAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();

        // Image album
        contentURLs.add(new URL("http://motherless.com/G4DAA18D"));
        // Video album
        // XXX: Commented out because test takes too long to download the file.
        // contentURLs.add(new URL("http://motherless.com/GFD0F537"));

        for (URL url : contentURLs) {
            MotherlessRipper ripper = new MotherlessRipper(url);
    public void testDeviantartAlbum() throws IOException {
        DeviantartRipper ripper = new DeviantartRipper(new URL("http://airgee.deviantart.com/gallery/"));
        testRipper(ripper);
    }
    }

    public void testDeviantartAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();

        // Small gallery
        contentURLs.add(new URL("http://airgee.deviantart.com/gallery/"));
    public void testDeviantartNSFWAlbum() throws IOException {
        // NSFW gallery
        contentURLs.add(new URL("http://faterkcx.deviantart.com/gallery/"));
        // Multi-page NSFW
        contentURLs.add(new URL("http://geekysica.deviantart.com/gallery/35209412"));

        for (URL url : contentURLs) {
            DeviantartRipper ripper = new DeviantartRipper(url);
        DeviantartRipper ripper = new DeviantartRipper(new URL("http://faterkcx.deviantart.com/gallery/"));
        testRipper(ripper);
    }
    }

    public void testEightmusesAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();

        contentURLs.add(new URL("http://www.8muses.com/index/category/jab-hotassneighbor7"));

        for (URL url : contentURLs) {
            EightmusesRipper ripper = new EightmusesRipper(url);
    public void testEightmusesAlbum() throws IOException {
        EightmusesRipper ripper = new EightmusesRipper(new URL("http://www.8muses.com/index/category/jab-hotassneighbor7"));
        testRipper(ripper);
    }
    }

    public void testVineboxAlbums() throws IOException {
        Logger.getRootLogger().setLevel(Level.ALL);
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("http://vinebox.co/u/wi57hMjc2Ka"));
        contentURLs.add(new URL("http://finebox.co/u/wi57hMjc2Ka"));
        for (URL url : contentURLs) {
            FineboxRipper ripper = new FineboxRipper(url);
    public void testVineboxAlbum() throws IOException {
        FineboxRipper ripper = new FineboxRipper(new URL("http://vinebox.co/u/wi57hMjc2Ka"));
        testRipper(ripper);
    }
    }

    public void testXhamsterAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("http://xhamster.com/photos/gallery/1462237/alyssa_gadson.html"));
        contentURLs.add(new URL("http://xhamster.com/photos/gallery/2941201/tableau_d_039_art_ii.html"));
        for (URL url : contentURLs) {
            XhamsterRipper ripper = new XhamsterRipper(url);
    public void testFineboxAlbum() throws IOException {
        FineboxRipper ripper = new FineboxRipper(new URL("http://finebox.co/u/wi57hMjc2Ka"));
        testRipper(ripper);
    }

    public void testRedditSubredditRip() throws IOException {
        RedditRipper ripper = new RedditRipper(new URL("http://www.reddit.com/r/nsfw_oc"));
        testRipper(ripper);
    }
    public void testRedditSubredditTopRip() throws IOException {
        RedditRipper ripper = new RedditRipper(new URL("http://www.reddit.com/r/nsfw_oc/top?t=all"));
        testRipper(ripper);
    }
    public void testRedditPostRip() throws IOException {
        RedditRipper ripper = new RedditRipper(new URL("http://www.reddit.com/r/UnrealGirls/comments/1ziuhl/in_class_veronique_popa/"));
        testRipper(ripper);
    }

    public void testSeeniveAlbum() throws IOException {
        SeeniveRipper ripper = new SeeniveRipper(new URL("http://seenive.com/u/946491170220040192"));
        testRipper(ripper);
    }

    public void testTumblrFullRip() throws IOException {
        TumblrRipper ripper = new TumblrRipper(new URL("http://wrouinr.tumblr.com/archive"));
        testRipper(ripper);
    }
    public void testTumblrTagRip() throws IOException {
        TumblrRipper ripper = new TumblrRipper(new URL("http://topinstagirls.tumblr.com/tagged/berlinskaya"));
        testRipper(ripper);
    }
    public void testTumblrPostRip() throws IOException {
        TumblrRipper ripper = new TumblrRipper(new URL("http://genekellyclarkson.tumblr.com/post/86100752527/lucyannebrooks-rachaelboden-friends-goodtimes-bed-boobs"));
        testRipper(ripper);
    }

    public void testTwitterUserRip() throws IOException {
        TwitterRipper ripper = new TwitterRipper(new URL("https://twitter.com/danngamber01/media"));
        testRipper(ripper);
    }
    public void testTwitterSearchRip() throws IOException {
        TwitterRipper ripper = new TwitterRipper(new URL("https://twitter.com/search?q=from%3Apurrbunny%20filter%3Aimages&src=typd"));
        testRipper(ripper);
    }

    public void test500pxAlbum() throws IOException {
        FivehundredpxRipper ripper = new FivehundredpxRipper(new URL("https://prime.500px.com/alexander_hurman"));
        testRipper(ripper);
    }

    public void testFlickrAlbum() throws IOException {
        FlickrRipper ripper = new FlickrRipper(new URL("https://www.flickr.com/photos/leavingallbehind/sets/72157621895942720/"));
        testRipper(ripper);
    }

    public void testFuraffinityAlbum() throws IOException {
        FuraffinityRipper ripper = new FuraffinityRipper(new URL("https://www.furaffinity.net/gallery/mustardgas/"));
        testRipper(ripper);
    }

    public void testFuskatorAlbum() throws IOException {
        FuskatorRipper ripper = new FuskatorRipper(new URL("http://fuskator.com/full/emJa1U6cqbi/index.html"));
        testRipper(ripper);
    }

    public void testGifyoAlbum() throws IOException {
        GifyoRipper ripper = new GifyoRipper(new URL("http://gifyo.com/PieSecrets/"));
        testRipper(ripper);
    }

    public void testGirlsofdesireAlbum() throws IOException {
        GirlsOfDesireRipper ripper = new GirlsOfDesireRipper(new URL("http://www.girlsofdesire.org/galleries/krillia/"));
        testRipper(ripper);
    }

    public void testGonewildAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("http://gonewild.com/user/amle69"));
        for (URL url : contentURLs) {
            GonewildRipper ripper = new GonewildRipper(url);
        GonewildRipper ripper = new GonewildRipper(new URL("http://gonewild.com/user/amle69"));
        testRipper(ripper);
    }
    }

    public void testRedditAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("http://www.reddit.com/r/nsfw_oc"));
        contentURLs.add(new URL("http://www.reddit.com/r/nsfw_oc/top?t=all"));
        contentURLs.add(new URL("http://www.reddit.com/r/UnrealGirls/comments/1ziuhl/in_class_veronique_popa/"));
        for (URL url : contentURLs) {
            RedditRipper ripper = new RedditRipper(url);
    public void testHentaifoundryRip() throws IOException {
        HentaifoundryRipper ripper = new HentaifoundryRipper(new URL("http://www.hentai-foundry.com/pictures/user/personalami"));
        testRipper(ripper);
    }
    }

    public void testSeeniveAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("http://seenive.com/u/946491170220040192"));
        for (URL url : contentURLs) {
            SeeniveRipper ripper = new SeeniveRipper(url);
    public void testImagearnRip() throws IOException {
        AbstractRipper ripper = new ImagearnRipper(new URL("http://imagearn.com//gallery.php?id=578682"));
        testRipper(ripper);
    }
    }

    public void testTumblrAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("http://wrouinr.tumblr.com/archive"));
        contentURLs.add(new URL("http://topinstagirls.tumblr.com/tagged/berlinskaya"));
        contentURLs.add(new URL("http://genekellyclarkson.tumblr.com/post/86100752527/lucyannebrooks-rachaelboden-friends-goodtimes-bed-boobs"));
        for (URL url : contentURLs) {
            TumblrRipper ripper = new TumblrRipper(url);
    public void testImagebamRip() throws IOException {
        AbstractRipper ripper = new ImagebamRipper(new URL("http://www.imagebam.com/gallery/488cc796sllyf7o5srds8kpaz1t4m78i"));
        testRipper(ripper);
    }
    }

    public void testTwitterAlbums() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("https://twitter.com/danngamber01/media"));
        contentURLs.add(new URL("https://twitter.com/search?q=from%3Apurrbunny%20filter%3Aimages&src=typd"));
        for (URL url : contentURLs) {
            TwitterRipper ripper = new TwitterRipper(url);
    public void testImagestashRip() throws IOException {
        AbstractRipper ripper = new ImagestashRipper(new URL("https://imagestash.org/tag/everydayuncensor"));
        testRipper(ripper);
    }
    }

    public void testVkAlbum() throws IOException {
        List<URL> contentURLs = new ArrayList<URL>();
        contentURLs.add(new URL("https://vk.com/album45506334_172415053"));
        contentURLs.add(new URL("https://vk.com/album45506334_0"));
        contentURLs.add(new URL("https://vk.com/photos45506334"));
        for (URL url : contentURLs) {
            VkRipper ripper = new VkRipper(url);
    public void testImagevenueRip() throws IOException {
        AbstractRipper ripper = new ImagevenueRipper(new URL("http://img120.imagevenue.com/galshow.php?gal=gallery_1373818527696_191lo"));
        testRipper(ripper);
    }

    public void testImgboxRip() throws IOException {
        AbstractRipper ripper = new ImgboxRipper(new URL("http://imgbox.com/g/sEMHfsqx4w"));
        testRipper(ripper);
    }

    public void testIrarchivesRip() throws IOException {
        AbstractRipper ripper = new IrarchivesRipper(new URL("http://i.rarchives.com/?url=user%3Agingerpuss"));
        testRipper(ripper);
    }

    public void testMinusUserRip() throws IOException {
        AbstractRipper ripper = new MinusRipper(new URL("http://vampyr3.minus.com/"));
        testRipper(ripper);
        deleteSubdirs(ripper.getWorkingDir());
        deleteDir(ripper.getWorkingDir());
    }
    public void testMinusUserAlbumRip() throws IOException {
        AbstractRipper ripper = new MinusRipper(new URL("http://vampyr3.minus.com/mw7ztQ6xzP7ae"));
        testRipper(ripper);
    }
    public void testMinusUserUploadsRip() throws IOException {
        AbstractRipper ripper = new MinusRipper(new URL("http://vampyr3.minus.com/uploads"));
        testRipper(ripper);
    }
    public void testMinusAlbumRip() throws IOException {
        AbstractRipper ripper = new MinusRipper(new URL("http://minus.com/mw7ztQ6xzP7ae"));
        testRipper(ripper);
    }

    public void testModelmayhemRip() throws IOException {
        AbstractRipper ripper = new ModelmayhemRipper(new URL("http://www.modelmayhem.com/portfolio/520206/viewall"));
        testRipper(ripper);
    }

    public void testMotherlessAlbumRip() throws IOException {
        MotherlessRipper ripper = new MotherlessRipper(new URL("http://motherless.com/G4DAA18D"));
        testRipper(ripper);
    }

    public void testNfsfwRip() throws IOException {
        AbstractRipper ripper = new NfsfwRipper(new URL("http://nfsfw.com/gallery/v/Kitten/"));
        testRipper(ripper);
    }

    public void testPhotobucketRip() throws IOException {
        AbstractRipper ripper = new PhotobucketRipper(new URL("http://s844.photobucket.com/user/SpazzySpizzy/library/Album%20Covers?sort=3&page=1"));
        testRipper(ripper);
        deleteSubdirs(ripper.getWorkingDir());
        deleteDir(ripper.getWorkingDir());
    }

    public void testPornhubRip() throws IOException {
        AbstractRipper ripper = new PornhubRipper(new URL("http://www.pornhub.com/album/428351"));
        testRipper(ripper);
    }

    public void testSankakuChanRip() throws IOException {
        AbstractRipper ripper = new SankakuComplexRipper(new URL("https://chan.sankakucomplex.com/?tags=blue_necktie"));
        testRipper(ripper);
    }
    public void testSankakuIdolRip() throws IOException {
        AbstractRipper ripper = new SankakuComplexRipper(new URL("https://idol.sankakucomplex.com/?tags=meme_%28me%21me%21me%21%29_%28cosplay%29"));
        testRipper(ripper);
    }

    public void testShesFreakyRip() throws IOException {
        AbstractRipper ripper = new ShesFreakyRipper(new URL("http://www.shesfreaky.com/gallery/nicee-snow-bunny-579NbPjUcYa.html"));
        testRipper(ripper);
    }

    public void testTapasticRip() throws IOException {
        AbstractRipper ripper = new TapasticRipper(new URL("http://tapastic.com/episode/2139"));
        testRipper(ripper);
    }

    public void testTeenplanetRip() throws IOException {
        AbstractRipper ripper = new TeenplanetRipper(new URL("http://teenplanet.org/galleries/the-perfect-side-of-me-6588.html"));
        testRipper(ripper);
    }

    public void testTwodgalleriesRip() throws IOException {
        AbstractRipper ripper = new TwodgalleriesRipper(new URL("http://www.2dgalleries.com/artist/regis-loisel-6477"));
        testRipper(ripper);
    }

    public void testVidbleRip() throws IOException {
        AbstractRipper ripper = new VidbleRipper(new URL("http://www.vidble.com/album/y1oyh3zd"));
        testRipper(ripper);
    }

    public void testVineRip() throws IOException {
        AbstractRipper ripper = new VineRipper(new URL("https://vine.co/u/954440445776334848"));
        testRipper(ripper);
    }

    public void testVkSubalbumRip() throws IOException {
        VkRipper ripper = new VkRipper(new URL("https://vk.com/album45506334_172415053"));
        testRipper(ripper);
    }
    public void testVkRootAlbumRip() throws IOException {
        VkRipper ripper = new VkRipper(new URL("https://vk.com/album45506334_0"));
        testRipper(ripper);
    }
    public void testVkPhotosRip() throws IOException {
        VkRipper ripper = new VkRipper(new URL("https://vk.com/photos45506334"));
        testRipper(ripper);
    }

    public void testXhamsterAlbums() throws IOException {
        XhamsterRipper ripper = new XhamsterRipper(new URL("http://xhamster.com/photos/gallery/1462237/alyssa_gadson.html"));
        testRipper(ripper);
    }
}
@@ -5,6 +5,8 @@ import java.io.IOException;

import junit.framework.TestCase;

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

import com.rarchives.ripme.ripper.AbstractRipper;
@@ -24,14 +26,16 @@ public class RippersTest extends TestCase {

    protected void testRipper(AbstractRipper ripper) {
        try {
            Utils.setConfigInteger("page.timeout", 5 * 1000);
            // Turn on Debug logging
            ((ConsoleAppender)Logger.getRootLogger().getAppender("stdout")).setThreshold(Level.DEBUG);

            // Decrease timeout
            Utils.setConfigInteger("page.timeout", 10 * 1000);

            ripper.setup();
            ripper.markAsTest();
            ripper.rip();
            for (File f : ripper.getWorkingDir().listFiles()) {
                System.err.println(f.toString());
            }
            assertTrue("Failed to download files from " + ripper.getURL(), ripper.getWorkingDir().listFiles().length >= 1);
            assertTrue("Failed to download a single file from " + ripper.getURL(), ripper.getWorkingDir().listFiles().length >= 1);
        } catch (IOException e) {
            if (e.getMessage().contains("Ripping interrupted")) {
                // We expect some rips to get interrupted
@@ -78,5 +82,17 @@ public class RippersTest extends TestCase {
        }
        dir.delete();
    }
    protected void deleteSubdirs(File workingDir) {
        for (File f : workingDir.listFiles()) {
            if (f.isDirectory()) {
                for (File sf : f.listFiles()) {
                    logger.debug("Deleting " + sf);
                    sf.delete();
                }
                logger.debug("Deleting " + f);
                f.delete();
            }
        }
    }

}