diff --git a/pom.xml b/pom.xml
index a35c6486..17c923d4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
com.rarchives.ripme
ripme
jar
- 1.7.8
+ 1.7.11
ripme
http://rip.rarchives.com
diff --git a/ripme.json b/ripme.json
index 303d0db6..b2520900 100644
--- a/ripme.json
+++ b/ripme.json
@@ -1,6 +1,9 @@
{
- "latestVersion": "1.7.8",
+ "latestVersion": "1.7.11",
"changeList": [
+ "1.7.11: Added gwarchives support to the cheveretoRipper; Gfycat Tests & Fix for bad reddit submissions; instagram ripper can now be made to skip videos",
+ "1.7.10: Added basic pornpics.com ripper; Fixed hentai.cafe regex",
+ "1.7.9: FuraffinityRipper can now rip non-public albums; Added 2 new api keys, ripper can now download raw images from tumblr; Erome ripper now matchs links without the www; Tumblr ripper now tells the user if it hits the rate limit",
"1.7.8: Forced https for tumblr image links; Fixed imgur album filenames; SankakuComplexRipper now downloads full sized images; Added dribbble.com ripper; Added comfirm button for clearing history",
"1.7.7: Fixed E621 Ripper; Added unit test for zizki.com; Added unit test for Xbooru.com; Updated reddit useragent",
"1.7.6: Added OglafRipper",
diff --git a/src/main/java/com/rarchives/ripme/App.java b/src/main/java/com/rarchives/ripme/App.java
index e046d5a6..0bb5f3f6 100644
--- a/src/main/java/com/rarchives/ripme/App.java
+++ b/src/main/java/com/rarchives/ripme/App.java
@@ -63,13 +63,21 @@ public class App {
SwingUtilities.invokeLater(mw);
}
}
-
+ /**
+ * Creates an abstract ripper and instructs it to rip.
+ * @param url URL to be ripped
+ * @throws Exception If no ripper can handle the URL or the rip fails
+ */
private static void rip(URL url) throws Exception {
AbstractRipper ripper = AbstractRipper.getRipper(url);
ripper.setup();
ripper.rip();
}
+ /**
+ * Parses and handles the command-line arguments.
+ * @param args Array of command-line arguments
+ */
private static void handleArguments(String[] args) {
CommandLine cl = getArgs(args);
if (cl.hasOption('h')) {
@@ -109,7 +117,7 @@ public class App {
}
if (cl.hasOption('R')) {
loadHistory();
- if (HISTORY.toList().size() == 0) {
+ if (HISTORY.toList().isEmpty()) {
logger.error("There are no history entries to re-rip. Rip some albums first");
System.exit(-1);
}
@@ -173,14 +181,18 @@ public class App {
}
}
- // this function will attempt to rip the provided url
+ /**
+ * Attempts to rip the target URL.
+ * @param targetURL URL to rip
+ * @param saveConfig Whether to save the config (and download history) after ripping
+ */
private static void ripURL(String targetURL, boolean saveConfig) {
try {
URL url = new URL(targetURL);
rip(url);
List history = Utils.getConfigList("download.history");
- if (!history.contains(url.toExternalForm())) {
- history.add(url.toExternalForm());
+ if (!history.contains(url.toExternalForm())) { // only add the URL if it isn't already in the history
+ history.add(url.toExternalForm()); // record it so it can be skipped or re-ripped later
Utils.setConfigList("download.history", Arrays.asList(history.toArray()));
if (saveConfig) {
Utils.saveConfig();
@@ -195,6 +207,10 @@ public class App {
}
}
+ /**
+ * Builds the Options object describing all accepted command-line options.
+ * @return All acceptable command-line options.
+ */
private static Options getOptions() {
Options opts = new Options();
opts.addOption("h", "help", false, "Print the help");
@@ -213,6 +229,11 @@ public class App {
return opts;
}
+ /**
+ * Tries to parse the command-line arguments.
+ * @param args Array of command-line arguments.
+ * @return CommandLine object containing the parsed arguments, or null if parsing failed.
+ */
private static CommandLine getArgs(String[] args) {
BasicParser parser = new BasicParser();
try {
@@ -223,7 +244,10 @@ public class App {
return null;
}
}
-
+
+ /**
+ * Loads history from history file into memory.
+ */
private static void loadHistory() {
File historyFile = new File(Utils.getConfigDir() + File.separator + "history.json");
HISTORY.clear();
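
The new getOptions()/getArgs() documentation describes the Apache Commons CLI flow App follows: build an Options object, hand it and the raw args to a BasicParser, then query the resulting CommandLine. A minimal, self-contained sketch of that pattern follows; -h/--help is taken from getOptions() above, while -u/--url is illustrative only.

    import org.apache.commons.cli.BasicParser;
    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class CliSketch {
        public static void main(String[] args) {
            Options opts = new Options();
            opts.addOption("h", "help", false, "Print the help");        // flag without an argument
            opts.addOption("u", "url", true, "URL of the album to rip"); // illustrative option with an argument

            try {
                CommandLine cl = new BasicParser().parse(opts, args);
                if (cl.hasOption('h')) {
                    System.out.println("usage: sketch [-h] [-u <url>]");
                }
                if (cl.hasOption('u')) {
                    System.out.println("Would rip: " + cl.getOptionValue('u'));
                }
            } catch (ParseException e) {
                // getArgs() logs the failure and returns null in this situation
                System.err.println("Failed to parse arguments: " + e.getMessage());
            }
        }
    }
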
diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java
index 3b7496a0..25d51007 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java
@@ -140,6 +140,14 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
}
waitForThreads();
}
+
+ /**
+ * Gets the file name from the URL
+ * @param url
+ * URL that you want to get the filename from
+ * @return
+ * Filename of the URL
+ */
private String fileNameFromURL(URL url) {
String saveAs = url.toExternalForm();
if (saveAs.substring(saveAs.length() - 1) == "/") { saveAs = saveAs.substring(0,saveAs.length() - 1) ;}
@@ -150,6 +158,20 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
if (saveAs.indexOf(':') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf(':')); }
return saveAs;
}
+ /**
+ * Saves the given text to a file in the album's directory.
+ * @param url
+ * Target URL
+ * @param subdirectory
+ * Path to subdirectory where you want to save it
+ * @param text
+ * Text you want to save
+ * @param index
+ * Index of the item within the album
+ * @return
+ * True if the text was saved successfully
+ * False if saving failed
+ */
public boolean saveText(URL url, String subdirectory, String text, int index) {
String saveAs = fileNameFromURL(url);
return saveText(url,subdirectory,text,index,saveAs);
@@ -189,6 +211,14 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
}
return true;
}
+
+ /**
+ * Gets the filename prefix for a file based on its index in the album.
+ * @param index
+ * The file's index within the album
+ * @return
+ * The numeric prefix for the file, or an empty string if sort order is not being kept.
+ */
protected String getPrefix(int index) {
String prefix = "";
if (keepSortOrder() && Utils.getConfigBoolean("download.save_order", true)) {
diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java
index a315bcdc..8c73bbb6 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java
@@ -20,6 +20,7 @@ public abstract class AbstractJSONRipper extends AlbumRipper {
}
protected abstract String getDomain();
+ @Override
public abstract String getHost();
protected abstract JSONObject getFirstPage() throws IOException;
diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java
index a852e177..8209df68 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java
@@ -84,7 +84,14 @@ public abstract class AbstractRipper
}
}
}
-
+
+ /**
+ * Checks whether RipMe has already downloaded a URL
+ * @param url URL to check if downloaded
+ * @return
+ * Returns true if previously downloaded.
+ * Returns false if not yet downloaded.
+ */
private boolean hasDownloadedURL(String url) {
File file = new File(URLHistoryFile);
try {
@@ -118,6 +125,15 @@ public abstract class AbstractRipper
this.url = sanitizeURL(url);
}
+ /**
+ * Sets up the ripper: working directory, logger, file appender and thread pool.
+ * @throws IOException
+ * If the working directory or log file cannot be created.
+ */
public void setup() throws IOException {
setWorkingDir(this.url);
Logger rootLogger = Logger.getRootLogger();
@@ -155,9 +171,27 @@ public abstract class AbstractRipper
* @param cookies
* The cookies to send to the server while downloading this file.
* @return
+ * True if downloaded successfully
+ * False if failed to download
*/
protected abstract boolean addURLToDownload(URL url, File saveAs, String referrer, Map cookies);
+ /**
+ * Queues image to be downloaded and saved.
+ * @param url
+ * URL of the file
+ * @param prefix
+ * Prefix for the downloaded file
+ * @param subdirectory
+ * Path to get to desired directory from working directory
+ * @param referrer
+ * The HTTP referrer to use while downloading this file.
+ * @param cookies
+ * The cookies to send to the server while downloading this file.
+ * @return
+ * True if downloaded successfully
+ * False if failed to download
+ */
protected boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map cookies) {
if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
if (hasDownloadedURL(url.toExternalForm())) {
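
As a usage illustration of the overload documented above, a concrete ripper's downloadURL hook could queue a file with an order-keeping prefix, a subdirectory, a referrer and session cookies like this (a fragment of a hypothetical ripper; only the methods shown in this diff are assumed to exist):

    // Fragment of a hypothetical ripper, for illustration only.
    @Override
    public void downloadURL(URL url, int index) {
        Map<String, String> cookies = new HashMap<>();
        cookies.put("session", "example-session-token");   // illustrative cookie
        // prefix keeps album order, "videos" is an example subdirectory,
        // the referrer is the page the link was found on
        addURLToDownload(url, getPrefix(index), "videos", "https://example.com/album/123", cookies);
    }
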
@@ -322,6 +356,11 @@ public abstract class AbstractRipper
}
}
+ /**
+ * Gets the URL this ripper was created for.
+ * @return
+ * The URL to be ripped.
+ */
public URL getURL() {
return url;
}
@@ -335,8 +374,20 @@ public abstract class AbstractRipper
return workingDir;
}
+ @Override
public abstract void setWorkingDir(URL url) throws IOException;
+ /**
+ * Gets the default album title for a URL: the host name followed by the album's ID.
+ * @param url
+ * The URL you want to get the title of.
+ * @return
+ * host_URLid
+ * e.g. (for a reddit post)
+ * reddit_post_7mg2ur
+ * @throws MalformedURLException
+ * If the URL is malformed.
+ */
public String getAlbumTitle(URL url) throws MalformedURLException {
return getHost() + "_" + getGID(url);
}
@@ -391,7 +442,7 @@ public abstract class AbstractRipper
/**
* Sends an update message to the relevant observer(s) on this ripper.
- * @param status
+ * @param status Status to report to the observer
* @param message
*/
public void sendUpdate(STATUS status, Object message) {
@@ -400,9 +451,17 @@ public abstract class AbstractRipper
}
observer.update(this, new RipStatusMessage(status, message));
}
-
+
+ /**
+ * Get the completion percentage.
+ * @return
+ * Percentage complete
+ */
public abstract int getCompletionPercentage();
-
+ /**
+ * @return
+ * Text for status
+ */
public abstract String getStatusText();
/**
@@ -423,7 +482,9 @@ public abstract class AbstractRipper
cleanup();
}
}
-
+ /**
+ * Deletes the working directory if it contains no files.
+ */
private void cleanup() {
if (this.workingDir.list().length == 0) {
// No files, delete the dir
@@ -434,7 +495,15 @@ public abstract class AbstractRipper
}
}
}
-
+
+ /**
+ * Pauses thread for a set amount of time.
+ * @param milliseconds
+ * Amount of time (in milliseconds) that the thread gets paused for
+ * @return
+ * True if paused successfully
+ * False if failed to pause/got interrupted.
+ */
protected boolean sleep(int milliseconds) {
try {
logger.debug("Sleeping " + milliseconds + "ms");
diff --git a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
index a5b2c91e..a92f3870 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
@@ -13,6 +13,10 @@ import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
+
+/**
+ * Base class for rippers that download albums of files from a URL.
+ */
public abstract class AlbumRipper extends AbstractRipper {
private Map itemsPending = Collections.synchronizedMap(new HashMap());
@@ -34,10 +38,17 @@ public abstract class AlbumRipper extends AbstractRipper {
}
@Override
+ /**
+ * Returns the total number of files attempted (completed plus errored).
+ */
public int getCount() {
return itemsCompleted.size() + itemsErrored.size();
}
+ @Override
+ /**
+ * Queues a single file from the album for download.
+ */
public boolean addURLToDownload(URL url, File saveAs, String referrer, Map cookies) {
// Only download one file if this is a test.
if (super.isThisATest() &&
@@ -101,6 +112,9 @@ public abstract class AlbumRipper extends AbstractRipper {
}
@Override
+ /**
+ * Cleans up & tells user about successful download
+ */
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
return;
@@ -119,6 +133,9 @@ public abstract class AlbumRipper extends AbstractRipper {
}
@Override
+ /**
+ * Cleans up & tells user about failed download.
+ */
public void downloadErrored(URL url, String reason) {
if (observer == null) {
return;
@@ -131,6 +148,10 @@ public abstract class AlbumRipper extends AbstractRipper {
}
@Override
+ /**
+ * Notifies the user that a file in this album has already been
+ * downloaded previously.
+ */
public void downloadExists(URL url, File file) {
if (observer == null) {
return;
diff --git a/src/main/java/com/rarchives/ripme/ripper/DownloadThreadPool.java b/src/main/java/com/rarchives/ripme/ripper/DownloadThreadPool.java
index 30e3c2b7..a811c98a 100644
--- a/src/main/java/com/rarchives/ripme/ripper/DownloadThreadPool.java
+++ b/src/main/java/com/rarchives/ripme/ripper/DownloadThreadPool.java
@@ -23,17 +23,28 @@ public class DownloadThreadPool {
public DownloadThreadPool(String threadPoolName) {
initialize(threadPoolName);
}
-
+
+ /**
+ * Initializes the thread pool.
+ * @param threadPoolName Name of the thread pool.
+ */
private void initialize(String threadPoolName) {
int threads = Utils.getConfigInteger("threads.size", 10);
logger.debug("Initializing " + threadPoolName + " thread pool with " + threads + " threads");
threadPool = (ThreadPoolExecutor) Executors.newFixedThreadPool(threads);
}
-
+ /**
+ * Adds a thread to the execution pool.
+ * @param t
+ * Thread to be added.
+ */
public void addThread(Thread t) {
threadPool.execute(t);
}
+ /**
+ * Shuts down the thread pool and waits for queued downloads to finish.
+ */
public void waitForThreads() {
threadPool.shutdown();
try {
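
The new comments on DownloadThreadPool describe the standard ExecutorService lifecycle: create a fixed pool (sized from threads.size, default 10), execute() work on it, then shutdown() and wait for queued tasks to drain. The same pattern in plain JDK form, independent of RipMe's wrapper:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class PoolSketch {
        public static void main(String[] args) throws InterruptedException {
            ExecutorService pool = Executors.newFixedThreadPool(10);     // threads.size defaults to 10
            for (int i = 0; i < 5; i++) {
                final int n = i;
                pool.execute(() -> System.out.println("downloading item " + n));
            }
            pool.shutdown();                             // stop accepting new work
            pool.awaitTermination(1, TimeUnit.MINUTES);  // wait for queued downloads to finish
        }
    }
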
diff --git a/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java b/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java
index ee97ce60..437f18d0 100644
--- a/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java
+++ b/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java
@@ -136,6 +136,12 @@ class DownloadVideoThread extends Thread {
logger.info("[+] Saved " + url + " as " + this.prettySaveAs);
}
+ /**
+ * Gets the size of the file at the given URL via a HEAD request.
+ * @param url
+ * Target URL
+ * @return
+ * Content length reported by the server, in bytes
+ * @throws IOException If the HEAD request fails
+ */
private int getTotalBytes(URL url) throws IOException {
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("HEAD");
diff --git a/src/main/java/com/rarchives/ripme/ripper/RipperInterface.java b/src/main/java/com/rarchives/ripme/ripper/RipperInterface.java
index 57ff8418..550209c0 100644
--- a/src/main/java/com/rarchives/ripme/ripper/RipperInterface.java
+++ b/src/main/java/com/rarchives/ripme/ripper/RipperInterface.java
@@ -7,6 +7,8 @@ import java.net.URL;
/**
* I have no idea why I made this interface. Everything is captured within the AbstractRipper.
* Oh well, here's to encapsulation and abstraction! (raises glass)
+ *
+ * (cheers!)
*/
interface RipperInterface {
void rip() throws IOException;
diff --git a/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java b/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java
index 62be7858..13008cd9 100644
--- a/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java
@@ -74,6 +74,12 @@ public abstract class VideoRipper extends AbstractRipper {
return addURLToDownload(url, saveAs);
}
+
+ /**
+ * Creates & sets working directory based on URL.
+ * @param url
+ * Target URL
+ */
@Override
public void setWorkingDir(URL url) throws IOException {
String path = Utils.getWorkingDirectory().getCanonicalPath();
@@ -88,12 +94,23 @@ public abstract class VideoRipper extends AbstractRipper {
}
logger.debug("Set working directory to: " + this.workingDir);
}
-
+
+ /**
+ * @return
+ * Percentage of the video downloaded so far.
+ */
@Override
public int getCompletionPercentage() {
return (int) (100 * (bytesCompleted / (float) bytesTotal));
}
-
+
+ /**
+ * Called when the download completes successfully.
+ * @param url
+ * Target URL
+ * @param saveAs
+ * Path to file, including filename.
+ */
@Override
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
@@ -109,6 +126,14 @@ public abstract class VideoRipper extends AbstractRipper {
logger.error("Exception while updating observer: ", e);
}
}
+
+ /**
+ * Called when the download fails.
+ * @param url
+ * Target URL
+ * @param reason
+ * Reason why the download failed.
+ */
@Override
public void downloadErrored(URL url, String reason) {
if (observer == null) {
@@ -117,6 +142,15 @@ public abstract class VideoRipper extends AbstractRipper {
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_ERRORED, url + " : " + reason));
checkIfComplete();
}
+
+
+ /**
+ * Called when the file to be downloaded already exists on disk.
+ * @param url
+ * Target URL
+ * @param file
+ * Existing file
+ */
@Override
public void downloadExists(URL url, File file) {
if (observer == null) {
@@ -126,6 +160,11 @@ public abstract class VideoRipper extends AbstractRipper {
checkIfComplete();
}
+ /**
+ * Gets the status and changes it to a human-readable form.
+ * @return
+ * Status of current download.
+ */
@Override
public String getStatusText() {
StringBuilder sb = new StringBuilder();
@@ -139,6 +178,10 @@ public abstract class VideoRipper extends AbstractRipper {
}
@Override
+ /**
+ * Sanitizes the URL.
+ * The default implementation returns the URL unchanged.
+ */
public URL sanitizeURL(URL url) throws MalformedURLException {
return url;
}
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/CheveretoRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/CheveretoRipper.java
index 899db87d..4b5fb9e6 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/CheveretoRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/CheveretoRipper.java
@@ -28,7 +28,7 @@ public class CheveretoRipper extends AbstractHTMLRipper {
super(url);
}
- private static List explicit_domains_1 = Arrays.asList("hushpix.com", "tag-fox.com");
+ private static List explicit_domains_1 = Arrays.asList("hushpix.com", "tag-fox.com", "gwarchives.com");
@Override
public String getHost() {
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/EromeRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/EromeRipper.java
index e9f2fa54..84e63e76 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/EromeRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/EromeRipper.java
@@ -1,8 +1,3 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
@@ -62,6 +57,11 @@ public class EromeRipper extends AbstractHTMLRipper {
return super.getAlbumTitle(url);
}
+ @Override
+ public URL sanitizeURL(URL url) throws MalformedURLException {
+ return new URL(url.toExternalForm().replaceAll("https?://erome.com", "https://www.erome.com"));
+ }
+
@Override
public List getURLsFromPage(Document doc) {
@@ -99,7 +99,15 @@ public class EromeRipper extends AbstractHTMLRipper {
if (m.matches()) {
return m.group(1);
}
- throw new MalformedURLException("erome album not found in " + url + ", expected https://erome.com/album");
+
+ p = Pattern.compile("^https?://erome.com/a/([a-zA-Z0-9]*)/?$");
+ m = p.matcher(url.toExternalForm());
+
+ if (m.matches()) {
+ return m.group(1);
+ }
+
+ throw new MalformedURLException("erome album not found in " + url + ", expected https://www.erome.com/album");
}
public static List getURLs(URL url) throws IOException{
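
getGID now falls back to a second pattern so album links without the www subdomain are accepted, and sanitizeURL rewrites bare erome.com links to https://www.erome.com. A self-contained check of the two URL shapes; the www pattern below is assumed from the existing code (only the non-www one appears in this diff), and the album ID is made up:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class EromeGidSketch {
        static String gid(String url) {
            // with www first, then without - mirrors the order used in EromeRipper.getGID
            for (String regex : new String[] {
                    "^https?://www\\.erome\\.com/a/([a-zA-Z0-9]*)/?$",
                    "^https?://erome\\.com/a/([a-zA-Z0-9]*)/?$"}) {
                Matcher m = Pattern.compile(regex).matcher(url);
                if (m.matches()) {
                    return m.group(1);
                }
            }
            return null;
        }

        public static void main(String[] args) {
            System.out.println(gid("https://www.erome.com/a/AbC123"));  // AbC123
            System.out.println(gid("https://erome.com/a/AbC123"));      // AbC123
        }
    }
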
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java
index a12912a5..973796cf 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java
@@ -28,12 +28,16 @@ import com.rarchives.ripme.utils.Http;
public class FuraffinityRipper extends AbstractHTMLRipper {
- private static Map<String, String> cookies = null;
private static final String urlBase = "https://www.furaffinity.net";
+ private static Map<String, String> cookies = new HashMap<>();
+ static {
+ cookies.put("b", "bd5ccac8-51dc-4265-8ae1-7eac685ad667");
+ cookies.put("a", "7c41b782-d01d-4b0e-b45b-62a4f0b2a369");
+ }
// Thread pool for finding direct image links from "image" pages (html)
private DownloadThreadPool furaffinityThreadPool
- = new DownloadThreadPool( "furaffinity");
+ = new DownloadThreadPool( "furaffinity");
@Override
public DownloadThreadPool getThreadPool() {
@@ -59,57 +63,28 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
}
@Override
public Document getFirstPage() throws IOException {
-
- return Http.url(url).get();
- }
-
- private void login() throws IOException {
- String user = new String(Base64.decode("cmlwbWU="));
- String pass = new String(Base64.decode("cmlwbWVwYXNzd29yZA=="));
-
- Response loginPage = Http.url(urlBase + "/login/")
- .referrer(urlBase)
- .response();
- cookies = loginPage.cookies();
-
- Map formData = new HashMap<>();
- formData.put("action", "login");
- formData.put("retard_protection", "1");
- formData.put("name", user);
- formData.put("pass", pass);
- formData.put("login", "Login to FurAffinity");
-
- Response doLogin = Http.url(urlBase + "/login/?ref=" + url)
- .referrer(urlBase + "/login/")
- .data(formData)
- .method(Method.POST)
- .response();
- cookies.putAll(doLogin.cookies());
+ return Http.url(url).cookies(cookies).get();
}
@Override
public Document getNextPage(Document doc) throws IOException {
// Find next page
- Elements nextPageUrl = doc.select("td[align=right] form");
+ Elements nextPageUrl = doc.select("a.right");
if (nextPageUrl.size() == 0) {
throw new IOException("No more pages");
}
- String nextUrl = urlBase + nextPageUrl.first().attr("action");
+ String nextUrl = urlBase + nextPageUrl.first().attr("href");
sleep(500);
- Document nextPage = Http.url(nextUrl).get();
+ Document nextPage = Http.url(nextUrl).cookies(cookies).get();
- Elements hrefs = nextPage.select("div#no-images");
- if (hrefs.size() != 0) {
- throw new IOException("No more pages");
- }
return nextPage;
}
private String getImageFromPost(String url) {
try {
- logger.info("found url " + Http.url(url).get().select("meta[property=og:image]").attr("content"));
- return Http.url(url).get().select("meta[property=og:image]").attr("content");
+ logger.info("found url " + Http.url(url).cookies(cookies).get().select("meta[property=og:image]").attr("content"));
+ return Http.url(url).cookies(cookies).get().select("meta[property=og:image]").attr("content");
} catch (IOException e) {
return "";
}
@@ -169,8 +144,8 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
}
@Override
public boolean saveText(URL url, String subdirectory, String text, int index) {
- //TODO Make this better please?
- try {
+ //TODO Make this better please?
+ try {
stopCheck();
} catch (IOException e) {
return false;
@@ -181,7 +156,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
saveAs = text.split("\n")[0];
saveAs = saveAs.replaceAll("^(\\S+)\\s+by\\s+(.*)$", "$2_$1");
for (int i = 1;i < text.split("\n").length; i++) {
- newText = newText.replace("\\","").replace("/","").replace("~","") + "\n" + text.split("\n")[i];
+ newText = newText.replace("\\","").replace("/","").replace("~","") + "\n" + text.split("\n")[i];
}
try {
if (!subdirectory.equals("")) {
@@ -243,4 +218,4 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
}
-}
+}
\ No newline at end of file
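
Rather than posting the login form on every run, the ripper now ships two preset session cookies ("a" and "b") and attaches them to every request through Http.url(...).cookies(cookies), which is what lets it reach non-public albums. Stripped of RipMe's Http wrapper, the same cookie-authenticated fetch with plain jsoup looks roughly like this (cookie values are placeholders):

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    import org.jsoup.Jsoup;
    import org.jsoup.nodes.Document;

    public class CookieFetchSketch {
        public static void main(String[] args) throws IOException {
            Map<String, String> cookies = new HashMap<>();
            cookies.put("a", "placeholder-a-cookie");   // FurAffinity session cookies
            cookies.put("b", "placeholder-b-cookie");

            Document doc = Jsoup.connect("https://www.furaffinity.net/gallery/example/")
                    .cookies(cookies)                   // send the session cookies with the request
                    .userAgent("Mozilla/5.0")
                    .get();
            System.out.println(doc.title());
        }
    }
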
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/HentaiCafeRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/HentaiCafeRipper.java
index ba2f0b83..d25bb47e 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/HentaiCafeRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/HentaiCafeRipper.java
@@ -31,7 +31,7 @@ public class HentaiCafeRipper extends AbstractHTMLRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
- Pattern p = Pattern.compile("https?://hentai\\.cafe/([a-zA-Z1-9_\\-%]*)/?$");
+ Pattern p = Pattern.compile("https?://hentai\\.cafe/([a-zA-Z0-9_\\-%]*)/?$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java
index 0352e982..ec7825e9 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java
@@ -19,6 +19,8 @@ import com.rarchives.ripme.utils.Http;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
+import com.rarchives.ripme.ui.RipStatusMessage;
+import com.rarchives.ripme.utils.Utils;
public class InstagramRipper extends AbstractHTMLRipper {
@@ -234,7 +236,11 @@ public class InstagramRipper extends AbstractHTMLRipper {
}
addURLToDownload(new URL(getOriginalUrl(data.getString("thumbnail_src"))), image_date);
} else {
- addURLToDownload(new URL(getVideoFromPage(data.getString("code"))), image_date);
+ if (!Utils.getConfigBoolean("instagram.download_images_only", false)) {
+ addURLToDownload(new URL(getVideoFromPage(data.getString("code"))), image_date);
+ } else {
+ sendUpdate(RipStatusMessage.STATUS.DOWNLOAD_WARN, "Skipping video " + data.getString("code"));
+ }
}
} catch (MalformedURLException e) {
return imageURLs;
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/PornpicsRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/PornpicsRipper.java
new file mode 100644
index 00000000..b779c480
--- /dev/null
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/PornpicsRipper.java
@@ -0,0 +1,63 @@
+package com.rarchives.ripme.ripper.rippers;
+
+import com.rarchives.ripme.ripper.AbstractHTMLRipper;
+import com.rarchives.ripme.utils.Http;
+import org.jsoup.nodes.Document;
+import org.jsoup.nodes.Element;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+public class PornpicsRipper extends AbstractHTMLRipper {
+
+ public PornpicsRipper(URL url) throws IOException {
+ super(url);
+ }
+
+ @Override
+ public String getHost() {
+ return "pornpics";
+ }
+
+ @Override
+ public String getDomain() {
+ return "pornpics.com";
+ }
+
+ @Override
+ public String getGID(URL url) throws MalformedURLException {
+ Pattern p = Pattern.compile("https?://www.pornpics.com/galleries/([a-zA-Z0-9_-]*)/?");
+ Matcher m = p.matcher(url.toExternalForm());
+ if (m.matches()) {
+ return m.group(1);
+ }
+ throw new MalformedURLException("Expected pornpics URL format: " +
+ "www.pornpics.com/galleries/ID - got " + url + " instead");
+ }
+
+ @Override
+ public Document getFirstPage() throws IOException {
+ // "url" is an instance field of the superclass
+ return Http.url(url).get();
+ }
+
+ @Override
+ public List<String> getURLsFromPage(Document doc) {
+ List<String> result = new ArrayList<>();
+ for (Element el : doc.select("a.rel-link")) {
+ result.add(el.attr("href"));
+ }
+ return result;
+ }
+
+ @Override
+ public void downloadURL(URL url, int index) {
+ addURLToDownload(url, getPrefix(index));
+ }
+}
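
getURLsFromPage in the new ripper simply collects the href of every a.rel-link anchor on a gallery page, which on pornpics.com points at the full-size image. The jsoup selection it relies on, demonstrated on an inline HTML snippet with made-up URLs:

    import java.util.ArrayList;
    import java.util.List;

    import org.jsoup.Jsoup;
    import org.jsoup.nodes.Document;
    import org.jsoup.nodes.Element;

    public class RelLinkSketch {
        public static void main(String[] args) {
            String html = "<div class=\"gallery\">"
                    + "<a class=\"rel-link\" href=\"https://cdn.example.com/full/1.jpg\"><img src=\"t1.jpg\"></a>"
                    + "<a class=\"rel-link\" href=\"https://cdn.example.com/full/2.jpg\"><img src=\"t2.jpg\"></a>"
                    + "</div>";
            Document doc = Jsoup.parse(html);

            List<String> result = new ArrayList<>();
            for (Element el : doc.select("a.rel-link")) {   // same selector the ripper uses
                result.add(el.attr("href"));
            }
            System.out.println(result);  // [https://cdn.example.com/full/1.jpg, https://cdn.example.com/full/2.jpg]
        }
    }
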
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java
index 018cd9e4..7a8dff75 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java
@@ -4,6 +4,9 @@ import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -34,18 +37,19 @@ public class TumblrRipper extends AlbumRipper {
private static String TUMBLR_AUTH_CONFIG_KEY = "tumblr.auth";
private static boolean useDefaultApiKey = false; // fall-back for bad user-specified key
- private static final String DEFAULT_API_KEY = "JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX";
+ private static final List apiKeys = Arrays.asList("JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX",
+ "FQrwZMCxVnzonv90rgNUJcAk4FpnoS0mYuSuGYqIpM2cFgp9L4",
+ "qpdkY6nMknksfvYAhf2xIHp0iNRLkMlcWShxqzXyFJRxIsZ1Zz");
+ private static final String API_KEY = apiKeys.get(new Random().nextInt(apiKeys.size()));
- private static final String API_KEY;
- static {
- API_KEY = Utils.getConfigString(TUMBLR_AUTH_CONFIG_KEY, DEFAULT_API_KEY);
- }
private static String getApiKey() {
- if (useDefaultApiKey) {
- return DEFAULT_API_KEY;
- } else {
+ if (useDefaultApiKey || Utils.getConfigString(TUMBLR_AUTH_CONFIG_KEY, "JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX").equals("JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX")) {
+ logger.info("Using api key: " + API_KEY);
return API_KEY;
+ } else {
+ logger.info("Using user tumblr.auth api key");
+ return Utils.getConfigString(TUMBLR_AUTH_CONFIG_KEY, "JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX");
}
}
@@ -207,7 +211,14 @@ public class TumblrRipper extends AlbumRipper {
for (int j = 0; j < photos.length(); j++) {
photo = photos.getJSONObject(j);
try {
- fileURL = new URL(photo.getJSONObject("original_size").getString("url").replaceAll("http", "https"));
+ if (Utils.getConfigBoolean("tumblr.get_raw_image", false)) {
+ String urlString = photo.getJSONObject("original_size").getString("url").replaceAll("https", "http");
+ urlString = urlString.replaceAll("https?://[a-sA-Z0-9_\\-\\.]*\\.tumblr", "http://data.tumblr");
+ urlString = urlString.replaceAll("_\\d+\\.", "_raw.");
+ fileURL = new URL(urlString);
+ } else {
+ fileURL = new URL(photo.getJSONObject("original_size").getString("url").replaceAll("http", "https"));
+ }
m = p.matcher(fileURL.toString());
if (m.matches()) {
addURLToDownload(fileURL);
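
When tumblr.get_raw_image is enabled, the ripper rewrites each photo's original_size URL into the _raw variant served from data.tumblr.com (the unscaled upload), using the three replaceAll steps shown above. Applied to a made-up sized image URL:

    public class TumblrRawSketch {
        /** Rewrites a sized Tumblr image URL into its data.tumblr.com _raw form. */
        static String toRaw(String urlString) {
            urlString = urlString.replaceAll("https", "http");
            urlString = urlString.replaceAll("https?://[a-zA-Z0-9_\\-\\.]*\\.tumblr", "http://data.tumblr");
            urlString = urlString.replaceAll("_\\d+\\.", "_raw.");
            return urlString;
        }

        public static void main(String[] args) {
            // hypothetical sized image URL
            String sized = "https://68.media.tumblr.com/abcdef/tumblr_example_1280.jpg";
            System.out.println(toRaw(sized));
            // http://data.tumblr.com/abcdef/tumblr_example_raw.jpg
        }
    }
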
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java
index dd0d20eb..35f42564 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java
@@ -32,6 +32,8 @@ public class GfycatRipper extends VideoRipper {
@Override
public URL sanitizeURL(URL url) throws MalformedURLException {
+ url = new URL(url.toExternalForm().replace("/gifs/detail", ""));
+
return url;
}
@@ -64,6 +66,10 @@ public class GfycatRipper extends VideoRipper {
*/
public static String getVideoURL(URL url) throws IOException {
logger.info("Retrieving " + url.toExternalForm());
+
+ //Sanitize the URL first
+ url = new URL(url.toExternalForm().replace("/gifs/detail", ""));
+
Document doc = Http.url(url).get();
Elements videos = doc.select("source#mp4Source");
if (videos.size() == 0) {
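
Both sanitizeURL and the static getVideoURL helper now strip the "/gifs/detail" path segment, so reddit submissions that link the detail page resolve to the same canonical gfycat URL as a direct link. The normalization itself is a one-liner:

    import java.net.MalformedURLException;
    import java.net.URL;

    public class GfycatUrlSketch {
        public static void main(String[] args) throws MalformedURLException {
            URL detail = new URL("https://gfycat.com/gifs/detail/TemptingExcellentIchthyosaurs");
            URL clean = new URL(detail.toExternalForm().replace("/gifs/detail", ""));
            System.out.println(clean);  // https://gfycat.com/TemptingExcellentIchthyosaurs
        }
    }
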
diff --git a/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java b/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java
index 646ce266..bf4508bd 100644
--- a/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java
+++ b/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java
@@ -21,7 +21,7 @@ import com.rarchives.ripme.utils.Utils;
public class UpdateUtils {
private static final Logger logger = Logger.getLogger(UpdateUtils.class);
- private static final String DEFAULT_VERSION = "1.7.8";
+ private static final String DEFAULT_VERSION = "1.7.11";
private static final String REPO_NAME = "ripmeapp/ripme";
private static final String updateJsonURL = "https://raw.githubusercontent.com/" + REPO_NAME + "/master/ripme.json";
private static final String mainFileName = "ripme.jar";
diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/CheveretoRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/CheveretoRipperTest.java
index 519c5a41..0c6344ea 100644
--- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/CheveretoRipperTest.java
+++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/CheveretoRipperTest.java
@@ -15,4 +15,9 @@ public class CheveretoRipperTest extends RippersTest {
CheveretoRipper ripper = new CheveretoRipper(new URL("http://tag-fox.com/album/Thjb"));
testRipper(ripper);
}
+
+ public void testGwarchives() throws IOException {
+ CheveretoRipper ripper = new CheveretoRipper(new URL("https://gwarchives.com/album/ns4q"));
+ testRipper(ripper);
+ }
}
diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuskatorRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuskatorRipperTest.java
index 952887e5..f5bc6f0d 100644
--- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuskatorRipperTest.java
+++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuskatorRipperTest.java
@@ -7,7 +7,7 @@ import com.rarchives.ripme.ripper.rippers.FuskatorRipper;
public class FuskatorRipperTest extends RippersTest {
public void testFuskatorAlbum() throws IOException {
- FuskatorRipper ripper = new FuskatorRipper(new URL("http://fuskator.com/full/emJa1U6cqbi/index.html"));
+ FuskatorRipper ripper = new FuskatorRipper(new URL("https://fuskator.com/thumbs/hqt6pPXAf9z/Shaved-Blonde-Babe-Katerina-Ambre.html"));
testRipper(ripper);
}
}
diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatRipperTest.java
new file mode 100644
index 00000000..ca73f138
--- /dev/null
+++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatRipperTest.java
@@ -0,0 +1,26 @@
+package com.rarchives.ripme.tst.ripper.rippers;
+
+import com.rarchives.ripme.ripper.rippers.video.GfycatRipper;
+import java.io.IOException;
+import java.net.URL;
+
+
+public class GfycatRipperTest extends RippersTest {
+
+ /**
+ * Rips correctly formatted URL directly from Gfycat
+ * @throws IOException
+ */
+ public void testGfycatGoodURL() throws IOException {
+ GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/TemptingExcellentIchthyosaurs"));
+ testRipper(ripper);
+ }
+ /**
+ * Rips badly formatted URL directly from Gfycat
+ * @throws IOException
+ */
+ public void testGfycatBadURL() throws IOException {
+ GfycatRipper ripper = new GfycatRipper(new URL("https://gfycat.com/gifs/detail/limitedtestyamericancrow"));
+ testRipper(ripper);
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/PornpicsRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/PornpicsRipperTest.java
new file mode 100644
index 00000000..1f79b254
--- /dev/null
+++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/PornpicsRipperTest.java
@@ -0,0 +1,13 @@
+package com.rarchives.ripme.tst.ripper.rippers;
+
+import java.io.IOException;
+import java.net.URL;
+
+import com.rarchives.ripme.ripper.rippers.PornpicsRipper;
+
+public class PornpicsRipperTest extends RippersTest {
+ public void testRip() throws IOException {
+ PornpicsRipper ripper = new PornpicsRipper(new URL("https://www.pornpics.com/galleries/pornstar-dahlia-sky-takes-a-fat-cock-in-her-butthole-wearing-fishnet-stockings/"));
+ testRipper(ripper);
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/RedditRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/RedditRipperTest.java
index f20a21f2..fdd61cf9 100644
--- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/RedditRipperTest.java
+++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/RedditRipperTest.java
@@ -5,6 +5,7 @@ import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.RedditRipper;
+import com.rarchives.ripme.ripper.rippers.video.GfycatRipper;
public class RedditRipperTest extends RippersTest {
// https://github.com/RipMeApp/ripme/issues/253 - Disabled tests: RedditRipperTest#testRedditSubreddit*Rip is flaky
@@ -22,4 +23,25 @@ public class RedditRipperTest extends RippersTest {
RedditRipper ripper = new RedditRipper(new URL("http://www.reddit.com/r/UnrealGirls/comments/1ziuhl/in_class_veronique_popa/"));
testRipper(ripper);
}
+
+ /**
+ * Gfycat test:
+ * Tests a good Gfycat URL (without "/gifs/detail")
+ * @throws IOException
+ */
+ public void testRedditGfyGoodURL() throws IOException {
+ RedditRipper ripper = new RedditRipper(new URL("https://www.reddit.com/r/bottesting/comments/7msozf/good_link/"));
+ testRipper(ripper);
+ }
+
+
+ /**
+ * Gfycat test:
+ * Tests a bad Gfycat URL containing "/gifs/detail".
+ * @throws IOException
+ */
+ public void testRedditGfyBadURL() throws IOException {
+ RedditRipper ripper = new RedditRipper(new URL("https://www.reddit.com/r/bottesting/comments/7msmhi/bad_link/"));
+ testRipper(ripper);
+ }
}