package com.rarchives.ripme.ripper;

import java.awt.Desktop;
import java.io.*;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Scanner;

import org.apache.log4j.FileAppender;
import org.apache.log4j.Logger;
import org.jsoup.HttpStatusException;

import com.rarchives.ripme.ui.RipStatusComplete;
import com.rarchives.ripme.ui.RipStatusHandler;
import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;

public abstract class AbstractRipper
                extends Observable
                implements RipperInterface, Runnable {

    protected static final Logger logger = Logger.getLogger(AbstractRipper.class);
    private final String URLHistoryFile = Utils.getURLHistoryFile();

    public static final String USER_AGENT =
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36";

    protected URL url;
    protected File workingDir;
    DownloadThreadPool threadPool;
    RipStatusHandler observer = null;

    private boolean completed = true;

    public abstract void rip() throws IOException;
    public abstract String getHost();
    public abstract String getGID(URL url) throws MalformedURLException;
    public boolean hasASAPRipping() { return false; }
    // Incremented every time addURLToDownload skips an already-downloaded URL
    public int alreadyDownloadedUrls = 0;
    private boolean shouldStop = false;
    private boolean thisIsATest = false;

    public void stop() {
        shouldStop = true;
    }
    public boolean isStopped() {
        return shouldStop;
    }
    protected void stopCheck() throws IOException {
        if (shouldStop) {
            throw new IOException("Ripping interrupted");
        }
    }

    /**
     * Adds a URL to the url history file.
     * @param downloadedURL URL to add to the url history file
     */
    private void writeDownloadedURL(String downloadedURL) throws IOException {
        downloadedURL = normalizeUrl(downloadedURL);
        BufferedWriter bw = null;
        FileWriter fw = null;
        try {
            File file = new File(URLHistoryFile);
            if (!new File(Utils.getConfigDir()).exists()) {
                logger.error("Config dir doesn't exist");
                logger.info("Making config dir");
                boolean couldMakeDir = new File(Utils.getConfigDir()).mkdirs();
                if (!couldMakeDir) {
                    logger.error("Couldn't make config dir");
                    return;
                }
            }
            // If the file doesn't exist, create it
            if (!file.exists()) {
                boolean couldCreateFile = file.createNewFile();
                if (!couldCreateFile) {
                    logger.error("Couldn't create url history file");
                    return;
                }
            }
            if (!file.canWrite()) {
                logger.error("Can't write to url history file: " + URLHistoryFile);
                return;
            }
            fw = new FileWriter(file.getAbsoluteFile(), true);
            bw = new BufferedWriter(fw);
            bw.write(downloadedURL);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (bw != null)
                    bw.close();
                if (fw != null)
                    fw.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }
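
    // A functionally equivalent alternative for the write above, using
    // try-with-resources so the writer is closed automatically (sketch only;
    // 'file' and 'downloadedURL' refer to the locals of the method above):
    //
    //     try (BufferedWriter bw = new BufferedWriter(
    //             new FileWriter(file.getAbsoluteFile(), true))) {
    //         bw.write(downloadedURL);
    //     } catch (IOException e) {
    //         e.printStackTrace();
    //     }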

    /**
     * Normalizes a URL before it is written to or checked against the url history file.
     * The default implementation returns the URL unchanged; rippers may override it.
     * @param url URL to normalize
     * @return The normalized URL
     */
    public String normalizeUrl(String url) {
        return url;
    }
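
    // Illustrative sketch (not part of this class): a ripper whose URLs are
    // equivalent regardless of query string could override normalizeUrl so the
    // history file treats "?page=2"-style variants as the same URL:
    //
    //     @Override
    //     public String normalizeUrl(String url) {
    //         int queryStart = url.indexOf('?');
    //         return queryStart >= 0 ? url.substring(0, queryStart) : url;
    //     }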

    /**
     * Checks to see if Ripme has already downloaded a URL
     * @param url URL to check if downloaded
     * @return
     *      Returns true if previously downloaded.
     *      Returns false if not yet downloaded.
     */
    private boolean hasDownloadedURL(String url) {
        File file = new File(URLHistoryFile);
        url = normalizeUrl(url);

        try (Scanner scanner = new Scanner(file)) {
            while (scanner.hasNextLine()) {
                final String lineFromFile = scanner.nextLine();
                if (lineFromFile.equals(url)) {
                    return true;
                }
            }
        } catch (FileNotFoundException e) {
            return false;
        }

        return false;
    }

    /**
     * Ensures the inheriting ripper can rip this URL, and throws an exception if it cannot.
     * Stores the sanitized URL to rip; the working directory and thread pool are
     * set up later by {@link #setup()}.
     *
     * @param url
     *      URL to rip.
     * @throws IOException
     *      If anything goes wrong.
     */
    public AbstractRipper(URL url) throws IOException {
        if (!canRip(url)) {
            throw new MalformedURLException("Unable to rip url: " + url);
        }
        this.url = sanitizeURL(url);
    }

    /**
     * Sets up the ripper's:
     *      working directory,
     *      log file appender (so log output goes to the album's log.txt),
     *      download thread pool.
     * @throws IOException
     *      If the working directory could not be created.
     */
    public void setup() throws IOException {
        setWorkingDir(this.url);
        Logger rootLogger = Logger.getRootLogger();
        FileAppender fa = (FileAppender) rootLogger.getAppender("FILE");
        if (fa != null) {
            fa.setFile(this.workingDir + File.separator + "log.txt");
            fa.activateOptions();
        }

        this.threadPool = new DownloadThreadPool();
    }

    public void setObserver(RipStatusHandler obs) {
        this.observer = obs;
    }

    /**
     * Queues image to be downloaded and saved.
     * @param url
     *      URL of the file
     * @param saveAs
     *      Path of the local file to save the content to.
     * @return True on success, false on failure.
     */
    public abstract boolean addURLToDownload(URL url, File saveAs);

    /**
     * Queues image to be downloaded and saved.
     * @param url
     *      URL of the file
     * @param saveAs
     *      Path of the local file to save the content to.
     * @param referrer
     *      The HTTP referrer to use while downloading this file.
     * @param cookies
     *      The cookies to send to the server while downloading this file.
     * @param getFileExtFromMIME
     *      Whether to take the file extension from the response's MIME type
     *      instead of from the URL.
     * @return
     *      True if downloaded successfully
     *      False if failed to download
     */
    protected abstract boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String, String> cookies,
                                                Boolean getFileExtFromMIME);

    /**
     * Queues image to be downloaded and saved.
     * @param url
     *      URL of the file
     * @param prefix
     *      Prefix for the downloaded file
     * @param subdirectory
     *      Path to get to desired directory from working directory
     * @param referrer
     *      The HTTP referrer to use while downloading this file.
     * @param cookies
     *      The cookies to send to the server while downloading this file.
     * @param fileName
     *      The name the file will be written to
     * @param extension
     *      The extension to save the file with; if null, it is derived from the URL.
     * @param getFileExtFromMIME
     *      Whether to take the file extension from the response's MIME type.
     * @return
     *      True if downloaded successfully
     *      False if failed to download
     */
    protected boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map<String, String> cookies, String fileName, String extension, Boolean getFileExtFromMIME) {
        // Don't re-add the url if it was downloaded in a previous rip
        if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
            if (hasDownloadedURL(url.toExternalForm())) {
                sendUpdate(STATUS.DOWNLOAD_WARN, "Already downloaded " + url.toExternalForm());
                alreadyDownloadedUrls += 1;
                return false;
            }
        }
        try {
            stopCheck();
        } catch (IOException e) {
            logger.debug("Ripper has been stopped");
            return false;
        }
        logger.debug("url: " + url + ", prefix: " + prefix + ", subdirectory: " + subdirectory + ", referrer: " + referrer + ", cookies: " + cookies + ", fileName: " + fileName);
        String saveAs = getFileName(url, fileName, extension);
        File saveFileAs;
        try {
            if (!subdirectory.equals("")) {
                subdirectory = File.separator + subdirectory;
            }
            prefix = Utils.filesystemSanitized(prefix);
            saveFileAs = new File(
                    workingDir.getCanonicalPath()
                    + subdirectory
                    + File.separator
                    + prefix
                    + saveAs);
        } catch (IOException e) {
            logger.error("[!] Error creating save file path for URL '" + url + "':", e);
            return false;
        }
        logger.debug("Downloading " + url + " to " + saveFileAs);
        if (!saveFileAs.getParentFile().exists()) {
            logger.info("[+] Creating directory: " + Utils.removeCWD(saveFileAs.getParent()));
            saveFileAs.getParentFile().mkdirs();
        }
        if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
            try {
                writeDownloadedURL(url.toExternalForm() + "\n");
            } catch (IOException e) {
                logger.debug("Unable to write URL history file");
            }
        }
        return addURLToDownload(url, saveFileAs, referrer, cookies, getFileExtFromMIME);
    }

    protected boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map<String,String> cookies, String fileName, String extension) {
        return addURLToDownload(url, prefix, subdirectory, referrer, cookies, fileName, extension, false);
    }

    protected boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map<String, String> cookies, String fileName) {
        return addURLToDownload(url, prefix, subdirectory, referrer, cookies, fileName, null);
    }

    /**
     * Queues a file to be downloaded and saved, with options.
     * @param url
     *      URL to download.
     * @param prefix
     *      Prefix to prepend to the saved filename.
     * @param subdirectory
     *      Sub-directory of the working directory to save the images to.
     * @return True on success, false on failure.
     */
    protected boolean addURLToDownload(URL url, String prefix, String subdirectory) {
        return addURLToDownload(url, prefix, subdirectory, null, null, null);
    }

    protected boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map<String, String> cookies) {
        return addURLToDownload(url, prefix, subdirectory, referrer, cookies, null);
    }

    /**
     * Queues image to be downloaded and saved.
     * Uses the filename from the URL (and 'prefix') to decide the filename.
     * @param url
     *      URL to download
     * @param prefix
     *      Text to prepend to the saved filename.
     * @return True on success, false on failure.
     */
    protected boolean addURLToDownload(URL url, String prefix) {
        // Use empty subdirectory
        return addURLToDownload(url, prefix, "");
    }
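
    // Illustrative sketch (not part of this class): a concrete ripper's rip()
    // implementation typically walks an album page and queues each media URL
    // through one of the overloads above, e.g. with an index-based prefix so
    // files sort in album order. The variable names below are placeholders.
    //
    //     int index = 1;
    //     for (String imageUrl : imageUrlsFoundOnPage) {
    //         addURLToDownload(new URL(imageUrl), String.format("%03d_", index), "");
    //         index++;
    //     }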

    public static String getFileName(URL url, String fileName, String extension) {
        String saveAs;
        if (fileName != null) {
            saveAs = fileName;
        } else {
            saveAs = url.toExternalForm();
            saveAs = saveAs.substring(saveAs.lastIndexOf('/') + 1);
        }
        if (extension == null) {
            // Get the extension from the URL's last path segment.
            // Note: String.split takes a regex, so the dot must be escaped.
            String[] lastBitOfURL = url.toExternalForm().split("/");
            String[] lastBit = lastBitOfURL[lastBitOfURL.length - 1].split("\\.");
            if (lastBit.length > 1) {
                // The last piece may still carry a query string or fragment; drop it
                extension = lastBit[lastBit.length - 1].split("[?#&:]")[0];
            }
        }

        // Strip query strings, fragments and other URL leftovers from the filename
        if (saveAs.indexOf('?') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf('?')); }
        if (saveAs.indexOf('#') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf('#')); }
        if (saveAs.indexOf('&') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf('&')); }
        if (saveAs.indexOf(':') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf(':')); }
        // Append the extension unless the name already ends with it
        if (extension != null && !saveAs.endsWith("." + extension)) {
            saveAs = saveAs + "." + extension;
        }
        return saveAs;
    }
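
    // Worked example (sketch): for url = "https://example.com/a/photo.jpg?sz=large"
    // with fileName = null and extension = null, the last path segment
    // "photo.jpg?sz=large" becomes the candidate name, the derived extension is
    // "jpg", the query string is stripped, and the returned filename is "photo.jpg".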

    /**
     * Waits for downloading threads to complete.
     */
    protected void waitForThreads() {
        logger.debug("Waiting for threads to finish");
        completed = false;
        threadPool.waitForThreads();
        checkIfComplete();
    }

    /**
     * Notifies observers that source is being retrieved.
     * @param url
     *      URL being retrieved
     */
    public void retrievingSource(String url) {
        RipStatusMessage msg = new RipStatusMessage(STATUS.LOADING_RESOURCE, url);
        if (observer != null) {
            observer.update(this, msg);
        }
    }

    /**
     * Notifies observers that a file download has completed.
     * @param url
     *      URL that was completed.
     * @param saveAs
     *      Where the downloaded file is stored.
     */
    public abstract void downloadCompleted(URL url, File saveAs);
    /**
     * Notifies observers that a file could not be downloaded (includes a reason).
     * @param url
     *      URL that failed to download.
     * @param reason
     *      Reason the download failed.
     */
    public abstract void downloadErrored(URL url, String reason);
    /**
     * Notify observers that a download could not be completed,
     * but was not technically an "error".
     * @param url
     *      URL that was not downloaded.
     * @param file
     *      Local file that already exists for this URL.
     */
    public abstract void downloadExists(URL url, File file);

    /**
     * @return Number of files downloaded.
     */
    int getCount() {
        return 1;
    }

    /**
     * Notifies observers and updates state if all files have been ripped.
     */
    void checkIfComplete() {
        if (observer == null) {
            logger.debug("observer is null");
            return;
        }

        if (!completed) {
            completed = true;
            logger.info(" Rip completed!");

            RipStatusComplete rsc = new RipStatusComplete(workingDir, getCount());
            RipStatusMessage msg = new RipStatusMessage(STATUS.RIP_COMPLETE, rsc);
            observer.update(this, msg);

            Logger rootLogger = Logger.getRootLogger();
            FileAppender fa = (FileAppender) rootLogger.getAppender("FILE");
            if (fa != null) {
                logger.debug("Changing log file back to 'ripme.log'");
                fa.setFile("ripme.log");
                fa.activateOptions();
            }
            if (Utils.getConfigBoolean("urls_only.save", false)) {
                String urlFile = this.workingDir + File.separator + "urls.txt";
                try {
                    Desktop.getDesktop().open(new File(urlFile));
                } catch (IOException e) {
                    logger.warn("Error while opening " + urlFile, e);
                }
            }
        }
    }

    /**
     * Gets the URL being ripped.
     * @return
     *      The URL this ripper was constructed with (after sanitization).
     */
    public URL getURL() {
        return url;
    }

    /**
     * @return
     *      Path to the directory in which all files
     *      ripped via this ripper will be stored.
     */
    public File getWorkingDir() {
        return workingDir;
    }

    @Override
    public abstract void setWorkingDir(URL url) throws IOException;

    /**
     * Gets the album title for the given URL.
     * @param url
     *      The URL you want to get the title of.
     * @return
     *      host_URLid
     *      e.g. (for a reddit post)
     *      reddit_post_7mg2ur
     * @throws MalformedURLException
     *      If the URL is malformed.
     */
    public String getAlbumTitle(URL url) throws MalformedURLException {
        return getHost() + "_" + getGID(url);
    }

    /**
     * Finds, instantiates, and returns a compatible ripper for the given URL.
     * @param url
     *      URL to rip.
     * @return
     *      Instantiated ripper ready to rip the given URL.
     * @throws Exception
     *      If no compatible rippers can be found.
     */
    public static AbstractRipper getRipper(URL url) throws Exception {
        for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers")) {
            try {
                AlbumRipper ripper = (AlbumRipper) constructor.newInstance(url); // by design: can throw ClassCastException
                logger.debug("Found album ripper: " + ripper.getClass().getName());
                return ripper;
            } catch (Exception e) {
                // Incompatible rippers *will* throw exceptions during instantiation.
            }
        }
        for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers.video")) {
            try {
                VideoRipper ripper = (VideoRipper) constructor.newInstance(url); // by design: can throw ClassCastException
                logger.debug("Found video ripper: " + ripper.getClass().getName());
                return ripper;
            } catch (Exception e) {
                // Incompatible rippers *will* throw exceptions during instantiation.
            }
        }
        throw new Exception("No compatible ripper found");
    }
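
    // Illustrative sketch (not part of this class): the typical lifecycle a
    // caller such as the UI or CLI drives. The URL and statusHandler are
    // placeholders.
    //
    //     AbstractRipper ripper = AbstractRipper.getRipper(new URL("https://example.com/album/abc123"));
    //     ripper.setup();
    //     ripper.setObserver(statusHandler);  // statusHandler: a RipStatusHandler
    //     new Thread(ripper).start();         // AbstractRipper implements Runnable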

    /**
     * @param pkg
     *      The package name.
     * @return
     *      List of constructors for all eligible Rippers.
     * @throws Exception
     *      If a ripper class cannot be loaded or lacks a URL constructor.
     */
    public static List<Constructor<?>> getRipperConstructors(String pkg) throws Exception {
        List<Constructor<?>> constructors = new ArrayList<>();
        for (Class<?> clazz : Utils.getClassesForPackage(pkg)) {
            if (AbstractRipper.class.isAssignableFrom(clazz)) {
                constructors.add(clazz.getConstructor(URL.class));
            }
        }
        return constructors;
    }

    /**
     * Sends an update message to the relevant observer(s) on this ripper.
     * @param status
     *      Status of the rip (e.g. downloading, errored, complete).
     * @param message
     *      Message payload to pass along with the status.
     */
    public void sendUpdate(STATUS status, Object message) {
        if (observer == null) {
            return;
        }
        observer.update(this, new RipStatusMessage(status, message));
    }

    /**
     * Get the completion percentage.
     * @return
     *      Percentage complete
     */
    public abstract int getCompletionPercentage();
    /**
     * @return
     *      Text for status
     */
    public abstract String getStatusText();

    /**
     * Rips the album when the thread is invoked.
     */
    public void run() {
        try {
            rip();
        } catch (HttpStatusException e) {
            logger.error("Got exception while running ripper:", e);
            waitForThreads();
            sendUpdate(STATUS.RIP_ERRORED, "HTTP status code " + e.getStatusCode() + " for URL " + e.getUrl());
        } catch (Exception e) {
            logger.error("Got exception while running ripper:", e);
            waitForThreads();
            sendUpdate(STATUS.RIP_ERRORED, e.getMessage());
        } finally {
            cleanup();
        }
    }
    /**
     * Deletes the working directory if the rip left it empty.
     */
    private void cleanup() {
        String[] contents = this.workingDir.list();
        if (contents != null && contents.length == 0) {
            // No files, delete the dir
            logger.info("Deleting empty directory " + this.workingDir);
            boolean deleteResult = this.workingDir.delete();
            if (!deleteResult) {
                logger.error("Unable to delete empty directory " + this.workingDir);
            }
        }
    }

    /**
     * Pauses thread for a set amount of time.
     * @param milliseconds
     *      Amount of time (in milliseconds) that the thread gets paused for
     * @return
     *      True if paused successfully
     *      False if failed to pause/got interrupted.
     */
    protected boolean sleep(int milliseconds) {
        try {
            logger.debug("Sleeping " + milliseconds + "ms");
            Thread.sleep(milliseconds);
            return true;
        } catch (InterruptedException e) {
            logger.error("Interrupted while waiting to load next page", e);
            return false;
        }
    }

    public void setBytesTotal(int bytes) {
        // Do nothing
    }
    public void setBytesCompleted(int bytes) {
        // Do nothing
    }

    /** Methods for detecting when we're running a test. */
    public void markAsTest() {
        logger.debug("THIS IS A TEST RIP");
        thisIsATest = true;
    }
    protected boolean isThisATest() {
        return thisIsATest;
    }
}