ImgScroll/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java

package com.rarchives.ripme.ripper;
import java.awt.Desktop;
import java.io.*;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Logger;
import org.jsoup.HttpStatusException;
import com.rarchives.ripme.ui.RipStatusComplete;
import com.rarchives.ripme.ui.RipStatusHandler;
import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
import java.util.Scanner;
public abstract class AbstractRipper
extends Observable
implements RipperInterface, Runnable {
protected static final Logger logger = Logger.getLogger(AbstractRipper.class);
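    // File that records every URL downloaded so far; consulted when the
    // remember.url_history option is enabled to skip repeat downloads.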
private final String URLHistoryFile = Utils.getURLHistoryFile();
public static final String USER_AGENT =
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:36.0) Gecko/20100101 Firefox/36.0";
protected URL url;
protected File workingDir;
DownloadThreadPool threadPool;
RipStatusHandler observer = null;
private boolean completed = true;
public abstract void rip() throws IOException;
public abstract String getHost();
public abstract String getGID(URL url) throws MalformedURLException;
private boolean shouldStop = false;
private boolean thisIsATest = false;
public void stop() {
shouldStop = true;
}
public boolean isStopped() {
return shouldStop;
}
protected void stopCheck() throws IOException {
if (shouldStop) {
throw new IOException("Ripping interrupted");
}
}
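    /**
     * Appends the given URL to the URL history file so repeat downloads can be
     * detected later.
     * @param downloadedURL
     *            URL to record; callers append a trailing newline as the line separator.
     */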
    private void writeDownloadedURL(String downloadedURL) throws IOException {
        try {
            File file = new File(URLHistoryFile);
            // if the file doesn't exist, create it
            if (!file.exists()) {
                file.createNewFile();
            }
            // try-with-resources closes both writers even if the write fails
            try (FileWriter fw = new FileWriter(file.getAbsoluteFile(), true);
                 BufferedWriter bw = new BufferedWriter(fw)) {
                bw.write(downloadedURL);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
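    /**
     * Checks the URL history file for an exact match of the given URL.
     * @param url
     *            URL to look up.
     * @return True if the URL has been recorded, false otherwise (including when
     *         the history file does not exist yet).
     */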
    private boolean hasDownloadedURL(String url) {
        File file = new File(URLHistoryFile);
        // try-with-resources so the Scanner is always closed
        try (Scanner scanner = new Scanner(file)) {
            while (scanner.hasNextLine()) {
                final String lineFromFile = scanner.nextLine();
                if (lineFromFile.equals(url)) {
                    return true;
                }
            }
        } catch (FileNotFoundException e) {
            return false;
        }
        return false;
    }
    /**
     * Ensures the inheriting ripper can rip this URL; throws an exception if not.
     * Otherwise stores the sanitized URL. The working directory and thread pool
     * are initialized later, in {@link #setup()}.
     *
     * @param url
     *            URL to rip.
     * @throws IOException
     *             If anything goes wrong.
     */
public AbstractRipper(URL url) throws IOException {
if (!canRip(url)) {
throw new MalformedURLException("Unable to rip url: " + url);
}
this.url = sanitizeURL(url);
}
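    /**
     * Prepares this ripper for use: resolves the working directory for the URL,
     * points the log4j "FILE" appender at a log.txt inside that directory, and
     * creates the download thread pool.
     * @throws IOException
     *             If the working directory cannot be set up.
     */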
public void setup() throws IOException {
setWorkingDir(this.url);
Logger rootLogger = Logger.getRootLogger();
FileAppender fa = (FileAppender) rootLogger.getAppender("FILE");
if (fa != null) {
fa.setFile(this.workingDir + File.separator + "log.txt");
fa.activateOptions();
}
this.threadPool = new DownloadThreadPool();
}
public void setObserver(RipStatusHandler obs) {
this.observer = obs;
}
/**
* Queues image to be downloaded and saved.
* @param url
* URL of the file
* @param saveAs
* Path of the local file to save the content to.
     * @return True on success, false on failure.
*/
public abstract boolean addURLToDownload(URL url, File saveAs);
/**
* Queues image to be downloaded and saved.
* @param url
* URL of the file
* @param saveAs
* Path of the local file to save the content to.
* @param referrer
* The HTTP referrer to use while downloading this file.
* @param cookies
* The cookies to send to the server while downloading this file.
     * @return True on success, false on failure.
*/
protected abstract boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String, String> cookies);
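    /**
     * Queues a file to be downloaded and saved, deriving the local filename from
     * the URL, the given prefix, and the given subdirectory. When the
     * remember.url_history option is enabled, URLs that were already downloaded
     * are skipped.
     * <p>
     * Illustrative call from a subclass (all argument names and the prefix format
     * are placeholder example values):
     * {@code addURLToDownload(imageUrl, String.format("%03d_", index), "", albumUrl, cookies)}
     * @param url
     *            URL of the file.
     * @param prefix
     *            Prefix to prepend to the saved filename.
     * @param subdirectory
     *            Sub-directory of the working directory to save the file to.
     * @param referrer
     *            The HTTP referrer to use while downloading this file.
     * @param cookies
     *            The cookies to send to the server while downloading this file.
     * @return True on success, false on failure.
     */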
protected boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map<String, String> cookies) {
if (Utils.getConfigBoolean("remember.url_history", true)) {
if (hasDownloadedURL(url.toExternalForm())) {
sendUpdate(STATUS.DOWNLOAD_WARN, "Already downloaded " + url.toExternalForm());
return false;
}
}
try {
stopCheck();
} catch (IOException e) {
logger.debug("Ripper has been stopped");
return false;
}
logger.debug("url: " + url + ", prefix: " + prefix + ", subdirectory" + subdirectory + ", referrer: " + referrer + ", cookies: " + cookies);
String saveAs = url.toExternalForm();
saveAs = saveAs.substring(saveAs.lastIndexOf('/')+1);
if (saveAs.indexOf('?') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf('?')); }
if (saveAs.indexOf('#') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf('#')); }
if (saveAs.indexOf('&') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf('&')); }
if (saveAs.indexOf(':') >= 0) { saveAs = saveAs.substring(0, saveAs.indexOf(':')); }
File saveFileAs;
try {
if (!subdirectory.equals("")) {
subdirectory = File.separator + subdirectory;
}
saveFileAs = new File(
workingDir.getCanonicalPath()
+ subdirectory
+ File.separator
+ prefix
+ saveAs);
} catch (IOException e) {
logger.error("[!] Error creating save file path for URL '" + url + "':", e);
return false;
}
logger.debug("Downloading " + url + " to " + saveFileAs);
if (!saveFileAs.getParentFile().exists()) {
logger.info("[+] Creating directory: " + Utils.removeCWD(saveFileAs.getParent()));
saveFileAs.getParentFile().mkdirs();
}
if (Utils.getConfigBoolean("remember.url_history", true)) {
try {
writeDownloadedURL(url.toExternalForm() + "\n");
} catch (IOException e) {
logger.debug("Unable to write URL history file");
}
}
return addURLToDownload(url, saveFileAs, referrer, cookies);
}
/**
     * Queues a file to be downloaded and saved, using the given filename prefix and subdirectory.
* @param url
* URL to download.
* @param prefix
* Prefix to prepend to the saved filename.
* @param subdirectory
* Sub-directory of the working directory to save the images to.
     * @return True on success, false on failure.
*/
protected boolean addURLToDownload(URL url, String prefix, String subdirectory) {
return addURLToDownload(url, prefix, subdirectory, null, null);
}
/**
* Queues image to be downloaded and saved.
* Uses filename from URL (and 'prefix') to decide filename.
* @param url
* URL to download
     * @param prefix
     *            Prefix to prepend to the saved filename.
     * @return True on success, false on failure.
*/
protected boolean addURLToDownload(URL url, String prefix) {
// Use empty subdirectory
return addURLToDownload(url, prefix, "");
}
/**
* Waits for downloading threads to complete.
*/
protected void waitForThreads() {
logger.debug("Waiting for threads to finish");
completed = false;
threadPool.waitForThreads();
checkIfComplete();
}
/**
* Notifies observers that source is being retrieved.
* @param url
* URL being retrieved
*/
public void retrievingSource(String url) {
RipStatusMessage msg = new RipStatusMessage(STATUS.LOADING_RESOURCE, url);
if (observer != null) {
observer.update(this, msg);
}
}
/**
* Notifies observers that a file download has completed.
* @param url
* URL that was completed.
* @param saveAs
* Where the downloaded file is stored.
*/
public abstract void downloadCompleted(URL url, File saveAs);
/**
* Notifies observers that a file could not be downloaded (includes a reason).
     * @param url
     *            URL that could not be downloaded.
     * @param reason
     *            Human-readable reason the download failed.
*/
public abstract void downloadErrored(URL url, String reason);
/**
* Notify observers that a download could not be completed,
* but was not technically an "error".
     * @param url
     *            URL that was not downloaded because the file already exists.
     * @param file
     *            The file that already exists on disk.
     */
public abstract void downloadExists(URL url, File file);
/**
* @return Number of files downloaded.
*/
int getCount() {
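        // Base implementation reports a single file; rippers that track multiple
        // downloads override this to return the real count.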
return 1;
}
/**
* Notifies observers and updates state if all files have been ripped.
*/
void checkIfComplete() {
if (observer == null) {
logger.debug("observer is null");
return;
}
if (!completed) {
completed = true;
logger.info(" Rip completed!");
RipStatusComplete rsc = new RipStatusComplete(workingDir, getCount());
RipStatusMessage msg = new RipStatusMessage(STATUS.RIP_COMPLETE, rsc);
observer.update(this, msg);
Logger rootLogger = Logger.getRootLogger();
FileAppender fa = (FileAppender) rootLogger.getAppender("FILE");
if (fa != null) {
logger.debug("Changing log file back to 'ripme.log'");
fa.setFile("ripme.log");
fa.activateOptions();
}
if (Utils.getConfigBoolean("urls_only.save", false)) {
String urlFile = this.workingDir + File.separator + "urls.txt";
try {
Desktop.getDesktop().open(new File(urlFile));
} catch (IOException e) {
logger.warn("Error while opening " + urlFile, e);
}
}
}
}
public URL getURL() {
return url;
}
/**
* @return
* Path to the directory in which all files
* ripped via this ripper will be stored.
*/
public File getWorkingDir() {
return workingDir;
}
public abstract void setWorkingDir(URL url) throws IOException;
public String getAlbumTitle(URL url) throws MalformedURLException {
return getHost() + "_" + getGID(url);
}
/**
* Finds, instantiates, and returns a compatible ripper for given URL.
* @param url
* URL to rip.
* @return
* Instantiated ripper ready to rip given URL.
* @throws Exception
* If no compatible rippers can be found.
*/
public static AbstractRipper getRipper(URL url) throws Exception {
for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers")) {
try {
AlbumRipper ripper = (AlbumRipper) constructor.newInstance(url); // by design: can throw ClassCastException
logger.debug("Found album ripper: " + ripper.getClass().getName());
return ripper;
} catch (Exception e) {
// Incompatible rippers *will* throw exceptions during instantiation.
}
}
for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers.video")) {
try {
VideoRipper ripper = (VideoRipper) constructor.newInstance(url); // by design: can throw ClassCastException
logger.debug("Found video ripper: " + ripper.getClass().getName());
return ripper;
} catch (Exception e) {
// Incompatible rippers *will* throw exceptions during instantiation.
}
}
throw new Exception("No compatible ripper found");
}
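    // Illustrative sketch (not taken from the UI code) of how a caller might use
    // getRipper(); "statusHandler" and the example URL are hypothetical:
    //   AbstractRipper ripper = AbstractRipper.getRipper(new URL("http://example.com/album/123"));
    //   ripper.setup();
    //   ripper.setObserver(statusHandler); // some RipStatusHandler implementation
    //   new Thread(ripper).start();        // run() calls rip(), then cleanup()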
/**
* @param pkg
* The package name.
* @return
* List of constructors for all eligible Rippers.
* @throws Exception
*/
public static List<Constructor<?>> getRipperConstructors(String pkg) throws Exception {
List<Constructor<?>> constructors = new ArrayList<>();
for (Class<?> clazz : Utils.getClassesForPackage(pkg)) {
if (AbstractRipper.class.isAssignableFrom(clazz)) {
constructors.add(clazz.getConstructor(URL.class));
}
}
return constructors;
}
/**
* Sends an update message to the relevant observer(s) on this ripper.
     * @param status
     *            Status to report to the observer.
     * @param message
     *            Message or payload associated with the status.
*/
public void sendUpdate(STATUS status, Object message) {
if (observer == null) {
return;
}
observer.update(this, new RipStatusMessage(status, message));
}
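    /**
     * @return Completion percentage of the current rip.
     */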
public abstract int getCompletionPercentage();
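    /**
     * @return Human-readable text describing the current status of the rip.
     */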
public abstract String getStatusText();
/**
* Rips the album when the thread is invoked.
*/
public void run() {
try {
rip();
} catch (HttpStatusException e) {
logger.error("Got exception while running ripper:", e);
waitForThreads();
sendUpdate(STATUS.RIP_ERRORED, "HTTP status code " + e.getStatusCode() + " for URL " + e.getUrl());
} catch (Exception e) {
logger.error("Got exception while running ripper:", e);
waitForThreads();
sendUpdate(STATUS.RIP_ERRORED, e.getMessage());
} finally {
cleanup();
}
}
    private void cleanup() {
        // list() can return null (e.g. if the directory no longer exists); guard against it
        String[] contents = this.workingDir.list();
        if (contents != null && contents.length == 0) {
            // No files, delete the dir
            logger.info("Deleting empty directory " + this.workingDir);
            boolean deleteResult = this.workingDir.delete();
            if (!deleteResult) {
                logger.error("Unable to delete empty directory " + this.workingDir);
            }
        }
    }
protected boolean sleep(int milliseconds) {
try {
logger.debug("Sleeping " + milliseconds + "ms");
Thread.sleep(milliseconds);
return true;
} catch (InterruptedException e) {
logger.error("Interrupted while waiting to load next page", e);
return false;
}
}
public void setBytesTotal(int bytes) {
// Do nothing
}
public void setBytesCompleted(int bytes) {
// Do nothing
}
/** Methods for detecting when we're running a test. */
public void markAsTest() {
logger.debug("THIS IS A TEST RIP");
thisIsATest = true;
}
protected boolean isThisATest() {
return thisIsATest;
}
}