diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 238b6c54..06283ebf 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,6 +1,8 @@ * Ripme version: * Java version: * Operating system: + * Exact URL you were trying to rip when the problem occurred: * Please include any additional information about how to reproduce the problem: diff --git a/README.md b/README.md index 62ec5aaa..5861aef6 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ RipMe is an album ripper for various websites. Runs on your computer. Requires J Download `ripme.jar` from the [latest release](https://github.com/ripmeapp/ripme/releases). -**Note: If you're currently using version 1.2.x or 1.3.x, you will not automatically get updates to the newest versions. We recommend downloading the latest version from the link above.** +**Note: If you're currently using version 1.2.x, 1.3.x or 1.7.49, you will not automatically get updates to the newest versions. We recommend downloading the latest version from the link above.** For information about running the `.jar` file, see [the How To Run wiki](https://github.com/ripmeapp/ripme/wiki/How-To-Run-RipMe). diff --git a/patch.py b/patch.py index b836f72b..ca63cbf3 100644 --- a/patch.py +++ b/patch.py @@ -1,19 +1,40 @@ import json import subprocess +from hashlib import sha256 # This script will: # - read current version # - increment patch version # - update version in a few places # - insert new line in ripme.json with message +# - build ripme +# - add the hash of the latest binary to ripme.json message = input('message: ') -with open('ripme.json') as dataFile: - ripmeJson = json.load(dataFile) -currentVersion = ripmeJson["latestVersion"] +def get_ripme_json(): + with open('ripme.json') as dataFile: + ripmeJson = json.load(dataFile) + return ripmeJson -print ('Current version ' + currentVersion) +def update_hash(current_hash): + ripmeJson = get_ripme_json() + with open('ripme.json', 'w') as dataFile: + ripmeJson["currentHash"] = current_hash + print(ripmeJson["currentHash"]) + json.dump(ripmeJson, dataFile, indent=4) + +def update_change_list(message): + ripmeJson = get_ripme_json() + with open('ripme.json', 'w') as dataFile: + ripmeJson["changeList"].insert(0, message) + print(ripmeJson["currentHash"]) + json.dump(ripmeJson, dataFile, indent=4) + + +currentVersion = get_ripme_json()["latestVersion"] + +print('Current version ' + currentVersion) versionFields = currentVersion.split('.') patchCur = int(versionFields[2]) @@ -22,14 +43,14 @@ majorMinor = versionFields[:2] majorMinor.append(str(patchNext)) nextVersion = '.'.join(majorMinor) -print ('Updating to ' + nextVersion) +print('Updating to ' + nextVersion) substrExpr = 's/' + currentVersion + '/' + nextVersion + '/' subprocess.call(['sed', '-i', '-e', substrExpr, 'src/main/java/com/rarchives/ripme/ui/UpdateUtils.java']) subprocess.call(['git', 'grep', 'DEFAULT_VERSION.*' + nextVersion, 'src/main/java/com/rarchives/ripme/ui/UpdateUtils.java']) -substrExpr = 's/\\\"latestVersion\\\": \\\"' + currentVersion + '\\\"/\\\"latestVersion\\\": \\\"' +\ +substrExpr = 's/\\\"latestVersion\\\": \\\"' + currentVersion + '\\\"/\\\"latestVersion\\\": \\\"' + \ nextVersion + '\\\"/' subprocess.call(['sed', '-i', '-e', substrExpr, 'ripme.json']) subprocess.call(['git', 'grep', 'latestVersion', 'ripme.json']) @@ -54,3 +75,12 @@ dataFile.close() subprocess.call(['git', 'add', '-u']) subprocess.call(['git', 'commit', '-m', commitMessage]) subprocess.call(['git', 'tag',
nextVersion]) +print("Building ripme") +subprocess.call(["mvn", "clean", "compile", "assembly:single"]) +print("Hashing .jar file") +openedFile = open("./target/ripme-{}-jar-with-dependencies.jar".format(nextVersion), "rb") +readFile = openedFile.read() +file_hash = sha256(readFile).hexdigest() +print("Hash is: {}".format(file_hash)) +print("Updating hash") +update_hash(file_hash) \ No newline at end of file diff --git a/pom.xml b/pom.xml index 317b816e..c94f1d56 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.rarchives.ripme ripme jar - 1.7.45 + 1.7.50 ripme http://rip.rarchives.com diff --git a/ripme.json b/ripme.json index 73b9b240..b2a8e044 100644 --- a/ripme.json +++ b/ripme.json @@ -1,6 +1,11 @@ { - "latestVersion": "1.7.45", + "currentHash": "f6e1e6c931abfbeffdd37dabb65f83e4335ca11ccc017f31e1d835ee6e6bec7a", "changeList": [ + "1.7.50: Ripme now checks file hash before running update; fixed update bug which caused ripme to report every update as new", + "1.7.49: Fixed -n flag; Added ability to change locale at runtime and from gui; Updated kr_KR translation; Removed support for tnbtu.com; No longer writes url to url_history file if save urls only is checked", + "1.7.48: Fixed instagram ripper; Added Korean translation; Added quickQueue support to nhentairipper; Rewrote nhentairipper to be faster; myhentaicomics ripper now requests proper url when downloading images; Can now include comments in url files; Added the ability to blacklist tags on e-hentai.org", + "1.7.47: Added quick queue support for hentai2read ripper; Fixed instagram ripper; SankakuComplexRipper can now download from different subdomains; Added ripper for bato.to; Added quick queue support for 8muses.download; ", + "1.7.46: Fixed hentai2read ripper; Rewrote the myhentaicomics ripper to use the new getAlbumsToQueue func; Can now blacklist nhentai tags; SinnercomicsRipper no longer adds -page-01 to folder names; EightmusesRipper now adds file extension to filename; disabled test for twitch ripper", "1.7.45: Fixed hentai2read ripper; ImageBam album fixed; Added various translations; TsuminoRipper no longer requires album name to download", "1.7.44: Fixed instagram ripper regex", "1.7.43: Fixed queryId regex in instagram ripper", @@ -216,5 +221,6 @@ "1.0.4: Fixed spaces-in-directory bug", "1.0.3: Added VK.com ripper", "1.0.1: Added auto-update functionality" - ] -} + ], + "latestVersion": "1.7.50" +} \ No newline at end of file diff --git a/src/main/java/com/rarchives/ripme/App.java b/src/main/java/com/rarchives/ripme/App.java index 6f650d56..81dafce5 100644 --- a/src/main/java/com/rarchives/ripme/App.java +++ b/src/main/java/com/rarchives/ripme/App.java @@ -33,13 +33,22 @@ import com.rarchives.ripme.utils.Utils; /** * Entry point to application. + * This is where all the fun happens, with the main method. * Decides to display UI or to run silently via command-line. + * + * As the "controller" to all other classes, it parses command line parameters and loads the history. */ public class App { public static final Logger logger = Logger.getLogger(App.class); private static final History HISTORY = new History(); + /** + * Where everything starts. Takes in and tries to parse as many commandline arguments as possible. + * Otherwise, it launches a GUI. + * + * @param args Array of command line arguments. + */ public static void main(String[] args) throws MalformedURLException { CommandLine cl = getArgs(args); @@ -74,7 +83,8 @@ public class App { /** * Creates an abstract ripper and instructs it to rip.
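patch.py above now publishes the SHA-256 of the freshly built jar as `currentHash` in ripme.json, but the consumer of that value, UpdateUtils.java, is not part of this diff. As a rough sketch only — the class, method, and parameter names below are illustrative and not the real UpdateUtils API — the client-side check presumably amounts to hashing the downloaded jar and comparing it to the published value before swapping it in:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.security.MessageDigest;

    class UpdateHashCheck {
        // Hypothetical helper: compute the SHA-256 of a downloaded jar and compare it
        // to the "currentHash" value published in ripme.json.
        static boolean jarMatchesPublishedHash(Path downloadedJar, String currentHash) throws Exception {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            try (InputStream in = Files.newInputStream(downloadedJar)) {
                byte[] buffer = new byte[8192];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    digest.update(buffer, 0, read);
                }
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : digest.digest()) {
                hex.append(String.format("%02x", b));
            }
            // Only replace the running jar if the hashes agree.
            return hex.toString().equals(currentHash);
        }
    }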
* @param url URL to be ripped - * @throws Exception + * @throws Exception Nothing too specific here, just a catch-all. + * */ private static void rip(URL url) throws Exception { AbstractRipper ripper = AbstractRipper.getRipper(url); @@ -89,6 +99,7 @@ public class App { private static void handleArguments(String[] args) { CommandLine cl = getArgs(args); + //Help (list commands) if (cl.hasOption('h') || args.length == 0) { HelpFormatter hf = new HelpFormatter(); hf.printHelp("java -jar ripme.jar [OPTIONS]", getOptions()); @@ -98,28 +109,34 @@ public class App { Utils.configureLogger(); logger.info("Initialized ripme v" + UpdateUtils.getThisJarVersion()); + //Allow file overwriting if (cl.hasOption('w')) { Utils.setConfigBoolean("file.overwrite", true); } + //SOCKS proxy server if (cl.hasOption('s')) { String sservfull = cl.getOptionValue('s').trim(); Proxy.setSocks(sservfull); } + //HTTP proxy server if (cl.hasOption('p')) { String proxyserverfull = cl.getOptionValue('p').trim(); Proxy.setHTTPProxy(proxyserverfull); } + //Number of threads if (cl.hasOption('t')) { Utils.setConfigInteger("threads.size", Integer.parseInt(cl.getOptionValue('t'))); } + //Ignore 404 if (cl.hasOption('4')) { Utils.setConfigBoolean("errors.skip404", true); } + //Re-rip all previous albums if (cl.hasOption('r')) { // Re-rip all via command-line List history = Utils.getConfigList("download.history"); @@ -142,6 +159,7 @@ public class App { System.exit(0); } + //Re-rip all selected albums if (cl.hasOption('R')) { loadHistory(); if (HISTORY.toList().isEmpty()) { @@ -174,32 +192,41 @@ public class App { } } + //Save the order of images in album if (cl.hasOption('d')) { Utils.setConfigBoolean("download.save_order", true); } + //Don't save the order of images in album if (cl.hasOption('D')) { Utils.setConfigBoolean("download.save_order", false); } + //In case specify both, break and exit since it isn't possible. if ((cl.hasOption('d'))&&(cl.hasOption('D'))) { logger.error("\nCannot specify '-d' and '-D' simultaneously"); System.exit(-1); } + //Destination directory if (cl.hasOption('l')) { // change the default rips directory Utils.setConfigString("rips.directory", cl.getOptionValue('l')); } + //Read URLs from File if (cl.hasOption('f')) { String filename = cl.getOptionValue('f'); - try { + + try (BufferedReader br = new BufferedReader(new FileReader(filename))) { String url; - BufferedReader br = new BufferedReader(new FileReader(filename)); while ((url = br.readLine()) != null) { - // loop through each url in the file and proces each url individually. - ripURL(url.trim(), cl.hasOption("n")); + if (url.startsWith("//") || url.startsWith("#")) { + logger.debug("Skipping over line \"" + url + "\"because it is a comment"); + } else { + // loop through each url in the file and process each url individually. + ripURL(url.trim(), !cl.hasOption("n")); + } } } catch (FileNotFoundException fne) { logger.error("[!] File containing list of URLs not found. Cannot continue."); @@ -208,9 +235,14 @@ public class App { } } + //The URL to rip. 
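The -f handling above means a URL list can now carry comments: any line starting with // or # is logged and skipped rather than ripped. A small illustrative example (file name and URLs are made up):

    # urls.txt - one album per line, lines starting with # or // are ignored
    // re-rip these weekly
    https://example.com/album/12345
    https://example.com/album/67890

Such a file would be passed in with `java -jar ripme.jar -f urls.txt`; the new -j/--update option added a little further down gives the updater the same kind of command-line entry point (`java -jar ripme.jar -j`).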
if (cl.hasOption('u')) { String url = cl.getOptionValue('u').trim(); - ripURL(url, cl.hasOption("n")); + ripURL(url, !cl.hasOption("n")); + } + + if (cl.hasOption('j')) { + UpdateUtils.updateProgramCLI(); } } @@ -262,6 +294,7 @@ public class App { opts.addOption("v", "version", false, "Show current version"); opts.addOption("s", "socks-server", true, "Use socks server ([user:password]@host[:port])"); opts.addOption("p", "proxy-server", true, "Use HTTP Proxy server ([user:password]@host[:port])"); + opts.addOption("j", "update", false, "Update ripme"); return opts; } @@ -302,7 +335,7 @@ public class App { } else { logger.info("Loading history from configuration"); HISTORY.fromList(Utils.getConfigList("download.history")); - if (HISTORY.toList().size() == 0) { + if (HISTORY.toList().isEmpty()) { // Loaded from config, still no entries. // Guess rip history based on rip folder String[] dirs = Utils.getWorkingDirectory().list((dir, file) -> new File(dir.getAbsolutePath() + File.separator + file).isDirectory()); diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java index e0fd3548..6a9e62a3 100644 --- a/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java @@ -11,6 +11,7 @@ import org.jsoup.nodes.Document; import com.rarchives.ripme.ui.RipStatusMessage.STATUS; import com.rarchives.ripme.utils.Utils; +import com.rarchives.ripme.ui.MainWindow; /** * Simplified ripper, designed for ripping from sites by parsing HTML. @@ -53,12 +54,29 @@ public abstract class AbstractHTMLRipper extends AlbumRipper { protected boolean hasDescriptionSupport() { return false; } + protected String[] getDescription(String url, Document page) throws IOException { throw new IOException("getDescription not implemented"); // Do I do this or make an abstract function? 
} protected int descSleepTime() { return 100; } + + protected List getAlbumsToQueue(Document doc) { + return null; + } + + // If a page has Queue support then it has no images we want to download, just a list of urls we want to add to + // the queue + protected boolean hasQueueSupport() { + return false; + } + + // Takes a url and checks if it is for a page of albums + protected boolean pageContainsAlbums(URL url) { + return false; + } + @Override public void rip() throws IOException { int index = 0; @@ -67,6 +85,16 @@ public abstract class AbstractHTMLRipper extends AlbumRipper { sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm()); Document doc = getFirstPage(); + if (hasQueueSupport() && pageContainsAlbums(this.url)) { + List urls = getAlbumsToQueue(doc); + for (String url : urls) { + MainWindow.addUrlToQueue(url); + } + + // We set doc to null here so the while loop below this doesn't fire + doc = null; + } + while (doc != null) { if (alreadyDownloadedUrls >= Utils.getConfigInteger("history.end_rip_after_already_seen", 1000000000) && !isThisATest()) { sendUpdate(STATUS.DOWNLOAD_COMPLETE, "Already seen the last " + alreadyDownloadedUrls + " images ending rip"); @@ -83,7 +111,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper { } } - if (imageURLs.size() == 0) { + if (imageURLs.isEmpty()) { throw new IOException("No images found at " + doc.location()); } @@ -99,7 +127,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper { if (hasDescriptionSupport() && Utils.getConfigBoolean("descriptions.save", false)) { logger.debug("Fetching description(s) from " + doc.location()); List textURLs = getDescriptionsFromPage(doc); - if (textURLs.size() > 0) { + if (!textURLs.isEmpty()) { logger.debug("Found description link(s) from " + doc.location()); for (String textURL : textURLs) { if (isStopped()) { diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java index 8c73bbb6..93146d4b 100644 --- a/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java @@ -56,6 +56,12 @@ public abstract class AbstractJSONRipper extends AlbumRipper { while (json != null) { List imageURLs = getURLsFromJSON(json); + + if (alreadyDownloadedUrls >= Utils.getConfigInteger("history.end_rip_after_already_seen", 1000000000) && !isThisATest()) { + sendUpdate(STATUS.DOWNLOAD_COMPLETE, "Already seen the last " + alreadyDownloadedUrls + " images ending rip"); + break; + } + // Remove all but 1 image if (isThisATest()) { while (imageURLs.size() > 1) { @@ -63,7 +69,7 @@ public abstract class AbstractJSONRipper extends AlbumRipper { } } - if (imageURLs.size() == 0) { + if (imageURLs.isEmpty()) { throw new IOException("No images found at " + this.url); } @@ -71,6 +77,7 @@ public abstract class AbstractJSONRipper extends AlbumRipper { if (isStopped()) { break; } + index += 1; logger.debug("Found image url #" + index+ ": " + imageURL); downloadURL(new URL(imageURL), index); @@ -104,4 +111,4 @@ public abstract class AbstractJSONRipper extends AlbumRipper { } return prefix; } -} \ No newline at end of file +} diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java index 20889495..dc04e801 100644 --- a/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java @@ -67,14 +67,35 @@ public 
abstract class AbstractRipper * @param downloadedURL URL to check if downloaded */ private void writeDownloadedURL(String downloadedURL) throws IOException { + // If "save urls only" is checked don't write to the url history file + if (Utils.getConfigBoolean("urls_only.save", false)) { + return; + } downloadedURL = normalizeUrl(downloadedURL); BufferedWriter bw = null; FileWriter fw = null; try { File file = new File(URLHistoryFile); + if (!new File(Utils.getConfigDir()).exists()) { + logger.error("Config dir doesn't exist"); + logger.info("Making config dir"); + boolean couldMakeDir = new File(Utils.getConfigDir()).mkdirs(); + if (!couldMakeDir) { + logger.error("Couldn't make config dir"); + return; + } + } // if file doesnt exists, then create it if (!file.exists()) { - file.createNewFile(); + boolean couldMakeDir = file.createNewFile(); + if (!couldMakeDir) { + logger.error("Couldn't url history file"); + return; + } + } + if (!file.canWrite()) { + logger.error("Can't write to url history file: " + URLHistoryFile); + return; } fw = new FileWriter(file.getAbsoluteFile(), true); bw = new BufferedWriter(fw); @@ -112,8 +133,8 @@ public abstract class AbstractRipper private boolean hasDownloadedURL(String url) { File file = new File(URLHistoryFile); url = normalizeUrl(url); - try { - Scanner scanner = new Scanner(file); + + try (Scanner scanner = new Scanner(file)) { while (scanner.hasNextLine()) { final String lineFromFile = scanner.nextLine(); if (lineFromFile.equals(url)) { @@ -123,6 +144,7 @@ public abstract class AbstractRipper } catch (FileNotFoundException e) { return false; } + return false; } diff --git a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java index f700f012..b037052e 100644 --- a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java @@ -68,11 +68,10 @@ public abstract class AlbumRipper extends AbstractRipper { if (Utils.getConfigBoolean("urls_only.save", false)) { // Output URL to file String urlFile = this.workingDir + File.separator + "urls.txt"; - try { - FileWriter fw = new FileWriter(urlFile, true); + try (FileWriter fw = new FileWriter(urlFile, true)) { fw.write(url.toExternalForm()); fw.write("\n"); - fw.close(); + RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, urlFile); itemsCompleted.put(url, new File(urlFile)); observer.update(this, msg); diff --git a/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java b/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java index 29200d5a..6b1032e5 100644 --- a/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/VideoRipper.java @@ -45,11 +45,11 @@ public abstract class VideoRipper extends AbstractRipper { if (Utils.getConfigBoolean("urls_only.save", false)) { // Output URL to file String urlFile = this.workingDir + File.separator + "urls.txt"; - try { - FileWriter fw = new FileWriter(urlFile, true); + + try (FileWriter fw = new FileWriter(urlFile, true)) { fw.write(url.toExternalForm()); fw.write("\n"); - fw.close(); + RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, urlFile); observer.update(this, msg); } catch (IOException e) { diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/AerisdiesRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/AerisdiesRipper.java index 8e43aab0..74504b12 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/AerisdiesRipper.java 
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/AerisdiesRipper.java @@ -20,7 +20,6 @@ import java.util.HashMap; public class AerisdiesRipper extends AbstractHTMLRipper { - private Document albumDoc = null; private Map cookies = new HashMap<>(); @@ -41,17 +40,21 @@ public class AerisdiesRipper extends AbstractHTMLRipper { public String getGID(URL url) throws MalformedURLException { Pattern p = Pattern.compile("^https?://www.aerisdies.com/html/lb/[a-z]*_(\\d+)_\\d\\.html"); Matcher m = p.matcher(url.toExternalForm()); - if (!m.matches()) { - throw new MalformedURLException("Expected URL format: http://www.aerisdies.com/html/lb/albumDIG, got: " + url); + if (m.matches()) { + return m.group(1); } - return m.group(1); + throw new MalformedURLException("Expected URL format: http://www.aerisdies.com/html/lb/albumDIG, got: " + url); + } @Override public String getAlbumTitle(URL url) throws MalformedURLException { try { - // Attempt to use album title as GID - String title = getFirstPage().select("div > div > span[id=albumname] > a").first().text(); + Element el = getFirstPage().select(".headtext").first(); + if (el == null) { + throw new IOException("Unable to get album title"); + } + String title = el.text(); return getHost() + "_" + getGID(url) + "_" + title.trim(); } catch (IOException e) { // Fall back to default album naming convention @@ -62,12 +65,7 @@ public class AerisdiesRipper extends AbstractHTMLRipper { @Override public Document getFirstPage() throws IOException { - if (albumDoc == null) { - Response resp = Http.url(url).response(); - cookies.putAll(resp.cookies()); - albumDoc = resp.parse(); - } - return albumDoc; + return Http.url(url).get(); } @Override diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/BatoRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/BatoRipper.java new file mode 100644 index 00000000..5978bff5 --- /dev/null +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/BatoRipper.java @@ -0,0 +1,131 @@ +package com.rarchives.ripme.ripper.rippers; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.json.JSONObject; +import org.jsoup.nodes.Document; +import org.jsoup.nodes.Element; + +import com.rarchives.ripme.ripper.AbstractHTMLRipper; +import com.rarchives.ripme.utils.Http; + +public class BatoRipper extends AbstractHTMLRipper { + + public BatoRipper(URL url) throws IOException { + super(url); + } + + @Override + public String getHost() { + return "bato"; + } + + @Override + public String getDomain() { + return "bato.to"; + } + + @Override + public String getGID(URL url) throws MalformedURLException { + Pattern p = Pattern.compile("https?://bato.to/chapter/([\\d]+)/?"); + Matcher m = p.matcher(url.toExternalForm()); + if (m.matches()) { + return m.group(1); + } + // As this is just for quick queue support it does matter what this if returns + p = Pattern.compile("https?://bato.to/series/([\\d]+)/?"); + m = p.matcher(url.toExternalForm()); + if (m.matches()) { + return ""; + } + throw new MalformedURLException("Expected bato.to URL format: " + + "bato.to/chapter/ID - got " + url + " instead"); + } + + @Override + public boolean hasQueueSupport() { + return true; + } + + @Override + public boolean pageContainsAlbums(URL url) { + Pattern p = Pattern.compile("https?://bato.to/series/([\\d]+)/?"); + Matcher m = p.matcher(url.toExternalForm()); + return m.matches(); + 
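These overrides are the whole contract for the quick-queue mechanism added to AbstractHTMLRipper earlier in this diff: when hasQueueSupport() and pageContainsAlbums(url) are both true, rip() calls getAlbumsToQueue(doc) and hands every returned URL to MainWindow.addUrlToQueue() instead of downloading images. A stripped-down sketch of the pattern for a hypothetical ripper (class name and CSS selector are invented, and the other required overrides such as getGID and getURLsFromPage are omitted):

    import java.io.IOException;
    import java.net.URL;
    import java.util.ArrayList;
    import java.util.List;
    import org.jsoup.nodes.Document;
    import org.jsoup.nodes.Element;
    import com.rarchives.ripme.ripper.AbstractHTMLRipper;

    public class ExampleQueueRipper extends AbstractHTMLRipper {
        public ExampleQueueRipper(URL url) throws IOException {
            super(url);
        }

        @Override
        public boolean hasQueueSupport() {
            return true; // this ripper can turn listing pages into queue entries
        }

        @Override
        public boolean pageContainsAlbums(URL url) {
            // only listing/series pages are queued; single albums are ripped normally
            return url.toExternalForm().contains("/series/");
        }

        @Override
        public List<String> getAlbumsToQueue(Document doc) {
            List<String> urls = new ArrayList<>();
            for (Element link : doc.select("a.album-link")) { // hypothetical selector
                urls.add(link.attr("abs:href"));
            }
            return urls;
        }
    }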
} + + @Override + public List getAlbumsToQueue(Document doc) { + List urlsToAddToQueue = new ArrayList<>(); + for (Element elem : doc.select("div.main > div > a")) { + urlsToAddToQueue.add("https://" + getDomain() + elem.attr("href")); + } + return urlsToAddToQueue; + } + + @Override + public String getAlbumTitle(URL url) throws MalformedURLException { + try { + // Attempt to use album title as GID + return getHost() + "_" + getGID(url) + "_" + getFirstPage().select("title").first().text().replaceAll(" ", "_"); + } catch (IOException e) { + // Fall back to default album naming convention + logger.info("Unable to find title at " + url); + } + return super.getAlbumTitle(url); + } + + @Override + public boolean canRip(URL url) { + Pattern p = Pattern.compile("https?://bato.to/series/([\\d]+)/?"); + Matcher m = p.matcher(url.toExternalForm()); + if (m.matches()) { + return true; + } + + p = Pattern.compile("https?://bato.to/chapter/([\\d]+)/?"); + m = p.matcher(url.toExternalForm()); + return m.matches(); + } + + @Override + public Document getFirstPage() throws IOException { + // "url" is an instance field of the superclass + return Http.url(url).get(); + } + + @Override + public List getURLsFromPage(Document doc) { + List result = new ArrayList<>(); + for (Element script : doc.select("script")) { + if (script.data().contains("var images = ")) { + String s = script.data(); + s = s.replaceAll("var seriesId = \\d+;", ""); + s = s.replaceAll("var chapterId = \\d+;", ""); + s = s.replaceAll("var pages = \\d+;", ""); + s = s.replaceAll("var page = \\d+;", ""); + s = s.replaceAll("var prevCha = null;", ""); + s = s.replaceAll("var nextCha = \\.*;", ""); + String json = s.replaceAll("var images = ", "").replaceAll(";", ""); + logger.info(s); + JSONObject images = new JSONObject(json); + for (int i = 1; i < images.length() +1; i++) { + result.add(images.getString(Integer.toString(i))); + } + + } + } + return result; + } + + @Override + public void downloadURL(URL url, int index) { + addURLToDownload(url, getPrefix(index)); + } +} diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/BcfakesRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/BcfakesRipper.java index 1138b364..2a77f02d 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/BcfakesRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/BcfakesRipper.java @@ -56,7 +56,7 @@ public class BcfakesRipper extends AbstractHTMLRipper { public Document getNextPage(Document doc) throws IOException { // Find next page Elements hrefs = doc.select("a.next"); - if (hrefs.size() == 0) { + if (hrefs.isEmpty()) { throw new IOException("No more pages"); } String nextUrl = "http://www.bcfakes.com" + hrefs.first().attr("href"); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/DeviantartRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/DeviantartRipper.java index 9ab23ec8..aac968b4 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/DeviantartRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/DeviantartRipper.java @@ -241,8 +241,8 @@ public class DeviantartRipper extends AbstractHTMLRipper { return null; } Elements nextButtons = page.select("link[rel=\"next\"]"); - if (nextButtons.size() == 0) { - if (page.select("link[rel=\"prev\"]").size() == 0) { + if (nextButtons.isEmpty()) { + if (page.select("link[rel=\"prev\"]").isEmpty()) { throw new IOException("No next page found"); } else { throw new IOException("Hit end of pages"); @@ -376,7 +376,7 @@ public class 
DeviantartRipper extends AbstractHTMLRipper { Elements els = doc.select("img.dev-content-full"); String fsimage = null; // Get the largest resolution image on the page - if (els.size() > 0) { + if (!els.isEmpty()) { // Large image fsimage = els.get(0).attr("src"); logger.info("Found large-scale: " + fsimage); @@ -386,7 +386,7 @@ public class DeviantartRipper extends AbstractHTMLRipper { } // Try to find the download button els = doc.select("a.dev-page-download"); - if (els.size() > 0) { + if (!els.isEmpty()) { // Full-size image String downloadLink = els.get(0).attr("href"); logger.info("Found download button link: " + downloadLink); @@ -469,4 +469,4 @@ public class DeviantartRipper extends AbstractHTMLRipper { // We are logged in, save the cookies return resp.cookies(); } -} +} \ No newline at end of file diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/DrawcrowdRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/DrawcrowdRipper.java index 14eacbb5..521bc7c4 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/DrawcrowdRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/DrawcrowdRipper.java @@ -60,7 +60,7 @@ public class DrawcrowdRipper extends AbstractHTMLRipper { @Override public Document getNextPage(Document doc) throws IOException { Elements loadMore = doc.select("a#load-more"); - if (loadMore.size() == 0) { + if (loadMore.isEmpty()) { throw new IOException("No next page found"); } if (!sleep(1000)) { diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/DribbbleRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/DribbbleRipper.java index 25d123a3..dc8cd77e 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/DribbbleRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/DribbbleRipper.java @@ -49,7 +49,7 @@ public class DribbbleRipper extends AbstractHTMLRipper { public Document getNextPage(Document doc) throws IOException { // Find next page Elements hrefs = doc.select("a.next_page"); - if (hrefs.size() == 0) { + if (hrefs.isEmpty()) { throw new IOException("No more pages"); } String nextUrl = "https://www.dribbble.com" + hrefs.first().attr("href"); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/EHentaiRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/EHentaiRipper.java index 8bd87cb2..43728fb4 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/EHentaiRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/EHentaiRipper.java @@ -11,6 +11,7 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; +import com.rarchives.ripme.ui.RipStatusMessage; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; @@ -125,12 +126,55 @@ public class EHentaiRipper extends AbstractHTMLRipper { } } + /** + * Checks for blacklisted tags on page. 
If it finds one it returns it, if not it return null + * + * @param doc + * @return String + */ + public String checkTags(Document doc, String[] blackListedTags) { + // If the user hasn't blacklisted any tags we return null; + if (blackListedTags == null) { + return null; + } + logger.info("Blacklisted tags " + blackListedTags[0]); + List tagsOnPage = getTags(doc); + for (String tag : blackListedTags) { + for (String pageTag : tagsOnPage) { + // We replace all dashes in the tag with spaces because the tags we get from the site are separated using + // dashes + if (tag.trim().toLowerCase().equals(pageTag.toLowerCase())) { + return tag; + } + } + } + return null; + } + + private List getTags(Document doc) { + List tags = new ArrayList<>(); + logger.info("Getting tags"); + for (Element tag : doc.select("td > div > a")) { + logger.info("Found tag " + tag.text()); + tags.add(tag.text()); + } + return tags; + } + + @Override public Document getFirstPage() throws IOException { if (albumDoc == null) { albumDoc = getPageWithRetries(this.url); } this.lastURL = this.url.toExternalForm(); + logger.info("Checking blacklist"); + String blacklistedTag = checkTags(albumDoc, Utils.getConfigStringArray("ehentai.blacklist.tags")); + if (blacklistedTag != null) { + sendUpdate(RipStatusMessage.STATUS.DOWNLOAD_WARN, "Skipping " + url.toExternalForm() + " as it " + + "contains the blacklisted tag \"" + blacklistedTag + "\""); + return null; + } return albumDoc; } @@ -142,7 +186,7 @@ public class EHentaiRipper extends AbstractHTMLRipper { } // Find next page Elements hrefs = doc.select(".ptt a"); - if (hrefs.size() == 0) { + if (hrefs.isEmpty()) { logger.info("doc: " + doc.html()); throw new IOException("No navigation links found"); } @@ -211,10 +255,10 @@ public class EHentaiRipper extends AbstractHTMLRipper { // Find image Elements images = doc.select(".sni > a > img"); - if (images.size() == 0) { + if (images.isEmpty()) { // Attempt to find image elsewise (Issue #41) images = doc.select("img#img"); - if (images.size() == 0) { + if (images.isEmpty()) { logger.warn("Image not found at " + this.url); return; } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/EightmusesRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/EightmusesRipper.java index ca9c24e3..3e06695b 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/EightmusesRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/EightmusesRipper.java @@ -126,9 +126,9 @@ public class EightmusesRipper extends AbstractHTMLRipper { image = getFullSizeImage(imageHref); URL imageUrl = new URL(image); if (Utils.getConfigBoolean("8muses.use_short_names", false)) { - addURLToDownload(imageUrl, getPrefixShort(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies, ""); + addURLToDownload(imageUrl, getPrefixShort(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies, "", null, true); } else { - addURLToDownload(imageUrl, getPrefixLong(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies); + addURLToDownload(imageUrl, getPrefixLong(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies, "", null, true); } // X is our page index x++; diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/FivehundredpxRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/FivehundredpxRipper.java index 93aedba2..4ade270b 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/FivehundredpxRipper.java +++ 
b/src/main/java/com/rarchives/ripme/ripper/rippers/FivehundredpxRipper.java @@ -276,7 +276,7 @@ public class FivehundredpxRipper extends AbstractJSONRipper { catch (IOException e) { logger.error("Error fetching full-size image from " + rawUrl, e); } - if (images.size() > 0) { + if (!images.isEmpty()) { imageURL = images.first().attr("src"); logger.debug("Found full-size non-watermarked image: " + imageURL); } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java index df7aee5b..a1a1c2b8 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java @@ -250,7 +250,7 @@ public class FlickrRipper extends AbstractHTMLRipper { try { Document doc = getLargestImagePageDocument(this.url); Elements fullsizeImages = doc.select("div#allsizes-photo img"); - if (fullsizeImages.size() == 0) { + if (fullsizeImages.isEmpty()) { logger.error("Could not find flickr image at " + doc.location() + " - missing 'div#allsizes-photo img'"); } else { @@ -274,7 +274,7 @@ public class FlickrRipper extends AbstractHTMLRipper { String largestImagePage = this.url.toExternalForm(); for (Element olSize : doc.select("ol.sizes-list > li > ol > li")) { Elements ola = olSize.select("a"); - if (ola.size() == 0) { + if (ola.isEmpty()) { largestImagePage = this.url.toExternalForm(); } else { diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java index 68aa950a..ec8fc5cf 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/FuraffinityRipper.java @@ -68,7 +68,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper { public Document getNextPage(Document doc) throws IOException { // Find next page Elements nextPageUrl = doc.select("a.right"); - if (nextPageUrl.size() == 0) { + if (nextPageUrl.isEmpty()) { throw new IOException("No more pages"); } String nextUrl = urlBase + nextPageUrl.first().attr("href"); @@ -121,7 +121,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper { // Try to find the description Elements els = resp.parse().select("td[class=alt1][width=\"70%\"]"); - if (els.size() == 0) { + if (els.isEmpty()) { logger.debug("No description at " + page); throw new IOException("No description found"); } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/Hentai2readRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/Hentai2readRipper.java index a87575bd..e0dbff17 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/Hentai2readRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/Hentai2readRipper.java @@ -31,25 +31,59 @@ public class Hentai2readRipper extends AbstractHTMLRipper { return "hentai2read.com"; } - @Override - public String getGID(URL url) throws MalformedURLException { - Pattern p = Pattern.compile("https?://hentai2read\\.com/([a-zA-Z0-9_-]*)/(\\d)?/?"); - Matcher m = p.matcher(url.toExternalForm()); - if (m.matches()) { - return m.group(1); - } - throw new MalformedURLException("Expected hentai2read.com URL format: " + - "hbrowse.com/COMICID - got " + url + " instead"); + @Override + public boolean hasQueueSupport() { + return true; + } + + @Override + public boolean pageContainsAlbums(URL url) { + logger.info("Page contains albums"); + Pattern pat = 
Pattern.compile("https?://hentai2read\\.com/([a-zA-Z0-9_-]*)/?"); + Matcher mat = pat.matcher(url.toExternalForm()); + if (mat.matches()) { + return true; } + return false; + } + + @Override + public List getAlbumsToQueue(Document doc) { + List urlsToAddToQueue = new ArrayList<>(); + for (Element elem : doc.select(".nav-chapters > li > div.media > a")) { + urlsToAddToQueue.add(elem.attr("href")); + } + return urlsToAddToQueue; + } + + @Override + public String getGID(URL url) throws MalformedURLException { + Pattern p = Pattern.compile("https?://hentai2read\\.com/([a-zA-Z0-9_-]*)/(\\d+)?/?"); + Matcher m = p.matcher(url.toExternalForm()); + if (m.matches()) { + return m.group(1) + "_" + m.group(2); + } + throw new MalformedURLException("Expected hentai2read.com URL format: " + + "hentai2read.com/COMICID - got " + url + " instead"); + } @Override public Document getFirstPage() throws IOException { + String thumbnailLink; try { + // If the page contains albums we want to load the main page + if (pageContainsAlbums(url)) { + return Http.url(url).get(); + } Document tempDoc; tempDoc = Http.url(url).get(); // Get the thumbnail page so we can rip all images without loading every page in the comic - String thumbnailLink = tempDoc.select("a[data-original-title=Thumbnails").attr("href"); - return Http.url(thumbnailLink).get(); + thumbnailLink = tempDoc.select("div.col-xs-12 > div.reader-controls > div.controls-block > button > a").attr("href"); + if (!thumbnailLink.equals("")) { + return Http.url(thumbnailLink).get(); + } else { + return Http.url(tempDoc.select("a[data-original-title=Thumbnails").attr("href")).get(); + } } catch (IOException e) { throw new IOException("Unable to get first page"); } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/HentaifoundryRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/HentaifoundryRipper.java index 8d953de1..a81b47f9 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/HentaifoundryRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/HentaifoundryRipper.java @@ -111,7 +111,7 @@ public class HentaifoundryRipper extends AbstractHTMLRipper { @Override public Document getNextPage(Document doc) throws IOException { - if (doc.select("li.next.hidden").size() != 0) { + if (!doc.select("li.next.hidden").isEmpty()) { // Last page throw new IOException("No more pages"); } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/ImagevenueRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/ImagevenueRipper.java index d62722e7..f9175656 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/ImagevenueRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/ImagevenueRipper.java @@ -101,7 +101,7 @@ public class ImagevenueRipper extends AbstractHTMLRipper { .get(); // Find image Elements images = doc.select("a > img"); - if (images.size() == 0) { + if (images.isEmpty()) { logger.warn("Image not found at " + this.url); return; } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/ImgurRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/ImgurRipper.java index fe7937d3..280060eb 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/ImgurRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/ImgurRipper.java @@ -304,10 +304,10 @@ public class ImgurRipper extends AlbumRipper { ImgurAlbum imgurAlbum = new ImgurAlbum(url); for (Element thumb : doc.select("div.image")) { String image; - if (thumb.select("a.zoom").size() > 0) { + if 
(!thumb.select("a.zoom").isEmpty()) { // Clickably full-size image = "http:" + thumb.select("a").attr("href"); - } else if (thumb.select("img").size() > 0) { + } else if (!thumb.select("img").isEmpty()) { image = "http:" + thumb.select("img").attr("src"); } else { // Unable to find image in this div @@ -449,7 +449,7 @@ public class ImgurRipper extends AlbumRipper { URL imageURL = new URL(image); addURLToDownload(imageURL); } - if (imgs.size() == 0) { + if (imgs.isEmpty()) { break; } page++; diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java index efc4cb40..94331650 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java @@ -19,10 +19,11 @@ import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; -import com.rarchives.ripme.ripper.AbstractHTMLRipper; +import com.rarchives.ripme.ripper.AbstractJSONRipper; import com.rarchives.ripme.utils.Http; import org.jsoup.Connection; +import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import com.rarchives.ripme.ui.RipStatusMessage; @@ -30,7 +31,7 @@ import com.rarchives.ripme.utils.Utils; import java.util.HashMap; -public class InstagramRipper extends AbstractHTMLRipper { +public class InstagramRipper extends AbstractJSONRipper { String nextPageID = ""; private String qHash; private boolean rippingTag = false; @@ -39,6 +40,9 @@ public class InstagramRipper extends AbstractHTMLRipper { private String userID; private String rhx_gis = null; private String csrftoken; + // Run into a weird issue with Jsoup cutting some json pages in half, this is a work around + // see https://github.com/RipMeApp/ripme/issues/601 + private String workAroundJsonString; @@ -73,11 +77,9 @@ public class InstagramRipper extends AbstractHTMLRipper { return url.replaceAll("/[A-Z0-9]{8}/", "/"); } - private List getPostsFromSinglePage(Document Doc) { + private List getPostsFromSinglePage(JSONObject json) { List imageURLs = new ArrayList<>(); JSONArray datas; - try { - JSONObject json = getJSONFromPage(Doc); if (json.getJSONObject("entry_data").getJSONArray("PostPage") .getJSONObject(0).getJSONObject("graphql").getJSONObject("shortcode_media") .has("edge_sidecar_to_children")) { @@ -103,10 +105,6 @@ public class InstagramRipper extends AbstractHTMLRipper { } } return imageURLs; - } catch (IOException e) { - logger.error("Unable to get JSON from page " + url.toExternalForm()); - return null; - } } @Override @@ -184,14 +182,14 @@ public class InstagramRipper extends AbstractHTMLRipper { } @Override - public Document getFirstPage() throws IOException { + public JSONObject getFirstPage() throws IOException { Connection.Response resp = Http.url(url).response(); logger.info(resp.cookies()); csrftoken = resp.cookie("csrftoken"); Document p = resp.parse(); // Get the query hash so we can download the next page qHash = getQHash(p); - return p; + return getJSONFromPage(p); } private String getVideoFromPage(String videoID) { @@ -235,14 +233,8 @@ public class InstagramRipper extends AbstractHTMLRipper { } @Override - public List getURLsFromPage(Document doc) { + public List getURLsFromJSON(JSONObject json) { List imageURLs = new ArrayList<>(); - JSONObject json = new JSONObject(); - try { - json = getJSONFromPage(doc); - } catch (IOException e) { - logger.warn("Unable to exact json from page"); - } // get the rhx_gis value so 
we can get the next page later on if (rhx_gis == null) { @@ -282,7 +274,7 @@ public class InstagramRipper extends AbstractHTMLRipper { if (data.getString("__typename").equals("GraphSidecar")) { try { Document slideShowDoc = Http.url(new URL("https://www.instagram.com/p/" + data.getString("shortcode"))).get(); - List toAdd = getPostsFromSinglePage(slideShowDoc); + List toAdd = getPostsFromSinglePage(getJSONFromPage(slideShowDoc)); for (int slideShowInt = 0; slideShowInt < toAdd.size(); slideShowInt++) { addURLToDownload(new URL(toAdd.get(slideShowInt)), image_date + data.getString("shortcode")); } @@ -295,7 +287,7 @@ public class InstagramRipper extends AbstractHTMLRipper { } try { if (!data.getBoolean("is_video")) { - if (imageURLs.size() == 0) { + if (imageURLs.isEmpty()) { // We add this one item to the array because either wise // the ripper will error out because we returned an empty array imageURLs.add(getOriginalUrl(data.getString("display_url"))); @@ -321,7 +313,7 @@ public class InstagramRipper extends AbstractHTMLRipper { } else { // We're ripping from a single page logger.info("Ripping from single page"); - imageURLs = getPostsFromSinglePage(doc); + imageURLs = getPostsFromSinglePage(json); } return imageURLs; @@ -348,8 +340,8 @@ public class InstagramRipper extends AbstractHTMLRipper { } @Override - public Document getNextPage(Document doc) throws IOException { - Document toreturn; + public JSONObject getNextPage(JSONObject json) throws IOException { + JSONObject toreturn; java.util.Map cookies = new HashMap(); // This shouldn't be hardcoded and will break one day cookies.put("ig_pr", "1"); @@ -360,10 +352,13 @@ public class InstagramRipper extends AbstractHTMLRipper { sleep(2500); String vars = "{\"tag_name\":\"" + tagName + "\",\"first\":4,\"after\":\"" + nextPageID + "\"}"; String ig_gis = getIGGis(vars); - toreturn = Http.url("https://www.instagram.com/graphql/query/?query_hash=" + qHash + - "&variables=" + vars).header("x-instagram-gis", ig_gis).cookies(cookies).ignoreContentType().get(); + toreturn = getPage("https://www.instagram.com/graphql/query/?query_hash=" + qHash + + "&variables=" + vars, ig_gis); // Sleep for a while to avoid a ban - logger.info(toreturn.html()); + logger.info(toreturn); + if (!pageHasImages(toreturn)) { + throw new IOException("No more pages"); + } return toreturn; } catch (IOException e) { @@ -377,8 +372,8 @@ public class InstagramRipper extends AbstractHTMLRipper { String vars = "{\"id\":\"" + userID + "\",\"first\":50,\"after\":\"" + nextPageID + "\"}"; String ig_gis = getIGGis(vars); logger.info(ig_gis); - toreturn = Http.url("https://www.instagram.com/graphql/query/?query_hash=" + qHash + "&variables=" + vars - ).header("x-instagram-gis", ig_gis).cookies(cookies).ignoreContentType().get(); + + toreturn = getPage("https://www.instagram.com/graphql/query/?query_hash=" + qHash + "&variables=" + vars, ig_gis); if (!pageHasImages(toreturn)) { throw new IOException("No more pages"); } @@ -396,8 +391,7 @@ public class InstagramRipper extends AbstractHTMLRipper { addURLToDownload(url); } - private boolean pageHasImages(Document doc) { - JSONObject json = new JSONObject(stripHTMLTags(doc.html())); + private boolean pageHasImages(JSONObject json) { int numberOfImages = json.getJSONObject("data").getJSONObject("user") .getJSONObject("edge_owner_to_timeline_media").getJSONArray("edges").length(); if (numberOfImages == 0) { @@ -406,6 +400,34 @@ public class InstagramRipper extends AbstractHTMLRipper { return true; } + private JSONObject getPage(String 
url, String ig_gis) { + StringBuilder sb = new StringBuilder(); + try { + // We can't use Jsoup here because it won't download a non-html file larger than a MB + // even if you set maxBodySize to 0 + URLConnection connection = new URL(url).openConnection(); + connection.setRequestProperty("User-Agent", USER_AGENT); + connection.setRequestProperty("x-instagram-gis", ig_gis); + BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream())); + String line; + while ((line = in.readLine()) != null) { + sb.append(line); + + } + in.close(); + workAroundJsonString = sb.toString(); + return new JSONObject(sb.toString()); + + } catch (MalformedURLException e) { + logger.info("Unable to get query_hash, " + url + " is a malformed URL"); + return null; + } catch (IOException e) { + logger.info("Unable to get query_hash"); + logger.info(e.getMessage()); + return null; + } + } + private String getQHash(Document doc) { String jsFileURL = "https://www.instagram.com" + doc.select("link[rel=preload]").attr("href"); StringBuilder sb = new StringBuilder(); @@ -440,6 +462,11 @@ public class InstagramRipper extends AbstractHTMLRipper { if (m.find()) { return m.group(1); } + jsP = Pattern.compile("0:n.pagination},queryId:.([a-zA-Z0-9]+)."); + m = jsP.matcher(sb.toString()); + if (m.find()) { + return m.group(1); + } } else { Pattern jsP = Pattern.compile("return e.tagMedia.byTagName.get\\(t\\).pagination},queryId:.([a-zA-Z0-9]+)."); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java index 5b60c4f2..453826a3 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java @@ -34,21 +34,18 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper { Pattern p = Pattern.compile("^https?://myhentaicomics.com/index.php/([a-zA-Z0-9-]*)/?$"); Matcher m = p.matcher(url.toExternalForm()); if (m.matches()) { - isTag = false; return m.group(1); } Pattern pa = Pattern.compile("^https?://myhentaicomics.com/index.php/search\\?q=([a-zA-Z0-9-]*)([a-zA-Z0-9=&]*)?$"); Matcher ma = pa.matcher(url.toExternalForm()); if (ma.matches()) { - isTag = true; return ma.group(1); } Pattern pat = Pattern.compile("^https?://myhentaicomics.com/index.php/tag/([0-9]*)/?([a-zA-Z%0-9+?=:]*)?$"); Matcher mat = pat.matcher(url.toExternalForm()); if (mat.matches()) { - isTag = true; return mat.group(1); } @@ -56,6 +53,37 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper { "myhentaicomics.com/index.php/albumName - got " + url + " instead"); } + @Override + public boolean hasQueueSupport() { + return true; + } + + @Override + public boolean pageContainsAlbums(URL url) { + Pattern pa = Pattern.compile("^https?://myhentaicomics.com/index.php/search\\?q=([a-zA-Z0-9-]*)([a-zA-Z0-9=&]*)?$"); + Matcher ma = pa.matcher(url.toExternalForm()); + if (ma.matches()) { + return true; + } + + Pattern pat = Pattern.compile("^https?://myhentaicomics.com/index.php/tag/([0-9]*)/?([a-zA-Z%0-9+?=:]*)?$"); + Matcher mat = pat.matcher(url.toExternalForm()); + if (mat.matches()) { + isTag = true; + return true; + } + return false; + } + + @Override + public List getAlbumsToQueue(Document doc) { + List urlsToAddToQueue = new ArrayList<>(); + for (Element elem : doc.select(".g-album > a")) { + urlsToAddToQueue.add(getDomain() + elem.attr("href")); + } + return urlsToAddToQueue; + } + @Override public Document 
getFirstPage() throws IOException { // "url" is an instance field of the superclass @@ -71,7 +99,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper { Pattern p = Pattern.compile("/index.php/[a-zA-Z0-9_-]*\\?page=\\d"); Matcher m = p.matcher(nextPage); if (m.matches()) { - nextUrl = "http://myhentaicomics.com" + m.group(0); + nextUrl = "https://myhentaicomics.com" + m.group(0); } if (nextUrl.equals("")) { throw new IOException("No more pages"); @@ -81,171 +109,20 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper { return Http.url(nextUrl).get(); } - // This replaces getNextPage when downloading from searchs and tags - private List getNextAlbumPage(String pageUrl) { - List albumPagesList = new ArrayList<>(); - int pageNumber = 1; - albumPagesList.add("http://myhentaicomics.com/index.php/" + pageUrl.split("\\?")[0] + "?page=" + Integer.toString(pageNumber)); - while (true) { - String urlToGet = "http://myhentaicomics.com/index.php/" + pageUrl.split("\\?")[0] + "?page=" + Integer.toString(pageNumber); - Document nextAlbumPage; - try { - logger.info("Grabbing " + urlToGet); - nextAlbumPage = Http.url(urlToGet).get(); - } catch (IOException e) { - logger.warn("Failed to log link in Jsoup"); - nextAlbumPage = null; - e.printStackTrace(); - } - Element elem = nextAlbumPage.select("a.ui-icon-right").first(); - String nextPage = elem.attr("href"); - pageNumber = pageNumber + 1; - if (nextPage.equals("")) { - logger.info("Got " + pageNumber + " pages"); - break; - } - else { - logger.info(nextPage); - albumPagesList.add(nextPage); - logger.info("Adding " + nextPage); - } - } - return albumPagesList; - } - private List getAlbumsFromPage(String url) { - List pagesToRip; - List result = new ArrayList<>(); - logger.info("Running getAlbumsFromPage"); - Document doc; - try { - doc = Http.url("http://myhentaicomics.com" + url).get(); - } catch (IOException e) { - logger.warn("Failed to log link in Jsoup"); - doc = null; - e.printStackTrace(); - } - // This for goes over every album on the page - for (Element elem : doc.select("li.g-album > a")) { - String link = elem.attr("href"); - logger.info("Grabbing album " + link); - pagesToRip = getNextAlbumPage(link); - logger.info(pagesToRip); - for (String element : pagesToRip) { - Document album_doc; - try { - logger.info("grabbing " + element + " with jsoup"); - boolean startsWithHttp = element.startsWith("http://"); - if (!startsWithHttp) { - album_doc = Http.url("http://myhentaicomics.com/" + element).get(); - } - else { - album_doc = Http.url(element).get(); - } - } catch (IOException e) { - logger.warn("Failed to log link in Jsoup"); - album_doc = null; - e.printStackTrace(); - } - for (Element el :album_doc.select("img")) { - String imageSource = el.attr("src"); - // This bool is here so we don't try and download the site logo - if (!imageSource.startsWith("http://")) { - // We replace thumbs with resizes so we can the full sized images - imageSource = imageSource.replace("thumbs", "resizes"); - String url_string = "http://myhentaicomics.com/" + imageSource; - url_string = url_string.replace("%20", "_"); - url_string = url_string.replace("%27", ""); - url_string = url_string.replace("%28", "_"); - url_string = url_string.replace("%29", "_"); - url_string = url_string.replace("%2C", "_"); - if (isTag) { - logger.info("Downloading from a tag or search"); - try { - sleep(500); - result.add("http://myhentaicomics.com/" + imageSource); - addURLToDownload(new URL("http://myhentaicomics.com/" + imageSource), "", 
url_string.split("/")[6]); - } - catch (MalformedURLException e) { - logger.warn("Malformed URL"); - e.printStackTrace(); - } - } - } - } - } - } - return result; - } - - private List getListOfPages(Document doc) { - List pages = new ArrayList<>(); - // Get the link from the last button - String nextPageUrl = doc.select("a.ui-icon-right").last().attr("href"); - Pattern pat = Pattern.compile("/index\\.php/tag/[0-9]*/[a-zA-Z0-9_\\-:+]*\\?page=(\\d+)"); - Matcher mat = pat.matcher(nextPageUrl); - if (mat.matches()) { - logger.debug("Getting pages from a tag"); - String base_link = mat.group(0).replaceAll("\\?page=\\d+", ""); - logger.debug("base_link is " + base_link); - int numOfPages = Integer.parseInt(mat.group(1)); - for (int x = 1; x != numOfPages +1; x++) { - logger.debug("running loop"); - String link = base_link + "?page=" + Integer.toString(x); - pages.add(link); - } - } else { - Pattern pa = Pattern.compile("/index\\.php/search\\?q=[a-zA-Z0-9_\\-:]*&page=(\\d+)"); - Matcher ma = pa.matcher(nextPageUrl); - if (ma.matches()) { - logger.debug("Getting pages from a search"); - String base_link = ma.group(0).replaceAll("page=\\d+", ""); - logger.debug("base_link is " + base_link); - int numOfPages = Integer.parseInt(ma.group(1)); - for (int x = 1; x != numOfPages +1; x++) { - logger.debug("running loop"); - String link = base_link + "page=" + Integer.toString(x); - logger.debug(link); - pages.add(link); - } - } - } - return pages; - } @Override public List getURLsFromPage(Document doc) { List result = new ArrayList<>(); - // Checks if this is a comic page or a page of albums - // If true the page is a page of albums - if (doc.toString().contains("class=\"g-item g-album\"")) { - // This if checks that there is more than 1 page - if (!doc.select("a.ui-icon-right").last().attr("href").equals("")) { - // There is more than one page so we call getListOfPages - List pagesToRip = getListOfPages(doc); - logger.debug("Pages to rip = " + pagesToRip); - for (String url : pagesToRip) { - logger.debug("Getting albums from " + url); - result = getAlbumsFromPage(url); - } - } else { - logger.debug("There is only one page on this page of albums"); - // There is only 1 page so we call getAlbumsFromPage and pass it the page url - result = getAlbumsFromPage(doc.select("div.g-description > a").attr("href")); - } - return result; - } - else { for (Element el : doc.select("img")) { String imageSource = el.attr("src"); // This bool is here so we don't try and download the site logo if (!imageSource.startsWith("http://") && !imageSource.startsWith("https://")) { // We replace thumbs with resizes so we can the full sized images imageSource = imageSource.replace("thumbs", "resizes"); - result.add("http://myhentaicomics.com/" + imageSource); + result.add("https://myhentaicomics.com" + imageSource); } } - } return result; } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java index 7715be23..098f1e45 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java @@ -78,7 +78,7 @@ public class NfsfwRipper extends AlbumRipper { List subAlbums = new ArrayList<>(); int index = 0; subAlbums.add(new Pair(this.url.toExternalForm(), "")); - while (subAlbums.size() > 0) { + while (!subAlbums.isEmpty()) { if (isStopped()) { break; } @@ -167,7 +167,7 @@ public class NfsfwRipper extends AlbumRipper { .referrer(this.url) .get(); Elements images = 
doc.select(".gbBlock img"); - if (images.size() == 0) { + if (images.isEmpty()) { logger.error("Failed to find image at " + this.url); return; } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/NhentaiRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/NhentaiRipper.java index ac8f782d..7752f18c 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/NhentaiRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/NhentaiRipper.java @@ -2,6 +2,7 @@ package com.rarchives.ripme.ripper.rippers; import com.rarchives.ripme.ripper.AbstractHTMLRipper; import com.rarchives.ripme.ripper.DownloadThreadPool; +import com.rarchives.ripme.ui.RipStatusMessage; import com.rarchives.ripme.utils.Http; import com.rarchives.ripme.utils.Utils; import org.jsoup.nodes.Document; @@ -19,15 +20,33 @@ import java.util.regex.Pattern; public class NhentaiRipper extends AbstractHTMLRipper { - // All sleep times are in milliseconds - private static final int IMAGE_SLEEP_TIME = 1500; - private String albumTitle; private Document firstPage; // Thread pool for finding direct image links from "image" pages (html) private DownloadThreadPool nhentaiThreadPool = new DownloadThreadPool("nhentai"); + @Override + public boolean hasQueueSupport() { + return true; + } + + @Override + public boolean pageContainsAlbums(URL url) { + Pattern pa = Pattern.compile("^https?://nhentai\\.net/tag/([a-zA-Z0-9_\\-]+)/?"); + Matcher ma = pa.matcher(url.toExternalForm()); + return ma.matches(); + } + + @Override + public List getAlbumsToQueue(Document doc) { + List urlsToAddToQueue = new ArrayList<>(); + for (Element elem : doc.select("a.cover")) { + urlsToAddToQueue.add("https://" + getDomain() + elem.attr("href")); + } + return urlsToAddToQueue; + } + @Override public DownloadThreadPool getThreadPool() { return nhentaiThreadPool; @@ -64,6 +83,39 @@ public class NhentaiRipper extends AbstractHTMLRipper { return "nhentai" + title; } + private List getTags(Document doc) { + List tags = new ArrayList<>(); + for (Element tag : doc.select("a.tag")) { + tags.add(tag.attr("href").replaceAll("/tag/", "").replaceAll("/", "")); + } + return tags; + } + + /** + * Checks for blacklisted tags on page. 
If it finds one it returns it; if not, it returns null. + * + * @param doc Document of the album page to check + * @param blackListedTags Tags the user has blacklisted + * @return The first blacklisted tag found on the page, or null if none was found + */ + public String checkTags(Document doc, String[] blackListedTags) { + // If the user hasn't blacklisted any tags we return null + if (blackListedTags == null) { + return null; + } + logger.info("Blacklisted tags " + blackListedTags[0]); + List tagsOnPage = getTags(doc); + for (String tag : blackListedTags) { + for (String pageTag : tagsOnPage) { + // We replace all dashes in the tag with spaces because the tags we get from the site are separated using + // dashes + if (tag.trim().toLowerCase().equals(pageTag.replaceAll("-", " ").toLowerCase())) { + return tag; + } + } + } + return null; + } + @Override public String getGID(URL url) throws MalformedURLException { // Ex: https://nhentai.net/g/159174/ @@ -82,90 +134,30 @@ public class NhentaiRipper extends AbstractHTMLRipper { if (firstPage == null) { firstPage = Http.url(url).get(); } + + String blacklistedTag = checkTags(firstPage, Utils.getConfigStringArray("nhentai.blacklist.tags")); + if (blacklistedTag != null) { + sendUpdate(RipStatusMessage.STATUS.DOWNLOAD_WARN, "Skipping " + url.toExternalForm() + " as it " + + "contains the blacklisted tag \"" + blacklistedTag + "\""); + return null; + } return firstPage; } @Override public List getURLsFromPage(Document page) { List imageURLs = new ArrayList<>(); - Elements thumbs = page.select(".gallerythumb"); + Elements thumbs = page.select("a.gallerythumb > img"); for (Element el : thumbs) { - String imageUrl = el.attr("href"); - imageURLs.add("https://nhentai.net" + imageUrl); + imageURLs.add(el.attr("data-src").replaceAll("t\\.n", "i.n").replaceAll("t\\.", ".")); } return imageURLs; } @Override public void downloadURL(URL url, int index) { - NHentaiImageThread t = new NHentaiImageThread(url, index, this.workingDir); - nhentaiThreadPool.addThread(t); - try { - Thread.sleep(IMAGE_SLEEP_TIME); - } catch (InterruptedException e) { - logger.warn("Interrupted while waiting to load next image", e); - } + addURLToDownload(url, getPrefix(index), "", this.url.toExternalForm(), null); } - private class NHentaiImageThread extends Thread { - private URL url; - private int index; - private File workingDir; - - NHentaiImageThread(URL url, int index, File workingDir) { - super(); - this.url = url; - this.index = index; - this.workingDir = workingDir; - } - - @Override - public void run() { - fetchImage(); - } - - private void fetchImage() { - try { - //Document doc = getPageWithRetries(this.url); - Document doc = Http.url(this.url).get(); - - // Find image - Elements images = doc.select("#image-container > a > img"); - if (images.size() == 0) { - // Attempt to find image elsewise (Issue #41) - images = doc.select("img#img"); - if (images.size() == 0) { - logger.warn("Image not found at " + this.url); - return; - } - } - Element image = images.first(); - String imgsrc = image.attr("src"); - logger.info("Found URL " + imgsrc + " via " + images.get(0)); - - Pattern p = Pattern.compile("^https?://i.nhentai.net/galleries/\\d+/(.+)$"); - Matcher m = p.matcher(imgsrc); - if (m.matches()) { - // Manually discover filename from URL - String savePath = this.workingDir + File.separator; - if (Utils.getConfigBoolean("download.save_order", true)) { - savePath += String.format("%03d_", index); - } - savePath += m.group(1); - addURLToDownload(new URL(imgsrc), new File(savePath)); - } else { - // Provide prefix and let the AbstractRipper "guess" the filename - String prefix = ""; - if
(Utils.getConfigBoolean("download.save_order", true)) { - prefix = String.format("%03d_", index); - } - addURLToDownload(new URL(imgsrc), prefix); - } - } catch (IOException e) { - logger.error("[!] Exception while loading/parsing " + this.url, e); - } - } - - } } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java index fd0eebde..f5cf5cf7 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java @@ -54,7 +54,7 @@ public class PhotobucketRipper extends AlbumRipper { } Document albumDoc = pageResponse.parse(); Elements els = albumDoc.select("div.libraryTitle > h1"); - if (els.size() == 0) { + if (els.isEmpty()) { throw new IOException("Could not find libraryTitle at " + url); } return els.get(0).text(); @@ -92,7 +92,7 @@ public class PhotobucketRipper extends AlbumRipper { subsToRip.add(sub); } - while (subsToRip.size() > 0 && !isStopped()) { + while (!subsToRip.isEmpty() && !isStopped()) { try { Thread.sleep(1000); } catch (InterruptedException e) { diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/PichunterRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/PichunterRipper.java index d9f63626..e6c5d110 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/PichunterRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/PichunterRipper.java @@ -60,10 +60,7 @@ public class PichunterRipper extends AbstractHTMLRipper { private boolean isPhotoSet(URL url) { Pattern p = Pattern.compile("https?://www.pichunter.com/gallery/\\d+/(\\S*)/?"); Matcher m = p.matcher(url.toExternalForm()); - if (m.matches()) { - return true; - } - return false; + return m.matches(); } @Override diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/PornhubRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/PornhubRipper.java index 25f71d09..8b3b8d7d 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/PornhubRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/PornhubRipper.java @@ -91,7 +91,7 @@ public class PornhubRipper extends AlbumRipper { // Find thumbnails Elements thumbs = albumDoc.select(".photoBlockBox li"); - if (thumbs.size() == 0) { + if (thumbs.isEmpty()) { logger.debug("albumDoc: " + albumDoc); logger.debug("No images found at " + nextUrl); return; diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/Rule34Ripper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/Rule34Ripper.java index b7e0f7b0..681738fa 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/Rule34Ripper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/Rule34Ripper.java @@ -37,10 +37,7 @@ public class Rule34Ripper extends AbstractHTMLRipper { public boolean canRip(URL url){ Pattern p = Pattern.compile("https?://rule34.xxx/index.php\\?page=post&s=list&tags=([\\S]+)"); Matcher m = p.matcher(url.toExternalForm()); - if (m.matches()) { - return true; - } - return false; + return m.matches(); } @Override diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java index c6440bb8..d83d5930 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java @@ -43,7 +43,7 @@ public class SankakuComplexRipper extends 
AbstractHTMLRipper { Matcher m = p.matcher(url.toExternalForm()); if (m.matches()) { try { - return URLDecoder.decode(m.group(2), "UTF-8"); + return URLDecoder.decode(m.group(1) + "_" + m.group(2), "UTF-8"); } catch (UnsupportedEncodingException e) { throw new MalformedURLException("Cannot decode tag name '" + m.group(1) + "'"); } @@ -53,6 +53,20 @@ public class SankakuComplexRipper extends AbstractHTMLRipper { url + "instead"); } + public String getSubDomain(URL url){ + Pattern p = Pattern.compile("^https?://([a-zA-Z0-9]+\\.)?sankakucomplex\\.com/.*tags=([^&]+).*$"); + Matcher m = p.matcher(url.toExternalForm()); + if (m.matches()) { + try { + return URLDecoder.decode(m.group(1), "UTF-8"); + } catch (UnsupportedEncodingException e) { + return null; + } + } + return null; + + } + @Override public Document getFirstPage() throws IOException { if (albumDoc == null) { @@ -71,9 +85,11 @@ public class SankakuComplexRipper extends AbstractHTMLRipper { for (Element thumbSpan : doc.select("div.content > div > span.thumb > a")) { String postLink = thumbSpan.attr("href"); try { + String subDomain = getSubDomain(url); + String siteURL = "https://" + subDomain + "sankakucomplex.com"; // Get the page the full sized image is on - Document subPage = Http.url("https://chan.sankakucomplex.com" + postLink).get(); - logger.info("Checking page " + "https://chan.sankakucomplex.com" + postLink); + Document subPage = Http.url(siteURL + postLink).get(); + logger.info("Checking page " + siteURL + postLink); imageURLs.add("https:" + subPage.select("div[id=stats] > ul > li > a[id=highres]").attr("href")); } catch (IOException e) { logger.warn("Error while loading page " + postLink, e); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/SinnercomicsRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/SinnercomicsRipper.java index 326de1a1..166bce88 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/SinnercomicsRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/SinnercomicsRipper.java @@ -34,7 +34,7 @@ public class SinnercomicsRipper extends AbstractHTMLRipper { Pattern p = Pattern.compile("^https?://sinnercomics.com/comic/([a-zA-Z0-9-]*)/?$"); Matcher m = p.matcher(url.toExternalForm()); if (m.matches()) { - return m.group(1); + return m.group(1).replaceAll("-page-\\d+", ""); } throw new MalformedURLException("Expected sinnercomics.com URL format: " + "sinnercomics.com/comic/albumName - got " + url + " instead"); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/TheyiffgalleryRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/TheyiffgalleryRipper.java index 9bdbfec6..ac3e363c 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/TheyiffgalleryRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/TheyiffgalleryRipper.java @@ -59,8 +59,8 @@ public class TheyiffgalleryRipper extends AbstractHTMLRipper { @Override public List getURLsFromPage(Document doc) { List result = new ArrayList<>(); - for (Element el : doc.select("ul.thumbnails > li.gdthumb")) { - String imageSource = el.select("a > img").attr("src"); + for (Element el : doc.select("img.thumbnail")) { + String imageSource = el.attr("src"); imageSource = imageSource.replaceAll("_data/i", ""); imageSource = imageSource.replaceAll("-\\w\\w_\\w\\d+x\\d+", ""); result.add("https://theyiffgallery.com" + imageSource); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java index 
89884854..41da5d13 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/TumblrRipper.java @@ -23,8 +23,8 @@ import com.rarchives.ripme.utils.Utils; public class TumblrRipper extends AlbumRipper { private static final String DOMAIN = "tumblr.com", - HOST = "tumblr", - IMAGE_PATTERN = "([^\\s]+(\\.(?i)(jpg|png|gif|bmp))$)"; + HOST = "tumblr", + IMAGE_PATTERN = "([^\\s]+(\\.(?i)(jpg|png|gif|bmp))$)"; private enum ALBUM_TYPE { SUBDOMAIN, @@ -37,11 +37,8 @@ public class TumblrRipper extends AlbumRipper { private static final String TUMBLR_AUTH_CONFIG_KEY = "tumblr.auth"; private static boolean useDefaultApiKey = false; // fall-back for bad user-specified key - private static final List APIKEYS = Arrays.asList("JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX", - "FQrwZMCxVnzonv90rgNUJcAk4FpnoS0mYuSuGYqIpM2cFgp9L4", - "qpdkY6nMknksfvYAhf2xIHp0iNRLkMlcWShxqzXyFJRxIsZ1Zz"); - private static int genNum = new Random().nextInt(APIKEYS.size()); - private static final String API_KEY = APIKEYS.get(genNum); // Select random API key from APIKEYS + private static String API_KEY = null; + /** * Gets the API key. @@ -49,6 +46,10 @@ public class TumblrRipper extends AlbumRipper { * @return Tumblr API key */ public static String getApiKey() { + if (API_KEY == null) { + API_KEY = pickRandomApiKey(); + } + if (useDefaultApiKey || Utils.getConfigString(TUMBLR_AUTH_CONFIG_KEY, "JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX").equals("JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX")) { logger.info("Using api key: " + API_KEY); return API_KEY; @@ -60,9 +61,19 @@ public class TumblrRipper extends AlbumRipper { } + private static String pickRandomApiKey() { + final List APIKEYS = Arrays.asList("JFNLu3CbINQjRdUvZibXW9VpSEVYYtiPJ86o8YmvgLZIoKyuNX", + "FQrwZMCxVnzonv90rgNUJcAk4FpnoS0mYuSuGYqIpM2cFgp9L4", + "qpdkY6nMknksfvYAhf2xIHp0iNRLkMlcWShxqzXyFJRxIsZ1Zz"); + int genNum = new Random().nextInt(APIKEYS.size()); + logger.info(genNum); + final String API_KEY = APIKEYS.get(genNum); // Select random API key from APIKEYS + return API_KEY; + } + public TumblrRipper(URL url) throws IOException { super(url); - if (API_KEY == null) { + if (getApiKey() == null) { throw new IOException("Could not find tumblr authentication key in configuration"); } } @@ -100,7 +111,7 @@ public class TumblrRipper extends AlbumRipper { checkURL += "/info?api_key=" + getApiKey(); try { JSONObject json = Http.url(checkURL) - .getJSON(); + .getJSON(); int status = json.getJSONObject("meta").getInt("status"); return status == 200; } catch (IOException e) { @@ -245,11 +256,11 @@ public class TumblrRipper extends AlbumRipper { } } else if (post.has("video_url")) { try { - fileURL = new URL(post.getString("video_url").replaceAll("http", "https")); + fileURL = new URL(post.getString("video_url").replaceAll("http:", "https:")); addURLToDownload(fileURL); } catch (Exception e) { - logger.error("[!] Error while parsing video in " + post, e); - return true; + logger.error("[!] 
Error while parsing video in " + post, e); + return true; } } if (albumType == ALBUM_TYPE.POST) { @@ -263,24 +274,24 @@ public class TumblrRipper extends AlbumRipper { StringBuilder sb = new StringBuilder(); if (albumType == ALBUM_TYPE.POST) { sb.append("http://api.tumblr.com/v2/blog/") - .append(subdomain) - .append("/posts?id=") - .append(postNumber) - .append("&api_key=") - .append(getApiKey()); + .append(subdomain) + .append("/posts?id=") + .append(postNumber) + .append("&api_key=") + .append(getApiKey()); return sb.toString(); } sb.append("http://api.tumblr.com/v2/blog/") - .append(subdomain) - .append("/posts/") - .append(mediaType) - .append("?api_key=") - .append(getApiKey()) - .append("&offset=") - .append(offset); + .append(subdomain) + .append("/posts/") + .append(mediaType) + .append("?api_key=") + .append(getApiKey()) + .append("&offset=") + .append(offset); if (albumType == ALBUM_TYPE.TAG) { - sb.append("&tag=") - .append(tagName); + sb.append("&tag=") + .append(tagName); } return sb.toString(); } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/TwitterRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/TwitterRipper.java index bd828f1b..60c7a6bb 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/TwitterRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/TwitterRipper.java @@ -228,7 +228,7 @@ public class TwitterRipper extends AlbumRipper { int parsedCount = 0; for (int i = 0; i < MAX_REQUESTS; i++) { List tweets = getTweets(getApiURL(lastMaxID - 1)); - if (tweets.size() == 0) { + if (tweets.isEmpty()) { logger.info(" No more tweets found."); break; } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/TwodgalleriesRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/TwodgalleriesRipper.java index 1202936d..d7cd8e10 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/TwodgalleriesRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/TwodgalleriesRipper.java @@ -82,7 +82,7 @@ public class TwodgalleriesRipper extends AbstractHTMLRipper { Document nextDoc = Http.url(url) .cookies(cookies) .get(); - if (nextDoc.select("div.hcaption > img").size() == 0) { + if (nextDoc.select("div.hcaption > img").isEmpty()) { throw new IOException("No more images to retrieve"); } return nextDoc; diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/WebtoonsRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/WebtoonsRipper.java index de785a44..ded3ce2c 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/WebtoonsRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/WebtoonsRipper.java @@ -38,10 +38,7 @@ public class WebtoonsRipper extends AbstractHTMLRipper { public boolean canRip(URL url) { Pattern pat = Pattern.compile("https?://www.webtoons.com/[a-zA-Z-_]+/[a-zA-Z_-]+/([a-zA-Z0-9_-]*)/[a-zA-Z0-9_-]+/\\S*"); Matcher mat = pat.matcher(url.toExternalForm()); - if (mat.matches()) { - return true; - } - return false; + return mat.matches(); } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/WordpressComicRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/WordpressComicRipper.java index 8b7b7b80..bf46a40f 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/WordpressComicRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/WordpressComicRipper.java @@ -31,7 +31,6 @@ public class WordpressComicRipper extends AbstractHTMLRipper { // http://www.konradokonski.com/wiory/comic/08182008/ // 
http://freeadultcomix.com/finders-feepaid-in-full-sparrow/ // http://thisis.delvecomic.com/NewWP/comic/in-too-deep/ - // http://tnbtu.com/comic/01-00/ // http://shipinbottle.pepsaga.com/?p=281 private static List explicit_domains = Arrays.asList( @@ -43,7 +42,6 @@ public class WordpressComicRipper extends AbstractHTMLRipper { "www.konradokonski.com", "freeadultcomix.com", "thisis.delvecomic.com", - "tnbtu.com", "shipinbottle.pepsaga.com", "8muses.download", "spyingwithlana.com" @@ -56,7 +54,6 @@ public class WordpressComicRipper extends AbstractHTMLRipper { "prismblush.com", "www.konradokonski.com", "thisis.delvecomic.com", - "tnbtu.com", "spyingwithlana.com" ); @@ -137,12 +134,6 @@ public class WordpressComicRipper extends AbstractHTMLRipper { return true; } - Pattern tnbtuPat = Pattern.compile("https?://tnbtu.com/comic/([0-9_\\-]*)/?$"); - Matcher tnbtuMat = tnbtuPat.matcher(url.toExternalForm()); - if (tnbtuMat.matches()) { - return true; - } - Pattern shipinbottlePat = Pattern.compile("https?://shipinbottle.pepsaga.com/\\?p=([0-9]*)/?$"); Matcher shipinbottleMat =shipinbottlePat.matcher(url.toExternalForm()); if (shipinbottleMat.matches()) { @@ -160,12 +151,67 @@ public class WordpressComicRipper extends AbstractHTMLRipper { if (spyingwithlanaMat.matches()) { return true; } + + Pattern pa = Pattern.compile("^https?://8muses.download/\\?s=([a-zA-Z0-9-]*)"); + Matcher ma = pa.matcher(url.toExternalForm()); + if (ma.matches()) { + return true; + } + + Pattern pat = Pattern.compile("https?://8muses.download/page/\\d+/\\?s=([a-zA-Z0-9-]*)"); + Matcher mat = pat.matcher(url.toExternalForm()); + if (mat.matches()) { + return true; + } + + pat = Pattern.compile("https://8muses.download/category/([a-zA-Z0-9-]*)/?"); + mat = pat.matcher(url.toExternalForm()); + if (mat.matches()) { + return true; + } } return false; } + @Override + public boolean hasQueueSupport() { + return true; + } + + @Override + public boolean pageContainsAlbums(URL url) { + Pattern pa = Pattern.compile("^https?://8muses.download/\\?s=([a-zA-Z0-9-]*)"); + Matcher ma = pa.matcher(url.toExternalForm()); + if (ma.matches()) { + return true; + } + + Pattern pat = Pattern.compile("https?://8muses.download/page/\\d+/\\?s=([a-zA-Z0-9-]*)"); + Matcher mat = pat.matcher(url.toExternalForm()); + if (mat.matches()) { + return true; + } + + pat = Pattern.compile("https://8muses.download/category/([a-zA-Z0-9-]*)/?"); + mat = pat.matcher(url.toExternalForm()); + if (mat.matches()) { + return true; + } + + return false; + } + + @Override + public List getAlbumsToQueue(Document doc) { + List urlsToAddToQueue = new ArrayList<>(); + for (Element elem : doc.select("#post_masonry > article > div > figure > a")) { + urlsToAddToQueue.add(elem.attr("href")); + } + return urlsToAddToQueue; + } + @Override public String getAlbumTitle(URL url) throws MalformedURLException { Pattern totempole666Pat = Pattern.compile("(?:https?://)?(?:www\\.)?totempole666.com/comic/([a-zA-Z0-9_-]*)/?$"); @@ -222,12 +268,6 @@ public class WordpressComicRipper extends AbstractHTMLRipper { return getHost() + "_" + comicsxxxMat.group(1); } - Pattern tnbtuPat = Pattern.compile("https?://tnbtu.com/comic/([0-9_\\-]*)/?$"); - Matcher tnbtuMat = tnbtuPat.matcher(url.toExternalForm()); - if (tnbtuMat.matches()) { - return getHost() + "_" + "The_Night_Belongs_to_Us"; - } - Pattern shipinbottlePat = Pattern.compile("https?://shipinbottle.pepsaga.com/\\?p=([0-9]*)/?$"); Matcher shipinbottleMat =shipinbottlePat.matcher(url.toExternalForm()); if (shipinbottleMat.matches()) { @@ 
-358,13 +398,10 @@ public class WordpressComicRipper extends AbstractHTMLRipper { || getHost().contains("themonsterunderthebed.net")) { addURLToDownload(url, pageTitle + "_"); } - if (getHost().contains("tnbtu.com")) { - // We need to set the referrer header for tnbtu - addURLToDownload(url, getPrefix(index), "","http://www.tnbtu.com/comic", null); - } else { - // If we're ripping a site where we can't get the page number/title we just rip normally - addURLToDownload(url, getPrefix(index)); - } + + // If we're ripping a site where we can't get the page number/title we just rip normally + addURLToDownload(url, getPrefix(index)); + } diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java index b92aa9dd..ce01f1cf 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java @@ -64,10 +64,7 @@ public class XhamsterRipper extends AbstractHTMLRipper { public boolean canRip(URL url) { Pattern p = Pattern.compile("^https?://[wmde.]*xhamster\\.com/photos/gallery/.*?(\\d+)$"); Matcher m = p.matcher(url.toExternalForm()); - if (m.matches()) { - return true; - } - return false; + return m.matches(); } @Override diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java index 35f42564..ccaaa225 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/GfycatRipper.java @@ -72,7 +72,7 @@ public class GfycatRipper extends VideoRipper { Document doc = Http.url(url).get(); Elements videos = doc.select("source#mp4Source"); - if (videos.size() == 0) { + if (videos.isEmpty()) { throw new IOException("Could not find source#mp4source at " + url); } String vidUrl = videos.first().attr("src"); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java index 2c076d00..9a2e47b1 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java @@ -58,7 +58,7 @@ public class MotherlessVideoRipper extends VideoRipper { logger.error("WTF"); } List vidUrls = Utils.between(html, "__fileurl = '", "';"); - if (vidUrls.size() == 0) { + if (vidUrls.isEmpty()) { throw new IOException("Could not find video URL at " + url); } String vidUrl = vidUrls.get(0); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/TwitchVideoRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/TwitchVideoRipper.java index 9cca50aa..8a483066 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/TwitchVideoRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/TwitchVideoRipper.java @@ -62,7 +62,7 @@ public class TwitchVideoRipper extends VideoRipper { String title = doc.title(); Elements script = doc.select("script"); - if (script.size() == 0) { + if (script.isEmpty()) { throw new IOException("Could not find script code at " + url); } //Regex assumes highest quality source is listed first diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java index 6084607a..f36d7ce4 100644 
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java @@ -56,7 +56,7 @@ public class ViddmeRipper extends VideoRipper { logger.info(" Retrieving " + this.url.toExternalForm()); Document doc = Http.url(this.url).get(); Elements videos = doc.select("meta[name=twitter:player:stream]"); - if (videos.size() == 0) { + if (videos.isEmpty()) { throw new IOException("Could not find twitter:player:stream at " + url); } String vidUrl = videos.first().attr("content"); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/VidearnRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/VidearnRipper.java index 7f215770..bae7a965 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/VidearnRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/VidearnRipper.java @@ -57,7 +57,7 @@ public class VidearnRipper extends VideoRipper { logger.info("Retrieving " + this.url); Document doc = Http.url(url).get(); List mp4s = Utils.between(doc.html(), "file:\"", "\""); - if (mp4s.size() == 0) { + if (mp4s.isEmpty()) { throw new IOException("Could not find files at " + url); } String vidUrl = mp4s.get(0); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/VineRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/VineRipper.java index 09637b84..d2931b0b 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/VineRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/VineRipper.java @@ -57,7 +57,7 @@ public class VineRipper extends VideoRipper { logger.info(" Retrieving " + this.url.toExternalForm()); Document doc = Http.url(this.url).get(); Elements props = doc.select("meta[property=twitter:player:stream]"); - if (props.size() == 0) { + if (props.isEmpty()) { throw new IOException("Could not find meta property 'twitter:player:stream' at " + url); } String vidUrl = props.get(0).attr("content"); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/XhamsterRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/XhamsterRipper.java index b30f5a1a..09df1c8d 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/XhamsterRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/XhamsterRipper.java @@ -57,7 +57,7 @@ public class XhamsterRipper extends VideoRipper { logger.info("Retrieving " + this.url); Document doc = Http.url(url).get(); Elements videos = doc.select("div.player-container > a"); - if (videos.size() == 0) { + if (videos.isEmpty()) { throw new IOException("Could not find Embed code at " + url); } String vidUrl = videos.attr("href"); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/YoupornRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/YoupornRipper.java index a5e00b4b..0c87f175 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/YoupornRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/YoupornRipper.java @@ -57,7 +57,7 @@ public class YoupornRipper extends VideoRipper { logger.info(" Retrieving " + this.url); Document doc = Http.url(this.url).get(); Elements videos = doc.select("video"); - if (videos.size() == 0) { + if (videos.isEmpty()) { throw new IOException("Could not find Embed code at " + url); } Element video = videos.get(0); diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/YuvutuRipper.java 
b/src/main/java/com/rarchives/ripme/ripper/rippers/video/YuvutuRipper.java index f455abe2..34c10947 100644 --- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/YuvutuRipper.java +++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/YuvutuRipper.java @@ -65,7 +65,7 @@ public class YuvutuRipper extends VideoRipper { throw new IOException("Could not find iframe code at " + url); } Elements script = doc.select("script"); - if (script.size() == 0) { + if (script.isEmpty()) { throw new IOException("Could not find script code at " + url); } Pattern p = Pattern.compile("file: \"(.*?)\""); diff --git a/src/main/java/com/rarchives/ripme/ui/MainWindow.java b/src/main/java/com/rarchives/ripme/ui/MainWindow.java index 95961f44..8d881fdf 100644 --- a/src/main/java/com/rarchives/ripme/ui/MainWindow.java +++ b/src/main/java/com/rarchives/ripme/ui/MainWindow.java @@ -115,7 +115,7 @@ public final class MainWindow implements Runnable, RipStatusHandler { private static JButton configSaveDirButton; private static JTextField configRetriesText; private static JCheckBox configAutoupdateCheckbox; - private static JComboBox configLogLevelCombobox; + private static JComboBox configLogLevelCombobox; private static JCheckBox configURLHistoryCheckbox; private static JCheckBox configPlaySound; private static JCheckBox configSaveOrderCheckbox; @@ -127,6 +127,10 @@ public final class MainWindow implements Runnable, RipStatusHandler { private static JCheckBox configSaveDescriptions; private static JCheckBox configPreferMp4; private static JCheckBox configWindowPosition; + private static JComboBox configSelectLangComboBox; + private static JLabel configThreadsLabel; + private static JLabel configTimeoutLabel; + private static JLabel configRetriesLabel; private static TrayIcon trayIcon; private static MenuItem trayMenuMain; @@ -136,11 +140,11 @@ public final class MainWindow implements Runnable, RipStatusHandler { private static AbstractRipper ripper; - private ResourceBundle rb = Utils.getResourceBundle(); + private ResourceBundle rb = Utils.getResourceBundle(null); private void updateQueueLabel() { if (queueListModel.size() > 0) { - optionQueue.setText( rb.getString("Queue") + " (" + queueListModel.size() + ")"); + optionQueue.setText(rb.getString("Queue") + " (" + queueListModel.size() + ")"); } else { optionQueue.setText(rb.getString("Queue")); } @@ -162,6 +166,11 @@ public final class MainWindow implements Runnable, RipStatusHandler { return checkbox; } + + public static void addUrlToQueue(String url) { + queueListModel.addElement(url); + } + public MainWindow() { mainFrame = new JFrame("RipMe v" + UpdateUtils.getThisJarVersion()); mainFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); @@ -189,7 +198,7 @@ public final class MainWindow implements Runnable, RipStatusHandler { if (!configurationPanel.isVisible()) { optionConfiguration.doClick(); } - Runnable r = () -> UpdateUtils.updateProgram(configUpdateLabel); + Runnable r = () -> UpdateUtils.updateProgramGUI(configUpdateLabel); new Thread(r).start(); } @@ -466,9 +475,9 @@ public final class MainWindow implements Runnable, RipStatusHandler { // TODO Configuration components configUpdateButton = new JButton(rb.getString("check.for.updates")); configUpdateLabel = new JLabel( rb.getString("current.version") + ": " + UpdateUtils.getThisJarVersion(), JLabel.RIGHT); - JLabel configThreadsLabel = new JLabel(rb.getString("max.download.threads") + ":", JLabel.RIGHT); - JLabel configTimeoutLabel = new JLabel(rb.getString("timeout.mill"), JLabel.RIGHT); - JLabel 
configRetriesLabel = new JLabel(rb.getString("retry.download.count"), JLabel.RIGHT); + configThreadsLabel = new JLabel(rb.getString("max.download.threads") + ":", JLabel.RIGHT); + configTimeoutLabel = new JLabel(rb.getString("timeout.mill"), JLabel.RIGHT); + configRetriesLabel = new JLabel(rb.getString("retry.download.count"), JLabel.RIGHT); configThreadsText = new JTextField(Integer.toString(Utils.getConfigInteger("threads.size", 3))); configTimeoutText = new JTextField(Integer.toString(Utils.getConfigInteger("download.timeout", 60000))); configRetriesText = new JTextField(Integer.toString(Utils.getConfigInteger("download.retries", 3))); @@ -486,7 +495,8 @@ public final class MainWindow implements Runnable, RipStatusHandler { configWindowPosition = addNewCheckbox(rb.getString("restore.window.position"), "window.position", true); configURLHistoryCheckbox = addNewCheckbox(rb.getString("remember.url.history"), "remember.url_history", true); - configLogLevelCombobox = new JComboBox(new String[] {"Log level: Error", "Log level: Warn", "Log level: Info", "Log level: Debug"}); + configLogLevelCombobox = new JComboBox<>(new String[] {"Log level: Error", "Log level: Warn", "Log level: Info", "Log level: Debug"}); + configSelectLangComboBox = new JComboBox<>(new String[] {"en_US", "de_DE", "es_ES", "fr_CH", "kr_KR", "pt_PT", "fi_FI", "in_ID", "porrisavvo_FI"}); configLogLevelCombobox.setSelectedItem(Utils.getConfigString("log.level", "Log level: Debug")); setLogLevel(configLogLevelCombobox.getSelectedItem().toString()); configSaveDirLabel = new JLabel(); @@ -499,30 +509,22 @@ public final class MainWindow implements Runnable, RipStatusHandler { configSaveDirLabel.setToolTipText(configSaveDirLabel.getText()); configSaveDirLabel.setHorizontalAlignment(JLabel.RIGHT); configSaveDirButton = new JButton("Select Save Directory..."); - gbc.gridy = 0; gbc.gridx = 0; configurationPanel.add(configUpdateLabel, gbc); - gbc.gridx = 1; configurationPanel.add(configUpdateButton, gbc); - gbc.gridy = 1; gbc.gridx = 0; configurationPanel.add(configAutoupdateCheckbox, gbc); - gbc.gridx = 1; configurationPanel.add(configLogLevelCombobox, gbc); - gbc.gridy = 2; gbc.gridx = 0; configurationPanel.add(configThreadsLabel, gbc); - gbc.gridx = 1; configurationPanel.add(configThreadsText, gbc); - gbc.gridy = 3; gbc.gridx = 0; configurationPanel.add(configTimeoutLabel, gbc); - gbc.gridx = 1; configurationPanel.add(configTimeoutText, gbc); - gbc.gridy = 4; gbc.gridx = 0; configurationPanel.add(configRetriesLabel, gbc); - gbc.gridx = 1; configurationPanel.add(configRetriesText, gbc); - gbc.gridy = 5; gbc.gridx = 0; configurationPanel.add(configOverwriteCheckbox, gbc); - gbc.gridx = 1; configurationPanel.add(configSaveOrderCheckbox, gbc); - gbc.gridy = 6; gbc.gridx = 0; configurationPanel.add(configPlaySound, gbc); - gbc.gridx = 1; configurationPanel.add(configSaveLogs, gbc); - gbc.gridy = 7; gbc.gridx = 0; configurationPanel.add(configShowPopup, gbc); - gbc.gridx = 1; configurationPanel.add(configSaveURLsOnly, gbc); - gbc.gridy = 8; gbc.gridx = 0; configurationPanel.add(configClipboardAutorip, gbc); - gbc.gridx = 1; configurationPanel.add(configSaveAlbumTitles, gbc); - gbc.gridy = 9; gbc.gridx = 0; configurationPanel.add(configSaveDescriptions, gbc); - gbc.gridx = 1; configurationPanel.add(configPreferMp4, gbc); - gbc.gridy = 10; gbc.gridx = 0; configurationPanel.add(configWindowPosition, gbc); - gbc.gridx = 1; configurationPanel.add(configURLHistoryCheckbox, gbc); - gbc.gridy = 11; gbc.gridx = 0; 
configurationPanel.add(configSaveDirLabel, gbc); - gbc.gridx = 1; configurationPanel.add(configSaveDirButton, gbc); + + addItemToConfigGridBagConstraints(gbc, 0, configUpdateLabel, configUpdateButton); + addItemToConfigGridBagConstraints(gbc, 1, configAutoupdateCheckbox, configLogLevelCombobox); + addItemToConfigGridBagConstraints(gbc, 2, configThreadsLabel, configThreadsText); + addItemToConfigGridBagConstraints(gbc, 3, configTimeoutLabel, configTimeoutText); + addItemToConfigGridBagConstraints(gbc, 4, configRetriesLabel, configRetriesText); + addItemToConfigGridBagConstraints(gbc, 5, configOverwriteCheckbox, configSaveOrderCheckbox); + addItemToConfigGridBagConstraints(gbc, 6, configPlaySound, configSaveLogs); + addItemToConfigGridBagConstraints(gbc, 7, configShowPopup, configSaveURLsOnly); + addItemToConfigGridBagConstraints(gbc, 8, configClipboardAutorip, configSaveAlbumTitles); + addItemToConfigGridBagConstraints(gbc, 9, configSaveDescriptions, configPreferMp4); + addItemToConfigGridBagConstraints(gbc, 10, configWindowPosition, configURLHistoryCheckbox); + addItemToConfigGridBagConstraints(gbc, 11, configSelectLangComboBox); + addItemToConfigGridBagConstraints(gbc, 12, configSaveDirLabel, configSaveDirButton); + + emptyPanel = new JPanel(); @@ -545,6 +547,56 @@ public final class MainWindow implements Runnable, RipStatusHandler { gbc.fill = GridBagConstraints.HORIZONTAL; } + private void addItemToConfigGridBagConstraints(GridBagConstraints gbc, int gbcYValue, JLabel thing1ToAdd, JButton thing2ToAdd ) { + gbc.gridy = gbcYValue; gbc.gridx = 0; configurationPanel.add(thing1ToAdd, gbc); + gbc.gridx = 1; configurationPanel.add(thing2ToAdd, gbc); + } + + private void addItemToConfigGridBagConstraints(GridBagConstraints gbc, int gbcYValue, JLabel thing1ToAdd, JTextField thing2ToAdd ) { + gbc.gridy = gbcYValue; gbc.gridx = 0; configurationPanel.add(thing1ToAdd, gbc); + gbc.gridx = 1; configurationPanel.add(thing2ToAdd, gbc); + } + + private void addItemToConfigGridBagConstraints(GridBagConstraints gbc, int gbcYValue, JCheckBox thing1ToAdd, JCheckBox thing2ToAdd ) { + gbc.gridy = gbcYValue; gbc.gridx = 0; configurationPanel.add(thing1ToAdd, gbc); + gbc.gridx = 1; configurationPanel.add(thing2ToAdd, gbc); + } + + private void addItemToConfigGridBagConstraints(GridBagConstraints gbc, int gbcYValue, JCheckBox thing1ToAdd, JComboBox thing2ToAdd ) { + gbc.gridy = gbcYValue; gbc.gridx = 0; configurationPanel.add(thing1ToAdd, gbc); + gbc.gridx = 1; configurationPanel.add(thing2ToAdd, gbc); + } + + private void addItemToConfigGridBagConstraints(GridBagConstraints gbc, int gbcYValue, JComboBox thing1ToAdd ) { + gbc.gridy = gbcYValue; gbc.gridx = 0; configurationPanel.add(thing1ToAdd, gbc); + } + + private void changeLocale() { + statusLabel.setText(rb.getString("inactive")); + configUpdateButton.setText(rb.getString("check.for.updates")); + configUpdateLabel.setText(rb.getString("current.version") + ": " + UpdateUtils.getThisJarVersion()); + configThreadsLabel.setText(rb.getString("max.download.threads")); + configTimeoutLabel.setText(rb.getString("timeout.mill")); + configRetriesLabel.setText(rb.getString("retry.download.count")); + configOverwriteCheckbox.setText(rb.getString("overwrite.existing.files")); + configAutoupdateCheckbox.setText(rb.getString("auto.update")); + configPlaySound.setText(rb.getString("sound.when.rip.completes")); + configShowPopup.setText(rb.getString("notification.when.rip.starts")); + configSaveOrderCheckbox.setText(rb.getString("preserve.order")); + 
configSaveLogs.setText(rb.getString("save.logs")); + configSaveURLsOnly.setText(rb.getString("save.urls.only")); + configSaveAlbumTitles.setText(rb.getString("save.album.titles")); + configClipboardAutorip.setText(rb.getString("autorip.from.clipboard")); + configSaveDescriptions.setText(rb.getString("save.descriptions")); + configPreferMp4.setText(rb.getString("prefer.mp4.over.gif")); + configWindowPosition.setText(rb.getString("restore.window.position")); + configURLHistoryCheckbox.setText(rb.getString("remember.url.history")); + optionLog.setText(rb.getString("Log")); + optionHistory.setText(rb.getString("History")); + optionQueue.setText(rb.getString("Queue")); + optionConfiguration.setText(rb.getString("Configuration")); + } + private void setupHandlers() { ripButton.addActionListener(new RipButtonHandler()); ripTextfield.addActionListener(new RipButtonHandler()); @@ -734,13 +786,18 @@ public final class MainWindow implements Runnable, RipStatusHandler { } }); configUpdateButton.addActionListener(arg0 -> { - Thread t = new Thread(() -> UpdateUtils.updateProgram(configUpdateLabel)); + Thread t = new Thread(() -> UpdateUtils.updateProgramGUI(configUpdateLabel)); t.start(); }); configLogLevelCombobox.addActionListener(arg0 -> { String level = ((JComboBox) arg0.getSource()).getSelectedItem().toString(); setLogLevel(level); }); + configSelectLangComboBox.addActionListener(arg0 -> { + String level = ((JComboBox) arg0.getSource()).getSelectedItem().toString(); + rb = Utils.getResourceBundle(level); + changeLocale(); + }); configSaveDirLabel.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { @@ -892,7 +949,7 @@ public final class MainWindow implements Runnable, RipStatusHandler { new ImageIcon(mainIcon)); if (response == JOptionPane.YES_OPTION) { try { - Desktop.getDesktop().browse(URI.create("http://github.com/4pr0n/ripme")); + Desktop.getDesktop().browse(URI.create("http://github.com/ripmeapp/ripme")); } catch (IOException e) { logger.error("Exception while opening project home page", e); } @@ -982,7 +1039,7 @@ public final class MainWindow implements Runnable, RipStatusHandler { } else { logger.info(rb.getString("loading.history.from.configuration")); HISTORY.fromList(Utils.getConfigList("download.history")); - if (HISTORY.toList().size() == 0) { + if (HISTORY.toList().isEmpty()) { // Loaded from config, still no entries. 
// Guess rip history based on rip folder String[] dirs = Utils.getWorkingDirectory().list((dir, file) -> new File(dir.getAbsolutePath() + File.separator + file).isDirectory()); diff --git a/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java b/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java index 38e88ae7..95af45c6 100644 --- a/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java +++ b/src/main/java/com/rarchives/ripme/ui/UpdateUtils.java @@ -1,13 +1,12 @@ package com.rarchives.ripme.ui; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileOutputStream; -import java.io.FileWriter; -import java.io.IOException; +import java.io.*; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import javax.swing.JLabel; import javax.swing.JOptionPane; +import javax.xml.bind.annotation.adapters.HexBinaryAdapter; import org.apache.log4j.Logger; import org.json.JSONArray; @@ -21,11 +20,12 @@ import com.rarchives.ripme.utils.Utils; public class UpdateUtils { private static final Logger logger = Logger.getLogger(UpdateUtils.class); - private static final String DEFAULT_VERSION = "1.7.45"; + private static final String DEFAULT_VERSION = "1.7.50"; private static final String REPO_NAME = "ripmeapp/ripme"; private static final String updateJsonURL = "https://raw.githubusercontent.com/" + REPO_NAME + "/master/ripme.json"; private static final String mainFileName = "ripme.jar"; private static final String updateFileName = "ripme.jar.update"; + private static JSONObject ripmeJson; private static String getUpdateJarURL(String latestVersion) { return "https://github.com/" + REPO_NAME + "/releases/download/" + latestVersion + "/ripme.jar"; @@ -39,8 +39,57 @@ public class UpdateUtils { } return thisVersion; } + public static void updateProgramCLI() { + logger.info("Checking for update..."); - public static void updateProgram(JLabel configUpdateLabel) { + Document doc = null; + try { + logger.debug("Retrieving " + UpdateUtils.updateJsonURL); + doc = Jsoup.connect(UpdateUtils.updateJsonURL) + .timeout(10 * 1000) + .ignoreContentType(true) + .get(); + } catch (IOException e) { + logger.error("Error while fetching update: ", e); + JOptionPane.showMessageDialog(null, + "Error while fetching update: " + e.getMessage() + "", + "RipMe Updater", + JOptionPane.ERROR_MESSAGE); + return; + } finally { + logger.info("Current version: " + getThisJarVersion()); + } + String jsonString = doc.body().html().replaceAll(""", "\""); + ripmeJson = new JSONObject(jsonString); + JSONArray jsonChangeList = ripmeJson.getJSONArray("changeList"); + StringBuilder changeList = new StringBuilder(); + for (int i = 0; i < jsonChangeList.length(); i++) { + String change = jsonChangeList.getString(i); + if (change.startsWith(UpdateUtils.getThisJarVersion() + ":")) { + break; + } + changeList.append("
+ ").append(change); + } + + String latestVersion = ripmeJson.getString("latestVersion"); + if (UpdateUtils.isNewerVersion(latestVersion)) { + logger.info("Found newer version: " + latestVersion); + logger.info("Downloading new version..."); + logger.info("New version found, downloading..."); + try { + UpdateUtils.downloadJarAndLaunch(getUpdateJarURL(latestVersion), false); + } catch (IOException e) { + logger.error("Error while updating: ", e); + } + } else { + logger.debug("This version (" + UpdateUtils.getThisJarVersion() + + ") is the same or newer than the website's version (" + latestVersion + ")"); + logger.info("v" + UpdateUtils.getThisJarVersion() + " is the latest version"); + logger.debug("Running latest version: " + UpdateUtils.getThisJarVersion()); + } + } + + public static void updateProgramGUI(JLabel configUpdateLabel) { configUpdateLabel.setText("Checking for update..."); Document doc = null; @@ -61,8 +110,8 @@ public class UpdateUtils { configUpdateLabel.setText("Current version: " + getThisJarVersion()); } String jsonString = doc.body().html().replaceAll(""", "\""); - JSONObject json = new JSONObject(jsonString); - JSONArray jsonChangeList = json.getJSONArray("changeList"); + ripmeJson = new JSONObject(jsonString); + JSONArray jsonChangeList = ripmeJson.getJSONArray("changeList"); StringBuilder changeList = new StringBuilder(); for (int i = 0; i < jsonChangeList.length(); i++) { String change = jsonChangeList.getString(i); @@ -72,7 +121,7 @@ public class UpdateUtils { changeList.append("
+ ").append(change); } - String latestVersion = json.getString("latestVersion"); + String latestVersion = ripmeJson.getString("latestVersion"); if (UpdateUtils.isNewerVersion(latestVersion)) { logger.info("Found newer version: " + latestVersion); int result = JOptionPane.showConfirmDialog( @@ -90,7 +139,7 @@ public class UpdateUtils { configUpdateLabel.setText("Downloading new version..."); logger.info("New version found, downloading..."); try { - UpdateUtils.downloadJarAndLaunch(getUpdateJarURL(latestVersion)); + UpdateUtils.downloadJarAndLaunch(getUpdateJarURL(latestVersion), true); } catch (IOException e) { JOptionPane.showMessageDialog(null, "Error while updating: " + e.getMessage(), @@ -141,7 +190,32 @@ public class UpdateUtils { return intVersions; } - private static void downloadJarAndLaunch(String updateJarURL) + // Code taken from https://stackoverflow.com/a/30925550 + private static String createSha256(File file) { + try { + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + InputStream fis = new FileInputStream(file); + int n = 0; + byte[] buffer = new byte[8192]; + while (n != -1) { + n = fis.read(buffer); + if (n > 0) { + digest.update(buffer, 0, n); + } + } + // As patch.py writes the hash in lowercase this must return the hash in lowercase + return new HexBinaryAdapter().marshal(digest.digest()).toLowerCase(); + } catch (NoSuchAlgorithmException e) { + logger.error("Got error getting file hash " + e.getMessage()); + } catch (FileNotFoundException e) { + logger.error("Could not find file: " + file.getName()); + } catch (IOException e) { + logger.error("Got error getting file hash " + e.getMessage()); + } + return null; + } + + private static void downloadJarAndLaunch(String updateJarURL, Boolean shouldLaunch) throws IOException { Response response; response = Jsoup.connect(updateJarURL) @@ -149,60 +223,77 @@ public class UpdateUtils { .timeout(Utils.getConfigInteger("download.timeout", 60 * 1000)) .maxBodySize(1024 * 1024 * 100) .execute(); - FileOutputStream out = new FileOutputStream(updateFileName); - out.write(response.bodyAsBytes()); - out.close(); + + try (FileOutputStream out = new FileOutputStream(updateFileName)) { + out.write(response.bodyAsBytes()); + } + String updateHash = createSha256(new File(updateFileName)); logger.info("Download of new version complete; saved to " + updateFileName); + logger.info("Checking hash of update"); - // Setup updater script - final String batchFile, script; - final String[] batchExec; - String os = System.getProperty("os.name").toLowerCase(); - if (os.contains("win")) { - // Windows - batchFile = "update_ripme.bat"; - String batchPath = new File(batchFile).getAbsolutePath(); - script = "@echo off\r\n" - + "timeout 1" + "\r\n" - + "copy " + updateFileName + " " + mainFileName + "\r\n" - + "del " + updateFileName + "\r\n" - + "ripme.jar" + "\r\n" - + "del " + batchPath + "\r\n"; - batchExec = new String[] { batchPath }; + if (!ripmeJson.getString("currentHash").equals(updateHash)) { + logger.error("Error: Update has bad hash"); + logger.debug("Expected hash: " + ripmeJson.getString("currentHash")); + logger.debug("Actual hash: " + updateHash); + throw new IOException("Got bad file hash"); + } else { + logger.info("Hash is good"); + } + if (shouldLaunch) { + // Setup updater script + final String batchFile, script; + final String[] batchExec; + String os = System.getProperty("os.name").toLowerCase(); + if (os.contains("win")) { + // Windows + batchFile = "update_ripme.bat"; + String batchPath = new
File(batchFile).getAbsolutePath(); + script = "@echo off\r\n" + + "timeout 1" + "\r\n" + + "copy " + updateFileName + " " + mainFileName + "\r\n" + + "del " + updateFileName + "\r\n" + + "ripme.jar" + "\r\n" + + "del " + batchPath + "\r\n"; + batchExec = new String[]{batchPath}; - } - else { - // Mac / Linux - batchFile = "update_ripme.sh"; - String batchPath = new File(batchFile).getAbsolutePath(); - script = "#!/bin/sh\n" - + "sleep 1" + "\n" - + "cd " + new File(mainFileName).getAbsoluteFile().getParent() + "\n" - + "cp -f " + updateFileName + " " + mainFileName + "\n" - + "rm -f " + updateFileName + "\n" - + "java -jar \"" + new File(mainFileName).getAbsolutePath() + "\" &\n" - + "sleep 1" + "\n" - + "rm -f " + batchPath + "\n"; - batchExec = new String[] { "sh", batchPath }; - } - // Create updater script - BufferedWriter bw = new BufferedWriter(new FileWriter(batchFile)); - bw.write(script); - bw.flush(); - bw.close(); - logger.info("Saved update script to " + batchFile); - // Run updater script on exit - Runtime.getRuntime().addShutdownHook(new Thread(() -> { - try { - logger.info("Executing: " + batchFile); - Runtime.getRuntime().exec(batchExec); - } catch (IOException e) { - //TODO implement proper stack trace handling this is really just intented as a placeholder until you implement proper error handling - e.printStackTrace(); + } else { + // Mac / Linux + batchFile = "update_ripme.sh"; + String batchPath = new File(batchFile).getAbsolutePath(); + script = "#!/bin/sh\n" + + "sleep 1" + "\n" + + "cd " + new File(mainFileName).getAbsoluteFile().getParent() + "\n" + + "cp -f " + updateFileName + " " + mainFileName + "\n" + + "rm -f " + updateFileName + "\n" + + "java -jar \"" + new File(mainFileName).getAbsolutePath() + "\" &\n" + + "sleep 1" + "\n" + + "rm -f " + batchPath + "\n"; + batchExec = new String[]{"sh", batchPath}; } - })); - logger.info("Exiting older version, should execute update script (" + batchFile + ") during exit"); - System.exit(0); + + // Create updater script + try (BufferedWriter bw = new BufferedWriter(new FileWriter(batchFile))) { + bw.write(script); + bw.flush(); + } + + logger.info("Saved update script to " + batchFile); + // Run updater script on exit + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + try { + logger.info("Executing: " + batchFile); + Runtime.getRuntime().exec(batchExec); + } catch (IOException e) { + //TODO implement proper stack trace handling this is really just intented as a placeholder until you implement proper error handling + e.printStackTrace(); + } + })); + logger.info("Exiting older version, should execute update script (" + batchFile + ") during exit"); + System.exit(0); + } else { + new File(mainFileName).delete(); + new File(updateFileName).renameTo(new File(mainFileName)); + } } } diff --git a/src/main/java/com/rarchives/ripme/utils/Utils.java b/src/main/java/com/rarchives/ripme/utils/Utils.java index cd1048df..e773a305 100644 --- a/src/main/java/com/rarchives/ripme/utils/Utils.java +++ b/src/main/java/com/rarchives/ripme/utils/Utils.java @@ -89,9 +89,25 @@ public class Utils { return workingDir; } + /** + * Gets the value of a specific config key. + * + * @param key The name of the config parameter you want to find. + * @param defaultValue What the default value would be. 
+ */ public static String getConfigString(String key, String defaultValue) { return config.getString(key, defaultValue); } + + public static String[] getConfigStringArray(String key) { + String[] s = config.getStringArray(key); + if (s.length == 0) { + return null; + } else { + return s; + } + } + public static int getConfigInteger(String key, int defaultValue) { return config.getInt(key, defaultValue); } @@ -132,31 +148,53 @@ public class Utils { } } + /** + * Determines if your current system is a Windows system. + */ private static boolean isWindows() { return OS.contains("win"); } + /** + * Determines if your current system is a Mac system + */ private static boolean isMacOS() { return OS.contains("mac"); } + /** + * Determines if current system is based on UNIX + */ private static boolean isUnix() { return OS.contains("nix") || OS.contains("nux") || OS.contains("bsd"); } + /** + * Gets the directory of where the config file is stored on a Windows machine. + */ private static String getWindowsConfigDir() { return System.getenv("LOCALAPPDATA") + File.separator + "ripme"; } + + /** + * Gets the directory of where the config file is stored on a UNIX machine. + */ private static String getUnixConfigDir() { return System.getProperty("user.home") + File.separator + ".config" + File.separator + "ripme"; } - + + /** + * Gets the directory of where the config file is stored on a Mac machine. + */ private static String getMacOSConfigDir() { return System.getProperty("user.home") + File.separator + "Library" + File.separator + "Application Support" + File.separator + "ripme"; } + /** + * Determines if the app is running in a portable mode. i.e. on a USB stick + */ private static boolean portableMode() { try { File f = new File(new File(".").getCanonicalPath() + File.separator + configFile); @@ -169,7 +207,9 @@ public class Utils { return false; } - + /** + * Gets the directory of the config directory, for all systems. + */ public static String getConfigDir() { if (portableMode()) { try { @@ -189,17 +229,24 @@ public class Utils { return "."; } } - // Delete the url history file + /** + * Delete the url history file + */ public static void clearURLHistory() { File file = new File(getURLHistoryFile()); file.delete(); } - // Return the path of the url history file + /** + * Return the path of the url history file + */ public static String getURLHistoryFile() { return getConfigDir() + File.separator + "url_history.txt"; } + /** + * Gets the path to the configuration file. + */ private static String getConfigFilePath() { return getConfigDir() + File.separator + configFile; } @@ -225,6 +272,15 @@ public class Utils { return prettySaveAs; } + /** + * Strips away URL parameters, which usually appear at the end of URLs. + * E.g. the ?query on PHP + * + * @param url The URL to filter/strip + * @param parameter The parameter to strip + * + * @return The stripped URL + */ public static String stripURLParameter(String url, String parameter) { int paramIndex = url.indexOf("?" + parameter); boolean wasFirstParam = true; @@ -252,6 +308,7 @@ public class Utils { /** * Removes the current working directory from a given filename * @param file + * Path to the file * @return * 'file' without the leading current working directory */ @@ -335,9 +392,24 @@ public class Utils { } private static final int SHORTENED_PATH_LENGTH = 12; + /** + * Shortens the path to a file + * @param path + * String of the path to the file + * @return + * The simplified path to the file. 
+ */ public static String shortenPath(String path) { return shortenPath(new File(path)); } + + /** + * Shortens the path to a file + * @param file + * File object that you want the shortened path of. + * @return + * The simplified path to the file. + */ public static String shortenPath(File file) { String path = removeCWD(file); if (path.length() < SHORTENED_PATH_LENGTH * 2) { @@ -348,6 +420,13 @@ public class Utils { + path.substring(path.length() - SHORTENED_PATH_LENGTH); } + /** + * Sanitizes a string so that a filesystem can handle it. + * @param text + * The text to be sanitized. + * @return + * The sanitized text. + */ public static String filesystemSanitized(String text) { text = text.replaceAll("[^a-zA-Z0-9.-]", "_"); return text; @@ -397,6 +476,13 @@ public class Utils { return original; } + /** + * Converts a byte count into a human-readable string. + * @param bytes + * The number of bytes. + * @return + * The human-readable size, e.g. 1.50MiB. + */ public static String bytesToHumanReadable(int bytes) { float fbytes = (float) bytes; String[] mags = new String[] {"", "K", "M", "G", "T"}; @@ -408,6 +494,10 @@ public class Utils { return String.format("%.2f%siB", fbytes, mags[magIndex]); } + /** + * Gets and returns a list of all the album rippers present in the "com.rarchives.ripme.ripper.rippers" package. + * @return List of all album rippers present. + */ public static List getListOfAlbumRippers() throws Exception { List list = new ArrayList<>(); for (Constructor ripper : AbstractRipper.getRipperConstructors("com.rarchives.ripme.ripper.rippers")) { @@ -415,6 +505,11 @@ public class Utils { } return list; } + + /** + * Gets and returns a list of all video rippers present in the "com.rarchives.ripme.ripper.rippers.video" package. + * @return List of all the video rippers. + */ public static List getListOfVideoRippers() throws Exception { List list = new ArrayList<>(); for (Constructor ripper : AbstractRipper.getRipperConstructors("com.rarchives.ripme.ripper.rippers.video")) { @@ -423,6 +518,11 @@ public class Utils { return list; } + /** + * Plays a sound from a file. + * @param filename + * Path to the sound file + */ public static void playSound(String filename) { URL resource = ClassLoader.getSystemClassLoader().getResource(filename); try { @@ -560,6 +660,9 @@ public class Utils { cookieCache = new HashMap>(); } + /** + * Gets all the cookies stored for a given host. + */ public static Map getCookies(String host) { HashMap domainCookies = cookieCache.get(host); if (domainCookies == null) { @@ -577,18 +680,30 @@ public class Utils { return domainCookies; } - public static ResourceBundle getResourceBundle() { - if (!getConfigString("lang", "").equals("")) { - String[] langCode = getConfigString("lang", "").split("_"); - logger.info("Setting locale to " + getConfigString("lang", "")); + /** + * Gets the ResourceBundle, i.e. the UI language pack. + * Used for choosing the language of the UI. + * @param langSelect A language_COUNTRY code such as fi_FI, or null to use the language from the config file. + * @return The resource bundle for the selected language, falling back to the default locale and then the root bundle. 
+ */ + public static ResourceBundle getResourceBundle(String langSelect) { + if (langSelect == null) { + if (!getConfigString("lang", "").equals("")) { + String[] langCode = getConfigString("lang", "").split("_"); + logger.info("Setting locale to " + getConfigString("lang", "")); + return ResourceBundle.getBundle("LabelsBundle", new Locale(langCode[0], langCode[1]), new UTF8Control()); + } + } else { + String[] langCode = langSelect.split("_"); + logger.info("Setting locale to " + langSelect); return ResourceBundle.getBundle("LabelsBundle", new Locale(langCode[0], langCode[1]), new UTF8Control()); } try { - ResourceBundle rb = ResourceBundle.getBundle("LabelsBundle", Locale.getDefault(), new UTF8Control()); - return rb; + logger.info("Setting locale to default"); + return ResourceBundle.getBundle("LabelsBundle", Locale.getDefault(), new UTF8Control()); } catch (MissingResourceException e) { - ResourceBundle rb = ResourceBundle.getBundle("LabelsBundle", Locale.ROOT); - return rb; + logger.info("Setting locale to root"); + return ResourceBundle.getBundle("LabelsBundle", Locale.ROOT); } } } diff --git a/src/main/resources/LabelsBundle.properties b/src/main/resources/LabelsBundle.properties index 3a42ab0c..f991d02f 100644 --- a/src/main/resources/LabelsBundle.properties +++ b/src/main/resources/LabelsBundle.properties @@ -10,7 +10,7 @@ Configuration = Configuration current.version = Current version check.for.updates = Check for updates auto.update = Auto-update? -max.download.threads = Maximum download threads +max.download.threads = Maximum download threads: timeout.mill = Timeout (in milliseconds): retry.download.count = Retry download count overwrite.existing.files = Overwrite existing files? diff --git a/src/main/resources/LabelsBundle_en_US.properties b/src/main/resources/LabelsBundle_en_US.properties new file mode 100644 index 00000000..f991d02f --- /dev/null +++ b/src/main/resources/LabelsBundle_en_US.properties @@ -0,0 +1,37 @@ +Log = Log +History = History +created = created +modified = modified +Queue = Queue +Configuration = Configuration + +# Keys for the Configuration menu + +current.version = Current version +check.for.updates = Check for updates +auto.update = Auto-update? +max.download.threads = Maximum download threads: +timeout.mill = Timeout (in milliseconds): +retry.download.count = Retry download count +overwrite.existing.files = Overwrite existing files? 
+sound.when.rip.completes = Sound when rip completes +preserve.order = Preserve order +save.logs = Save logs +notification.when.rip.starts = Notification when rip starts +save.urls.only = Save URLs only +save.album.titles = Save album titles +autorip.from.clipboard = Autorip from Clipboard +save.descriptions = Save descriptions +prefer.mp4.over.gif = Prefer MP4 over GIF +restore.window.position = Restore window position +remember.url.history = Remember URL history +loading.history.from = Loading history from + +# Misc UI keys + +loading.history.from.configuration = Loading history from configuration +interrupted.while.waiting.to.rip.next.album = Interrupted while waiting to rip next album +inactive = Inactive +re-rip.checked = Re-rip Checked +remove = Remove +clear = Clear \ No newline at end of file diff --git a/src/main/resources/LabelsBundle_fi_FI.properties b/src/main/resources/LabelsBundle_fi_FI.properties new file mode 100644 index 00000000..89818706 --- /dev/null +++ b/src/main/resources/LabelsBundle_fi_FI.properties @@ -0,0 +1,37 @@ +Log = Logi +History = Historia +created = luotu +modified = muokattu +Queue = Jono +Configuration = Asetukset + +# Keys for the Configuration menu + +current.version = Nykyinen versio +check.for.updates = Tarkista päivitykset +auto.update = Automaattipäivitys? +max.download.threads = Yhtäaikaiset lataukset +timeout.mill = Aikakatkaisu (millisekunneissa): +retry.download.count = Latauksen uudelleenyritykset +overwrite.existing.files = Korvaa nykyiset tiedostot? +sound.when.rip.completes = Valmistumisääni +preserve.order = Pidä järjestys +save.logs = Tallenna logit +notification.when.rip.starts = Valmistumisilmoitus +save.urls.only = Tallenna vain osoitteet +save.album.titles = Tallenna albumiotsikot +autorip.from.clipboard = Ota linkit leikepöydältä +save.descriptions = Tallenna kuvaukset +prefer.mp4.over.gif = Suosi MP4:jää GIF:fin sijasta +restore.window.position = Palauta ikkunan sijainti +remember.url.history = Muista osoitehistoria +loading.history.from = Ladataan historiaa kohteesta + +# Misc UI keys + +loading.history.from.configuration = Ladataan historiaa asetustiedostosta +interrupted.while.waiting.to.rip.next.album = Keskeytetty odottaessa seuraavaa albumia +inactive = Toimeton +re-rip.checked = Uudelleenlataa merkatut +remove = Poista +clear = Tyhjennä diff --git a/src/main/resources/LabelsBundle_in_ID.properties b/src/main/resources/LabelsBundle_in_ID.properties new file mode 100644 index 00000000..6abc7626 --- /dev/null +++ b/src/main/resources/LabelsBundle_in_ID.properties @@ -0,0 +1,37 @@ +Log = Log +History = Riwayat +created = dibuat pada +modified = diubah pada +Queue = Antrian +Configuration = Pengaturan + +# Keys for the Configuration menu + +current.version = Versi terbaru +check.for.updates = Periksa update +auto.update = Update otomatis? +max.download.threads = Thread unduh maksimal +timeout.mill = Batas waktu (dalam milidetik): +retry.download.count = Jumlah percobaan unduh +overwrite.existing.files = Timpa file yang ada? 
+sound.when.rip.completes = Hidupkan suara saat rip selesai +preserve.order = Pertahankan urutan +save.logs = Simpan log +notification.when.rip.starts = Pemberitahuan saat rip dimulai +save.urls.only = Simpan URL saja +save.album.titles = Simpan judul album +autorip.from.clipboard = Rip otomatis dari clipboard +save.descriptions = Simpan deskripsi +prefer.mp4.over.gif = Utamakan MP4 dari GIF +restore.window.position = Kembalikan ukuran Window +remember.url.history = Ingat riwayat URL +loading.history.from = Ambil riwayat dari + +# Misc UI keys + +loading.history.from.configuration = Ambil riwayat dari pengaturan +interrupted.while.waiting.to.rip.next.album = Terputus saat menunggu rip album selanjutnya +inactive = Tidak aktif +re-rip.checked = Rip Ulang +remove = Hapus +clear = Hapus Semua diff --git a/src/main/resources/LabelsBundle_kr_KR.properties b/src/main/resources/LabelsBundle_kr_KR.properties new file mode 100644 index 00000000..03f23e8a --- /dev/null +++ b/src/main/resources/LabelsBundle_kr_KR.properties @@ -0,0 +1,37 @@ +Log = \uB85C\uADF8 +History = \uD788\uC2A4\uD1A0\uB9AC +created = \uC0DD\uC0B0\uB428 +modified = \uC218\uC815\uB428 +Queue = \uB300\uAE30\uC5F4 +Configuration = \uAD6C\uC131 + +# Keys for the Configuration menu + +current.version = \uD604\uC7AC \uBC84\uC804 +check.for.updates = \uC5C5\uB370\uC774\uD2B8 \uD655\uC778 +auto.update = \uC790\uB3D9 \uC5C5\uB370\uC774\uD2B8 +max.download.threads = \uCD5C\uB300 \uB2E4\uC6B4\uB85C\uB4DC \uC4F0\uB808\uB4DC \uC218 +timeout.mill = \uC2DC\uAC04 \uC81C\uD55C (\uBC00\uB9AC\uCD08): +retry.download.count = \uB2E4\uC6B4\uB85C\uB4DC \uC7AC\uC2DC\uB3C4 \uD68C\uC218 +overwrite.existing.files = \uC911\uBCF5\uD30C\uC77C \uB36E\uC5B4\uC4F0\uAE30 +sound.when.rip.completes = \uC644\uB8CC\uC2DC \uC54C\uB9BC +preserve.order = \uBA85\uB839 \uAE30\uC5B5\uD558\uAE30 +save.logs = \uB85C\uADF8 \uC800\uC7A5 +notification.when.rip.starts = \uC2DC\uC791\uC2DC \uC54C\uB9BC +save.urls.only = URL\uB9CC \uC800\uC7A5\uD558\uAE30 +save.album.titles = \uC568\uBC94 \uC81C\uBAA9 \uC800\uC7A5 +autorip.from.clipboard = \uD074\uB9BD\uBCF4\uB4DC\uC5D0\uC11C \uC790\uB3D9\uC73C\uB85C \uAC00\uC838\uC624\uAE30 +save.descriptions = \uC568\uBC94 \uC124\uBA85 \uC800\uC7A5 +prefer.mp4.over.gif = GIF\uBCF4\uB2E4 MP4 \uC120\uD638 +restore.window.position = \uCC3D \uC704\uCE58 \uBCF5\uC6D0 +remember.url.history = URL \uD788\uC2A4\uD1A0\uB9AC \uAE30\uC5B5\uD558\uAE30 +loading.history.from = \uD788\uC2A4\uD1A0\uB9AC \uAC00\uC838\uC624\uAE30 + +# Misc UI keys + +loading.history.from.configuration = \uAD6C\uC131\uC5D0\uC11C \uD788\uC2A4\uD1A0\uB9AC \uBD88\uB7EC\uC624\uAE30 +interrupted.while.waiting.to.rip.next.album = \uB2E4\uC74C \uC568\uBC94 \uBCF5\uC0AC\uB97C \uAE30\uB2E4\uB9AC\uB294\uB3D9\uC548 \uC911\uB2E8\uB428 +inactive = \uBE44\uD65C\uC131\uD654 +re-rip.checked = \uB2E4\uC2DC \uBCF5\uC0AC\uD558\uAE30 \uCCB4\uD06C\uB428 +remove = \uC120\uD0DD\uD55C \uAE30\uB85D \uC0AD\uC81C +clear = \uD788\uC2A4\uD1A0\uB9AC \uBAA8\uB450 \uC0AD\uC81C diff --git a/src/main/resources/LabelsBundle_porrisavvo_FI.properties b/src/main/resources/LabelsBundle_porrisavvo_FI.properties new file mode 100644 index 00000000..fc9446c8 --- /dev/null +++ b/src/main/resources/LabelsBundle_porrisavvo_FI.properties @@ -0,0 +1,37 @@ +Log = Loki +History = Historriijja +created = luatu +modified = muakat +Queue = Jono +Configuration = Assetuksse + +# Keys for the Configuration menu + +current.version = Nykyne versijjo +check.for.updates = Tarkist update +auto.update = Automaatpäivvitys? 
+max.download.threads = Yht'aikasse ripi +timeout.mill = Timeout (millisekois): +retry.download.count = Ripi retry count +overwrite.existing.files = Korvvaa nykysse filu? +sound.when.rip.completes = Valmistummis'ään +preserve.order = Pir järestys +save.logs = Tallen loki +notification.when.rip.starts = Valmistummisilmotus +save.urls.only = Tallen vaa ossottee +save.album.titles = Tallen album'otsiko +autorip.from.clipboard = Ot linki leikpöyrrält +save.descriptions = Tallen kuvvauksse +prefer.mp4.over.gif = Suasi MP4 GIF sijjaa +restore.window.position = Palaut ikkunna sijaant +remember.url.history = Muist osot'hissa +loading.history.from = Larrataa hissaa lähteest + +# Misc UI keys + +loading.history.from.configuration = Larrataa hissaa asetusfilust +interrupted.while.waiting.to.rip.next.album = Keskeytet venates seurraavvaa album +inactive = Idle +re-rip.checked = Re-rip merkatu +remove = Poist +clear = Tyhjen diff --git a/src/test/java/com/rarchives/ripme/tst/Base64Test.java b/src/test/java/com/rarchives/ripme/tst/Base64Test.java new file mode 100644 index 00000000..ffe2f3f8 --- /dev/null +++ b/src/test/java/com/rarchives/ripme/tst/Base64Test.java @@ -0,0 +1,11 @@ +package com.rarchives.ripme.tst; + +import junit.framework.TestCase; +import com.rarchives.ripme.utils.Base64; + +public class Base64Test extends TestCase { + + public void testDecode() { + assertEquals("test", new String(Base64.decode("dGVzdA=="))); + } +} diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/AerisdiesRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/AerisdiesRipperTest.java index 503db2c3..2e667787 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/AerisdiesRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/AerisdiesRipperTest.java @@ -16,5 +16,16 @@ public class AerisdiesRipperTest extends RippersTest { testRipper(ripper); } + public void testDjAlbum() throws IOException { + AerisdiesRipper ripper = new AerisdiesRipper(new URL("http://www.aerisdies.com/html/lb/douj_5230_1.html")); + testRipper(ripper); + } + + public void testGetGID() throws IOException { + URL url = new URL("http://www.aerisdies.com/html/lb/douj_5230_1.html"); + AerisdiesRipper ripper = new AerisdiesRipper(url); + assertEquals("5230", ripper.getGID(url)); + } + // TODO: Add a test for an album with a title. 
} diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/BatoRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/BatoRipperTest.java new file mode 100644 index 00000000..6bd8744a --- /dev/null +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/BatoRipperTest.java @@ -0,0 +1,25 @@ +package com.rarchives.ripme.tst.ripper.rippers; + +import java.io.IOException; +import java.net.URL; + +import com.rarchives.ripme.ripper.rippers.BatoRipper; + +public class BatoRipperTest extends RippersTest { + public void testRip() throws IOException { + BatoRipper ripper = new BatoRipper(new URL("https://bato.to/chapter/1207152")); + testRipper(ripper); + } + + public void testGetGID() throws IOException { + URL url = new URL("https://bato.to/chapter/1207152"); + BatoRipper ripper = new BatoRipper(url); + assertEquals("1207152", ripper.getGID(url)); + } + + public void testGetAlbumTitle() throws IOException { + URL url = new URL("https://bato.to/chapter/1207152"); + BatoRipper ripper = new BatoRipper(url); + assertEquals("bato_1207152_I_Messed_Up_by_Teaching_at_a_Black_Gyaru_School!_Ch.2", ripper.getAlbumTitle(url)); + } +} diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/DeviantartRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/DeviantartRipperTest.java index a3fdfd81..a3e6a9c8 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/DeviantartRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/DeviantartRipperTest.java @@ -16,4 +16,10 @@ public class DeviantartRipperTest extends RippersTest { DeviantartRipper ripper = new DeviantartRipper(new URL("http://faterkcx.deviantart.com/gallery/")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://airgee.deviantart.com/gallery/"); + DeviantartRipper ripper = new DeviantartRipper(url); + assertEquals("airgee", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/EhentaiRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/EhentaiRipperTest.java index 00a3f8b6..cdab6b73 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/EhentaiRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/EhentaiRipperTest.java @@ -10,4 +10,19 @@ public class EhentaiRipperTest extends RippersTest { EHentaiRipper ripper = new EHentaiRipper(new URL("https://e-hentai.org/g/1144492/e823bdf9a5/")); testRipper(ripper); } + + // Test the tag black listing + public void testTagBlackList() throws IOException { + URL url = new URL("https://e-hentai.org/g/1228503/1a2f455f96/"); + EHentaiRipper ripper = new EHentaiRipper(url); + // Test multiple blacklisted tags + String[] tags = {"test", "one", "yuri"}; + String blacklistedTag = ripper.checkTags(ripper.getFirstPage(), tags); + assertEquals("yuri", blacklistedTag); + + // test tags with spaces in them + String[] tags2 = {"test", "one", "midnight on mars"}; + blacklistedTag = ripper.checkTags(ripper.getFirstPage(), tags2); + assertEquals("midnight on mars", blacklistedTag); + } } \ No newline at end of file diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuraffinityRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuraffinityRipperTest.java index 4285993b..90d66ecd 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuraffinityRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/FuraffinityRipperTest.java @@ -10,4 +10,10 @@ public 
class FuraffinityRipperTest extends RippersTest { FuraffinityRipper ripper = new FuraffinityRipper(new URL("https://www.furaffinity.net/gallery/mustardgas/")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://www.furaffinity.net/gallery/mustardgas/"); + FuraffinityRipper ripper = new FuraffinityRipper(url); + assertEquals("mustardgas", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatporntubeRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatporntubeRipperTest.java index 6856eb06..e53c78e6 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatporntubeRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/GfycatporntubeRipperTest.java @@ -10,4 +10,10 @@ public class GfycatporntubeRipperTest extends RippersTest { GfycatporntubeRipper ripper = new GfycatporntubeRipper(new URL("https://gfycatporntube.com/blowjob-bunny-puts-on-a-show/")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://gfycatporntube.com/blowjob-bunny-puts-on-a-show/"); + GfycatporntubeRipper ripper = new GfycatporntubeRipper(url); + assertEquals("blowjob-bunny-puts-on-a-show", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Hentai2readRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Hentai2readRipperTest.java index b5765047..f142635d 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Hentai2readRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Hentai2readRipperTest.java @@ -7,9 +7,7 @@ import com.rarchives.ripme.ripper.rippers.Hentai2readRipper; public class Hentai2readRipperTest extends RippersTest { public void testHentai2readAlbum() throws IOException { - Hentai2readRipper ripper = new Hentai2readRipper(new URL("https://hentai2read.com/sm_school_memorial/")); - testRipper(ripper); - ripper = new Hentai2readRipper(new URL("https://hentai2read.com/sm_school_memorial/1/")); + Hentai2readRipper ripper = new Hentai2readRipper(new URL("https://hentai2read.com/sm_school_memorial/1/")); testRipper(ripper); } } \ No newline at end of file diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagevenueRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagevenueRipperTest.java index 135a7b0a..90d76442 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagevenueRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagevenueRipperTest.java @@ -10,4 +10,10 @@ public class ImagevenueRipperTest extends RippersTest { ImagevenueRipper ripper = new ImagevenueRipper(new URL("http://img120.imagevenue.com/galshow.php?gal=gallery_1373818527696_191lo")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://img120.imagevenue.com/galshow.php?gal=gallery_1373818527696_191lo"); + ImagevenueRipper ripper = new ImagevenueRipper(url); + assertEquals("gallery_1373818527696_191lo", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImgboxRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImgboxRipperTest.java index db8e1680..27ebdca2 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImgboxRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImgboxRipperTest.java @@ -10,4 +10,10 @@ public class ImgboxRipperTest extends 
RippersTest { ImgboxRipper ripper = new ImgboxRipper(new URL("https://imgbox.com/g/FJPF7t26FD")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://imgbox.com/g/FJPF7t26FD"); + ImgboxRipper ripper = new ImgboxRipper(url); + assertEquals("FJPF7t26FD", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/NhentaiRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/NhentaiRipperTest.java new file mode 100644 index 00000000..108feed2 --- /dev/null +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/NhentaiRipperTest.java @@ -0,0 +1,33 @@ +package com.rarchives.ripme.tst.ripper.rippers; + +import java.io.IOException; +import java.net.URL; + +import com.rarchives.ripme.ripper.rippers.NhentaiRipper; + +public class NhentaiRipperTest extends RippersTest { + public void testRip() throws IOException { + NhentaiRipper ripper = new NhentaiRipper(new URL("https://nhentai.net/g/233295/")); + testRipper(ripper); + } + + public void testGetGID() throws IOException { + NhentaiRipper ripper = new NhentaiRipper(new URL("https://nhentai.net/g/233295/")); + assertEquals("233295", ripper.getGID(new URL("https://nhentai.net/g/233295/"))); + } + + // Test the tag black listing + public void testTagBlackList() throws IOException { + URL url = new URL("https://nhentai.net/g/233295/"); + NhentaiRipper ripper = new NhentaiRipper(url); + // Test multiple blacklisted tags + String[] tags = {"test", "one", "blowjob"}; + String blacklistedTag = ripper.checkTags(ripper.getFirstPage(), tags); + assertEquals("blowjob", blacklistedTag); + + // test tags with spaces in them + String[] tags2 = {"test", "one", "sole female"}; + blacklistedTag = ripper.checkTags(ripper.getFirstPage(), tags2); + assertEquals("sole female", blacklistedTag); + } +} diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/PornhubRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/PornhubRipperTest.java index 96685cbc..74bee8d9 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/PornhubRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/PornhubRipperTest.java @@ -10,4 +10,10 @@ public class PornhubRipperTest extends RippersTest { PornhubRipper ripper = new PornhubRipper(new URL("https://www.pornhub.com/album/15680522")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://www.pornhub.com/album/15680522"); + PornhubRipper ripper = new PornhubRipper(url); + assertEquals("15680522", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Rule34RipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Rule34RipperTest.java index cffc807d..4c63d66e 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Rule34RipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/Rule34RipperTest.java @@ -11,4 +11,10 @@ public class Rule34RipperTest extends RippersTest { testRipper(ripper); } + public void testGetGID() throws IOException { + URL url = new URL("https://rule34.xxx/index.php?page=post&s=list&tags=bimbo"); + Rule34Ripper ripper = new Rule34Ripper(url); + assertEquals("bimbo", ripper.getGID(url)); + } + } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SankakuComplexRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SankakuComplexRipperTest.java index 6bbc8890..5b57e291 100644 --- 
a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SankakuComplexRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SankakuComplexRipperTest.java @@ -17,4 +17,15 @@ public class SankakuComplexRipperTest extends RippersTest { testRipper(ripper); } */ + public void testgetGID() throws IOException { + URL url = new URL("https://idol.sankakucomplex.com/?tags=meme_%28me%21me%21me%21%29_%28cosplay%29"); + SankakuComplexRipper ripper = new SankakuComplexRipper(url); + assertEquals("idol._meme_(me!me!me!)_(cosplay)", ripper.getGID(url)); + } + + public void testgetSubDomain() throws IOException { + URL url = new URL("https://idol.sankakucomplex.com/?tags=meme_%28me%21me%21me%21%29_%28cosplay%29"); + SankakuComplexRipper ripper = new SankakuComplexRipper(url); + assertEquals("idol.", ripper.getSubDomain(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ShesFreakyRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ShesFreakyRipperTest.java index 3efa31b4..65f371d1 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ShesFreakyRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ShesFreakyRipperTest.java @@ -13,4 +13,10 @@ public class ShesFreakyRipperTest extends RippersTest { testRipper(ripper); } */ + + public void testGetGID() throws IOException { + URL url = new URL("http://www.shesfreaky.com/gallery/nicee-snow-bunny-579NbPjUcYa.html"); + ShesFreakyRipper ripper = new ShesFreakyRipper(url); + assertEquals("nicee-snow-bunny-579NbPjUcYa", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinfestRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinfestRipperTest.java index c46e922c..c4f56432 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinfestRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinfestRipperTest.java @@ -10,4 +10,10 @@ public class SinfestRipperTest extends RippersTest { SinfestRipper ripper = new SinfestRipper(new URL("http://sinfest.net/view.php?date=2000-01-17")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://sinfest.net/view.php?date=2000-01-17"); + SinfestRipper ripper = new SinfestRipper(url); + assertEquals("2000-01-17", ripper.getGID(url)); + } } \ No newline at end of file diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinnercomicsRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinnercomicsRipperTest.java index d1ce6b33..3866b6ba 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinnercomicsRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SinnercomicsRipperTest.java @@ -10,4 +10,10 @@ public class SinnercomicsRipperTest extends RippersTest { SinnercomicsRipper ripper = new SinnercomicsRipper(new URL("https://sinnercomics.com/comic/beyond-the-hotel-page-01/")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://sinnercomics.com/comic/beyond-the-hotel-page-01/"); + SinnercomicsRipper ripper = new SinnercomicsRipper(url); + assertEquals("beyond-the-hotel", ripper.getGID(url)); + } } \ No newline at end of file diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SmuttyRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SmuttyRipperTest.java index 949e715f..c7aa694e 100644 --- 
a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SmuttyRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/SmuttyRipperTest.java @@ -10,4 +10,10 @@ public class SmuttyRipperTest extends RippersTest { SmuttyRipper ripper = new SmuttyRipper(new URL("https://smutty.com/user/QUIGON/")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://smutty.com/user/QUIGON/"); + SmuttyRipper ripper = new SmuttyRipper(url); + assertEquals("QUIGON", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/StaRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/StaRipperTest.java index 128f3f17..c9aded3e 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/StaRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/StaRipperTest.java @@ -10,4 +10,10 @@ public class StaRipperTest extends RippersTest { StaRipper ripper = new StaRipper(new URL("https://sta.sh/2hn9rtavr1g")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://sta.sh/2hn9rtavr1g"); + StaRipper ripper = new StaRipper(url); + assertEquals("2hn9rtavr1g", ripper.getGID(url)); + } } \ No newline at end of file diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TapasticRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TapasticRipperTest.java index 9617e4b8..f9e448e6 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TapasticRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TapasticRipperTest.java @@ -10,4 +10,10 @@ public class TapasticRipperTest extends RippersTest { TapasticRipper ripper = new TapasticRipper(new URL("https://tapas.io/series/tsiwbakd-comic")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://tapas.io/series/tsiwbakd-comic"); + TapasticRipper ripper = new TapasticRipper(url); + assertEquals("series_ tsiwbakd-comic", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TeenplanetRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TeenplanetRipperTest.java index aa43103b..a402ebc3 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TeenplanetRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TeenplanetRipperTest.java @@ -10,4 +10,10 @@ public class TeenplanetRipperTest extends RippersTest { TeenplanetRipper ripper = new TeenplanetRipper(new URL("http://teenplanet.org/galleries/the-perfect-side-of-me-6588.html")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://teenplanet.org/galleries/the-perfect-side-of-me-6588.html"); + TeenplanetRipper ripper = new TeenplanetRipper(url); + assertEquals("the-perfect-side-of-me-6588", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TheyiffgalleryRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TheyiffgalleryRipperTest.java index 3c7a8fbd..91fc0617 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TheyiffgalleryRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/TheyiffgalleryRipperTest.java @@ -10,4 +10,10 @@ public class TheyiffgalleryRipperTest extends RippersTest { TheyiffgalleryRipper ripper = new TheyiffgalleryRipper(new URL("https://theyiffgallery.com/index?/category/4303")); testRipper(ripper); } + + public void 
testGetGID() throws IOException { + URL url = new URL("https://theyiffgallery.com/index?/category/4303"); + TheyiffgalleryRipper ripper = new TheyiffgalleryRipper(url); + assertEquals("4303", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/VidbleRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/VidbleRipperTest.java index ccbeb8bb..9659d630 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/VidbleRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/VidbleRipperTest.java @@ -10,5 +10,11 @@ public class VidbleRipperTest extends RippersTest { VidbleRipper ripper = new VidbleRipper(new URL("http://www.vidble.com/album/y1oyh3zd")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://www.vidble.com/album/y1oyh3zd"); + VidbleRipper ripper = new VidbleRipper(url); + assertEquals("y1oyh3zd", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WebtoonsRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WebtoonsRipperTest.java index 4aa50d0a..16407ad7 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WebtoonsRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WebtoonsRipperTest.java @@ -15,4 +15,10 @@ public class WebtoonsRipperTest extends RippersTest { WebtoonsRipper ripper = new WebtoonsRipper(new URL("http://www.webtoons.com/en/drama/lookism/ep-145/viewer?title_no=1049&episode_no=145")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://www.webtoons.com/en/drama/my-boo/ep-33/viewer?title_no=1185&episode_no=33"); + WebtoonsRipper ripper = new WebtoonsRipper(url); + assertEquals("my-boo", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WordpressComicRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WordpressComicRipperTest.java index 8879c561..3857fd96 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WordpressComicRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/WordpressComicRipperTest.java @@ -86,12 +86,6 @@ public class WordpressComicRipperTest extends RippersTest { testRipper(ripper); } - public void test_tnbtu() throws IOException { - WordpressComicRipper ripper = new WordpressComicRipper( - new URL("http://tnbtu.com/comic/01-00/")); - testRipper(ripper); - } - public void test_Eightmuses_download() throws IOException { WordpressComicRipper ripper = new WordpressComicRipper( new URL("https://8muses.download/lustomic-playkittens-josh-samuel-porn-comics-8-muses/")); diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XbooruRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XbooruRipperTest.java index 8eefd4e3..6dfc0bab 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XbooruRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XbooruRipperTest.java @@ -10,4 +10,10 @@ public class XbooruRipperTest extends RippersTest { XbooruRipper ripper = new XbooruRipper(new URL("http://xbooru.com/index.php?page=post&s=list&tags=furry")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://xbooru.com/index.php?page=post&s=list&tags=furry"); + XbooruRipper ripper = new XbooruRipper(url); + assertEquals("furry", ripper.getGID(url)); + } } \ No newline at end of file diff --git 
a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java index f66b27d1..54b22eb0 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java @@ -16,4 +16,10 @@ public class XhamsterRipperTest extends RippersTest { XhamsterRipper ripper = new XhamsterRipper(new URL("https://xhamster.com/photos/gallery/japanese-dolls-4-asahi-mizuno-7254664")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("https://xhamster.com/photos/gallery/japanese-dolls-4-asahi-mizuno-7254664"); + XhamsterRipper ripper = new XhamsterRipper(url); + assertEquals("7254664", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/YuvutuRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/YuvutuRipperTest.java index a74b76e9..1a304468 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/YuvutuRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/YuvutuRipperTest.java @@ -16,4 +16,10 @@ public class YuvutuRipperTest extends RippersTest { YuvutuRipper ripper = new YuvutuRipper(new URL("http://www.yuvutu.com/modules.php?name=YuGallery&action=view&set_id=420333")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://www.yuvutu.com/modules.php?name=YuGallery&action=view&set_id=420333"); + YuvutuRipper ripper = new YuvutuRipper(url); + assertEquals("420333", ripper.getGID(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ZizkiRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ZizkiRipperTest.java index de953be8..9facf481 100644 --- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ZizkiRipperTest.java +++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ZizkiRipperTest.java @@ -10,4 +10,16 @@ public class ZizkiRipperTest extends RippersTest { ZizkiRipper ripper = new ZizkiRipper(new URL("http://zizki.com/dee-chorde/we-got-spirit")); testRipper(ripper); } + + public void testGetGID() throws IOException { + URL url = new URL("http://zizki.com/dee-chorde/we-got-spirit"); + ZizkiRipper ripper = new ZizkiRipper(url); + assertEquals("dee-chorde", ripper.getGID(url)); + } + + public void testAlbumTitle() throws IOException { + URL url = new URL("http://zizki.com/dee-chorde/we-got-spirit"); + ZizkiRipper ripper = new ZizkiRipper(url); + assertEquals("zizki_Dee Chorde_We Got Spirit", ripper.getAlbumTitle(url)); + } } diff --git a/src/test/java/com/rarchives/ripme/tst/ui/RipStatusMessageTest.java b/src/test/java/com/rarchives/ripme/tst/ui/RipStatusMessageTest.java new file mode 100644 index 00000000..cabe6491 --- /dev/null +++ b/src/test/java/com/rarchives/ripme/tst/ui/RipStatusMessageTest.java @@ -0,0 +1,21 @@ +package com.rarchives.ripme.tst.ui; + +import com.rarchives.ripme.ui.RipStatusMessage; +import junit.framework.Assert; +import junit.framework.TestCase; + +public class RipStatusMessageTest extends TestCase { + + public void testConstructor() { + RipStatusMessage.STATUS loadingResource = RipStatusMessage.STATUS.LOADING_RESOURCE; + String path = "path/to/file"; + String toStringValue = "Loading Resource: " + path; + + RipStatusMessage ripStatusMessage = new RipStatusMessage(loadingResource, path); + + Assert.assertEquals(loadingResource, ripStatusMessage.getStatus()); + 
Assert.assertEquals(path, ripStatusMessage.getObject()); + Assert.assertEquals(toStringValue, ripStatusMessage.toString()); + } + +} \ No newline at end of file
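For reference, a minimal sketch (not part of the patch) of how the new Utils.getResourceBundle(String) overload can be exercised to change the UI language at runtime. LocaleSwitchExample is a hypothetical caller; it assumes ripme.jar is on the classpath and that the requested LabelsBundle_xx_YY.properties file exists (e.g. the fi_FI bundle added above).

import java.util.ResourceBundle;

import com.rarchives.ripme.utils.Utils;

// Hypothetical caller, for illustration only.
public class LocaleSwitchExample {
    public static void main(String[] args) {
        // null falls back to the "lang" config key, then the system default locale,
        // then the root LabelsBundle.
        ResourceBundle fallback = Utils.getResourceBundle(null);
        System.out.println(fallback.getString("check.for.updates"));

        // A language_COUNTRY code selects the matching LabelsBundle_xx_YY.properties
        // at runtime without touching the config file.
        ResourceBundle finnish = Utils.getResourceBundle("fi_FI");
        System.out.println(finnish.getString("check.for.updates")); // "Tarkista päivitykset"
    }
}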
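Similarly, a minimal sketch (again hypothetical, not part of the patch) of the tag-blacklist check that EhentaiRipperTest and NhentaiRipperTest above exercise: checkTags(getFirstPage(), tags) returns the blacklisted tag found on the album page. The null check below is an assumption about how a non-match is reported; the URL and tags are the ones used in the test.

import java.net.URL;

import com.rarchives.ripme.ripper.rippers.NhentaiRipper;

// Hypothetical caller, for illustration only.
public class TagBlacklistExample {
    public static void main(String[] args) throws Exception {
        URL url = new URL("https://nhentai.net/g/233295/");
        NhentaiRipper ripper = new NhentaiRipper(url);

        // Tags may contain spaces, as the second test case above shows.
        String[] blacklist = {"blowjob", "sole female"};
        String hit = ripper.checkTags(ripper.getFirstPage(), blacklist);
        if (hit != null) { // assumption: no match is reported as null
            System.out.println("Skipping album, blacklisted tag: " + hit);
        }
    }
}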