Merge branch 'master' into unit-tests
commit dee5c84b95
pom.xml
@@ -4,7 +4,7 @@
<groupId>com.rarchives.ripme</groupId>
<artifactId>ripme</artifactId>
<packaging>jar</packaging>
<version>1.7.43</version>
<version>1.7.46</version>
<name>ripme</name>
<url>http://rip.rarchives.com</url>
<properties>
@@ -1,6 +1,9 @@
{
"latestVersion": "1.7.43",
"latestVersion": "1.7.46",
"changeList": [
"1.7.46: Fixed hentai2read ripper; Rewrote the myhentaicomics ripper to use the new getAlbumsToQueue func; Can now blacklist nhentai tags; SinnercomicsRipper no longer adds -page-01 to folder names; EightmusesRipper now adds file extension to filename; disabled test for twitch ripper",
"1.7.45: Fixed hentai2read ripper; ImageBam album fixed; Added various translations; TsuminoRipper no longer requires album name to download",
"1.7.44: Fixed instagram ripper regex",
"1.7.43: Fixed queryId regex in instagram ripper",
"1.7.42: Added user support to SmuttyRipper; Removed vine ripper; Fixed NudeGalsRipper; addURLToDownload improvements; Fixed Instagram ripper",
"1.7.41: Added support for spyingwithlana.com; Added ManganeloRipper; Added support for dynasty-scans.com",
@@ -33,13 +33,22 @@ import com.rarchives.ripme.utils.Utils;
/**
* Entry point to application.
* This is where all the fun happens, with the main method.
* Decides to display UI or to run silently via command-line.
*
* As the "controller" to all other classes, it parses command line parameters and loads the history.
*/
public class App {
public static final Logger logger = Logger.getLogger(App.class);
private static final History HISTORY = new History();
/**
* Where everything starts. Takes in, and tries to parse as many commandline arguments as possible.
* Otherwise, it launches a GUI.
*
* @param args Array of command line arguments.
*/
public static void main(String[] args) throws MalformedURLException {
CommandLine cl = getArgs(args);
@@ -74,7 +83,8 @@ public class App {
/**
* Creates an abstract ripper and instructs it to rip.
* @param url URL to be ripped
* @throws Exception
* @throws Exception Nothing too specific here, just a catch-all.
*
*/
private static void rip(URL url) throws Exception {
AbstractRipper ripper = AbstractRipper.getRipper(url);
@@ -89,6 +99,7 @@ public class App {
private static void handleArguments(String[] args) {
CommandLine cl = getArgs(args);
//Help (list commands)
if (cl.hasOption('h') || args.length == 0) {
HelpFormatter hf = new HelpFormatter();
hf.printHelp("java -jar ripme.jar [OPTIONS]", getOptions());
@@ -98,28 +109,34 @@ public class App {
Utils.configureLogger();
logger.info("Initialized ripme v" + UpdateUtils.getThisJarVersion());
//Allow file overwriting
if (cl.hasOption('w')) {
Utils.setConfigBoolean("file.overwrite", true);
}
//SOCKS proxy server
if (cl.hasOption('s')) {
String sservfull = cl.getOptionValue('s').trim();
Proxy.setSocks(sservfull);
}
//HTTP proxy server
if (cl.hasOption('p')) {
String proxyserverfull = cl.getOptionValue('p').trim();
Proxy.setHTTPProxy(proxyserverfull);
}
//Number of threads
if (cl.hasOption('t')) {
Utils.setConfigInteger("threads.size", Integer.parseInt(cl.getOptionValue('t')));
}
//Ignore 404
if (cl.hasOption('4')) {
Utils.setConfigBoolean("errors.skip404", true);
}
//Re-rip <i>all</i> previous albums
if (cl.hasOption('r')) {
// Re-rip all via command-line
List<String> history = Utils.getConfigList("download.history");
@@ -142,6 +159,7 @@ public class App {
System.exit(0);
}
//Re-rip all <i>selected</i> albums
if (cl.hasOption('R')) {
loadHistory();
if (HISTORY.toList().isEmpty()) {
@@ -174,24 +192,29 @@ public class App {
}
}
//Save the order of images in album
if (cl.hasOption('d')) {
Utils.setConfigBoolean("download.save_order", true);
}
//Don't save the order of images in album
if (cl.hasOption('D')) {
Utils.setConfigBoolean("download.save_order", false);
}
//If both are specified, exit since the options conflict.
if ((cl.hasOption('d'))&&(cl.hasOption('D'))) {
logger.error("\nCannot specify '-d' and '-D' simultaneously");
System.exit(-1);
}
//Destination directory
if (cl.hasOption('l')) {
// change the default rips directory
Utils.setConfigString("rips.directory", cl.getOptionValue('l'));
}
//Read URLs from File
if (cl.hasOption('f')) {
String filename = cl.getOptionValue('f');
try {
@@ -208,6 +231,7 @@ public class App {
}
}
//The URL to rip.
if (cl.hasOption('u')) {
String url = cl.getOptionValue('u').trim();
ripURL(url, cl.hasOption("n"));
@@ -11,6 +11,7 @@ import org.jsoup.nodes.Document;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
import com.rarchives.ripme.ui.MainWindow;
/**
* Simplified ripper, designed for ripping from sites by parsing HTML.
@@ -53,12 +54,29 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
protected boolean hasDescriptionSupport() {
return false;
}
protected String[] getDescription(String url, Document page) throws IOException {
throw new IOException("getDescription not implemented"); // Do I do this or make an abstract function?
}
protected int descSleepTime() {
return 100;
}
protected List<String> getAlbumsToQueue(Document doc) {
return null;
}
// If a page has Queue support then it has no images we want to download, just a list of urls we want to add to
// the queue
protected boolean hasQueueSupport() {
return false;
}
// Takes a url and checks if it is for a page of albums
protected boolean pageContainsAlbums(URL url) {
return false;
}
@Override
public void rip() throws IOException {
int index = 0;
@@ -67,6 +85,16 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
Document doc = getFirstPage();
if (hasQueueSupport() && pageContainsAlbums(this.url)) {
List<String> urls = getAlbumsToQueue(doc);
for (String url : urls) {
MainWindow.addUrlToQueue(url);
}
// We set doc to null here so the while loop below this doesn't fire
doc = null;
}
while (doc != null) {
if (alreadyDownloadedUrls >= Utils.getConfigInteger("history.end_rip_after_already_seen", 1000000000) && !isThisATest()) {
sendUpdate(STATUS.DOWNLOAD_COMPLETE, "Already seen the last " + alreadyDownloadedUrls + " images ending rip");
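The new queue hooks above are easiest to see from the consumer side. The sketch below is a hypothetical ripper (not part of this commit) showing the three overrides a site ripper supplies so that rip() queues album URLs via MainWindow.addUrlToQueue() instead of downloading images; MyhentaicomicsRipper further down in this diff does the same thing for real. Class name and CSS selector are placeholders.

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;

// Hypothetical subclass illustrating the queue-support hooks added above.
public abstract class ExampleQueueRipper extends AbstractHTMLRipper {
    public ExampleQueueRipper(URL url) throws IOException {
        super(url);
    }
    @Override
    protected boolean hasQueueSupport() {
        return true; // this ripper can hand album URLs to the queue
    }
    @Override
    protected boolean pageContainsAlbums(URL url) {
        // assumption: only /search pages list albums rather than images
        return url.toExternalForm().contains("/search");
    }
    @Override
    protected List<String> getAlbumsToQueue(Document doc) {
        List<String> albums = new ArrayList<>();
        for (Element link : doc.select("a.album")) { // selector is a placeholder
            albums.add(link.attr("abs:href"));
        }
        return albums;
    }
    // getHost(), getDomain(), getGID(), getFirstPage(), getURLsFromPage(), downloadURL()
    // are omitted, which is why the class is left abstract here.
}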
@@ -1,5 +1,9 @@
package com.rarchives.ripme.ripper.rippers;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Base64;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
@@ -13,7 +17,6 @@ import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
@@ -22,11 +25,6 @@ import org.jsoup.nodes.Element;
import org.jsoup.safety.Whitelist;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Base64;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
public class DeviantartRipper extends AbstractHTMLRipper {
private static final int PAGE_SLEEP_TIME = 3000,
@@ -43,17 +41,14 @@ public class DeviantartRipper extends AbstractHTMLRipper {
public String getHost() {
return "deviantart";
}
@Override
public String getDomain() {
return "deviantart.com";
}
@Override
public boolean hasDescriptionSupport() {
return true;
}
@Override
public URL sanitizeURL(URL url) throws MalformedURLException {
String u = url.toExternalForm();
@@ -111,20 +106,46 @@ public class DeviantartRipper extends AbstractHTMLRipper {
throw new MalformedURLException("Expected URL format: http://username.deviantart.com/[/gallery/#####], got: " + url);
}
/**
* Gets first page.
* Determines whether a login is supplied;
* if so, logs in and stores the login cookies.
* Otherwise, just bypass the age gate with an anonymous flag.
* @return
* @throws IOException
*/
@Override
public Document getFirstPage() throws IOException {
// Login
try {
cookies = loginToDeviantart();
} catch (Exception e) {
logger.warn("Failed to login: ", e);
//Test to see if there is a login:
String username = Utils.getConfigString("deviantart.username", new String(Base64.decode("Z3JhYnB5")));
String password = Utils.getConfigString("deviantart.password", new String(Base64.decode("ZmFrZXJz")));
if (username == null || password == null) {
logger.debug("No DeviantArt login provided.");
cookies.put("agegate_state","1"); // Bypasses the age gate
} else {
// Attempt Login
try {
cookies = loginToDeviantart();
} catch (IOException e) {
logger.warn("Failed to login: ", e);
cookies.put("agegate_state","1"); // Bypasses the age gate
}
}
return Http.url(this.url)
.cookies(cookies)
.get();
}
/**
*
* @param page
* @param id
* @return
*/
private String jsonToImage(Document page, String id) {
Elements js = page.select("script[type=\"text/javascript\"]");
for (Element tag : js) {
@@ -146,7 +167,6 @@ public class DeviantartRipper extends AbstractHTMLRipper {
}
return null;
}
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<>();
@@ -197,7 +217,6 @@ public class DeviantartRipper extends AbstractHTMLRipper {
}
return imageURLs;
}
@Override
public List<String> getDescriptionsFromPage(Document page) {
List<String> textURLs = new ArrayList<>();
@@ -216,7 +235,6 @@ public class DeviantartRipper extends AbstractHTMLRipper {
}
return textURLs;
}
@Override
public Document getNextPage(Document page) throws IOException {
if (isThisATest()) {
@@ -451,4 +469,4 @@ public class DeviantartRipper extends AbstractHTMLRipper {
// We are logged in, save the cookies
return resp.cookies();
}
}
}
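The login branch above reads two optional config keys. A minimal sketch of wiring them up, with placeholder credentials and set programmatically only for illustration (normally they would live in the config file):

// Placeholder values; deviantart.username / deviantart.password are the keys read in getFirstPage().
Utils.setConfigString("deviantart.username", "myDeviantartUser");
Utils.setConfigString("deviantart.password", "myDeviantartPassword");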
@@ -126,9 +126,9 @@ public class EightmusesRipper extends AbstractHTMLRipper {
image = getFullSizeImage(imageHref);
URL imageUrl = new URL(image);
if (Utils.getConfigBoolean("8muses.use_short_names", false)) {
addURLToDownload(imageUrl, getPrefixShort(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies, "");
addURLToDownload(imageUrl, getPrefixShort(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies, "", null, true);
} else {
addURLToDownload(imageUrl, getPrefixLong(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies);
addURLToDownload(imageUrl, getPrefixLong(x), getSubdir(page.select("title").text()), this.url.toExternalForm(), cookies, "", null, true);
}
// X is our page index
x++;
@@ -44,20 +44,20 @@ public class Hentai2readRipper extends AbstractHTMLRipper {
@Override
public Document getFirstPage() throws IOException {
Document tempDoc;
// get the first page of the comic
if (url.toExternalForm().substring(url.toExternalForm().length() - 1).equals("/")) {
tempDoc = Http.url(url + "1").get();
} else {
tempDoc = Http.url(url + "/1").get();
}
for (Element el : tempDoc.select("ul.nav > li > a")) {
if (el.attr("href").startsWith("https://hentai2read.com/thumbnails/")) {
// Get the page with the thumbnails
return Http.url(el.attr("href")).get();
String thumbnailLink;
try {
Document tempDoc;
tempDoc = Http.url(url).get();
// Get the thumbnail page so we can rip all images without loading every page in the comic
thumbnailLink = tempDoc.select("div.col-xs-12 > div.reader-controls > div.controls-block > button > a").attr("href");
if (!thumbnailLink.equals("")) {
return Http.url(thumbnailLink).get();
} else {
return Http.url(tempDoc.select("a[data-original-title=Thumbnails").attr("href")).get();
}
} catch (IOException e) {
throw new IOException("Unable to get first page");
}
throw new IOException("Unable to get first page");
}
@Override
@@ -1,5 +1,9 @@
package com.rarchives.ripme.ripper.rippers;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
@@ -7,16 +11,10 @@ import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
public class ImagebamRipper extends AbstractHTMLRipper {
// Current HTML document
@@ -71,7 +69,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {
public Document getNextPage(Document doc) throws IOException {
// Find next page
Elements hrefs = doc.select("a.pagination_current + a.pagination_link");
if (hrefs.size() == 0) {
if (hrefs.isEmpty()) {
throw new IOException("No more pages");
}
String nextUrl = "http://www.imagebam.com" + hrefs.first().attr("href");
@@ -121,8 +119,8 @@ public class ImagebamRipper extends AbstractHTMLRipper {
* Handles case when site has IP-banned the user.
*/
private class ImagebamImageThread extends Thread {
private URL url;
private int index;
private URL url; //link to "image page"
private int index; //index in album
ImagebamImageThread(URL url, int index) {
super();
@@ -134,28 +132,43 @@ public class ImagebamRipper extends AbstractHTMLRipper {
public void run() {
fetchImage();
}
/**
* Rips useful image from "image page"
*/
private void fetchImage() {
try {
Document doc = Http.url(url).get();
// Find image
Elements images = doc.select(".image-container img");
if (images.size() == 0) {
Elements metaTags = doc.getElementsByTag("meta");
String imgsrc = "";//initialize, so no NullPointerExceptions should ever happen.
for (Element metaTag: metaTags) {
//the direct link to the image seems to always be linked in the <meta> part of the html.
if (metaTag.attr("property").equals("og:image")) {
imgsrc = metaTag.attr("content");
logger.info("Found URL " + imgsrc);
break;//only one (useful) image possible for an "image page".
}
}
//for debug, or something goes wrong.
if (imgsrc.isEmpty()) {
logger.warn("Image not found at " + this.url);
return;
}
Element image = images.first();
String imgsrc = image.attr("src");
logger.info("Found URL " + imgsrc);
// Provide prefix and let the AbstractRipper "guess" the filename
String prefix = "";
if (Utils.getConfigBoolean("download.save_order", true)) {
prefix = String.format("%03d_", index);
}
addURLToDownload(new URL(imgsrc), prefix);
} catch (IOException e) {
logger.error("[!] Exception while loading/parsing " + this.url, e);
}
}
}
}
}
@@ -435,6 +435,12 @@ public class InstagramRipper extends AbstractHTMLRipper {
if (m.find()) {
return m.group(1);
}
jsP = Pattern.compile("n.pagination:n},queryId:.([a-zA-Z0-9]+).");
m = jsP.matcher(sb.toString());
if (m.find()) {
return m.group(1);
}
} else {
Pattern jsP = Pattern.compile("return e.tagMedia.byTagName.get\\(t\\).pagination},queryId:.([a-zA-Z0-9]+).");
Matcher m = jsP.matcher(sb.toString());
@@ -34,21 +34,18 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
Pattern p = Pattern.compile("^https?://myhentaicomics.com/index.php/([a-zA-Z0-9-]*)/?$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
isTag = false;
return m.group(1);
}
Pattern pa = Pattern.compile("^https?://myhentaicomics.com/index.php/search\\?q=([a-zA-Z0-9-]*)([a-zA-Z0-9=&]*)?$");
Matcher ma = pa.matcher(url.toExternalForm());
if (ma.matches()) {
isTag = true;
return ma.group(1);
}
Pattern pat = Pattern.compile("^https?://myhentaicomics.com/index.php/tag/([0-9]*)/?([a-zA-Z%0-9+?=:]*)?$");
Matcher mat = pat.matcher(url.toExternalForm());
if (mat.matches()) {
isTag = true;
return mat.group(1);
}
@@ -56,6 +53,37 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
"myhentaicomics.com/index.php/albumName - got " + url + " instead");
}
@Override
public boolean hasQueueSupport() {
return true;
}
@Override
public boolean pageContainsAlbums(URL url) {
Pattern pa = Pattern.compile("^https?://myhentaicomics.com/index.php/search\\?q=([a-zA-Z0-9-]*)([a-zA-Z0-9=&]*)?$");
Matcher ma = pa.matcher(url.toExternalForm());
if (ma.matches()) {
return true;
}
Pattern pat = Pattern.compile("^https?://myhentaicomics.com/index.php/tag/([0-9]*)/?([a-zA-Z%0-9+?=:]*)?$");
Matcher mat = pat.matcher(url.toExternalForm());
if (mat.matches()) {
isTag = true;
return true;
}
return false;
}
@Override
public List<String> getAlbumsToQueue(Document doc) {
List<String> urlsToAddToQueue = new ArrayList<>();
for (Element elem : doc.select(".g-album > a")) {
urlsToAddToQueue.add(getDomain() + elem.attr("href"));
}
return urlsToAddToQueue;
}
@Override
public Document getFirstPage() throws IOException {
// "url" is an instance field of the superclass
@@ -81,161 +109,11 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
return Http.url(nextUrl).get();
}
// This replaces getNextPage when downloading from searches and tags
private List<String> getNextAlbumPage(String pageUrl) {
List<String> albumPagesList = new ArrayList<>();
int pageNumber = 1;
albumPagesList.add("http://myhentaicomics.com/index.php/" + pageUrl.split("\\?")[0] + "?page=" + Integer.toString(pageNumber));
while (true) {
String urlToGet = "http://myhentaicomics.com/index.php/" + pageUrl.split("\\?")[0] + "?page=" + Integer.toString(pageNumber);
Document nextAlbumPage;
try {
logger.info("Grabbing " + urlToGet);
nextAlbumPage = Http.url(urlToGet).get();
} catch (IOException e) {
logger.warn("Failed to log link in Jsoup");
nextAlbumPage = null;
e.printStackTrace();
}
Element elem = nextAlbumPage.select("a.ui-icon-right").first();
String nextPage = elem.attr("href");
pageNumber = pageNumber + 1;
if (nextPage.equals("")) {
logger.info("Got " + pageNumber + " pages");
break;
}
else {
logger.info(nextPage);
albumPagesList.add(nextPage);
logger.info("Adding " + nextPage);
}
}
return albumPagesList;
}
private List<String> getAlbumsFromPage(String url) {
List<String> pagesToRip;
List<String> result = new ArrayList<>();
logger.info("Running getAlbumsFromPage");
Document doc;
try {
doc = Http.url("http://myhentaicomics.com" + url).get();
} catch (IOException e) {
logger.warn("Failed to log link in Jsoup");
doc = null;
e.printStackTrace();
}
// This loop goes over every album on the page
for (Element elem : doc.select("li.g-album > a")) {
String link = elem.attr("href");
logger.info("Grabbing album " + link);
pagesToRip = getNextAlbumPage(link);
logger.info(pagesToRip);
for (String element : pagesToRip) {
Document album_doc;
try {
logger.info("grabbing " + element + " with jsoup");
boolean startsWithHttp = element.startsWith("http://");
if (!startsWithHttp) {
album_doc = Http.url("http://myhentaicomics.com/" + element).get();
}
else {
album_doc = Http.url(element).get();
}
} catch (IOException e) {
logger.warn("Failed to log link in Jsoup");
album_doc = null;
e.printStackTrace();
}
for (Element el :album_doc.select("img")) {
String imageSource = el.attr("src");
// This bool is here so we don't try and download the site logo
if (!imageSource.startsWith("http://")) {
// We replace thumbs with resizes so we can get the full sized images
imageSource = imageSource.replace("thumbs", "resizes");
String url_string = "http://myhentaicomics.com/" + imageSource;
url_string = url_string.replace("%20", "_");
url_string = url_string.replace("%27", "");
url_string = url_string.replace("%28", "_");
url_string = url_string.replace("%29", "_");
url_string = url_string.replace("%2C", "_");
if (isTag) {
logger.info("Downloading from a tag or search");
try {
sleep(500);
result.add("http://myhentaicomics.com/" + imageSource);
addURLToDownload(new URL("http://myhentaicomics.com/" + imageSource), "", url_string.split("/")[6]);
}
catch (MalformedURLException e) {
logger.warn("Malformed URL");
e.printStackTrace();
}
}
}
}
}
}
return result;
}
private List<String> getListOfPages(Document doc) {
List<String> pages = new ArrayList<>();
// Get the link from the last button
String nextPageUrl = doc.select("a.ui-icon-right").last().attr("href");
Pattern pat = Pattern.compile("/index\\.php/tag/[0-9]*/[a-zA-Z0-9_\\-:+]*\\?page=(\\d+)");
Matcher mat = pat.matcher(nextPageUrl);
if (mat.matches()) {
logger.debug("Getting pages from a tag");
String base_link = mat.group(0).replaceAll("\\?page=\\d+", "");
logger.debug("base_link is " + base_link);
int numOfPages = Integer.parseInt(mat.group(1));
for (int x = 1; x != numOfPages +1; x++) {
logger.debug("running loop");
String link = base_link + "?page=" + Integer.toString(x);
pages.add(link);
}
} else {
Pattern pa = Pattern.compile("/index\\.php/search\\?q=[a-zA-Z0-9_\\-:]*&page=(\\d+)");
Matcher ma = pa.matcher(nextPageUrl);
if (ma.matches()) {
logger.debug("Getting pages from a search");
String base_link = ma.group(0).replaceAll("page=\\d+", "");
logger.debug("base_link is " + base_link);
int numOfPages = Integer.parseInt(ma.group(1));
for (int x = 1; x != numOfPages +1; x++) {
logger.debug("running loop");
String link = base_link + "page=" + Integer.toString(x);
logger.debug(link);
pages.add(link);
}
}
}
return pages;
}
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<>();
// Checks if this is a comic page or a page of albums
// If true the page is a page of albums
if (doc.toString().contains("class=\"g-item g-album\"")) {
// This if checks that there is more than 1 page
if (!doc.select("a.ui-icon-right").last().attr("href").equals("")) {
// There is more than one page so we call getListOfPages
List<String> pagesToRip = getListOfPages(doc);
logger.debug("Pages to rip = " + pagesToRip);
for (String url : pagesToRip) {
logger.debug("Getting albums from " + url);
result = getAlbumsFromPage(url);
}
} else {
logger.debug("There is only one page on this page of albums");
// There is only 1 page so we call getAlbumsFromPage and pass it the page url
result = getAlbumsFromPage(doc.select("div.g-description > a").attr("href"));
}
return result;
}
else {
for (Element el : doc.select("img")) {
String imageSource = el.attr("src");
// This bool is here so we don't try and download the site logo
@@ -245,7 +123,6 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
result.add("http://myhentaicomics.com/" + imageSource);
}
}
}
return result;
}
@@ -2,6 +2,7 @@ package com.rarchives.ripme.ripper.rippers;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
import org.jsoup.nodes.Document;
@@ -64,6 +65,39 @@ public class NhentaiRipper extends AbstractHTMLRipper {
return "nhentai" + title;
}
private List<String> getTags(Document doc) {
List<String> tags = new ArrayList<>();
for (Element tag : doc.select("a.tag")) {
tags.add(tag.attr("href").replaceAll("/tag/", "").replaceAll("/", ""));
}
return tags;
}
/**
* Checks for blacklisted tags on page. If it finds one it returns it; if not, it returns null
*
* @param doc
* @return String
*/
public String checkTags(Document doc, String[] blackListedTags) {
// If the user hasn't blacklisted any tags we return null
if (blackListedTags == null) {
return null;
}
logger.info("Blacklisted tags " + blackListedTags[0]);
List<String> tagsOnPage = getTags(doc);
for (String tag : blackListedTags) {
for (String pageTag : tagsOnPage) {
// We replace all dashes in the tag with spaces because the tags we get from the site are separated using
// dashes
if (tag.trim().toLowerCase().equals(pageTag.replaceAll("-", " ").toLowerCase())) {
return tag;
}
}
}
return null;
}
@Override
public String getGID(URL url) throws MalformedURLException {
// Ex: https://nhentai.net/g/159174/
@@ -82,6 +116,13 @@ public class NhentaiRipper extends AbstractHTMLRipper {
if (firstPage == null) {
firstPage = Http.url(url).get();
}
String blacklistedTag = checkTags(firstPage, Utils.getConfigStringArray("nhentai.blacklist.tags"));
if (blacklistedTag != null) {
sendUpdate(RipStatusMessage.STATUS.DOWNLOAD_WARN, "Skipping " + url.toExternalForm() + " as it " +
"contains the blacklisted tag \"" + blacklistedTag + "\"");
return null;
}
return firstPage;
}
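A hedged sketch of how the new blacklist check is exercised end to end: the tag list comes from the nhentai.blacklist.tags config key (read through Utils.getConfigStringArray(), added later in this diff) and checkTags() compares it against the tags scraped from the gallery page. The harness class below is invented for illustration; the gallery URL is the example from the comment above.

import java.net.URL;
import org.jsoup.nodes.Document;
import com.rarchives.ripme.ripper.rippers.NhentaiRipper;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;

public class BlacklistSketch {
    public static void main(String[] args) throws Exception {
        URL gallery = new URL("https://nhentai.net/g/159174/");
        NhentaiRipper ripper = new NhentaiRipper(gallery);
        String[] blacklist = Utils.getConfigStringArray("nhentai.blacklist.tags"); // null if unset
        Document page = Http.url(gallery).get();
        String hit = ripper.checkTags(page, blacklist);
        if (hit != null) {
            System.out.println("Gallery would be skipped; blacklisted tag: " + hit);
        }
    }
}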
@@ -34,7 +34,7 @@ public class SinnercomicsRipper extends AbstractHTMLRipper {
Pattern p = Pattern.compile("^https?://sinnercomics.com/comic/([a-zA-Z0-9-]*)/?$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
return m.group(1).replaceAll("-page-\\d+", "");
}
throw new MalformedURLException("Expected sinnercomics.com URL format: " +
"sinnercomics.com/comic/albumName - got " + url + " instead");
@@ -59,11 +59,16 @@ public class TsuminoRipper extends AbstractHTMLRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("https?://www.tsumino.com/Book/Info/([0-9]+)/([a-zA-Z0-9_-]*)");
Pattern p = Pattern.compile("https?://www.tsumino.com/Book/Info/([0-9]+)/([a-zA-Z0-9_-]*)/?");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1) + "_" + m.group(2);
}
p = Pattern.compile("https?://www.tsumino.com/Book/Info/([0-9]+)/?");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
}
throw new MalformedURLException("Expected tsumino URL format: " +
"tsumino.com/Book/Info/ID/TITLE - got " + url + " instead");
}
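For reference, a rough illustration of what the relaxed patterns above accept; the book ID and title are invented, and the expected return values follow from the two regexes shown:

public static void main(String[] args) throws Exception {
    TsuminoRipper ripper = new TsuminoRipper(new URL("https://www.tsumino.com/Book/Info/12345"));
    System.out.println(ripper.getGID(new URL("https://www.tsumino.com/Book/Info/12345/some-title"))); // "12345_some-title"
    System.out.println(ripper.getGID(new URL("https://www.tsumino.com/Book/Info/12345")));            // "12345" (album name no longer required)
}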
@@ -16,9 +16,7 @@ import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.*;
import java.util.List;
import javax.imageio.ImageIO;
@@ -138,6 +136,17 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private static AbstractRipper ripper;
private ResourceBundle rb = Utils.getResourceBundle();
private void updateQueueLabel() {
if (queueListModel.size() > 0) {
optionQueue.setText( rb.getString("Queue") + " (" + queueListModel.size() + ")");
} else {
optionQueue.setText(rb.getString("Queue"));
}
}
private static void addCheckboxListener(JCheckBox checkBox, String configString) {
checkBox.addActionListener(arg0 -> {
Utils.setConfigBoolean(configString, checkBox.isSelected());
@@ -153,6 +162,11 @@ public final class MainWindow implements Runnable, RipStatusHandler {
return checkbox;
}
public static void addUrlToQueue(String url) {
queueListModel.addElement(url);
}
public MainWindow() {
mainFrame = new JFrame("RipMe v" + UpdateUtils.getThisJarVersion());
mainFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
@@ -289,7 +303,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
gbc.gridx = 3; ripPanel.add(stopButton, gbc);
gbc.weightx = 1;
statusLabel = new JLabel("Inactive");
statusLabel = new JLabel(rb.getString("inactive"));
statusLabel.setHorizontalAlignment(JLabel.CENTER);
openButton = new JButton();
openButton.setVisible(false);
@@ -307,10 +321,10 @@ public final class MainWindow implements Runnable, RipStatusHandler {
JPanel optionsPanel = new JPanel(new GridBagLayout());
optionsPanel.setBorder(emptyBorder);
optionLog = new JButton("Log");
optionHistory = new JButton("History");
optionQueue = new JButton("Queue");
optionConfiguration = new JButton("Configuration");
optionLog = new JButton(rb.getString("Log"));
optionHistory = new JButton(rb.getString("History"));
optionQueue = new JButton(rb.getString("Queue"));
optionConfiguration = new JButton(rb.getString("Configuration"));
optionLog.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
@@ -402,9 +416,9 @@ public final class MainWindow implements Runnable, RipStatusHandler {
historyTable.getColumnModel().getColumn(i).setPreferredWidth(width);
}
JScrollPane historyTableScrollPane = new JScrollPane(historyTable);
historyButtonRemove = new JButton("Remove");
historyButtonClear = new JButton("Clear");
historyButtonRerip = new JButton("Re-rip Checked");
historyButtonRemove = new JButton(rb.getString("remove"));
historyButtonClear = new JButton(rb.getString("clear"));
historyButtonRerip = new JButton(rb.getString("re-rip.checked"));
gbc.gridx = 0;
// History List Panel
JPanel historyTablePanel = new JPanel(new GridBagLayout());
@@ -440,11 +454,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
for (String item : Utils.getConfigList("queue")) {
queueListModel.addElement(item);
}
if (queueListModel.size() > 0) {
optionQueue.setText("Queue (" + queueListModel.size() + ")");
} else {
optionQueue.setText("Queue");
}
updateQueueLabel();
gbc.gridx = 0;
JPanel queueListPanel = new JPanel(new GridBagLayout());
gbc.fill = GridBagConstraints.BOTH;
@@ -459,27 +469,27 @@ public final class MainWindow implements Runnable, RipStatusHandler {
configurationPanel.setBorder(emptyBorder);
configurationPanel.setVisible(false);
// TODO Configuration components
configUpdateButton = new JButton("Check for updates");
configUpdateLabel = new JLabel("Current version: " + UpdateUtils.getThisJarVersion(), JLabel.RIGHT);
JLabel configThreadsLabel = new JLabel("Maximum download threads:", JLabel.RIGHT);
JLabel configTimeoutLabel = new JLabel("Timeout (in milliseconds):", JLabel.RIGHT);
JLabel configRetriesLabel = new JLabel("Retry download count:", JLabel.RIGHT);
configUpdateButton = new JButton(rb.getString("check.for.updates"));
configUpdateLabel = new JLabel( rb.getString("current.version") + ": " + UpdateUtils.getThisJarVersion(), JLabel.RIGHT);
JLabel configThreadsLabel = new JLabel(rb.getString("max.download.threads") + ":", JLabel.RIGHT);
JLabel configTimeoutLabel = new JLabel(rb.getString("timeout.mill"), JLabel.RIGHT);
JLabel configRetriesLabel = new JLabel(rb.getString("retry.download.count"), JLabel.RIGHT);
configThreadsText = new JTextField(Integer.toString(Utils.getConfigInteger("threads.size", 3)));
configTimeoutText = new JTextField(Integer.toString(Utils.getConfigInteger("download.timeout", 60000)));
configRetriesText = new JTextField(Integer.toString(Utils.getConfigInteger("download.retries", 3)));
configOverwriteCheckbox = addNewCheckbox("Overwrite existing files?", "file.overwrite", false);
configAutoupdateCheckbox = addNewCheckbox("Auto-update?", "auto.update", true);
configPlaySound = addNewCheckbox("Sound when rip completes", "play.sound", false);
configShowPopup = addNewCheckbox("Notification when rip starts", "download.show_popup", false);
configSaveOrderCheckbox = addNewCheckbox("Preserve order", "download.save_order", true);
configSaveLogs = addNewCheckbox("Save logs", "log.save", false);
configSaveURLsOnly = addNewCheckbox("Save URLs only", "urls_only.save", false);
configSaveAlbumTitles = addNewCheckbox("Save album titles", "album_titles.save", true);
configClipboardAutorip = addNewCheckbox("Autorip from Clipboard", "clipboard.autorip", false);
configSaveDescriptions = addNewCheckbox("Save descriptions", "descriptions.save", true);
configPreferMp4 = addNewCheckbox("Prefer MP4 over GIF","prefer.mp4", false);
configWindowPosition = addNewCheckbox("Restore window position", "window.position", true);
configURLHistoryCheckbox = addNewCheckbox("Remember URL history", "remember.url_history", true);
configOverwriteCheckbox = addNewCheckbox(rb.getString("overwrite.existing.files"), "file.overwrite", false);
configAutoupdateCheckbox = addNewCheckbox(rb.getString("auto.update"), "auto.update", true);
configPlaySound = addNewCheckbox(rb.getString("sound.when.rip.completes"), "play.sound", false);
configShowPopup = addNewCheckbox(rb.getString("notification.when.rip.starts"), "download.show_popup", false);
configSaveOrderCheckbox = addNewCheckbox(rb.getString("preserve.order"), "download.save_order", true);
configSaveLogs = addNewCheckbox(rb.getString("save.logs"), "log.save", false);
configSaveURLsOnly = addNewCheckbox(rb.getString("save.urls.only"), "urls_only.save", false);
configSaveAlbumTitles = addNewCheckbox(rb.getString("save.album.titles"), "album_titles.save", true);
configClipboardAutorip = addNewCheckbox(rb.getString("autorip.from.clipboard"), "clipboard.autorip", false);
configSaveDescriptions = addNewCheckbox(rb.getString("save.descriptions"), "descriptions.save", true);
configPreferMp4 = addNewCheckbox(rb.getString("prefer.mp4.over.gif"),"prefer.mp4", false);
configWindowPosition = addNewCheckbox(rb.getString("restore.window.position"), "window.position", true);
configURLHistoryCheckbox = addNewCheckbox(rb.getString("remember.url.history"), "remember.url_history", true);
configLogLevelCombobox = new JComboBox(new String[] {"Log level: Error", "Log level: Warn", "Log level: Info", "Log level: Debug"});
configLogLevelCombobox.setSelectedItem(Utils.getConfigString("log.level", "Log level: Debug"));
@@ -785,11 +795,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
queueListModel.addListDataListener(new ListDataListener() {
@Override
public void intervalAdded(ListDataEvent arg0) {
if (queueListModel.size() > 0) {
optionQueue.setText("Queue (" + queueListModel.size() + ")");
} else {
optionQueue.setText("Queue");
}
updateQueueLabel();
if (!isRipping) {
ripNextAlbum();
}
@@ -966,7 +972,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
HISTORY.clear();
if (historyFile.exists()) {
try {
logger.info("Loading history from " + historyFile.getCanonicalPath());
logger.info(rb.getString("loading.history.from") + " " + historyFile.getCanonicalPath());
HISTORY.fromFile(historyFile.getCanonicalPath());
} catch (IOException e) {
logger.error("Failed to load history from file " + historyFile, e);
@@ -979,7 +985,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
JOptionPane.ERROR_MESSAGE);
}
} else {
logger.info("Loading history from configuration");
logger.info(rb.getString("loading.history.from.configuration"));
HISTORY.fromList(Utils.getConfigList("download.history"));
if (HISTORY.toList().size() == 0) {
// Loaded from config, still no entries.
@@ -1025,17 +1031,13 @@ public final class MainWindow implements Runnable, RipStatusHandler {
return;
}
String nextAlbum = (String) queueListModel.remove(0);
if (queueListModel.isEmpty()) {
optionQueue.setText("Queue");
} else {
optionQueue.setText("Queue (" + queueListModel.size() + ")");
}
updateQueueLabel();
Thread t = ripAlbum(nextAlbum);
if (t == null) {
try {
Thread.sleep(500);
} catch (InterruptedException ie) {
logger.error("Interrupted while waiting to rip next album", ie);
logger.error(rb.getString("interrupted.while.waiting.to.rip.next.album"), ie);
}
ripNextAlbum();
} else {
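The pattern used throughout the hunks above: every user-visible string moves out of the Java source into the LabelsBundle*.properties files added at the bottom of this diff, and is looked up through the per-window ResourceBundle. A minimal sketch of that lookup (the key "Queue" is one of the real keys; the button variable is illustrative):

ResourceBundle rb = Utils.getResourceBundle();           // locale comes from config, see Utils.getResourceBundle() below
JButton someButton = new JButton(rb.getString("Queue")); // resolves to "Queue", "Cola", "Fila", ... depending on locale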
@@ -21,7 +21,7 @@ import com.rarchives.ripme.utils.Utils;
public class UpdateUtils {
private static final Logger logger = Logger.getLogger(UpdateUtils.class);
private static final String DEFAULT_VERSION = "1.7.43";
private static final String DEFAULT_VERSION = "1.7.46";
private static final String REPO_NAME = "ripmeapp/ripme";
private static final String updateJsonURL = "https://raw.githubusercontent.com/" + REPO_NAME + "/master/ripme.json";
private static final String mainFileName = "ripme.jar";
46
src/main/java/com/rarchives/ripme/utils/UTF8Control.java
Normal file
46
src/main/java/com/rarchives/ripme/utils/UTF8Control.java
Normal file
@ -0,0 +1,46 @@
|
||||
package com.rarchives.ripme.utils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URL;
|
||||
import java.net.URLConnection;
|
||||
import java.util.Locale;
|
||||
import java.util.PropertyResourceBundle;
|
||||
import java.util.ResourceBundle;
|
||||
|
||||
// Code taken from https://stackoverflow.com/questions/4659929/how-to-use-utf-8-in-resource-properties-with-resourcebundle/4660195#4660195
|
||||
|
||||
public class UTF8Control extends ResourceBundle.Control {
|
||||
public ResourceBundle newBundle
|
||||
(String baseName, Locale locale, String format, ClassLoader loader, boolean reload)
|
||||
throws IllegalAccessException, InstantiationException, IOException
|
||||
{
|
||||
// The below is a copy of the default implementation.
|
||||
String bundleName = toBundleName(baseName, locale);
|
||||
String resourceName = toResourceName(bundleName, "properties");
|
||||
ResourceBundle bundle = null;
|
||||
InputStream stream = null;
|
||||
if (reload) {
|
||||
URL url = loader.getResource(resourceName);
|
||||
if (url != null) {
|
||||
URLConnection connection = url.openConnection();
|
||||
if (connection != null) {
|
||||
connection.setUseCaches(false);
|
||||
stream = connection.getInputStream();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
stream = loader.getResourceAsStream(resourceName);
|
||||
}
|
||||
if (stream != null) {
|
||||
try {
|
||||
// Only this line is changed to make it to read properties files as UTF-8.
|
||||
bundle = new PropertyResourceBundle(new InputStreamReader(stream, "UTF-8"));
|
||||
} finally {
|
||||
stream.close();
|
||||
}
|
||||
}
|
||||
return bundle;
|
||||
}
|
||||
}
|
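For context, the control slots into the standard ResourceBundle lookup; the call below mirrors what Utils.getResourceBundle() (later in this diff) does when loading the LabelsBundle files as UTF-8:

// Loads src/main/resources/LabelsBundle_de_DE.properties with UTF-8 decoding.
ResourceBundle rb = ResourceBundle.getBundle("LabelsBundle", new Locale("de", "DE"), new UTF8Control());
System.out.println(rb.getString("History")); // "Verlauf"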
@@ -1,9 +1,6 @@
package com.rarchives.ripme.utils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.io.*;
import java.lang.reflect.Constructor;
import java.net.URISyntaxException;
import java.net.URL;
@@ -92,9 +89,25 @@ public class Utils {
return workingDir;
}
/**
* Gets the value of a specific config key.
*
* @param key The name of the config parameter you want to find.
* @param defaultValue What the default value would be.
*/
public static String getConfigString(String key, String defaultValue) {
return config.getString(key, defaultValue);
}
public static String[] getConfigStringArray(String key) {
String[] s = config.getStringArray(key);
if (s.length == 0) {
return null;
} else {
return s;
}
}
public static int getConfigInteger(String key, int defaultValue) {
return config.getInt(key, defaultValue);
}
@@ -135,31 +148,53 @@ public class Utils {
}
}
/**
* Determines if your current system is a Windows system.
*/
private static boolean isWindows() {
return OS.contains("win");
}
/**
* Determines if your current system is a Mac system
*/
private static boolean isMacOS() {
return OS.contains("mac");
}
/**
* Determines if current system is based on UNIX
*/
private static boolean isUnix() {
return OS.contains("nix") || OS.contains("nux") || OS.contains("bsd");
}
/**
* Gets the directory of where the config file is stored on a Windows machine.
*/
private static String getWindowsConfigDir() {
return System.getenv("LOCALAPPDATA") + File.separator + "ripme";
}
/**
* Gets the directory of where the config file is stored on a UNIX machine.
*/
private static String getUnixConfigDir() {
return System.getProperty("user.home") + File.separator + ".config" + File.separator + "ripme";
}
/**
* Gets the directory of where the config file is stored on a Mac machine.
*/
private static String getMacOSConfigDir() {
return System.getProperty("user.home")
+ File.separator + "Library" + File.separator + "Application Support" + File.separator + "ripme";
}
/**
* Determines if the app is running in a portable mode. i.e. on a USB stick
*/
private static boolean portableMode() {
try {
File f = new File(new File(".").getCanonicalPath() + File.separator + configFile);
@@ -172,7 +207,9 @@ public class Utils {
return false;
}
/**
* Gets the directory of the config directory, for all systems.
*/
public static String getConfigDir() {
if (portableMode()) {
try {
@@ -192,17 +229,24 @@ public class Utils {
return ".";
}
}
// Delete the url history file
/**
* Delete the url history file
*/
public static void clearURLHistory() {
File file = new File(getURLHistoryFile());
file.delete();
}
// Return the path of the url history file
/**
* Return the path of the url history file
*/
public static String getURLHistoryFile() {
return getConfigDir() + File.separator + "url_history.txt";
}
/**
* Gets the path to the configuration file.
*/
private static String getConfigFilePath() {
return getConfigDir() + File.separator + configFile;
}
@@ -228,6 +272,15 @@ public class Utils {
return prettySaveAs;
}
/**
* Strips away URL parameters, which usually appear at the end of URLs.
* E.g. the ?query on PHP
*
* @param url The URL to filter/strip
* @param parameter The parameter to strip
*
* @return The stripped URL
*/
public static String stripURLParameter(String url, String parameter) {
int paramIndex = url.indexOf("?" + parameter);
boolean wasFirstParam = true;
@@ -255,6 +308,7 @@ public class Utils {
/**
* Removes the current working directory from a given filename
* @param file
* Path to the file
* @return
* 'file' without the leading current working directory
*/
@@ -338,9 +392,24 @@ public class Utils {
}
private static final int SHORTENED_PATH_LENGTH = 12;
/**
* Shortens the path to a file
* @param path
* String of the path to the file
* @return
* The simplified path to the file.
*/
public static String shortenPath(String path) {
return shortenPath(new File(path));
}
/**
* Shortens the path to a file
* @param file
* File object that you want the shortened path of.
* @return
* The simplified path to the file.
*/
public static String shortenPath(File file) {
String path = removeCWD(file);
if (path.length() < SHORTENED_PATH_LENGTH * 2) {
@@ -351,6 +420,13 @@ public class Utils {
+ path.substring(path.length() - SHORTENED_PATH_LENGTH);
}
/**
* Sanitizes a string so that a filesystem can handle it
* @param text
* The text to be sanitized.
* @return
* The sanitized text.
*/
public static String filesystemSanitized(String text) {
text = text.replaceAll("[^a-zA-Z0-9.-]", "_");
return text;
@@ -400,6 +476,13 @@ public class Utils {
return original;
}
/**
* Converts an integer into a human readable string
* @param bytes
* Non-human readable integer.
* @return
* Human readable interpretation of a byte.
*/
public static String bytesToHumanReadable(int bytes) {
float fbytes = (float) bytes;
String[] mags = new String[] {"", "K", "M", "G", "T"};
@@ -411,6 +494,10 @@ public class Utils {
return String.format("%.2f%siB", fbytes, mags[magIndex]);
}
/**
* Gets and returns a list of all the album rippers present in the "com.rarchives.ripme.ripper.rippers" package.
* @return List<String> of all album rippers present.
*/
public static List<String> getListOfAlbumRippers() throws Exception {
List<String> list = new ArrayList<>();
for (Constructor<?> ripper : AbstractRipper.getRipperConstructors("com.rarchives.ripme.ripper.rippers")) {
@@ -418,6 +505,11 @@ public class Utils {
}
return list;
}
/**
* Gets and returns a list of all video rippers present in the "com.rarchives.rime.rippers.video" package
* @return List<String> of all the video rippers.
*/
public static List<String> getListOfVideoRippers() throws Exception {
List<String> list = new ArrayList<>();
for (Constructor<?> ripper : AbstractRipper.getRipperConstructors("com.rarchives.ripme.ripper.rippers.video")) {
@@ -426,6 +518,11 @@ public class Utils {
return list;
}
/**
* Plays a sound from a file.
* @param filename
* Path to the sound file
*/
public static void playSound(String filename) {
URL resource = ClassLoader.getSystemClassLoader().getResource(filename);
try {
@@ -563,6 +660,9 @@ public class Utils {
cookieCache = new HashMap<String, HashMap<String, String>>();
}
/**
* Gets all the cookies from a certain host
*/
public static Map<String, String> getCookies(String host) {
HashMap<String, String> domainCookies = cookieCache.get(host);
if (domainCookies == null) {
@@ -579,4 +679,25 @@ public class Utils {
}
return domainCookies;
}
/**
* Gets the ResourceBundle AKA language package.
* Used for choosing the language of the UI.
*
* @return Returns the default resource bundle using the language specified in the config file.
*/
public static ResourceBundle getResourceBundle() {
if (!getConfigString("lang", "").equals("")) {
String[] langCode = getConfigString("lang", "").split("_");
logger.info("Setting locale to " + getConfigString("lang", ""));
return ResourceBundle.getBundle("LabelsBundle", new Locale(langCode[0], langCode[1]), new UTF8Control());
}
try {
ResourceBundle rb = ResourceBundle.getBundle("LabelsBundle", Locale.getDefault(), new UTF8Control());
return rb;
} catch (MissingResourceException e) {
ResourceBundle rb = ResourceBundle.getBundle("LabelsBundle", Locale.ROOT);
return rb;
}
}
}
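Two of the newly documented helpers, illustrated with invented inputs; the expected outputs follow from the code shown above, and bytesToHumanReadable additionally assumes the usual divide-by-1024 loop that the hunk elides:

Utils.filesystemSanitized("My Album: #1!"); // "My_Album___1_" (every character outside [a-zA-Z0-9.-] becomes '_')
Utils.bytesToHumanReadable(123456);         // "120.56KiB"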
src/main/resources/LabelsBundle.properties (new file)
@@ -0,0 +1,37 @@
Log = Log
History = History
created = created
modified = modified
Queue = Queue
Configuration = Configuration
# Keys for the Configuration menu
current.version = Current version
check.for.updates = Check for updates
auto.update = Auto-update?
max.download.threads = Maximum download threads
timeout.mill = Timeout (in milliseconds):
retry.download.count = Retry download count
overwrite.existing.files = Overwrite existing files?
sound.when.rip.completes = Sound when rip completes
preserve.order = Preserve order
save.logs = Save logs
notification.when.rip.starts = Notification when rip starts
save.urls.only = Save URLs only
save.album.titles = Save album titles
autorip.from.clipboard = Autorip from Clipboard
save.descriptions = Save descriptions
prefer.mp4.over.gif = Prefer MP4 over GIF
restore.window.position = Restore window position
remember.url.history = Remember URL history
loading.history.from = Loading history from
# Misc UI keys
loading.history.from.configuration = Loading history from configuration
interrupted.while.waiting.to.rip.next.album = Interrupted while waiting to rip next album
inactive = Inactive
re-rip.checked = Re-rip Checked
remove = Remove
clear = Clear
src/main/resources/LabelsBundle_de_DE.properties (new file)
@@ -0,0 +1,38 @@
Log = Log
History = Verlauf
created = erstellt
modified = geändert
Queue = Queue
Configuration = Konfiguration
# Keys for the Configuration menu
current.version = Aktuelle Version
check.for.updates = Suche nach Aktualisierungen
auto.update = Automatisch Aktualisieren?
max.download.threads = Maximum download threads
timeout.mill = Timeout (in milliseconds):
retry.download.count = Anzahl der Downloadversuche
overwrite.existing.files = Überschreibe bereits existierende Dateien?
sound.when.rip.completes = Ton abspielen bei fertigem Download
preserve.order = Reihenfolge beibehalten
save.logs = Speichere Logs
notification.when.rip.starts = Benachrichtigung wenn Download startet
save.urls.only = Speicher nur URLs
save.album.titles = Speichere Albumtitels
autorip.from.clipboard = Automatisch Downloaden von der Zwischenablage
save.descriptions = Speichere Beschreibungen
prefer.mp4.over.gif = Bevorzuge MP4 über GIF
restore.window.position = Wieder herstellen der Fensterposition
remember.url.history = Erinnere URL Verlauf
loading.history.from = Lade Verlauf von
# Misc UI keys
loading.history.from.configuration = Lade Verlauf aus Konfiguration
interrupted.while.waiting.to.rip.next.album = Unterbrochen während Download des nächsten Albums
inactive = Inaktiv
re-rip.checked = Re-rip Überprüft
remove = Entfernen
clear = Leeren
src/main/resources/LabelsBundle_es_ES.properties (new file)
@@ -0,0 +1,37 @@
Log = Log
History = Historia
created = creado
modified = modificado
Queue = Cola
Configuration = Configuracion
# Keys for the Configuration menu
current.version = Version Actual
check.for.updates = Buscar actualizaciones
auto.update = Auto-actualizar?
max.download.threads = Maximos procesos de descarga
timeout.mill = Timeout (in milliseconds):
retry.download.count = Numero de reintentos de descarga
overwrite.existing.files = Sobreescribir archivos existentes?
sound.when.rip.completes = Sonar cuando el Rip termina
preserve.order = Mantener orden
save.logs = Guardar logs
notification.when.rip.starts = Notificar cuando el Rip comienza
save.urls.only = Guardar solamente URLs
save.album.titles = Guardar titulos de albunes
autorip.from.clipboard = Autorip desde Portapapeles
save.descriptions = Guardar descripciones
prefer.mp4.over.gif = Preferir MP4 sobre GIF
restore.window.position = Restaurar posicion de ventana
remember.url.history = Recordar historia URL
loading.history.from = Cargando historia desde
# Misc UI keys
loading.history.from.configuration = Cargando historia desde la configuracion
interrupted.while.waiting.to.rip.next.album = Interrumpido esperando el Rip del proximo album
inactive = Inactivo
re-rip.checked = Re-rip marcado
remove = Quitar
clear = Limpiar
37
src/main/resources/LabelsBundle_fr_CH.properties
Normal file
@ -0,0 +1,37 @@
Log = Journal
History = Historique
created = créé le
modified = modifié le
Queue = File d'attente
Configuration = Configuration

# Keys for the Configuration menu

current.version = Version actuelle
check.for.updates = Vérifier mises à jour
auto.update = Mises à jour automatiques?
max.download.threads = Nombre de téléchargements parallèles maximum
timeout.mill = Délai d'expiration (en millisecondes):
retry.download.count = Nombre d'essais téléchargement
overwrite.existing.files = Remplacer fichiers existants ?
sound.when.rip.completes = Son lorsque le rip est terminé
preserve.order = Conserver l'ordre
save.logs = Enregistrer journaux
notification.when.rip.starts = Notification lorsqu'un rip commence
save.urls.only = Enregistrer URL uniquement
save.album.titles = Enregistrer titres d'album
autorip.from.clipboard = Autorip depuis presse-papier
save.descriptions = Enregistrer descriptions
prefer.mp4.over.gif = Préférer MP4 à GIF
restore.window.position = Restaurer la position de la fenêtre
remember.url.history = Se souvenir de l'historique des URL
loading.history.from = Charger l'historique depuis

# Misc UI keys

loading.history.from.configuration = Charger l'historique depuis la configuration
interrupted.while.waiting.to.rip.next.album = Interrompu lors de l'attente pour ripper le prochain album
inactive = Inactif
re-rip.checked = Re-rip vérifié
remove = Enlever
clear = Effacer
37
src/main/resources/LabelsBundle_pt_PT.properties
Normal file
@ -0,0 +1,37 @@
Log = Registo
History = Histórico
created = criado
modified = modificado
Queue = Fila
Configuration = Configuração

# Keys for the Configuration menu

current.version = Versão atual
check.for.updates = Verificar atualizações
auto.update = Atualização automática?
max.download.threads = Número máximo de processos de transferência
timeout.mill = Timeout (em milissegundos):
retry.download.count = Número de novas tentativas de transferência
overwrite.existing.files = Sobrescrever ficheiros existentes?
sound.when.rip.completes = Notificar quando o rip é concluído
preserve.order = Manter a ordem
save.logs = Guardar registos
notification.when.rip.starts = Notificar quando o rip começar
save.urls.only = Apenas guardar URLs
save.album.titles = Guardar os títulos de álbuns
autorip.from.clipboard = Autorip da área de transferência
save.descriptions = Guardar descrições
prefer.mp4.over.gif = Preferir MP4 a GIF
restore.window.position = Restaurar posição da janela
remember.url.history = Lembrar histórico de URL
loading.history.from = Carregar histórico de

# Misc UI keys

loading.history.from.configuration = A carregar o histórico da configuração
interrupted.while.waiting.to.rip.next.album = Interrompido durante a espera do rip do próximo álbum
inactive = Inativo
re-rip.checked = Re-rip verificado
remove = Remover
clear = Limpar
@ -0,0 +1,33 @@
package com.rarchives.ripme.tst.ripper.rippers;

import java.io.IOException;
import java.net.URL;

import com.rarchives.ripme.ripper.rippers.NhentaiRipper;

public class NhentaiRipperTest extends RippersTest {
    public void testRip() throws IOException {
        NhentaiRipper ripper = new NhentaiRipper(new URL("https://nhentai.net/g/233295/"));
        testRipper(ripper);
    }

    public void testGetGID() throws IOException {
        NhentaiRipper ripper = new NhentaiRipper(new URL("https://nhentai.net/g/233295/"));
        assertEquals("233295", ripper.getGID(new URL("https://nhentai.net/g/233295/")));
    }

    // Test the tag blacklisting
    public void testTagBlackList() throws IOException {
        URL url = new URL("https://nhentai.net/g/233295/");
        NhentaiRipper ripper = new NhentaiRipper(url);
        // Test multiple blacklisted tags
        String[] tags = {"test", "one", "blowjob"};
        String blacklistedTag = ripper.checkTags(ripper.getFirstPage(), tags);
        assertEquals("blowjob", blacklistedTag);

        // Test tags with spaces in them
        String[] tags2 = {"test", "one", "sole female"};
        blacklistedTag = ripper.checkTags(ripper.getFirstPage(), tags2);
        assertEquals("sole female", blacklistedTag);
    }
}
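The test above relies on NhentaiRipper.checkTags(...) returning the first configured tag that appears on the gallery page, including tags containing spaces. A hedged sketch of what a helper with that contract could look like using Jsoup; the CSS selector and matching rules below are assumptions for illustration, not RipMe's actual implementation:

import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

public class TagBlacklistSketch {
    /**
     * Returns the first blacklisted tag found on the page, or null if none match.
     * The selector "a.tag span.name" is a guess at the gallery markup.
     */
    public static String checkTags(Document page, String[] blacklistedTags) {
        for (Element tagElement : page.select("a.tag span.name")) {
            String tagOnPage = tagElement.text().trim();
            for (String blacklisted : blacklistedTags) {
                if (tagOnPage.equalsIgnoreCase(blacklisted)) {
                    return blacklisted; // first blacklisted tag wins
                }
            }
        }
        return null; // nothing on the page is blacklisted
    }
}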
@ -38,14 +38,17 @@ public class VideoRippersTest extends RippersTest {
        }
    }

    public void testTwitchVideoRipper() throws IOException {
        List<URL> contentURLs = new ArrayList<>();
        contentURLs.add(new URL("https://clips.twitch.tv/FaithfulIncredulousPotTBCheesePull"));
        for (URL url : contentURLs) {
            TwitchVideoRipper ripper = new TwitchVideoRipper(url);
            videoTestHelper(ripper);
        }
    }

    // Test disabled. See https://github.com/RipMeApp/ripme/issues/574

    // public void testTwitchVideoRipper() throws IOException {
    //     List<URL> contentURLs = new ArrayList<>();
    //     contentURLs.add(new URL("https://clips.twitch.tv/FaithfulIncredulousPotTBCheesePull"));
    //     for (URL url : contentURLs) {
    //         TwitchVideoRipper ripper = new TwitchVideoRipper(url);
    //         videoTestHelper(ripper);
    //     }
    // }

    public void testXhamsterRipper() throws IOException {
        List<URL> contentURLs = new ArrayList<>();
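The Twitch test above is disabled by commenting it out, which fits the JUnit 3 TestCase style these ripper tests appear to use. If the suite were on JUnit 4, the same intent could be expressed declaratively with @Ignore, keeping the reason attached to the skipped test; a sketch under that assumption (class name and body are illustrative only, not part of this diff):

import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.junit.Ignore;
import org.junit.Test;

public class TwitchVideoRipperSkippedTest {

    // @Ignore keeps the test compiled and visible in reports, but skips execution.
    @Ignore("Twitch ripper test disabled; see https://github.com/RipMeApp/ripme/issues/574")
    @Test
    public void testTwitchVideoRipper() throws IOException {
        List<URL> contentURLs = new ArrayList<>();
        contentURLs.add(new URL("https://clips.twitch.tv/FaithfulIncredulousPotTBCheesePull"));
        for (URL url : contentURLs) {
            // In the real suite this would be: videoTestHelper(new TwitchVideoRipper(url));
            System.out.println("Would rip: " + url);
        }
    }
}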