commit 126d8f67ff

.gitignore
@@ -112,7 +112,7 @@ rips/
 .history
 ripme.jar.update
 *.swp
-rip.properties
+*.properties
 history.json
 *.iml
 .settings/

.travis.yml
@@ -2,3 +2,5 @@ language: java
 jdk:
 - oraclejdk8
 - openjdk8
+after_success:
+- mvn clean test jacoco:report coveralls:report

README.md
@@ -1,9 +1,22 @@
-# RipMe
+# RipMe [![Licensed under the MIT License](https://img.shields.io/badge/License-MIT-blue.svg)](https://github.com/RipMeApp/ripme/blob/master/LICENSE.txt) [![Join the chat at https://gitter.im/RipMeApp/Lobby](https://badges.gitter.im/RipMeApp/Lobby.svg)](https://gitter.im/RipMeApp/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
 [![Build Status](https://travis-ci.org/RipMeApp/ripme.svg?branch=master)](https://travis-ci.org/RipMeApp/ripme)
-[![Join the chat at https://gitter.im/RipMeApp/Lobby](https://badges.gitter.im/RipMeApp/Lobby.svg)](https://gitter.im/RipMeApp/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+[![Coverage Status](https://coveralls.io/repos/github/RipMeApp/ripme/badge.svg?branch=master)](https://coveralls.io/github/RipMeApp/ripme?branch=master)
 
-Album ripper for various websites. Runs on your computer. Requires Java 8.
+# Contribute
+
+RipMe is maintained with ♥️ and in our limited free time by **[@MetaPrime](https://github.com/metaprime)** and **[@cyian-1756](https://github.com/cyian-1756)**. If you'd like to contribute but aren't good with code, help keep us happy with a small contribution!
+
+[![Tip with PayPal](https://img.shields.io/badge/PayPal-Buy_us...-lightgrey.svg)](https://www.paypal.me/ripmeapp)
+[![Tip with PayPal](https://img.shields.io/badge/coffee-%245-green.svg)](https://www.paypal.com/paypalme/ripmeapp/send?amount=5.00&currencyCode=USD&locale.x=en_US&country.x=US)
+[![Tip with PayPal](https://img.shields.io/badge/beer-%2410-yellow.svg)](https://www.paypal.com/paypalme/ripmeapp/send?amount=10.00&currencyCode=USD&locale.x=en_US&country.x=US)
+[![Tip with PayPal](https://img.shields.io/badge/lunch-%2420-orange.svg)](https://www.paypal.com/paypalme/ripmeapp/send?amount=20.00&currencyCode=USD&locale.x=en_US&country.x=US)
+[![Tip with PayPal](https://img.shields.io/badge/dinner-%2450-red.svg)](https://www.paypal.com/paypalme/ripmeapp/send?amount=50.00&currencyCode=USD&locale.x=en_US&country.x=US)
+[![Tip with PayPal](https://img.shields.io/badge/custom_amount-...-lightgrey.svg)](https://www.paypal.me/ripmeapp)
+
+# About
+
+RipMe is an album ripper for various websites. Runs on your computer. Requires Java 8.
 
 ![Screenshot](http://i.imgur.com/kWzhsIu.png)
 

nbactions.xml (deleted)
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<actions>
-<action>
-<actionName>run</actionName>
-<packagings>
-<packaging>jar</packaging>
-</packagings>
-<goals>
-<goal>process-classes</goal>
-<goal>org.codehaus.mojo:exec-maven-plugin:1.2.1:exec</goal>
-</goals>
-<properties>
-<exec.args>-classpath %classpath com.rarchives.ripme.App</exec.args>
-<exec.executable>java</exec.executable>
-</properties>
-</action>
-</actions>

pom.xml
@@ -4,7 +4,7 @@
 <groupId>com.rarchives.ripme</groupId>
 <artifactId>ripme</artifactId>
 <packaging>jar</packaging>
-<version>1.6.12</version>
+<version>1.6.13</version>
 <name>ripme</name>
 <url>http://rip.rarchives.com</url>
 <properties>
@@ -88,6 +88,25 @@
 <target>1.8</target>
 </configuration>
 </plugin>
+<plugin>
+<groupId>org.eluder.coveralls</groupId>
+<artifactId>coveralls-maven-plugin</artifactId>
+<version>4.3.0</version>
+</plugin>
+<plugin>
+<!-- At time of writing: JaCoCo is (allegedly) the only coverage report generator that supports Java 8 -->
+<groupId>org.jacoco</groupId>
+<artifactId>jacoco-maven-plugin</artifactId>
+<version>0.7.6.201602180812</version>
+<executions>
+<execution>
+<id>prepare-agent</id>
+<goals>
+<goal>prepare-agent</goal>
+</goals>
+</execution>
+</executions>
+</plugin>
 </plugins>
 </build>
 </project>

ripme.json
@@ -1,6 +1,7 @@
 {
-"latestVersion": "1.6.12",
+"latestVersion": "1.6.13",
 "changeList": [
+"1.6.13: Added support for ripping from instagram tags; Instagram regex now matches all ripable urls; improved regex for pichunter",
 "1.6.12: Fix InstagramRipper with timestamps; Pichunter galleries support; logging improvements",
 "1.6.11: dded pichunter.com ripper; Changed instagram output name format; added tehyiffgallery ripper; Fixed xchan ripper; Fixed regession in chanRipper folder naming",
 "1.6.10: Added viewcomic ripper; Fixed webtoons malformed url error message; Fixed chan ripper thread title; Fixed Modelmayhem ripper",

App.java
@@ -34,9 +34,15 @@ import com.rarchives.ripme.utils.Utils;
 */
 public class App {
 
-public static Logger logger;
+public static final Logger logger;
 private static final History HISTORY = new History();
 
+static {
+//initialize logger
+Utils.configureLogger();
+logger = Logger.getLogger(App.class);
+}
+
 public static void main(String[] args) throws MalformedURLException {
 CommandLine cl = getArgs(args);
 if (args.length > 0 && cl.hasOption('v')){
@@ -44,11 +50,8 @@ public class App {
 System.exit(0);
 }
 
-//initialize logger
-Utils.configureLogger();
 System.setProperty("apple.laf.useScreenMenuBar", "true");
 System.setProperty("com.apple.mrj.application.apple.menu.about.name", "RipMe");
-logger = Logger.getLogger(App.class);
 logger.info("Initialized ripme v" + UpdateUtils.getThisJarVersion());
 
 if (args.length > 0) {
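
The App.java change above moves logger setup out of main() and into a static initializer, which lets the field be declared final and guarantees it is configured before any code path touches it. A minimal sketch of that pattern (class and message names are illustrative, not from the commit):

    import org.apache.log4j.Logger;

    public class StaticLoggerExample {
        public static final Logger logger;

        static {
            // Runs once when the class is loaded, before main() or any other entry point.
            // In RipMe this is where Utils.configureLogger() is called first.
            logger = Logger.getLogger(StaticLoggerExample.class);
        }

        public static void main(String[] args) {
            logger.info("logger is already configured here");
        }
    }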

E621Ripper.java
@@ -1,4 +1,3 @@
-
 package com.rarchives.ripme.ripper.rippers;
 
 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
@@ -12,19 +11,16 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.apache.log4j.Logger;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
-/**
-*
-* @author
-*/
 public class E621Ripper extends AbstractHTMLRipper{
+private static final Logger logger = Logger.getLogger(E621Ripper.class);
+
 private static Pattern gidPattern=null;
 private static Pattern gidPattern2=null;
 private static Pattern gidPatternPool=null;
@@ -89,7 +85,7 @@ public class E621Ripper extends AbstractHTMLRipper{
 
 addURLToDownload(new URL(page.getElementById("image").absUrl("src")),Utils.getConfigBoolean("download.save_order",true)?url.getRef()+"-":"");
 } catch (IOException ex) {
-Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
+logger.error(ex);
 }
 }
 }));
@@ -121,7 +117,7 @@ public class E621Ripper extends AbstractHTMLRipper{
 
 return Utils.filesystemSafe(prefix+new URI(getTerm(url)).getPath());
 } catch (URISyntaxException ex) {
-Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+logger.error(ex);
 }
 
 throw new MalformedURLException("Expected e621.net URL format: e621.net/post/index/1/searchterm - got "+url+" instead");
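
The E621Ripper hunks above (and the PahealRipper and XbooruRipper hunks further down) all make the same switch from ad-hoc java.util.logging calls to a class-level log4j logger. A hedged sketch of the before/after pattern, with a placeholder class name:

    import org.apache.log4j.Logger;

    public class LoggingMigrationSketch {
        // New style: one static log4j logger per ripper class.
        private static final Logger logger = Logger.getLogger(LoggingMigrationSketch.class);

        public void download() {
            try {
                throw new java.io.IOException("sample failure"); // stand-in for a real download error
            } catch (java.io.IOException ex) {
                // Old style was: Logger.getLogger(Foo.class.getName()).log(Level.SEVERE, null, ex);
                // New style logs through the shared class-level logger:
                logger.error(ex);
            }
        }
    }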

InstagramRipper.java
@@ -43,6 +43,13 @@ public class InstagramRipper extends AbstractHTMLRipper {
 return (url.getHost().endsWith("instagram.com"));
 }
 
+@Override
+public URL sanitizeURL(URL url) throws MalformedURLException {
+URL san_url = new URL(url.toExternalForm().replaceAll("\\?hl=\\S*", ""));
+logger.info("sanitized URL is " + san_url.toExternalForm());
+return san_url;
+}
+
 @Override
 public String getGID(URL url) throws MalformedURLException {
 Pattern p = Pattern.compile("^https?://instagram.com/([^/]+)/?");
@@ -51,7 +58,7 @@ public class InstagramRipper extends AbstractHTMLRipper {
 return m.group(1);
 }
 
-p = Pattern.compile("^https?://www.instagram.com/([^/]+)/?");
+p = Pattern.compile("^https?://www.instagram.com/([^/]+)/?(?:\\?hl=\\S*)?/?");
 m = p.matcher(url.toExternalForm());
 if (m.matches()) {
 return m.group(1);
@@ -62,6 +69,13 @@ public class InstagramRipper extends AbstractHTMLRipper {
 if (m.matches()) {
 return m.group(1);
 }
 
+p = Pattern.compile("^https?://www.instagram.com/explore/tags/([^/]+)/?");
+m = p.matcher(url.toExternalForm());
+if (m.matches()) {
+return m.group(1);
+}
+
 throw new MalformedURLException("Unable to find user in " + url);
 }
@@ -134,11 +148,18 @@ public class InstagramRipper extends AbstractHTMLRipper {
 logger.warn("Unable to exact json from page");
 }
 
-Pattern p = Pattern.compile("^.*instagram\\.com/([a-zA-Z0-9\\-_.]+)/?");
+Pattern p = Pattern.compile("^.*instagram.com/p/([a-zA-Z0-9\\-_.]+)/?");
 Matcher m = p.matcher(url.toExternalForm());
-if (m.matches()) {
-JSONArray profilePage = json.getJSONObject("entry_data").getJSONArray("ProfilePage");
-JSONArray datas = profilePage.getJSONObject(0).getJSONObject("user").getJSONObject("media").getJSONArray("nodes");
+if (!m.matches()) {
+JSONArray datas = new JSONArray();
+try {
+JSONArray profilePage = json.getJSONObject("entry_data").getJSONArray("ProfilePage");
+datas = profilePage.getJSONObject(0).getJSONObject("user").getJSONObject("media").getJSONArray("nodes");
+} catch (JSONException e) {
+// Handle hashtag pages
+datas = json.getJSONObject("entry_data").getJSONArray("TagPage").getJSONObject(0)
+.getJSONObject("tag").getJSONObject("media").getJSONArray("nodes");
+}
 for (int i = 0; i < datas.length(); i++) {
 JSONObject data = (JSONObject) datas.get(i);
 Long epoch = data.getLong("date");
@@ -168,6 +189,21 @@ public class InstagramRipper extends AbstractHTMLRipper {
 }
 // Rip the next page
 if (!nextPageID.equals("") && !isThisATest()) {
+if (url.toExternalForm().contains("/tags/")) {
+try {
+// Sleep for a while to avoid a ban
+sleep(2500);
+if (url.toExternalForm().substring(url.toExternalForm().length() - 1).equals("/")) {
+getURLsFromPage(Http.url(url.toExternalForm() + "?max_id=" + nextPageID).get());
+} else {
+getURLsFromPage(Http.url(url.toExternalForm() + "/?max_id=" + nextPageID).get());
+}
+
+} catch (IOException e) {
+return imageURLs;
+}
+
+}
 try {
 // Sleep for a while to avoid a ban
 sleep(2500);
@@ -175,8 +211,11 @@ public class InstagramRipper extends AbstractHTMLRipper {
 } catch (IOException e) {
 return imageURLs;
 }
+} else {
+logger.warn("Can't get net page");
 }
 } else { // We're ripping from a single page
+logger.info("Ripping from single page");
 if (!doc.select("meta[property=og:video]").attr("content").equals("")) {
 String videoURL = doc.select("meta[property=og:video]").attr("content");
 // We're ripping a page with a video on it
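
A small runnable sketch (not part of the commit) of what the new Instagram URL handling shown above is doing; the regex and the ?hl= replacement are copied from the diff, while the user name and tag in the sample URLs are made up:

    import java.net.MalformedURLException;
    import java.net.URL;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class InstagramUrlSketch {
        public static void main(String[] args) throws MalformedURLException {
            // sanitizeURL(): drop a "?hl=xx" language parameter before ripping.
            URL url = new URL("https://www.instagram.com/someuser/?hl=en");
            URL sanitized = new URL(url.toExternalForm().replaceAll("\\?hl=\\S*", ""));
            System.out.println(sanitized); // prints https://www.instagram.com/someuser/

            // getGID(): the new /explore/tags/ pattern pulls the tag name out of a tag page URL.
            Pattern p = Pattern.compile("^https?://www.instagram.com/explore/tags/([^/]+)/?");
            Matcher m = p.matcher("https://www.instagram.com/explore/tags/cats/");
            if (m.matches()) {
                System.out.println(m.group(1)); // prints cats
            }
        }
    }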

PahealRipper.java
@@ -13,15 +13,16 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.apache.log4j.Logger;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
 public class PahealRipper extends AbstractHTMLRipper {
+private static final Logger logger = Logger.getLogger(PahealRipper.class);
+
 private static Map<String, String> cookies = null;
 private static Pattern gidPattern = null;
 
@@ -93,7 +94,7 @@ public class PahealRipper extends AbstractHTMLRipper {
 + ext);
 addURLToDownload(url, outFile);
 } catch (IOException | URISyntaxException ex) {
-Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+logger.error("Error while downloading URL " + url, ex);
 }
 }
 
@@ -115,7 +116,7 @@ public class PahealRipper extends AbstractHTMLRipper {
 try {
 return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
 } catch (URISyntaxException ex) {
-Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+logger.error(ex);
 }
 
 throw new MalformedURLException("Expected paheal.net URL format: rule34.paheal.net/post/list/searchterm - got " + url + " instead");

PichunterRipper.java
@@ -32,23 +32,23 @@ public class PichunterRipper extends AbstractHTMLRipper {
 
 @Override
 public String getGID(URL url) throws MalformedURLException {
-Pattern p = Pattern.compile("https?://www.pichunter.com/(|tags|models|sites)/([a-zA-Z0-9_-]+)/?");
+Pattern p = Pattern.compile("https?://www.pichunter.com/(|tags|models|sites)/(\\S*)/?");
 Matcher m = p.matcher(url.toExternalForm());
 if (m.matches()) {
 return m.group(2);
 }
-p = Pattern.compile("https?://www.pichunter.com/(tags|models|sites)/([a-zA-Z0-9_-]+)/photos/\\d+/?");
+p = Pattern.compile("https?://www.pichunter.com/(tags|models|sites)/(\\S*)/photos/\\d+/?");
 m = p.matcher(url.toExternalForm());
 if (m.matches()) {
 return m.group(2);
 }
-p = Pattern.compile("https?://www.pichunter.com/tags/all/([a-zA-Z0-9_-]+)/\\d+/?");
+p = Pattern.compile("https?://www.pichunter.com/tags/all/(\\S*)/\\d+/?");
 m = p.matcher(url.toExternalForm());
 if (m.matches()) {
 return m.group(1);
 }
 
-p = Pattern.compile("https?://www.pichunter.com/gallery/\\d+/([a-zA-Z0-9_-]+)/?");
+p = Pattern.compile("https?://www.pichunter.com/gallery/\\d+/(\\S*)/?");
 m = p.matcher(url.toExternalForm());
 if (m.matches()) {
 return m.group(1);
@@ -58,7 +58,7 @@ public class PichunterRipper extends AbstractHTMLRipper {
 }
 
 private boolean isPhotoSet(URL url) {
-Pattern p = Pattern.compile("https?://www.pichunter.com/gallery/\\d+/([a-zA-Z0-9_-]+)/?");
+Pattern p = Pattern.compile("https?://www.pichunter.com/gallery/\\d+/(\\S*)/?");
 Matcher m = p.matcher(url.toExternalForm());
 if (m.matches()) {
 return true;
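
For reference, a quick sketch (not from the commit) of why the pichunter patterns above were loosened: the old [a-zA-Z0-9_-]+ group rejects gallery names containing other characters, while the new \S* group accepts them. The sample URL below is hypothetical:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class PichunterRegexSketch {
        public static void main(String[] args) {
            String url = "https://www.pichunter.com/gallery/12345/Some.Gallery.Name"; // made-up example with dots

            Pattern oldPattern = Pattern.compile("https?://www.pichunter.com/gallery/\\d+/([a-zA-Z0-9_-]+)/?");
            Pattern newPattern = Pattern.compile("https?://www.pichunter.com/gallery/\\d+/(\\S*)/?");

            System.out.println(oldPattern.matcher(url).matches()); // false: '.' is not in the old character class
            Matcher m = newPattern.matcher(url);
            System.out.println(m.matches());                       // true
            System.out.println(m.group(1));                        // Some.Gallery.Name
        }
    }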

XbooruRipper.java
@@ -10,14 +10,15 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.apache.log4j.Logger;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 
 public class XbooruRipper extends AbstractHTMLRipper {
+private static final Logger logger = Logger.getLogger(XbooruRipper.class);
+
 private static Pattern gidPattern = null;
 
 public XbooruRipper(URL url) throws IOException {
@@ -87,7 +88,7 @@ public class XbooruRipper extends AbstractHTMLRipper {
 try {
 return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
 } catch (URISyntaxException ex) {
-Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+logger.error(ex);
 }
 
 throw new MalformedURLException("Expected xbooru.com URL format: xbooru.com/index.php?tags=searchterm - got " + url + " instead");

UpdateUtils.java
@@ -21,7 +21,7 @@ import com.rarchives.ripme.utils.Utils;
 public class UpdateUtils {
 
 private static final Logger logger = Logger.getLogger(UpdateUtils.class);
-private static final String DEFAULT_VERSION = "1.6.12";
+private static final String DEFAULT_VERSION = "1.6.13";
 private static final String REPO_NAME = "ripmeapp/ripme";
 private static final String updateJsonURL = "https://raw.githubusercontent.com/" + REPO_NAME + "/master/ripme.json";
 private static final String mainFileName = "ripme.jar";

AES.java (deleted)
@@ -1,57 +0,0 @@
-package com.rarchives.ripme.utils;
-
-import java.lang.reflect.Field;
-import java.util.Arrays;
-
-import javax.crypto.Cipher;
-import javax.crypto.SecretKey;
-import javax.crypto.spec.IvParameterSpec;
-import javax.crypto.spec.SecretKeySpec;
-
-class AES {
-
-/**
-* Hack to get JCE Unlimited Strenght so we can use weird AES encryption stuff.
-* From http://stackoverflow.com/a/20286961
-*/
-static {
-try {
-Field field = Class.forName("javax.crypto.JceSecurity").getDeclaredField("isRestricted");
-if (!field.isAccessible()) {
-field.setAccessible(true);
-field.set(null, java.lang.Boolean.FALSE);
-}
-} catch (Exception ex) {
-// Assume it's fine.
-}
-}
-
-public static String decrypt(String cipherText, String key, int nBits) throws Exception {
-String res = null;
-nBits = nBits / 8;
-byte[] data = Base64.decode(cipherText);
-byte[] k = Arrays.copyOf(key.getBytes(), nBits);
-
-Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
-SecretKey secretKey = generateSecretKey(k, nBits);
-byte[] nonceBytes = Arrays.copyOf(Arrays.copyOf(data, 8), nBits / 2);
-IvParameterSpec nonce = new IvParameterSpec(nonceBytes);
-cipher.init(Cipher.ENCRYPT_MODE, secretKey, nonce);
-res = new String(cipher.doFinal(data, 8, data.length - 8));
-return res;
-}
-
-private static SecretKey generateSecretKey(byte[] keyBytes, int nBits) throws Exception {
-try {
-SecretKey secretKey = new SecretKeySpec(keyBytes, "AES");
-Cipher cipher = Cipher.getInstance("AES/ECB/NoPadding");
-cipher.init(Cipher.ENCRYPT_MODE, secretKey);
-keyBytes = cipher.doFinal(keyBytes);
-} catch (Throwable e1) {
-e1.printStackTrace();
-return null;
-}
-System.arraycopy(keyBytes, 0, keyBytes, nBits / 2, nBits / 2);
-return new SecretKeySpec(keyBytes, "AES");
-}
-}

Http.java
@@ -23,7 +23,7 @@ import com.rarchives.ripme.ripper.AbstractRipper;
 public class Http {
 
 private static final int TIMEOUT = Utils.getConfigInteger("page.timeout", 5 * 1000);
-private static final Logger logger = Logger.getLogger(AbstractRipper.class);
+private static final Logger logger = Logger.getLogger(Http.class);
 
 private int retries;
 private String url;

BasicRippersTest.java
@@ -215,12 +215,15 @@ public class BasicRippersTest extends RippersTest {
 testRipper(ripper);
 }
 
+/*
+// https://github.com/RipMeApp/ripme/issues/229 : Disabled test (temporary) : BasicRippersTest#testPhotobucketRip (timing out)
 public void testPhotobucketRip() throws IOException {
 AbstractRipper ripper = new PhotobucketRipper(new URL("http://s844.photobucket.com/user/SpazzySpizzy/library/Album%20Covers?sort=3&page=1"));
 testRipper(ripper);
 deleteSubdirs(ripper.getWorkingDir());
 deleteDir(ripper.getWorkingDir());
 }
+*/
 
 public void testPornhubRip() throws IOException {
 AbstractRipper ripper = new PornhubRipper(new URL("https://www.pornhub.com/album/15680522"));
@@ -290,6 +293,7 @@ public class BasicRippersTest extends RippersTest {
 testRipper(ripper);
 }
 
+// https://github.com/RipMeApp/ripme/issues/206 : Disabled test : BasicRippersTest#testXhamsterAlbums -- fix and re-enable
 // public void testXhamsterAlbums() throws IOException {
 // XhamsterRipper ripper = new XhamsterRipper(new URL("https://xhamster.com/photos/gallery/sexy-preggo-girls-9026608"));
 // testRipper(ripper);

ImagefapRipperTest.java
@@ -11,6 +11,10 @@ public class ImagefapRipperTest extends RippersTest {
 
 public void testImagefapAlbums() throws IOException {
 Map<URL, String> testURLs = new HashMap<>();
+
+/*
+Temporarily disabled test. See issue https://github.com/RipMeApp/ripme/issues/226
+
 // Album with specific title
 testURLs.put(new URL("http://www.imagefap.com/pictures/4649440/Frozen-%28Elsa-and-Anna%29?view=2"),
 "Frozen (Elsa and Anna)");
@@ -18,6 +22,7 @@ public class ImagefapRipperTest extends RippersTest {
 // New URL format
 testURLs.put(new URL("http://www.imagefap.com/gallery.php?pgid=fffd68f659befa5535cf78f014e348f1"),
 "imagefap_fffd68f659befa5535cf78f014e348f1");
+*/
 
 for (URL url : testURLs.keySet()) {
 ImagefapRipper ripper = new ImagefapRipper(url);