Merge branch 'master' of https://github.com/RipMeApp/ripme into logger

# Conflicts:
#	src/main/java/com/rarchives/ripme/ripper/VideoRipper.java
#	src/main/java/com/rarchives/ripme/ripper/rippers/VscoRipper.java
commit ccaa2edeb0
Author: Gaboso
Date:   2018-06-04 22:36:14 -04:00

8 changed files with 178 additions and 112 deletions

View File

@@ -37,6 +37,7 @@ For information about running the `.jar` file, see [the How To Run wiki](https:/
 * Built in updater
 * Can rip images from tumblr in the size they were uploaded in [See here for how to enable](https://github.com/RipMeApp/ripme/wiki/Config-options#tumblrget_raw_image)
 * Skips already downloaded images by default
+* Can auto skip e-hentai and nhentai albums containing certain tags [See here for how to enable](https://github.com/RipMeApp/ripme/wiki/Config-options#nhentaiblacklisttags)
 
 ## [List of Supported Sites](https://github.com/ripmeapp/ripme/wiki/Supported-Sites)

View File

@@ -1 +1,2 @@
 mvn clean compile assembly:single
+mvn io.github.zlika:reproducible-build-maven-plugin:0.6:strip-jar

View File

@@ -1,2 +1,4 @@
 #!/usr/bin/env bash
 mvn clean compile assembly:single
+# Strip the jar of any non-reproducible metadata such as timestamps
+mvn io.github.zlika:reproducible-build-maven-plugin:0.6:strip-jar

View File

@@ -77,6 +77,8 @@ subprocess.call(['git', 'commit', '-m', commitMessage])
 subprocess.call(['git', 'tag', nextVersion])
 print("Building ripme")
 subprocess.call(["mvn", "clean", "compile", "assembly:single"])
+print("Stripping jar")
+subprocess.call(["mvn", "io.github.zlika:reproducible-build-maven-plugin:0.6:strip-jar"])
 print("Hashing .jar file")
 openedFile = open("./target/ripme-{}-jar-with-dependencies.jar".format(nextVersion), "rb")
 readFile = openedFile.read()

View File

@@ -61,6 +61,11 @@
     </dependencies>
     <build>
         <plugins>
+            <plugin>
+                <groupId>io.github.zlika</groupId>
+                <artifactId>reproducible-build-maven-plugin</artifactId>
+                <version>0.6</version>
+            </plugin>
             <plugin>
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>

View File

@@ -1,5 +1,9 @@
 package com.rarchives.ripme.ripper;
 
+import com.rarchives.ripme.ui.RipStatusMessage;
+import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
+import com.rarchives.ripme.utils.Utils;
+
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -7,28 +11,27 @@ import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Map;
 
-import com.rarchives.ripme.ui.RipStatusMessage;
-import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
-import com.rarchives.ripme.utils.Utils;
-
 public abstract class VideoRipper extends AbstractRipper {
 
-    private int bytesTotal = 1,
-            bytesCompleted = 1;
+    private int bytesTotal = 1;
+    private int bytesCompleted = 1;
 
     protected VideoRipper(URL url) throws IOException {
         super(url);
     }
 
-    public abstract boolean canRip(URL url);
     public abstract void rip() throws IOException;
+
     public abstract String getHost();
+
     public abstract String getGID(URL url) throws MalformedURLException;
 
     @Override
     public void setBytesTotal(int bytes) {
         this.bytesTotal = bytes;
     }
 
     @Override
     public void setBytesCompleted(int bytes) {
         this.bytesCompleted = bytes;
@@ -55,8 +58,7 @@ public abstract class VideoRipper extends AbstractRipper {
                 LOGGER.error("Error while writing to " + urlFile, e);
                 return false;
             }
-        }
-        else {
+        } else {
             if (isThisATest()) {
                 // Tests shouldn't download the whole video
                 // Just change this.url to the download URL so the test knows we found it.
@@ -70,34 +72,36 @@ public abstract class VideoRipper extends AbstractRipper {
     }
 
     @Override
-    public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
+    public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String, String> cookies, Boolean getFileExtFromMIME) {
         return addURLToDownload(url, saveAs);
     }
 
     /**
      * Creates & sets working directory based on URL.
-     * @param url
-     *      Target URL
+     *
+     * @param url Target URL
      */
     @Override
     public void setWorkingDir(URL url) throws IOException {
         String path = Utils.getWorkingDirectory().getCanonicalPath();
         if (!path.endsWith(File.separator)) {
             path += File.separator;
         }
         path += "videos" + File.separator;
-        this.workingDir = new File(path);
-        if (!this.workingDir.exists()) {
-            LOGGER.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir));
-            this.workingDir.mkdirs();
+        workingDir = new File(path);
+
+        if (!workingDir.exists()) {
+            LOGGER.info("[+] Creating directory: " + Utils.removeCWD(workingDir));
+            workingDir.mkdirs();
         }
-        LOGGER.debug("Set working directory to: " + this.workingDir);
+
+        LOGGER.debug("Set working directory to: " + workingDir);
     }
 
     /**
-     * @return
-     *      Returns % of video done downloading.
+     * @return Returns % of video done downloading.
      */
     @Override
     public int getCompletionPercentage() {
@@ -106,16 +110,16 @@ public abstract class VideoRipper extends AbstractRipper {
     /**
      * Runs if download successfully completed.
-     * @param url
-     *      Target URL
-     * @param saveAs
-     *      Path to file, including filename.
+     *
+     * @param url    Target URL
+     * @param saveAs Path to file, including filename.
      */
     @Override
     public void downloadCompleted(URL url, File saveAs) {
         if (observer == null) {
             return;
         }
+
         try {
             String path = Utils.removeCWD(saveAs);
             RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
@@ -129,59 +133,55 @@ public abstract class VideoRipper extends AbstractRipper {
     /**
      * Runs if the download errored somewhere.
-     * @param url
-     *      Target URL
-     * @param reason
-     *      Reason why the download failed.
+     *
+     * @param url    Target URL
+     * @param reason Reason why the download failed.
      */
     @Override
     public void downloadErrored(URL url, String reason) {
         if (observer == null) {
             return;
         }
         observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_ERRORED, url + " : " + reason));
         checkIfComplete();
     }
 
     /**
      * Runs if user tries to redownload an already existing File.
-     * @param url
-     *      Target URL
-     * @param file
-     *      Existing file
+     *
+     * @param url  Target URL
+     * @param file Existing file
     */
     @Override
     public void downloadExists(URL url, File file) {
         if (observer == null) {
             return;
         }
         observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " already saved as " + file));
         checkIfComplete();
     }
 
     /**
      * Gets the status and changes it to a human-readable form.
-     * @return
-     *      Status of current download.
+     *
+     * @return Status of current download.
     */
     @Override
     public String getStatusText() {
-        StringBuilder sb = new StringBuilder();
-        sb.append(getCompletionPercentage())
-          .append("% ")
-          .append(" - ")
-          .append(Utils.bytesToHumanReadable(bytesCompleted))
-          .append(" / ")
-          .append(Utils.bytesToHumanReadable(bytesTotal));
-        return sb.toString();
+        return String.valueOf(getCompletionPercentage()) +
+                "% - " +
+                Utils.bytesToHumanReadable(bytesCompleted) +
+                " / " +
+                Utils.bytesToHumanReadable(bytesTotal);
     }
 
-    @Override
     /**
      * Sanitizes URL.
      * Usually just returns itself.
     */
+    @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
     }
@@ -194,8 +194,10 @@ public abstract class VideoRipper extends AbstractRipper {
         if (observer == null) {
             return;
         }
+
         if (bytesCompleted >= bytesTotal) {
             super.checkIfComplete();
         }
     }
 }
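
Note on the VideoRipper change above: the combined field declaration is split into two lines, but both counters keep their initial value of 1. Presumably this keeps the integer percentage math safe before setBytesTotal() is called. A minimal stand-alone sketch of that idea (the real getCompletionPercentage() body is not part of this diff, so the formula below is only illustrative):

    // Hypothetical illustration; not the actual getCompletionPercentage() implementation.
    public class PercentageSketch {
        private static int percentDone(int bytesCompleted, int bytesTotal) {
            // bytesTotal starts at 1 in VideoRipper, so this never divides by zero
            return (int) (100 * (bytesCompleted / (float) bytesTotal));
        }

        public static void main(String[] args) {
            System.out.println(percentDone(0, 1));      // 0  (before any size is known)
            System.out.println(percentDone(512, 2048)); // 25
        }
    }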

View File

@@ -6,10 +6,11 @@ import com.rarchives.ripme.utils.Http;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.json.JSONObject;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
@@ -19,7 +20,11 @@ import org.jsoup.select.Elements;
 /**
  * For ripping VSCO pictures.
  */
-public class VscoRipper extends AbstractHTMLRipper{
+public class VscoRipper extends AbstractHTMLRipper {
+
+    int pageNumber = 1;
+    JSONObject profileJSON;
 
     private static final String DOMAIN = "vsco.co",
                                 HOST = "vsco";
@@ -76,30 +81,20 @@ public class VscoRipper extends AbstractHTMLRipper{
                 LOGGER.debug("Failed to convert " + url.toString() + " to external form.");
             }
-        } else {//want to rip a member profile
-            /*
-            String baseURL = "https://vsco.co";
-
-            //Find all the relative links, adds Base URL, then adds them to an ArrayList
-            List<URL> relativeLinks = new ArrayList<>();
-            Elements links = page.getElementsByTag("a");
-
-            for(Element link : links){
-                System.out.println(link.toString());
-
-                //if link includes "/media/", add it to the list
-                if (link.attr("href").contains("/media")) {
-                    try {
-                        String relativeURL = vscoImageToURL(link.attr("href"));
-                        toRip.add(baseURL + relativeURL);
-                    } catch (IOException ex) {
-                        LOGGER.debug("Could not add \"" + link.toString() + "\" to list for ripping.");
-                    }
-                }
-            }
-            */
-            LOGGER.debug("Sorry, RipMe currently only supports ripping single images.");
+        } else {
+            String username = getUserName();
+            String userTkn = getUserTkn(username);
+            String siteID = getSiteID(userTkn, username);
+            while (true) {
+                profileJSON = getProfileJSON(userTkn, username, Integer.toString(pageNumber), siteID);
+                for (int i = 0; i < profileJSON.getJSONArray("media").length(); i++) {
+                    toRip.add("https://" + profileJSON.getJSONArray("media").getJSONObject(i).getString("responsive_url"));
+                }
+                if (pageNumber * 1000 > profileJSON.getInt("total")) {
+                    return toRip;
+                }
+                pageNumber++;
+            }
         }
@@ -107,6 +102,59 @@ public class VscoRipper extends AbstractHTMLRipper{
 
         return toRip;
     }
 
+    private String getUserTkn(String username) {
+        String userinfoPage = "https://vsco.co/content/Static/userinfo";
+        String referer = "https://vsco.co/" + username + "/images/1";
+        Map<String,String> cookies = new HashMap<>();
+        cookies.put("vs_anonymous_id", UUID.randomUUID().toString());
+        try {
+            Element doc = Http.url(userinfoPage).cookies(cookies).referrer(referer).ignoreContentType().get().body();
+            String json = doc.text().replaceAll("define\\(", "");
+            json = json.replaceAll("\\)", "");
+            return new JSONObject(json).getString("tkn");
+        } catch (IOException e) {
+            LOGGER.error("Could not get user tkn");
+            return null;
+        }
+    }
+
+    private String getUserName() {
+        Pattern p = Pattern.compile("^https?://vsco.co/([a-zA-Z0-9]+)/images/[0-9]+");
+        Matcher m = p.matcher(url.toExternalForm());
+
+        if (m.matches()) {
+            String user = m.group(1);
+            return user;
+        }
+        return null;
+    }
+
+    private JSONObject getProfileJSON(String tkn, String username, String page, String siteId) {
+        String size = "1000";
+        String purl = "https://vsco.co/ajxp/" + tkn + "/2.0/medias?site_id=" + siteId + "&page=" + page + "&size=" + size;
+        Map<String,String> cookies = new HashMap<>();
+        cookies.put("vs", tkn);
+        try {
+            JSONObject j = Http.url(purl).cookies(cookies).getJSON();
+            return j;
+        } catch (IOException e) {
+            LOGGER.error("Could not profile images");
+            return null;
+        }
+    }
+
+    private String getSiteID(String tkn, String username) {
+        Map<String,String> cookies = new HashMap<>();
+        cookies.put("vs", tkn);
+        try {
+            JSONObject j = Http.url("https://vsco.co/ajxp/" + tkn + "/2.0/sites?subdomain=" + username).cookies(cookies).getJSON();
+            return Integer.toString(j.getJSONArray("sites").getJSONObject(0).getInt("id"));
+        } catch (IOException e) {
+            LOGGER.error("Could not get site id");
+            return null;
+        }
+    }
+
     private String vscoImageToURL(String url) throws IOException{
         Document page = Jsoup.connect(url).userAgent(USER_AGENT)
                             .get();
@@ -177,11 +225,6 @@ public class VscoRipper extends AbstractHTMLRipper{
         return Http.url(url).get();
     }
 
-    @Override
-    public Document getNextPage(Document doc) throws IOException {
-        return super.getNextPage(doc);
-    }
-
     @Override
     public void downloadURL(URL url, int index) {
         addURLToDownload(url, getPrefix(index));
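
For context on the new VscoRipper profile path above: it resolves a token and site id, then pages through the media JSON 1000 items at a time until pageNumber * 1000 exceeds the reported total. A self-contained sketch of that paging loop, with fetchPage() as a hypothetical stub standing in for getProfileJSON() and the repo's Http helper (the sample URL and total are invented for illustration):

    import org.json.JSONArray;
    import org.json.JSONObject;

    public class VscoPagingSketch {
        // Hypothetical stub: the real code calls getProfileJSON(), which fetches
        // https://vsco.co/ajxp/<tkn>/2.0/medias with the "vs" cookie, as in the diff above.
        private static JSONObject fetchPage(int page) {
            JSONObject media = new JSONObject().put("responsive_url", "im.vsco.co/demo/" + page + ".jpg");
            return new JSONObject()
                    .put("media", new JSONArray().put(media))
                    .put("total", 2500); // pretend the profile reports 2500 images
        }

        public static void main(String[] args) {
            int pageNumber = 1;
            while (true) {
                JSONObject profileJSON = fetchPage(pageNumber);
                JSONArray media = profileJSON.getJSONArray("media");
                for (int i = 0; i < media.length(); i++) {
                    System.out.println("https://" + media.getJSONObject(i).getString("responsive_url"));
                }
                // Stop once the pages requested so far cover the reported total (3 pages here).
                if (pageNumber * 1000 > profileJSON.getInt("total")) {
                    break;
                }
                pageNumber++;
            }
        }
    }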

View File

@@ -39,6 +39,20 @@ public class UpdateUtils {
         }
         return thisVersion;
     }
 
+    private static String getChangeList(JSONObject rj) {
+        JSONArray jsonChangeList = rj.getJSONArray("changeList");
+        StringBuilder changeList = new StringBuilder();
+        for (int i = 0; i < jsonChangeList.length(); i++) {
+            String change = jsonChangeList.getString(i);
+            if (change.startsWith(UpdateUtils.getThisJarVersion() + ":")) {
+                break;
+            }
+            changeList.append("\n").append(change);
+        }
+        return changeList.toString();
+    }
+
     public static void updateProgramCLI() {
         logger.info("Checking for update...");
@@ -61,15 +75,10 @@
         }
         String jsonString = doc.body().html().replaceAll("&quot;", "\"");
         ripmeJson = new JSONObject(jsonString);
-        JSONArray jsonChangeList = ripmeJson.getJSONArray("changeList");
-        StringBuilder changeList = new StringBuilder();
-        for (int i = 0; i < jsonChangeList.length(); i++) {
-            String change = jsonChangeList.getString(i);
-            if (change.startsWith(UpdateUtils.getThisJarVersion() + ":")) {
-                break;
-            }
-            changeList.append("<br> + ").append(change);
-        }
+
+        String changeList = getChangeList(ripmeJson);
+
+        logger.info("Change log: \n" + changeList);
 
         String latestVersion = ripmeJson.getString("latestVersion");
         if (UpdateUtils.isNewerVersion(latestVersion)) {
@@ -111,15 +120,8 @@
         }
         String jsonString = doc.body().html().replaceAll("&quot;", "\"");
         ripmeJson = new JSONObject(jsonString);
-        JSONArray jsonChangeList = ripmeJson.getJSONArray("changeList");
-        StringBuilder changeList = new StringBuilder();
-        for (int i = 0; i < jsonChangeList.length(); i++) {
-            String change = jsonChangeList.getString(i);
-            if (change.startsWith(UpdateUtils.getThisJarVersion() + ":")) {
-                break;
-            }
-            changeList.append("<br> + ").append(change);
-        }
+
+        String changeList = getChangeList(ripmeJson);
 
         String latestVersion = ripmeJson.getString("latestVersion");
         if (UpdateUtils.isNewerVersion(latestVersion)) {
@@ -127,7 +129,7 @@
             int result = JOptionPane.showConfirmDialog(
                     null,
                     "<html><font color=\"green\">New version (" + latestVersion + ") is available!</font>"
-                            + "<br><br>Recent changes:" + changeList.toString()
+                            + "<br><br>Recent changes:" + changeList
                            + "<br><br>Do you want to download and run the newest version?</html>",
                     "RipMe Updater",
                     JOptionPane.YES_NO_OPTION);
@@ -157,6 +159,11 @@
     }
 
     private static boolean isNewerVersion(String latestVersion) {
+        // If we're testing the update utils we want the program to always try to update
+        if (Utils.getConfigBoolean("testing.always_try_to_update", false)) {
+            logger.info("isNewerVersion is returning true because the key \"testing.always_try_to_update\" is true");
+            return true;
+        }
         int[] oldVersions = versionStringToInt(getThisJarVersion());
         int[] newVersions = versionStringToInt(latestVersion);
         if (oldVersions.length < newVersions.length) {
@@ -227,6 +234,8 @@
         try (FileOutputStream out = new FileOutputStream(updateFileName)) {
             out.write(response.bodyAsBytes());
         }
+        // Only check the hash if the user hasn't disabled hash checking
+        if (Utils.getConfigBoolean("security.check_update_hash", true)) {
             String updateHash = createSha256(new File(updateFileName));
             logger.info("Download of new version complete; saved to " + updateFileName);
             logger.info("Checking hash of update");
@@ -239,6 +248,7 @@
             } else {
                 logger.info("Hash is good");
             }
+        }
         if (shouldLaunch) {
             // Setup updater script
             final String batchFile, script;
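
To illustrate the extracted getChangeList() helper above: it collects changelog entries until it hits the entry prefixed with the running version, so only changes newer than the installed build are shown. A small sketch against a made-up payload (the version numbers and messages are invented; the real JSON comes from the update endpoint):

    import org.json.JSONArray;
    import org.json.JSONObject;

    public class ChangeListSketch {
        // Mirrors the logic of UpdateUtils.getChangeList(); "1.7.50" stands in for getThisJarVersion().
        static String getChangeList(JSONObject rj, String thisVersion) {
            JSONArray jsonChangeList = rj.getJSONArray("changeList");
            StringBuilder changeList = new StringBuilder();
            for (int i = 0; i < jsonChangeList.length(); i++) {
                String change = jsonChangeList.getString(i);
                if (change.startsWith(thisVersion + ":")) {
                    break; // everything from here down shipped with the installed build
                }
                changeList.append("\n").append(change);
            }
            return changeList.toString();
        }

        public static void main(String[] args) {
            JSONObject rj = new JSONObject().put("changeList", new JSONArray()
                    .put("1.7.52: example newer change")
                    .put("1.7.51: another example change")
                    .put("1.7.50: change shipped with the installed version"));
            // Prints only the two entries newer than 1.7.50.
            System.out.println(getChangeList(rj, "1.7.50"));
        }
    }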