Merge branch 'master' of https://github.com/RipMeApp/ripme into logger
# Conflicts:
#	src/main/java/com/rarchives/ripme/ripper/VideoRipper.java
#	src/main/java/com/rarchives/ripme/ripper/rippers/VscoRipper.java

commit ccaa2edeb0
@@ -37,6 +37,7 @@ For information about running the `.jar` file, see [the How To Run wiki](https:/
* Built in updater
* Can rip images from tumblr in the size they were uploaded in [See here for how to enable](https://github.com/RipMeApp/ripme/wiki/Config-options#tumblrget_raw_image)
* Skips already downloaded images by default
* Can auto skip e-hentai and nhentai albums containing certain tags [See here for how to enable](https://github.com/RipMeApp/ripme/wiki/Config-options#nhentaiblacklisttags)

## [List of Supported Sites](https://github.com/ripmeapp/ripme/wiki/Supported-Sites)
@@ -1 +1,2 @@
mvn clean compile assembly:single
mvn io.github.zlika:reproducible-build-maven-plugin:0.6:strip-jar
build.sh
@@ -1,2 +1,4 @@
#!/usr/bin/env bash
mvn clean compile assembly:single
# Strip the jar of any non-reproducible metadata such as timestamps
mvn io.github.zlika:reproducible-build-maven-plugin:0.6:strip-jar
patch.py
@@ -77,6 +77,8 @@ subprocess.call(['git', 'commit', '-m', commitMessage])
subprocess.call(['git', 'tag', nextVersion])
print("Building ripme")
subprocess.call(["mvn", "clean", "compile", "assembly:single"])
print("Stripping jar")
subprocess.call(["mvn", "io.github.zlika:reproducible-build-maven-plugin:0.6:strip-jar"])
print("Hashing .jar file")
openedFile = open("./target/ripme-{}-jar-with-dependencies.jar".format(nextVersion), "rb")
readFile = openedFile.read()
pom.xml
@@ -61,6 +61,11 @@
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>io.github.zlika</groupId>
                <artifactId>reproducible-build-maven-plugin</artifactId>
                <version>0.6</version>
            </plugin>
            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <configuration>
VideoRipper.java
@@ -1,5 +1,9 @@
package com.rarchives.ripme.ripper;

import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
@@ -7,28 +11,27 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;

import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;

public abstract class VideoRipper extends AbstractRipper {

    private int bytesTotal = 1,
            bytesCompleted = 1;
    private int bytesTotal = 1;
    private int bytesCompleted = 1;

    protected VideoRipper(URL url) throws IOException {
        super(url);
    }

    public abstract boolean canRip(URL url);
    public abstract void rip() throws IOException;

    public abstract String getHost();

    public abstract String getGID(URL url) throws MalformedURLException;

    @Override
    public void setBytesTotal(int bytes) {
        this.bytesTotal = bytes;
    }

    @Override
    public void setBytesCompleted(int bytes) {
        this.bytesCompleted = bytes;
@@ -55,8 +58,7 @@ public abstract class VideoRipper extends AbstractRipper {
            LOGGER.error("Error while writing to " + urlFile, e);
            return false;
        }
    }
    else {
    } else {
        if (isThisATest()) {
            // Tests shouldn't download the whole video
            // Just change this.url to the download URL so the test knows we found it.
@@ -70,34 +72,36 @@ public abstract class VideoRipper extends AbstractRipper {
    }

    @Override
    public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
    public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String, String> cookies, Boolean getFileExtFromMIME) {
        return addURLToDownload(url, saveAs);
    }

    /**
     * Creates & sets working directory based on URL.
     * @param url
     *      Target URL
     *
     * @param url Target URL
     */
    @Override
    public void setWorkingDir(URL url) throws IOException {
        String path = Utils.getWorkingDirectory().getCanonicalPath();

        if (!path.endsWith(File.separator)) {
            path += File.separator;
        }

        path += "videos" + File.separator;
        this.workingDir = new File(path);
        if (!this.workingDir.exists()) {
            LOGGER.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir));
            this.workingDir.mkdirs();
        workingDir = new File(path);

        if (!workingDir.exists()) {
            LOGGER.info("[+] Creating directory: " + Utils.removeCWD(workingDir));
            workingDir.mkdirs();
        }
        LOGGER.debug("Set working directory to: " + this.workingDir);

        LOGGER.debug("Set working directory to: " + workingDir);
    }

    /**
     * @return
     *      Returns % of video done downloading.
     * @return Returns % of video done downloading.
     */
    @Override
    public int getCompletionPercentage() {
@@ -106,16 +110,16 @@ public abstract class VideoRipper extends AbstractRipper {

    /**
     * Runs if download successfully completed.
     * @param url
     *      Target URL
     * @param saveAs
     *      Path to file, including filename.
     *
     * @param url Target URL
     * @param saveAs Path to file, including filename.
     */
    @Override
    public void downloadCompleted(URL url, File saveAs) {
        if (observer == null) {
            return;
        }

        try {
            String path = Utils.removeCWD(saveAs);
            RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
@@ -129,59 +133,55 @@ public abstract class VideoRipper extends AbstractRipper {

    /**
     * Runs if the download errored somewhere.
     * @param url
     *      Target URL
     * @param reason
     *      Reason why the download failed.
     *
     * @param url Target URL
     * @param reason Reason why the download failed.
     */
    @Override
    public void downloadErrored(URL url, String reason) {
        if (observer == null) {
            return;
        }

        observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_ERRORED, url + " : " + reason));
        checkIfComplete();
    }

    /**
     * Runs if user tries to redownload an already existing File.
     * @param url
     *      Target URL
     * @param file
     *      Existing file
     *
     * @param url Target URL
     * @param file Existing file
     */
    @Override
    public void downloadExists(URL url, File file) {
        if (observer == null) {
            return;
        }

        observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " already saved as " + file));
        checkIfComplete();
    }

    /**
     * Gets the status and changes it to a human-readable form.
     * @return
     *      Status of current download.
     *
     * @return Status of current download.
     */
    @Override
    public String getStatusText() {
        StringBuilder sb = new StringBuilder();
        sb.append(getCompletionPercentage())
          .append("% ")
          .append(" - ")
          .append(Utils.bytesToHumanReadable(bytesCompleted))
          .append(" / ")
          .append(Utils.bytesToHumanReadable(bytesTotal));
        return sb.toString();
        return String.valueOf(getCompletionPercentage()) +
                "% - " +
                Utils.bytesToHumanReadable(bytesCompleted) +
                " / " +
                Utils.bytesToHumanReadable(bytesTotal);
    }

    @Override
    /**
     * Sanitizes URL.
     * Usually just returns itself.
     */
    @Override
    public URL sanitizeURL(URL url) throws MalformedURLException {
        return url;
    }
@@ -194,8 +194,10 @@ public abstract class VideoRipper extends AbstractRipper {
        if (observer == null) {
            return;
        }

        if (bytesCompleted >= bytesTotal) {
            super.checkIfComplete();
        }
    }

}
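The `@@ -106` hunk above cuts off right at the `getCompletionPercentage()` declaration, so its body is not part of this diff. Based on the `bytesCompleted` and `bytesTotal` fields it works with, the percentage is presumably computed along these lines; this is a hedged sketch, not the file's actual code:

```java
// Hypothetical sketch of getCompletionPercentage(), inferred from the
// bytesCompleted / bytesTotal fields shown in the diff above.
// The real implementation in VideoRipper.java may differ.
public int getCompletionPercentage() {
    // bytesTotal defaults to 1, so this cannot divide by zero before
    // setBytesTotal() has been called with the real size.
    return (int) (100 * (bytesCompleted / (float) bytesTotal));
}
```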
VscoRipper.java
@@ -6,10 +6,11 @@ import com.rarchives.ripme.utils.Http;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.json.JSONObject;
import org.jsoup.Jsoup;

import org.jsoup.nodes.Document;
@@ -19,7 +20,11 @@ import org.jsoup.select.Elements;
/**
 * For ripping VSCO pictures.
 */
public class VscoRipper extends AbstractHTMLRipper{
public class VscoRipper extends AbstractHTMLRipper {

    int pageNumber = 1;
    JSONObject profileJSON;

    private static final String DOMAIN = "vsco.co",
                                HOST = "vsco";
@@ -76,30 +81,20 @@ public class VscoRipper extends AbstractHTMLRipper{
                LOGGER.debug("Failed to convert " + url.toString() + " to external form.");
            }

        } else {//want to rip a member profile
            /*
            String baseURL = "https://vsco.co";

            //Find all the relative links, adds Base URL, then adds them to an ArrayList
            List<URL> relativeLinks = new ArrayList<>();
            Elements links = page.getElementsByTag("a");

            for(Element link : links){
                System.out.println(link.toString());
                //if link includes "/media/", add it to the list
                if (link.attr("href").contains("/media")) {
                    try {
                        String relativeURL = vscoImageToURL(link.attr("href"));
                        toRip.add(baseURL + relativeURL);
                    } catch (IOException ex) {
                        LOGGER.debug("Could not add \"" + link.toString() + "\" to list for ripping.");
                    }
                }
        } else {
            String username = getUserName();
            String userTkn = getUserTkn(username);
            String siteID = getSiteID(userTkn, username);
            while (true) {
                profileJSON = getProfileJSON(userTkn, username, Integer.toString(pageNumber), siteID);
                for (int i = 0; i < profileJSON.getJSONArray("media").length(); i++) {
                    toRip.add("https://" + profileJSON.getJSONArray("media").getJSONObject(i).getString("responsive_url"));
                }
                if (pageNumber * 1000 > profileJSON.getInt("total")) {
                    return toRip;
                }
                pageNumber++;
            }
            */
            LOGGER.debug("Sorry, RipMe currently only supports ripping single images.");

        }
@@ -107,6 +102,59 @@ public class VscoRipper extends AbstractHTMLRipper{
        return toRip;
    }

    private String getUserTkn(String username) {
        String userinfoPage = "https://vsco.co/content/Static/userinfo";
        String referer = "https://vsco.co/" + username + "/images/1";
        Map<String,String> cookies = new HashMap<>();
        cookies.put("vs_anonymous_id", UUID.randomUUID().toString());
        try {
            Element doc = Http.url(userinfoPage).cookies(cookies).referrer(referer).ignoreContentType().get().body();
            String json = doc.text().replaceAll("define\\(", "");
            json = json.replaceAll("\\)", "");
            return new JSONObject(json).getString("tkn");
        } catch (IOException e) {
            LOGGER.error("Could not get user tkn");
            return null;
        }
    }

    private String getUserName() {
        Pattern p = Pattern.compile("^https?://vsco.co/([a-zA-Z0-9]+)/images/[0-9]+");
        Matcher m = p.matcher(url.toExternalForm());

        if (m.matches()) {
            String user = m.group(1);
            return user;
        }
        return null;
    }

    private JSONObject getProfileJSON(String tkn, String username, String page, String siteId) {
        String size = "1000";
        String purl = "https://vsco.co/ajxp/" + tkn + "/2.0/medias?site_id=" + siteId + "&page=" + page + "&size=" + size;
        Map<String,String> cookies = new HashMap<>();
        cookies.put("vs", tkn);
        try {
            JSONObject j = Http.url(purl).cookies(cookies).getJSON();
            return j;
        } catch (IOException e) {
            LOGGER.error("Could not profile images");
            return null;
        }
    }

    private String getSiteID(String tkn, String username) {
        Map<String,String> cookies = new HashMap<>();
        cookies.put("vs", tkn);
        try {
            JSONObject j = Http.url("https://vsco.co/ajxp/" + tkn + "/2.0/sites?subdomain=" + username).cookies(cookies).getJSON();
            return Integer.toString(j.getJSONArray("sites").getJSONObject(0).getInt("id"));
        } catch (IOException e) {
            LOGGER.error("Could not get site id");
            return null;
        }
    }

    private String vscoImageToURL(String url) throws IOException{
        Document page = Jsoup.connect(url).userAgent(USER_AGENT)
                .get();
@@ -177,11 +225,6 @@ public class VscoRipper extends AbstractHTMLRipper{
        return Http.url(url).get();
    }

    @Override
    public Document getNextPage(Document doc) throws IOException {
        return super.getNextPage(doc);
    }

    @Override
    public void downloadURL(URL url, int index) {
        addURLToDownload(url, getPrefix(index));
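One detail worth calling out from `getUserTkn()` above: the VSCO userinfo endpoint apparently returns its JSON wrapped in an AMD-style `define(...)` call, which the ripper strips with two `replaceAll()` calls before parsing. A self-contained sketch of that unwrapping; the sample response string is assumed for illustration, only the stripping logic comes from the diff:

```java
import org.json.JSONObject;

public class VscoTknExample {
    public static void main(String[] args) {
        // Assumed shape of the userinfo response body: an AMD-style wrapper
        // around a JSON object containing the anonymous "tkn" token.
        String body = "define({\"tkn\":\"abc123\"})";

        // Same two replaceAll() steps as getUserTkn() in the diff above:
        // drop the "define(" prefix, then drop every ")".
        String json = body.replaceAll("define\\(", "").replaceAll("\\)", "");

        System.out.println(new JSONObject(json).getString("tkn")); // abc123
    }
}
```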
UpdateUtils.java
@@ -39,6 +39,20 @@ public class UpdateUtils {
        }
        return thisVersion;
    }

    private static String getChangeList(JSONObject rj) {
        JSONArray jsonChangeList = rj.getJSONArray("changeList");
        StringBuilder changeList = new StringBuilder();
        for (int i = 0; i < jsonChangeList.length(); i++) {
            String change = jsonChangeList.getString(i);
            if (change.startsWith(UpdateUtils.getThisJarVersion() + ":")) {
                break;
            }
            changeList.append("\n").append(change);
        }
        return changeList.toString();
    }

    public static void updateProgramCLI() {
        logger.info("Checking for update...");

@@ -61,15 +75,10 @@ public class UpdateUtils {
        }
        String jsonString = doc.body().html().replaceAll("&quot;", "\"");
        ripmeJson = new JSONObject(jsonString);
        JSONArray jsonChangeList = ripmeJson.getJSONArray("changeList");
        StringBuilder changeList = new StringBuilder();
        for (int i = 0; i < jsonChangeList.length(); i++) {
            String change = jsonChangeList.getString(i);
            if (change.startsWith(UpdateUtils.getThisJarVersion() + ":")) {
                break;
            }
            changeList.append("<br> + ").append(change);
        }

        String changeList = getChangeList(ripmeJson);

        logger.info("Change log: \n" + changeList);

        String latestVersion = ripmeJson.getString("latestVersion");
        if (UpdateUtils.isNewerVersion(latestVersion)) {
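The refactor above centralizes the changelog parsing in `getChangeList()`, which collects entries until it hits the first one belonging to the running version. A small self-contained sketch of that cutoff behaviour, using a hypothetical `changeList` array; the real data comes from the update server's JSON, and the version strings here are made up for illustration:

```java
import org.json.JSONArray;
import org.json.JSONObject;

public class ChangeListExample {
    // Mirrors the loop in getChangeList(): collect entries until one starts
    // with "<current version>:". Entries are assumed to look like
    // "1.7.50: description", matching the startsWith() check in the diff.
    static String changesSince(JSONObject rj, String currentVersion) {
        JSONArray jsonChangeList = rj.getJSONArray("changeList");
        StringBuilder changeList = new StringBuilder();
        for (int i = 0; i < jsonChangeList.length(); i++) {
            String change = jsonChangeList.getString(i);
            if (change.startsWith(currentVersion + ":")) {
                break; // everything from here on is already in this build
            }
            changeList.append("\n").append(change);
        }
        return changeList.toString();
    }

    public static void main(String[] args) {
        JSONObject rj = new JSONObject()
                .put("changeList", new JSONArray()
                        .put("1.7.51: newest change")
                        .put("1.7.50: change in the running version")
                        .put("1.7.49: older change"));
        // Prints only the 1.7.51 entry: the loop breaks at 1.7.50.
        System.out.println(changesSince(rj, "1.7.50"));
    }
}
```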
@@ -111,15 +120,8 @@ public class UpdateUtils {
        }
        String jsonString = doc.body().html().replaceAll("&quot;", "\"");
        ripmeJson = new JSONObject(jsonString);
        JSONArray jsonChangeList = ripmeJson.getJSONArray("changeList");
        StringBuilder changeList = new StringBuilder();
        for (int i = 0; i < jsonChangeList.length(); i++) {
            String change = jsonChangeList.getString(i);
            if (change.startsWith(UpdateUtils.getThisJarVersion() + ":")) {
                break;
            }
            changeList.append("<br> + ").append(change);
        }

        String changeList = getChangeList(ripmeJson);

        String latestVersion = ripmeJson.getString("latestVersion");
        if (UpdateUtils.isNewerVersion(latestVersion)) {
@@ -127,7 +129,7 @@ public class UpdateUtils {
            int result = JOptionPane.showConfirmDialog(
                    null,
                    "<html><font color=\"green\">New version (" + latestVersion + ") is available!</font>"
                            + "<br><br>Recent changes:" + changeList.toString()
                            + "<br><br>Recent changes:" + changeList
                            + "<br><br>Do you want to download and run the newest version?</html>",
                    "RipMe Updater",
                    JOptionPane.YES_NO_OPTION);
@@ -157,6 +159,11 @@ public class UpdateUtils {
    }

    private static boolean isNewerVersion(String latestVersion) {
        // If we're testing the update utils we want the program to always try to update
        if (Utils.getConfigBoolean("testing.always_try_to_update", false)) {
            logger.info("isNewerVersion is returning true because the key \"testing.always_try_to_update\" is true");
            return true;
        }
        int[] oldVersions = versionStringToInt(getThisJarVersion());
        int[] newVersions = versionStringToInt(latestVersion);
        if (oldVersions.length < newVersions.length) {
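`versionStringToInt()` is used by `isNewerVersion()` but its body is outside this diff. Presumably it splits a version string such as "1.7.49" on the dots and parses each component into the `int[]` that the comparison works on; the following is a hedged sketch under that assumption, not UpdateUtils' actual code (the real helper may strip extra suffixes from development build strings):

```java
// Hypothetical helper matching the int[] usage in isNewerVersion() above.
private static int[] versionStringToInt(String version) {
    String[] parts = version.split("\\.");
    int[] numbers = new int[parts.length];
    for (int i = 0; i < parts.length; i++) {
        numbers[i] = Integer.parseInt(parts[i]);
    }
    return numbers;
}
```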
@@ -227,17 +234,20 @@ public class UpdateUtils {
        try (FileOutputStream out = new FileOutputStream(updateFileName)) {
            out.write(response.bodyAsBytes());
        }
        String updateHash = createSha256(new File(updateFileName));
        logger.info("Download of new version complete; saved to " + updateFileName);
        logger.info("Checking hash of update");
        // Only check the hash if the user hasn't disabled hash checking
        if (Utils.getConfigBoolean("security.check_update_hash", true)) {
            String updateHash = createSha256(new File(updateFileName));
            logger.info("Download of new version complete; saved to " + updateFileName);
            logger.info("Checking hash of update");

            if (!ripmeJson.getString("currentHash").equals(updateHash)) {
                logger.error("Error: Update has bad hash");
                logger.debug("Expected hash: " + ripmeJson.getString("currentHash"));
                logger.debug("Actual hash: " + updateHash);
                throw new IOException("Got bad file hash");
            } else {
                logger.info("Hash is good");
            if (!ripmeJson.getString("currentHash").equals(updateHash)) {
                logger.error("Error: Update has bad hash");
                logger.debug("Expected hash: " + ripmeJson.getString("currentHash"));
                logger.debug("Actual hash: " + updateHash);
                throw new IOException("Got bad file hash");
            } else {
                logger.info("Hash is good");
            }
        }
        if (shouldLaunch) {
            // Setup updater script
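`createSha256()` itself is not shown in this diff; it presumably digests the downloaded jar and returns a hex string for comparison against `currentHash` in the update JSON. A hedged, self-contained sketch of such a helper, not necessarily UpdateUtils' exact implementation:

```java
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Sha256Example {
    // Hypothetical equivalent of createSha256(File): hash the file's bytes
    // with SHA-256 and return the digest as lowercase hex, the usual format
    // for comparing against a published hash.
    static String sha256Hex(File file) throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        byte[] hash = digest.digest(Files.readAllBytes(file.toPath()));
        StringBuilder hex = new StringBuilder();
        for (byte b : hash) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }

    public static void main(String[] args) throws Exception {
        // Path is illustrative; pass any local file to hash it.
        System.out.println(sha256Hex(new File(args.length > 0 ? args[0] : "ripme.jar")));
    }
}
```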