From c2938af568e4b7bfc6740a49e93bcf3921a9e9b0 Mon Sep 17 00:00:00 2001
From: cyian-1756
Date: Fri, 15 Jun 2018 22:23:29 -0400
Subject: [PATCH] Reddit ripper can now rip videos from v.redd.it

---
 .../ripme/ripper/rippers/RedditRipper.java  | 35 +++++++++++++++++++
 .../com/rarchives/ripme/utils/RipUtils.java |  3 ++
 2 files changed, 38 insertions(+)

diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java
index bb60d616..5a967068 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java
@@ -17,6 +17,10 @@ import com.rarchives.ripme.ui.UpdateUtils;
 import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.RipUtils;
 import com.rarchives.ripme.utils.Utils;
+import org.jsoup.Jsoup;
+
+import javax.swing.text.Document;
+import javax.swing.text.Element;
 
 public class RedditRipper extends AlbumRipper {
 
@@ -179,6 +183,32 @@ public class RedditRipper extends AlbumRipper {
         }
     }
 
+    private URL parseRedditVideoMPD(String vidURL) {
+        org.jsoup.nodes.Document doc = null;
+        try {
+            doc = Http.url(vidURL + "/DASHPlaylist.mpd").ignoreContentType().get();
+            int largestHeight = 0;
+            String baseURL = null;
+            // Loops over all the videos and finds the one with the largest height and sets baseURL to the base url of that video
+            for (org.jsoup.nodes.Element e : doc.select("MPD > Period > AdaptationSet > Representation")) {
+                String height = e.attr("height");
+                if (height.equals("")) {
+                    height = "0";
+                }
+                if (largestHeight < Integer.parseInt(height)) {
+                    largestHeight = Integer.parseInt(height);
+                    baseURL = doc.select("MPD > Period > AdaptationSet > Representation[height=" + height + "]").select("BaseURL").text();
+                }
+                LOGGER.info("H " + e.attr("height") + " V " + e.attr("width"));
+            }
+            return new URL(vidURL + "/" + baseURL);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        return null;
+
+    }
+
     private void handleURL(String theUrl, String id) {
         URL originalURL;
         try {
@@ -198,6 +228,11 @@
                 savePath += id + "-" + m.group(1) + ".jpg";
                 addURLToDownload(urls.get(0), new File(savePath));
             }
+            if (url.contains("v.redd.it")) {
+                String savePath = this.workingDir + File.separator;
+                savePath += id + "-" + url.split("/")[3] + ".mp4";
+                addURLToDownload(parseRedditVideoMPD(urls.get(0).toExternalForm()), new File(savePath));
+            }
             else {
                 addURLToDownload(urls.get(0), id + "-", "", theUrl, null);
             }
diff --git a/src/main/java/com/rarchives/ripme/utils/RipUtils.java b/src/main/java/com/rarchives/ripme/utils/RipUtils.java
index 84b17ca1..34081852 100644
--- a/src/main/java/com/rarchives/ripme/utils/RipUtils.java
+++ b/src/main/java/com/rarchives/ripme/utils/RipUtils.java
@@ -86,6 +86,9 @@ public class RipUtils {
                 logger.warn("Exception while retrieving eroshare page:", e);
             }
             return result;
+        } else if (url.toExternalForm().contains("v.redd.it")) {
+            result.add(url);
+            return result;
         } else if (url.toExternalForm().contains("erome.com")) {
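
Note: the sketch below is only an illustration of the technique parseRedditVideoMPD uses, not the patched RipMe code. It fetches <video URL>/DASHPlaylist.mpd, walks the MPD > Period > AdaptationSet > Representation elements with jsoup, and keeps the BaseURL of the representation with the largest height. It assumes plain Jsoup.connect in place of RipMe's Http helper, and the class name, method name, and the v.redd.it post ID in main are invented for the example.

import java.io.IOException;
import java.net.URL;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

public class RedditVideoExample {

    /**
     * Fetches <vidURL>/DASHPlaylist.mpd and returns the URL of the Representation
     * with the largest height, or null if the playlist cannot be fetched or parsed.
     */
    static URL highestQualityVideo(String vidURL) {
        try {
            // The DASH manifest is XML, so tell jsoup not to reject the content type.
            Document mpd = Jsoup.connect(vidURL + "/DASHPlaylist.mpd")
                    .ignoreContentType(true)
                    .get();

            int largestHeight = 0;
            String baseURL = null;

            // Walk every Representation and remember the BaseURL of the tallest one.
            for (Element rep : mpd.select("MPD > Period > AdaptationSet > Representation")) {
                String heightAttr = rep.attr("height");
                int height = heightAttr.isEmpty() ? 0 : Integer.parseInt(heightAttr);
                if (height > largestHeight) {
                    largestHeight = height;
                    baseURL = rep.select("BaseURL").text();
                }
            }

            if (baseURL == null) {
                // No Representation carried a height attribute (e.g. audio-only manifest).
                return null;
            }
            return new URL(vidURL + "/" + baseURL);
        } catch (IOException e) {
            return null;
        }
    }

    public static void main(String[] args) {
        // Hypothetical post ID, for illustration only.
        System.out.println(highestQualityVideo("https://v.redd.it/abc123"));
    }
}

Reading BaseURL from the matched Representation element itself, rather than re-querying the document by height, keeps the lookup unambiguous when two representations share the same height; returning null when the playlist cannot be read mirrors the behaviour of the patch above.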