diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java
index a89e8974..4c21821e 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java
@@ -40,7 +40,7 @@ public abstract class AbstractJSONRipper extends AlbumRipper {
     public boolean canRip(URL url) {
         return url.getHost().endsWith(getDomain());
     }
-    
+
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
diff --git a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
index e8dbc6e3..50df96c4 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
@@ -92,7 +92,7 @@ public abstract class AlbumRipper extends AbstractRipper {
      * Uses filename from URL to decide filename.
      * @param url
      *      URL to download
-     * @return 
+     * @return
      *      True on success
      */
     public boolean addURLToDownload(URL url) {
@@ -139,7 +139,7 @@ public abstract class AlbumRipper extends AbstractRipper {
         itemsPending.remove(url);
         itemsCompleted.put(url, file);
         observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " already saved as " + file.getAbsolutePath()));
-        
+
         checkIfComplete();
     }
@@ -160,8 +160,8 @@ public abstract class AlbumRipper extends AbstractRipper {
      * Sets directory to save all ripped files to.
      * @param url
      *      URL to define how the working directory should be saved.
-     * @throws 
-     *      IOException 
+     * @throws
+     *      IOException
      */
     @Override
     public void setWorkingDir(URL url) throws IOException {
diff --git a/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java b/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java
index dc437c87..85e375f1 100644
--- a/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java
+++ b/src/main/java/com/rarchives/ripme/ripper/DownloadVideoThread.java
@@ -135,7 +135,7 @@ public class DownloadVideoThread extends Thread {
         observer.downloadCompleted(url, saveAs);
         logger.info("[+] Saved " + url + " as " + this.prettySaveAs);
     }
-    
+
     private int getTotalBytes(URL url) throws IOException {
         HttpURLConnection conn = (HttpURLConnection) url.openConnection();
         conn.setRequestMethod("HEAD");
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/CheebyRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/CheebyRipper.java
index f088081c..c90cfae8 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/CheebyRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/CheebyRipper.java
@@ -109,20 +109,20 @@ public class CheebyRipper extends AbstractHTMLRipper {
         }
         return imageURLs;
     }
-    
+
     @Override
     public void rip() throws IOException {
         logger.info("Retrieving " + this.url);
         sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
         Document doc = getFirstPage();
-        
+
         while (doc != null) {
             List images = getImagesFromPage(doc);
             if (images.size() == 0) {
                 throw new IOException("No images found at " + doc.location());
             }
-            
+
             for (Image image : images) {
                 if (isStopped()) {
                     break;
@@ -167,7 +167,7 @@ public class CheebyRipper extends AbstractHTMLRipper {
             }
         }
     }
-    
+
     private class Image {
         String url, prefix;
         int index;
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/EroShareRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/EroShareRipper.java
index 4ef8c1e1..75c56044 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/EroShareRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/EroShareRipper.java
@@ -46,7 +46,7 @@ public class EroShareRipper extends AbstractHTMLRipper {
     }
     @Override
-    public void downloadURL(URL url, int index){
+    public void downloadURL(URL url, int index) {
         addURLToDownload(url);
     }
     @Override
@@ -107,12 +107,12 @@ public class EroShareRipper extends AbstractHTMLRipper {
     @Override
-    public List getURLsFromPage(Document doc){
+    public List getURLsFromPage(Document doc) {
         List URLs = new ArrayList();
         //Pictures
         Elements imgs = doc.getElementsByTag("img");
-        for (Element img : imgs){
-            if (img.hasClass("album-image")){
+        for (Element img : imgs) {
+            if (img.hasClass("album-image")) {
                 String imageURL = img.attr("src");
                 imageURL = "https:" + imageURL;
                 URLs.add(imageURL);
@@ -120,8 +120,8 @@ public class EroShareRipper extends AbstractHTMLRipper {
             }
         //Videos
         Elements vids = doc.getElementsByTag("video");
-        for (Element vid : vids){
-            if (vid.hasClass("album-video")){
+        for (Element vid : vids) {
+            if (vid.hasClass("album-video")) {
                 Elements source = vid.getElementsByTag("source");
                 String videoURL = source.first().attr("src");
                 URLs.add(videoURL);
@@ -129,18 +129,18 @@ public class EroShareRipper extends AbstractHTMLRipper {
             }
         // Profile videos
         Elements links = doc.select("div.item-container > a.item");
-        for (Element link : links){
+        for (Element link : links) {
             Document video_page;
             try {
                 video_page = Http.url("https://eroshare.com" + link.attr("href")).get();
-            } catch(IOException e) {
+            } catch (IOException e) {
                 logger.warn("Failed to log link in Jsoup");
                 video_page = null;
                 e.printStackTrace();
             }
             Elements profile_vids = video_page.getElementsByTag("video");
-            for (Element vid : profile_vids){
-                if (vid.hasClass("album-video")){
+            for (Element vid : profile_vids) {
+                if (vid.hasClass("album-video")) {
                     Elements source = vid.getElementsByTag("source");
                     String videoURL = source.first().attr("src");
                     URLs.add(videoURL);
@@ -190,8 +190,8 @@ public class EroShareRipper extends AbstractHTMLRipper {
         List URLs = new ArrayList();
         //Pictures
         Elements imgs = doc.getElementsByTag("img");
-        for (Element img : imgs){
-            if (img.hasClass("album-image")){
+        for (Element img : imgs) {
+            if (img.hasClass("album-image")) {
                 String imageURL = img.attr("src");
                 imageURL = "https:" + imageURL;
                 URLs.add(new URL(imageURL));
@@ -199,8 +199,8 @@ public class EroShareRipper extends AbstractHTMLRipper {
             }
         //Videos
         Elements vids = doc.getElementsByTag("video");
-        for (Element vid : vids){
-            if (vid.hasClass("album-video")){
+        for (Element vid : vids) {
+            if (vid.hasClass("album-video")) {
                 Elements source = vid.getElementsByTag("source");
                 String videoURL = source.first().attr("src");
                 URLs.add(new URL(videoURL));
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/FapprovedRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/FapprovedRipper.java
index f33d40ce..a594a72b 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/FapprovedRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/FapprovedRipper.java
@@ -1,101 +1,101 @@
-package com.rarchives.ripme.ripper.rippers;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
-
-import com.rarchives.ripme.ripper.AbstractHTMLRipper;
-import com.rarchives.ripme.utils.Http;
-
-public class FapprovedRipper extends AbstractHTMLRipper {
-
-    private int pageIndex = 1;
-    private String username = null;
-
-    public FapprovedRipper(URL url) throws IOException {
-        super(url);
-    }
-
-    @Override
-    public String getHost() {
-        return "fapproved";
-    }
-    @Override
-    public String getDomain() {
-        return "fapproved.com";
-    }
-
-    @Override
-    public String getGID(URL url) throws MalformedURLException {
-        Pattern p = Pattern.compile("^https?://[w.]*fapproved.com/users/([a-zA-Z0-9\\-_]{3,}).*$");
-        Matcher m = p.matcher(url.toExternalForm());
-        if (m.matches()) {
-            username = m.group(1);
-            return username;
-        }
-        throw new MalformedURLException("Fapproved user not found in " + url + ", expected http://fapproved.com/users/username/images");
-    }
-
-    @Override
-    public URL sanitizeURL(URL url) throws MalformedURLException {
-        return new URL("http://fapproved.com/users/" + getGID(url));
-    }
-
-    @Override
-    public Document getFirstPage() throws IOException {
-        pageIndex = 1;
-        String pageURL = getPageURL(pageIndex);
-        return Http.url(pageURL)
-                   .ignoreContentType()
-                   .get();
-    }
-
-    @Override
-    public Document getNextPage(Document doc) throws IOException {
-        if ((doc.select("div.pagination li.next.disabled").size() != 0)
-            || (doc.select("div.pagination").size() == 0)) {
-            throw new IOException("No more pages found");
-        }
-        sleep(1000);
-        pageIndex++;
-        String pageURL = getPageURL(pageIndex);
-        return Http.url(pageURL)
-                   .ignoreContentType()
-                   .get();
-    }
-
-    private String getPageURL(int index) throws IOException {
-        if (username == null) {
-            username = getGID(this.url);
-        }
-        return "http://fapproved.com/users/" + username + "/images?page=" + pageIndex;
-    }
-
-    @Override
-    public List getURLsFromPage(Document page) {
-        List imageURLs = new ArrayList();
-        for (Element image : page.select("div.actual-image img")) {
-            String imageURL = image.attr("src");
-            if (imageURL.startsWith("//")) {
-                imageURL = "http:" + imageURL;
-            }
-            else if (imageURL.startsWith("/")) {
-                imageURL = "http://fapproved.com" + imageURL;
-            }
-            imageURLs.add(imageURL);
-        }
-        return imageURLs;
-    }
-
-    @Override
-    public void downloadURL(URL url, int index) {
-        addURLToDownload(url, getPrefix(index));
-    }
-}
+package com.rarchives.ripme.ripper.rippers;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.jsoup.nodes.Document;
+import org.jsoup.nodes.Element;
+
+import com.rarchives.ripme.ripper.AbstractHTMLRipper;
+import com.rarchives.ripme.utils.Http;
+
+public class FapprovedRipper extends AbstractHTMLRipper {
+
+    private int pageIndex = 1;
+    private String username = null;
+
+    public FapprovedRipper(URL url) throws IOException {
+        super(url);
+    }
+
+    @Override
+    public String getHost() {
+        return "fapproved";
+    }
+    @Override
+    public String getDomain() {
+        return "fapproved.com";
+    }
+
+    @Override
+    public String getGID(URL url) throws MalformedURLException {
+        Pattern p = Pattern.compile("^https?://[w.]*fapproved.com/users/([a-zA-Z0-9\\-_]{3,}).*$");
+        Matcher m = p.matcher(url.toExternalForm());
+        if (m.matches()) {
+            username = m.group(1);
+            return username;
+        }
+        throw new MalformedURLException("Fapproved user not found in " + url + ", expected http://fapproved.com/users/username/images");
+    }
+
+    @Override
+    public URL sanitizeURL(URL url) throws MalformedURLException {
+        return new URL("http://fapproved.com/users/" + getGID(url));
+    }
+
+    @Override
+    public Document getFirstPage() throws IOException {
+        pageIndex = 1;
+        String pageURL = getPageURL(pageIndex);
+        return Http.url(pageURL)
+                   .ignoreContentType()
+                   .get();
+    }
+
+    @Override
+    public Document getNextPage(Document doc) throws IOException {
+        if ((doc.select("div.pagination li.next.disabled").size() != 0)
+            || (doc.select("div.pagination").size() == 0)) {
+            throw new IOException("No more pages found");
+        }
+        sleep(1000);
+        pageIndex++;
+        String pageURL = getPageURL(pageIndex);
+        return Http.url(pageURL)
+                   .ignoreContentType()
+                   .get();
+    }
+
+    private String getPageURL(int index) throws IOException {
+        if (username == null) {
+            username = getGID(this.url);
+        }
+        return "http://fapproved.com/users/" + username + "/images?page=" + pageIndex;
+    }
+
+    @Override
+    public List getURLsFromPage(Document page) {
+        List imageURLs = new ArrayList();
+        for (Element image : page.select("div.actual-image img")) {
+            String imageURL = image.attr("src");
+            if (imageURL.startsWith("//")) {
+                imageURL = "http:" + imageURL;
+            }
+            else if (imageURL.startsWith("/")) {
+                imageURL = "http://fapproved.com" + imageURL;
+            }
+            imageURLs.add(imageURL);
+        }
+        return imageURLs;
+    }
+
+    @Override
+    public void downloadURL(URL url, int index) {
+        addURLToDownload(url, getPrefix(index));
+    }
+}
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java
index 71d35da1..b893693e 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/FlickrRipper.java
@@ -63,7 +63,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
         }
         return new URL(sUrl);
     }
-    
+
     public String getAlbumTitle(URL url) throws MalformedURLException {
         if (!url.toExternalForm().contains("/sets/")) {
             return super.getAlbumTitle(url);
@@ -90,7 +90,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
         // Root: https://www.flickr.com/photos/115858035@N04/
         // Album: https://www.flickr.com/photos/115858035@N04/sets/72157644042355643/
-        
+
         final String domainRegex = "https?://[wm.]*flickr.com";
         final String userRegex = "[a-zA-Z0-9@]+";
         // Album
@@ -159,7 +159,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
         }
         return Http.url(nextURL).get();
     }
-    
+
     @Override
     public List getURLsFromPage(Document page) {
         List imageURLs = new ArrayList();
@@ -194,7 +194,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
         }
         return imageURLs;
     }
-    
+
     @Override
     public void downloadURL(URL url, int index) {
         // Add image page to threadpool to grab the image & download it
@@ -267,7 +267,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
             logger.error("[!] Exception while loading/parsing " + this.url, e);
         }
     }
-    
+
     private Document getLargestImagePageDocument(URL url) throws IOException {
         // Get current page
         Document doc = Http.url(url).get();
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/GifyoRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/GifyoRipper.java
index 6c27a2c4..3e4d4853 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/GifyoRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/GifyoRipper.java
@@ -51,7 +51,7 @@ public class GifyoRipper extends AbstractHTMLRipper {
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return new URL("http://gifyo.com/" + getGID(url) + "/");
     }
-    
+
     @Override
     public Document getFirstPage() throws IOException {
         Response resp = Http.url(this.url)
@@ -66,7 +66,7 @@ public class GifyoRipper extends AbstractHTMLRipper {
         }
         return doc;
     }
-    
+
     @Override
     public Document getNextPage(Document doc) throws IOException {
         page++;
@@ -89,7 +89,7 @@ public class GifyoRipper extends AbstractHTMLRipper {
         sleep(2000);
         return nextDoc;
     }
-    
+
     @Override
     public List getURLsFromPage(Document doc) {
         List imageURLs = new ArrayList();
@@ -105,7 +105,7 @@ public class GifyoRipper extends AbstractHTMLRipper {
         logger.debug("Found " + imageURLs.size() + " images");
         return imageURLs;
     }
-    
+
     @Override
     public void downloadURL(URL url, int index) {
         addURLToDownload(url);
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/ImagestashRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/ImagestashRipper.java
index ce2347a7..ad94b85b 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/ImagestashRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/ImagestashRipper.java
@@ -43,7 +43,7 @@ public class ImagestashRipper extends AbstractJSONRipper {
                 + "imagestash.org/tag/tagname"
                 + " Got: " + url);
     }
-    
+
     @Override
     public JSONObject getFirstPage() throws IOException {
         String baseURL = "https://imagestash.org/images?tags="
@@ -51,7 +51,7 @@ public class ImagestashRipper extends AbstractJSONRipper {
                 + "&page=" + page;
         return Http.url(baseURL).getJSON();
     }
-    
+
     @Override
     public JSONObject getNextPage(JSONObject json) throws IOException {
         int count = json.getInt("count"),
@@ -64,7 +64,7 @@ public class ImagestashRipper extends AbstractJSONRipper {
         page++;
         return getFirstPage();
     }
-    
+
     @Override
     public List getURLsFromJSON(JSONObject json) {
         List imageURLs = new ArrayList();
@@ -79,7 +79,7 @@ public class ImagestashRipper extends AbstractJSONRipper {
         }
         return imageURLs;
     }
-    
+
     @Override
     public void downloadURL(URL url, int index) {
         addURLToDownload(url, getPrefix(index));
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java
index c2b50229..e31cfb38 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/InstagramRipper.java
@@ -121,7 +121,7 @@ public class InstagramRipper extends AbstractJSONRipper {
         imageURL = imageURL.replaceAll("\\?ig_cache_key.+$", "");
         return imageURL;
     }
-    
+
     private String getMedia(JSONObject data) {
         String imageURL = "";
         if (data.has("videos")) {
@@ -131,14 +131,14 @@ public class InstagramRipper extends AbstractJSONRipper {
         }
         return imageURL;
     }
-    
+
     @Override
     public List getURLsFromJSON(JSONObject json) {
         List imageURLs = new ArrayList();
         JSONArray datas = json.getJSONArray("items");
         for (int i = 0; i < datas.length(); i++) {
             JSONObject data = (JSONObject) datas.get(i);
-            
+
             String dataType = data.getString("type");
             if (dataType.equals("carousel")) {
                 JSONArray carouselMedias = data.getJSONArray("carousel_media");
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/ModelmayhemRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/ModelmayhemRipper.java
index 58ecded9..9dc5c563 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/ModelmayhemRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/ModelmayhemRipper.java
@@ -55,7 +55,7 @@ public class ModelmayhemRipper extends AlbumRipper {
                 .method(Method.GET)
                 .execute();
         cookies = resp.cookies();
-        
+
         resp = Jsoup.connect("http://www.modelmayhem.com/includes/js/auth.php")
                 .cookies(cookies)
                 .ignoreContentType(true)
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java
index 203eb1ba..10006999 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/MyhentaicomicsRipper.java
@@ -94,7 +94,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
             try {
                 logger.info("Grabbing " + urlToGet);
                 nextAlbumPage = Http.url(urlToGet).get();
-            } catch(IOException e) {
+            } catch (IOException e) {
                 logger.warn("Failed to log link in Jsoup");
                 nextAlbumPage = null;
                 e.printStackTrace();
@@ -122,7 +122,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
         Document doc;
         try {
             doc = Http.url("http://myhentaicomics.com" + url).get();
-        } catch(IOException e){
+        } catch (IOException e) {
             logger.warn("Failed to log link in Jsoup");
             doc = null;
             e.printStackTrace();
@@ -144,7 +144,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
                 else {
                     album_doc = Http.url(element).get();
                 }
-            } catch(IOException e){
+            } catch (IOException e) {
                 logger.warn("Failed to log link in Jsoup");
                 album_doc = null;
                 e.printStackTrace();
@@ -168,7 +168,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
                 result.add("http://myhentaicomics.com/" + imageSource);
                 addURLToDownload(new URL("http://myhentaicomics.com/" + imageSource), "", url_string.split("/")[6]);
             }
-            catch(MalformedURLException e) {
+            catch (MalformedURLException e) {
                 logger.warn("Malformed URL");
                 e.printStackTrace();
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java
index a51b6d89..d9d8b538 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java
@@ -41,7 +41,7 @@ public class NfsfwRipper extends AlbumRipper {
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
     }
-    
+
     @Override
     public String getAlbumTitle(URL url) throws MalformedURLException {
         try {
@@ -185,7 +185,7 @@ public class NfsfwRipper extends AlbumRipper {
             }
         }
     }
-    
+
     private class Pair {
         public String first, second;
         public Pair(String first, String second) {
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java
index 751dfafc..09ede236 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/PhotobucketRipper.java
@@ -45,7 +45,7 @@ public class PhotobucketRipper extends AlbumRipper {
             return url;
         }
     }
-    
+
     public String getAlbumTitle(URL url) throws MalformedURLException {
         try {
             // Attempt to use album title as GID
@@ -165,7 +165,7 @@ public class PhotobucketRipper extends AlbumRipper {
                 JSONObject object = objects.getJSONObject(i);
                 String image = object.getString("fullsizeUrl");
                 filesIndex += 1;
-                addURLToDownload(new URL(image), 
+                addURLToDownload(new URL(image),
                         "",
                         object.getString("location").replaceAll(" ", "_"),
                         albumDoc.location(),
@@ -179,7 +179,7 @@ public class PhotobucketRipper extends AlbumRipper {
             return new ArrayList();
         }
     }
-    
+
     private List getSubAlbums(String url, String currentAlbumPath) {
         List result = new ArrayList();
         String subdomain = url.substring(url.indexOf("://")+3);
@@ -193,7 +193,7 @@ public class PhotobucketRipper extends AlbumRipper {
             JSONObject json = Http.url(apiUrl).getJSON();
             JSONArray subalbums = json.getJSONObject("body").getJSONArray("subAlbums");
             for (int i = 0; i < subalbums.length(); i++) {
-                String suburl = 
+                String suburl =
                         "http://"
                         + subdomain
                         + ".photobucket.com"
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java
index 8627f9e3..cbd922bb 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/SankakuComplexRipper.java
@@ -31,7 +31,7 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
     public String getHost() {
         return "sankakucomplex";
     }
-    
+
     @Override
     public String getDomain() {
         return "sankakucomplex.com";
@@ -52,7 +52,7 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
                 "idol.sankakucomplex.com?...&tags=something... - got " + url + "instead");
     }
-    
+
     @Override
     public Document getFirstPage() throws IOException {
         if (albumDoc == null) {
@@ -62,7 +62,7 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
         }
         return albumDoc;
     }
-    
+
     @Override
     public List getURLsFromPage(Document doc) {
         List imageURLs = new ArrayList();
@@ -78,7 +78,7 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
         }
         return imageURLs;
     }
-    
+
     @Override
     public void downloadURL(URL url, int index) {
         // Mock up the URL of the post page based on the post ID at the end of the URL.
@@ -91,7 +91,7 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
         Element pagination = doc.select("div.pagination").first();
         if (pagination.hasAttr("next-page-url")) {
             return Http.url(pagination.attr("abs:next-page-url")).cookies(cookies).get();
-        } else{
+        } else {
             return null;
         }
     }
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/TeenplanetRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/TeenplanetRipper.java
index b81daa7a..630a0d0f 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/TeenplanetRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/TeenplanetRipper.java
@@ -34,7 +34,7 @@ public class TeenplanetRipper extends AlbumRipper {
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
     }
-    
+
     public String getAlbumTitle(URL url) throws MalformedURLException {
         try {
             // Attempt to use album title as GID
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/VkRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/VkRipper.java
index d4707f6e..7026e0b7 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/VkRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/VkRipper.java
@@ -160,7 +160,7 @@ public class VkRipper extends AlbumRipper {
         }
         waitForThreads();
     }
-    
+
     private Map getPhotoIDsToURLs(String photoID) throws IOException {
         Map photoIDsToURLs = new HashMap();
         Map postData = new HashMap();
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java
index 275c6ba5..66a8eae2 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/MotherlessVideoRipper.java
@@ -30,7 +30,7 @@ public class MotherlessVideoRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
-    
+
     @Override
     public URL sanitizeURL(URL url) throws MalformedURLException {
         return url;
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java
index f807b496..6084607a 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/video/ViddmeRipper.java
@@ -31,7 +31,7 @@ public class ViddmeRipper extends VideoRipper {
         Matcher m = p.matcher(url.toExternalForm());
         return m.matches();
     }
-    
+
    @Override
    public URL sanitizeURL(URL url) throws MalformedURLException {
        return url;
diff --git a/src/main/java/com/rarchives/ripme/ui/RipStatusComplete.java b/src/main/java/com/rarchives/ripme/ui/RipStatusComplete.java
index b6f89471..720aa9a7 100644
--- a/src/main/java/com/rarchives/ripme/ui/RipStatusComplete.java
+++ b/src/main/java/com/rarchives/ripme/ui/RipStatusComplete.java
@@ -16,7 +16,7 @@ public class RipStatusComplete {
         this.dir = dir;
         this.count = count;
     }
-    
+
     public String getDir() {
         String result;
         try {
diff --git a/src/main/java/com/rarchives/ripme/utils/AES.java b/src/main/java/com/rarchives/ripme/utils/AES.java
index b0b49ec9..e506d1bd 100644
--- a/src/main/java/com/rarchives/ripme/utils/AES.java
+++ b/src/main/java/com/rarchives/ripme/utils/AES.java
@@ -31,7 +31,7 @@ public class AES {
         nBits = nBits / 8;
         byte[] data = Base64.decode(cipherText);
         byte[] k = Arrays.copyOf(key.getBytes(), nBits);
-        
+
         Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
         SecretKey secretKey = generateSecretKey(k, nBits);
         byte[] nonceBytes = Arrays.copyOf(Arrays.copyOf(data, 8), nBits / 2);
diff --git a/src/main/java/com/rarchives/ripme/utils/Http.java b/src/main/java/com/rarchives/ripme/utils/Http.java
index bc135306..a29e5cd6 100644
--- a/src/main/java/com/rarchives/ripme/utils/Http.java
+++ b/src/main/java/com/rarchives/ripme/utils/Http.java
@@ -17,7 +17,7 @@ import com.rarchives.ripme.ripper.AbstractRipper;
 /**
  * Wrapper around the Jsoup connection methods.
- * 
+ *
  * Benefit is retry logic.
  */
 public class Http {
@@ -38,7 +38,7 @@ public class Http {
         this.url = url.toExternalForm();
         defaultSettings();
     }
-    
+
     public static Http url(String url) {
         return new Http(url);
     }
diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagefapRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagefapRipperTest.java
index 2e19e209..d38eddc1 100644
--- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagefapRipperTest.java
+++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/ImagefapRipperTest.java
@@ -8,7 +8,7 @@ import java.util.Map;
 import com.rarchives.ripme.ripper.rippers.ImagefapRipper;
 public class ImagefapRipperTest extends RippersTest {
-    
+
     public void testImagefapAlbums() throws IOException {
         Map testURLs = new HashMap();
         // Album with specific title