Update to Java 8

* Changed the Maven target to 1.8
* Performed a preliminary cleanup using IntelliJ's Code Analysis (Only Java 7/8 updates and a few other entries in the Error and Warnings categories)
* Updated the readme to change the required Java version
This commit is contained in:
Julien Machiels 2017-10-24 16:33:28 +02:00
parent d4f83c0ae6
commit d043685d2e
94 changed files with 585 additions and 787 deletions

View File

@ -3,7 +3,7 @@
[![Build Status](https://travis-ci.org/4pr0n/ripme.svg?branch=master)](https://travis-ci.org/4pr0n/ripme)
[![Join the chat at https://gitter.im/RipMeApp/Lobby](https://badges.gitter.im/RipMeApp/Lobby.svg)](https://gitter.im/RipMeApp/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
Album ripper for various websites. Runs on your computer. Requires Java 1.6
Album ripper for various websites. Runs on your computer. Requires Java 8.
![Screenshot](http://i.imgur.com/kWzhsIu.png)

View File

@ -84,8 +84,8 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.6</source>
<target>1.6</target>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>

View File

@ -62,13 +62,13 @@ public class App {
}
}
public static void rip(URL url) throws Exception {
private static void rip(URL url) throws Exception {
AbstractRipper ripper = AbstractRipper.getRipper(url);
ripper.setup();
ripper.rip();
}
public static void handleArguments(String[] args) {
private static void handleArguments(String[] args) {
CommandLine cl = getArgs(args);
if (cl.hasOption('h')) {
HelpFormatter hf = new HelpFormatter();
@ -172,7 +172,7 @@ public class App {
}
// this function will attempt to rip the provided url
public static void ripURL(String targetURL, boolean saveConfig) {
private static void ripURL(String targetURL, boolean saveConfig) {
try {
URL url = new URL(targetURL);
rip(url);
@ -193,7 +193,7 @@ public class App {
}
}
public static Options getOptions() {
private static Options getOptions() {
Options opts = new Options();
opts.addOption("h", "help", false, "Print the help");
opts.addOption("u", "url", true, "URL of album to rip");
@ -211,11 +211,10 @@ public class App {
return opts;
}
public static CommandLine getArgs(String[] args) {
private static CommandLine getArgs(String[] args) {
BasicParser parser = new BasicParser();
try {
CommandLine cl = parser.parse(getOptions(), args, false);
return cl;
return parser.parse(getOptions(), args, false);
} catch (ParseException e) {
logger.error("[!] Error while parsing command-line arguments: " + Arrays.toString(args), e);
System.exit(-1);
@ -244,12 +243,7 @@ public class App {
if (HISTORY.toList().size() == 0) {
// Loaded from config, still no entries.
// Guess rip history based on rip folder
String[] dirs = Utils.getWorkingDirectory().list(new FilenameFilter() {
@Override
public boolean accept(File dir, String file) {
return new File(dir.getAbsolutePath() + File.separator + file).isDirectory();
}
});
String[] dirs = Utils.getWorkingDirectory().list((dir, file) -> new File(dir.getAbsolutePath() + File.separator + file).isDirectory());
for (String dir : dirs) {
String url = RipUtils.urlFromDirectoryName(dir);
if (url != null) {

View File

@ -17,27 +17,27 @@ import com.rarchives.ripme.utils.Utils;
*/
public abstract class AbstractHTMLRipper extends AlbumRipper {
public AbstractHTMLRipper(URL url) throws IOException {
protected AbstractHTMLRipper(URL url) throws IOException {
super(url);
}
public abstract String getDomain();
protected abstract String getDomain();
public abstract String getHost();
public abstract Document getFirstPage() throws IOException;
protected abstract Document getFirstPage() throws IOException;
public Document getNextPage(Document doc) throws IOException {
return null;
}
public abstract List<String> getURLsFromPage(Document page);
public List<String> getDescriptionsFromPage(Document doc) throws IOException {
protected abstract List<String> getURLsFromPage(Document page);
protected List<String> getDescriptionsFromPage(Document doc) throws IOException {
throw new IOException("getDescriptionsFromPage not implemented"); // Do I do this or make an abstract function?
}
public abstract void downloadURL(URL url, int index);
public DownloadThreadPool getThreadPool() {
protected abstract void downloadURL(URL url, int index);
protected DownloadThreadPool getThreadPool() {
return null;
}
public boolean keepSortOrder() {
protected boolean keepSortOrder() {
return true;
}
@ -50,13 +50,13 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
public URL sanitizeURL(URL url) throws MalformedURLException {
return url;
}
public boolean hasDescriptionSupport() {
protected boolean hasDescriptionSupport() {
return false;
}
public String[] getDescription(String url,Document page) throws IOException {
protected String[] getDescription(String url, Document page) throws IOException {
throw new IOException("getDescription not implemented"); // Do I do this or make an abstract function?
}
public int descSleepTime() {
protected int descSleepTime() {
return 100;
}
@Override
@ -140,7 +140,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
}
waitForThreads();
}
public String fileNameFromURL(URL url) {
private String fileNameFromURL(URL url) {
String saveAs = url.toExternalForm();
if (saveAs.substring(saveAs.length() - 1) == "/") { saveAs = saveAs.substring(0,saveAs.length() - 1) ;}
saveAs = saveAs.substring(saveAs.lastIndexOf('/')+1);
@ -154,7 +154,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
String saveAs = fileNameFromURL(url);
return saveText(url,subdirectory,text,index,saveAs);
}
public boolean saveText(URL url, String subdirectory, String text, int index, String fileName) {
private boolean saveText(URL url, String subdirectory, String text, int index, String fileName) {
// Not the best for some cases, like FurAffinity. Overridden there.
try {
stopCheck();
@ -189,7 +189,7 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
}
return true;
}
public String getPrefix(int index) {
protected String getPrefix(int index) {
String prefix = "";
if (keepSortOrder() && Utils.getConfigBoolean("download.save_order", true)) {
prefix = String.format("%03d_", index);

View File

@ -15,24 +15,24 @@ import com.rarchives.ripme.utils.Utils;
*/
public abstract class AbstractJSONRipper extends AlbumRipper {
public AbstractJSONRipper(URL url) throws IOException {
protected AbstractJSONRipper(URL url) throws IOException {
super(url);
}
public abstract String getDomain();
protected abstract String getDomain();
public abstract String getHost();
public abstract JSONObject getFirstPage() throws IOException;
public JSONObject getNextPage(JSONObject doc) throws IOException {
protected abstract JSONObject getFirstPage() throws IOException;
protected JSONObject getNextPage(JSONObject doc) throws IOException {
throw new IOException("getNextPage not implemented");
}
public abstract List<String> getURLsFromJSON(JSONObject json);
public abstract void downloadURL(URL url, int index);
public DownloadThreadPool getThreadPool() {
protected abstract List<String> getURLsFromJSON(JSONObject json);
protected abstract void downloadURL(URL url, int index);
private DownloadThreadPool getThreadPool() {
return null;
}
public boolean keepSortOrder() {
protected boolean keepSortOrder() {
return true;
}
@ -96,7 +96,7 @@ public abstract class AbstractJSONRipper extends AlbumRipper {
waitForThreads();
}
public String getPrefix(int index) {
protected String getPrefix(int index) {
String prefix = "";
if (keepSortOrder() && Utils.getConfigBoolean("download.save_order", true)) {
prefix = String.format("%03d_", index);

View File

@ -32,10 +32,10 @@ public abstract class AbstractRipper
protected URL url;
protected File workingDir;
protected DownloadThreadPool threadPool;
protected RipStatusHandler observer = null;
DownloadThreadPool threadPool;
RipStatusHandler observer = null;
protected boolean completed = true;
private boolean completed = true;
public abstract void rip() throws IOException;
public abstract String getHost();
@ -110,9 +110,9 @@ public abstract class AbstractRipper
* The cookies to send to the server while downloading this file.
* @return
*/
public abstract boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies);
protected abstract boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String, String> cookies);
public boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map<String,String> cookies) {
protected boolean addURLToDownload(URL url, String prefix, String subdirectory, String referrer, Map<String, String> cookies) {
try {
stopCheck();
} catch (IOException e) {
@ -159,7 +159,7 @@ public abstract class AbstractRipper
* Sub-directory of the working directory to save the images to.
* @return True on success, false on failure.
*/
public boolean addURLToDownload(URL url, String prefix, String subdirectory) {
protected boolean addURLToDownload(URL url, String prefix, String subdirectory) {
return addURLToDownload(url, prefix, subdirectory, null, null);
}
@ -172,7 +172,7 @@ public abstract class AbstractRipper
* Text to append to saved filename.
* @return True on success, false on failure.
*/
public boolean addURLToDownload(URL url, String prefix) {
protected boolean addURLToDownload(URL url, String prefix) {
// Use empty subdirectory
return addURLToDownload(url, prefix, "");
}
@ -223,14 +223,14 @@ public abstract class AbstractRipper
/**
* @return Number of files downloaded.
*/
public int getCount() {
int getCount() {
return 1;
}
/**
* Notifies observers and updates state if all files have been ripped.
*/
protected void checkIfComplete() {
void checkIfComplete() {
if (observer == null) {
logger.debug("observer is null");
return;
@ -320,10 +320,10 @@ public abstract class AbstractRipper
* @throws Exception
*/
public static List<Constructor<?>> getRipperConstructors(String pkg) throws Exception {
List<Constructor<?>> constructors = new ArrayList<Constructor<?>>();
List<Constructor<?>> constructors = new ArrayList<>();
for (Class<?> clazz : Utils.getClassesForPackage(pkg)) {
if (AbstractRipper.class.isAssignableFrom(clazz)) {
constructors.add( (Constructor<?>) clazz.getConstructor(URL.class) );
constructors.add(clazz.getConstructor(URL.class));
}
}
return constructors;
@ -355,10 +355,6 @@ public abstract class AbstractRipper
logger.error("Got exception while running ripper:", e);
waitForThreads();
sendUpdate(STATUS.RIP_ERRORED, "HTTP status code " + e.getStatusCode() + " for URL " + e.getUrl());
} catch (IOException e) {
logger.error("Got exception while running ripper:", e);
waitForThreads();
sendUpdate(STATUS.RIP_ERRORED, e.getMessage());
} catch (Exception e) {
logger.error("Got exception while running ripper:", e);
waitForThreads();
@ -368,7 +364,7 @@ public abstract class AbstractRipper
}
}
public void cleanup() {
private void cleanup() {
if (this.workingDir.list().length == 0) {
// No files, delete the dir
logger.info("Deleting empty directory " + this.workingDir);
@ -379,7 +375,7 @@ public abstract class AbstractRipper
}
}
public boolean sleep(int milliseconds) {
protected boolean sleep(int milliseconds) {
try {
logger.debug("Sleeping " + milliseconds + "ms");
Thread.sleep(milliseconds);
@ -402,7 +398,7 @@ public abstract class AbstractRipper
logger.debug("THIS IS A TEST RIP");
thisIsATest = true;
}
public boolean isThisATest() {
protected boolean isThisATest() {
return thisIsATest;
}
}

View File

@ -15,11 +15,11 @@ import com.rarchives.ripme.utils.Utils;
public abstract class AlbumRipper extends AbstractRipper {
protected Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<URL, File>());
protected Map<URL, File> itemsCompleted = Collections.synchronizedMap(new HashMap<URL, File>());
protected Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<URL, String>());
private Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<URL, File>());
private Map<URL, File> itemsCompleted = Collections.synchronizedMap(new HashMap<URL, File>());
private Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<URL, String>());
public AlbumRipper(URL url) throws IOException {
protected AlbumRipper(URL url) throws IOException {
super(url);
}
@ -29,7 +29,7 @@ public abstract class AlbumRipper extends AbstractRipper {
public abstract String getHost();
public abstract String getGID(URL url) throws MalformedURLException;
public boolean allowDuplicates() {
protected boolean allowDuplicates() {
return false;
}
@ -95,7 +95,7 @@ public abstract class AlbumRipper extends AbstractRipper {
* @return
* True on success
*/
public boolean addURLToDownload(URL url) {
protected boolean addURLToDownload(URL url) {
// Use empty prefix and empty subdirectory
return addURLToDownload(url, "", "");
}

View File

@ -24,12 +24,12 @@ import com.rarchives.ripme.utils.Utils;
* Thread for downloading files.
* Includes retry logic, observer notifications, and other goodies.
*/
public class DownloadFileThread extends Thread {
class DownloadFileThread extends Thread {
private static final Logger logger = Logger.getLogger(DownloadFileThread.class);
private String referrer = "";
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
private URL url;
private File saveAs;

View File

@ -20,7 +20,7 @@ import com.rarchives.ripme.utils.Utils;
* Thread for downloading files.
* Includes retry logic, observer notifications, and other goodies.
*/
public class DownloadVideoThread extends Thread {
class DownloadVideoThread extends Thread {
private static final Logger logger = Logger.getLogger(DownloadVideoThread.class);

View File

@ -8,7 +8,7 @@ import java.net.URL;
* I have no idea why I made this interface. Everything is captured within the AbstractRipper.
* Oh well, here's to encapsulation and abstraction! (raises glass)
*/
public interface RipperInterface {
interface RipperInterface {
void rip() throws IOException;
boolean canRip(URL url);
URL sanitizeURL(URL url) throws MalformedURLException;

View File

@ -16,7 +16,7 @@ public abstract class VideoRipper extends AbstractRipper {
private int bytesTotal = 1,
bytesCompleted = 1;
public VideoRipper(URL url) throws IOException {
protected VideoRipper(URL url) throws IOException {
super(url);
}

View File

@ -23,7 +23,7 @@ import java.util.HashMap;
public class AerisdiesRipper extends AbstractHTMLRipper {
private Document albumDoc = null;
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
public AerisdiesRipper(URL url) throws IOException {
@ -74,7 +74,7 @@ public class AerisdiesRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Elements albumElements = page.select("div.imgbox > a > img");
for (Element imageBox : albumElements) {
String imageUrl = imageBox.attr("src");

View File

@ -66,7 +66,7 @@ public class BcfakesRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("div.ngg-gallery-thumbnail > a > img")) {
String imageURL = thumb.attr("src");
imageURL = imageURL.replace("thumbs/thumbs_", "");

View File

@ -33,7 +33,7 @@ public class ButttoucherRipper extends AbstractHTMLRipper {
public String getGID(URL url) throws MalformedURLException {
Pattern p; Matcher m;
p = Pattern.compile("^.*butttoucher.com/users/([a-zA-Z0-9_\\-]{1,}).*$");
p = Pattern.compile("^.*butttoucher.com/users/([a-zA-Z0-9_\\-]+).*$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
@ -51,7 +51,7 @@ public class ButttoucherRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> thumbs = new ArrayList<String>();
List<String> thumbs = new ArrayList<>();
for (Element thumb : page.select(".thumb img")) {
if (!thumb.hasAttr("src")) {
continue;

View File

@ -71,7 +71,7 @@ public class CfakeRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
for (Element el : doc.select("table.display > tbody > tr > td > table > tbody > tr > td > a")) {
if (el.attr("href").contains("upload")) {
return result;

View File

@ -18,7 +18,7 @@ import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.RipUtils;
public class ChanRipper extends AbstractHTMLRipper {
public static List<ChanSite> explicit_domains = Arrays.asList(
private static List<ChanSite> explicit_domains = Arrays.asList(
new ChanSite(Arrays.asList("boards.4chan.org"), Arrays.asList("4cdn.org", "is.4chan.org", "is2.4chan.org")),
new ChanSite(Arrays.asList("archive.moe"), Arrays.asList("data.archive.moe")),
new ChanSite(Arrays.asList("4archive.org"), Arrays.asList("imgur.com")),
@ -26,15 +26,15 @@ public class ChanRipper extends AbstractHTMLRipper {
new ChanSite(Arrays.asList("fgts.jp"), Arrays.asList("dat.fgtsi.org"))
);
public static List<String> url_piece_blacklist = Arrays.asList(
private static List<String> url_piece_blacklist = Arrays.asList(
"=http",
"http://imgops.com/",
"iqdb.org",
"saucenao.com"
);
public ChanSite chanSite;
public Boolean generalChanSite = true;
private ChanSite chanSite;
private Boolean generalChanSite = true;
public ChanRipper(URL url) throws IOException {
super(url);
@ -143,7 +143,7 @@ public class ChanRipper extends AbstractHTMLRipper {
}
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Pattern p; Matcher m;
for (Element link : page.select("a")) {
if (!link.hasAttr("href")) {

View File

@ -21,7 +21,7 @@ import com.rarchives.ripme.utils.Http;
public class CheebyRipper extends AbstractHTMLRipper {
private int offset = 0;
private Map<String, Integer> albumSets = new HashMap<String, Integer>();
private Map<String, Integer> albumSets = new HashMap<>();
public CheebyRipper(URL url) throws IOException {
super(url);
@ -81,8 +81,8 @@ public class CheebyRipper extends AbstractHTMLRipper {
return null;
}
public List<Image> getImagesFromPage(Document page) {
List<Image> imageURLs = new ArrayList<Image>();
private List<Image> getImagesFromPage(Document page) {
List<Image> imageURLs = new ArrayList<>();
for (Element image : page.select("div.i a img")) {
// Get image URL
String imageURL = image.attr("src");
@ -171,7 +171,7 @@ public class CheebyRipper extends AbstractHTMLRipper {
private class Image {
String url, prefix;
int index;
public Image(String url, String prefix, int index) {
Image(String url, String prefix, int index) {
this.url = url;
this.prefix = prefix;
this.index = index;

View File

@ -22,23 +22,21 @@ public class CheveretoRipper extends AbstractHTMLRipper {
super(url);
}
public static List<String> explicit_domains_1 = Arrays.asList("hushpix.com", "tag-fox.com");
private static List<String> explicit_domains_1 = Arrays.asList("hushpix.com", "tag-fox.com");
@Override
public String getHost() {
String host = url.toExternalForm().split("/")[2];
return host;
return url.toExternalForm().split("/")[2];
}
@Override
public String getDomain() {
String host = url.toExternalForm().split("/")[2];
return host;
return url.toExternalForm().split("/")[2];
}
@Override
public boolean canRip(URL url) {
String url_name = url.toExternalForm();
if (explicit_domains_1.contains(url_name.split("/")[2]) == true) {
if (explicit_domains_1.contains(url_name.split("/")[2])) {
Pattern pa = Pattern.compile("(?:https?://)?(?:www\\.)?[a-z1-9-]*\\.[a-z1-9]*/album/([a-zA-Z1-9]*)/?$");
Matcher ma = pa.matcher(url.toExternalForm());
if (ma.matches()) {
@ -103,7 +101,7 @@ public class CheveretoRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
for (Element el : doc.select("a.image-container > img")) {
String imageSource = el.attr("src");
// We remove the .md from images so we download the full size image

View File

@ -49,7 +49,7 @@ public class DatwinRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("img.attachment-thumbnail")) {
String image = thumb.attr("src");
image = image.replaceAll("-\\d{1,3}x\\d{1,3}", "");

View File

@ -14,6 +14,7 @@ import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.rarchives.ripme.ripper.AbstractRipper;
import org.jsoup.Connection.Method;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
@ -32,8 +33,8 @@ public class DeviantartRipper extends AbstractHTMLRipper {
private static final int PAGE_SLEEP_TIME = 3000,
IMAGE_SLEEP_TIME = 2000;
private Map<String,String> cookies = new HashMap<String,String>();
private Set<String> triedURLs = new HashSet<String>();
private Map<String,String> cookies = new HashMap<>();
private Set<String> triedURLs = new HashSet<>();
public DeviantartRipper(URL url) throws IOException {
super(url);
@ -63,7 +64,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
u += "gallery/?";
}
Pattern p = Pattern.compile("^https?://([a-zA-Z0-9\\-]{1,})\\.deviantart\\.com/favou?rites/([0-9]+)/*?$");
Pattern p = Pattern.compile("^https?://([a-zA-Z0-9\\-]+)\\.deviantart\\.com/favou?rites/([0-9]+)/*?$");
Matcher m = p.matcher(url.toExternalForm());
if (!m.matches()) {
String subdir = "/";
@ -88,18 +89,18 @@ public class DeviantartRipper extends AbstractHTMLRipper {
return m.group(1);
}
}
p = Pattern.compile("^https?://([a-zA-Z0-9\\-]{1,})\\.deviantart\\.com/gallery/([0-9]{1,}).*$");
p = Pattern.compile("^https?://([a-zA-Z0-9\\-]+)\\.deviantart\\.com/gallery/([0-9]+).*$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
// Subgallery
return m.group(1) + "_" + m.group(2);
}
p = Pattern.compile("^https?://([a-zA-Z0-9\\-]{1,})\\.deviantart\\.com/favou?rites/([0-9]+)/.*?$");
p = Pattern.compile("^https?://([a-zA-Z0-9\\-]+)\\.deviantart\\.com/favou?rites/([0-9]+)/.*?$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1) + "_faves_" + m.group(2);
}
p = Pattern.compile("^https?://([a-zA-Z0-9\\-]{1,})\\.deviantart\\.com/favou?rites/?$");
p = Pattern.compile("^https?://([a-zA-Z0-9\\-]+)\\.deviantart\\.com/favou?rites/?$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
// Subgallery
@ -121,14 +122,14 @@ public class DeviantartRipper extends AbstractHTMLRipper {
.cookies(cookies)
.get();
}
public String jsonToImage(Document page,String id) {
private String jsonToImage(Document page, String id) {
Elements js = page.select("script[type=\"text/javascript\"]");
for (Element tag : js) {
if (tag.html().contains("window.__pageload")) {
try {
String script = tag.html();
script = script.substring(script.indexOf("window.__pageload"));
if (script.indexOf(id) < 0) {
if (!script.contains(id)) {
continue;
}
script = script.substring(script.indexOf(id));
@ -144,7 +145,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
}
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
// Iterate over all thumbnails
for (Element thumb : page.select("div.zones-container span.thumb")) {
@ -194,7 +195,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
}
@Override
public List<String> getDescriptionsFromPage(Document page) {
List<String> textURLs = new ArrayList<String>();
List<String> textURLs = new ArrayList<>();
// Iterate over all thumbnails
for (Element thumb : page.select("div.zones-container span.thumb")) {
logger.info(thumb.attr("href"));
@ -257,9 +258,9 @@ public class DeviantartRipper extends AbstractHTMLRipper {
* @return Full-size image URL
* @throws Exception If it can't find the full-size URL
*/
public static String thumbToFull(String thumb, boolean throwException) throws Exception {
private static String thumbToFull(String thumb, boolean throwException) throws Exception {
thumb = thumb.replace("http://th", "http://fc");
List<String> fields = new ArrayList<String>(Arrays.asList(thumb.split("/")));
List<String> fields = new ArrayList<>(Arrays.asList(thumb.split("/")));
fields.remove(4);
if (!fields.get(4).equals("f") && throwException) {
// Not a full-size image
@ -339,7 +340,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
* @param page Page the thumbnail is retrieved from
* @return Highest-resolution version of the image based on thumbnail URL and the page.
*/
public String smallToFull(String thumb, String page) {
private String smallToFull(String thumb, String page) {
try {
// Fetch the image page
Response resp = Http.url(page)
@ -373,7 +374,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
}
cookieString = cookieString.substring(0,cookieString.length() - 1);
con.setRequestProperty("Cookie",cookieString);
con.setRequestProperty("User-Agent",this.USER_AGENT);
con.setRequestProperty("User-Agent", USER_AGENT);
con.setInstanceFollowRedirects(true);
con.connect();
int code = con.getResponseCode();
@ -406,7 +407,7 @@ public class DeviantartRipper extends AbstractHTMLRipper {
*/
private Map<String, String> loginToDeviantart() throws IOException {
// Populate postData fields
Map<String,String> postData = new HashMap<String,String>();
Map<String,String> postData = new HashMap<>();
String username = Utils.getConfigString("deviantart.username", new String(Base64.decode("Z3JhYnB5")));
String password = Utils.getConfigString("deviantart.password", new String(Base64.decode("ZmFrZXJz")));
if (username == null || password == null) {

View File

@ -72,7 +72,7 @@ public class DrawcrowdRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : page.select("div.item.asset img")) {
String image = thumb.attr("src");
image = image

View File

@ -18,11 +18,11 @@ import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
public class E621Ripper extends AbstractHTMLRipper {
public static final int POOL_IMAGES_PER_PAGE = 24;
private static final int POOL_IMAGES_PER_PAGE = 24;
private DownloadThreadPool e621ThreadPool = new DownloadThreadPool("e621");
public E621Ripper(URL url) throws IOException {
private E621Ripper(URL url) throws IOException {
super(url);
}
@ -53,7 +53,7 @@ public class E621Ripper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
Elements elements = page.select("#post-list .thumb a,#pool-show .thumb a");
List<String> res = new ArrayList<String>(elements.size());
List<String> res = new ArrayList<>(elements.size());
if (page.getElementById("pool-show") != null) {
int index = 0;
@ -92,23 +92,21 @@ public class E621Ripper extends AbstractHTMLRipper {
@Override
public void downloadURL(final URL url, int index) {
e621ThreadPool.addThread(new Thread(new Runnable() {
public void run() {
try {
Document page = Http.url(url).get();
Element e = page.getElementById("image");
e621ThreadPool.addThread(new Thread(() -> {
try {
Document page = Http.url(url).get();
Element e = page.getElementById("image");
if (e != null) {
addURLToDownload(new URL(e.absUrl("src")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
} else if ((e = page.select(".content object>param[name=\"movie\"]").first()) != null) {
addURLToDownload(new URL(e.absUrl("value")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
} else {
Logger.getLogger(E621Ripper.class.getName()).log(Level.WARNING, "Unsupported media type - please report to program author: " + url.toString());
}
} catch (IOException ex) {
Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
if (e != null) {
addURLToDownload(new URL(e.absUrl("src")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
} else if ((e = page.select(".content object>param[name=\"movie\"]").first()) != null) {
addURLToDownload(new URL(e.absUrl("value")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
} else {
Logger.getLogger(E621Ripper.class.getName()).log(Level.WARNING, "Unsupported media type - please report to program author: " + url.toString());
}
} catch (IOException ex) {
Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
}
}));
}

View File

@ -39,7 +39,7 @@ public class EHentaiRipper extends AbstractHTMLRipper {
// Current HTML document
private Document albumDoc = null;
private static final Map<String,String> cookies = new HashMap<String,String>();
private static final Map<String,String> cookies = new HashMap<>();
static {
cookies.put("nw", "1");
cookies.put("tip", "1");
@ -162,7 +162,7 @@ public class EHentaiRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Elements thumbs = page.select("#gdt > .gdtm a");
// Iterate over images on page
for (Element thumb : thumbs) {
@ -193,7 +193,7 @@ public class EHentaiRipper extends AbstractHTMLRipper {
private int index;
private File workingDir;
public EHentaiImageThread(URL url, int index, File workingDir) {
EHentaiImageThread(URL url, int index, File workingDir) {
super();
this.url = url;
this.index = index;

View File

@ -23,10 +23,10 @@ import com.rarchives.ripme.utils.Http;
public class EightmusesRipper extends AbstractHTMLRipper {
private Document albumDoc = null;
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
// TODO put up a wiki page on using maps to store titles
// the map for storing the title of each album when downloading sub albums
private Map<URL,String> urlTitles = new HashMap<URL,String>();
private Map<URL,String> urlTitles = new HashMap<>();
private Boolean rippingSubalbums = false;
@ -81,7 +81,7 @@ public class EightmusesRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
// get the first image link on the page and check if the last char in it is a number
// if it is a number then we're ripping a comic if not it's a subalbum
String firstImageLink = page.select("div.gallery > a.t-hover").first().attr("href");
@ -136,7 +136,6 @@ public class EightmusesRipper extends AbstractHTMLRipper {
imageURLs.addAll(subalbumImages);
} catch (IOException e) {
logger.warn("Error while loading subalbum " + subUrl, e);
continue;
}
}
}
@ -182,8 +181,7 @@ public class EightmusesRipper extends AbstractHTMLRipper {
Element fullSizeImage = doc.select(".photo").first(); // Select the "photo" element from the page (there should only be 1)
// subdir is the sub dir the cdn has the image stored in
String subdir = doc.select("input#imageDir").first().attr("value");
String path = "https://cdn.ampproject.org/i/s/www.8muses.com/" + subdir + "small/" + fullSizeImage.children().select("#imageName").attr("value"); // Append the path to the fullsize image file to the standard prefix
return path;
return "https://cdn.ampproject.org/i/s/www.8muses.com/" + subdir + "small/" + fullSizeImage.children().select("#imageName").attr("value");
}
@Override

View File

@ -71,19 +71,13 @@ public class EroShareRipper extends AbstractHTMLRipper {
Pattern p_eroshare_profile = Pattern.compile("^https?://eroshare.com/u/([a-zA-Z0-9\\-_]+)/?$");
Matcher m_eroshare_profile = p_eroshare_profile.matcher(url.toExternalForm());
if (m_eroshare_profile.matches()) {
return true;
}
return false;
return m_eroshare_profile.matches();
}
public boolean is_profile(URL url) {
private boolean is_profile(URL url) {
Pattern pa = Pattern.compile("^https?://eroshae.com/u/([a-zA-Z0-9\\-_]+)/?$");
Matcher ma = pa.matcher(url.toExternalForm());
if (ma.matches()) {
return true;
}
return false;
return ma.matches();
}
@Override
@ -103,7 +97,7 @@ public class EroShareRipper extends AbstractHTMLRipper {
@Override
public String getAlbumTitle(URL url) throws MalformedURLException {
if (is_profile(url) == false) {
if (!is_profile(url)) {
try {
// Attempt to use album title as GID
Element titleElement = getFirstPage().select("meta[property=og:title]").first();
@ -122,7 +116,7 @@ public class EroShareRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> URLs = new ArrayList<String>();
List<String> URLs = new ArrayList<>();
//Pictures
Elements imgs = doc.getElementsByTag("img");
for (Element img : imgs) {
@ -172,9 +166,7 @@ public class EroShareRipper extends AbstractHTMLRipper {
.ignoreContentType()
.response();
Document doc = resp.parse();
return doc;
return resp.parse();
}
@Override
@ -214,7 +206,7 @@ public class EroShareRipper extends AbstractHTMLRipper {
Document doc = resp.parse();
List<URL> URLs = new ArrayList<URL>();
List<URL> URLs = new ArrayList<>();
//Pictures
Elements imgs = doc.getElementsByTag("img");
for (Element img : imgs) {

View File

@ -68,7 +68,7 @@ public class EromeRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> URLs = new ArrayList<String>();
List<String> URLs = new ArrayList<>();
//Pictures
Elements imgs = doc.select("div.img > img.img-front");
for (Element img : imgs) {
@ -92,9 +92,7 @@ public class EromeRipper extends AbstractHTMLRipper {
.ignoreContentType()
.response();
Document doc = resp.parse();
return doc;
return resp.parse();
}
@Override
@ -115,7 +113,7 @@ public class EromeRipper extends AbstractHTMLRipper {
Document doc = resp.parse();
List<URL> URLs = new ArrayList<URL>();
List<URL> URLs = new ArrayList<>();
//Pictures
Elements imgs = doc.getElementsByTag("img");
for (Element img : imgs) {

View File

@ -80,7 +80,7 @@ public class FapprovedRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element image : page.select("div.actual-image img")) {
String imageURL = image.attr("src");
if (imageURL.startsWith("//")) {

View File

@ -80,7 +80,7 @@ public class FineboxRipper extends AlbumRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^https?://(www\\.)?(v|f)inebox\\.co/u/([a-zA-Z0-9]{1,}).*$");
Pattern p = Pattern.compile("^https?://(www\\.)?([vf])inebox\\.co/u/([a-zA-Z0-9]+).*$");
Matcher m = p.matcher(url.toExternalForm());
if (!m.matches()) {
throw new MalformedURLException("Expected format: http://"+DOMAIN+"/u/USERNAME");

View File

@ -256,7 +256,7 @@ public class FivehundredpxRipper extends AbstractJSONRipper {
@Override
public List<String> getURLsFromJSON(JSONObject json) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
JSONArray photos = json.getJSONArray("photos");
for (int i = 0; i < photos.length(); i++) {
if (super.isStopped()) {

View File

@ -28,9 +28,9 @@ import com.rarchives.ripme.utils.Utils;
public class FlickrRipper extends AbstractHTMLRipper {
private int page = 1;
private Set<String> attempted = new HashSet<String>();
private Set<String> attempted = new HashSet<>();
private Document albumDoc = null;
private DownloadThreadPool flickrThreadPool;
private final DownloadThreadPool flickrThreadPool;
@Override
public DownloadThreadPool getThreadPool() {
return flickrThreadPool;
@ -162,7 +162,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : page.select("a[data-track=photo-click]")) {
/* TODO find a way to persist the image title
String imageTitle = null;
@ -215,7 +215,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
.method(Method.GET)
.execute();
Document doc = resp.parse();
Map<String,String> postData = new HashMap<String,String>();
Map<String,String> postData = new HashMap<>();
for (Element input : doc.select("input[type=hidden]")) {
postData.put(input.attr("name"), input.attr("value"));
}
@ -239,7 +239,7 @@ public class FlickrRipper extends AbstractHTMLRipper {
private URL url;
private int index;
public FlickrImageThread(URL url, int index) {
FlickrImageThread(URL url, int index) {
super();
this.url = url;
this.index = index;
@ -252,7 +252,6 @@ public class FlickrRipper extends AbstractHTMLRipper {
Elements fullsizeImages = doc.select("div#allsizes-photo img");
if (fullsizeImages.size() == 0) {
logger.error("Could not find flickr image at " + doc.location() + " - missing 'div#allsizes-photo img'");
return;
}
else {
String prefix = "";

View File

@ -28,8 +28,8 @@ import com.rarchives.ripme.utils.Http;
public class FuraffinityRipper extends AbstractHTMLRipper {
static Map<String, String> cookies=null;
static final String urlBase = "https://www.furaffinity.net";
private static Map<String, String> cookies=null;
private static final String urlBase = "https://www.furaffinity.net";
// Thread pool for finding direct image links from "image" pages (html)
private DownloadThreadPool furaffinityThreadPool
@ -75,7 +75,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
.response();
cookies = loginPage.cookies();
Map<String,String> formData = new HashMap<String,String>();
Map<String,String> formData = new HashMap<>();
formData.put("action", "login");
formData.put("retard_protection", "1");
formData.put("name", user);
@ -112,7 +112,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> urls = new ArrayList<String>();
List<String> urls = new ArrayList<>();
Elements urlElements = page.select("figure.t-image > b > u > a");
for (Element e : urlElements) {
urls.add(urlBase + e.select("a").first().attr("href"));
@ -121,7 +121,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
}
@Override
public List<String> getDescriptionsFromPage(Document page) {
List<String> urls = new ArrayList<String>();
List<String> urls = new ArrayList<>();
Elements urlElements = page.select("figure.t-image > b > u > a");
for (Element e : urlElements) {
urls.add(urlBase + e.select("a").first().attr("href"));
@ -157,9 +157,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
ele.select("p").prepend("\\n\\n");
logger.debug("Returning description at " + page);
String tempPage = Jsoup.clean(ele.html().replaceAll("\\\\n", System.getProperty("line.separator")), "", Whitelist.none(), new Document.OutputSettings().prettyPrint(false));
String title = documentz.select("meta[property=og:title]").attr("content");
String tempText = title;
return tempText + "\n" + tempPage; // Overridden saveText takes first line and makes it the file name.
return documentz.select("meta[property=og:title]").attr("content") + "\n" + tempPage; // Overridden saveText takes first line and makes it the file name.
} catch (IOException ioe) {
logger.info("Failed to get description " + page + " : '" + ioe.getMessage() + "'");
return null;
@ -232,7 +230,7 @@ public class FuraffinityRipper extends AbstractHTMLRipper {
private class FuraffinityDocumentThread extends Thread {
private URL url;
public FuraffinityDocumentThread(URL url) {
FuraffinityDocumentThread(URL url) {
super();
this.url = url;
}

View File

@ -60,7 +60,7 @@ public class FuskatorRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
String html = doc.html();
// Get "baseUrl"
String baseUrl = Utils.between(html, "unescape('", "'").get(0);

View File

@ -22,7 +22,7 @@ import com.rarchives.ripme.utils.Http;
public class GifyoRipper extends AbstractHTMLRipper {
private int page = 0;
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
public GifyoRipper(URL url) throws IOException {
super(url);
@ -70,7 +70,7 @@ public class GifyoRipper extends AbstractHTMLRipper {
@Override
public Document getNextPage(Document doc) throws IOException {
page++;
Map<String,String> postData = new HashMap<String,String>();
Map<String,String> postData = new HashMap<>();
postData.put("cmd", "refreshData");
postData.put("view", "gif");
postData.put("layout", "grid");
@ -92,7 +92,7 @@ public class GifyoRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element image : doc.select("img.profile_gif")) {
String imageUrl = image.attr("data-animated");
if (imageUrl.startsWith("//")) {

View File

@ -50,7 +50,7 @@ public class GirlsOfDesireRipper extends AbstractHTMLRipper {
Pattern p;
Matcher m;
p = Pattern.compile("^www\\.girlsofdesire\\.org\\/galleries\\/([\\w\\d-]+)\\/$");
p = Pattern.compile("^www\\.girlsofdesire\\.org/galleries/([\\w\\d-]+)/$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
@ -72,7 +72,7 @@ public class GirlsOfDesireRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("td.vtop > a > img")) {
String imgSrc = thumb.attr("src");
imgSrc = imgSrc.replaceAll("_thumb\\.", ".");

View File

@ -64,7 +64,7 @@ public class HentaiCafeRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
result.add(doc.select("div[id=page] > div.inner > a > img.open").attr("src"));
return result;
}

View File

@ -20,7 +20,7 @@ import com.rarchives.ripme.utils.Http;
public class HentaifoundryRipper extends AbstractHTMLRipper {
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
public HentaifoundryRipper(URL url) throws IOException {
super(url);
}
@ -84,7 +84,7 @@ public class HentaifoundryRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Pattern imgRegex = Pattern.compile(".*/user/([a-zA-Z0-9\\-_]+)/(\\d+)/.*");
for (Element thumb : doc.select("div.thumb_square > a.thumbLink")) {
if (isStopped()) {
@ -115,7 +115,7 @@ public class HentaifoundryRipper extends AbstractHTMLRipper {
imagePage = null;
}
// This is here for when the image is resized to a thumbnail because ripme doesn't report a screensize
if (imagePage.select("div.boxbody > img.center").attr("src").contains("thumbs.") == true) {
if (imagePage.select("div.boxbody > img.center").attr("src").contains("thumbs.")) {
imageURLs.add("http:" + imagePage.select("div.boxbody > img.center").attr("onclick").replace("this.src=", "").replace("'", "").replace("; $(#resize_message).hide();", ""));
}
else {

View File

@ -31,7 +31,7 @@ public class ImagearnRipper extends AbstractHTMLRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^.*imagearn.com/{1,}gallery.php\\?id=([0-9]{1,}).*$");
Pattern p = Pattern.compile("^.*imagearn.com/+gallery.php\\?id=([0-9]+).*$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
@ -43,7 +43,7 @@ public class ImagearnRipper extends AbstractHTMLRipper {
}
public URL sanitizeURL(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^.*imagearn.com/{1,}image.php\\?id=[0-9]{1,}.*$");
Pattern p = Pattern.compile("^.*imagearn.com/+image.php\\?id=[0-9]+.*$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
// URL points to imagearn *image*, not gallery
@ -77,7 +77,7 @@ public class ImagearnRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("div#gallery > div > a")) {
String imageURL = thumb.attr("href");
try {

View File

@ -81,7 +81,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("div > a[target=_blank]:not(.footera)")) {
imageURLs.add(thumb.attr("href"));
}
@ -124,7 +124,7 @@ public class ImagebamRipper extends AbstractHTMLRipper {
private URL url;
private int index;
public ImagebamImageThread(URL url, int index) {
ImagebamImageThread(URL url, int index) {
super();
this.url = url;
this.index = index;

View File

@ -120,7 +120,7 @@ public class ImagefapRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("#gallery img")) {
if (!thumb.hasAttr("src") || !thumb.hasAttr("width")) {
continue;
@ -129,7 +129,7 @@ public class ImagefapRipper extends AbstractHTMLRipper {
image = image.replaceAll(
"http://x.*.fap.to/images/thumb/",
"http://fap.to/images/full/");
image = image.replaceAll("w[0-9]{1,}-h[0-9]{1,}/", "");
image = image.replaceAll("w[0-9]+-h[0-9]+/", "");
imageURLs.add(image);
if (isThisATest()) {
break;

View File

@ -67,7 +67,7 @@ public class ImagestashRipper extends AbstractJSONRipper {
@Override
public List<String> getURLsFromJSON(JSONObject json) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
JSONArray images = json.getJSONArray("images");
for (int i = 0; i < images.length(); i++) {
JSONObject image = images.getJSONObject(i);

View File

@ -62,7 +62,7 @@ public class ImagevenueRipper extends AbstractHTMLRipper {
}
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("a[target=_blank]")) {
imageURLs.add(thumb.attr("href"));
}
@ -83,7 +83,7 @@ public class ImagevenueRipper extends AbstractHTMLRipper {
private URL url;
private int index;
public ImagevenueImageThread(URL url, int index) {
ImagevenueImageThread(URL url, int index) {
super();
this.url = url;
this.index = index;

View File

@ -46,7 +46,7 @@ public class ImgboxRipper extends AbstractHTMLRipper {
}
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("div.boxed-content > a > img")) {
String image = thumb.attr("src")
.replaceAll("[-a-zA-Z0-9.]+s.imgbox.com",

View File

@ -31,14 +31,15 @@ public class ImgurRipper extends AlbumRipper {
private Document albumDoc;
static enum ALBUM_TYPE {
enum ALBUM_TYPE {
ALBUM,
USER,
USER_ALBUM,
USER_IMAGES,
SERIES_OF_IMAGES,
SUBREDDIT
};
}
private ALBUM_TYPE albumType;
public ImgurRipper(URL url) throws IOException {
@ -223,7 +224,7 @@ public class ImgurRipper extends AlbumRipper {
String[] imageIds = m.group(1).split(",");
for (String imageId : imageIds) {
// TODO: Fetch image with ID imageId
logger.debug("Fetching image info for ID " + imageId);;
logger.debug("Fetching image info for ID " + imageId);
try {
JSONObject json = Http.url("https://api.imgur.com/2/image/" + imageId + ".json").getJSON();
if (!json.has("image")) {
@ -350,7 +351,6 @@ public class ImgurRipper extends AlbumRipper {
Thread.sleep(SLEEP_BETWEEN_ALBUMS * 1000);
} catch (Exception e) {
logger.error("Error while ripping album: " + e.getMessage(), e);
continue;
}
}
}
@ -515,12 +515,12 @@ public class ImgurRipper extends AlbumRipper {
}
public static class ImgurImage {
public String title = "",
description = "",
extension = "";
String title = "";
String description = "";
String extension = "";
public URL url = null;
public ImgurImage(URL url) {
ImgurImage(URL url) {
this.url = url;
String tempUrl = url.toExternalForm();
this.extension = tempUrl.substring(tempUrl.lastIndexOf('.'));
@ -528,7 +528,7 @@ public class ImgurRipper extends AlbumRipper {
this.extension = this.extension.substring(0, this.extension.indexOf("?"));
}
}
public ImgurImage(URL url, String title) {
ImgurImage(URL url, String title) {
this(url);
this.title = title;
}
@ -536,7 +536,7 @@ public class ImgurRipper extends AlbumRipper {
this(url, title);
this.description = description;
}
public String getSaveAs() {
String getSaveAs() {
String saveAs = this.title;
String u = url.toExternalForm();
if (u.contains("?")) {
@ -554,17 +554,17 @@ public class ImgurRipper extends AlbumRipper {
}
public static class ImgurAlbum {
public String title = null;
String title = null;
public URL url = null;
public List<ImgurImage> images = new ArrayList<ImgurImage>();
public ImgurAlbum(URL url) {
public List<ImgurImage> images = new ArrayList<>();
ImgurAlbum(URL url) {
this.url = url;
}
public ImgurAlbum(URL url, String title) {
this(url);
this.title = title;
}
public void addImage(ImgurImage image) {
void addImage(ImgurImage image) {
images.add(image);
}
}

View File

@ -75,8 +75,7 @@ public class InstagramRipper extends AbstractJSONRipper {
String baseURL = "http://instagram.com/" + userID + "/media";
try {
JSONObject result = Http.url(baseURL).getJSON();
return result;
return Http.url(baseURL).getJSON();
} catch (JSONException e) {
throw new IOException("Could not get instagram user via: " + baseURL);
}
@ -101,9 +100,7 @@ public class InstagramRipper extends AbstractJSONRipper {
logger.info("Loading " + baseURL);
sleep(1000);
JSONObject nextJSON = Http.url(baseURL).getJSON();
return nextJSON;
return Http.url(baseURL).getJSON();
} else {
throw new IOException("No more images found");
}
@ -129,7 +126,7 @@ public class InstagramRipper extends AbstractJSONRipper {
// Instagram returns cropped images to unauthenticated applications to maintain legacy support.
// To retrieve the uncropped image, remove this segment from the URL.
// Segment format: cX.Y.W.H - eg: c0.134.1080.1080
imageURL = imageURL.replaceAll("\\/c\\d{1,4}\\.\\d{1,4}\\.\\d{1,4}\\.\\d{1,4}", "");
imageURL = imageURL.replaceAll("/c\\d{1,4}\\.\\d{1,4}\\.\\d{1,4}\\.\\d{1,4}", "");
imageURL = imageURL.replaceAll("\\?ig_cache_key.+$", "");
return imageURL;
@ -154,7 +151,7 @@ public class InstagramRipper extends AbstractJSONRipper {
@Override
public List<String> getURLsFromJSON(JSONObject json) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
JSONArray datas = json.getJSONArray("items");
for (int i = 0; i < datas.length(); i++) {
JSONObject data = (JSONObject) datas.get(i);

View File

@ -50,7 +50,7 @@ public class JagodibujaRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
for (Element comicPageUrl : doc.select("div.gallery-icon > a")) {
try {
sleep(500);

View File

@ -51,7 +51,7 @@ public class LusciousRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> urls = new ArrayList<String>();
List<String> urls = new ArrayList<>();
Elements urlElements = page.select("img#single_picture");
for (Element e : urlElements) {
urls.add(e.attr("src"));

View File

@ -25,7 +25,7 @@ import com.rarchives.ripme.utils.Http;
public class MediacrushRipper extends AbstractJSONRipper {
/** Ordered list of preferred formats, sorted by preference (low-to-high) */
private static final Map<String, Integer> PREFERRED_FORMATS = new HashMap<String,Integer>();
private static final Map<String, Integer> PREFERRED_FORMATS = new HashMap<>();
static {
PREFERRED_FORMATS.put("mp4", 0);
PREFERRED_FORMATS.put("wemb",1);
@ -36,7 +36,7 @@ public class MediacrushRipper extends AbstractJSONRipper {
PREFERRED_FORMATS.put("png", 6);
PREFERRED_FORMATS.put("jpg", 7);
PREFERRED_FORMATS.put("jpeg",8);
};
}
public MediacrushRipper(URL url) throws IOException {
super(url);
@ -113,7 +113,7 @@ public class MediacrushRipper extends AbstractJSONRipper {
@Override
public List<String> getURLsFromJSON(JSONObject json) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
// Iterate over all files
JSONArray files = json.getJSONArray("files");
for (int i = 0; i < files.length(); i++) {

View File

@ -41,7 +41,7 @@ public class ModelmayhemRipper extends AlbumRipper {
@Override
public void rip() throws IOException {
Map<String,String> cookies = null,
postData = new HashMap<String,String>();
postData = new HashMap<>();
String gid = getGID(this.url),
ref = "http://www.modelmayhem.com/" + gid;

View File

@ -126,7 +126,7 @@ public class MotherlessRipper extends AlbumRipper {
private URL url;
private int index;
public MotherlessImageThread(URL url, int index) {
MotherlessImageThread(URL url, int index) {
super();
this.url = url;
this.index = index;
@ -142,7 +142,7 @@ public class MotherlessRipper extends AlbumRipper {
Document doc = Http.url(u)
.referrer(u)
.get();
Pattern p = Pattern.compile("^.*__fileurl = '([^']{1,})';.*$", Pattern.DOTALL);
Pattern p = Pattern.compile("^.*__fileurl = '([^']+)';.*$", Pattern.DOTALL);
Matcher m = p.matcher(doc.outerHtml());
if (m.matches()) {
String file = m.group(1);

View File

@ -15,7 +15,7 @@ import org.jsoup.select.Elements;
import java.util.Arrays;
public class MyhentaicomicsRipper extends AbstractHTMLRipper {
public static boolean isTag;
private static boolean isTag;
public MyhentaicomicsRipper(URL url) throws IOException {
super(url);
@ -47,7 +47,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
return ma.group(1);
}
Pattern pat = Pattern.compile("^https?://myhentaicomics.com/index.php/tag/([0-9]*)/?([a-zA-Z%0-9+\\?=:]*)?$");
Pattern pat = Pattern.compile("^https?://myhentaicomics.com/index.php/tag/([0-9]*)/?([a-zA-Z%0-9+?=:]*)?$");
Matcher mat = pat.matcher(url.toExternalForm());
if (mat.matches()) {
isTag = true;
@ -84,8 +84,8 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
}
// This replaces getNextPage when downloading from searchs and tags
public List<String> getNextAlbumPage(String pageUrl) {
List<String> albumPagesList = new ArrayList<String>();
private List<String> getNextAlbumPage(String pageUrl) {
List<String> albumPagesList = new ArrayList<>();
int pageNumber = 1;
albumPagesList.add("http://myhentaicomics.com/index.php/" + pageUrl.split("\\?")[0] + "?page=" + Integer.toString(pageNumber));
while (true) {
@ -115,9 +115,9 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
return albumPagesList;
}
public List<String> getAlbumsFromPage(String url) {
private List<String> getAlbumsFromPage(String url) {
List<String> pagesToRip;
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
logger.info("Running getAlbumsFromPage");
Document doc;
try {
@ -161,7 +161,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
url_string = url_string.replace("%28", "_");
url_string = url_string.replace("%29", "_");
url_string = url_string.replace("%2C", "_");
if (isTag == true) {
if (isTag) {
logger.info("Downloading from a tag or search");
try {
sleep(500);
@ -180,11 +180,11 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
return result;
}
public List<String> getListOfPages(Document doc) {
List<String> pages = new ArrayList<String>();
private List<String> getListOfPages(Document doc) {
List<String> pages = new ArrayList<>();
// Get the link from the last button
String nextPageUrl = doc.select("a.ui-icon-right").last().attr("href");
Pattern pat = Pattern.compile("\\/index\\.php\\/tag\\/[0-9]*\\/[a-zA-Z0-9_\\-\\:+]*\\?page=(\\d+)");
Pattern pat = Pattern.compile("/index\\.php/tag/[0-9]*/[a-zA-Z0-9_\\-:+]*\\?page=(\\d+)");
Matcher mat = pat.matcher(nextPageUrl);
if (mat.matches()) {
logger.debug("Getting pages from a tag");
@ -197,7 +197,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
pages.add(link);
}
} else {
Pattern pa = Pattern.compile("\\/index\\.php\\/search\\?q=[a-zA-Z0-9_\\-\\:]*\\&page=(\\d+)");
Pattern pa = Pattern.compile("/index\\.php/search\\?q=[a-zA-Z0-9_\\-:]*&page=(\\d+)");
Matcher ma = pa.matcher(nextPageUrl);
if (ma.matches()) {
logger.debug("Getting pages from a search");
@ -217,7 +217,7 @@ public class MyhentaicomicsRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
// Checks if this is a comic page or a page of albums
// If true the page is a page of albums
if (doc.toString().contains("class=\"g-item g-album\"")) {

View File

@ -86,7 +86,7 @@ public class NatalieMuRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Pattern p; Matcher m;
//select all album thumbnails
for (Element span : page.select(".NA_articleGallery span")) {

View File

@ -75,7 +75,7 @@ public class NfsfwRipper extends AlbumRipper {
@Override
public void rip() throws IOException {
List<Pair> subAlbums = new ArrayList<Pair>();
List<Pair> subAlbums = new ArrayList<>();
int index = 0;
subAlbums.add(new Pair(this.url.toExternalForm(), ""));
while (subAlbums.size() > 0) {
@ -153,7 +153,7 @@ public class NfsfwRipper extends AlbumRipper {
private String subdir;
private int index;
public NfsfwImageThread(URL url, String subdir, int index) {
NfsfwImageThread(URL url, String subdir, int index) {
super();
this.url = url;
this.subdir = subdir;
@ -187,8 +187,9 @@ public class NfsfwRipper extends AlbumRipper {
}
private class Pair {
public String first, second;
public Pair(String first, String second) {
String first;
String second;
Pair(String first, String second) {
this.first = first;
this.second = second;
}

View File

@ -87,7 +87,7 @@ public class NhentaiRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Elements thumbs = page.select(".gallerythumb");
for (Element el : thumbs) {
String imageUrl = el.attr("href");

View File

@ -55,7 +55,7 @@ public class NudeGalsRipper extends AbstractHTMLRipper {
Pattern p;
Matcher m;
p = Pattern.compile("^.*nude-gals\\.com\\/photoshoot\\.php\\?photoshoot_id=(\\d+)$");
p = Pattern.compile("^.*nude-gals\\.com/photoshoot\\.php\\?photoshoot_id=(\\d+)$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
@ -77,7 +77,7 @@ public class NudeGalsRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Elements thumbs = doc.select("#grid_container .grid > .grid_box");
for (Element thumb : thumbs) {

View File

@ -27,7 +27,7 @@ public class PahealRipper extends AbstractHTMLRipper {
private static Map<String, String> getCookies() {
if (cookies == null) {
cookies = new HashMap<String, String>(1);
cookies = new HashMap<>(1);
cookies.put("ui-tnc-agreed", "true");
}
return cookies;
@ -66,7 +66,7 @@ public class PahealRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
Elements elements = page.select(".shm-thumb.thumb>a").not(".shm-thumb-link");
List<String> res = new ArrayList<String>(elements.size());
List<String> res = new ArrayList<>(elements.size());
for (Element e : elements) {
res.add(e.absUrl("href"));
@ -92,9 +92,7 @@ public class PahealRipper extends AbstractHTMLRipper {
+ Utils.filesystemSafe(new URI(name).getPath())
+ ext);
addURLToDownload(url, outFile);
} catch (IOException ex) {
Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
} catch (URISyntaxException ex) {
} catch (IOException | URISyntaxException ex) {
Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
}
}

View File

@ -85,8 +85,8 @@ public class PhotobucketRipper extends AlbumRipper {
public void rip() throws IOException {
List<String> subalbums = ripAlbumAndGetSubalbums(this.url.toExternalForm());
List<String> subsToRip = new ArrayList<String>(),
rippedSubs = new ArrayList<String>();
List<String> subsToRip = new ArrayList<>(),
rippedSubs = new ArrayList<>();
for (String sub : subalbums) {
subsToRip.add(sub);
@ -117,7 +117,7 @@ public class PhotobucketRipper extends AlbumRipper {
waitForThreads();
}
public List<String> ripAlbumAndGetSubalbums(String theUrl) throws IOException {
private List<String> ripAlbumAndGetSubalbums(String theUrl) throws IOException {
int filesIndex = 0,
filesTotal = 0,
pageIndex = 0;
@ -145,7 +145,7 @@ public class PhotobucketRipper extends AlbumRipper {
}
// Grab the JSON
Pattern p; Matcher m;
p = Pattern.compile("^.*collectionData: (\\{.*\\}).*$", Pattern.DOTALL);
p = Pattern.compile("^.*collectionData: (\\{.*}).*$", Pattern.DOTALL);
m = p.matcher(data);
if (m.matches()) {
jsonString = m.group(1);
@ -176,12 +176,12 @@ public class PhotobucketRipper extends AlbumRipper {
if (url != null) {
return getSubAlbums(url, currentAlbumPath);
} else {
return new ArrayList<String>();
return new ArrayList<>();
}
}
private List<String> getSubAlbums(String url, String currentAlbumPath) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
String subdomain = url.substring(url.indexOf("://")+3);
subdomain = subdomain.substring(0, subdomain.indexOf("."));
String apiUrl = "http://" + subdomain + ".photobucket.com/component/Albums-SubalbumList"

View File

@ -19,7 +19,7 @@ import com.rarchives.ripme.utils.Utils;
public class PornhubRipper extends AlbumRipper {
// All sleep times are in milliseconds
private static final int IMAGE_SLEEP_TIME = 1 * 1000;
private static final int IMAGE_SLEEP_TIME = 1000;
private static final String DOMAIN = "pornhub.com", HOST = "Pornhub";
@ -134,7 +134,7 @@ public class PornhubRipper extends AlbumRipper {
private URL url;
private int index;
public PornhubImageThread(URL url, int index, File workingDir) {
PornhubImageThread(URL url, int index, File workingDir) {
super();
this.url = url;
this.index = index;

View File

@ -64,7 +64,7 @@ public class RajceRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
for (Element el : page.select("a.photoThumb")) {
result.add(el.attr("href"));
}

View File

@ -131,7 +131,7 @@ public class RedditRipper extends AlbumRipper {
Object jsonObj = new JSONTokener(jsonString).nextValue();
JSONArray jsonArray = new JSONArray();
if (jsonObj instanceof JSONObject) {
jsonArray.put( (JSONObject) jsonObj);
jsonArray.put(jsonObj);
} else if (jsonObj instanceof JSONArray) {
jsonArray = (JSONArray) jsonObj;
} else {
@ -167,7 +167,7 @@ public class RedditRipper extends AlbumRipper {
}
}
public void handleBody(String body, String id) {
private void handleBody(String body, String id) {
Pattern p = RipUtils.getURLRegex();
Matcher m = p.matcher(body);
while (m.find()) {
@ -179,7 +179,7 @@ public class RedditRipper extends AlbumRipper {
}
}
public void handleURL(String theUrl, String id) {
private void handleURL(String theUrl, String id) {
URL originalURL;
try {
originalURL = new URL(theUrl);
@ -220,21 +220,21 @@ public class RedditRipper extends AlbumRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
// User
Pattern p = Pattern.compile("^https?://[a-zA-Z0-9\\.]{0,4}reddit\\.com/(user|u)/([a-zA-Z0-9_\\-]{3,}).*$");
Pattern p = Pattern.compile("^https?://[a-zA-Z0-9.]{0,4}reddit\\.com/(user|u)/([a-zA-Z0-9_\\-]{3,}).*$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return "user_" + m.group(m.groupCount());
}
// Post
p = Pattern.compile("^https?://[a-zA-Z0-9\\.]{0,4}reddit\\.com/.*comments/([a-zA-Z0-9]{1,8}).*$");
p = Pattern.compile("^https?://[a-zA-Z0-9.]{0,4}reddit\\.com/.*comments/([a-zA-Z0-9]{1,8}).*$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return "post_" + m.group(m.groupCount());
}
// Subreddit
p = Pattern.compile("^https?://[a-zA-Z0-9\\.]{0,4}reddit\\.com/r/([a-zA-Z0-9_]{1,}).*$");
p = Pattern.compile("^https?://[a-zA-Z0-9.]{0,4}reddit\\.com/r/([a-zA-Z0-9_]+).*$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
return "sub_" + m.group(m.groupCount());

View File

@ -21,7 +21,7 @@ import com.rarchives.ripme.utils.Http;
public class SankakuComplexRipper extends AbstractHTMLRipper {
private Document albumDoc = null;
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
public SankakuComplexRipper(URL url) throws IOException {
super(url);
@ -65,7 +65,7 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
// Image URLs are basically thumbnail URLs with a different domain, a simple
// path replacement, and a ?xxxxxx post ID at the end (obtainable from the href)
for (Element thumbSpan : doc.select("div.content > div > span.thumb > a")) {
@ -77,7 +77,6 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
imageURLs.add("https:" + subPage.select("div[id=post-content] > a > img").attr("src"));
} catch (IOException e) {
logger.warn("Error while loading page " + postLink, e);
continue;
}
}
return imageURLs;
@ -96,7 +95,7 @@ public class SankakuComplexRipper extends AbstractHTMLRipper {
String nextPage = pagination.attr("abs:next-page-url");
// Only logged in users can see past page 25
// Trying to rip page 26 will throw a no images found error
if (nextPage.contains("page=26") == false) {
if (!nextPage.contains("page=26")) {
logger.info("Getting next page: " + pagination.attr("abs:next-page-url"));
return Http.url(pagination.attr("abs:next-page-url")).cookies(cookies).get();
}

View File

@ -48,7 +48,7 @@ public class ShesFreakyRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("a[data-lightbox=\"gallery\"]")) {
String image = thumb.attr("href");
imageURLs.add(image);

View File

@ -71,7 +71,7 @@ public class SinnercomicsRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
for (Element el : doc.select("meta[property=og:image]")) {
String imageSource = el.attr("content");
imageSource = imageSource.replace(" alt=", "");

View File

@ -63,7 +63,7 @@ public class SupertangasRipper extends AlbumRipper {
}
for (Element imageElement : images) {
String image = imageElement.attr("href");
image = image.replaceAll("\\/fotos\\/", "/fotos/images/");
image = image.replaceAll("/fotos/", "/fotos/images/");
addURLToDownload(new URL(image));
}
try {

View File

@ -18,19 +18,19 @@ import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
class TapasticEpisode {
protected int index, id;
protected String title, filename;
int id;
String filename;
public TapasticEpisode(int index, int id, String title) {
this.index = index;
int index1 = index;
this.id = id;
this.title = title;
String title1 = title;
this.filename = Utils.filesystemSafe(title);
}
}
public class TapasticRipper extends AbstractHTMLRipper {
private List<TapasticEpisode> episodes=new ArrayList<TapasticEpisode>();
private List<TapasticEpisode> episodes= new ArrayList<>();
public TapasticRipper(URL url) throws IOException {
super(url);
@ -53,7 +53,7 @@ public class TapasticRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> urls = new ArrayList<String>();
List<String> urls = new ArrayList<>();
String html = page.data();
if (!html.contains("episodeList : ")) {
logger.error("No 'episodeList' found at " + this.url);

View File

@ -23,7 +23,6 @@ import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
public class ThechiveRipper extends AbstractHTMLRipper {
public static boolean isTag;
public ThechiveRipper(URL url) throws IOException {
super(url);
@ -44,7 +43,7 @@ public class ThechiveRipper extends AbstractHTMLRipper {
Pattern p = Pattern.compile("^https?://thechive.com/[0-9]*/[0-9]*/[0-9]*/([a-zA-Z0-9_\\-]*)/?$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
isTag = false;
boolean isTag = false;
return m.group(1);
}
throw new MalformedURLException("Expected thechive.com URL format: " +
@ -59,7 +58,7 @@ public class ThechiveRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
for (Element el : doc.select("img.attachment-gallery-item-full")) {
String imageSource = el.attr("src");
// We replace thumbs with resizes so we can the full sized images

View File

@ -77,7 +77,7 @@ public class TumblrRipper extends AlbumRipper {
return url;
}
public boolean isTumblrURL(URL url) {
private boolean isTumblrURL(URL url) {
String checkURL = "http://api.tumblr.com/v2/blog/";
checkURL += url.getHost();
checkURL += "/info?api_key=" + getApiKey();
@ -202,7 +202,6 @@ public class TumblrRipper extends AlbumRipper {
}
} catch (Exception e) {
logger.error("[!] Error while parsing photo in " + photo, e);
continue;
}
}
} else if (post.has("video_url")) {
@ -254,7 +253,7 @@ public class TumblrRipper extends AlbumRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
final String DOMAIN_REGEX = "^https?://([a-zA-Z0-9\\-\\.]+)";
final String DOMAIN_REGEX = "^https?://([a-zA-Z0-9\\-.]+)";
Pattern p;
Matcher m;

View File

@ -54,14 +54,14 @@ public class TwitterRipper extends AlbumRipper {
@Override
public URL sanitizeURL(URL url) throws MalformedURLException {
// https://twitter.com/search?q=from%3Apurrbunny%20filter%3Aimages&src=typd
Pattern p = Pattern.compile("^https?://(m\\.)?twitter\\.com/search\\?q=([a-zA-Z0-9%\\-_]{1,}).*$");
Pattern p = Pattern.compile("^https?://(m\\.)?twitter\\.com/search\\?q=([a-zA-Z0-9%\\-_]+).*$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
albumType = ALBUM_TYPE.SEARCH;
searchText = m.group(2);
return url;
}
p = Pattern.compile("^https?://(m\\.)?twitter\\.com/([a-zA-Z0-9\\-_]{1,}).*$");
p = Pattern.compile("^https?://(m\\.)?twitter\\.com/([a-zA-Z0-9\\-_]+).*$");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
albumType = ALBUM_TYPE.ACCOUNT;
@ -83,7 +83,6 @@ public class TwitterRipper extends AlbumRipper {
try {
JSONObject json = new JSONObject(body);
accessToken = json.getString("access_token");
return;
} catch (JSONException e) {
// Fall through
throw new IOException("Failure while parsing JSON: " + body, e);
@ -142,7 +141,7 @@ public class TwitterRipper extends AlbumRipper {
}
private List<JSONObject> getTweets(String url) throws IOException {
List<JSONObject> tweets = new ArrayList<JSONObject>();
List<JSONObject> tweets = new ArrayList<>();
logger.info(" Retrieving " + url);
Document doc = Http.url(url)
.ignoreContentType()
@ -283,7 +282,6 @@ public class TwitterRipper extends AlbumRipper {
if (c == '%') {
gid.append('_');
i += 2;
continue;
// Ignore non-alphanumeric chars
} else if (
(c >= 'a' && c <= 'z')

View File

@ -22,7 +22,7 @@ import com.rarchives.ripme.utils.Http;
public class TwodgalleriesRipper extends AbstractHTMLRipper {
private int offset = 0;
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
public TwodgalleriesRipper(URL url) throws IOException {
super(url);
@ -90,7 +90,7 @@ public class TwodgalleriesRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
for (Element thumb : doc.select("div.hcaption > img")) {
String image = thumb.attr("src");
image = image.replace("/200H/", "/");
@ -114,7 +114,7 @@ public class TwodgalleriesRipper extends AbstractHTMLRipper {
cookies = resp.cookies();
String ctoken = resp.parse().select("form > input[name=ctoken]").first().attr("value");
Map<String,String> postdata = new HashMap<String,String>();
Map<String,String> postdata = new HashMap<>();
postdata.put("user[login]", new String(Base64.decode("cmlwbWU=")));
postdata.put("user[password]", new String(Base64.decode("cmlwcGVy")));
postdata.put("rememberme", "1");

View File

@ -56,7 +56,7 @@ public class VidbleRipper extends AbstractHTMLRipper {
}
private static List<String> getURLsFromPageStatic(Document doc) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
Elements els = doc.select("#ContentPlaceHolder1_divContent");
Elements imgs = els.select("img");
for (Element img : imgs) {
@ -76,7 +76,7 @@ public class VidbleRipper extends AbstractHTMLRipper {
}
public static List<URL> getURLsFromPage(URL url) throws IOException {
List<URL> urls = new ArrayList<URL>();
List<URL> urls = new ArrayList<>();
Document doc = Http.url(url).get();
for (String stringURL : getURLsFromPageStatic(doc)) {
urls.add(new URL(stringURL));

View File

@ -84,7 +84,7 @@ public class VineRipper extends AlbumRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^https?://(www\\.)?vine\\.co/u/([0-9]{1,}).*$");
Pattern p = Pattern.compile("^https?://(www\\.)?vine\\.co/u/([0-9]+).*$");
Matcher m = p.matcher(url.toExternalForm());
if (!m.matches()) {
throw new MalformedURLException("Expected format: http://vine.co/u/######");

View File

@ -37,11 +37,7 @@ public class VkRipper extends AlbumRipper {
}
// Ignore /video pages (but not /videos pages)
String u = url.toExternalForm();
if (u.contains("/video") && !u.contains("videos")) {
// Single video page
return false;
}
return true;
return !u.contains("/video") || u.contains("videos");
}
@Override
@ -62,7 +58,7 @@ public class VkRipper extends AlbumRipper {
private void ripVideos() throws IOException {
String oid = getGID(this.url).replace("videos", "");
String u = "http://vk.com/al_video.php";
Map<String,String> postData = new HashMap<String,String>();
Map<String,String> postData = new HashMap<>();
postData.put("al", "1");
postData.put("act", "load_videos_silent");
postData.put("offset", "0");
@ -97,13 +93,13 @@ public class VkRipper extends AlbumRipper {
}
private void ripImages() throws IOException {
Map<String,String> photoIDsToURLs = new HashMap<String,String>();
Map<String,String> photoIDsToURLs = new HashMap<>();
int offset = 0;
while (true) {
logger.info(" Retrieving " + this.url);
// al=1&offset=80&part=1
Map<String,String> postData = new HashMap<String,String>();
Map<String,String> postData = new HashMap<>();
postData.put("al", "1");
postData.put("offset", Integer.toString(offset));
postData.put("part", "1");
@ -120,7 +116,7 @@ public class VkRipper extends AlbumRipper {
body = body.substring(body.indexOf("<div"));
doc = Jsoup.parseBodyFragment(body);
List<Element> elements = doc.select("a");
Set<String> photoIDsToGet = new HashSet<String>();
Set<String> photoIDsToGet = new HashSet<>();
for (Element a : elements) {
if (!a.attr("onclick").contains("showPhoto('")) {
logger.error("a: " + a);
@ -162,8 +158,8 @@ public class VkRipper extends AlbumRipper {
}
private Map<String,String> getPhotoIDsToURLs(String photoID) throws IOException {
Map<String,String> photoIDsToURLs = new HashMap<String,String>();
Map<String,String> postData = new HashMap<String,String>();
Map<String,String> photoIDsToURLs = new HashMap<>();
Map<String,String> postData = new HashMap<>();
// act=show&al=1&list=album45506334_172415053&module=photos&photo=45506334_304658196
postData.put("list", getGID(this.url));
postData.put("act", "show");
@ -202,7 +198,7 @@ public class VkRipper extends AlbumRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^https?://(www\\.)?vk\\.com/(photos|album|videos)-?([a-zA-Z0-9_]{1,}).*$");
Pattern p = Pattern.compile("^https?://(www\\.)?vk\\.com/(photos|album|videos)-?([a-zA-Z0-9_]+).*$");
Matcher m = p.matcher(url.toExternalForm());
if (!m.matches()) {
throw new MalformedURLException("Expected format: http://vk.com/album#### or vk.com/photos####");

View File

@ -16,7 +16,7 @@ import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;
public class WordpressComicRipper extends AbstractHTMLRipper {
String pageTitle = "";
private String pageTitle = "";
public WordpressComicRipper(URL url) throws IOException {
super(url);
@ -34,7 +34,7 @@ public class WordpressComicRipper extends AbstractHTMLRipper {
// http://tnbtu.com/comic/01-00/
// http://shipinbottle.pepsaga.com/?p=281
public static List<String> explicit_domains = Arrays.asList(
private static List<String> explicit_domains = Arrays.asList(
"www.totempole666.com",
"buttsmithy.com",
"themonsterunderthebed.net",
@ -49,14 +49,12 @@ public class WordpressComicRipper extends AbstractHTMLRipper {
@Override
public String getHost() {
String host = url.toExternalForm().split("/")[2];
return host;
return url.toExternalForm().split("/")[2];
}
@Override
public String getDomain() {
String host = url.toExternalForm().split("/")[2];
return host;
return url.toExternalForm().split("/")[2];
}
@Override
@ -130,7 +128,7 @@ public class WordpressComicRipper extends AbstractHTMLRipper {
@Override
public String getAlbumTitle(URL url) throws MalformedURLException {
Pattern totempole666Pat = Pattern.compile("(?:https?://)?(?:www\\.)?totempole666.com\\/comic/([a-zA-Z0-9_-]*)/?$");
Pattern totempole666Pat = Pattern.compile("(?:https?://)?(?:www\\.)?totempole666.com/comic/([a-zA-Z0-9_-]*)/?$");
Matcher totempole666Mat = totempole666Pat.matcher(url.toExternalForm());
if (totempole666Mat.matches()) {
return "totempole666.com" + "_" + "The_cummoner";
@ -237,7 +235,7 @@ public class WordpressComicRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
if (getHost().contains("www.totempole666.com")
|| getHost().contains("buttsmithy.com")
|| getHost().contains("themonsterunderthebed.net")

View File

@ -57,7 +57,7 @@ public class XbooruRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> res = new ArrayList<String>(100);
List<String> res = new ArrayList<>(100);
for (Element e : page.getElementsByTag("post")) {
res.add(e.absUrl("file_url") + "#" + e.attr("id"));
}
@ -71,7 +71,7 @@ public class XbooruRipper extends AbstractHTMLRipper {
private String getTerm(URL url) throws MalformedURLException {
if (gidPattern == null) {
gidPattern = Pattern.compile("^https?://(www\\.)?xbooru\\.com/(index.php)?.*([?&]tags=([a-zA-Z0-9$_.+!*'(),%-]+))(\\&|(#.*)?$)");
gidPattern = Pattern.compile("^https?://(www\\.)?xbooru\\.com/(index.php)?.*([?&]tags=([a-zA-Z0-9$_.+!*'(),%-]+))(&|(#.*)?$)");
}
Matcher m = gidPattern.matcher(url.toExternalForm());

View File

@ -87,7 +87,7 @@ public class XhamsterRipper extends AlbumRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^https?://[wmde.]*xhamster\\.com/photos/gallery/.*?(\\d{1,})$");
Pattern p = Pattern.compile("^https?://[wmde.]*xhamster\\.com/photos/gallery/.*?(\\d+)$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);

View File

@ -20,7 +20,7 @@ import com.rarchives.ripme.utils.Http;
public class ZizkiRipper extends AbstractHTMLRipper {
private Document albumDoc = null;
private Map<String,String> cookies = new HashMap<String,String>();
private Map<String,String> cookies = new HashMap<>();
public ZizkiRipper(URL url) throws IOException {
super(url);
@ -76,7 +76,7 @@ public class ZizkiRipper extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document page) {
List<String> imageURLs = new ArrayList<String>();
List<String> imageURLs = new ArrayList<>();
// Page contains images
logger.info("Look for images.");
for (Element thumb : page.select("img")) {
@ -99,7 +99,6 @@ public class ZizkiRipper extends AbstractHTMLRipper {
src = thumb.attr("src");
logger.debug("Found url with " + src);
if (!src.contains("zizki.com")) {
continue;
} else {
imageURLs.add(src.replace("/styles/medium/public/","/styles/large/public/"));
}

View File

@ -48,7 +48,7 @@ public class tamindirmp3 extends AbstractHTMLRipper {
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> music = new ArrayList<String>();
List<String> music = new ArrayList<>();
for (Element el : doc.select("mp3")) {
music.add(el.attr("src"));
}

View File

@ -12,7 +12,7 @@ import java.util.regex.Pattern;
import static com.rarchives.ripme.App.logger;
public class ClipboardUtils {
class ClipboardUtils {
private static AutoripThread autoripThread = new AutoripThread();
public static void setClipboardAutoRip(boolean enabled) {
@ -38,11 +38,7 @@ public class ClipboardUtils {
} catch (IllegalStateException e) {
e.printStackTrace();
logger.error("Caught and recovered from IllegalStateException: " + e.getMessage());
} catch (HeadlessException e) {
e.printStackTrace();
} catch (UnsupportedFlavorException e) {
e.printStackTrace();
} catch (IOException e) {
} catch (HeadlessException | IOException | UnsupportedFlavorException e) {
e.printStackTrace();
}
return null;
@ -50,8 +46,8 @@ public class ClipboardUtils {
}
class AutoripThread extends Thread {
protected volatile boolean isRunning = false;
Set<String> rippedURLs = new HashSet<String>();
volatile boolean isRunning = false;
private Set<String> rippedURLs = new HashSet<>();
public void run() {
isRunning = true;
@ -61,11 +57,11 @@ class AutoripThread extends Thread {
String clipboard = ClipboardUtils.getClipboardString();
if (clipboard != null) {
Pattern p = Pattern.compile(
"\\b(((ht|f)tp(s?)\\:\\/\\/|~\\/|\\/)|www.)" +
"\\b(((ht|f)tp(s?)://|~/|/)|www.)" +
"(\\w+:\\w+@)?(([-\\w]+\\.)+(com|org|net|gov" +
"|mil|biz|info|mobi|name|aero|jobs|museum" +
"|travel|[a-z]{2}))(:[\\d]{1,5})?" +
"(((\\/([-\\w~!$+|.,=]|%[a-f\\d]{2})+)+|\\/)+|\\?|#)?" +
"(((/([-\\w~!$+|.,=]|%[a-f\\d]{2})+)+|/)+|\\?|#)?" +
"((\\?([-\\w~!$+|.,*:]|%[a-f\\d{2}])+=?" +
"([-\\w~!$+|.,*:=]|%[a-f\\d]{2})*)" +
"(&(?:[-\\w~!$+|.,*:]|%[a-f\\d{2}])+=?" +

View File

@ -30,7 +30,7 @@ public class ContextMenuMouseListener extends MouseAdapter {
private String savedString = "";
private Actions lastActionSelected;
private enum Actions { UNDO, CUT, COPY, PASTE, SELECT_ALL };
private enum Actions { UNDO, CUT, COPY, PASTE, SELECT_ALL }
@SuppressWarnings("serial")
public ContextMenuMouseListener() {

View File

@ -26,7 +26,7 @@ public class History {
};
public History() {
this.list = new ArrayList<HistoryEntry>();
this.list = new ArrayList<>();
}
public void add(HistoryEntry entry) {
@ -90,7 +90,7 @@ public class History {
throw new RuntimeException("Could not find URL " + url + " in History");
}
public void fromJSON(JSONArray jsonArray) {
private void fromJSON(JSONArray jsonArray) {
JSONObject json;
for (int i = 0; i < jsonArray.length(); i++) {
json = jsonArray.getJSONObject(i);
@ -99,15 +99,12 @@ public class History {
}
public void fromFile(String filename) throws IOException {
InputStream is = new FileInputStream(filename);
try {
try (InputStream is = new FileInputStream(filename)) {
String jsonString = IOUtils.toString(is);
JSONArray jsonArray = new JSONArray(jsonString);
fromJSON(jsonArray);
} catch (JSONException e) {
throw new IOException("Failed to load JSON file " + filename + ": " + e.getMessage(), e);
} finally {
is.close();
}
}
@ -119,7 +116,7 @@ public class History {
}
}
public JSONArray toJSON() {
private JSONArray toJSON() {
JSONArray jsonArray = new JSONArray();
for (HistoryEntry entry : list) {
jsonArray.put(entry.toJSON());
@ -136,11 +133,8 @@ public class History {
}
public void toFile(String filename) throws IOException {
OutputStream os = new FileOutputStream(filename);
try {
try (OutputStream os = new FileOutputStream(filename)) {
IOUtils.write(toJSON().toString(2), os);
} finally {
os.close();
}
}
}

View File

@ -10,17 +10,13 @@ import javax.swing.Action;
import javax.swing.JPopupMenu;
import javax.swing.JTable;
public class HistoryMenuMouseListener extends MouseAdapter {
class HistoryMenuMouseListener extends MouseAdapter {
private JPopupMenu popup = new JPopupMenu();
private Action checkAllAction,
uncheckAllAction,
checkSelected,
uncheckSelected;
private JTable tableComponent;
@SuppressWarnings("serial")
public HistoryMenuMouseListener() {
checkAllAction = new AbstractAction("Check All") {
Action checkAllAction = new AbstractAction("Check All") {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row = 0; row < tableComponent.getRowCount(); row++) {
@ -30,7 +26,7 @@ public class HistoryMenuMouseListener extends MouseAdapter {
};
popup.add(checkAllAction);
uncheckAllAction = new AbstractAction("Check None") {
Action uncheckAllAction = new AbstractAction("Check None") {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row = 0; row < tableComponent.getRowCount(); row++) {
@ -42,7 +38,7 @@ public class HistoryMenuMouseListener extends MouseAdapter {
popup.addSeparator();
checkSelected = new AbstractAction("Check Selected") {
Action checkSelected = new AbstractAction("Check Selected") {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row : tableComponent.getSelectedRows()) {
@ -52,7 +48,7 @@ public class HistoryMenuMouseListener extends MouseAdapter {
};
popup.add(checkSelected);
uncheckSelected = new AbstractAction("Uncheck Selected") {
Action uncheckSelected = new AbstractAction("Uncheck Selected") {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row : tableComponent.getSelectedRows()) {

View File

@ -103,7 +103,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private static JButton optionLog;
private static JPanel logPanel;
private static JTextPane logText;
private static JScrollPane logTextScroll;
// History
private static JButton optionHistory;
@ -111,8 +110,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private static JPanel historyPanel;
private static JTable historyTable;
private static AbstractTableModel historyTableModel;
private static JScrollPane historyTableScrollPane;
private static JPanel historyButtonPanel;
private static JButton historyButtonRemove,
historyButtonClear,
historyButtonRerip;
@ -120,9 +117,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
// Queue
public static JButton optionQueue;
private static JPanel queuePanel;
private static JList queueList;
private static DefaultListModel queueListModel;
private static JScrollPane queueListScroll;
// Configuration
private static JButton optionConfiguration;
@ -150,8 +145,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private static TrayIcon trayIcon;
private static MenuItem trayMenuMain;
private static MenuItem trayMenuAbout;
private static MenuItem trayMenuExit;
private static CheckboxMenuItem trayMenuAutorip;
private static Image mainIcon;
@ -168,12 +161,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
loadHistory();
setupHandlers();
Thread shutdownThread = new Thread() {
@Override
public void run() {
shutdownCleanup();
}
};
Thread shutdownThread = new Thread(() -> shutdownCleanup());
Runtime.getRuntime().addShutdownHook(shutdownThread);
if (Utils.getConfigBoolean("auto.update", true)) {
@ -185,16 +173,11 @@ public final class MainWindow implements Runnable, RipStatusHandler {
trayMenuAutorip.setState(autoripEnabled);
}
public void upgradeProgram() {
private void upgradeProgram() {
if (!configurationPanel.isVisible()) {
optionConfiguration.doClick();
}
Runnable r = new Runnable() {
@Override
public void run() {
UpdateUtils.updateProgram(configUpdateLabel);
}
};
Runnable r = () -> UpdateUtils.updateProgram(configUpdateLabel);
new Thread(r).start();
}
@ -204,7 +187,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
mainFrame.setVisible(true);
}
public void shutdownCleanup() {
private void shutdownCleanup() {
Utils.setConfigBoolean("file.overwrite", configOverwriteCheckbox.isSelected());
Utils.setConfigInteger("threads.size", Integer.parseInt(configThreadsText.getText()));
Utils.setConfigInteger("download.retries", Integer.parseInt(configRetriesText.getText()));
@ -241,12 +224,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
}
private void pack() {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
mainFrame.pack();
}
});
SwingUtilities.invokeLater(() -> mainFrame.pack());
}
private void createUI(Container pane) {
@ -263,13 +241,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException e) {
logger.error("[!] Exception setting system theme:", e);
} catch (InstantiationException e) {
logger.error("[!] Exception setting system theme:", e);
} catch (IllegalAccessException e) {
logger.error("[!] Exception setting system theme:", e);
} catch (UnsupportedLookAndFeelException e) {
} catch (ClassNotFoundException | InstantiationException | UnsupportedLookAndFeelException | IllegalAccessException e) {
logger.error("[!] Exception setting system theme:", e);
}
@ -282,7 +254,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
try {
Image stopIcon = ImageIO.read(getClass().getClassLoader().getResource("stop.png"));
stopButton.setIcon(new ImageIcon(stopIcon));
} catch (Exception e) { }
} catch (Exception ignored) { }
JPanel ripPanel = new JPanel(new GridBagLayout());
ripPanel.setBorder(emptyBorder);
@ -333,7 +305,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
logPanel = new JPanel(new GridBagLayout());
logPanel.setBorder(emptyBorder);
logText = new JTextPaneNoWrap();
logTextScroll = new JScrollPane(logText);
JScrollPane logTextScroll = new JScrollPane(logText);
logPanel.setVisible(false);
logPanel.setPreferredSize(new Dimension(300, 250));
logPanel.add(logTextScroll, gbc);
@ -349,7 +321,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
return HISTORY.getColumnName(col);
}
@Override
public Class<? extends Object> getColumnClass(int c) {
public Class<?> getColumnClass(int c) {
return getValueAt(0, c).getClass();
}
@Override
@ -394,7 +366,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
}
historyTable.getColumnModel().getColumn(i).setPreferredWidth(width);
}
historyTableScrollPane = new JScrollPane(historyTable);
JScrollPane historyTableScrollPane = new JScrollPane(historyTable);
historyButtonRemove = new JButton("Remove");
historyButtonClear = new JButton("Clear");
historyButtonRerip = new JButton("Re-rip Checked");
@ -405,7 +377,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
gbc.ipady = 180;
historyPanel.add(historyTablePanel, gbc);
gbc.ipady = 0;
historyButtonPanel = new JPanel(new GridBagLayout());
JPanel historyButtonPanel = new JPanel(new GridBagLayout());
historyButtonPanel.setPreferredSize(new Dimension(300, 10));
historyButtonPanel.setBorder(emptyBorder);
gbc.gridx = 0; historyButtonPanel.add(historyButtonRemove, gbc);
@ -419,10 +391,10 @@ public final class MainWindow implements Runnable, RipStatusHandler {
queuePanel.setVisible(false);
queuePanel.setPreferredSize(new Dimension(300, 250));
queueListModel = new DefaultListModel();
queueList = new JList(queueListModel);
JList queueList = new JList(queueListModel);
queueList.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
queueList.addMouseListener(new QueueMenuMouseListener());
queueListScroll = new JScrollPane(queueList,
JScrollPane queueListScroll = new JScrollPane(queueList,
JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
for (String item : Utils.getConfigList("queue")) {
@ -568,148 +540,113 @@ public final class MainWindow implements Runnable, RipStatusHandler {
}
}
});
stopButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
if (ripper != null) {
ripper.stop();
isRipping = false;
stopButton.setEnabled(false);
statusProgress.setValue(0);
statusProgress.setVisible(false);
pack();
statusProgress.setValue(0);
status("Ripping interrupted");
appendLog("Ripper interrupted", Color.RED);
}
}
});
optionLog.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
logPanel.setVisible(!logPanel.isVisible());
historyPanel.setVisible(false);
queuePanel.setVisible(false);
configurationPanel.setVisible(false);
optionLog.setFont(optionLog.getFont().deriveFont(Font.BOLD));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
stopButton.addActionListener(event -> {
if (ripper != null) {
ripper.stop();
isRipping = false;
stopButton.setEnabled(false);
statusProgress.setValue(0);
statusProgress.setVisible(false);
pack();
statusProgress.setValue(0);
status("Ripping interrupted");
appendLog("Ripper interrupted", Color.RED);
}
});
optionHistory.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
logPanel.setVisible(false);
historyPanel.setVisible(!historyPanel.isVisible());
queuePanel.setVisible(false);
configurationPanel.setVisible(false);
optionLog.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.BOLD));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
pack();
}
optionLog.addActionListener(event -> {
logPanel.setVisible(!logPanel.isVisible());
historyPanel.setVisible(false);
queuePanel.setVisible(false);
configurationPanel.setVisible(false);
optionLog.setFont(optionLog.getFont().deriveFont(Font.BOLD));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
pack();
});
optionQueue.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
logPanel.setVisible(false);
historyPanel.setVisible(false);
queuePanel.setVisible(!queuePanel.isVisible());
configurationPanel.setVisible(false);
optionLog.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.BOLD));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
pack();
}
optionHistory.addActionListener(event -> {
logPanel.setVisible(false);
historyPanel.setVisible(!historyPanel.isVisible());
queuePanel.setVisible(false);
configurationPanel.setVisible(false);
optionLog.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.BOLD));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
pack();
});
optionConfiguration.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
logPanel.setVisible(false);
historyPanel.setVisible(false);
queuePanel.setVisible(false);
configurationPanel.setVisible(!configurationPanel.isVisible());
optionLog.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.BOLD));
pack();
}
optionQueue.addActionListener(event -> {
logPanel.setVisible(false);
historyPanel.setVisible(false);
queuePanel.setVisible(!queuePanel.isVisible());
configurationPanel.setVisible(false);
optionLog.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.BOLD));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
pack();
});
historyButtonRemove.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
int[] indices = historyTable.getSelectedRows();
for (int i = indices.length - 1; i >= 0; i--) {
int modelIndex = historyTable.convertRowIndexToModel(indices[i]);
HISTORY.remove(modelIndex);
}
try {
historyTableModel.fireTableDataChanged();
} catch (Exception e) { }
saveHistory();
}
optionConfiguration.addActionListener(event -> {
logPanel.setVisible(false);
historyPanel.setVisible(false);
queuePanel.setVisible(false);
configurationPanel.setVisible(!configurationPanel.isVisible());
optionLog.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionHistory.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionQueue.setFont(optionLog.getFont().deriveFont(Font.PLAIN));
optionConfiguration.setFont(optionLog.getFont().deriveFont(Font.BOLD));
pack();
});
historyButtonClear.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
HISTORY.clear();
try {
historyTableModel.fireTableDataChanged();
} catch (Exception e) { }
saveHistory();
historyButtonRemove.addActionListener(event -> {
int[] indices = historyTable.getSelectedRows();
for (int i = indices.length - 1; i >= 0; i--) {
int modelIndex = historyTable.convertRowIndexToModel(indices[i]);
HISTORY.remove(modelIndex);
}
try {
historyTableModel.fireTableDataChanged();
} catch (Exception e) { }
saveHistory();
});
historyButtonClear.addActionListener(event -> {
HISTORY.clear();
try {
historyTableModel.fireTableDataChanged();
} catch (Exception e) { }
saveHistory();
});
// Re-rip all history
historyButtonRerip.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
if (HISTORY.isEmpty()) {
JOptionPane.showMessageDialog(null,
"There are no history entries to re-rip. Rip some albums first",
"RipMe Error",
JOptionPane.ERROR_MESSAGE);
return;
}
int added = 0;
for (HistoryEntry entry : HISTORY.toList()) {
if (entry.selected) {
added++;
queueListModel.addElement(entry.url);
}
}
if (added == 0) {
JOptionPane.showMessageDialog(null,
"No history entries have been 'Checked'\n" +
"Check an entry by clicking the checkbox to the right of the URL or Right-click a URL to check/uncheck all items",
"RipMe Error",
JOptionPane.ERROR_MESSAGE);
historyButtonRerip.addActionListener(event -> {
if (HISTORY.isEmpty()) {
JOptionPane.showMessageDialog(null,
"There are no history entries to re-rip. Rip some albums first",
"RipMe Error",
JOptionPane.ERROR_MESSAGE);
return;
}
int added = 0;
for (HistoryEntry entry : HISTORY.toList()) {
if (entry.selected) {
added++;
queueListModel.addElement(entry.url);
}
}
if (added == 0) {
JOptionPane.showMessageDialog(null,
"No history entries have been 'Checked'\n" +
"Check an entry by clicking the checkbox to the right of the URL or Right-click a URL to check/uncheck all items",
"RipMe Error",
JOptionPane.ERROR_MESSAGE);
}
});
configUpdateButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Thread t = new Thread() {
@Override
public void run() {
UpdateUtils.updateProgram(configUpdateLabel);
}
};
t.start();
}
configUpdateButton.addActionListener(arg0 -> {
Thread t = new Thread(() -> UpdateUtils.updateProgram(configUpdateLabel));
t.start();
});
configLogLevelCombobox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
String level = ((JComboBox) arg0.getSource()).getSelectedItem().toString();
setLogLevel(level);
}
configLogLevelCombobox.addActionListener(arg0 -> {
String level = ((JComboBox) arg0.getSource()).getSelectedItem().toString();
setLogLevel(level);
});
configSaveDirLabel.addMouseListener(new MouseAdapter() {
@Override
@ -721,90 +658,56 @@ public final class MainWindow implements Runnable, RipStatusHandler {
} catch (Exception e1) { }
}
});
configSaveDirButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
UIManager.put("FileChooser.useSystemExtensionHiding", false);
JFileChooser jfc = new JFileChooser(Utils.getWorkingDirectory());
jfc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
int returnVal = jfc.showDialog(null, "select directory");
if (returnVal != JFileChooser.APPROVE_OPTION) {
return;
}
File chosenFile = jfc.getSelectedFile();
String chosenPath = null;
try {
chosenPath = chosenFile.getCanonicalPath();
} catch (Exception e) {
logger.error("Error while getting selected path: ", e);
return;
}
configSaveDirLabel.setText(Utils.shortenPath(chosenPath));
Utils.setConfigString("rips.directory", chosenPath);
configSaveDirButton.addActionListener(arg0 -> {
UIManager.put("FileChooser.useSystemExtensionHiding", false);
JFileChooser jfc = new JFileChooser(Utils.getWorkingDirectory());
jfc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
int returnVal = jfc.showDialog(null, "select directory");
if (returnVal != JFileChooser.APPROVE_OPTION) {
return;
}
File chosenFile = jfc.getSelectedFile();
String chosenPath = null;
try {
chosenPath = chosenFile.getCanonicalPath();
} catch (Exception e) {
logger.error("Error while getting selected path: ", e);
return;
}
configSaveDirLabel.setText(Utils.shortenPath(chosenPath));
Utils.setConfigString("rips.directory", chosenPath);
});
configOverwriteCheckbox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("file.overwrite", configOverwriteCheckbox.isSelected());
}
configOverwriteCheckbox.addActionListener(arg0 -> Utils.setConfigBoolean("file.overwrite", configOverwriteCheckbox.isSelected()));
configSaveOrderCheckbox.addActionListener(arg0 -> Utils.setConfigBoolean("download.save_order", configSaveOrderCheckbox.isSelected()));
configSaveLogs.addActionListener(arg0 -> {
Utils.setConfigBoolean("log.save", configSaveLogs.isSelected());
Utils.configureLogger();
});
configSaveOrderCheckbox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("download.save_order", configSaveOrderCheckbox.isSelected());
}
configSaveURLsOnly.addActionListener(arg0 -> {
Utils.setConfigBoolean("urls_only.save", configSaveURLsOnly.isSelected());
Utils.configureLogger();
});
configSaveLogs.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("log.save", configSaveLogs.isSelected());
Utils.configureLogger();
}
configSaveAlbumTitles.addActionListener(arg0 -> {
Utils.setConfigBoolean("album_titles.save", configSaveAlbumTitles.isSelected());
Utils.configureLogger();
});
configSaveURLsOnly.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("urls_only.save", configSaveURLsOnly.isSelected());
Utils.configureLogger();
}
configClipboardAutorip.addActionListener(arg0 -> {
Utils.setConfigBoolean("clipboard.autorip", configClipboardAutorip.isSelected());
ClipboardUtils.setClipboardAutoRip(configClipboardAutorip.isSelected());
trayMenuAutorip.setState(configClipboardAutorip.isSelected());
Utils.configureLogger();
});
configSaveAlbumTitles.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("album_titles.save", configSaveAlbumTitles.isSelected());
Utils.configureLogger();
}
configSaveDescriptions.addActionListener(arg0 -> {
Utils.setConfigBoolean("descriptions.save", configSaveDescriptions.isSelected());
Utils.configureLogger();
});
configClipboardAutorip.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("clipboard.autorip", configClipboardAutorip.isSelected());
ClipboardUtils.setClipboardAutoRip(configClipboardAutorip.isSelected());
trayMenuAutorip.setState(configClipboardAutorip.isSelected());
Utils.configureLogger();
}
configPreferMp4.addActionListener(arg0 -> {
Utils.setConfigBoolean("prefer.mp4", configPreferMp4.isSelected());
Utils.configureLogger();
});
configSaveDescriptions.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("descriptions.save", configSaveDescriptions.isSelected());
Utils.configureLogger();
}
});
configPreferMp4.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("prefer.mp4", configPreferMp4.isSelected());
Utils.configureLogger();
}
});
configWindowPosition.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
Utils.setConfigBoolean("window.position", configWindowPosition.isSelected());
Utils.configureLogger();
}
configWindowPosition.addActionListener(arg0 -> {
Utils.setConfigBoolean("window.position", configWindowPosition.isSelected());
Utils.configureLogger();
});
queueListModel.addListDataListener(new ListDataListener() {
@Override
@ -828,17 +731,19 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private void setLogLevel(String level) {
Level newLevel = Level.ERROR;
level = level.substring(level.lastIndexOf(' ') + 1);
if (level.equals("Debug")) {
newLevel = Level.DEBUG;
}
else if (level.equals("Info")) {
newLevel = Level.INFO;
}
else if (level.equals("Warn")) {
newLevel = Level.WARN;
}
else if (level.equals("Error")) {
newLevel = Level.ERROR;
switch (level) {
case "Debug":
newLevel = Level.DEBUG;
break;
case "Info":
newLevel = Level.INFO;
break;
case "Warn":
newLevel = Level.WARN;
break;
case "Error":
newLevel = Level.ERROR;
break;
}
Logger.getRootLogger().setLevel(newLevel);
logger.setLevel(newLevel);
@ -865,82 +770,66 @@ public final class MainWindow implements Runnable, RipStatusHandler {
});
PopupMenu trayMenu = new PopupMenu();
trayMenuMain = new MenuItem("Hide");
trayMenuMain.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
toggleTrayClick();
}
});
trayMenuAbout = new MenuItem("About " + mainFrame.getTitle());
trayMenuAbout.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
StringBuilder about = new StringBuilder();
about.append("<html><h1>")
.append(mainFrame.getTitle())
.append("</h1>");
about.append("Download albums from various websites:");
try {
List<String> rippers = Utils.getListOfAlbumRippers();
about.append("<ul>");
for (String ripper : rippers) {
about.append("<li>");
ripper = ripper.substring(ripper.lastIndexOf('.') + 1);
if (ripper.contains("Ripper")) {
ripper = ripper.substring(0, ripper.indexOf("Ripper"));
}
about.append(ripper);
about.append("</li>");
trayMenuMain.addActionListener(arg0 -> toggleTrayClick());
MenuItem trayMenuAbout = new MenuItem("About " + mainFrame.getTitle());
trayMenuAbout.addActionListener(arg0 -> {
StringBuilder about = new StringBuilder();
about.append("<html><h1>")
.append(mainFrame.getTitle())
.append("</h1>");
about.append("Download albums from various websites:");
try {
List<String> rippers = Utils.getListOfAlbumRippers();
about.append("<ul>");
for (String ripper : rippers) {
about.append("<li>");
ripper = ripper.substring(ripper.lastIndexOf('.') + 1);
if (ripper.contains("Ripper")) {
ripper = ripper.substring(0, ripper.indexOf("Ripper"));
}
about.append("</ul>");
} catch (Exception e) { }
about.append("<br>And download videos from video sites:");
try {
List<String> rippers = Utils.getListOfVideoRippers();
about.append("<ul>");
for (String ripper : rippers) {
about.append("<li>");
ripper = ripper.substring(ripper.lastIndexOf('.') + 1);
if (ripper.contains("Ripper")) {
ripper = ripper.substring(0, ripper.indexOf("Ripper"));
}
about.append(ripper);
about.append("</li>");
about.append(ripper);
about.append("</li>");
}
about.append("</ul>");
} catch (Exception e) { }
about.append("<br>And download videos from video sites:");
try {
List<String> rippers = Utils.getListOfVideoRippers();
about.append("<ul>");
for (String ripper : rippers) {
about.append("<li>");
ripper = ripper.substring(ripper.lastIndexOf('.') + 1);
if (ripper.contains("Ripper")) {
ripper = ripper.substring(0, ripper.indexOf("Ripper"));
}
about.append("</ul>");
} catch (Exception e) { }
about.append(ripper);
about.append("</li>");
}
about.append("</ul>");
} catch (Exception e) { }
about.append("Do you want to visit the project homepage on Github?");
about.append("</html>");
int response = JOptionPane.showConfirmDialog(null,
about.toString(),
mainFrame.getTitle(),
JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE,
new ImageIcon(mainIcon));
if (response == JOptionPane.YES_OPTION) {
try {
Desktop.getDesktop().browse(URI.create("http://github.com/4pr0n/ripme"));
} catch (IOException e) {
logger.error("Exception while opening project home page", e);
}
about.append("Do you want to visit the project homepage on Github?");
about.append("</html>");
int response = JOptionPane.showConfirmDialog(null,
about.toString(),
mainFrame.getTitle(),
JOptionPane.YES_NO_OPTION,
JOptionPane.PLAIN_MESSAGE,
new ImageIcon(mainIcon));
if (response == JOptionPane.YES_OPTION) {
try {
Desktop.getDesktop().browse(URI.create("http://github.com/4pr0n/ripme"));
} catch (IOException e) {
logger.error("Exception while opening project home page", e);
}
}
});
trayMenuExit = new MenuItem("Exit");
trayMenuExit.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
System.exit(0);
}
});
MenuItem trayMenuExit = new MenuItem("Exit");
trayMenuExit.addActionListener(arg0 -> System.exit(0));
trayMenuAutorip = new CheckboxMenuItem("Clipboard Autorip");
trayMenuAutorip.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent arg0) {
ClipboardUtils.setClipboardAutoRip(trayMenuAutorip.getState());
configClipboardAutorip.setSelected(trayMenuAutorip.getState());
}
trayMenuAutorip.addItemListener(arg0 -> {
ClipboardUtils.setClipboardAutoRip(trayMenuAutorip.getState());
configClipboardAutorip.setSelected(trayMenuAutorip.getState());
});
trayMenu.add(trayMenuMain);
trayMenu.add(trayMenuAbout);
@ -966,10 +855,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
mainFrame.setAlwaysOnTop(false);
}
});
} catch (IOException e) {
//TODO implement proper stack trace handling this is really just intented as a placeholder until you implement proper error handling
e.printStackTrace();
} catch (AWTException e) {
} catch (IOException | AWTException e) {
//TODO implement proper stack trace handling this is really just intented as a placeholder until you implement proper error handling
e.printStackTrace();
}
@ -1027,12 +913,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
if (HISTORY.toList().size() == 0) {
// Loaded from config, still no entries.
// Guess rip history based on rip folder
String[] dirs = Utils.getWorkingDirectory().list(new FilenameFilter() {
@Override
public boolean accept(File dir, String file) {
return new File(dir.getAbsolutePath() + File.separator + file).isDirectory();
}
});
String[] dirs = Utils.getWorkingDirectory().list((dir, file) -> new File(dir.getAbsolutePath() + File.separator + file).isDirectory());
for (String dir : dirs) {
String url = RipUtils.urlFromDirectoryName(dir);
if (url != null) {
@ -1131,7 +1012,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
try {
mainFrame.setTitle("Ripping - RipMe v" + UpdateUtils.getThisJarVersion());
status("Starting rip...");
ripper.setObserver((RipStatusHandler) this);
ripper.setObserver(this);
Thread t = new Thread(ripper);
if (configShowPopup.isSelected() &&
(!mainFrame.isVisible() || !mainFrame.isActive())) {
@ -1170,7 +1051,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private final AbstractRipper ripper;
private final RipStatusMessage msg;
public StatusEvent(AbstractRipper ripper, RipStatusMessage msg) {
StatusEvent(AbstractRipper ripper, RipStatusMessage msg) {
this.ripper = ripper;
this.msg = msg;
}
@ -1195,11 +1076,11 @@ public final class MainWindow implements Runnable, RipStatusHandler {
case LOADING_RESOURCE:
case DOWNLOAD_STARTED:
if (logger.isEnabledFor(Level.INFO)) {
appendLog( "Downloading " + (String) msg.getObject(), Color.BLACK);
appendLog( "Downloading " + msg.getObject(), Color.BLACK);
}
break;
case DOWNLOAD_COMPLETE:
appendLog( "Downloaded " + (String) msg.getObject(), Color.GREEN);
appendLog( "Downloaded " + msg.getObject(), Color.GREEN);
break;
case DOWNLOAD_ERRORED:
if (logger.isEnabledFor(Level.ERROR)) {
@ -1219,7 +1100,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
statusProgress.setVisible(false);
openButton.setVisible(false);
pack();
statusWithColor("Error: " + (String) msg.getObject(), Color.RED);
statusWithColor("Error: " + msg.getObject(), Color.RED);
break;
case RIP_COMPLETE:
@ -1260,14 +1141,11 @@ public final class MainWindow implements Runnable, RipStatusHandler {
} catch (Exception e) { }
appendLog( "Rip complete, saved to " + f.getAbsolutePath(), Color.GREEN);
openButton.setActionCommand(f.toString());
openButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
try {
Desktop.getDesktop().open(new File(event.getActionCommand()));
} catch (Exception e) {
logger.error(e);
}
openButton.addActionListener(event -> {
try {
Desktop.getDesktop().open(new File(event.getActionCommand()));
} catch (Exception e) {
logger.error(e);
}
});
pack();
@ -1310,26 +1188,20 @@ public final class MainWindow implements Runnable, RipStatusHandler {
Utils.setConfigBoolean("window.position", false);
}
public static boolean hasWindowPositionBug() {
private static boolean hasWindowPositionBug() {
String osName = System.getProperty("os.name");
if (osName != null) {
// Java on Windows has a bug where if we try to manually set the position of the Window,
// javaw.exe will not close itself down when the application is closed.
// Therefore, even if isWindowPositioningEnabled, if we are on Windows, we ignore it.
return osName.startsWith("Windows");
} else {
// If we're unsure, since we know there might be a bug,
// better be safe and report that the bug exists.
return true;
}
// Java on Windows has a bug where if we try to manually set the position of the Window,
// javaw.exe will not close itself down when the application is closed.
// Therefore, even if isWindowPositioningEnabled, if we are on Windows, we ignore it.
return osName == null || osName.startsWith("Windows");
}
public static boolean isWindowPositioningEnabled() {
private static boolean isWindowPositioningEnabled() {
boolean isEnabled = Utils.getConfigBoolean("window.position", true);
return isEnabled && !hasWindowPositionBug();
}
public static void saveWindowPosition(Frame frame) {
private static void saveWindowPosition(Frame frame) {
if (!isWindowPositioningEnabled()) {
return;
}
@ -1357,7 +1229,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
logger.debug("Saved window position (x=" + x + ", y=" + y + ", w=" + w + ", h=" + h + ")");
}
public static void restoreWindowPosition(Frame frame) {
private static void restoreWindowPosition(Frame frame) {
if (!isWindowPositioningEnabled()) {
mainFrame.setLocationRelativeTo(null); // default to middle of screen
return;

View File

@ -14,17 +14,15 @@ import javax.swing.JPopupMenu;
import com.rarchives.ripme.utils.Utils;
public class QueueMenuMouseListener extends MouseAdapter {
class QueueMenuMouseListener extends MouseAdapter {
private JPopupMenu popup = new JPopupMenu();
private Action removeSelected,
clearQueue;
private JList queueList;
private DefaultListModel queueListModel;
@SuppressWarnings("serial")
public QueueMenuMouseListener() {
removeSelected = new AbstractAction("Remove Selected") {
Action removeSelected = new AbstractAction("Remove Selected") {
@Override
public void actionPerformed(ActionEvent ae) {
Object o = queueList.getSelectedValue();
@ -37,7 +35,7 @@ public class QueueMenuMouseListener extends MouseAdapter {
};
popup.add(removeSelected);
clearQueue = new AbstractAction("Remove All") {
Action clearQueue = new AbstractAction("Remove All") {
@Override
public void actionPerformed(ActionEvent ae) {
queueListModel.removeAllElements();

View File

@ -27,7 +27,7 @@ public class UpdateUtils {
private static final String mainFileName = "ripme.jar";
private static final String updateFileName = "ripme.jar.update";
public static String getUpdateJarURL(String latestVersion) {
private static String getUpdateJarURL(String latestVersion) {
return "https://github.com/" + REPO_NAME + "/releases/download/" + latestVersion + "/ripme.jar";
}
@ -98,7 +98,6 @@ public class UpdateUtils {
JOptionPane.ERROR_MESSAGE);
configUpdateLabel.setText("");
logger.error("Error while updating: ", e);
return;
}
} else {
logger.debug("This version (" + UpdateUtils.getThisJarVersion() +
@ -193,18 +192,15 @@ public class UpdateUtils {
bw.close();
logger.info("Saved update script to " + batchFile);
// Run updater script on exit
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
try {
logger.info("Executing: " + batchFile);
Runtime.getRuntime().exec(batchExec);
} catch (IOException e) {
//TODO implement proper stack trace handling this is really just intented as a placeholder until you implement proper error handling
e.printStackTrace();
}
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
try {
logger.info("Executing: " + batchFile);
Runtime.getRuntime().exec(batchExec);
} catch (IOException e) {
//TODO implement proper stack trace handling this is really just intented as a placeholder until you implement proper error handling
e.printStackTrace();
}
});
}));
logger.info("Exiting older version, should execute update script (" + batchFile + ") during exit");
System.exit(0);
}

View File

@ -8,7 +8,7 @@ import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
public class AES {
class AES {
/**
* Hack to get JCE Unlimited Strenght so we can use weird AES encryption stuff.

View File

@ -22,7 +22,7 @@ import com.rarchives.ripme.ripper.AbstractRipper;
*/
public class Http {
public static final int TIMEOUT = Utils.getConfigInteger("page.timeout", 5 * 1000);
private static final int TIMEOUT = Utils.getConfigInteger("page.timeout", 5 * 1000);
private static final Logger logger = Logger.getLogger(AbstractRipper.class);
private int retries;
@ -34,7 +34,7 @@ public class Http {
this.url = url;
defaultSettings();
}
public Http(URL url) {
private Http(URL url) {
this.url = url.toExternalForm();
defaultSettings();
}
@ -92,7 +92,7 @@ public class Http {
return this;
}
public Http data(String name, String value) {
Map<String,String> data = new HashMap<String,String>();
Map<String,String> data = new HashMap<>();
data.put(name, value);
return data(data);
}
@ -132,7 +132,6 @@ public class Http {
} catch (IOException e) {
logger.warn("Error while loading " + url, e);
lastException = e;
continue;
}
}
throw new IOException("Failed to load " + url + " after " + this.retries + " attempts", lastException);

View File

@ -27,7 +27,7 @@ public class RipUtils {
private static final Logger logger = Logger.getLogger(RipUtils.class);
public static List<URL> getFilesFromURL(URL url) {
List<URL> result = new ArrayList<URL>();
List<URL> result = new ArrayList<>();
logger.debug("Checking " + url);
// Imgur album
@ -104,7 +104,7 @@ public class RipUtils {
}
// Direct link to image
p = Pattern.compile("(https?://[a-zA-Z0-9\\-\\.]+\\.[a-zA-Z]{2,3}(/\\S*)\\.(jpg|jpeg|gif|png|mp4)(\\?.*)?)");
p = Pattern.compile("(https?://[a-zA-Z0-9\\-.]+\\.[a-zA-Z]{2,3}(/\\S*)\\.(jpg|jpeg|gif|png|mp4)(\\?.*)?)");
m = p.matcher(url.toExternalForm());
if (m.matches()) {
try {
@ -145,7 +145,7 @@ public class RipUtils {
}
public static Pattern getURLRegex() {
return Pattern.compile("(https?://[a-zA-Z0-9\\-\\.]+\\.[a-zA-Z]{2,3}(/\\S*))");
return Pattern.compile("(https?://[a-zA-Z0-9\\-.]+\\.[a-zA-Z]{2,3}(/\\S*))");
}
public static String urlFromDirectoryName(String dir) {
@ -182,14 +182,16 @@ public class RipUtils {
}
String url = null;
String[] fields = dir.split("_");
if (fields[0].equals("sub")) {
url = "http://reddit.com/r/" + dir;
}
else if (fields[0].equals("user")) {
url = "http://reddit.com/user/" + dir;
}
else if (fields[0].equals("post")) {
url = "http://reddit.com/comments/" + dir;
switch (fields[0]) {
case "sub":
url = "http://reddit.com/r/" + dir;
break;
case "user":
url = "http://reddit.com/user/" + dir;
break;
case "post":
url = "http://reddit.com/comments/" + dir;
break;
}
return url;
}

View File

@ -34,7 +34,7 @@ import com.rarchives.ripme.ripper.AbstractRipper;
* Common utility functions used in various places throughout the project.
*/
public class Utils {
public static final String RIP_DIRECTORY = "rips";
private static final String RIP_DIRECTORY = "rips";
private static final String configFile = "rip.properties";
private static final String OS = System.getProperty("os.name").toLowerCase();
private static final Logger logger = Logger.getLogger(Utils.class);
@ -107,7 +107,7 @@ public class Utils {
return config.getBoolean(key, defaultValue);
}
public static List<String> getConfigList(String key) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
for (Object obj : config.getList(key, new ArrayList<String>())) {
if (obj instanceof String) {
result.add( (String) obj);
@ -124,7 +124,7 @@ public class Utils {
}
public static void setConfigList(String key, Enumeration<Object> enumeration) {
config.clearProperty(key);
List<Object> list = new ArrayList<Object>();
List<Object> list = new ArrayList<>();
while (enumeration.hasMoreElements()) {
list.add(enumeration.nextElement());
}
@ -246,7 +246,7 @@ public class Utils {
* List of classes within the package
*/
public static ArrayList<Class<?>> getClassesForPackage(String pkgname) {
ArrayList<Class<?>> classes = new ArrayList<Class<?>>();
ArrayList<Class<?>> classes = new ArrayList<>();
String relPath = pkgname.replace('.', '/');
URL resource = ClassLoader.getSystemClassLoader().getResource(relPath);
if (resource == null) {
@ -311,7 +311,7 @@ public class Utils {
return classes;
}
public static final int SHORTENED_PATH_LENGTH = 12;
private static final int SHORTENED_PATH_LENGTH = 12;
public static String shortenPath(String path) {
return shortenPath(new File(path));
}
@ -347,14 +347,14 @@ public class Utils {
}
public static List<String> getListOfAlbumRippers() throws Exception {
List<String> list = new ArrayList<String>();
List<String> list = new ArrayList<>();
for (Constructor<?> ripper : AbstractRipper.getRipperConstructors("com.rarchives.ripme.ripper.rippers")) {
list.add(ripper.getName());
}
return list;
}
public static List<String> getListOfVideoRippers() throws Exception {
List<String> list = new ArrayList<String>();
List<String> list = new ArrayList<>();
for (Constructor<?> ripper : AbstractRipper.getRipperConstructors("com.rarchives.ripme.ripper.rippers.video")) {
list.add(ripper.getName());
}
@ -365,12 +365,9 @@ public class Utils {
URL resource = ClassLoader.getSystemClassLoader().getResource(filename);
try {
final Clip clip = (Clip) AudioSystem.getLine(new Line.Info(Clip.class));
clip.addLineListener(new LineListener() {
@Override
public void update(LineEvent event) {
if (event.getType() == LineEvent.Type.STOP) {
clip.close();
}
clip.addLineListener(event -> {
if (event.getType() == LineEvent.Type.STOP) {
clip.close();
}
});
clip.open(AudioSystem.getAudioInputStream(resource));
@ -412,7 +409,7 @@ public class Utils {
* @return List of all strings that are between 'start' and 'finish'
*/
public static List<String> between(String fullText, String start, String finish) {
List<String> result = new ArrayList<String>();
List<String> result = new ArrayList<>();
int i, j;
i = fullText.indexOf(start);
while (i >= 0) {
@ -435,7 +432,7 @@ public class Utils {
* @return The map of all query parameters
*/
public static Map<String,String> parseUrlQuery(String query) {
Map<String,String> res = new HashMap<String, String>();
Map<String,String> res = new HashMap<>();
if (query.equals("")) {
return res;

View File

@ -10,7 +10,7 @@ import com.rarchives.ripme.ripper.rippers.ChanRipper;
public class ChanRipperTest extends RippersTest {
public void testChanURLFailures() throws IOException {
List<URL> failURLs = new ArrayList<URL>();
List<URL> failURLs = new ArrayList<>();
// URLs that should not work
for (URL url : failURLs) {
try {
@ -18,13 +18,12 @@ public class ChanRipperTest extends RippersTest {
fail("Instantiated ripper for URL that should not work: " + url);
} catch (Exception e) {
// Expected
continue;
}
}
}
public void testChanURLPasses() throws IOException {
List<URL> passURLs = new ArrayList<URL>();
List<URL> passURLs = new ArrayList<>();
// URLs that should work
passURLs.add(new URL("http://desuchan.net/v/res/7034.html"));
passURLs.add(new URL("http://boards.4chan.org/hr/thread/2214511"));
@ -46,7 +45,7 @@ public class ChanRipperTest extends RippersTest {
}
public void testChanRipper() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
// URLs that should return more than 1 image
//contentURLs.add(new URL("http://desuchan.net/v/res/7034.html"));
//contentURLs.add(new URL("http://boards.420chan.org/ana/res/75984.php"));

View File

@ -10,7 +10,7 @@ import com.rarchives.ripme.ripper.rippers.ImagefapRipper;
public class ImagefapRipperTest extends RippersTest {
public void testImagefapAlbums() throws IOException {
Map<URL, String> testURLs = new HashMap<URL, String>();
Map<URL, String> testURLs = new HashMap<>();
// Album with specific title
testURLs.put(new URL("http://www.imagefap.com/pictures/4649440/Frozen-%28Elsa-and-Anna%29?view=2"),
"Frozen (Elsa and Anna)");

View File

@ -16,7 +16,7 @@ import com.rarchives.ripme.utils.Utils;
public class ImgurRipperTest extends RippersTest {
public void testImgurURLFailures() throws IOException {
List<URL> failURLs = new ArrayList<URL>();
List<URL> failURLs = new ArrayList<>();
// Imgur urls that should not work
failURLs.add(new URL("http://imgur.com"));
failURLs.add(new URL("http://imgur.com/"));
@ -31,13 +31,12 @@ public class ImgurRipperTest extends RippersTest {
fail("Instantiated ripper for URL that should not work: " + url);
} catch (Exception e) {
// Expected
continue;
}
}
}
public void testImgurAlbums() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
// URLs that should return more than 1 image
contentURLs.add(new URL("http://imgur.com/a/dS9OQ#0")); // Horizontal layout
contentURLs.add(new URL("http://imgur.com/a/YpsW9#0")); // Grid layout

View File

@ -12,7 +12,7 @@ import com.rarchives.ripme.ripper.rippers.InstagramRipper;
public class InstagramRipperTest extends RippersTest {
public void testInstagramGID() throws IOException {
Map<URL, String> testURLs = new HashMap<URL, String>();
Map<URL, String> testURLs = new HashMap<>();
testURLs.put(new URL("http://instagram.com/Test_User"), "Test_User");
testURLs.put(new URL("http://instagram.com/_test_user_"), "_test_user_");
testURLs.put(new URL("http://instagram.com/-test-user-"), "-test-user-");
@ -25,7 +25,7 @@ public class InstagramRipperTest extends RippersTest {
}
public void testInstagramAlbums() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
contentURLs.add(new URL("http://instagram.com/anacheri"));
for (URL url : contentURLs) {
InstagramRipper ripper = new InstagramRipper(url);

View File

@ -10,7 +10,7 @@ import com.rarchives.ripme.ripper.rippers.NatalieMuRipper;
public class NatalieMuRipperTest extends RippersTest {
public void testNatalieMuURLFailures() throws IOException {
List<URL> failURLs = new ArrayList<URL>();
List<URL> failURLs = new ArrayList<>();
// URLs that should not work
for (URL url : failURLs) {
try {
@ -18,13 +18,12 @@ public class NatalieMuRipperTest extends RippersTest {
fail("Instantiated ripper for URL that should not work: " + url);
} catch (Exception e) {
// Expected
continue;
}
}
}
public void testNatalieMuURLPasses() throws IOException {
List<URL> passURLs = new ArrayList<URL>();
List<URL> passURLs = new ArrayList<>();
// URLs that should work
passURLs.add(new URL("http://natalie.mu/music/news/140367"));
passURLs.add(new URL("http://cdn2.natalie.mu/music/news/140411"));
@ -41,7 +40,7 @@ public class NatalieMuRipperTest extends RippersTest {
}
public void testNatalieMuRipper() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
// URLs that should return more than 1 image
contentURLs.add(new URL("http://natalie.mu/music/news/140367"));
contentURLs.add(new URL("http://cdn2.natalie.mu/music/news/140411"));

View File

@ -18,14 +18,9 @@ import com.rarchives.ripme.utils.Utils;
*/
public class RippersTest extends TestCase {
public final Logger logger = Logger.getLogger(RippersTest.class);
private final Logger logger = Logger.getLogger(RippersTest.class);
/** Dummy test to make JUnit not complain */
public void test() {
assert(true);
}
protected void testRipper(AbstractRipper ripper) {
void testRipper(AbstractRipper ripper) {
try {
// Turn on Debug logging
((ConsoleAppender)Logger.getRootLogger().getAppender("stdout")).setThreshold(Level.DEBUG);
@ -61,7 +56,7 @@ public class RippersTest extends TestCase {
"txt", "log", "php"};
/** Recursively deletes a directory */
protected void deleteDir(File dir) {
void deleteDir(File dir) {
if (!dir.getName().contains("_")) {
// All ripped albums contain an underscore
// Don't delete an album if it doesn't have an underscore
@ -83,7 +78,7 @@ public class RippersTest extends TestCase {
}
dir.delete();
}
protected void deleteSubdirs(File workingDir) {
void deleteSubdirs(File workingDir) {
for (File f : workingDir.listFiles()) {
if (f.isDirectory()) {
for (File sf : f.listFiles()) {

View File

@ -36,7 +36,7 @@ public class VideoRippersTest extends RippersTest {
}
public void testXvideosRipper() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
contentURLs.add(new URL("https://www.xvideos.com/video19719109/ziggy_star_ultra_hard_anal_pounding"));
contentURLs.add(new URL("https://www.xvideos.com/video23515878/dee_s_pool_toys"));
for (URL url : contentURLs) {
@ -46,7 +46,7 @@ public class VideoRippersTest extends RippersTest {
}
public void testPornhubRipper() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
contentURLs.add(new URL("http://www.pornhub.com/view_video.php?viewkey=993166542"));
for (URL url : contentURLs) {
PornhubRipper ripper = new PornhubRipper(url);
@ -55,7 +55,7 @@ public class VideoRippersTest extends RippersTest {
}
public void testVineRipper() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
contentURLs.add(new URL("https://vine.co/v/hiqQrP0eUZx"));
for (URL url : contentURLs) {
VineRipper ripper = new VineRipper(url);
@ -64,7 +64,7 @@ public class VideoRippersTest extends RippersTest {
}
public void testYoupornRipper() throws IOException {
List<URL> contentURLs = new ArrayList<URL>();
List<URL> contentURLs = new ArrayList<>();
contentURLs.add(new URL("http://www.youporn.com/watch/7669155/mrs-li-amateur-69-orgasm/?from=categ"));
for (URL url : contentURLs) {
YoupornRipper ripper = new YoupornRipper(url);