diff --git a/src/main/java/com/rarchives/ripme/App.java b/src/main/java/com/rarchives/ripme/App.java
index 326dc5f8..e046d5a6 100644
--- a/src/main/java/com/rarchives/ripme/App.java
+++ b/src/main/java/com/rarchives/ripme/App.java
@@ -34,9 +34,15 @@ import com.rarchives.ripme.utils.Utils;
  */
 public class App {
 
-    public static Logger logger;
+    public static final Logger logger;
     private static final History HISTORY = new History();
 
+    static {
+        //initialize logger
+        Utils.configureLogger();
+        logger = Logger.getLogger(App.class);
+    }
+
     public static void main(String[] args) throws MalformedURLException {
         CommandLine cl = getArgs(args);
         if (args.length > 0 && cl.hasOption('v')){
@@ -44,11 +50,8 @@ public class App {
             System.exit(0);
         }
 
-        //initialize logger
-        Utils.configureLogger();
         System.setProperty("apple.laf.useScreenMenuBar", "true");
         System.setProperty("com.apple.mrj.application.apple.menu.about.name", "RipMe");
-        logger = Logger.getLogger(App.class);
         logger.info("Initialized ripme v" + UpdateUtils.getThisJarVersion());
 
         if (args.length > 0) {
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/E621Ripper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/E621Ripper.java
index 231533e2..e381661c 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/E621Ripper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/E621Ripper.java
@@ -1,4 +1,3 @@
-
 package com.rarchives.ripme.ripper.rippers;
 
 import com.rarchives.ripme.ripper.AbstractHTMLRipper;
@@ -12,19 +11,16 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.apache.log4j.Logger;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
-/**
- *
- * @author
- */
 public class E621Ripper extends AbstractHTMLRipper{
+    private static final Logger logger = Logger.getLogger(E621Ripper.class);
+
     private static Pattern gidPattern=null;
     private static Pattern gidPattern2=null;
     private static Pattern gidPatternPool=null;
@@ -89,7 +85,7 @@ public class E621Ripper extends AbstractHTMLRipper{
 
                     addURLToDownload(new URL(page.getElementById("image").absUrl("src")),Utils.getConfigBoolean("download.save_order",true)?url.getRef()+"-":"");
                 } catch (IOException ex) {
-                    Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
+                    logger.error(ex);
                 }
             }
         }));
@@ -121,7 +117,7 @@ public class E621Ripper extends AbstractHTMLRipper{
         try {
             return Utils.filesystemSafe(prefix+new URI(getTerm(url)).getPath());
         } catch (URISyntaxException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+            logger.error(ex);
         }
 
         throw new MalformedURLException("Expected e621.net URL format: e621.net/post/index/1/searchterm - got "+url+" instead");
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/PahealRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/PahealRipper.java
index 22060b88..d2421f37 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/PahealRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/PahealRipper.java
@@ -13,15 +13,16 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.apache.log4j.Logger;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 import org.jsoup.select.Elements;
 
 public class PahealRipper extends AbstractHTMLRipper {
+    private static final Logger logger = Logger.getLogger(PahealRipper.class);
+
     private static Map<String,String> cookies = null;
     private static Pattern gidPattern = null;
 
@@ -93,7 +94,7 @@ public class PahealRipper extends AbstractHTMLRipper {
                     + ext);
             addURLToDownload(url, outFile);
         } catch (IOException | URISyntaxException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+            logger.error("Error while downloading URL " + url, ex);
         }
     }
 
@@ -115,7 +116,7 @@ public class PahealRipper extends AbstractHTMLRipper {
         try {
             return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
         } catch (URISyntaxException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+            logger.error(ex);
        }
 
         throw new MalformedURLException("Expected paheal.net URL format: rule34.paheal.net/post/list/searchterm - got " + url + " instead");
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/XbooruRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/XbooruRipper.java
index e987a294..6b697f70 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/XbooruRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/XbooruRipper.java
@@ -10,14 +10,15 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import org.apache.log4j.Logger;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
 
 public class XbooruRipper extends AbstractHTMLRipper {
+    private static final Logger logger = Logger.getLogger(XbooruRipper.class);
+
     private static Pattern gidPattern = null;
 
     public XbooruRipper(URL url) throws IOException {
@@ -87,7 +88,7 @@ public class XbooruRipper extends AbstractHTMLRipper {
         try {
             return Utils.filesystemSafe(new URI(getTerm(url)).getPath());
         } catch (URISyntaxException ex) {
-            Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
+            logger.error(ex);
         }
 
         throw new MalformedURLException("Expected xbooru.com URL format: xbooru.com/index.php?tags=searchterm - got " + url + " instead");
diff --git a/src/main/java/com/rarchives/ripme/utils/Http.java b/src/main/java/com/rarchives/ripme/utils/Http.java
index 01b54c6f..885a194d 100644
--- a/src/main/java/com/rarchives/ripme/utils/Http.java
+++ b/src/main/java/com/rarchives/ripme/utils/Http.java
@@ -23,7 +23,7 @@ import com.rarchives.ripme.ripper.AbstractRipper;
 public class Http {
 
     private static final int TIMEOUT = Utils.getConfigInteger("page.timeout", 5 * 1000);
-    private static final Logger logger = Logger.getLogger(AbstractRipper.class);
+    private static final Logger logger = Logger.getLogger(Http.class);
 
     private int retries;
     private String url;