Added utility functions for parsing URL queries
Rewrote E621Ripper to no longer use regexes (so it handles special characters in URLs correctly)
This commit is contained in:
parent
17ad27f5fb
commit
2f4793e9e3
@ -6,10 +6,12 @@ import com.rarchives.ripme.ripper.DownloadThreadPool;
|
|||||||
import com.rarchives.ripme.utils.Http;
|
import com.rarchives.ripme.utils.Http;
|
||||||
import com.rarchives.ripme.utils.Utils;
|
import com.rarchives.ripme.utils.Utils;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.io.UnsupportedEncodingException;
|
||||||
import java.net.MalformedURLException;
|
import java.net.MalformedURLException;
|
||||||
import java.net.URI;
|
import java.net.URI;
|
||||||
import java.net.URISyntaxException;
|
import java.net.URISyntaxException;
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
|
import java.net.URLDecoder;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.logging.Level;
|
import java.util.logging.Level;
|
||||||
@ -25,9 +27,7 @@ import org.jsoup.select.Elements;
|
|||||||
* @author
|
* @author
|
||||||
*/
|
*/
|
||||||
public class E621Ripper extends AbstractHTMLRipper{
|
public class E621Ripper extends AbstractHTMLRipper{
|
||||||
private static Pattern gidPattern=null;
|
public static final int POOL_IMAGES_PER_PAGE = 24;
|
||||||
private static Pattern gidPattern2=null;
|
|
||||||
private static Pattern gidPatternPool=null;
|
|
||||||
|
|
||||||
private DownloadThreadPool e621ThreadPool = new DownloadThreadPool("e621");
|
private DownloadThreadPool e621ThreadPool = new DownloadThreadPool("e621");
|
||||||
|
|
||||||
@ -63,7 +63,18 @@ public class E621Ripper extends AbstractHTMLRipper{
|
|||||||
Elements elements = page.select("#post-list .thumb a,#pool-show .thumb a");
|
Elements elements = page.select("#post-list .thumb a,#pool-show .thumb a");
|
||||||
List<String> res = new ArrayList<String>(elements.size());
|
List<String> res = new ArrayList<String>(elements.size());
|
||||||
|
|
||||||
for(Element e:elements){
|
if (page.getElementById("pool-show") != null) {
|
||||||
|
int index = 0;
|
||||||
|
|
||||||
|
Element e = page.getElementById("paginator");
|
||||||
|
if (e != null && (e = e.getElementsByClass("current").first()) != null)
|
||||||
|
index = (Integer.parseInt(e.text()) - 1) * POOL_IMAGES_PER_PAGE;
|
||||||
|
|
||||||
|
for (Element e_ : elements)
|
||||||
|
res.add(e_.absUrl("href") + "#" + ++index);
|
||||||
|
|
||||||
|
} else {
|
||||||
|
for (Element e : elements)
|
||||||
res.add(e.absUrl("href") + "#" + e.child(0).attr("id").substring(1));
|
res.add(e.absUrl("href") + "#" + e.child(0).attr("id").substring(1));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -86,8 +97,15 @@ public class E621Ripper extends AbstractHTMLRipper{
|
|||||||
public void run() {
|
public void run() {
|
||||||
try {
|
try {
|
||||||
Document page = Http.url(url).get();
|
Document page = Http.url(url).get();
|
||||||
|
Element e = page.getElementById("image");
|
||||||
|
|
||||||
|
if (e != null)
|
||||||
|
addURLToDownload(new URL(e.absUrl("src")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
|
||||||
|
else if ((e = page.select(".content object>param[name=\"movie\"]").first()) != null)
|
||||||
|
addURLToDownload(new URL(e.absUrl("value")), Utils.getConfigBoolean("download.save_order", true) ? url.getRef() + "-" : "");
|
||||||
|
else
|
||||||
|
Logger.getLogger(E621Ripper.class.getName()).log(Level.WARNING, "Unsupported media type - please report to program author: " + url.toString());
|
||||||
|
|
||||||
addURLToDownload(new URL(page.getElementById("image").absUrl("src")),Utils.getConfigBoolean("download.save_order",true)?url.getRef()+"-":"");
|
|
||||||
} catch (IOException ex) {
|
} catch (IOException ex) {
|
||||||
Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
|
Logger.getLogger(E621Ripper.class.getName()).log(Level.SEVERE, null, ex);
|
||||||
}
|
}
|
||||||
@ -96,47 +114,54 @@ public class E621Ripper extends AbstractHTMLRipper{
|
|||||||
}
|
}
|
||||||
|
|
||||||
private String getTerm(URL url) throws MalformedURLException {
|
private String getTerm(URL url) throws MalformedURLException {
|
||||||
if(gidPattern==null)
|
String query = url.getQuery();
|
||||||
gidPattern=Pattern.compile("^https?://(www\\.)?e621\\.net/post/index/[^/]+/([a-zA-Z0-9$_.+!*'(),%-]+)(/.*)?(#.*)?$");
|
|
||||||
if(gidPatternPool==null)
|
|
||||||
gidPatternPool=Pattern.compile("^https?://(www\\.)?e621\\.net/pool/show/([a-zA-Z0-9$_.+!*'(),%-]+)(\\?.*)?(/.*)?(#.*)?$");
|
|
||||||
|
|
||||||
Matcher m = gidPattern.matcher(url.toExternalForm());
|
if (query != null)
|
||||||
if(m.matches())
|
return Utils.parseUrlQuery(query, "tags");
|
||||||
return m.group(2);
|
|
||||||
|
|
||||||
m = gidPatternPool.matcher(url.toExternalForm());
|
if (query == null) {
|
||||||
if(m.matches())
|
if ((query = url.getPath()).startsWith("/post/index/")) {
|
||||||
return m.group(2);
|
query = query.substring(12);
|
||||||
|
|
||||||
throw new MalformedURLException("Expected e621.net URL format: e621.net/post/index/1/searchterm - got "+url+" instead");
|
int pos = query.indexOf('/');
|
||||||
|
if (pos == -1)
|
||||||
|
return null;
|
||||||
|
|
||||||
|
// skip page number
|
||||||
|
query = query.substring(pos + 1);
|
||||||
|
|
||||||
|
if (query.endsWith("/"))
|
||||||
|
query = query.substring(0, query.length() - 1);
|
||||||
|
|
||||||
|
try {
|
||||||
|
return URLDecoder.decode(query, "UTF-8");
|
||||||
|
} catch (UnsupportedEncodingException e) {
|
||||||
|
// Shouldn't happen since UTF-8 is required to be supported
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
} else if (query.startsWith("/pool/show/")) {
|
||||||
|
query = query.substring(11);
|
||||||
|
|
||||||
|
if (query.endsWith("/"))
|
||||||
|
query = query.substring(0, query.length() - 1);
|
||||||
|
|
||||||
|
return query;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String getGID(URL url) throws MalformedURLException {
|
public String getGID(URL url) throws MalformedURLException {
|
||||||
try {
|
|
||||||
String prefix = "";
|
String prefix = "";
|
||||||
if (url.getPath().startsWith("/pool/show/"))
|
if (url.getPath().startsWith("/pool/show/"))
|
||||||
prefix = "pool_";
|
prefix = "pool_";
|
||||||
|
else
|
||||||
|
prefix = "term_";
|
||||||
|
|
||||||
return Utils.filesystemSafe(prefix+new URI(getTerm(url)).getPath());
|
return Utils.filesystemSafe(prefix + getTerm(url));
|
||||||
} catch (URISyntaxException ex) {
|
|
||||||
Logger.getLogger(PahealRipper.class.getName()).log(Level.SEVERE, null, ex);
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new MalformedURLException("Expected e621.net URL format: e621.net/post/index/1/searchterm - got "+url+" instead");
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public URL sanitizeURL(URL url) throws MalformedURLException {
|
|
||||||
if(gidPattern2==null)
|
|
||||||
gidPattern2=Pattern.compile("^https?://(www\\.)?e621\\.net/post/search\\?tags=([a-zA-Z0-9$_.+!*'(),%-]+)(/.*)?(#.*)?$");
|
|
||||||
|
|
||||||
Matcher m = gidPattern2.matcher(url.toExternalForm());
|
|
||||||
if(m.matches())
|
|
||||||
return new URL("https://e621.net/post/index/1/"+m.group(2).replace("+","%20"));
|
|
||||||
|
|
||||||
return url;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
@ -3,13 +3,16 @@ package com.rarchives.ripme.utils;
|
|||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
|
import java.io.UnsupportedEncodingException;
|
||||||
import java.lang.reflect.Constructor;
|
import java.lang.reflect.Constructor;
|
||||||
import java.net.URISyntaxException;
|
import java.net.URISyntaxException;
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
import java.net.URLDecoder;
|
import java.net.URLDecoder;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Enumeration;
|
import java.util.Enumeration;
|
||||||
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.jar.JarEntry;
|
import java.util.jar.JarEntry;
|
||||||
import java.util.jar.JarFile;
|
import java.util.jar.JarFile;
|
||||||
|
|
||||||
@ -387,4 +390,70 @@ public class Utils {
|
|||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
 * Parses a URL query
|
||||||
|
*
|
||||||
|
* @param query
|
||||||
|
 *            The query part of a URL
|
||||||
|
* @return The map of all query parameters
|
||||||
|
*/
|
||||||
|
public static Map<String,String> parseUrlQuery(String query) {
|
||||||
|
Map<String,String> res = new HashMap<String, String>();
|
||||||
|
|
||||||
|
if (query.equals(""))
|
||||||
|
return res;
|
||||||
|
|
||||||
|
String[] parts = query.split("&");
|
||||||
|
int pos;
|
||||||
|
|
||||||
|
try {
|
||||||
|
for (String part : parts) {
|
||||||
|
if ((pos = part.indexOf('=')) >= 0)
|
||||||
|
res.put(URLDecoder.decode(part.substring(0, pos), "UTF-8"),
|
||||||
|
URLDecoder.decode(part.substring(pos + 1), "UTF-8"));
|
||||||
|
else
|
||||||
|
res.put(URLDecoder.decode(part, "UTF-8"), "");
|
||||||
|
}
|
||||||
|
} catch (UnsupportedEncodingException e) {
|
||||||
|
// Shouldn't happen since UTF-8 is required to be supported
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses an URL query and returns the requested parameter's value
|
||||||
|
*
|
||||||
|
* @param query
|
||||||
|
* The query part of an URL
|
||||||
|
* @param key
|
||||||
|
* The key whose value is requested
|
||||||
|
* @return The associated value or null if key wasn't found
|
||||||
|
*/
|
||||||
|
public static String parseUrlQuery(String query, String key) {
|
||||||
|
if (query.equals(""))
|
||||||
|
return null;
|
||||||
|
|
||||||
|
String[] parts = query.split("&");
|
||||||
|
int pos;
|
||||||
|
|
||||||
|
try {
|
||||||
|
for (String part : parts) {
|
||||||
|
if ((pos = part.indexOf('=')) >= 0) {
|
||||||
|
if (URLDecoder.decode(part.substring(0, pos), "UTF-8").equals(key))
|
||||||
|
return URLDecoder.decode(part.substring(pos + 1), "UTF-8");
|
||||||
|
|
||||||
|
} else if (URLDecoder.decode(part, "UTF-8").equals(key)) {
|
||||||
|
return "";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (UnsupportedEncodingException e) {
|
||||||
|
// Shouldn't happen since UTF-8 is required to be supported
|
||||||
|
throw new RuntimeException(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user