diff --git a/src/main/java/com/rarchives/ripme/ripper/DownloadFileThread.java b/src/main/java/com/rarchives/ripme/ripper/DownloadFileThread.java
index 85a8f64d..2f8a1503 100644
--- a/src/main/java/com/rarchives/ripme/ripper/DownloadFileThread.java
+++ b/src/main/java/com/rarchives/ripme/ripper/DownloadFileThread.java
@@ -214,6 +214,7 @@ class DownloadFileThread extends Thread {
             }
             byte[] data = new byte[1024 * 256];
             int bytesRead;
+            boolean shouldSkipFileDownload = huc.getContentLength() / 10000000 >= 10;
             while ( (bytesRead = bis.read(data)) != -1) {
                 try {
                     observer.stopCheck();
@@ -228,7 +229,7 @@ class DownloadFileThread extends Thread {
                     observer.sendUpdate(STATUS.COMPLETED_BYTES, bytesDownloaded);
                 }
                 // If this is a test and we're downloading a large file
-                if (AbstractRipper.isThisATest() && bytesTotal / 10000000 >= 10) {
+                if (AbstractRipper.isThisATest() && shouldSkipFileDownload) {
                     logger.debug("Not downloading whole file because it is over 10mb and this is a test");
                     bis.close();
                     fos.close();
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java
index edc0f39e..b844b4f5 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/XhamsterRipper.java
@@ -26,6 +26,12 @@ public class XhamsterRipper extends AbstractHTMLRipper {
         super(url);
     }
 
+    private int index = 1;
+
+    @Override public boolean hasASAPRipping() {
+        return true;
+    }
+
     @Override
     public String getHost() {
         return "xhamster";
@@ -144,24 +150,21 @@ public class XhamsterRipper extends AbstractHTMLRipper {
 
     @Override
     public List<String> getURLsFromPage(Document doc) {
+        LOGGER.debug("Checking for urls");
         List<String> result = new ArrayList<>();
         if (!isVideoUrl(url)) {
-            for (Element thumb : doc.select("div.picture_view > div.pictures_block > div.items > div.item-container > a > div.thumb_container > div.img > img")) {
-                String image = thumb.attr("src");
-                // replace thumbnail urls with the urls to the full sized images
-                image = image.replaceAll(
-                        "https://upt.xhcdn\\.",
-                        "http://up.xhamster.");
-                image = image.replaceAll("ept\\.xhcdn", "ep.xhamster");
-                image = image.replaceAll(
-                        "_160\\.",
-                        "_1000.");
-                // Xhamster has bad cert management and uses invalid certs for some cdns, so we change all our requests to http
-                image = image.replaceAll("https", "http");
-                result.add(image);
-            }
+            for (Element page : doc.select("div.items > div.item-container > a.item")) {
+                String pageWithImageUrl = page.attr("href");
+                try {
+                    String image = Http.url(new URL(pageWithImageUrl)).get().select("div.picture_container > a > img").attr("src");
+                    downloadFile(image);
+                } catch (IOException e) {
+                    LOGGER.error("Was unable to load page " + pageWithImageUrl);
+                }
+            }
         } else {
-            result.add(doc.select("div.player-container > a").attr("href"));
+            String imgUrl = doc.select("div.player-container > a").attr("href");
+            downloadFile(imgUrl);
         }
         return result;
     }
@@ -170,6 +173,15 @@ public class XhamsterRipper extends AbstractHTMLRipper {
     public void downloadURL(URL url, int index) {
         addURLToDownload(url, getPrefix(index));
     }
+
+    private void downloadFile(String url) {
+        try {
+            addURLToDownload(new URL(url), getPrefix(index));
+            index = index + 1;
+        } catch (MalformedURLException e) {
+            LOGGER.error("The url \"" + url + "\" is malformed");
+        }
+    }
 
     @Override
     public String getAlbumTitle(URL url) throws MalformedURLException {
diff --git a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java
index 7847a36b..3ccccb49 100644
--- a/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java
+++ b/src/test/java/com/rarchives/ripme/tst/ripper/rippers/XhamsterRipperTest.java
@@ -35,7 +35,7 @@ public class XhamsterRipperTest extends RippersTest {
     }
 
     public void testGetNextPage() throws IOException {
-        XhamsterRipper ripper = new XhamsterRipper(new URL("https://pt.xhamster.com/photos/gallery/silvana-7105696"));
+        XhamsterRipper ripper = new XhamsterRipper(new URL("https://pt.xhamster.com/photos/gallery/mega-compil-6-10728626"));
         Document doc = ripper.getFirstPage();
         try {
             ripper.getNextPage(doc);