Merge pull request #986 from cyian-1756/xhamsterFixes
Fixed xhamster ripper
commit 89680cf48d
@@ -214,6 +214,7 @@ class DownloadFileThread extends Thread {
             }
             byte[] data = new byte[1024 * 256];
             int bytesRead;
+            boolean shouldSkipFileDownload = huc.getContentLength() / 10000000 >= 10;
             while ( (bytesRead = bis.read(data)) != -1) {
                 try {
                     observer.stopCheck();
@@ -228,7 +229,7 @@ class DownloadFileThread extends Thread {
                     observer.sendUpdate(STATUS.COMPLETED_BYTES, bytesDownloaded);
                 }
                 // If this is a test and we're downloading a large file
-                if (AbstractRipper.isThisATest() && bytesTotal / 10000000 >= 10) {
+                if (AbstractRipper.isThisATest() && shouldSkipFileDownload) {
                     logger.debug("Not downloading whole file because it is over 10mb and this is a test");
                     bis.close();
                     fos.close();
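The DownloadFileThread change lifts the "skip large files during tests" decision out of the read loop: it is computed once from the connection's reported Content-Length (`huc.getContentLength()`) instead of being re-derived from `bytesTotal` on every iteration. A minimal, self-contained sketch of that pattern, with a placeholder URL and output filename (in ripme the early exit is additionally gated on `AbstractRipper.isThisATest()`):

```java
import java.io.BufferedInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class SkipLargeDownloadSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder URL and filename; only the skip-flag pattern matters here.
        HttpURLConnection huc = (HttpURLConnection) new URL("https://example.com/file.bin").openConnection();
        // Decide once, before reading any bytes, using the same threshold arithmetic as the diff.
        boolean shouldSkipFileDownload = huc.getContentLength() / 10000000 >= 10;
        byte[] data = new byte[1024 * 256];
        int bytesRead;
        try (BufferedInputStream bis = new BufferedInputStream(huc.getInputStream());
             FileOutputStream fos = new FileOutputStream("file.bin")) {
            while ((bytesRead = bis.read(data)) != -1) {
                fos.write(data, 0, bytesRead);
                if (shouldSkipFileDownload) {
                    // ripme also requires AbstractRipper.isThisATest() before bailing out early.
                    break;
                }
            }
        }
    }
}
```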
@@ -26,6 +26,12 @@ public class XhamsterRipper extends AbstractHTMLRipper {
         super(url);
     }
 
+    private int index = 1;
+
+    @Override public boolean hasASAPRipping() {
+        return true;
+    }
+
     @Override
     public String getHost() {
         return "xhamster";
@@ -144,24 +150,21 @@ public class XhamsterRipper extends AbstractHTMLRipper {
 
     @Override
     public List<String> getURLsFromPage(Document doc) {
+        LOGGER.debug("Checking for urls");
         List<String> result = new ArrayList<>();
         if (!isVideoUrl(url)) {
-            for (Element thumb : doc.select("div.picture_view > div.pictures_block > div.items > div.item-container > a > div.thumb_container > div.img > img")) {
-                String image = thumb.attr("src");
-                // replace thumbnail urls with the urls to the full sized images
-                image = image.replaceAll(
-                        "https://upt.xhcdn\\.",
-                        "http://up.xhamster.");
-                image = image.replaceAll("ept\\.xhcdn", "ep.xhamster");
-                image = image.replaceAll(
-                        "_160\\.",
-                        "_1000.");
-                // Xhamster has bad cert management and uses invalid certs for some cdns, so we change all our requests to http
-                image = image.replaceAll("https", "http");
-                result.add(image);
+            for (Element page : doc.select("div.items > div.item-container > a.item")) {
+                String pageWithImageUrl = page.attr("href");
+                try {
+                    String image = Http.url(new URL(pageWithImageUrl)).get().select("div.picture_container > a > img").attr("src");
+                    downloadFile(image);
+                } catch (IOException e) {
+                    LOGGER.error("Was unable to load page " + pageWithImageUrl);
+                }
             }
         } else {
-            result.add(doc.select("div.player-container > a").attr("href"));
+            String imgUrl = doc.select("div.player-container > a").attr("href");
+            downloadFile(imgUrl);
         }
         return result;
     }
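Rather than rewriting thumbnail URLs with a chain of `replaceAll` calls, `getURLsFromPage` now follows each gallery item's link and reads the full-size image URL off the individual picture page, queuing it immediately through the new `downloadFile` helper. The same two-step jsoup flow, sketched outside ripme (class name and gallery URL are placeholders; the CSS selectors are the ones used in the diff):

```java
import java.io.IOException;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

public class XhamsterGalleryScrapeSketch {
    public static void main(String[] args) throws IOException {
        // Step 1: load the gallery page (placeholder URL).
        Document gallery = Jsoup.connect("https://xhamster.com/photos/gallery/example-123").get();
        // Each item in the gallery links to its own picture page.
        for (Element page : gallery.select("div.items > div.item-container > a.item")) {
            String pageWithImageUrl = page.attr("href");
            try {
                // Step 2: the picture page carries the full-size <img>.
                Document picturePage = Jsoup.connect(pageWithImageUrl).get();
                String image = picturePage.select("div.picture_container > a > img").attr("src");
                System.out.println(image); // ripme queues this via downloadFile(image)
            } catch (IOException e) {
                System.err.println("Was unable to load page " + pageWithImageUrl);
            }
        }
    }
}
```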
@@ -171,6 +174,15 @@ public class XhamsterRipper extends AbstractHTMLRipper {
         addURLToDownload(url, getPrefix(index));
     }
 
+    private void downloadFile(String url) {
+        try {
+            addURLToDownload(new URL(url), getPrefix(index));
+            index = index + 1;
+        } catch (MalformedURLException e) {
+            LOGGER.error("The url \"" + url + "\" is malformed");
+        }
+    }
+
     @Override
     public String getAlbumTitle(URL url) throws MalformedURLException {
         try {
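Together, `hasASAPRipping()` and the `downloadFile(String)` helper move the actual queuing into the parsing pass: `getURLsFromPage` calls `downloadFile` for every image and effectively returns an empty list, `index` supplies a running prefix that preserves gallery order, and a malformed URL is logged rather than allowed to abort the whole album. A standalone sketch of that helper pattern (the `queue` stand-in and the `%03d_` prefix format are assumptions; the real call is `addURLToDownload(url, getPrefix(index))`):

```java
import java.net.MalformedURLException;
import java.net.URL;

public class AsapQueueSketch {
    private int index = 1;

    // Mirrors downloadFile(String) from the diff: parse, queue with a running
    // prefix, and log (not throw) on malformed URLs so one bad link is skipped.
    private void downloadFile(String url) {
        try {
            queue(new URL(url), String.format("%03d_", index)); // prefix format is an assumption
            index = index + 1;
        } catch (MalformedURLException e) {
            System.err.println("The url \"" + url + "\" is malformed");
        }
    }

    // Stand-in for ripme's addURLToDownload(url, prefix).
    private void queue(URL url, String prefix) {
        System.out.println(prefix + url);
    }

    public static void main(String[] args) {
        AsapQueueSketch sketch = new AsapQueueSketch();
        sketch.downloadFile("https://example.com/a.jpg"); // queued as 001_...
        sketch.downloadFile("not a url");                 // exercises the malformed-URL branch
    }
}
```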
@@ -35,7 +35,7 @@ public class XhamsterRipperTest extends RippersTest {
     }
 
     public void testGetNextPage() throws IOException {
-        XhamsterRipper ripper = new XhamsterRipper(new URL("https://pt.xhamster.com/photos/gallery/silvana-7105696"));
+        XhamsterRipper ripper = new XhamsterRipper(new URL("https://pt.xhamster.com/photos/gallery/mega-compil-6-10728626"));
         Document doc = ripper.getFirstPage();
         try {
             ripper.getNextPage(doc);