fixing rip completion check, various log lines

4pr0n 2014-03-09 17:12:10 -07:00
parent c288afdd0a
commit 8e1dc5789b
10 changed files with 29 additions and 26 deletions

View File

@@ -151,6 +151,7 @@ public abstract class AbstractRipper
     protected void waitForThreads() {
         completed = false;
         threadPool.waitForThreads();
+        checkIfComplete();
     }
     /**
@@ -223,22 +224,24 @@ public abstract class AbstractRipper
             itemsErrored.put(url, message);
             observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " : " + message));
             observer.notifyAll();
-            checkIfComplete();
         }
+        checkIfComplete();
     }
     /**
      * Notifies observers and updates state if all files have been ripped.
      */
     private void checkIfComplete() {
-        if (!completed && itemsPending.size() == 0) {
-            completed = true;
-            logger.info(" Rip completed!");
-            observer.update(this,
-                    new RipStatusMessage(
-                            STATUS.RIP_COMPLETE,
-                            workingDir));
-            observer.notifyAll();
+        synchronized (observer) {
+            if (!completed && itemsPending.size() == 0) {
+                completed = true;
+                logger.info(" Rip completed!");
+                observer.update(this,
+                        new RipStatusMessage(
+                                STATUS.RIP_COMPLETE,
+                                workingDir));
+                observer.notifyAll();
+            }
         }
     }
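
The synchronized wrapper matters because Object.notifyAll() may only be called by a thread that holds the monitor of the object being notified; without it, checkIfComplete() could not safely be invoked from waitForThreads() or from outside the error handler's own synchronized (observer) block. Below is a minimal, self-contained sketch of that wait/notify pattern; the class and field names are illustrative and not taken from the ripme sources.

import java.util.concurrent.atomic.AtomicInteger;

// Illustrative stand-in for the ripper/observer relationship: workers decrement
// a pending counter and notify a waiting thread, but only while holding the
// monitor they notify on; notifyAll() without the lock throws
// IllegalMonitorStateException.
public class CompletionCheckSketch {
    private final Object observer = new Object();          // monitor the waiter sleeps on
    private final AtomicInteger itemsPending = new AtomicInteger(3);
    private boolean completed = false;

    private void checkIfComplete() {
        synchronized (observer) {                           // must own the monitor before notifyAll()
            if (!completed && itemsPending.get() == 0) {
                completed = true;
                observer.notifyAll();                       // wake the thread blocked in waitForCompletion()
            }
        }
    }

    private void downloadCompleted() {
        itemsPending.decrementAndGet();
        checkIfComplete();                                  // safe to call from any thread
    }

    private void waitForCompletion() throws InterruptedException {
        synchronized (observer) {
            while (!completed) {
                observer.wait();                            // releases the monitor while waiting
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        CompletionCheckSketch sketch = new CompletionCheckSketch();
        for (int i = 0; i < 3; i++) {
            new Thread(sketch::downloadCompleted).start();
        }
        sketch.waitForCompletion();
        System.out.println("Rip completed!");
    }
}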

View File

@@ -33,7 +33,7 @@ public class DownloadThreadPool {
     public void addThread(Thread t) {
         threadPool.execute(t);
     }
     public void waitForThreads() {
         threadPool.shutdown();
         try {
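
The hunk above is cut off right after the try block opens. The usual idiom behind a waitForThreads() built on an ExecutorService is shutdown() followed by awaitTermination() inside that try, which is what the sketch below assumes; the pool size, timeout, and messages are placeholders, not the actual contents of DownloadThreadPool.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Sketch of the common ExecutorService drain idiom: stop accepting new tasks,
// then block until the queued work finishes or a timeout/interrupt occurs.
public class ThreadPoolDrainSketch {
    private final ExecutorService threadPool = Executors.newFixedThreadPool(5);

    public void addThread(Runnable task) {
        threadPool.execute(task);
    }

    public void waitForThreads() {
        threadPool.shutdown();                      // no new tasks; queued ones keep running
        try {
            // Block until everything queued has completed; the timeout is arbitrary here.
            if (!threadPool.awaitTermination(1, TimeUnit.HOURS)) {
                System.err.println("Timed out waiting for download threads");
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();     // preserve the interrupt flag
        }
    }

    public static void main(String[] args) {
        ThreadPoolDrainSketch pool = new ThreadPoolDrainSketch();
        for (int i = 0; i < 3; i++) {
            final int n = i;
            pool.addThread(() -> System.out.println("download " + n + " done"));
        }
        pool.waitForThreads();
        System.out.println("all threads finished");
    }
}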

View File

@@ -60,7 +60,7 @@ public class ImagefapRipper extends AbstractRipper {
     @Override
     public void rip() throws IOException {
         int index = 0;
-        logger.info("[ ] Retrieving " + this.url.toExternalForm());
+        logger.info(" Retrieving " + this.url.toExternalForm());
         Document doc = Jsoup.connect(this.url.toExternalForm()).get();
         for (Element thumb : doc.select("#gallery img")) {
             if (!thumb.hasAttr("src") || !thumb.hasAttr("width")) {

View File

@@ -63,7 +63,7 @@ public class InstagramRipper extends AbstractRipper {
     }
     private String getUserID(URL url) throws IOException {
-        logger.info(" Retrieving " + url);
+        logger.info(" Retrieving " + url);
         Document doc = Jsoup.connect(this.url.toExternalForm()).get();
         for (Element element : doc.select("input[id=user_public]")) {
             return element.attr("value");
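
getUserID() works by loading the profile page and reading the value attribute of the hidden input element with id user_public. The offline sketch below runs the same Jsoup selector against a made-up HTML snippet; only the selector and attribute name come from the diff above, and it assumes the jsoup library (already used by the project) is on the classpath.

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

// Offline illustration of the selector used in getUserID(): read the value of
// a hidden <input id="user_public"> out of a parsed page. The HTML is made up.
public class UserIdSelectorSketch {
    public static void main(String[] args) {
        String html = "<html><body><input id=\"user_public\" value=\"12345\"></body></html>";
        Document doc = Jsoup.parse(html);                    // stand-in for Jsoup.connect(url).get()
        for (Element element : doc.select("input[id=user_public]")) {
            System.out.println(element.attr("value"));       // prints 12345
        }
    }
}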

View File

@@ -58,7 +58,7 @@ public class MotherlessRipper extends AbstractRipper {
     @Override
     public void rip() throws IOException {
         int index = 0;
-        logger.info("[ ] Retrieving " + this.url.toExternalForm());
+        logger.info(" Retrieving " + this.url.toExternalForm());
         Document doc = Jsoup.connect(this.url.toExternalForm())
                 .userAgent(USER_AGENT)
                 .get();

View File

@@ -41,7 +41,7 @@ public class SeeniveRipper extends AbstractRipper {
     @Override
     public void rip() throws IOException {
         String baseURL = this.url.toExternalForm();
-        logger.info("[ ] Retrieving " + baseURL);
+        logger.info(" Retrieving " + baseURL);
         Document doc = Jsoup.connect(baseURL)
                 .header("Referer", baseURL)
                 .userAgent(USER_AGENT)

View File

@@ -62,20 +62,20 @@ public class TumblrRipper extends AbstractRipper {
         offset = 0;
         while (true) {
             String apiURL = getTumblrApiURL(mediaType, offset);
-            logger.info(" Retrieving " + apiURL);
+            logger.info(" Retrieving " + apiURL);
             Document doc = Jsoup.connect(apiURL)
                     .ignoreContentType(true)
                     .header("User-agent", USER_AGENT)
                     .get();
+            String jsonString = doc.body().html().replaceAll("&quot;", "\"");
+            if (!handleJSON(jsonString)) {
+                // Returns false if an error occurs and we should stop.
+                break;
+            }
             try {
                 Thread.sleep(1000);
             } catch (InterruptedException e) {
-                logger.error("[!] Exception while waiting to load next album:", e);
+                logger.error("[!] Interrupted while waiting to load next album:", e);
                 break;
             }
-            String jsonString = doc.body().html().replaceAll("&quot;", "\"");
-            if (!handleJSON(jsonString)) {
-                // Returns false if an error occurs and we should stop.
-                break;
-            }
             offset += 20;
@@ -96,7 +96,7 @@ public class TumblrRipper extends AbstractRipper {
         posts = json.getJSONObject("response").getJSONArray("posts");
         if (posts.length() == 0) {
-            logger.info(" Zero posts returned. Dropping out.");
+            logger.info(" Zero posts returned.");
             return false;
         }
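
The reordering above makes the ripper parse each API page as soon as it is fetched, so the one-second throttle only runs when another page will actually be requested, and an error or empty page ends the loop immediately. Below is a compilable sketch of that loop shape; getTumblrApiURL, fetchPage, and handleJSON are stand-ins for the ripper's real methods, and the stubbed response is made up.

// Sketch of the reordered pagination loop in TumblrRipper.rip(): fetch a page,
// handle it right away, and only sleep before requesting the next page.
public class PaginationLoopSketch {

    public void rip() {
        int offset = 0;
        while (true) {
            String apiURL = getTumblrApiURL(offset);
            String jsonString = fetchPage(apiURL);   // placeholder for the Jsoup call in the diff
            if (!handleJSON(jsonString)) {
                // handleJSON returns false on errors or when no posts are left,
                // so the loop stops without sleeping first.
                break;
            }
            try {
                Thread.sleep(1000);                  // throttle before the next API request
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
            offset += 20;                            // mirrors the offset += 20 step in the diff above
        }
    }

    private String getTumblrApiURL(int offset) {
        // Illustrative URL shape only; the real ripper builds this from the blog address.
        return "http://example.tumblr.com/api/read/json?num=20&start=" + offset;
    }

    private String fetchPage(String apiURL) {
        return "{\"response\":{\"posts\":[]}}";      // stubbed network response
    }

    private boolean handleJSON(String jsonString) {
        return false;                                // stub: pretend there are no more posts
    }

    public static void main(String[] args) {
        new PaginationLoopSketch().rip();
        System.out.println("done");
    }
}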

View File

@@ -144,7 +144,7 @@ public class TwitterRipper extends AbstractRipper {
     private List<JSONObject> getTweets(String url) throws IOException {
         List<JSONObject> tweets = new ArrayList<JSONObject>();
-        logger.info(" Retrieving " + url);
+        logger.info(" Retrieving " + url);
         Document doc = Jsoup.connect(url)
                 .ignoreContentType(true)
                 .header("Authorization", "Bearer " + accessToken)

View File

@@ -41,7 +41,7 @@ public class VineboxRipper extends AbstractRipper {
         while (true) {
             page++;
             String urlPaged = this.url.toExternalForm() + "?page=" + page;
-            logger.info("[ ] Retrieving " + urlPaged);
+            logger.info(" Retrieving " + urlPaged);
             try {
                 doc = Jsoup.connect(urlPaged).get();
             } catch (HttpStatusException e) {

View File

@@ -15,8 +15,8 @@ public class TumblrRipperTest extends RippersTest {
         }
         List<URL> contentURLs = new ArrayList<URL>();
         contentURLs.add(new URL("http://wrouinr.tumblr.com/archive"));
-        contentURLs.add(new URL("http://topinstagirls.tumblr.com/tagged/berlinskaya"));
-        contentURLs.add(new URL("http://fittingroomgirls.tumblr.com/post/78268776776"));
+        //contentURLs.add(new URL("http://topinstagirls.tumblr.com/tagged/berlinskaya"));
+        //contentURLs.add(new URL("http://fittingroomgirls.tumblr.com/post/78268776776"));
         for (URL url : contentURLs) {
             try {
                 TumblrRipper ripper = new TumblrRipper(url);