Merge pull request #54 from lacqui/no_retry_404

Added ability to skip retries on 404 errors.
4pr0n 2014-06-04 20:26:26 -07:00
commit 2e7e0ec7bb
3 changed files with 15 additions and 1 deletion


@@ -57,6 +57,9 @@ public class App {
        if (cl.hasOption('t')) {
            Utils.setConfigInteger("threads.size", Integer.parseInt(cl.getOptionValue('t')));
        }
        if (cl.hasOption('4')) {
            Utils.setConfigBoolean("errors.skip404", true);
        }
        if (cl.hasOption('r')) {
            // Re-rip all via command-line
            List<String> history = Utils.getConfigList("download.history");
@@ -111,6 +114,7 @@ public class App {
        opts.addOption("t", "threads", true, "Number of download threads per rip");
        opts.addOption("w", "overwrite", false, "Overwrite existing files");
        opts.addOption("r", "rerip", false, "Re-rip all ripped albums");
        opts.addOption("4", "skip404", false, "Don't retry after a 404 (not found) error");
        return opts;
    }
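For readers who want to try the new flag outside of RipMe, here is a minimal, self-contained sketch of the same Commons CLI wiring. The class name SkipDemo and the use of DefaultParser (Commons CLI 1.3+) are assumptions for illustration; RipMe itself persists the value with Utils.setConfigBoolean("errors.skip404", true) exactly as in the hunk above.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class SkipDemo {
    public static void main(String[] args) throws ParseException {
        Options opts = new Options();
        // Same option definition as the diff: short "-4", long "--skip404", no argument.
        opts.addOption("4", "skip404", false, "Don't retry after a 404 (not found) error");

        CommandLine cl = new DefaultParser().parse(opts, args);
        boolean skip404 = cl.hasOption('4'); // true when -4 or --skip404 was passed
        System.out.println("errors.skip404 = " + skip404);
    }
}

Run with -4 or --skip404 and the flag flips to true; without it, downloads keep the existing retry behaviour.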


@@ -10,6 +10,7 @@ import java.util.Map;
import org.apache.log4j.Logger;
import org.jsoup.Connection.Response;
import org.jsoup.Jsoup;
import org.jsoup.HttpStatusException;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
@@ -105,6 +106,12 @@ public class DownloadFileThread extends Thread {
                out.write(response.bodyAsBytes());
                out.close();
                break; // Download successful: break out of infinite loop
            } catch (HttpStatusException hse) {
                logger.error("[!] HTTP status " + hse.getStatusCode() + " while downloading from " + url);
                observer.downloadErrored(url, "HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
                if (hse.getStatusCode() == 404 && Utils.getConfigBoolean("errors.skip404", false)) {
                    return;
                }
            } catch (IOException e) {
                logger.error("[!] Exception while downloading file: " + url + " - " + e.getMessage(), e);
            }
@@ -118,4 +125,4 @@
            logger.info("[+] Saved " + url + " as " + this.prettySaveAs);
        }
    }
}
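To make the control flow easier to follow, here is a stand-alone sketch of the retry loop with the new short-circuit: HTTP errors surface as HttpStatusException, and a 404 combined with the skip404 setting abandons the file immediately instead of burning through the remaining retries. The method signature, the explicit skip404 parameter, and the maxRetries counter are assumptions for illustration; RipMe reads the flag via Utils.getConfigBoolean("errors.skip404", false) and reports failures through its observer as shown above.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.jsoup.Connection.Response;
import org.jsoup.HttpStatusException;
import org.jsoup.Jsoup;

public class RetrySketch {
    /** Returns true if the file was saved, false if it was skipped or retries ran out. */
    static boolean download(String url, String saveAs, int maxRetries, boolean skip404) {
        int tries = 0;
        while (true) {
            try {
                Response response = Jsoup.connect(url).ignoreContentType(true).execute();
                try (OutputStream out = new FileOutputStream(saveAs)) {
                    out.write(response.bodyAsBytes());
                }
                return true;                      // download successful: leave the loop
            } catch (HttpStatusException hse) {
                if (hse.getStatusCode() == 404 && skip404) {
                    return false;                 // 404 and skip404 is set: give up immediately
                }
            } catch (IOException e) {
                // transient network error: fall through and retry
            }
            if (++tries >= maxRetries) {
                return false;                     // out of retries
            }
        }
    }
}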


@@ -14,6 +14,9 @@ download.timeout = 60000
# Maximum size of downloaded files in bytes (required)
download.max_size = 104857600
# Don't retry on 404 errors
errors.skip404 = true
# API creds
twitter.auth = VW9Ybjdjb1pkd2J0U3kwTUh2VXVnOm9GTzVQVzNqM29LQU1xVGhnS3pFZzhKbGVqbXU0c2lHQ3JrUFNNZm8=
tumblr.auth = v5kUqGQXUtmF7K0itri1DGtgTs0VQpbSEbh1jxYgj9d2Sq18F8
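Because the downloader falls back to false when the key is missing (Utils.getConfigBoolean("errors.skip404", false)), retries on 404 remain the default unless this property is set or -4 is passed on the command line. Below is a tiny sketch of reading the same key with plain java.util.Properties; the file name "rip.properties" and the direct load are assumptions, since RipMe resolves its configuration through the Utils class.

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class ConfigSketch {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream("rip.properties")) { // assumed file name
            props.load(in);
        }
        // The key must match what the downloader looks up: "errors.skip404".
        boolean skip404 = Boolean.parseBoolean(props.getProperty("errors.skip404", "false"));
        System.out.println("Skip retries on 404: " + skip404);
    }
}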