Merge pull request RipMeApp#1 from Isaaku/issues/location_fix
Issues/location fix
Isaaku authored Aug 29, 2019
2 parents 7c1858d + 5545677 commit 34386de
Showing 82 changed files with 816 additions and 499 deletions.
12 changes: 9 additions & 3 deletions .travis.yml
@@ -1,6 +1,12 @@
language: java
jdk:
- oraclejdk8
- openjdk8

matrix:
  include:
    - jdk: openjdk9
      before_install:
        - rm "${JAVA_HOME}/lib/security/cacerts"
        - ln -s /etc/ssl/certs/java/cacerts "${JAVA_HOME}/lib/security/cacerts"
    - jdk: openjdk8

after_success:
- mvn clean test jacoco:report coveralls:report
2 changes: 1 addition & 1 deletion pom.xml
@@ -3,7 +3,7 @@
<groupId>com.rarchives.ripme</groupId>
<artifactId>ripme</artifactId>
<packaging>jar</packaging>
<version>1.7.86</version>
<version>1.7.87</version>
<name>ripme</name>
<url>http://rip.rarchives.com</url>
<properties>
7 changes: 4 additions & 3 deletions ripme.json
@@ -1,7 +1,7 @@
{
"latestVersion": "1.7.86",
"currentHash": "d78f7dfaa8ef55575a8485cdf93bbd09e3ea9a3bd224e84e970bcdd0a51d5305",
"currentHash": "52dfb707d6247f44949c0d97b19c7815dc848b26837b98ae561c0dea20993a12",
"changeList": [
"1.7.87: Added ripper for allporncomic.com; Fixed Xhamster ripper; Added support xhamster2.com and xhamster.desi; Fixes for gfycat thumbs urls",
"1.7.86: Added Meituri Ripper; fixed -u flag; Fixed pornhub ripper; Xhamster ripper can now queue users videos",
"1.7.85: Fixed instagram ripper; Flickr ripper now downloads largest image",
"1.7.84: Fixed instagram ripper; xhamster ripper now accepts urls with page numbers; Fixed Deviantart Ripper",
@@ -258,5 +258,6 @@
"1.0.4: Fixed spaces-in-directory bug",
"1.0.3: Added VK.com ripper",
"1.0.1: Added auto-update functionality"
]
],
"latestVersion": "1.7.87"
}
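
The ripme.json update bumps `latestVersion` to 1.7.87, replaces `currentHash` (a 64-character hex digest, presumably the new jar's SHA-256), and prepends the 1.7.87 entry to `changeList`. For context, here is a minimal sketch of how a client could read those two fields, assuming the manifest is fetched from the repository's raw URL and parsed with org.json; both the URL and the parser choice are illustrative assumptions, not code from this diff:

```java
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

import org.json.JSONObject;

public class UpdateManifestSketch {
    // Hypothetical manifest location; the real update URL is not part of this diff.
    private static final String MANIFEST_URL =
            "https://raw.githubusercontent.com/RipMeApp/ripme/master/ripme.json";

    public static void main(String[] args) throws IOException {
        try (InputStream in = new URL(MANIFEST_URL).openStream();
             Scanner scanner = new Scanner(in, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
            JSONObject manifest = new JSONObject(scanner.next());
            // Fields touched by this commit: latestVersion and currentHash.
            String latest = manifest.getString("latestVersion"); // "1.7.87" after this change
            String hash = manifest.getString("currentHash");
            System.out.println("Latest version: " + latest);
            System.out.println("Expected jar hash: " + hash);
        }
    }
}
```

The reordering in the file (moving `latestVersion` to the end of the object) is invisible to a JSON parser; only the updated values matter.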
118 changes: 70 additions & 48 deletions src/main/java/com/rarchives/ripme/ripper/DownloadFileThread.java
@@ -20,17 +20,14 @@
import com.rarchives.ripme.utils.Utils;

/**
* Thread for downloading files.
* Includes retry logic, observer notifications, and other goodies.
* Thread for downloading files. Includes retry logic, observer notifications,
* and other goodies.
*/
class DownloadFileThread extends Thread {

private ResourceBundle rb = MainWindow.rb;

private static final Logger logger = Logger.getLogger(DownloadFileThread.class);

private String referrer = "";
private Map<String,String> cookies = new HashMap<>();
private Map<String, String> cookies = new HashMap<>();

private URL url;
private File saveAs;
@@ -55,18 +52,19 @@ public DownloadFileThread(URL url, File saveAs, AbstractRipper observer, Boolean
public void setReferrer(String referrer) {
this.referrer = referrer;
}
public void setCookies(Map<String,String> cookies) {

public void setCookies(Map<String, String> cookies) {
this.cookies = cookies;
}


/**
* Attempts to download the file. Retries as needed.
* Notifies observers upon completion/error/warn.
* Attempts to download the file. Retries as needed. Notifies observers upon
* completion/error/warn.
*/
public void run() {
// First thing we make sure the file name doesn't have any illegal chars in it
saveAs = new File(saveAs.getParentFile().getAbsolutePath() + File.separator + Utils.sanitizeSaveAs(saveAs.getName()));
saveAs = new File(
saveAs.getParentFile().getAbsolutePath() + File.separator + Utils.sanitizeSaveAs(saveAs.getName()));
long fileSize = 0;
int bytesTotal = 0;
int bytesDownloaded = 0;
@@ -76,16 +74,18 @@ public void run() {
try {
observer.stopCheck();
} catch (IOException e) {
observer.downloadErrored(url, rb.getString("download.interrupted"));
observer.downloadErrored(url, Utils.getLocalizedString("download.interrupted"));
return;
}
if (saveAs.exists() && !observer.tryResumeDownload() && !getFileExtFromMIME ||
Utils.fuzzyExists(new File(saveAs.getParent()), saveAs.getName()) && getFileExtFromMIME && !observer.tryResumeDownload()) {
if (saveAs.exists() && !observer.tryResumeDownload() && !getFileExtFromMIME
|| Utils.fuzzyExists(new File(saveAs.getParent()), saveAs.getName()) && getFileExtFromMIME
&& !observer.tryResumeDownload()) {
if (Utils.getConfigBoolean("file.overwrite", false)) {
logger.info("[!] " + rb.getString("deleting.existing.file") + prettySaveAs);
logger.info("[!] " + Utils.getLocalizedString("deleting.existing.file") + prettySaveAs);
saveAs.delete();
} else {
logger.info("[!] " + rb.getString("skipping") + url + " -- " + rb.getString("file.already.exists") + ": " + prettySaveAs);
logger.info("[!] " + Utils.getLocalizedString("skipping") + url + " -- "
+ Utils.getLocalizedString("file.already.exists") + ": " + prettySaveAs);
observer.downloadExists(url, saveAs);
return;
}
@@ -95,7 +95,8 @@ public void run() {
int tries = 0; // Number of attempts to download
do {
tries += 1;
InputStream bis = null; OutputStream fos = null;
InputStream bis = null;
OutputStream fos = null;
try {
logger.info(" Downloading file: " + urlToDownload + (tries > 0 ? " Retry #" + tries : ""));
observer.sendUpdate(STATUS.DOWNLOAD_STARTED, url.toExternalForm());
@@ -104,16 +105,16 @@
HttpURLConnection huc;
if (this.url.toString().startsWith("https")) {
huc = (HttpsURLConnection) urlToDownload.openConnection();
}
else {
} else {
huc = (HttpURLConnection) urlToDownload.openConnection();
}
huc.setInstanceFollowRedirects(true);
// It is important to set both ConnectTimeout and ReadTimeout. If you don't then ripme will wait forever
// It is important to set both ConnectTimeout and ReadTimeout. If you don't then
// ripme will wait forever
// for the server to send data after connecting.
huc.setConnectTimeout(TIMEOUT);
huc.setReadTimeout(TIMEOUT);
huc.setRequestProperty("accept", "*/*");
huc.setRequestProperty("accept", "*/*");
if (!referrer.equals("")) {
huc.setRequestProperty("Referer", referrer); // Sic
}
@@ -131,17 +132,18 @@ public void run() {
huc.setRequestProperty("Range", "bytes=" + fileSize + "-");
}
}
logger.debug(rb.getString("request.properties") + ": " + huc.getRequestProperties());
logger.debug(Utils.getLocalizedString("request.properties") + ": " + huc.getRequestProperties());
huc.connect();

int statusCode = huc.getResponseCode();
logger.debug("Status code: " + statusCode);
// If the server doesn't allow resuming downloads error out
if (statusCode != 206 && observer.tryResumeDownload() && saveAs.exists()) {
// TODO find a better way to handle servers that don't support resuming downloads then just erroring out
throw new IOException(rb.getString("server.doesnt.support.resuming.downloads"));
// TODO find a better way to handle servers that don't support resuming
// downloads then just erroring out
throw new IOException(Utils.getLocalizedString("server.doesnt.support.resuming.downloads"));
}
if (statusCode / 100 == 3) { // 3xx Redirect
if (statusCode / 100 == 3) { // 3xx Redirect
if (!redirected) {
// Don't increment retries on the first redirect
tries--;
@@ -153,14 +155,17 @@ public void run() {
throw new IOException("Redirect status code " + statusCode + " - redirect to " + location);
}
if (statusCode / 100 == 4) { // 4xx errors
logger.error("[!] " + rb.getString("nonretriable.status.code") + " " + statusCode + " while downloading from " + url);
observer.downloadErrored(url, rb.getString("nonretriable.status.code") + " " + statusCode + " while downloading " + url.toExternalForm());
logger.error("[!] " + Utils.getLocalizedString("nonretriable.status.code") + " " + statusCode
+ " while downloading from " + url);
observer.downloadErrored(url, Utils.getLocalizedString("nonretriable.status.code") + " "
+ statusCode + " while downloading " + url.toExternalForm());
return; // Not retriable, drop out.
}
if (statusCode / 100 == 5) { // 5xx errors
observer.downloadErrored(url, rb.getString("retriable.status.code") + " " + statusCode + " while downloading " + url.toExternalForm());
observer.downloadErrored(url, Utils.getLocalizedString("retriable.status.code") + " " + statusCode
+ " while downloading " + url.toExternalForm());
// Throw exception so download can be retried
throw new IOException(rb.getString("retriable.status.code") + " " + statusCode);
throw new IOException(Utils.getLocalizedString("retriable.status.code") + " " + statusCode);
}
if (huc.getContentLength() == 503 && urlToDownload.getHost().endsWith("imgur.com")) {
// Imgur image with 503 bytes is "404"
@@ -169,7 +174,8 @@ public void run() {
return;
}

// If the ripper is using the bytes progress bar set bytesTotal to huc.getContentLength()
// If the ripper is using the bytes progress bar set bytesTotal to
// huc.getContentLength()
if (observer.useByteProgessBar()) {
bytesTotal = huc.getContentLength();
observer.setBytesTotal(bytesTotal);
@@ -190,14 +196,15 @@ public void run() {
logger.error("Was unable to get content type from stream");
// Try to get the file type from the magic number
byte[] magicBytes = new byte[8];
bis.read(magicBytes,0, 5);
bis.read(magicBytes, 0, 5);
bis.reset();
fileExt = Utils.getEXTFromMagic(magicBytes);
if (fileExt != null) {
saveAs = new File(saveAs.toString() + "." + fileExt);
} else {
logger.error(rb.getString("was.unable.to.get.content.type.using.magic.number"));
logger.error(rb.getString("magic.number.was") + ": " + Arrays.toString(magicBytes));
logger.error(Utils.getLocalizedString("was.unable.to.get.content.type.using.magic.number"));
logger.error(
Utils.getLocalizedString("magic.number.was") + ": " + Arrays.toString(magicBytes));
}
}
}
@@ -210,21 +217,26 @@ public void run() {
} catch (FileNotFoundException e) {
// We do this because some filesystems have a max name length
if (e.getMessage().contains("File name too long")) {
logger.error("The filename " + saveAs.getName() + " is to long to be saved on this file system.");
logger.error("The filename " + saveAs.getName()
+ " is to long to be saved on this file system.");
logger.info("Shortening filename");
String[] saveAsSplit = saveAs.getName().split("\\.");
// Get the file extension so when we shorten the file name we don't cut off the file extension
// Get the file extension so when we shorten the file name we don't cut off the
// file extension
String fileExt = saveAsSplit[saveAsSplit.length - 1];
// The max limit for filenames on Linux with Ext3/4 is 255 bytes
logger.info(saveAs.getName().substring(0, 254 - fileExt.length()) + fileExt);
String filename = saveAs.getName().substring(0, 254 - fileExt.length()) + "." + fileExt;
// We can't just use the new file name as the saveAs because the file name doesn't include the
// We can't just use the new file name as the saveAs because the file name
// doesn't include the
// users save path, so we get the user save path from the old saveAs
saveAs = new File(saveAs.getParentFile().getAbsolutePath() + File.separator + filename);
fos = new FileOutputStream(saveAs);
} else if (saveAs.getAbsolutePath().length() > 259 && Utils.isWindows()) {
// This if is for when the file path has gone above 260 chars which windows does not allow
fos = new FileOutputStream(Utils.shortenSaveAsWindows(saveAs.getParentFile().getPath(), saveAs.getName()));
// This if is for when the file path has gone above 260 chars which windows does
// not allow
fos = new FileOutputStream(
Utils.shortenSaveAsWindows(saveAs.getParentFile().getPath(), saveAs.getName()));
}
}
}
@@ -239,7 +251,7 @@ public void run() {
try {
observer.stopCheck();
} catch (IOException e) {
observer.downloadErrored(url, rb.getString("download.interrupted"));
observer.downloadErrored(url, Utils.getLocalizedString("download.interrupted"));
return;
}
fos.write(data, 0, bytesRead);
@@ -259,27 +271,37 @@ public void run() {
// Download failed, break out of loop
break;
} catch (HttpStatusException hse) {
logger.debug(rb.getString("http.status.exception"), hse);
logger.debug(Utils.getLocalizedString("http.status.exception"), hse);
logger.error("[!] HTTP status " + hse.getStatusCode() + " while downloading from " + urlToDownload);
if (hse.getStatusCode() == 404 && Utils.getConfigBoolean("errors.skip404", false)) {
observer.downloadErrored(url, "HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
observer.downloadErrored(url,
"HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
return;
}
} catch (IOException e) {
logger.debug("IOException", e);
logger.error("[!] " + rb.getString("exception.while.downloading.file") + ": " + url + " - " + e.getMessage());
logger.error("[!] " + Utils.getLocalizedString("exception.while.downloading.file") + ": " + url + " - "
+ e.getMessage());
} finally {
// Close any open streams
try {
if (bis != null) { bis.close(); }
} catch (IOException e) { }
if (bis != null) {
bis.close();
}
} catch (IOException e) {
}
try {
if (fos != null) { fos.close(); }
} catch (IOException e) { }
if (fos != null) {
fos.close();
}
} catch (IOException e) {
}
}
if (tries > this.retries) {
logger.error("[!] " + rb.getString ("exceeded.maximum.retries") + " (" + this.retries + ") for URL " + url);
observer.downloadErrored(url, rb.getString("failed.to.download") + " " + url.toExternalForm());
logger.error("[!] " + Utils.getLocalizedString("exceeded.maximum.retries") + " (" + this.retries
+ ") for URL " + url);
observer.downloadErrored(url,
Utils.getLocalizedString("failed.to.download") + " " + url.toExternalForm());
return;
}
} while (true);
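
Most of the non-whitespace changes in DownloadFileThread.java swap direct `rb.getString(...)` lookups on `MainWindow.rb` for `Utils.getLocalizedString(...)`, so the download thread no longer carries its own `ResourceBundle` field. The actual implementation of `Utils.getLocalizedString` is not shown in this diff; the sketch below is only an assumption of what such a helper typically looks like, with a hypothetical bundle name and a fallback to the raw key:

```java
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

// Illustrative stand-in for the Utils.getLocalizedString(...) calls in the diff;
// not RipMe's actual Utils class.
public final class LocalizedStrings {

    // "LabelsBundle" is a hypothetical bundle name chosen for this sketch.
    private static final ResourceBundle BUNDLE =
            ResourceBundle.getBundle("LabelsBundle", Locale.getDefault());

    private LocalizedStrings() {
    }

    /**
     * Looks up a UI string by key, falling back to the key itself so a missing
     * translation never throws from inside a download thread.
     */
    public static String getLocalizedString(String key) {
        try {
            return BUNDLE.getString(key);
        } catch (MissingResourceException e) {
            return key;
        }
    }
}
```

With a helper like this, the call sites in the diff reduce to single expressions such as `Utils.getLocalizedString("download.interrupted")`, keeping localization in one place instead of passing `MainWindow.rb` into every worker thread.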
19 changes: 10 additions & 9 deletions src/main/java/com/rarchives/ripme/ripper/rippers/GfycatRipper.java
@@ -29,7 +29,7 @@ public class GfycatRipper extends AbstractHTMLRipper {


public GfycatRipper(URL url) throws IOException {
super(url);
super(new URL(url.toExternalForm().split("-")[0].replace("thumbs.", "")));
}

@Override
@@ -76,23 +76,24 @@ public void downloadURL(URL url, int index) {

@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^https?://[wm.]*gfycat\\.com/@?([a-zA-Z0-9]+).*$");
Pattern p = Pattern.compile("^https?://(thumbs\\.|[wm\\.]*)gfycat\\.com/@?([a-zA-Z0-9]+).*$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
}


if (m.matches())
return m.group(2);
throw new MalformedURLException(
"Expected gfycat.com format:"
+ "gfycat.com/id"
"Expected gfycat.com format: "
+ "gfycat.com/id or "
+ "thumbs.gfycat.com/id.gif"
+ " Got: " + url);
}

private String stripHTMLTags(String t) {
t = t.replaceAll("<html>\n" +
" <head></head>\n" +
" <body>", "");
t.replaceAll("</body>\n" +
t = t.replaceAll("</body>\n" +
"</html>", "");
t = t.replaceAll("\n", "");
t = t.replaceAll("=\"\"", "");
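
The GfycatRipper change canonicalizes thumbnail URLs in the constructor (dropping the `thumbs.` subdomain and everything after the first `-`) and widens the `getGID` pattern to accept `thumbs.gfycat.com` links, moving the captured ID from group 1 to group 2. Below is a small standalone check of that behavior, using the pattern and normalization exactly as they appear in the diff; the test URL itself is made up for illustration:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GfycatUrlCheck {
    public static void main(String[] args) {
        // Pattern from the updated getGID(); the gfycat ID is now captured in group 2.
        Pattern p = Pattern.compile("^https?://(thumbs\\.|[wm\\.]*)gfycat\\.com/@?([a-zA-Z0-9]+).*$");

        // Illustrative thumbs URL; any real gfycat ID would do.
        String thumbUrl = "https://thumbs.gfycat.com/SomeExampleId-size_restricted.gif";

        Matcher m = p.matcher(thumbUrl);
        if (m.matches()) {
            System.out.println("GID: " + m.group(2)); // SomeExampleId
        }

        // Constructor normalization from the diff: strip the "-..." suffix and the thumbs. subdomain.
        String canonical = thumbUrl.split("-")[0].replace("thumbs.", "");
        System.out.println("Canonical: " + canonical); // https://gfycat.com/SomeExampleId
    }
}
```

With that normalization in the constructor, a queued thumbs URL rips the same gfycat page as the canonical gfycat.com link, which matches the "Fixes for gfycat thumbs urls" entry in the 1.7.87 changelog.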