_DownloadFile()
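+# eternal.onion landing pages: revert https to http and rewrite /file/ urls to
+# /download/ urls before handing the result to direct_DownloadFile.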
+eh_DownloadFile() {
+ local pUrl="$1"
+ local pFileCnt="$2"
+ local pUrlMod="$pUrl"
+ if grep -Eqi '\.onion' <<< "$pUrlMod" && grep -Eqi 'https://' <<< "$pUrlMod" ; then
+ echo -e "${PINK}| Reverting .onion address to http...${NC}"
+ pUrlMod="${pUrlMod/https/http}"
+ fi
+ if grep -Eqi '\.onion/file/' <<< "$pUrlMod" ; then
+ echo -e "${PINK}| Switching to download url...${NC}"
+ pUrlMod="${pUrlMod/\.onion\/file\//\.onion\/download\/}"
+ fi
+ echo -e "[${BLUE}ModifiedUrl${NC}]: ${pUrlMod}"
+ direct_DownloadFile "$pUrl" "$pFileCnt" "$pUrlMod"
+}
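
For illustration, the rewrite chain above behaves like this (a minimal sketch; the .onion hostname is a made-up placeholder, not the real eternal hosting address):

    pUrlMod="https://exampleaddress.onion/file/abc123"
    pUrlMod="${pUrlMod/https/http}"                           # http://exampleaddress.onion/file/abc123
    pUrlMod="${pUrlMod/\.onion\/file\//\.onion\/download\/}"  # http://exampleaddress.onion/download/abc123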
diff --git a/hosts/fileblade.sh b/hosts/fileblade.sh
old mode 100644
new mode 100755
index 4ddae54..87dd105
--- a/hosts/fileblade.sh
+++ b/hosts/fileblade.sh
@@ -1,6 +1,6 @@
#! Name: fileblade.sh
#! Author: kittykat
-#! Version: 2024.12.20
+#! Version: 2024.12.28
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -136,7 +136,6 @@ fb_FetchFileInfo() {
post_id=$(grep -oPi -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response")
post_fname=$(grep -oPi -m 1 '(?<=input type="hidden" name="fname" value=").*(?=">.*$)' <<< "$response")
post_referer=$(grep -oPi -m 1 '(?<=input type="hidden" name="referer" value=").*(?=">.*$)' <<< "$response")
- post_action=$(urlencode_literal_grouped_case_urlendingonly "${post_action}")
fi
if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]] ; then
rm -f "${fb_cookie_jar}";
@@ -155,20 +154,18 @@ fb_FetchFileInfo() {
break
fi
done
- echo -e "| Download countdown (10s)…"
- sleep 10s
echo -e "${GREEN}# Fetching download2…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
printf " _"
download_url=""
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $fb_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15
- form_data="op=$post_op&usr_login=$post_usr_login&id=$post_id&fname=$post_fname&referer=$post_referer&method_free=method_free"
+ form_data="op=$post_op&usr_login=&id=$post_id&fname=$post_fname&referer=&method_free=method_free"
response=$(tor_curl_request --insecure -L -s -X POST \
-b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
--data "$form_data" "$post_action")
if [ "${DebugAllEnabled}" == "true" ] ; then
- debugHtml "${remote_url##*/}" "fb_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
+ debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
fi
if [[ -z $response ]] ; then
if [ $i == $maxfetchretries ] ; then
@@ -201,14 +198,29 @@ fb_FetchFileInfo() {
failedRetryDownload "${remote_url}" "Pro download only. [Free users not allowed download over 100MB]" ""
return 1
fi
- if grep -Eqi 'Just a moment...' <<< "$response"; then
+ if grep -Eqi 'Your subsequent download will be started in' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${fb_cookie_jar}";
printf "\\n"
- echo -e "${RED}| Failed to extract download link [3].${NC}"
+ echo -e "${RED}| Subsequent download wait.. [3b]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
- failedRetryDownload "${remote_url}" "" ""
+ failedRetryDownload "${remote_url}" "Subsequent download wait.. [3b]" ""
+ fi
+ return 1
+ else
+ tor_identity="${RANDOM}"
+ continue
+ fi
+ fi
+ if grep -Eqi '' <<< "$response"; then
+ if [ $i == $maxfetchretries ] ; then
+ rm -f "${fb_cookie_jar}";
+ printf "\\n"
+ echo -e "${RED}| Failed to extract download link (Unknown warning encountered) [3c]${NC}"
+ warnAndRetryUnknownError=true
+ if [ "${finalAttempt}" == "true" ] ; then
+ failedRetryDownload "${remote_url}" "Unknown warning encountered in download2 [3c]" ""
fi
return 1
else
@@ -255,7 +267,7 @@ fb_FetchFileInfo() {
-b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
--data "$form_data" "$post_action")
if [ "${DebugAllEnabled}" == "true" ] ; then
- debugHtml "${remote_url##*/}" "fb_post2" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
+ debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
fi
if [[ -z $response ]] ; then
if [ $i == $maxfetchretries ] ; then
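
The new [3b] and [3c] branches follow the same convention as the rest of the host scripts: rotate the Tor identity and retry rather than failing outright. A minimal, self-contained sketch of that pattern (plain curl stands in for mad.sh's tor_curl_request helper; the url is a placeholder):

    max_retries=3
    url="http://example.test/file/abc123"
    for ((i=1; i<=max_retries; i++)); do
      response=$(curl -s "$url")
      if grep -Eqi 'Your subsequent download will be started in' <<< "$response"; then
        tor_identity="${RANDOM}"   # mad.sh appears to key its Tor circuit off this value
        continue                   # retry the request on a fresh circuit
      fi
      break
    done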
diff --git a/hosts/gofile.sh b/hosts/gofile.sh
old mode 100644
new mode 100755
index e4c8de8..0326ccd
--- a/hosts/gofile.sh
+++ b/hosts/gofile.sh
@@ -1,6 +1,6 @@
#! Name: gofile.sh
#! Author: kittykat
-#! Version: 2024.09.13
+#! Version: 2025.01.03
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -197,6 +197,14 @@ gofile_FetchFileInfo() {
else
continue
fi
+ fi
+ if grep -Eqi '"children":\{\}' <<< "$response"; then
+ rm -f "${gofile_cookie_jar}";
+ printf "\\n"
+ echo -e "${RED}| Bulk download is a Premium feature. (No children)${NC}"
+ exitDownloadError=true
+ removedDownload "${remote_url}" "Bulk download is a Premium feature"
+ return 1
fi
if grep -Eqi '"status":"ok"' <<< "$response"; then
download_url=$(grep -oPi '(?<="link":").*?(?=")' <<< "$response")
@@ -225,7 +233,7 @@ gofile_FetchFileInfo() {
return 1
else
continue
- fi
+ fi
fi
else
rm -f "${gofile_cookie_jar}";
diff --git a/hosts/up_axfc.sh b/hosts/up_axfc.sh
old mode 100644
new mode 100755
index 11a2489..0419285
--- a/hosts/up_axfc.sh
+++ b/hosts/up_axfc.sh
@@ -1,6 +1,6 @@
#! Name: up_axfc.sh
#! Author: kittykat
-#! Version: 2024.12.26
+#! Version: 2025.01.02
#! Desc: Add support for uploading files to a new host
#! Info: Files are accessible at https://www.axfc.net/
#! MaxSize: 2GB
@@ -190,14 +190,28 @@ axfc_PostFile() {
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
- if grep -Eqi 'Axfc Uploader -投稿完了.*キーワード付きURL:.*a href="https://www.axfc.net.*(QueryString無しVer)' <<< "${response}" ; then
+ response_ascii=$(echo "$response" | iconv -c -f UTF-8 -t ASCII//TRANSLIT)
+ if [ "${DebugAllEnabled}" == "true" ] ; then
+ debugHtml "${filepath##*/}" "${_hostCode}_upload_ascii" "post_url: ${PostUrlHost}"$'\n'"${response_ascii}"
+ fi
+ if grep -Eqi -m 1 'a href="https://www\.axfc\.net\/u\/.*\?key=1234"' <<< "${response_ascii}" ; then
+ url=$(grep -oPi -m 1 '(?<=a href="https://www\.axfc\.net\/u\/).*?(?=\?key=)' <<< "$response_ascii")
+ key=$(grep -oPi -m 1 '(?<=\?key=).*?(?=".*$)' <<< "$response_ascii")
+ filesize=$(GetFileSize "$filepath" "false")
+ downloadLink="https://www.axfc.net/u/${url%%$'\n'*}?key=${key%%$'\n'*}"
+ echo -e "${GREEN}| Upload Success${NC}"
+ echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
+ echo -e "| Link [1]: ${YELLOW}${downloadLink}${NC}"
+ successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
+ return 0
+ elif grep -Eqi 'Axfc Uploader -投稿完了.*キーワード付きURL:.*a href="https://www.axfc.net.*(QueryString無しVer)' <<< "${response}" ; then
subSearch=$(awk '/Axfc Uploader -投稿完了/,/(QueryString無しVer)/' <<< "$response")
url=$(grep -oPi -m 1 '(?<=キーワード付きURL:|ファイルIDのみ:).*$' <<< "$subSearch")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="${url%%$'\n'*}"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
- echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
+ echo -e "| Link [2]: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
else
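
The iconv pass exists so the success check can grep ASCII-only text instead of wrestling with the kanji in axfc's upload confirmation page. In isolation (the sample string is made up, not real axfc output):

    printf '投稿完了 <a href="https://www.axfc.net/u/4123456?key=1234">\n' \
      | iconv -c -f UTF-8 -t ASCII//TRANSLIT
    # Kanji either degrade to '?' or are dropped, depending on the iconv
    # implementation; the ASCII href survives intact, which is what the
    # axfc.net/u/ grep relies on.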
diff --git a/mad.sh b/mad.sh
index e77396d..6cf2f4d 100644
--- a/mad.sh
+++ b/mad.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# Copyright 2023-2024 kittykat (kittykat@morke.org) | (luvherpurr@torbox3uiot6wchz.onion)
+# Copyright 2023-2025 kittykat (kittykat@morke.org) | (luvherpurr@torbox3uiot6wchz.onion)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -31,9 +31,16 @@
# * klonkerz - feedback and suggestions, url only processing
# * Everyone who provided feedback and helped test.. and those who wish to remain anonymous
-ScriptVersion=2024.12.26
+ScriptVersion=2025.01.04
#=================================================
# Recent Additions
+# 2025.01.03 - [gofile] Detect "Bulk download is a Premium feature" response (no children)
+# 2025.01.02 - [up_axfc] Update upload response check to handle kanji chars (strip via iconv transliteration)
+# 2025.01.02 - [dashfile] Add response 'This file reached max downloads limit'. New cookie on captcha fail
+# 2024.12.28 - [dashfile] Update captcha code check
+# 2024.12.28 - [anonfile] Add new download link href response
+# 2024.12.28 - [fileblade] Add additional response handling (subsequent downloads, unknown warnings)
+# 2024.12.28 - [eternalhosting] Update eternal.onion to handle landing page (eternal.onion/file/)
# 2024.12.26 - [up_kouploader / up_axfc / up_torup] Fixed failedRetryUpload (was using download logging)
# * Thanks Belky
# 2024.12.26 - [anonfile / up_anonfile] Add anonfile.de as upload / download host
|