# 2025.01.03 - [gofile] Detect "Bulk download is a Premium feature" response (no children)

# 2025.01.02 - [up_axfc] Update PUT response check to handle kanji chars (strip to ASCII)
# 2025.01.02 - [dashfile] Add response 'This file reached max downloads limit'. New cookie on captcha fail
# 2024.12.28 - [dashfile] Update captcha code check
# 2024.12.28 - [anonfile] Add new download link href response
# 2024.12.28 - [fileblade] Add additional response handling (subsequent downloads, unknown warnings)
# 2024.12.28 - [eternalhosting] Update eternal.onion to handle landing page (eternal.onion/file/)
kittykat 2025-01-04 05:51:50 +00:00
parent e6804e01e1
commit 30eedaf567
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
10 changed files with 766 additions and 687 deletions

hosts/anonfile.sh (28 changes) Normal file → Executable file

@@ -1,6 +1,6 @@
#! Name: anonfile.sh
#! Author: kittykat
#! Version: 2024.12.26
#! Version: 2024.12.28
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -137,7 +137,7 @@ anon_FetchFileInfo() {
fi
if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then
printf "\\n"
echo -e "${GREEN}| Captcha found.${NC}"
echo -e "${GREEN}| Post link found.${NC}"
post_op=$(grep -oP '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response")
post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response")
post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$response")
@@ -327,8 +327,8 @@ anon_FetchFileInfo() {
fi
fi
done
echo -e "| Captcha countdown (10s)…"
sleep 10s
echo -e "| Captcha countdown (5s)…"
sleep 5s
maxfetchretries=1
echo -e "${GREEN}# Fetching download url…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
@@ -413,7 +413,12 @@ anon_FetchFileInfo() {
fi
if grep -Eqi '<a class="stretched-link" href="https://anonfile.de' <<< "$response"; then
printf "\\n"
echo -e "${GREEN}| Download url found.${NC}"
echo -e "${GREEN}| Download url found [1]${NC}"
download_url=$(grep -oP '(?<=<a class="stretched-link" href=").*?(?=">.*$)' <<< "$response")
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
elif grep -Eqi '<a class="stretched-link" href="' <<< "$response"; then
printf "\\n"
echo -e "${GREEN}| Download url found [2]${NC}"
download_url=$(grep -oP '(?<=<a class="stretched-link" href=").*?(?=">.*$)' <<< "$response")
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
fi
@@ -531,12 +536,15 @@ anon_FetchFileInfo() {
#! ----------- (3) Fetch File / Download File Function --------------- #
#!
anon_GetFile() {
echo -e "${GREEN}# Downloading…"
echo -e "${GREEN}# Downloading… ${BLUE}(No Resume)${NC}"
echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
fileCnt=$1
retryCnt=$2
finalAttempt=$3
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
if [ -f "$file_path" ]; then
rm -f "$file_path"
fi
for ((j=1; j<=$MaxDownloadRetries; j++)); do
pd_presize=0
if [ -f "$file_path" ] ; then
@@ -552,12 +560,12 @@ anon_GetFile() {
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
--referer "${fixed_url}" \
"$download_url" --continue-at - --output "$file_path"
"$download_url" --output "$file_path"
else
tor_curl_request --insecure \
-b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
--referer "${fixed_url}" \
"$download_url" --continue-at - --output "$file_path"
"$download_url" --output "$file_path"
fi
else
if [ "${RateMonitorEnabled}" == "true" ]; then
@@ -576,7 +584,7 @@ anon_GetFile() {
-H "Sec-Fetch-User: ?1" \
-b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
--referer "${fixed_url}" \
"$download_url" --continue-at - --output "$file_path"
"$download_url" --output "$file_path"
else
tor_curl_request --insecure \
-H "User-Agent: $RandomUA" \
@@ -592,7 +600,7 @@ anon_GetFile() {
-H "Sec-Fetch-User: ?1" \
-b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
--referer "${fixed_url}" \
"$download_url" --continue-at - --output "$file_path"
"$download_url" --output "$file_path"
fi
fi
received_file_size=0
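
The anonfile changes drop curl's `--continue-at -` and delete any leftover partial file before the retry loop, so every attempt is a clean full download (hence the new "(No Resume)" banner). A minimal sketch of that pattern, reusing the script's own helpers and variables (`tor_curl_request`, `download_url`, `file_path`); the `&& break` success test is an illustrative simplification:

```bash
# No-resume download: clear any stale partial file, then fetch without
# --continue-at so each attempt restarts from byte 0.
[ -f "$file_path" ] && rm -f "$file_path"
for ((j=1; j<=MaxDownloadRetries; j++)); do
  tor_curl_request --insecure \
    -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
    --referer "${fixed_url}" \
    "$download_url" --output "$file_path" && break
  rm -f "$file_path"   # a failed attempt may leave a partial file; discard it
done
```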

hosts/dashfile.sh (53 changes) Normal file → Executable file

@@ -1,6 +1,6 @@
#! Name: dashfile.sh
#! Author: kittykat
#! Version: 2024.12.25
#! Version: 2025.01.02
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -107,7 +107,7 @@ dash_FetchFileInfo() {
continue
fi
fi
if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|<table id="error_message"' <<< "$response"; then
if grep -Eqi 'No such file with this filename|File was deleted|<table id="error_message"' <<< "$response"; then
rm -f "${dash_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
@@ -115,6 +115,14 @@ dash_FetchFileInfo() {
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'This file reached max downloads limit' <<< "$response"; then
rm -f "${dash_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file has reached max downloads limit${NC}"
exitDownloadError=true
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'name="method_free" value="Free Download">' <<< "$response"; then
printf "\\n"
echo -e "${GREEN}| Post link found.${NC}"
@@ -194,7 +202,15 @@ dash_FetchFileInfo() {
exitDownloadError=true
removedDownload "${remote_url}"
return 1
fi
fi
if grep -Eqi 'This file reached max downloads limit' <<< "$response"; then
rm -f "${dash_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file has reached max downloads limit${NC}"
exitDownloadError=true
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${dash_cookie_jar}";
@@ -220,9 +236,21 @@ dash_FetchFileInfo() {
pval4=$(grep -oP -m 1 '<span style='"'"'position:absolute;padding-left:6[0-9]px;padding-top:[0-9]+px;'"'"'>&#\K.*?(?=;</span>)' <<< "$codeline" )
val1=$((pval1-48)); val2=$((pval2-48)); val3=$((pval3-48)); val4=$((pval4-48))
captcha_code="${val1}${val2}${val3}${val4}"
if grep -Eqi 'name="method_free" value="Free Download">' <<< "$response"; then
if [ -z "$captcha_code" ] || grep -Eqi '-' <<< "$captcha_code"; then
rm -f "${dash_cookie_jar}";
printf "\\n"
echo -e "${RED}| Bad captcha code [2]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Bad captcha code [2]" ""
fi
return 1
else
printf "\\n"
echo -e "${GREEN}| Captcha found.${NC}"
fi
if grep -Eqi 'name="method_free" value="Free Download">' <<< "$response"; then
echo -e "${GREEN}| Post2 found.${NC}"
post_op=$(grep -oP '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response")
post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response")
post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">)' <<< "$response")
@@ -231,7 +259,6 @@ dash_FetchFileInfo() {
else
if [ $i == $maxfetchretries ] ; then
rm -f "${dash_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [5]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
@@ -243,25 +270,9 @@ dash_FetchFileInfo() {
continue
fi
fi
if grep -Eqi '-' <<< "$captcha_code"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${dash_cookie_jar}";
printf "\\n"
echo -e "${RED}| Bad captcha code [2]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Bad captcha code [2]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if [[ -z "$captcha_code" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then
if [ $i == $maxfetchretries ] ; then
rm -f "${dash_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [6]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then

hosts/eternalhosting.sh (25 changes) Normal file → Executable file

@@ -1,6 +1,6 @@
#! Name: eternalhosting.sh
#! Author: kittykat
#! Version: 2024.09.13
#! Version: 2024.12.28
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -15,7 +15,7 @@
#! HostDomainRegex: The regex used to verify matching urls
HostCode='eh'
HostNick='eternal hosting'
HostFuncPrefix='direct'
HostFuncPrefix='eh'
HostUrls='eternalcbrzpicytj4zyguygpmkjlkddxob7tptlr25cdipe5svyqoqd.onion'
HostDomainRegex='^(http|https)://eternalcbrzpicytj4zyguygpmkjlkddxob7tptlr25cdipe5svyqoqd\.onion/file/'
#!
@@ -26,6 +26,21 @@ ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! This is a direct= download host, so all the functions are already in mad.sh
#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
#! call the direct_DownloadFile() function already in mad.sh
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#! <HostFuncPrefix>_DownloadFile()
eh_DownloadFile() {
local pUrl="$1"
local pFileCnt="$2"
local pUrlMod="$pUrl"
if grep -Eqi '\.onion' <<< "$pUrlMod" && grep -Eqi 'https://' <<< "$pUrlMod" ; then
echo -e "${PINK}| Reverting .onion address to http...${NC}"
pUrlMod="${pUrlMod/https/http}"
fi
if grep -Eqi '\.onion/file/' <<< "$pUrlMod" ; then
echo -e "${PINK}| Switching to download url...${NC}"
pUrlMod="${pUrlMod/\.onion\/file\//\.onion\/download\/}"
fi
echo -e "[${BLUE}ModifiedUrl${NC}]: ${pUrlMod}"
direct_DownloadFile "$pUrl" "$pFileCnt" "$pUrlMod"
}
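
The new `eh_DownloadFile()` is a thin URL rewriter in front of the generic `direct_DownloadFile()` already in mad.sh: onion links are forced to plain http, and the `/file/` landing page is switched to the `/download/` endpoint. The two substitutions in isolation (file id `abc123` is made up):

```bash
# URL rewrites performed by eh_DownloadFile before delegating (sample id).
u='https://eternalcbrzpicytj4zyguygpmkjlkddxob7tptlr25cdipe5svyqoqd.onion/file/abc123'
u="${u/https/http}"                           # onion services are served over http
u="${u/\.onion\/file\//\.onion\/download\/}"  # landing page -> direct download
echo "$u"
```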

hosts/fileblade.sh (32 changes) Normal file → Executable file

@@ -1,6 +1,6 @@
#! Name: fileblade.sh
#! Author: kittykat
#! Version: 2024.12.20
#! Version: 2024.12.28
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -136,7 +136,6 @@ fb_FetchFileInfo() {
post_id=$(grep -oPi -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response")
post_fname=$(grep -oPi -m 1 '(?<=input type="hidden" name="fname" value=").*(?=">.*$)' <<< "$response")
post_referer=$(grep -oPi -m 1 '(?<=input type="hidden" name="referer" value=").*(?=">.*$)' <<< "$response")
post_action=$(urlencode_literal_grouped_case_urlendingonly "${post_action}")
fi
if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]] ; then
rm -f "${fb_cookie_jar}";
@@ -155,20 +154,18 @@ fb_FetchFileInfo() {
break
fi
done
echo -e "| Download countdown (10s)…"
sleep 10s
echo -e "${GREEN}# Fetching download2…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
printf " _"
download_url=""
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $fb_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15
form_data="op=$post_op&usr_login=$post_usr_login&id=$post_id&fname=$post_fname&referer=$post_referer&method_free=method_free"
form_data="op=$post_op&usr_login=&id=$post_id&fname=$post_fname&referer=&method_free=method_free"
response=$(tor_curl_request --insecure -L -s -X POST \
-b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
--data "$form_data" "$post_action")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "fb_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
fi
if [[ -z $response ]] ; then
if [ $i == $maxfetchretries ] ; then
@@ -201,14 +198,29 @@ fb_FetchFileInfo() {
failedRetryDownload "${remote_url}" "Pro download only. [Free users not allowed download over 100MB]" ""
return 1
fi
if grep -Eqi 'Just a moment...' <<< "$response"; then
if grep -Eqi 'Your subsequent download will be started in' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${fb_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [3].${NC}"
echo -e "${RED}| Subsequent download wait.. [3b]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
failedRetryDownload "${remote_url}" "Subsequent download wait.. [3b]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if grep -Eqi '<p class="text-warning mb-0">' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${fb_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link (Unknown warning encountered) [3c]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Unknown warning encountered in download2 [3c]" ""
fi
return 1
else
@@ -255,7 +267,7 @@ fb_FetchFileInfo() {
-b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
--data "$form_data" "$post_action")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "fb_post2" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
fi
if [[ -z $response ]] ; then
if [ $i == $maxfetchretries ] ; then
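
Both new fileblade branches ("subsequent download" waits and unhandled `text-warning` notices) follow the file's standing retry convention: on a soft failure, seed a fresh Tor identity and loop; only once `maxfetchretries` is exhausted is the download marked failed. A condensed sketch of that convention, assuming the script's existing helpers (the script's `return 1` becomes `break` here):

```bash
# Soft-failure retry convention used by the new [3b]/[3c] branches.
for ((i=1; i<=maxfetchretries; i++)); do
  response=$(tor_curl_request --insecure -L -s -X POST \
    -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
    --data "$form_data" "$post_action")
  if grep -Eqi 'Your subsequent download will be started in' <<< "$response"; then
    if [ "$i" == "$maxfetchretries" ]; then
      warnAndRetryUnknownError=true
      [ "${finalAttempt}" == "true" ] && \
        failedRetryDownload "${remote_url}" "Subsequent download wait.. [3b]" ""
      break
    fi
    tor_identity="${RANDOM}"   # rotate the Tor circuit before the next attempt
    continue
  fi
  break   # no wait page: proceed to link extraction
done
```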

hosts/gofile.sh (12 changes) Normal file → Executable file

@@ -1,6 +1,6 @@
#! Name: gofile.sh
#! Author: kittykat
#! Version: 2024.09.13
#! Version: 2025.01.03
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -197,6 +197,14 @@ gofile_FetchFileInfo() {
else
continue
fi
fi
if grep -Eqi '"children":\{\}' <<< "$response"; then
rm -f "${gofile_cookie_jar}";
printf "\\n"
echo -e "${RED}| Bulk download is a Premium feature. (No children)${NC}"
exitDownloadError=true
removedDownload "${remote_url}" "Bulk download is a Premium feature"
return 1
fi
if grep -Eqi '"status":"ok"' <<< "$response"; then
download_url=$(grep -oPi '(?<="link":").*?(?=")' <<< "$response")
@@ -225,7 +233,7 @@ gofile_FetchFileInfo() {
return 1
else
continue
fi
fi
fi
else
rm -f "${gofile_cookie_jar}";

hosts/up_axfc.sh (20 changes) Normal file → Executable file

@@ -1,6 +1,6 @@
#! Name: up_axfc.sh
#! Author: kittykat
#! Version: 2024.12.26
#! Version: 2025.01.02
#! Desc: Add support for uploading files to a new host
#! Info: Files are accessible at https://www.axfc.net/<hash>
#! MaxSize: 2GB
@@ -190,14 +190,28 @@ axfc_PostFile() {
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
if grep -Eqi 'Axfc Uploader -投稿完了.*キーワード付きURL.*a href="https://www.axfc.net.*(QueryString無しVer)' <<< "${response}" ; then
response_ascii=$(echo "$response" | iconv -c -f UTF-8 -t ASCII//TRANSLIT)
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload_ascii" "post_url: ${PostUrlHost}"$'\n'"${response_ascii}"
fi
if grep -Eqi -m 1 'a href="https://www\.axfc\.net\/u\/.*\?key=1234"' <<< "${response_ascii}" ; then
url=$(grep -oPi -m 1 '(?<=a href="https://www\.axfc\.net\/u\/).*?(?=".*$)' <<< "$response_ascii")
key=$(grep -oPi -m 1 '(?<=\?key=).*?(?=".*$)' <<< "$response_ascii")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="https://www.axfc.net/u/${url%%$'\n'*}?key=${key%%$'\n'*}"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link [1]: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
elif grep -Eqi 'Axfc Uploader -投稿完了.*キーワード付きURL.*a href="https://www.axfc.net.*(QueryString無しVer)' <<< "${response}" ; then
subSearch=$(awk '/Axfc Uploader -投稿完了/,/(QueryString無しVer)/' <<< "$response")
url=$(grep -oPi -m 1 '(?<=キーワード付きURL\</td\>\</tr\>\<tr\>\<td\>\<a href\=").*?(?=" target="_blank">ファイルIDのみ.*$)' <<< "$subSearch")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="${url%%$'\n'*}"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
echo -e "| Link [2]: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
else
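
The up_axfc rework sidesteps the kanji-matching problem: the upload response is transliterated to ASCII first (`iconv -c` silently drops anything that will not convert), so the primary success check only needs the stable URL and key fragments, while the original kanji pattern is kept as a fallback `elif`. The core of the ASCII path as a standalone sketch; the extraction regexes are simplified here relative to the script's own:

```bash
# Kanji-safe success check: strip the response down to ASCII, then match
# only the stable https://www.axfc.net/u/<id>?key=<key> fragment.
response_ascii=$(iconv -c -f UTF-8 -t ASCII//TRANSLIT <<< "$response")
if grep -qi 'a href="https://www\.axfc\.net/u/' <<< "$response_ascii"; then
  file_id=$(grep -oPi -m 1 '(?<=a href="https://www\.axfc\.net/u/)[^?"]*' <<< "$response_ascii")
  key=$(grep -oPi -m 1 '(?<=\?key=)[^"]*' <<< "$response_ascii")
  downloadLink="https://www.axfc.net/u/${file_id}?key=${key}"
fi
```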