# 2025.02.25 - [mad + allhosts] Re-engineer BadHtml scan to only scan the first 10kb of downloaded partials
# 2025.02.24 - [pixeldrain] Update "The file is IP limited" response handling retry
# 2025.02.22 - [blackcloud_onion] Add bcloud.onion download handling (url fixing)
# 2025.02.21 - [anonfile] Update cdn link parsing to handle new subdomains
# 2025.02.21 - [anonfile] Add download limit reached response handling
# 2025.02.21 - [anonfile] Update file info retrieval (head no longer responds)
# 2025.02.21 - [sendspace] Add sendspace.com as download host
# 2025.02.21 - [oshi / up_oshi] Revert /nossl/ changes for oshi.at (clearnet)
parent 6f338dec65
commit 83d17967d6
54 changed files with 1316 additions and 822 deletions
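For context on the headline change: rather than grepping an entire downloaded partial for error-page markup, the BadHtml scan now reads only the first 10kb. A minimal sketch of that idea, assuming a hypothetical helper name (`head_scan_for_html`) and marker list; the repo's actual logic lives in the shared CheckNoHtml/BadHtml code:

```bash
#!/usr/bin/env bash
# Sketch: sniff only the first 10 KB of a partial download for HTML
# error-page markers instead of scanning the whole file.
# Hypothetical helper; the 10240-byte cap mirrors the commit message,
# not the repo's actual implementation.
head_scan_for_html() {
    local file_path="$1"
    # head -c caps the read at 10 KB regardless of file size
    if head -c 10240 "$file_path" | grep -qiE '<!DOCTYPE html|<html|<title>'; then
        return 1   # looks like an HTML error page, not file data
    fi
    return 0       # no HTML markers in the scanned window
}
```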
anonfile.sh
@@ -1,6 +1,6 @@
 #! Name: anonfile.sh
 #! Author: kittykat
-#! Version: 2025.01.13
+#! Version: 2025.02.21
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -381,6 +381,21 @@ anon_FetchFileInfo() {
 				continue
 			fi
 		fi
+		if grep -Eqi 'You have reached the download-limit' <<< "$response"; then
+			if [[ $i == $maxfetchretries ]] ; then
+				rm -f "${anon_cookie_jar}";
+				printf "\\n"
+				echo -e "${RED}| Failed to extract download link [limit].${NC}"
+				warnAndRetryUnknownError=true
+				if [[ "${finalAttempt}" == "true" ]] ; then
+					failedRetryDownload "${remote_url}" "Failed to extract download link [limit]" ""
+				fi
+				return 1
+			else
+				tor_identity="${RANDOM}"
+				continue
+			fi
+		fi
 		if grep -Eqi 'Just a moment...' <<< "$response"; then
 			if [[ $i == $maxfetchretries ]] ; then
 				rm -f "${anon_cookie_jar}";
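The new branch follows the script's existing retry pattern: on a recognized limit page, give up on the final attempt, otherwise request a fresh Tor identity and retry. A condensed sketch of that pattern, with hypothetical stand-ins (`fetch_page`, `max_retries`) for the surrounding loop machinery:

```bash
# Sketch of the detect-and-rotate retry pattern used above.
# fetch_page and max_retries are hypothetical stand-ins; the real loop
# uses tor_curl_request and $maxfetchretries.
max_retries=3
for ((i = 1; i <= max_retries; i++)); do
    response=$(fetch_page "$remote_url")
    if grep -qi 'You have reached the download-limit' <<< "$response"; then
        if ((i == max_retries)); then
            echo "Giving up: download limit persisted across retries" >&2
            exit 1
        fi
        # New value presumably makes tor_curl_request isolate a new circuit
        tor_identity="${RANDOM}"
        continue
    fi
    break   # page looks usable; fall through to parsing
done
```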
@@ -411,18 +426,25 @@ anon_FetchFileInfo() {
 				continue
 			fi
 		fi
-		if grep -Eqi '<a class="stretched-link" href="https://anonfile.de' <<< "$response"; then
+		if grep -Eqi 'File Download Link Generated' <<< "$response" && grep -Eqi 'bytes\)</small>' <<< "$response" ; then
+			file_size_bytes=$(grep -oPi -m 1 '(?<= <small>\().*?(?= bytes\)</small>.*$)' <<< "$response")
+			file_size_bytes=${file_size_bytes//[$'\t\r\n']}
+		fi
+		if [[ -z "$file_size_bytes" ]]; then
+			printf "\\n"
+			echo -e "${RED}| Failed to extract file info [3]${NC}"
+			warnAndRetryUnknownError=true
+			if [[ "${finalAttempt}" == "true" ]] ; then
+				failedRetryDownload "${remote_url}" "Failed to extract file info [3]" ""
+			fi
+			return 1
+		fi
+		if grep -Eqi '<a class="stretched-link" href="https:' <<< "$response"; then
 			printf "\\n"
 			echo -e "${GREEN}| Download url found [1]${NC}"
 			download_url=$(grep -oP '(?<=<a class="stretched-link" href=").*?(?=">.*$)' <<< "$response")
 			filename="${download_url##*\/}"
 			download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
-		elif grep -Eqi '<a class="stretched-link" href="' <<< "$response"; then
-			printf "\\n"
-			echo -e "${GREEN}| Download url found [2]${NC}"
-			download_url=$(grep -oP '(?<=<a class="stretched-link" href=").*?(?=">.*$)' <<< "$response")
-			filename="${download_url##*\/}"
-			download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
 		fi
 		if [[ -z "$download_url" ]] ; then
 			if [[ $i == $maxfetchretries ]] ; then
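The file size is now parsed straight out of the download page with PCRE lookarounds instead of a follow-up request. A standalone demo of the same grep technique with a slightly simplified pattern; the sample HTML here is illustrative, not a captured anonfile response:

```bash
# Demo: extract a byte count with grep's PCRE lookarounds, as the new
# file_size_bytes parsing does. Sample HTML and pattern are simplified
# (the script's lookbehind also expects a leading space).
response='<h5>backup.zip <small>(1048576 bytes)</small></h5>'
file_size_bytes=$(grep -oPi -m 1 '(?<=<small>\().*?(?= bytes\)</small>)' <<< "$response")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}   # strip stray whitespace, as the script does
echo "$file_size_bytes"   # -> 1048576
```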
@@ -443,68 +465,6 @@ anon_FetchFileInfo() {
 			fi
 		done
 		rm -f "${anon_cookie_jar}";
-		echo -e "${GREEN}# Fetching file info…${NC}"
-		maxfetchretries=3
-		for ((j=1; j<=$maxfetchretries; j++)); do
-			printf " ."
-			CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
-			trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-			GetRandomUA
-			file_header=$(tor_curl_request -i -s --head \
-				--referer "${fixed_url}" \
-				"$download_url")
-			if [[ "${DebugAllEnabled}" == "true" ]] ; then
-				debugHtml "${remote_url##*/}" "anon_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
-			fi
-			if [[ -z $file_header ]] ; then
-				if [[ $j == $maxfetchretries ]] ; then
-					rm -f "${anon_cookie_jar}";
-					printf "\\n"
-					echo -e "${RED}| Failed to extract file info${NC}"
-					warnAndRetryUnknownError=true
-					if [[ "${finalAttempt}" == "true" ]] ; then
-						failedRetryDownload "${remote_url}" "Failed to extract file info" ""
-					fi
-					return 1
-				else
-					tor_identity="${RANDOM}"
-					continue
-				fi
-			fi
-			if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
-				if [[ $j == $maxfetchretries ]] ; then
-					rm -f "${anon_cookie_jar}";
-					printf "\\n"
-					echo -e "${RED}| Failed to extract file info${NC}"
-					warnAndRetryUnknownError=true
-					if [[ "${finalAttempt}" == "true" ]] ; then
-						failedRetryDownload "${remote_url}" "" ""
-					fi
-					return 1
-				else
-					tor_identity="${RANDOM}"
-					continue
-				fi
-			fi
-			file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
-			file_size_bytes=${file_size_bytes//[$'\t\r\n']}
-			if [[ -z "$file_size_bytes" ]]; then
-				if [[ $j == $maxfetchretries ]] ; then
-					rm -f "${anon_cookie_jar}";
-					printf "\\n"
-					echo -e "${RED}| Failed to extract file size.${NC}"
-					warnAndRetryUnknownError=true
-					if [[ "${finalAttempt}" == "true" ]] ; then
-						failedRetryDownload "${remote_url}" "" ""
-					fi
-					return 1
-				else
-					tor_identity="${RANDOM}"
-					continue
-				fi
-			fi
-			break #Good to go here
-		done
 		touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
 		if [[ ! "$filename_override" == "" ]] ; then
 			filename="$filename_override"
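The removed block fetched the size with a HEAD request; per the commit note, the host stopped answering HEAD, so this loop could no longer succeed and the HTML parsing above replaces it. A quick way to confirm that failure mode from a shell (plain curl shown here; the script itself goes through tor_curl_request, and `$download_url` is a placeholder):

```bash
# Probe whether a host still answers HEAD. An empty/000 result for -I
# while a 1-byte ranged GET succeeds matches the "head no longer
# responds" behavior the commit works around.
head_code=$(curl -s -o /dev/null -w '%{http_code}' -I "$download_url")
get_code=$(curl -s -o /dev/null -w '%{http_code}' -r 0-0 "$download_url")
echo "HEAD: ${head_code}  GET: ${get_code}"
```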
@@ -607,7 +567,7 @@ anon_GetFile() {
 	if [[ -f "$file_path" ]] ; then
 		received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
 	fi
-	if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+	if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
 		containsHtml=false
 	else
 		containsHtml=true
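The only change here is the new fourth argument to CheckNoHtml: the number of bytes this attempt actually added (size on disk minus `pd_presize`, apparently the partial's size before the download resumed). Combined with the 10kb cap from the headline commit, that lets the scan target just the freshly written data. A sketch of that idea under those assumptions, with a hypothetical helper name; the real CheckNoHtml signature is known only from this call site:

```bash
# Sketch: scan only the newly downloaded bytes of a resumed file, capped
# at 10 KB, for HTML error markers. check_new_bytes_for_html is
# hypothetical; the real logic lives in CheckNoHtml.
check_new_bytes_for_html() {
    local file_path="$1" new_bytes="$2"
    ((new_bytes > 0)) || return 0   # nothing new was written this attempt
    # tail -c isolates the freshly appended region; head -c caps the scan
    if tail -c "$new_bytes" "$file_path" | head -c 10240 \
            | grep -qiE '<!DOCTYPE html|<html|<title>'; then
        return 1   # new data looks like an HTML error page
    fi
    return 0
}

# Usage mirroring the call site above:
# check_new_bytes_for_html "$file_path" "$((received_file_size - pd_presize))"
```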