# 2025.03.08 - [mad] Only check files when determining which curl header to use

# 2025.03.06 - [uploadhive] Disable global resume for servers without resume support
# 2025.03.05 - [torup] Fix the file-removed response change from the prior update
# 2025.03.05 - [mad] Add uploadflix.cc back to the recommended download/upload host lists (working again)
# 2025.03.04 - [mad] Add the "expect100-timeout" option to the curl upload request (sketch below)
# 2025.03.03 - [up_sendspace] Add sendspace.com as upload host (300MB)
# 2025.03.01 - [filedot] Fix filename parsing. Add a 3-second wait for the Free Download post.
# 2025.03.01 - [torup] Update file removed response
# 2025.02.26 - [uploadhive] Fix "Wrong IP" error -- connect with uploadhive.com's IPv4 address for the post (sketch below)
# 2025.02.26 - [up_lainsafe] Fix retry terminal output
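
A note on the 2025.03.04 entry: `--expect100-timeout` is a curl command-line option rather than a header itself; it bounds how long curl waits for a `100 Continue` reply after sending `Expect: 100-continue` on a large POST. A minimal sketch of the idea, with the URL, file, and timeout value as illustrative placeholders, not values from this commit:

```sh
# Illustrative only: cap the 100-continue wait at 5 seconds so a server
# that never answers the Expect: header cannot stall the upload.
curl --expect100-timeout 5 \
     -F "file=@./example.bin" \
     "https://example.com/upload"
```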
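
And on the 2025.02.26 uploadhive fix: one way to pin a curl POST to a specific IPv4 address is `--resolve`, which pre-seeds DNS for a host:port pair (`-4`/`--ipv4` merely forces IPv4 resolution). A sketch under that assumption; the lookup, form field, and upload path are placeholders, not the script's real ones:

```sh
# Illustrative only: resolve an A record ourselves, then pin the
# connection for uploadhive.com:443 to that IPv4 address.
ip4=$(getent ahostsv4 uploadhive.com | awk 'NR==1 {print $1}')
curl --resolve "uploadhive.com:443:${ip4}" \
     -F "file=@./example.bin" \
     "https://uploadhive.com/upload"
```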
kittykat 2025-03-11 01:03:01 +00:00
parent 83d17967d6
commit a62ac882de
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
11 changed files with 817 additions and 570 deletions

@@ -1,6 +1,6 @@
#! Name: filedot.sh
#! Author: kittykat
-#! Version: 2024.11.21
+#! Version: 2025.03.01
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -130,7 +130,7 @@ fdot_FetchFileInfo() {
continue
fi
if grep -Eqi '<input type="hidden" name="op" value="login">' <<< "${PAGE}" ; then
-post_token=$(grep -oP '(?<=input type="hidden" name="token" value=").*(?=">)' <<< "$PAGE")
+post_token=$(grep -oP -m 1 '(?<=input type="hidden" name="token" value=").*(?=">.*$)' <<< "$PAGE")
break
else
rm -f "${fdot_cookie_jar}";
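
The `-m 1` being added to these grep calls stops the scan after the first matching line, so a page that happens to repeat the same hidden input can no longer produce multi-line output that would corrupt the later POST. A self-contained illustration (the HTML snippet is made up):

```sh
html='<input type="hidden" name="token" value="abc123">
<input type="hidden" name="token" value="zzz999">'
# Without -m 1 this would print both values; with it, only the first.
grep -oP -m 1 '(?<=name="token" value=").*(?=">.*$)' <<< "$html"
# -> abc123
```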
@@ -211,8 +211,8 @@ fdot_FetchFileInfo() {
return 1
fi
if grep -Eqi '<input type="hidden" name="op" value="download1">' <<< "${resp_login}" ; then
-post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$resp_login")
-post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$resp_login")
+post_id=$(grep -oP -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$resp_login")
+post_fname=$(grep -oP -m 1 '(?<=input type="hidden" name="fname" value=").*(?=">.*$)' <<< "$resp_login")
post_fname_enc=$(urlencode_literal_grouped_case $post_fname)
break
else
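
Once `op`, `id`, and `fname` have been scraped from the hidden inputs, hosts of this kind typically expect them posted back to obtain the free-download page. A rough sketch of that step; the domain is a placeholder and the script's real request (built further down in the file) may carry extra fields:

```sh
# Placeholder sketch: re-post the scraped hidden fields along with the
# session cookie jar. --data-urlencode handles the percent-encoding that
# the script otherwise does via urlencode_literal_grouped_case.
curl -s -b "${fdot_cookie_jar}" \
     --data-urlencode "op=download1" \
     --data-urlencode "id=${post_id}" \
     --data-urlencode "fname=${post_fname}" \
     "https://filedot.example/${post_id}"
```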
@@ -232,7 +232,8 @@ fdot_FetchFileInfo() {
filename="$post_fname"
printf "\\n"
echo -e "${GREEN}# Getting download link…${NC}"
-for ((c=1; c<=26; c++)); do
+sleep 3s
+for ((c=1; c<=30; c++)); do
printf "${YELLOW}${NC}"
mkdir -p "${WorkDir}/.temp"
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${fdot_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
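
The new `sleep 3s` is the changelog's 3-second wait before the Free Download post, and the attempt cap rises from 26 to 30. Reduced to its skeleton, the pattern is (with `fetch_free_download_page` as a hypothetical stand-in for the real request logic):

```sh
sleep 3s                              # let the host's countdown elapse first
for ((c=1; c<=30; c++)); do           # then poll up to 30 times
    printf '.'                        # progress marker per attempt
    fetch_free_download_page && break # placeholder for the real request
done
```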
@@ -311,7 +312,7 @@ fdot_FetchFileInfo() {
fi
done
printf "\\n"
-for ((d=1; d<=26; d++)); do
+for ((d=1; d<=30; d++)); do
printf "${YELLOW} _${NC}"
mkdir -p "${WorkDir}/.temp"
download_url=""
@@ -372,7 +373,7 @@ fdot_FetchFileInfo() {
return 1
fi
if grep -Eqi 'class="bigres"><a href="' <<< "${response}" ; then
-download_url=$(grep -oP '(?<=class="bigres"><a href=").*(?="><img src=)' <<< "$response")
+download_url=$(grep -oP -m 1 '(?<=class="bigres"><a href=").*(?="><img src=.*$)' <<< "$response")
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
if [[ -z $download_url ]] ; then
if ((d >= 26)) ; then
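
Note that the guard `((d >= 26))` just above still reflects the old loop bound, so with the new limit of 30 the final-attempt branch can fire four iterations early. Separately, `urlencode_literal_grouped_case_urlendingonly` is a repo-internal helper not shown in this diff; a hypothetical, ASCII-only stand-in that does what the name suggests (percent-encode only the final path segment) might look like:

```sh
# Hypothetical stand-in, illustrative only: encode the last path segment
# of a URL and leave everything before it untouched.
urlencode_ending() {
    local base=${1%/*} name=${1##*/} out='' c i
    for ((i=0; i<${#name}; i++)); do
        c=${name:i:1}
        case $c in
            [A-Za-z0-9.~_-]) out+=$c ;;                 # RFC 3986 unreserved
            *) printf -v c '%%%02X' "'$c"; out+=$c ;;   # everything else
        esac
    done
    printf '%s/%s\n' "$base" "$out"
}
```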
@@ -410,10 +411,10 @@ fdot_FetchFileInfo() {
if [[ -z $file_header ]] ; then
if [[ $j == $maxfetchretries ]] ; then
printf "\\n"
-echo -e "${RED}| Failed to extract file info.${NC}"
+echo -e "${RED}| Failed to extract file info [1]${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
-failedRetryDownload "${remote_url}" "" ""
+failedRetryDownload "${remote_url}" "Failed to extract file info [1]" ""
fi
return 1
else
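
This hunk and the next apply the same diagnostic treatment: the three identical "Failed to extract file info" messages are tagged [1], [2], and [3], and the tag is now passed to `failedRetryDownload` instead of an empty string, so retry logs can distinguish a failed header fetch from a non-200 status line from a missing content-length.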
@@ -423,25 +424,25 @@ fdot_FetchFileInfo() {
if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
if [[ $j == $maxfetchretries ]] ; then
printf "\\n"
-echo -e "${RED}| Failed to extract file info${NC}"
+echo -e "${RED}| Failed to extract file info [2]${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
-failedRetryDownload "${remote_url}" "" ""
+failedRetryDownload "${remote_url}" "Failed to extract file info [2]" ""
fi
return 1
else
continue
fi
fi
-file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
+file_size_bytes=$(grep -oPi -m 1 '(?<=content-length: ).*$' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
if [[ -z "$file_size_bytes" ]]; then
if [[ $j == $maxfetchretries ]] ; then
printf "\\n"
-echo -e "${RED}| Failed to extract file info.${NC}"
+echo -e "${RED}| Failed to extract file info [3]${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
-failedRetryDownload "${remote_url}" "" ""
+failedRetryDownload "${remote_url}" "Failed to extract file info [3]" ""
fi
return 1
else
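
The content-length extraction above follows the same `-m 1` pattern and then strips stray tab/CR/LF characters that raw header lines carry. In isolation, against a live header probe, the step looks like this (the URL is a placeholder):

```sh
# Placeholder sketch of the header-size probe.
file_header=$(curl -sI "https://example.com/file.bin")
file_size_bytes=$(grep -oPi -m 1 '(?<=content-length: ).*$' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}   # headers end in CRLF
echo "size: ${file_size_bytes:-unknown} bytes"
```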