# 2025.03.08 - [mad] Only check files in determining which curl header to use

# 2025.03.06 - [uploadhive] Disable global resume for servers without resume
# 2025.03.05 - [torup] Fix torup file-removed response change from the prior update
# 2025.03.05 - [mad] Add uploadflix.cc back to the recommended download/upload host lists (working)
# 2025.03.04 - [mad] Add "expect100-timeout" option to curl upload requests
# 2025.03.03 - [up_sendspace] Add sendspace.com as upload host (300MB)
# 2025.03.01 - [filedot] Fix filename parsing. Add 3 second wait for Free Download post.
# 2025.03.01 - [torup] Update file removed response
# 2025.02.26 - [uploadhive] Fix "Wrong IP" error -- use the uploadhive.com IPv4 address to connect for the POST
# 2025.02.26 - [up_lainsafe] Fix retry terminal output
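#
# Hedged sketch of the two curl items above: "--expect100-timeout" and
# "--connect-to" are standard curl options; the timeout value, file name, and
# endpoints below are illustrative, not the exact values mad.sh uses.
#
#   # Wait up to 5s for the server's "100 Continue" before sending the body:
#   curl --expect100-timeout 5 -F "file=@big.bin" https://uploadhive.com/upload
#   # Route a hostname to a fixed IPv4 address (HOST1:PORT1:HOST2:PORT2 form):
#   curl --connect-to uploadhive.com::172.67.130.243: "https://uploadhive.com/"
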
kittykat 2025-03-11 01:03:01 +00:00
parent 83d17967d6
commit a62ac882de
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
11 changed files with 817 additions and 570 deletions

hosts/filedot.sh

@@ -1,6 +1,6 @@
#! Name: filedot.sh
#! Author: kittykat
#! Version: 2024.11.21
#! Version: 2025.03.01
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -130,7 +130,7 @@ fdot_FetchFileInfo() {
continue
fi
if grep -Eqi '<input type="hidden" name="op" value="login">' <<< "${PAGE}" ; then
post_token=$(grep -oP '(?<=input type="hidden" name="token" value=").*(?=">)' <<< "$PAGE")
post_token=$(grep -oP -m 1 '(?<=input type="hidden" name="token" value=").*(?=">.*$)' <<< "$PAGE")
break
else
rm -f "${fdot_cookie_jar}";
@@ -211,8 +211,8 @@ fdot_FetchFileInfo() {
return 1
fi
if grep -Eqi '<input type="hidden" name="op" value="download1">' <<< "${resp_login}" ; then
post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$resp_login")
post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$resp_login")
post_id=$(grep -oP -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$resp_login")
post_fname=$(grep -oP -m 1 '(?<=input type="hidden" name="fname" value=").*(?=">.*$)' <<< "$resp_login")
post_fname_enc=$(urlencode_literal_grouped_case "$post_fname")
break
else
@@ -232,7 +232,8 @@ fdot_FetchFileInfo() {
filename="$post_fname"
printf "\\n"
echo -e "${GREEN}# Getting download link…${NC}"
for ((c=1; c<=26; c++)); do
sleep 3s
for ((c=1; c<=30; c++)); do
printf "${YELLOW}${NC}"
mkdir -p "${WorkDir}/.temp"
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${fdot_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
@@ -311,7 +312,7 @@ fdot_FetchFileInfo() {
fi
done
printf "\\n"
for ((d=1; d<=26; d++)); do
for ((d=1; d<=30; d++)); do
printf "${YELLOW} _${NC}"
mkdir -p "${WorkDir}/.temp"
download_url=""
@@ -372,7 +373,7 @@ fdot_FetchFileInfo() {
return 1
fi
if grep -Eqi 'class="bigres"><a href="' <<< "${response}" ; then
download_url=$(grep -oP '(?<=class="bigres"><a href=").*(?="><img src=)' <<< "$response")
download_url=$(grep -oP -m 1 '(?<=class="bigres"><a href=").*(?="><img src=.*$)' <<< "$response")
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
if [[ -z $download_url ]] ; then
if ((d >= 30)) ; then
@@ -410,10 +411,10 @@ fdot_FetchFileInfo() {
if [[ -z $file_header ]] ; then
if [[ $j == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract file info.${NC}"
echo -e "${RED}| Failed to extract file info [1]${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
failedRetryDownload "${remote_url}" "Failed to extract file info [1]" ""
fi
return 1
else
@@ -423,25 +424,25 @@ fdot_FetchFileInfo() {
if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
if [[ $j == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract file info${NC}"
echo -e "${RED}| Failed to extract file info [2]${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
failedRetryDownload "${remote_url}" "Failed to extract file info [2]" ""
fi
return 1
else
continue
fi
fi
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
file_size_bytes=$(grep -oPi -m 1 '(?<=content-length: ).*$' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
if [[ -z "$file_size_bytes" ]]; then
if [[ $j == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract file info.${NC}"
echo -e "${RED}| Failed to extract file info [3]${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
failedRetryDownload "${remote_url}" "Failed to extract file info [3]" ""
fi
return 1
else
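
Note: the "-m 1" added to the grep extractions above makes grep stop after the first matching line, so pages that repeat a hidden input yield a single token rather than a multi-line capture. A minimal illustration with hypothetical input:

  printf 'value="abc">\nvalue="def">\n' | grep -oP -m 1 '(?<=value=").*(?=">.*$)'
  # prints: abc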

hosts/torup.sh

@@ -1,6 +1,6 @@
#! Name: torup.sh
#! Author: kittykat
#! Version: 2024.11.09
#! Version: 2025.03.05
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -96,6 +96,7 @@ torp_FetchFileInfo() {
debugHtml "${remote_url##*/}" "torp_fetch$i" "${response}"
fi
if [[ -z $response ]] ; then
rm -f "${torp_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download url [1]${NC}"
@@ -108,7 +109,8 @@ torp_FetchFileInfo() {
continue
fi
fi
if grep -Eqi "There is no such file|File was deleted because" <<< "$response"; then
if grep -Eqi "File Unavailable|This file has been disabled or deleted from our system|There is no such file" <<< "$response"; then
rm -f "${torp_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true
@@ -126,6 +128,7 @@ torp_FetchFileInfo() {
download_url="${fixed_url}/file"
break
else
rm -f "${torp_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download url [2]${NC}"
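
Note: the three added rm -f "${torp_cookie_jar}" lines clean up the per-attempt cookie jar on every early-exit path (empty response, removed file, failed extraction), not only on success.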

hosts/up_lainsafe.sh

@@ -1,6 +1,6 @@
#! Name: up_lainsafe.sh
#! Author: kittykat
#! Version: 2025.02.03
#! Version: 2025.02.26
#! Desc: Add support for uploading files to pomf2.lain.la
#! Info: https://pomf2.lain.la/<filehash>
#! MaxSize: 1GB
@@ -101,6 +101,7 @@ lain_PostFile() {
printf "\033[1A\r"
printf "\033[1A\r"
printf "\033[1A\r"
printf "\033[1A\r"
printf "\33[2K\r"
fi
echo -e "${BLUE}| Attempt:${NC} $((i+1))${NC}"
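
Note: the added fourth "\033[1A" moves the cursor up one more line before the "\33[2K" erase, so each retry attempt overwrites the previous status block in place (the 2025.02.26 "Fix retry terminal output" entry).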

hosts/up_sendspace.sh (new file, 198 lines)

@@ -0,0 +1,198 @@
#! Name: up_sendspace.sh
#! Author: kittykat
#! Version: 2025.03.03
#! Desc: Add support for uploading files to sendspace.com
#! Info: Files are accessible at https://www.sendspace.com/file/<file_code>
#! MaxSize: 300MB
#! Expire: ??
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='ss'
HostNick='sendspace'
HostFuncPrefix='ss'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
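#! Example: with the values above, the merge line appends '/ss/sendspace/ss@'
#! to ListUploadHosts; mad.sh then maps the 'ss' HostFuncPrefix to the ss_*
#! functions (e.g. ss_UploadFile) when dispatching uploads.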
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
ss_UploadFile() {
local _hostCode=${1}
local filepath=${2}
local filecnt=${3}
local pline=${4}
local filename="${filepath##*/}"
warnAndRetryUnknownError=false
exitUploadError=false
exitUploadNotAvailable=false
fileAlreadyDone=false
tor_identity="${RANDOM}"
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
MaxUploadSizeInBytes=314572800
fsize=$(GetFileSize "$filepath" "false")
if ((fsize > MaxUploadSizeInBytes)); then
rm -f "${UploadTicket}"
echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
return 1
fi
finalAttempt="false"
for ((z=0; z<=$MaxUploadRetries; z++)); do
if [[ $z -eq $MaxUploadRetries ]] ; then
finalAttempt="true"
fi
trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if ss_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
return 0
elif [[ $z -lt $MaxUploadRetries ]]; then
if [[ "${fileAlreadyDone}" == "true" ]] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
fi
fi
if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${UploadTicket}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
sleep 3
fi
done
rm -f "${UploadTicket}"
}
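#!
#! Retry contract (sketch): ss_PostFile returns 0 on success; on failure the
#! flags it sets steer the loop above -- warnAndRetryUnknownError allows a
#! retry, exitUploadError/exitUploadNotAvailable abort, fileAlreadyDone stops.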
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
ss_PostFile() {
local filepath=$1
local _hostCode=$2
local filename=$3
local fileCnt=$4
local retryCnt=$5
local finalAttempt=$6
local pline=${7}
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
echo -e "${GREEN}# Fetching post upload ticket…${NC}"
maxfetchretries=5
for ((i=1; i<=maxfetchretries; i++)); do
mkdir -p "${WorkDir}/.temp"
ss_cookie_jar=$(mktemp "${WorkDir}/.temp/ss_cookies""${instance_no}"".XXXXXX")
printf " ."
tor_identity="${RANDOM}"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${ss_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_request --insecure -L -s -b "${ss_cookie_jar}" -c "${ss_cookie_jar}" "https://sendspace.com")
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${remote_url##*/}" "${_hostCode}_upload_fetch$i" "${response}"
fi
if [[ -z $response ]] ; then
rm -f "${ss_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract upload info [1]${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file." "No response"
exitUploadError=true
return 1
else
continue
fi
fi
if grep -Eqi "blocked downloads from the Tor network|banned your IP|IP has been banned|you are banned" <<< "$response"; then
rm -f "${ss_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Blocked Tor ip${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file." "Blocked Tor ip"
exitUploadError=true
return 1
else
continue
fi
fi
if grep -Eqi 'form role="main" method="post" action="' <<< "$response"; then
printf " +\\n"
echo -e "${GREEN}| Upload info found${NC}"
post_action=$(grep -oPi -m 1 '(?<=form role="main" method="post" action=").*(?=" enctype=.*$)' <<< "$response")
post_sig=$(grep -oPi -m 1 '(?<=input type="hidden" name="signature" value=").*(?=" />.*$)' <<< "$response")
fi
if [[ -z "$post_action" ]] || [[ -z "$post_sig" ]] ; then
rm -f "${ss_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract upload info [2]${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file." "Failed to extract upload info [2]"
exitUploadError=true
return 1
else
continue
fi
else
break
fi
done
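# Replay the scraped form action and hidden 'signature' value in the
# multipart POST below; uploads without them would presumably be rejected.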
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
tor_identity="${RANDOM}"
PostUrlHost="$post_action"
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; rm -f "${ss_cookie_jar}"; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i -L \
-H "Content-Type: multipart/form-data" \
-F "terms=1" \
-F "utype=anon" \
-F "signature=$post_sig" \
-F "file[]=" \
-F "upload_file[]=@$filepath" \
-b "${ss_cookie_jar}" -c "${ss_cookie_jar}" \
"${PostUrlHost}")
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
if grep -Eqi 'aria-label="Download Page Link" href="https://www.sendspace.com' <<< "${response}" ; then
url=$(grep -oPi -m 1 '(?<=aria-label="Download Page Link" href=").*?(?=" target.*$)' <<< "$response")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="${url}"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
rm -f "${ss_cookie_jar}";
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "${response}"
return 0
else
err=$(grep -oPi '(?<="error":).*?(?=,.*$)' <<< "$response")
if [[ "${finalAttempt}" == "true" ]] ; then
rm -f "${ss_cookie_jar}";
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
exitUploadError=true
return 1
else
return 1
fi
fi
}
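#!
#! Note: success is detected via the 'Download Page Link' anchor in the HTML
#! response; the failure branch scrapes an '"error":' field, which assumes a
#! JSON-style error body (an unverified assumption).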
#!
#! --------------- Host Extra Functions ------------------- #
#!

hosts/uploadhive.sh

@@ -1,6 +1,6 @@
#! Name: uploadhive.sh
#! Author: kittykat
#! Version: 2025.02.17
#! Version: 2025.03.06
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -132,7 +132,7 @@ uhive_FetchFileInfo() {
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=&method_premium="
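# Pin uploadhive.com to a fixed IPv4 address for this POST (the 2025.02.26
# "Wrong IP" fix); the hard-coded address is assumed to still be current.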
response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url")
response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url" --connect-to uploadhive.com::172.67.130.243)
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${remote_url##*/}" "uhive_post" "${response}"
fi
@@ -189,7 +189,6 @@ uhive_FetchFileInfo() {
fi
return 1
fi
echo -e "download_url: $download_url"
if [[ -z $download_url ]] ; then
echo -e "${RED}| Failed to extract download link [2]${NC}"
warnAndRetryUnknownError=true
@@ -229,12 +228,15 @@ uhive_FetchFileInfo() {
#! ----------- (3) Fetch File / Download File Function --------------- #
#!
uhive_GetFile() {
echo -e "${GREEN}# Downloading…"
echo -e "${GREEN}# Downloading… ${BLUE}(no resume)${NC}"
echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
fileCnt=$1
retryCnt=$2
finalAttempt=$3
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
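# This host cannot resume: remove any partial file from a prior attempt so
# the transfer restarts from byte 0 rather than appending to a stale tail.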
if [[ -f "$file_path" ]]; then
rm -f "$file_path"
fi
for ((j=1; j<=$MaxDownloadRetries; j++)); do
pd_presize=0
if [[ -f "$file_path" ]] ; then