# 2025.02.18 - [uploadhive] Add handling of the new /cgi-bin/dl.cgi/ url tickets (WIP)
#               (unfortunately, this is tied to the requesting ip, so downloads get "Wrong IP")
# 2025.02.18 - [up_oshi] Add Manage url as comment on uploads
# 2025.02.18 - [up_oshi / oshi] use /nossl/ url and http
# 2025.02.17 - [gofile] Add a random sleep if 429 response detected (too many requests)
# 2025.02.17 - [*ALL] Audit and update all single bracket operations
# 2025.02.17 - [filehaus] Fix downloading from fh
# 2025.02.15 - [uploadbay] Update urls regex for acceptable alternate
# 2025.02.15 - [up_sendnow] Add send.now as upload host
# 2025.02.15 - [sendnow] Fix handling of filenames with special characters in url
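
The [gofile] entry above adds a randomized pause when the host answers 429. A minimal sketch of that pattern, assuming a helper named tor_curl_request and an enclosing retry loop (names and delay range here are illustrative, not the project's actual code):

    # Sketch only: back off for a random 15-44 s when the host rate-limits us.
    http_code=$(tor_curl_request --insecure -s -o /dev/null -w '%{http_code}' "$download_url")
    if [[ "$http_code" == "429" ]] ; then
        pause=$(( (RANDOM % 30) + 15 ))
        echo "Too many requests (429); sleeping ${pause}s before retrying..."
        sleep "$pause"
        continue    # back to the top of the enclosing retry loop
    fi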
Author: kittykat, 2025-02-19 13:41:07 +00:00
Commit: d62376f7a8 (parent: d48116dbe3)
Signed by: kittykat, GPG key ID: E3F1556620F70C3C
103 changed files with 3541 additions and 3517 deletions
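
Most of the diff below is the [*ALL] bracket audit: POSIX `[` tests replaced by bash's `[[` keyword, which performs no word splitting or glob expansion on unquoted parameters. A two-line illustration of the failure mode being audited away:

    unset maybe_empty
    [ $maybe_empty == "true" ] && echo yes    # expands to `[ == "true" ]`: "unary operator expected"
    [[ $maybe_empty == "true" ]] && echo yes  # [[ sees the empty string; the test is simply false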


@@ -43,24 +43,24 @@ fh_DownloadFile() {
     tor_identity="${RANDOM}"
     finalAttempt="false"
     for ((z=0; z<=$MaxUrlRetries; z++)); do
-        if [ $z -eq $MaxUrlRetries ] ; then
+        if [[ $z -eq $MaxUrlRetries ]] ; then
             finalAttempt="true"
         fi
         CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
         trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
         if fh_FetchFileInfo $finalAttempt && fh_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
             return 0
-        elif [ $z -lt $MaxUrlRetries ]; then
-            if [ "${fileAlreadyDone}" == "true" ] ; then
+        elif [[ $z -lt $MaxUrlRetries ]]; then
+            if [[ "${fileAlreadyDone}" == "true" ]] ; then
                 break
             fi
             if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
-                if [ "${DebugAllEnabled}" == "true" ] ; then
+                if [[ "${DebugAllEnabled}" == "true" ]] ; then
                     debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
                 fi
             fi
             if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
-                if [ "${DebugAllEnabled}" == "true" ] ; then
+                if [[ "${DebugAllEnabled}" == "true" ]] ; then
                     debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
                 fi
                 rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
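
The hunk above re-arms a cleanup trap on every pass so the per-URL lock disappears on any exit or interrupt. Reduced to its essentials, the pattern looks like this (paths as in the script; note the single-quoted form shown here defers expansion to trap time, whereas the script's double quotes bake the values in when the trap is set):

    lock="${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"   # URL squeezed to a filesystem-safe token
    touch "$lock"
    # On EXIT, HUP, INT, QUIT, ABRT or TERM: drop the lock and restore the cursor.
    trap 'rm -f "$lock"; tput cnorm; exit' 0 1 2 3 6 15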
@@ -99,10 +99,10 @@ fh_FetchFileInfo() {
         CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
         trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
         file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url")
-        if [ "${DebugAllEnabled}" == "true" ] ; then
+        if [[ "${DebugAllEnabled}" == "true" ]] ; then
             debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
         fi
-        if [ ! -z "$file_header" ] ; then
+        if [[ ! -z "$file_header" ]] ; then
            if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
                printf "\\n"
                echo -e "${RED}| The file has been removed (404).${NC}"
@@ -111,10 +111,10 @@ fh_FetchFileInfo() {
                return 1
            fi
            if ! grep -Eqi '200|content-length' <<< "${file_header}" ; then
-               if [ $j == $maxfetchretries ] ; then
+               if [[ $j == $maxfetchretries ]] ; then
                    printf "\\n"
                    echo -e "${RED}| Failed to extract file size.${NC}"
-                   if [ "${finalAttempt}" == "true" ] ; then
+                   if [[ "${finalAttempt}" == "true" ]] ; then
                        failedRetryDownload "${remote_url}" "" ""
                    fi
                    return 1
@@ -137,15 +137,15 @@ fh_FetchFileInfo() {
         printf "\\n"
         break
     done
-    touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
-    if [ ! "$filename_override" == "" ] ; then
+    touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+    if [[ ! "$filename_override" == "" ]] ; then
         filename="$filename_override"
     fi
     filename=$(sanitize_file_or_folder_name "${filename}")
     file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
     file_size_bytes=${file_size_bytes//[$'\t\r\n']}
-    if [ -z $file_size_bytes ] ; then
-        if [ "${finalAttempt}" == "true" ] ; then
+    if [[ -z $file_size_bytes ]] ; then
+        if [[ "${finalAttempt}" == "true" ]] ; then
            failedRetryDownload "${remote_url}" "Filesize not found!" ""
        fi
        echo -e "${YELLOW}| Filesize not found… retry${NC}"
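
The size probe in this hunk is a HEAD request plus a case-insensitive header grep; the `${file_size_bytes//[$'\t\r\n']}` expansion matters because HTTP header lines end in CRLF, and a stray carriage return would poison later size comparisons. Standalone, with plain curl in place of the script's Tor wrapper:

    file_header=$(curl --insecure -L --head -s "$download_url")
    file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
    file_size_bytes=${file_size_bytes//[$'\t\r\n']}   # strip the CR terminator
    echo "expecting ${file_size_bytes} bytes"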
@@ -181,19 +181,19 @@ fh_GetFile() {
         splitnum=1
     fi
     pd_presize=0
-    if [ -f "$file_path" ] ; then
+    if [[ -f "$file_path" ]] ; then
         pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
     fi
     tor_identity="${RANDOM}"
     CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
     trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-    if [ "${RateMonitorEnabled}" == "true" ]; then
+    if [[ "${RateMonitorEnabled}" == "true" ]]; then
         tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
     else
         tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
     fi
     received_file_size=0
-    if [ -f "$file_path" ] ; then
+    if [[ -f "$file_path" ]] ; then
         received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
     fi
     if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
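
The download call above leans on two curl behaviours: `--continue-at -` resumes from whatever partial is already on disk, and `--speed-limit`/`--speed-time` abort a transfer that stays below a bytes-per-second floor for too long, which is what lets the rate-monitor path fail fast on a stalled Tor circuit. Reduced to plain curl with illustrative numbers:

    # Abort if throughput stays under 1 KiB/s for 60 s; otherwise resume the partial.
    curl --speed-limit 1024 --speed-time 60 \
         --continue-at - --output "$file_path" "$download_url"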
@@ -202,9 +202,9 @@ fh_GetFile() {
         containsHtml=true
     fi
     downDelta=$(( received_file_size - pd_presize ))
-    if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
-        if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
-            if [ -f "${file_path}" ] ; then
+    if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then
+        if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then
+            if [[ -f "${file_path}" ]] ; then
                 if ((pd_presize > 0)); then
                     echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
                     truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
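
`CheckNoHtml` itself is not in this diff; from how it is used, it flags downloads whose payload is an HTML error page rather than the requested file. A hypothetical probe in that spirit, not the project's actual implementation:

    # Hypothetical: a real payload should not open with an HTML document.
    if head -c 512 "$file_path" | grep -qiE '<(!doctype|html|head|body)' ; then
        containsHtml=true
    fi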
@@ -216,15 +216,15 @@ fh_GetFile() {
             fi
             if ((j >= $MaxDownloadRetries)) ; then
                 rm -f "$flockDownload";
-                if [ "${finalAttempt}" == "true" ] ; then
+                if [[ "${finalAttempt}" == "true" ]] ; then
                     droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
                 fi
                 return 1
             else
                 continue
             fi
-        elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
-            if [ -f "${file_path}" ] ; then
+        elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then
+            if [[ -f "${file_path}" ]] ; then
                 if ((pd_presize > 0)); then
                     echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
                     truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
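
When `AutoRepairBadPartials` is on and the failed pass appended a sub-1024-byte tail (almost certainly an HTML error page), the file is reverted to its previous good size rather than deleted, so earlier progress survives the retry. Assuming `truncateDownload` does essentially this (its body is not part of this diff):

    # Hypothetical core of truncateDownload: cut the tainted tail,
    # keeping the pd_presize bytes that were good before this attempt.
    truncate --size="$pd_presize" "$file_path"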
@@ -236,21 +236,21 @@ fh_GetFile() {
             fi
             if ((j >= $MaxDownloadRetries)) ; then
                 rm -f "$flockDownload";
-                if [ "${finalAttempt}" == "true" ] ; then
+                if [[ "${finalAttempt}" == "true" ]] ; then
                     droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
                 fi
                 return 1
             else
                 continue
             fi
-        elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
-            if [ -f "$file_path" ] ; then
+        elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then
+            if [[ -f "$file_path" ]] ; then
                 rm -rf "$file_path"
             fi
             echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
             if ((j >= $MaxDownloadRetries)) ; then
                 rm -f "$flockDownload";
-                if [ "${finalAttempt}" == "true" ] ; then
+                if [[ "${finalAttempt}" == "true" ]] ; then
                     droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
                 fi
                 return 1
@@ -262,7 +262,7 @@ fh_GetFile() {
             echo -e "\n${RED}Download failed, file is incomplete.${NC}"
             if ((j >= $MaxDownloadRetries)) ; then
                 rm -f "$flockDownload";
-                if [ "${finalAttempt}" == "true" ] ; then
+                if [[ "${finalAttempt}" == "true" ]] ; then
                     droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
                 fi
                 return 1
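
Finally, the [sendnow] entry in the changelog concerns filenames with special characters in URLs; the diff for that host is not part of this excerpt. A generic percent-encoder of the kind such a fix needs, working byte-wise via LC_ALL=C (illustrative, not the project's actual helper):

    urlencode() {
        local LC_ALL=C              # iterate bytes, not multibyte characters
        local s="$1" out="" c i
        for (( i=0; i<${#s}; i++ )); do
            c=${s:i:1}
            case "$c" in
                [a-zA-Z0-9.~_-]) out+="$c" ;;                  # RFC 3986 unreserved set
                *) printf -v c '%%%02X' "'$c"; out+="$c" ;;    # everything else escaped
            esac
        done
        printf '%s\n' "$out"
    }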