# 2025.02.18 - [uploadhive] Add handling of the new /cgi-bin/dl.cgi/ url tickets (WIP)
#              (unfortunately, this is tied to the requesting ip, so downloads get "Wrong IP")
# 2025.02.18 - [up_oshi] Add Manage url as comment on uploads
# 2025.02.18 - [up_oshi / oshi] Use /nossl/ url and http
# 2025.02.17 - [gofile] Add a random sleep if a 429 response is detected (too many requests)
# 2025.02.17 - [*ALL] Audit and update all single-bracket test operations
# 2025.02.17 - [filehaus] Fix downloading from fh
# 2025.02.15 - [uploadbay] Update urls regex for acceptable alternate
# 2025.02.15 - [up_sendnow] Add send.now as upload host
# 2025.02.15 - [sendnow] Fix handling of filenames with special characters in url
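The gofile module touched by the "random sleep if 429" entry is not part of this excerpt; a hedged sketch of that pattern, with hypothetical variable names, might look like:

```bash
# Hypothetical sketch of the gofile 429 backoff described in the log;
# the actual module is not shown in this diff. tor_curl_request is the
# script's curl wrapper seen in the hunks below.
http_code=$(tor_curl_request --insecure -s -o /dev/null -w '%{http_code}' "${fixed_url}")
if [[ "${http_code}" == "429" ]]; then
    sleep $(( (RANDOM % 16) + 5 ))   # back off a random 5-20 seconds before retrying
fi
```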
parent d48116dbe3
commit d62376f7a8
103 changed files with 3541 additions and 3517 deletions
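Every hunk below implements the "[*ALL] Audit and update all single-bracket test operations" entry, converting POSIX `[ ]` tests to bash `[[ ]]`. A minimal sketch of why the double-bracket form is more robust (variable names here are illustrative only):

```bash
# Why [[ ]] over [ ]: the single-bracket builtin word-splits and
# glob-expands unquoted variables before testing, so an empty value
# can collapse into a syntax error ("unary operator expected").
unset reply

if [[ $reply == "yes" ]]; then    # safe even unquoted: no word splitting inside [[ ]]
    echo "confirmed"
fi

# The [ ] equivalent only works reliably with defensive quoting:
if [ "${reply}" == "yes" ]; then
    echo "confirmed"
fi
```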
@@ -43,24 +43,24 @@ fs_DownloadFile() {
 tor_identity="${RANDOM}"
 finalAttempt="false"
 for ((z=0; z<=$MaxUrlRetries; z++)); do
-if [ $z -eq $MaxUrlRetries ] ; then
+if [[ $z -eq $MaxUrlRetries ]] ; then
 finalAttempt="true"
 fi
 CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
 trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
 if fs_FetchFileInfo $finalAttempt && fs_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
 return 0
-elif [ $z -lt $MaxUrlRetries ]; then
-if [ "${fileAlreadyDone}" == "true" ] ; then
+elif [[ $z -lt $MaxUrlRetries ]]; then
+if [[ "${fileAlreadyDone}" == "true" ]] ; then
 break
 fi
 if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
-if [ "${DebugAllEnabled}" == "true" ] ; then
+if [[ "${DebugAllEnabled}" == "true" ]] ; then
 debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
 fi
 fi
 if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
-if [ "${DebugAllEnabled}" == "true" ] ; then
+if [[ "${DebugAllEnabled}" == "true" ]] ; then
 debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
 fi
 rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
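Each retry iteration above re-arms a trap so the per-URL lock file is removed and the cursor restored however the script exits. A standalone sketch of that lock-and-cleanup pattern (paths and URL are illustrative):

```bash
# Sketch of the per-URL lock + cleanup pattern used in fs_DownloadFile;
# WorkDir and remote_url values here are hypothetical.
WorkDir="/tmp/demo"
remote_url="https://example.com/f/abc123"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}    # lock name: URL reduced to alphanumerics
mkdir -p "${WorkDir}/.flocks"
touch "${WorkDir}/.flocks/${CLEANSTRING}"
# Delete the lock and restore the cursor on exit or common signals.
trap 'rm -f "${WorkDir}/.flocks/${CLEANSTRING}"; tput cnorm' EXIT INT TERM
```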
@@ -89,22 +89,22 @@ fs_FetchFileInfo() {
 for ((j=1; j<=$maxfetchretries; j++)); do
 mkdir -p "${WorkDir}/.temp"
 printf " ."
-if [ "$newIdent" == "true" ] ; then
+if [[ "$newIdent" == "true" ]] ; then
 tor_identity="${RANDOM}"
 newIdent=false
 fi
 CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
 trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
 response=$(tor_curl_request --insecure -L -s "${fixed_url}")
-if [ "${DebugAllEnabled}" == "true" ] ; then
+if [[ "${DebugAllEnabled}" == "true" ]] ; then
 debugHtml "${remote_url##*/}" "fs_${fetchnum}fetch_$j" "fixed_url: ${fixed_url}"$'\n'"${response}"
 fi
 if [[ -z $response ]] ; then
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract link.${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "" ""
 fi
 return 1
@@ -135,11 +135,11 @@ fs_FetchFileInfo() {
 j=$((j-1))
 continue
 fi
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract link (unknown)${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" ""
 fi
 return 1
@@ -159,11 +159,11 @@ fs_FetchFileInfo() {
 j=$((j-1))
 continue
 fi
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract link (unknown)${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" ""
 fi
 return 1
@@ -183,11 +183,11 @@ fs_FetchFileInfo() {
 j=$((j-1))
 continue
 fi
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract link (unknown)${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" ""
 fi
 return 1
@@ -206,11 +206,11 @@ fs_FetchFileInfo() {
 download_url=$(grep -oP -m 1 '(?<=<a href=").*(?=" id="downloadlink)' <<< "$response")
 break
 fi
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract link (unknown).${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "" ""
 fi
 return 1
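The link extraction above leans on GNU grep's PCRE mode. A minimal standalone sketch of the lookbehind/lookahead technique, run against hypothetical HTML rather than a live response:

```bash
# Sketch of the grep -oP lookaround extraction used above; the HTML is
# a hypothetical stand-in for a real filehost page.
response='<a href="https://cdn.example.com/f/abc123" id="downloadlink">Download</a>'
# (?<=...) and (?=...) match around the URL without capturing the
# anchor markup itself; -m 1 stops at the first hit.
download_url=$(grep -oP -m 1 '(?<=<a href=").*(?=" id="downloadlink)' <<< "$response")
echo "$download_url"   # -> https://cdn.example.com/f/abc123
```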
@@ -224,15 +224,15 @@ fs_FetchFileInfo() {
 trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
 tor_identity="${RANDOM}"
 file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
-if [ "${DebugAllEnabled}" == "true" ] ; then
+if [[ "${DebugAllEnabled}" == "true" ]] ; then
 debugHtml "${remote_url##*/}" "fs_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
 fi
 if [[ -z $file_header ]] ; then
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract file info${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "" ""
 fi
 return 1
@@ -241,11 +241,11 @@ fs_FetchFileInfo() {
 fi
 fi
 if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract file info${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "" ""
 fi
 return 1
@@ -253,14 +253,14 @@ fs_FetchFileInfo() {
 continue
 fi
 fi
-if [ "$filename_override" == "" ] ; then
+if [[ "$filename_override" == "" ]] ; then
 filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header")
 if [[ -z "$filename" ]]; then
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract file name${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "" ""
 fi
 return 1
@@ -272,11 +272,11 @@ fs_FetchFileInfo() {
 file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
 file_size_bytes=${file_size_bytes//[$'\t\r\n']}
 if [[ -z "$file_size_bytes" ]]; then
-if [ $j == $maxfetchretries ] ; then
+if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract file size.${NC}"
 warnAndRetryUnknownError=true
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "" ""
 fi
 return 1
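These two hunks read the file name and size out of the HEAD response captured in file_header. A condensed sketch of that parsing, run against a hypothetical header block instead of a live tor_curl_request:

```bash
# Sketch of the header parsing above, with a hypothetical HEAD response.
file_header=$'HTTP/1.1 200 OK\r\ncontent-disposition: attachment; filename="archive.zip"\r\ncontent-length: 1048576\r\n'
filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header")
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}   # strip the CR left over from the header line
echo "$filename: $file_size_bytes bytes"          # -> archive.zip: 1048576 bytes
```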
@@ -286,15 +286,15 @@ fs_FetchFileInfo() {
 fi
 break #Good to go here
 done
-touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
-if [ ! "$filename_override" == "" ] ; then
+touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+if [[ ! "$filename_override" == "" ]] ; then
 filename="$filename_override"
 fi
 filename=$(sanitize_file_or_folder_name "${filename}")
 printf "\\n"
 echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
-if [ -z $file_size_bytes ] ; then
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ -z $file_size_bytes ]] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Filesize not found!" ""
 fi
 echo -e "${YELLOW}| Filesize not found… retry${NC}"
@@ -325,19 +325,19 @@ fs_GetFile() {
 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
 for ((j=1; j<=$MaxDownloadRetries; j++)); do
 pd_presize=0
-if [ -f "$file_path" ] ; then
+if [[ -f "$file_path" ]] ; then
 pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
 fi
 tor_identity="${RANDOM}"
 CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
 trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-if [ "${RateMonitorEnabled}" == "true" ]; then
+if [[ "${RateMonitorEnabled}" == "true" ]]; then
 tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 else
 tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
 fi
 received_file_size=0
-if [ -f "$file_path" ] ; then
+if [[ -f "$file_path" ]] ; then
 received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
 fi
 if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
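The download call above combines resume support with a minimum-speed watchdog. A trimmed sketch using plain curl in place of the script's tor_curl_request wrapper (the wrapper itself is not shown in this diff), with hypothetical values:

```bash
# Sketch of the resumable, rate-monitored fetch above.
DownloadSpeedMin=1024           # abort if under 1 KiB/s...
DownloadTimeoutInterval=30      # ...for 30 consecutive seconds
download_url="https://cdn.example.com/f/abc123"   # hypothetical
file_path="./abc123.part"

# --continue-at - resumes from the current size of $file_path, so a
# retry loop around this call keeps extending the same partial file.
curl -L -G --speed-limit "$DownloadSpeedMin" --speed-time "$DownloadTimeoutInterval" \
     --continue-at - --output "$file_path" "$download_url"
```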
@@ -346,9 +346,9 @@ fs_GetFile() {
 containsHtml=true
 fi
 downDelta=$(( received_file_size - pd_presize ))
-if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
-if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
-if [ -f "${file_path}" ] ; then
+if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then
+if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then
+if [[ -f "${file_path}" ]] ; then
 if ((pd_presize > 0)); then
 echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
 truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
@@ -360,15 +360,15 @@ fs_GetFile() {
 fi
 if ((j >= $MaxDownloadRetries)) ; then
 rm -f "$flockDownload";
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
 fi
 return 1
 else
 continue
 fi
-elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
-if [ -f "${file_path}" ] ; then
+elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then
+if [[ -f "${file_path}" ]] ; then
 if ((pd_presize > 0)); then
 echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
 truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
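The AutoRepairBadPartials logic in these hunks treats a gain of fewer than 1024 bytes as a sign the exit node returned an HTML error page instead of file data, and rolls the partial back to its pre-attempt size rather than discarding it. A sketch of that idea, using truncate(1) where the script calls its own truncateDownload helper; the sizes are hypothetical:

```bash
# Sketch of the bad-partial rollback above; truncate(1) stands in for
# the script's truncateDownload helper.
file_path="./abc123.part"       # hypothetical partial download
pd_presize=1048576              # size before this attempt
received_file_size=1048800      # size after: only 224 new bytes
downDelta=$(( received_file_size - pd_presize ))
if (( downDelta > 0 && downDelta < 1024 )); then
    # A sub-1 KiB gain is likely an HTML error page appended to the
    # partial; cut the file back to its previous, known-good length.
    truncate -s "$pd_presize" "$file_path"
fi
```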
@@ -380,21 +380,21 @@ fs_GetFile() {
 fi
 if ((j >= $MaxDownloadRetries)) ; then
 rm -f "$flockDownload";
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
 fi
 return 1
 else
 continue
 fi
-elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
-if [ -f "$file_path" ] ; then
+elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then
+if [[ -f "$file_path" ]] ; then
 rm -rf "$file_path"
 fi
 echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
 if ((j >= $MaxDownloadRetries)) ; then
 rm -f "$flockDownload";
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
 fi
 return 1
@@ -406,7 +406,7 @@ fs_GetFile() {
 echo -e "\n${RED}Download failed, file is incomplete.${NC}"
 if ((j >= $MaxDownloadRetries)) ; then
 rm -f "$flockDownload";
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
 droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
 fi
 return 1