# 2025.02.18 - [uploadhive] Add handling of the new /cgi-bin/dl.cgi/ url tickets (WIP)
#              (unfortunately, the ticket is tied to the requesting IP, so downloads get "Wrong IP"; see the circuit-pinning sketch below)
# 2025.02.18 - [up_oshi] Add Manage url as comment on uploads
# 2025.02.18 - [up_oshi / oshi] Use the /nossl/ url and plain http
# 2025.02.17 - [gofile] Add a random sleep if a 429 (Too Many Requests) response is detected; see the backoff sketch below
# 2025.02.17 - [*ALL] Audit and update all single-bracket test operations; see the [ vs [[ sketch below
# 2025.02.17 - [filehaus] Fix downloading from fh
# 2025.02.15 - [uploadbay] Update url regex to accept an alternate url form
# 2025.02.15 - [up_sendnow] Add send.now as upload host
# 2025.02.15 - [sendnow] Fix handling of filenames with special characters in the url; see the percent-encoding sketch below
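
Editor's sketch for the uploadhive WIP above: if the dl.cgi ticket is bound to the requesting IP, the ticket fetch and the download must leave through the same Tor exit. One way is SOCKS-auth stream isolation (identical credentials share a circuit under Tor's default IsolateSOCKSAuth); the URL shape and names here are hypothetical, not from the repo.

# Hypothetical: pin both requests to one Tor circuit via identical SOCKS auth.
pin="ticket${RANDOM}"                                  # one identity for BOTH requests
proxy="socks5h://${pin}:x@127.0.0.1:9050"              # socks5h: DNS resolves through Tor too
ticket_page=$(curl -s --proxy "$proxy" "https://uploadhive.com/cgi-bin/dl.cgi/FILEID")
dl_url=$(grep -oP 'https://[^"]+/cgi-bin/dl\.cgi/[^"]+' <<< "$ticket_page" | head -n1)
curl --proxy "$proxy" --output file.bin "$dl_url"      # same exit IP, so no "Wrong IP"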
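A minimal sketch of the gofile 429 handling described above; the endpoint variable, the assumption that tor_curl_request passes --write-out through to curl, and the 30-90s window (standing in for the script's sleepRandomSecs-style helper) are all placeholders.

http_code=$(tor_curl_request -s --output /tmp/resp.json --write-out '%{http_code}' "$api_url")
if [[ "$http_code" == "429" ]]; then
    pause=$(( (RANDOM % 61) + 30 ))   # random 30-90s backoff before retrying
    echo "Rate limited (429); sleeping ${pause}s…"
    sleep "$pause"
fi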
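Why the [*ALL] bracket audit matters: [ is an ordinary command whose unquoted expansions undergo word splitting and globbing, while [[ is bash syntax that skips both; the diff below also moves numeric counters into (( )). A quick illustration:

var="two words"
[ -n $var ] && echo ok      # breaks: expands to [ -n two words ], "too many arguments"
[[ -n $var ]] && echo ok    # safe: no word splitting inside [[ ]]

i=7
[ $i -ge 5 ] && echo big    # works, but errors out if $i is ever empty
(( i >= 5 )) && echo big    # arithmetic context, as the updated code uses for counters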
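For the sendnow special-character fix, the repo has its own encoder (urlencode_literal_grouped_case_urlendingonly appears in the diff); a generic, ASCII-only percent-encoding sketch of the same idea:

urlencode() {                          # sketch only; ASCII input assumed
    local s="$1" out="" c i
    for (( i = 0; i < ${#s}; i++ )); do
        c="${s:$i:1}"
        case "$c" in
            [a-zA-Z0-9.~_-]) out+="$c" ;;                # unreserved chars pass through
            *) printf -v out '%s%%%02X' "$out" "'$c" ;;  # everything else becomes %XX
        esac
    done
    printf '%s\n' "$out"
}
urlencode "my file (v2).zip"           # -> my%20file%20%28v2%29.zip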
kittykat 2025-02-19 13:41:07 +00:00
parent d48116dbe3
commit d62376f7a8
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
103 changed files with 3541 additions and 3517 deletions

@@ -43,24 +43,24 @@ pd_DownloadFile() {
tor_identity="${RANDOM}"
finalAttempt="false"
for ((z=0; z<=$MaxUrlRetries; z++)); do
-if [ $z -eq $MaxUrlRetries ] ; then
+if [[ $z -eq $MaxUrlRetries ]] ; then
finalAttempt="true"
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if pd_FetchFileInfo $finalAttempt && pd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
return 0
-elif [ $z -lt $MaxUrlRetries ]; then
-if [ "${fileAlreadyDone}" == "true" ] ; then
+elif [[ $z -lt $MaxUrlRetries ]]; then
+if [[ "${fileAlreadyDone}" == "true" ]] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
fi
fi
if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
@@ -92,18 +92,18 @@ pd_FetchFileInfo() {
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid")
if [ "${DebugAllEnabled}" == "true" ] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${remote_url##*/}" "pd_fetch$i" "$response"
fi
if [ ! -z "$response" ] ; then
if [[ ! -z "$response" ]] ; then
if grep -q -Eq '"views":' <<< "$response"; then
pdpreviews=$(grep -o -P '(?<="views":).+?(?=,")' <<< "$response")
fi
if grep -i -Eq "You have reached the maximum number of open download connections" <<< "$response"; then
-if [ $i -ge 5 ] ; then
+if ((i >= 5)) ; then
printf "\\n"
echo -e "${YELLOW}| Bad node. Reached the maximum number of open download connections…${NC}"
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
@@ -135,7 +135,7 @@ pd_FetchFileInfo() {
if ((i > 1)) ; then
printf "\\n"
fi
if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_pixeldrain" == "true" ]; then
if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [[ "$PJSCloud_pixeldrain" == "true" ]]; then
if ! grep -Eq "pjscloud.sh" <<< "$LoadPlugins" ; then
echo -e "${RED}| Pixeldrain viewpump requires pjscloud.sh plugin.${NC}"
failedRetryDownload "${remote_url}" "Captcha Rate Limited (needs view pumping). Requires pjscloud.sh plugin."
@@ -157,7 +157,7 @@ pd_FetchFileInfo() {
tor_identity="${RANDOM}"
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $jsonRequest; echo ""; tput cnorm; exit" 0 1 2 3 6 15
resp_pump=$(pjscloud_tor_request "https://pixeldrain.com/u/$fileid")
if [ "${DebugAllEnabled}" == "true" ] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${remote_url##*/}" "pd_pump$k" "preViews: $pdpreviews"$'\n'"postViews: $pdpostviews"$'\n'"$resp_pump"
fi
if grep -q -Eq 'Error: Forbidden' <<< "$resp_pump"; then
@@ -182,7 +182,7 @@ pd_FetchFileInfo() {
echo -e "| Final views: $pdpostviews (+1)"
echo -e "| Waiting a few seconds to allow pd views to update…"
sleepRandomSecs 45 120
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
return 1
fi
@@ -207,22 +207,22 @@ pd_FetchFileInfo() {
printf "\\n"
fi
echo -e "${YELLOW}| Unknown availability: $pd_message${NC}"
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
return 1
fi
fi
fi
-if [ $i -gt 1 ] ; then
+if ((i > 1)) ; then
printf "\\n"
fi
echo -e "| Current views: $pdpreviews"
break
else
-if [ $i -ge 5 ] ; then
+if ((i >= 5)) ; then
printf "\\n"
echo -e "${YELLOW}| No response…${NC}"
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
@@ -233,14 +233,14 @@ pd_FetchFileInfo() {
fi
done
filename=$(grep -oP '(?<="name":")[^"]+(?=")' <<< "$response")
-touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
-if [ ! "$filename_override" == "" ] ; then
+touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+if [[ ! "$filename_override" == "" ]] ; then
filename="$filename_override"
fi
filename=$(sanitize_file_or_folder_name "${filename}")
if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then
if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then
echo -e "${RED}| Failed to extract file name.${NC}"
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
@@ -248,13 +248,13 @@ pd_FetchFileInfo() {
echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
for ((i=1; i<=6; i++)); do
pdheadurl="https://pixeldrain.com/api/file/${fileid}"
if [ "${UsePixeldrainBypass}" == "true" ]; then
if [[ "${UsePixeldrainBypass}" == "true" ]]; then
pdheadurl="https://pd.cybar.xyz/$fileid"
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl")
if [ "${DebugAllEnabled}" == "true" ] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${remote_url##*/}" "pd_head$i" "url: ${pdheadurl}"$'\n'"${file_header}"
fi
if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
@@ -265,7 +265,7 @@ pd_FetchFileInfo() {
return 1
fi
fi
if [ "${UsePixeldrainBypass}" == "true" ]; then
if [[ "${UsePixeldrainBypass}" == "true" ]]; then
download_url=$(grep -oP '(?<=location: ).*$' <<< "$file_header")
download_url="${download_url//[$'\t\r\n']}"
else
@@ -274,8 +274,8 @@ pd_FetchFileInfo() {
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
-if [ -z $file_size_bytes ] ; then
-if [ "${finalAttempt}" == "true" ] ; then
+if [[ -z $file_size_bytes ]] ; then
+if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryDownload "${remote_url}" "Filesize not found!" ""
fi
echo -e "${YELLOW}| Filesize not found… retry${NC}"
@@ -294,7 +294,7 @@ pd_FetchFileInfo() {
if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
return 1
fi
if [ "${UsePixeldrainBypass}" == "true" ]; then
if [[ "${UsePixeldrainBypass}" == "true" ]]; then
echo -e "| ${BLUE}PixelDrain bypass:${NC} Knight beds queen ${RED}]${NC}°${PINK}----${RED}[${NC} ♞♝ ${NC}|▀▄▀▄▀▄▀▄▀▄▀▄▀▄▀▄|${NC}"
fi
echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
@@ -311,7 +311,7 @@ pd_GetFile() {
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
for ((j=1; j<=$MaxDownloadRetries; j++)); do
pd_presize=0
if [ -f "$file_path" ] ; then
if [[ -f "$file_path" ]] ; then
pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
fi
echo -e "Download Url: $download_url"
@@ -319,8 +319,8 @@ pd_GetFile() {
tor_identity="${RANDOM}"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if [ "${UseTorCurlImpersonate}" == "true" ]; then
if [ "${RateMonitorEnabled}" == "true" ]; then
if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
if [[ "${RateMonitorEnabled}" == "true" ]]; then
tor_curl_request --insecure \
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
--referer "$file_url" "$download_url" --continue-at - --output "$file_path"
@@ -329,7 +329,7 @@ pd_GetFile() {
--referer "$file_url" "$download_url" --continue-at - --output "$file_path"
fi
else
if [ "${RateMonitorEnabled}" == "true" ]; then
if [[ "${RateMonitorEnabled}" == "true" ]]; then
tor_curl_request --insecure \
-H "User-Agent: $RandomUA" \
-H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \
@@ -349,7 +349,7 @@ pd_GetFile() {
fi
fi
received_file_size=0
if [ -f "$file_path" ] ; then
if [[ -f "$file_path" ]] ; then
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
fi
if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
@@ -358,9 +358,9 @@ pd_GetFile() {
containsHtml=true
fi
downDelta=$(( received_file_size - pd_presize ))
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
if [ -f "${file_path}" ] ; then
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then
if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then
if [[ -f "${file_path}" ]] ; then
if ((pd_presize > 0)); then
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
@@ -372,15 +372,15 @@ pd_GetFile() {
fi
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
if [ -f "${file_path}" ] ; then
elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then
if [[ -f "${file_path}" ]] ; then
if ((pd_presize > 0)); then
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
@@ -392,21 +392,21 @@ pd_GetFile() {
fi
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
-elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
-if [ -f "$file_path" ] ; then
+elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then
+if [[ -f "$file_path" ]] ; then
rm -rf "$file_path"
fi
echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
@@ -418,7 +418,7 @@ pd_GetFile() {
echo -e "\n${RED}Download failed, file is incomplete.${NC}"
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
if [[ "${finalAttempt}" == "true" ]] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
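
The hunks above all follow one skeleton: a sanitized lock key, a trap that cleans the lock up on exit, and a retry loop that records a hard failure only when the outer url loop is on its final attempt. A condensed restatement of that pattern, with attempt_download as a placeholder for the curl block:

CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}                  # lock-safe key for this url
trap 'rm -f "${WorkDir}/.flocks/${CLEANSTRING}"; tput cnorm; exit' 0 1 2 3 6 15
for (( j = 1; j <= MaxDownloadRetries; j++ )); do
    attempt_download && return 0                          # placeholder for the download body
    if (( j >= MaxDownloadRetries )); then
        rm -f "$flockDownload"
        if [[ "$finalAttempt" == "true" ]]; then          # only the last url pass records failure
            droppedSizeBadDownload "$remote_url" "$filename" "$received_file_size"
        fi
        return 1
    fi
done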