# 2025.02.24 - [pixeldrain] Update "The file is IP limited" response handling retry
# 2025.02.22 - [blackcloud_onion] Add bcloud.onion download handling (url fixing)
# 2025.02.21 - [anonfile] Update cdn link parsing to handle new subdomains
# 2025.02.21 - [anonfile] Add download limit reached response handling
# 2025.02.21 - [anonfile] Update file info retrieval (head no longer responds)
# 2025.02.21 - [sendspace] Add sendspace.com as download host
# 2025.02.21 - [oshi / up_oshi] Revert /nossl/ changes for oshi.at (clearnet)
#! Name: nippy.sh
#! Author: kittykat
#! Version: 2024.11.28
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostUrls: Comma-separated list of the domains this host handles
#! HostDomainRegex: The regex used to verify matching urls
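#!
#! Quick sanity check for a HostDomainRegex (illustrative; the sample url below is hypothetical):
#!   [[ "https://nippyshare.com/v/abc123" =~ ^(http|https)://(.*\.)?nippy(share|drive|box|file|space)\.(com|me)/ ]] && echo "url matches"
#!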
HostCode='nippy'
HostNick='nippy'
HostFuncPrefix='nippy'
HostUrls='nippyshare.com, nippydrive.com, nippybox.com, nippyfile.com, nippyspace.com'
HostDomainRegex='^(http|https)://(.*\.)?nippy(share|drive|box|file|space)\.(com|me)/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
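#! With the values above, the entry appended to ListHostAndDomainRegexes looks like:
#!   /nippy/nippy/nippy/nippyshare.com, nippydrive.com, nippybox.com, nippyfile.com, nippyspace.com:^(http|https)://(.*\.)?nippy(share|drive|box|file|space)\.(com|me)/@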
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#! <HostFuncPrefix>_DownloadFile()
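#! mad.sh is expected to invoke it along these lines (a sketch; the exact call site lives in mad.sh):
#!   nippy_DownloadFile "$url" "$filecnt"
#! $1 = the url to download, $2 = the current file count (see the locals below)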
nippy_DownloadFile() {
  local remote_url=${1}
  local file_url=${1}    # alias of remote_url
  local filecnt=${2}
  warnAndRetryUnknownError=false
  exitDownloadError=false
  exitDownloadNotAvailable=false
  fileAlreadyDone=false
  download_inflight_path="${WorkDir}/.inflight/"
  mkdir -p "$download_inflight_path"
  completed_location="${WorkDir}/downloads/"
  tor_identity="${RANDOM}"
  finalAttempt="false"
  for ((z=0; z<=$MaxUrlRetries; z++)); do
    if [[ $z -eq $MaxUrlRetries ]] ; then
      finalAttempt="true"
    fi
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ''; tput cnorm; exit" 0 1 2 3 6 15
    if nippy_FetchFileInfo $finalAttempt && nippy_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
      return 0
    elif [[ $z -lt $MaxUrlRetries ]]; then
      if [[ "${fileAlreadyDone}" == "true" ]] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
        fi
      fi
      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
        break
      fi
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
      sleep 3
    fi
  done
  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
#!
#! ------------- (2) Fetch File Info Function ----------------- #
#!
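#! Contract (as used by nippy_DownloadFile above): called as nippy_FetchFileInfo "<finalAttempt>",
#! where $1 is "true" on the last retry (enables failure reporting). On success it sets
#! filename, file_size_bytes, file_path and download_url for nippy_GetFile and returns 0;
#! any error returns 1.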
nippy_FetchFileInfo() {
  finalAttempt=$1
  maxfetchretries=5
  nippy_cookie_jar=""
  # Map the url's domain to the host name used in the download-link pattern below
  local nippyhost="nippyshare.com"
  if grep -Eqi "nippyshare.com" <<< "$remote_url"; then
    nippyhost="nippyshare.com"
  elif grep -Eqi "nippyshare.me" <<< "$remote_url"; then
    nippyhost="nippyshare.me"
  elif grep -Eqi "nippydrive.com" <<< "$remote_url"; then
    nippyhost="nippydrive.com"
  elif grep -Eqi "nippybox.com" <<< "$remote_url"; then
    nippyhost="nippybox.com"
  elif grep -Eqi "nippyfile.com" <<< "$remote_url"; then
    nippyhost="nippyfile.com"
  elif grep -Eqi "nippyspace.com" <<< "$remote_url"; then
    nippyhost="nippyspace.com"
  elif grep -Eqi "yolobit.com" <<< "$remote_url"; then
    nippyhost="yolobit.com"
  elif grep -Eqi "nofile.org" <<< "$remote_url"; then
    nippyhost="nofile.org"
  elif grep -Eqi "shareonline.vg" <<< "$remote_url"; then
    nippyhost="shareonline.vg"
  elif grep -Eqi "dbree.me" <<< "$remote_url"; then
    nippyhost="dbree.me"
  elif grep -Eqi "dbree.org" <<< "$remote_url"; then
    # dbree.org serves from dbree.me
    nippyhost="dbree.me"
  fi
  # Normalize dbree.org urls onto dbree.me before requesting the page
  if grep -Eqi "dbree.org" <<< "$remote_url"; then
    fixed_url=${remote_url/dbree.org/dbree.me}
  else
    fixed_url=${remote_url}
  fi
  echo -e "${GREEN}# Fetching download link…${NC}"
  for ((i=1; i<=$maxfetchretries; i++)); do
    mkdir -p "${WorkDir}/.temp"
    nippy_cookie_jar=$(mktemp "${WorkDir}/.temp/nippy_cookies${instance_no}.XXXXXX")
    printf " ."
    tor_identity="${RANDOM}"
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${nippy_cookie_jar}; echo ''; tput cnorm; exit" 0 1 2 3 6 15
    response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url")
    if [[ "${DebugAllEnabled}" == "true" ]] ; then
      debugHtml "${remote_url##*/}" "nippy_dwnpage$i" "fixed_url: ${fixed_url}"$'\n'"${response}"
    fi
    # Empty response: retry, and give up only on the last fetch attempt
    if [[ -z $response ]] ; then
      rm -f "${nippy_cookie_jar}"
      if [[ $i == $maxfetchretries ]] ; then
        printf "\\n"
        echo -e "${RED}| Failed to extract download link.${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "" ""
        fi
        return 1
      else
        continue
      fi
    fi
    if grep -Eqi "There is no such file|File was deleted because" <<< "$response"; then
      rm -f "${nippy_cookie_jar}"
      printf "\\n"
      echo -e "${RED}| The file was not found. It may have been deleted or may have expired.${NC}"
      exitDownloadError=true
      removedDownload "${remote_url}"
      return 1
    fi
    if grep -Eqi "is temporarily unavailable|Please try back later" <<< "$response"; then
      rm -f "${nippy_cookie_jar}"
      if ((i == maxfetchretries)) ; then
        printf "\\n"
        echo -e "${RED}| Download is temporarily unavailable. Try again later.${NC}"
        exitDownloadError=true
        failedRetryDownload "${remote_url}" "Download is temporarily unavailable. Try again later."
        return 1
      else
        continue
      fi
    fi
    # The download page links the file as: <h2><a href='//<host>/d/...' class=...>
    if grep -Eqi '<h2><a href='\''//'"${nippyhost}"'/d/' <<< "$response"; then
      printf "\\n"
      echo -e "${GREEN}| Download url found.${NC}"
      download_url="https:"$(grep -oP '(?<=<h2><a href='\'').*(?='\'' class=)' <<< "$response")
      break
    fi
    if ((i == maxfetchretries)) ; then
      rm -f "${nippy_cookie_jar}"
      printf "\\n"
      echo -e "${RED}| Failed to extract download url (unknown).${NC}"
      warnAndRetryUnknownError=true
      if [[ "${finalAttempt}" == "true" ]] ; then
        failedRetryDownload "${remote_url}" "" ""
      fi
      return 1
    else
      rm -f "${nippy_cookie_jar}"
      continue
    fi
  done
  echo -e "${GREEN}# Fetching file info…${NC}"
  filename=""
  file_size_bytes=""
  cdn_url=""
  if [[ -n "$filename_override" ]] ; then
    filename="$filename_override"
  fi
  for ((j=1; j<=$maxfetchretries; j++)); do
    printf " ."
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${nippy_cookie_jar}; echo ''; tput cnorm; exit" 0 1 2 3 6 15
    file_header=$(tor_curl_request --insecure -L --head -s \
      -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" \
      "$download_url")
    if [[ "${DebugAllEnabled}" == "true" ]] ; then
      debugHtml "${remote_url##*/}" "nippy_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}"
    fi
    if [[ -z $file_header ]] ; then
      if [[ $j == $maxfetchretries ]] ; then
        rm -f "${nippy_cookie_jar}"
        printf "\\n"
        echo -e "${RED}| Failed to extract file info${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "Failed to extract file info" ""
        fi
        return 1
      else
        continue
      fi
    fi
    if grep -Eqi 'HTTP.* 404' <<< "$file_header" ; then
      printf "\\n"
      echo -e "${RED}| 404 Not Found (file removed)${NC}"
      exitDownloadError=true
      removedDownload "${remote_url}"
      return 1
    fi
    if ! grep -Eqi 'HTTP.* 200' <<< "$file_header" ; then
      if [[ $j == $maxfetchretries ]] ; then
        printf "\\n"
        echo -e "${RED}| Failed to extract file info${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "Failed to extract file info" ""
        fi
        return 1
      else
        continue
      fi
    fi
    # The Location header is protocol-relative, so prefix the scheme; strip CR/LF/tabs
    if [[ -z "$cdn_url" ]] ; then
      cdn_url="https:"$(grep -oP '(?<=location: ).*$' <<< "$file_header")
      cdn_url=${cdn_url//[$'\t\r\n']}
    fi
    if [[ -z $filename ]]; then
      filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header")
    fi
    if [[ -z $file_size_bytes ]] ; then
      file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
      file_size_bytes=${file_size_bytes//[$'\t\r\n']}
    fi
    if [[ -z $filename ]] || [[ -z "$cdn_url" ]] || [[ -z "$file_size_bytes" ]] ; then
      if [[ $j == $maxfetchretries ]] ; then
        rm -f "${nippy_cookie_jar}"
        printf "\\n"
        echo -e "${RED}| Failed to extract file info.${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "" ""
        fi
        return 1
      else
        continue
      fi
    fi
    break # Good to go: filename, size and cdn url are all present
  done
  download_url="$cdn_url"
  touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
  filename=$(sanitize_file_or_folder_name "${filename}")
  printf "\\n"
  echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
  if [[ -z $file_size_bytes ]] ; then
    if [[ "${finalAttempt}" == "true" ]] ; then
      failedRetryDownload "${remote_url}" "Filesize not found!" ""
    fi
    echo -e "${YELLOW}| Filesize not found… retry${NC}"
    return 1
  else
    file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
  fi
  echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
  file_path="${download_inflight_path}${filename}"
  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then
    return 1
  fi
  if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
    return 1
  fi
  echo "${remote_url//[^a-zA-Z0-9]/}" > "$flockDownload"
}
#!
#! ----------- (3) Fetch File / Download File Function --------------- #
#!
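#! Contract (as used by nippy_DownloadFile above): called as nippy_GetFile "<filecnt>" "<retryCnt>" "<finalAttempt>".
#! $1 = file counter, $2 = retry attempt number, $3 = "true" on the last retry.
#! Relies on filename, file_path, file_size_bytes and download_url set by nippy_FetchFileInfo.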
nippy_GetFile() {
  echo -e "${GREEN}# Downloading…${NC}"
  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
  fileCnt=$1
  retryCnt=$2
  finalAttempt=$3
  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  for ((j=1; j<=$MaxDownloadRetries; j++)); do
    # Remember the partial's size so a bad attempt can be rolled back to it
    pd_presize=0
    if [[ -f "$file_path" ]] ; then
      pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
    fi
    GetRandomUA
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${nippy_cookie_jar}; rm -f $flockDownload; echo ''; tput cnorm; exit" 0 1 2 3 6 15
    if [[ "${RateMonitorEnabled}" == "true" ]]; then
      tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
        "$download_url" --continue-at - --output "$file_path"
    else
      tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
    fi
    received_file_size=0
    if [[ -f "$file_path" ]] ; then
      received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
    fi
    if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
      containsHtml=false
    else
      containsHtml=true
    fi
    downDelta=$(( received_file_size - pd_presize ))
    if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then
      # A delta under 1 KiB usually means an error page was written instead of file data
      if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then
        if [[ -f "${file_path}" ]] ; then
          if ((pd_presize > 0)); then
            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
            truncate -s $pd_presize "${file_path}"
          else
            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
            rm -f "${file_path}"
          fi
        fi
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload"
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then
        if [[ -f "${file_path}" ]] ; then
          if ((pd_presize > 0)); then
            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
            truncate -s $pd_presize "${file_path}"
          else
            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
            rm -f "${file_path}"
          fi
        fi
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload"
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then
        # AutoRepairBadPartials is off: discard the whole partial and retry
        if [[ -f "$file_path" ]] ; then
          rm -f "$file_path"
        fi
        echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload"
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi
      if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
        echo -e "\n${RED}Download failed, the file is incomplete.${NC}"
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload"
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi
    else
      break
    fi
  done
  rm -f "$flockDownload"
  rm -f "${nippy_cookie_jar}"
  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
  return 0
}
#!
#! --------------- Host Extra Functions ------------------- #
#!