2024.11.27 - [up_ranoz] Modify download link to not use the upload url ticket link
# 2024.11.26 - [filehaus] Handle "404 Not found" on first instance
# 2024.11.25 - [up_moocloud / moocloud] Add moocloud.ch as an upload and download host
# 2024.11.24 - [uploadhive] Handle "Error creating download link" response -- do not mark Removed
# 2024.11.23 - [filehaus] Use tor_curl_request_extended for head / get for filehaus urls
# 2024.11.23 - [mad] Make tor_curl_request_extended a random timeout between 30-60 seconds
# 2024.11.22 - [up_quax, quax] Add qu.ax as an upload and download host
# 2024.11.21 - [filedot] Fix check for post filename
# 2024.11.20 - [gofile] Handle parsing parent gofile url into multiple download urls
# (still needs updating to handle child urls gofile.io/download/web/<guid>/file)
# 2024.11.19 - [mad] Add updateUrlDownload function to handle updating a url
# (ie. parent gofile url with children urls)
parent fa83163a58
commit ec7d121c0e
12 changed files with 656 additions and 34 deletions
mad.sh (39 changes)
@@ -31,9 +31,21 @@
 # * klonkerz - feedback and suggestions, url only processing
 # * Everyone who provided feedback and helped test.. and those who wish to remain anonymous
 
-ScriptVersion=2024.11.18
+ScriptVersion=2024.11.28
 #=================================================
 # Recent Additions
+# 2024.11.27 - [up_ranoz] Modify download link to not use the upload url ticket link
+# 2024.11.26 - [filehaus] Handle "404 Not found" on first instance
+# 2024.11.25 - [up_moocloud / moocloud] Add moocloud.ch as an upload and download host
+# 2024.11.24 - [uploadhive] Handle "Error creating download link" response -- do not mark Removed
+# 2024.11.23 - [filehaus] Use tor_curl_request_extended for head / get for filehaus urls
+# 2024.11.23 - [mad] Make tor_curl_request_extended a random timeout between 30-60 seconds
+# 2024.11.22 - [up_quax, quax] Add qu.ax as an upload and download host
+# 2024.11.21 - [filedot] Fix check for post filename
+# 2024.11.20 - [gofile] Handle parsing parent gofile url into multiple download urls
+# (still needs updating to handle child urls gofile.io/download/web/<guid>/file)
+# 2024.11.19 - [mad] Add updateUrlDownload function to handle updating a url
+# (ie. parent gofile url with children urls)
 # 2024.11.18 - [up_fileditch / fileditch] Add fileditch.com as upload and download host
 # 2024.11.17 - [innocent] Fix "Fetching file info". Support resume downloads.
 # 2024.11.16 - [mad] Fix reload on uploads.txt modified (uploads: filemode)
@@ -166,8 +178,8 @@ UploadSpeedMin=10
 # [RateMonitor - UploadTimeoutInterval]: Amount of time in seconds a transfer can remain below the UploadSpeedMin before it will timeout.
 # This helps ensure an upload doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true)
 # ie. curl: (28) Operation too slow. Less than 5000 bytes/sec transferred the last 60 seconds
-# @Default=300 (5 min)
-UploadTimeoutInterval=300
+# @Default=600 (10 min)
+UploadTimeoutInterval=600
 
 
 #=================================================
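The curl error quoted in the comment is curl's low-speed abort: curl exits with code 28 when the transfer rate stays below --speed-limit (bytes/sec) for --speed-time seconds. A minimal sketch of how UploadSpeedMin and UploadTimeoutInterval could feed those flags; the KB/s unit for UploadSpeedMin and the $file / $upload_url variables are assumptions, not taken from mad.sh:

# Sketch only; mad.sh may wire its rate monitor differently.
# Assumes UploadSpeedMin is KB/s, so convert to bytes/sec for curl.
curl --speed-limit $(( UploadSpeedMin * 1024 )) \
     --speed-time "${UploadTimeoutInterval}" \
     --upload-file "$file" "$upload_url"
# A stalled transfer then fails with exit code 28 and the
# "Operation too slow" message quoted in the comment above.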
@@ -390,10 +402,11 @@ tor_curl_request() {
   fi
 }
 tor_curl_request_extended() {
+  randomtimeout=$((30 + RANDOM % (60 - 30)))
   if [ "${UseTorCurlImpersonate}" == "true" ]; then
-    "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@"
+    "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
   else
-    curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@"
+    curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
   fi
 }
 tor_curl_upload() {
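The new connect timeout comes from bash arithmetic on RANDOM: $((30 + RANDOM % (60 - 30))) reduces to 30 + (RANDOM % 30), i.e. an integer from 30 to 59, which matches the "30-60 seconds" wording in the changelog. A standalone snippet to sample the same expression, independent of mad.sh:

# Print a few samples; every value falls in the 30..59 range.
for i in {1..5}; do
  randomtimeout=$((30 + RANDOM % (60 - 30)))
  echo "connect-timeout: ${randomtimeout}s"
done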
@@ -779,6 +792,22 @@ renameDuplicateDownload() {
     echo -e "| Filename: \"./downloads/$filename\""
   fi
 }
+updateUrlDownload() {
+  local url=$(literalize_string "${1//[$'\t\r\n']}")
+  local newurl="$2"
+  echo -e "1${PINK}$newurl${NC}"
+  if [ ! "$UrlOnly" == "true" ]; then
+    sed -i -e "s%^$url.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #processed url
+    sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
+  fi
+  mkdir -p "${WorkDir}/downloads"
+  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
+  echo -e "$dateStamp [UPDATE] ${url} (new url: ${newfilename})" >> "${WorkDir}/downloads/results.txt"
+  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
+    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
+  fi
+}
 droppedSizeBadDownload() {
   local url="${1//[$'\t\r\n']}"
   local filename=$(literalize_string "$2")
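In updateUrlDownload, the sed replacements swap the line starting with the old url for $newurl and turn any whitespace inside $newurl into newlines, so one parent entry can expand into several child entries in ${InputFile}. A hypothetical call for the gofile case from the changelog; the URLs and the gofile_children variable are illustrative placeholders, not taken from mad.sh:

# Sketch: replace a parent gofile entry with its child download urls.
gofile_children="https://gofile.io/d/child1 https://gofile.io/d/child2"
updateUrlDownload "https://gofile.io/d/parent1" "$gofile_children"
# ${InputFile} now lists each child url on its own line where the parent was.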