2024.11.27 - [up_ranoz] Modify download link to not use the upload url ticket link

# 2024.11.26 - [filehaus] Handle "404 Not found" on first instance
# 2024.11.25 - [up_moocloud / moocloud] Add moocloud.ch as an upload and download host
# 2024.11.24 - [uploadhive] Handle "Error creating download link" response -- do not mark Removed
# 2024.11.23 - [filehaus] Use tor_curl_request_extended for head / get for filehaus urls
# 2024.11.23 - [mad] Make tor_curl_request_extended a random timeout between 30-60 seconds
# 2024.11.22 - [up_quax, quax] Add qu.ax as an upload and download host
# 2024.11.21 - [filedot] Fix check for post filename
# 2024.11.20 - [gofile] Handle parsing parent gofile url into multiple download urls
#               (still needs updating to handle child urls gofile.io/download/web/<guid>/file)
# 2024.11.19 - [mad] Add updateUrlDownload function to handle updating a url
#               (ie. parent gofile url with children urls)
This commit is contained in:
kittykat 2024-11-27 05:54:28 +00:00
parent fa83163a58
commit ec7d121c0e
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
12 changed files with 656 additions and 34 deletions

View file

@ -24,6 +24,7 @@ Max Size . HostCode . Nickname . Notes
- 1GB kraken krakenfiles.com 90d inactive expiry
1GB ansh anonsharing.com 6mo expiry
300MB trbo turbo.onion ~40d expiry
256MB qx qu.ax ?? expiry
250MB upev uploadev.org 90d inactive expiry
* 240MB ko kouploader.jp 5mo expiry (240MB max)
150MB torp TorUp.onion 30d inactive expiry
@ -41,6 +42,7 @@ Max Size . HostCode . Nickname . Notes
10GB anarc anarchaserver.org ~1mo expiry, jirafrau
10GB nlib netlib.re ~1mo expiry, jirafrau
* 10GB raja uploadraja.com 4d inactive expiry
5GB moo moocloud.ch ~1mo expiry, jirafrau
5GB squid filesquid.net ~1mo expiry, jirafrau
4GB tmpsh temp.sh 3d expiry
1GB kaz depot.kaz.bzh ~1mo expiry, jirafrau

View file

@ -1,6 +1,6 @@
#! Name: filedot.sh
#! Author: kittykat
#! Version: 2024.09.22
#! Version: 2024.11.21
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -225,7 +225,7 @@ fdot_FetchFileInfo() {
continue
fi
done
if [ -z $post_fname ]; then
if [[ -z "$post_fname" ]]; then
echo -e "${YELLOW}| Login failed [2] (filename not found)${NC}"
return 1
fi

View file

@ -97,21 +97,17 @@ fh_FetchFileInfo() {
tor_identity="${RANDOM}"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url")
file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
fi
if [ ! -z "$file_header" ] ; then
if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
if [ $j == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| The file has been removed (404).${NC}"
removedDownload "${remote_url}"
exitDownloadNotAvailable=true
return 1
else
continue
fi
fi
if ! grep -Eqi '200|content-length' <<< "${file_header}" ; then
if [ $j == $maxfetchretries ] ; then
@ -194,9 +190,9 @@ fh_GetFile() {
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if [ "${RateMonitorEnabled}" == "true" ]; then
tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
else
tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
fi
received_file_size=0
if [ -f "$file_path" ] ; then

View file

@ -17,7 +17,7 @@ HostCode='gofile'
HostNick='gofile'
HostFuncPrefix='gofile'
HostUrls='gofile.io'
HostDomainRegex='^(http|https)://(.*\.)?gofile\.io/d/'
HostDomainRegex='^(http|https)://(.*\.)?gofile\.io/(d|download/web)/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
@ -80,8 +80,8 @@ gofile_FetchFileInfo() {
maxfetchretries=5
gofile_cookie_jar=""
if ! grep -Eqi 'gofile.io/d/' <<< "$remote_url"; then
echo -e "${RED}| Bad gofile url (Format should be gofile.io/d/xxxxxx)${NC}"
badUrlDownload "${remote_url}" "Expect format http://gofile.io/d/xxxxxxxxx"
echo -e "${RED}| Bad gofile url (format: gofile.io/d/xxxxx)${NC}"
badUrlDownload "${remote_url}" "Expect format: http://*.gofile.io/d/xxxxx"
exitDownloadError=true
return 1
fi
@ -200,10 +200,19 @@ gofile_FetchFileInfo() {
fi
if grep -Eqi '"status":"ok"' <<< "$response"; then
download_url=$(grep -oPi '(?<="link":").*?(?=")' <<< "$response")
if [ ! -z $download_url ]; then
printf "\\n"
echo -e "${GREEN}| Download url found.${NC}"
break
if [[ ! -z "$download_url" ]]; then
cnturls=$(grep -oin 'https://' <<< "$download_url" | wc -l)
if ((cnturls > 1)); then
printf "\\n"
echo -e "${BLUE}| Multiple urls found. Writing urls to $InputFile and reloading.${NC}"
updateUrlDownload "$remote_url" "$download_url"
fileAlreadyDone=true
return 1
else
printf "\\n"
echo -e "${GREEN}| Download url found.${NC}"
break
fi
else
rm -f "${gofile_cookie_jar}";
if [ $i == $maxfetchretries ] ; then

41
hosts/moocloud.sh Normal file
View file

@ -0,0 +1,41 @@
#! Name: moocloud.sh
#! Author: kittykat
#! Version: 2024.11.26
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='moo'
HostNick='moocloud.ch'
HostFuncPrefix='moo'
HostUrls='moocloud.ch'
HostDomainRegex='^(http|https)://(.*\.)?file\.tools\.moocloud\.ch/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! This is a direct= download host, so all the functions are already in mad.sh
#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
#! call the direct_DownloadFile() function already in mad.sh
moo_DownloadFile() {
  # moocloud.ch is a plain jirafeau direct-download host; we only need to
  # make sure the "&p=1" (raw file) query flag is on the url before handing
  # off to the generic direct_DownloadFile handler in mad.sh.
  local src_url="$1"
  local file_cnt="$2"
  local fetch_url="$src_url"
  if ! grep -Eqi '&p=1$' <<< "$src_url"; then
    fetch_url="${src_url}&p=1"
    echo -e "[${BLUE}ModifiedUrl${NC}]: ${fetch_url}"
  fi
  direct_DownloadFile "$src_url" "$file_cnt" "$fetch_url"
}

268
hosts/quax.sh Normal file
View file

@ -0,0 +1,268 @@
#! Name: quax.sh
#! Author: kittykat
#! Version: 2024.11.22
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='qx'
HostNick='qu.ax'
HostFuncPrefix='qx'
HostUrls='qu.ax'
HostDomainRegex='^(http|https)://(.*\.)?qu\.ax/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#! <HostFuncPrefix>_DownloadFile()
qx_DownloadFile() {
# Main entry point for qu.ax downloads: retries qx_FetchFileInfo + qx_GetFile
# up to MaxUrlRetries times, honoring the global flags those helpers set
# (fileAlreadyDone, warnAndRetryUnknownError, exitDownloadError,
# exitDownloadNotAvailable).  $1 = url, $2 = file counter.
local remote_url=${1}
local file_url=${1}
local filecnt=${2}
# Reset per-url state shared with the fetch/get helpers.
warnAndRetryUnknownError=false
exitDownloadError=false
exitDownloadNotAvailable=false
fileAlreadyDone=false
download_inflight_path="${WorkDir}/.inflight/"
mkdir -p "$download_inflight_path"
completed_location="${WorkDir}/downloads/"
tor_identity="${RANDOM}"
finalAttempt="false"
for ((z=0; z<=$MaxUrlRetries; z++)); do
# The last pass is flagged so the helpers record a permanent failure.
if [ $z -eq $MaxUrlRetries ] ; then
finalAttempt="true"
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
# Clean up the per-url flock file on any exit/signal.
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if qx_FetchFileInfo $finalAttempt && qx_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
return 0
elif [ $z -lt $MaxUrlRetries ]; then
# Already handled elsewhere (e.g. duplicate) -- stop retrying.
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
fi
fi
# Unrecoverable (removed / bad url): drop the flock and give up early.
if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
sleep 3
fi
done
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
#!
#! ------------- (2) Fetch File Info Function ----------------- #
#!
qx_FetchFileInfo() {
  # HEAD-request the qu.ax url over tor and populate the download globals:
  # download_url, filename, file_path, file_size_bytes, file_size_readable,
  # flockDownload.  Returns 1 on any error; sets exitDownloadNotAvailable /
  # warnAndRetryUnknownError for the caller's retry loop.  $1 = finalAttempt.
  echo -e "${GREEN}# Fetching file info…${NC}"
  finalAttempt=$1
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  download_url=$(urlencode_literal_grouped_case_urlendingonly "$remote_url")
  tor_identity="${RANDOM}"
  file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
  if [ "${DebugAllEnabled}" == "true" ] ; then
    debugHtml "${remote_url##*/}" "qx_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
  fi
  if [ ! -z "$file_header" ] ; then
    if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
      echo -e "${RED}| The file has been removed (404).${NC}"
      removedDownload "${remote_url}"
      exitDownloadNotAvailable=true
      return 1
    fi
    # Quoted herestring so the multi-line header is passed to grep intact.
    if ! grep -Eqi 'HTTP/.* 200' <<< "$file_header" ; then
      echo -e "${RED}| Failed to extract file info${NC}"
      warnAndRetryUnknownError=true
      if [ "${finalAttempt}" == "true" ] ; then
        failedRetryDownload "${remote_url}" "" ""
      fi
      return 1
    fi
    if [ "$filename_override" == "" ] ; then
      if grep -Eqi 'filename=' <<< "${file_header}" ; then
        filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}")
        filename=${filename##filename}
        filename=${filename//\"/}
        filename=${filename//[$'\t\r\n']}
      else
        # No content-disposition: fall back to the last url path segment.
        filename="${download_url##*/}"
      fi
    fi
    if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then
      echo -e "${RED}| Failed to extract file size.${NC}"
      if [ "${finalAttempt}" == "true" ] ; then
        failedRetryDownload "${remote_url}" "" ""
      fi
      return 1
    fi
  else
    echo -e "${RED}| No response. Try again later.${NC}"
    if [ "${finalAttempt}" == "true" ] ; then
      failedRetryDownload "${remote_url}" "" ""
    fi
    return 1
  fi
  touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
  if [ ! "$filename_override" == "" ] ; then
    filename="$filename_override"
  elif [[ -z "$filename" ]] ; then  # quoted: unquoted -z misbehaves on spaces
    filename=${download_url##*/}
  fi
  # BUGFIX: was sanitize_file_or_folder_name "$(unknown)", which executed a
  # nonexistent command; sanitize the filename derived above.
  filename=$(sanitize_file_or_folder_name "$filename")
  file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
  file_size_bytes=${file_size_bytes//[$'\t\r\n']}
  if [[ -z "$file_size_bytes" ]] ; then
    if [ "${finalAttempt}" == "true" ] ; then
      failedRetryDownload "${remote_url}" "Filesize not found!" ""
    fi
    echo -e "${YELLOW}| Filesize not found… retry${NC}"
    return 1
  else
    file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
  fi
  echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
  # BUGFIX: file_path/file name echo used "$(unknown)"; use ${filename}.
  file_path="${download_inflight_path}${filename}"
  echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then
    return 1
  fi
  if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
    return 1
  fi
  echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
}
#!
#! ----------- (3) Fetch File / Download File Function --------------- #
#!
qx_GetFile() {
  # Download the file resolved by qx_FetchFileInfo, with resume support,
  # HTML-taint detection and optional partial repair.
  # $1 = file counter, $2 = retry count, $3 = finalAttempt ("true"/"false").
  echo -e "${GREEN}# Downloading…"
  # BUGFIX: the path echo used "$(unknown)"; print the actual filename.
  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
  fileCnt=$1
  retryCnt=$2
  finalAttempt=$3
  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  for ((j=1; j<=$MaxDownloadRetries; j++)); do
    # Size before this attempt, so we can measure what this pass added.
    pd_presize=0
    if [ -f "$file_path" ] ; then
      pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
    fi
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    if [ "${RateMonitorEnabled}" == "true" ]; then
      tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
    else
      tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
    fi
    received_file_size=0
    if [ -f "$file_path" ] ; then
      received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
    fi
    if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
      containsHtml=false
    else
      containsHtml=true
    fi
    downDelta=$(( received_file_size - pd_presize ))
    if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
      # A tiny delta (<1KiB) usually means an error page was appended.
      if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
        if [ -f "${file_path}" ] ; then
          if ((pd_presize > 0)); then
            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
            truncate -s $pd_presize "${file_path}"
          else
            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
            rm -f "${file_path}"
          fi
        fi
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [ "${finalAttempt}" == "true" ] ; then
            # BUGFIX: was "$(unknown)"; report the filename.
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
        if [ -f "${file_path}" ] ; then
          if ((pd_presize > 0)); then
            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
            truncate -s $pd_presize "${file_path}"
          else
            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
            rm -f "${file_path}"
          fi
        fi
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [ "${finalAttempt}" == "true" ] ; then
            # BUGFIX: was "$(unknown)"; report the filename.
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
        # Repair disabled: discard the tainted partial entirely.
        if [ -f "$file_path" ] ; then
          rm -rf "$file_path"
        fi
        echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [ "${finalAttempt}" == "true" ] ; then
            # BUGFIX: was "$(unknown)"; report the filename.
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi
      if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
        echo -e "\n${RED}Download failed, file is incomplete.${NC}"
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [ "${finalAttempt}" == "true" ] ; then
            # BUGFIX: was "$(unknown)"; report the filename.
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi
    else
      break
    fi
  done
  rm -f "$flockDownload";
  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
  return 0
}
#!
#! --------------- Host Extra Functions ------------------- #
#!

135
hosts/up_moocloud.sh Normal file
View file

@ -0,0 +1,135 @@
#! Name: up_moocloud.sh
#! Author: kittykat
#! Version: 2024.11.26
#! Desc: Add support for uploading files to file.tools.moocloud.ch
#! Info: Files are accessible at https://address/f.php?h=<file_code>&p=1
#! MaxSize: 5GB
#! Expire: 1 Month
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='moo'
HostNick='moocloud.ch'
HostFuncPrefix='moo'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
moo_UploadFile() {
  # Upload one file to moocloud.ch, retrying up to MaxUploadRetries times.
  # $1 = host code, $2 = file path, $3 = file counter, $4 = uploads.txt line.
  local _hostCode=${1}
  local filepath=${2}
  local filecnt=${3}
  local pline=${4}
  local filename="${filepath##*/}"
  warnAndRetryUnknownError=false
  exitUploadError=false
  exitUploadNotAvailable=false
  fileAlreadyDone=false
  tor_identity="${RANDOM}"
  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  MaxUploadSizeInBytes=5368709120  # 5 GiB host limit
  fsize=$(GetFileSize "$filepath" "false")
  if ((fsize > MaxUploadSizeInBytes)); then
    rm -f "${UploadTicket}"
    # FIX: message typo "to large" -> "too large".
    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
    return 1
  fi
  finalAttempt="false"
  for ((z=0; z<=$MaxUploadRetries; z++)); do
    if [ $z -eq $MaxUploadRetries ] ; then
      finalAttempt="true"
    fi
    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    # BUGFIX: third argument was "$(unknown)"; pass the derived filename.
    if moo_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
      return 0
    elif [ $z -lt $MaxUploadRetries ]; then
      if [ "${fileAlreadyDone}" == "true" ] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
        fi
      fi
      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${UploadTicket}"
        break
      fi
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
      sleep 3
    fi
  done
  rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
moo_PostFile() {
  # POST one file to moocloud's jirafeau script endpoint and report the
  # resulting download link via successUpload / failedRetryUpload.
  local filepath=$1
  local _hostCode=$2
  local filename=$3
  local fileCnt=$4
  local retryCnt=$5
  local finalAttempt=$6
  local pline=${7}
  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  # BUGFIX: was "$(unknown)"; show the filename being uploaded.
  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
  tor_identity="${RANDOM}"
  PostUrlHost='https://file.tools.moocloud.ch/script.php'
  arrFiles=("$filepath")
  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  response=$(tor_curl_upload --insecure -i \
    -H "Content-Type: multipart/form-data" \
    -F "time=month" \
    -F "file=@${filepath}" \
    "${PostUrlHost}")
  if [ "${DebugAllEnabled}" == "true" ] ; then
    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
  fi
  if grep -Eqi ' 200 ' <<< "${response}" ; then
    # Assumes the file hash is the second-to-last line of the response
    # (jirafeau script.php behavior) -- TODO confirm against the host.
    hash=$(echo "$response" | tail -2 | head -1)
    hash=${hash//[$'\t\r\n']}
    filesize=$(GetFileSize "$filepath" "false")
    downloadLink="https://file.tools.moocloud.ch/f.php?h=${hash}&p=1"
    echo -e "${GREEN}| Upload Success${NC}"
    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
    # BUGFIX: was "{$response}", which kept the braces literal.
    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "${response}"
    return 0
  else
    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
    if [ "${finalAttempt}" == "true" ] ; then
      printf "\\n"
      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
      exitUploadError=true
      return 1
    else
      return 1
    fi
  fi
}
#!
#! --------------- Host Extra Functions ------------------- #
#!

134
hosts/up_quax.sh Normal file
View file

@ -0,0 +1,134 @@
#! Name: up_quax.sh
#! Author: kittykat
#! Version: 2024.11.22
#! Desc: Add support for uploading files to qu.ax
#! Info: Files are accessible at https://qu.ax/<file_code>
#! MaxSize: 256MB
#! Expire: ??
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='qx'
HostNick='qu.ax'
HostFuncPrefix='qx'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
qx_UploadFile() {
  # Upload one file to qu.ax, retrying up to MaxUploadRetries times.
  # $1 = host code, $2 = file path, $3 = file counter, $4 = uploads.txt line.
  local _hostCode=${1}
  local filepath=${2}
  local filecnt=${3}
  local pline=${4}
  local filename="${filepath##*/}"
  warnAndRetryUnknownError=false
  exitUploadError=false
  exitUploadNotAvailable=false
  fileAlreadyDone=false
  tor_identity="${RANDOM}"
  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  MaxUploadSizeInBytes=268435456  # 256 MiB host limit
  fsize=$(GetFileSize "$filepath" "false")
  if ((fsize > MaxUploadSizeInBytes)); then
    rm -f "${UploadTicket}"
    # FIX: message typo "to large" -> "too large".
    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
    return 1
  fi
  finalAttempt="false"
  for ((z=0; z<=$MaxUploadRetries; z++)); do
    if [ $z -eq $MaxUploadRetries ] ; then
      finalAttempt="true"
    fi
    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    # BUGFIX: third argument was "$(unknown)"; pass the derived filename.
    if qx_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
      return 0
    elif [ $z -lt $MaxUploadRetries ]; then
      if [ "${fileAlreadyDone}" == "true" ] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
        fi
      fi
      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${UploadTicket}"
        break
      fi
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
      sleep 3
    fi
  done
  rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
qx_PostFile() {
  # POST one file to qu.ax's upload endpoint and report the download link.
  local filepath=$1
  local _hostCode=$2
  local filename=$3
  local fileCnt=$4
  local retryCnt=$5
  local finalAttempt=$6
  local pline=${7}
  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  # BUGFIX: was "$(unknown)"; show the filename being uploaded.
  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
  tor_identity="${RANDOM}"
  PostUrlHost='https://qu.ax/upload.php'
  arrFiles=("$filepath")
  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  # NOTE(review): "expiry=-1" is sent as an HTTP header; qu.ax may expect a
  # form field instead -- confirm.  arrFiles always holds exactly one path
  # here, so the unquoted array expansion inside the -F word is safe.
  response=$(tor_curl_upload --insecure -i \
    -H "Content-Type: multipart/form-data" \
    -H "expiry=-1" \
    -F "files[]=@${arrFiles[@]}" \
    "${PostUrlHost}")
  if [ "${DebugAllEnabled}" == "true" ] ; then
    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
  fi
  if grep -Eqi '"success": true,' <<< "${response}" ; then
    url=$(grep -oPi '(?<="url": ").*?(?=".*$)' <<< "$response")
    filesize=$(GetFileSize "$filepath" "false")
    downloadLink="${url//\\/}"  # strip JSON escaping backslashes
    echo -e "${GREEN}| Upload Success${NC}"
    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
    # BUGFIX: was "{$response}", which kept the braces literal.
    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "${response}"
    return 0
  else
    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
    if [ "${finalAttempt}" == "true" ] ; then
      printf "\\n"
      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
      exitUploadError=true
      return 1
    else
      return 1
    fi
  fi
}
#!
#! --------------- Host Extra Functions ------------------- #
#!

View file

@ -1,6 +1,6 @@
#! Name: up_ranoz.sh
#! Author: kittykat
#! Version: 2024.11.05
#! Version: 2024.11.27
#! Desc: Add support for uploading files to bedrive.ru
#! Info: Files are accessible at https://ranoz.gg/file/<file_code>
#! MaxSize: 20GB
@ -110,9 +110,7 @@ rz_PostFile() {
fi
if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
PostUrlHost=$(grep -oPi '(?<="upload_url":").*?(?=".*$)' <<< "$response")
upload_url=$(grep -oPi '(?<="upload_url":").*?(?=\?.*$)' <<< "$response")
echo -e "upload_url: $upload_url"
downloadLink="${upload_url%%\.*}"'.ranoz.gg/'"${upload_url##*/}"
downloadLink=$(grep -oPi '(?<="url":").*?(?=".*$)' <<< "$response")
echo -e "${GREEN}| Upload url obtained...${NC}"
else
err=$(grep -oPi '(?<="errors":\[\{"message":").*?(?=".*$)' <<< "$response")

View file

@ -136,10 +136,10 @@ uhive_FetchFileInfo() {
debugHtml "${remote_url##*/}" "uhive_post" "${response}"
fi
if [[ -z $response ]] ; then
echo -e "${RED}| Failed to extract download link.${NC}"
echo -e "${RED}| Failed to extract download link [1]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
failedRetryDownload "${remote_url}" "Failed to extract download link [1]" ""
fi
return 1
fi
@ -149,21 +149,31 @@ uhive_FetchFileInfo() {
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'Error happened when generating Download Link' <<< "$response"; then
err=$(grep -oP '(?<=Error happened when generating Download Link.<br>).*(?=</font>.*$)' <<< "$response")
echo -e "${RED}| Error happened when generating Download Link.${NC}"
echo -e "(error: $(sanitize_html_tags "${err}"))"
exitDownloadError=true
failedRetryDownload "${remote_url}" "Error happened when generating Download Link" $(sanitize_html_tags "${err}")
return 1
fi
if grep -Eqi '<span id="direct_link"' <<< "$response"; then
echo -e "${GREEN}| Download link found.${NC}"
download_url=$(grep -oP '(?<=<a href=").*(?=.*uploadhive.com/files/).*(?=">)' <<< "$response")
filename=${download_url##*/}
else
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true
removedDownload "${remote_url}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "The file was not found. It could be deleted or expired" ""
fi
return 1
fi
if [[ -z $download_url ]] ; then
echo -e "${RED}| Failed to extract download link.${NC}"
echo -e "${RED}| Failed to extract download link [2]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
failedRetryDownload "${remote_url}" "Failed to extract download link [2]" ""
fi
return 1
fi

39
mad.sh
View file

@ -31,9 +31,21 @@
# * klonkerz - feedback and suggestions, url only processing
# * Everyone who provided feedback and helped test.. and those who wish to remain anonymous
ScriptVersion=2024.11.18
ScriptVersion=2024.11.28
#=================================================
# Recent Additions
# 2024.11.27 - [up_ranoz] Modify download link to not use the upload url ticket link
# 2024.11.26 - [filehaus] Handle "404 Not found" on first instance
# 2024.11.25 - [up_moocloud / moocloud] Add moocloud.ch as an upload and download host
# 2024.11.24 - [uploadhive] Handle "Error creating download link" response -- do not mark Removed
# 2024.11.23 - [filehaus] Use tor_curl_request_extended for head / get for filehaus urls
# 2024.11.23 - [mad] Make tor_curl_request_extended a random timeout between 30-60 seconds
# 2024.11.22 - [up_quax, quax] Add qu.ax as an upload and download host
# 2024.11.21 - [filedot] Fix check for post filename
# 2024.11.20 - [gofile] Handle parsing parent gofile url into multiple download urls
# (still needs updating to handle child urls gofile.io/download/web/<guid>/file)
# 2024.11.19 - [mad] Add updateUrlDownload function to handle updating a url
# (ie. parent gofile url with children urls)
# 2024.11.18 - [up_fileditch / fileditch] Add fileditch.com as upload and download host
# 2024.11.17 - [innocent] Fix "Fetching file info". Support resume downloads.
# 2024.11.16 - [mad] Fix reload on uploads.txt modified (uploads: filemode)
@ -166,8 +178,8 @@ UploadSpeedMin=10
# [RateMonitor - UploadTimeoutInterval]: Amount of time in seconds a transfer can remain below the UploadSpeedMin before it will timeout.
# This helps ensure an upload doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true)
# ie. curl: (28) Operation too slow. Less than 5000 bytes/sec transferred the last 60 seconds
# @Default=300 (5 min)
UploadTimeoutInterval=300
# @Default=600 (10 min)
UploadTimeoutInterval=600
#=================================================
@ -390,10 +402,11 @@ tor_curl_request() {
fi
}
tor_curl_request_extended() {
randomtimeout=$((30 + RANDOM % (60 - 30)))
if [ "${UseTorCurlImpersonate}" == "true" ]; then
"${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@"
"${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
else
curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@"
curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
fi
}
tor_curl_upload() {
@ -779,6 +792,22 @@ renameDuplicateDownload() {
echo -e "| Filename: \"./downloads/$filename\""
fi
}
updateUrlDownload() {
  # Replace a processed url in the InputFile with one or more replacement
  # urls ($2, whitespace-separated; each becomes its own line), log the
  # update to results.txt, and release the per-url flock.
  local url=$(literalize_string "${1//[$'\t\r\n']}")
  local newurl="$2"
  # NOTE(review): the leading "1" looks like debug residue -- confirm.
  echo -e "1${PINK}$newurl${NC}"
  if [ ! "$UrlOnly" == "true" ]; then
    sed -i -e "s%^$url.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #processed url
    sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
  fi
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  # BUGFIX: logged ${newfilename}, which is never defined in this function;
  # record the replacement url(s) instead.
  echo -e "$dateStamp [UPDATE] ${url} (new url: ${newurl})" >> "${WorkDir}/downloads/results.txt"
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}
droppedSizeBadDownload() {
local url="${1//[$'\t\r\n']}"
local filename=$(literalize_string "$2")

View file

@ -10,13 +10,13 @@
# 10GB raja uploadraja.com 240MB ko kouploader.jp 4GB tmpsh temp.sh
# 10GB tmpme tempfile.me 300MB trbo turbo.onion 100MB inno innocent.onion
# 1GB ansh anonsharing.com 1GB torp TorUp.onion 4GB offcat offshore.cat
# 100MB upee upload.ee 5GB fd fileditch.com
# 100MB upee upload.ee 5GB fd fileditch.com 256MB qx qu.ax
# Jirafraeu hosts (recommended upload 100MB splits as many host only support that)
# 10GB anarc anarchaserver 1GB kaz depot.kaz.bzh 5GB squid filesquid
# 10GB nant nantes.cloud 500MB soy soyjak.download 512MB linx linxx.net
# 10GB nlib netlib.re 100MB ffl famille-flender
# 10GB nlib netlib.re 100MB ffl famille-flender 5GB moo moocloud.ch
# ?? fr4e sendfree4e.fr 100MB harr harrault.fr 100MB acid dl.acid.fr
# ?? skpr skrepr.com
# ?? skpr skrepr.com
# (Require js -- do not use)
# 4GB daily dailyuploads 1GB kraken krakenfiles 2GB hex hexload
# 4GB bd bedrive.ru 5GB uflix uploadflix