# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host

# 2025.01.14 - [uwabaki] Add uwabaki.party as download host
# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor IPs (until alternative)
# 2025.01.13 - [ocr_captcha] Create imagemagick OCR function for testing without tesseract
# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function
# 2025.01.13 - [up_anonfile] Modify to use new upload url
# 2025.01.12 - [ateasystems] Update 404 Not found response
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
kittykat 2025-01-16 07:54:05 +00:00
parent 30eedaf567
commit eeb8054960
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
29 changed files with 1951 additions and 634 deletions

hosts/fileblade.sh Executable file → Normal file

@@ -1,6 +1,6 @@
 #! Name: isupload.sh
 #! Author: kittykat
-#! Version: 2024.12.28
+#! Version: 2025.01.14
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -77,7 +77,7 @@ fb_DownloadFile() {
 #!
 fb_FetchFileInfo() {
     finalAttempt=$1
-    maxfetchretries=5
+    maxfetchretries=10
     fb_cookie_jar=""
     echo -e "${GREEN}# Fetching download1…${NC}"
     for ((i=1; i<=$maxfetchretries; i++)); do
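The first-stage retry cap doubles here from 5 to 10, and a later hunk gives the second stage its own cap of 36. Reduced to its skeleton, the loop is a bounded retry against the host's block page; a minimal sketch under assumed names (fetch_with_retries and the plain curl call are illustrative stand-ins, not the script's code):

    fetch_with_retries() {
        local remote_url=$1 maxfetchretries=10 response
        for ((i=1; i<=maxfetchretries; i++)); do
            response=$(curl -s "$remote_url")   # stand-in; the real fetch goes through Tor
            # Success as soon as the response is not a block page.
            grep -Eqi "Sorry, you are banned|Sorry, you have been blocked" <<< "$response" || return 0
        done
        return 1   # blocked on every attempt
    }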
@@ -105,14 +105,14 @@ fb_FetchFileInfo() {
                 continue
             fi
         fi
-        if grep -Eqi "Sorry, you are banned" <<< "$response"; then
-            rm -f "${fb_cookie_jar}";
+        if grep -Eqi "Sorry, you are banned|Sorry, you have been blocked" <<< "$response"; then
             if [ $i == $maxfetchretries ] ; then
+                rm -f "${fb_cookie_jar}";
                 printf "\\n"
-                echo -e "${RED}| Failed to extract download link.${NC}"
+                echo -e "${RED}| Failed to extract download link [blocked ip]${NC}"
                 warnAndRetryUnknownError=true
                 if [ "${finalAttempt}" == "true" ] ; then
-                    failedRetryDownload "${remote_url}" "" ""
+                    failedRetryDownload "${remote_url}" "Failed to extract download link [blocked ip]" ""
                 fi
                 return 1
             else
@@ -154,6 +154,7 @@ fb_FetchFileInfo() {
             break
         fi
     done
+    maxfetchretries=36
     echo -e "${GREEN}# Fetching download2…${NC}"
     for ((i=1; i<=$maxfetchretries; i++)); do
         printf " _"
@@ -178,7 +179,6 @@ fb_FetchFileInfo() {
                 fi
                 return 1
             else
-                tor_identity="${RANDOM}"
                 continue
             fi
         fi
@@ -190,13 +190,28 @@ fb_FetchFileInfo() {
             removedDownload "${remote_url}"
             return 1
         fi
-        if grep -Eqi 'The file owner does not allow FREE users to download files which are over 100 MB' <<< "$response"; then
+        if grep -Eqi 'file owner does not allow FREE users to download files which are over 100 MB' <<< "$response"; then
             rm -f "${fb_cookie_jar}";
             printf "\\n"
-            echo -e "${RED}| Pro download only. (Free users not allowed download > 100MB)${NC}"
+            echo -e "${RED}| Pro download only. (Free users not allowed downloads over 100MB)${NC}"
             exitDownloadError=true
-            failedRetryDownload "${remote_url}" "Pro download only. [Free users not allowed download over 100MB]" ""
+            failedRetryDownload "${remote_url}" "Pro download only. [Free users not allowed downloads over 100MB]" ""
             return 1
         fi
+        if grep -Eqi "Sorry, you are banned|Sorry, you have been blocked" <<< "$response"; then
+            if [ $i == $maxfetchretries ] ; then
+                rm -f "${fb_cookie_jar}";
+                printf "\\n"
+                echo -e "${RED}| Failed to extract download link [blocked ip]${NC}"
+                warnAndRetryUnknownError=true
+                if [ "${finalAttempt}" == "true" ] ; then
+                    failedRetryDownload "${remote_url}" "Failed to extract download link [blocked ip]" ""
+                fi
+                return 1
+            else
+                tor_identity="${RANDOM}"
+                continue
+            fi
+        fi
         if grep -Eqi 'Your subsequent download will be started in' <<< "$response"; then
             if [ $i == $maxfetchretries ] ; then
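On non-final attempts the new branch retries with tor_identity="${RANDOM}" instead of failing outright. The diff does not show how the identity is consumed, but the usual mechanism is Tor stream isolation: under Tor's default IsolateSOCKSAuth behaviour, requests presenting different SOCKS credentials are kept on different circuits, so a fresh identity gets a fresh exit IP. A hedged sketch of that pattern, assuming a local Tor SOCKS proxy on 127.0.0.1:9050:

    tor_identity="${RANDOM}"
    # Distinct user:pass pairs are isolated onto distinct Tor circuits,
    # so each new tor_identity effectively requests a new exit IP.
    response=$(curl -s --proxy "socks5h://id${tor_identity}:x@127.0.0.1:9050" "$remote_url")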
@@ -292,22 +307,7 @@ fb_FetchFileInfo() {
             removedDownload "${remote_url}"
             return 1
         fi
-        if grep -Eqi 'Just a moment...' <<< "$response"; then
-            if [ $i == $maxfetchretries ] ; then
-                rm -f "${fb_cookie_jar}";
-                printf "\\n"
-                echo -e "${RED}| Failed to extract download link [5].${NC}"
-                warnAndRetryUnknownError=true
-                if [ "${finalAttempt}" == "true" ] ; then
-                    failedRetryDownload "${remote_url}" "Failed to extract download link [5]" ""
-                fi
-                return 1
-            else
-                tor_identity="${RANDOM}"
-                continue
-            fi
-        fi
-        if ! grep -Eqi '<a href="https://de6.fileblade.com/files/' <<< "$response"; then
+        if ! grep -oPi '(?=href="https://.*?\.fileblade.com/files/.*?" class=.*$)' <<< "$response"; then
             printf "\\n"
             echo -e "${RED}| Failed to extract download link [6]${NC}"
             warnAndRetryUnknownError=true
@@ -319,9 +319,9 @@ fb_FetchFileInfo() {
         else
             printf "\\n"
             echo -e "${GREEN}| Download url found.${NC}"
-            download_url=$(grep -oP -m 1 '(?<=a href="https://de6.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
+            download_url=$(grep -oP -m 1 '(?<=a href="https://).*?(?=\.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
             download_url="${download_url//[$'\t\r\n']}"
-            download_url='https://de6.fileblade.com/files/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
+            download_url='https://'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
             break
         fi
     done
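The rewritten extraction no longer hardcodes the de6 mirror: the lookbehind anchors just after a href="https://, the first lazy group runs up to the .fileblade.com/files/ lookahead, and the second continues to the " class= attribute, so the match captures host and path for any mirror subdomain. A quick check against a made-up anchor tag (hypothetical path):

    response='<a href="https://de9.fileblade.com/files/abc123/file.zip" class="btn">'
    download_url=$(grep -oP -m 1 '(?<=a href="https://).*?(?=\.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
    echo "https://${download_url}"   # -> https://de9.fileblade.com/files/abc123/file.zip

The script then runs the captured text through its urlencode_literal_grouped_case_urlendingonly helper before prepending https://.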