# 2024.11.18 - [up_fileditch / fileditch] Add fileditch.com as upload and download host

# 2024.11.17 - [innocent] Fix "Fetching file info". Support resume downloads.
# 2024.11.16 - [mad] Fix reload on uploads.txt modified (uploads: filemode)
# 2024.11.16 - [up_*] Fix removal of upload ticket if filesize is not supported
# 2024.11.15 - [familleflender] Add famille-flender.fr as download host
# 2024.11.15 - [up_familleflender] Add famille-flender.fr as upload host
# 2024.11.15 - [up_filehaus] Finish the uploader (the server is back online)
kittykat 2024-11-18 14:48:48 +00:00
parent d3d08d63f5
commit fa83163a58
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
101 changed files with 1648 additions and 790 deletions

hosts/innocent.sh Executable file → Normal file

@@ -1,6 +1,6 @@
 #! Name: innocent.sh
 #! Author: kittykat
-#! Version: 2024.10.22
+#! Version: 2024.11.17
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -92,7 +92,16 @@ inno_FetchFileInfo() {
         fi
         tput sc
         tor_identity="${RANDOM}"
-        if ((j % 2 == 0)); then
+        if ((j % 1 == 0)); then
+            printf "| Retrieving Head: attempt #$j"
+            file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
+        elif ((j % 2 == 0)); then
+            printf "| Retrieving Head (Get): attempt #$j"
+            file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
+                -H "Connection: keep-alive" \
+                -w 'EffectiveUrl=%{url_effective}' \
+                "$download_url")
+        elif ((j % 3 == 0)); then
             printf "| Retrieving Head (hack): attempt #$j"
             rm -f "${WorkDir}/.temp/directhead"
             file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
@@ -105,12 +114,6 @@ inno_FetchFileInfo() {
                 touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
             fi
             rm -f "${WorkDir}/.temp/directhead"
-        elif ((j % 3 == 0)); then
-            printf "| Retrieving Head (Get): attempt #$j"
-            file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
-                -H "Connection: keep-alive" \
-                -w 'EffectiveUrl=%{url_effective}' \
-                "$download_url")
         else
             printf "| Retrieving Head: attempt #$j"
             file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
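
The two hunks above rearrange the header-probe rotation so the plain HEAD request comes first and the slower GET-based probe follows. A minimal sketch of the two probe styles, using plain curl in place of the script's tor_curl_request wrapper; the URL and the 8-second timeout are placeholders:

# Illustrative sketch (not part of the diff): two ways to probe only the headers.
url="https://example.com/file.bin"    # placeholder URL

# Plain HEAD request, following redirects.
curl --head -L -s "$url"

# Header-only GET: discard the body, dump headers to stdout, report the final URL.
curl -s -m 8 -D - -o /dev/null \
    -H "Connection: keep-alive" \
    -w 'EffectiveUrl=%{url_effective}\n' \
    "$url"
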
@@ -141,9 +144,10 @@ inno_FetchFileInfo() {
         if [ "$filename_override" == "" ] ; then
             filename=${download_url##*/}
         fi
-        if grep -Eqi 'content-length:' <<< "${file_header}" ; then
+        if grep -Eqi 'Content-Length:' <<< "${file_header}" ; then
             file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header")
             file_size_bytes=${file_size_bytes//[$'\t\r\n']}
+            break
         fi
     else
         if ((j>=$maxretries)); then
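
The hunk above also breaks out of the retry loop as soon as a Content-Length header turns up. A minimal sketch of just that size extraction, assuming GNU grep with PCRE support (-P) and a placeholder URL:

# Illustrative sketch (not part of the diff): read the size out of a captured header blob.
file_header=$(curl --head -L -s "https://example.com/file.bin")    # placeholder URL
if grep -Eqi 'content-length:' <<< "$file_header"; then
    file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header")
    file_size_bytes=${file_size_bytes//[$'\t\r\n']}    # drop stray CR/LF/tab characters
    echo "remote size: ${file_size_bytes} bytes"
fi
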
@@ -203,8 +207,12 @@ inno_GetFile() {
     fi
     CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
     trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-    echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)"
-    tor_curl_request --insecure "$download_url" --output "$file_path"
+    if [ ! -z $file_size_bytes ] ; then
+        tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
+    else
+        echo -e "${BLUE}| No Resume Fetch${NC}"
+        tor_curl_request --insecure "$download_url" --output "$file_path"
+    fi
     received_file_size=0
     if [ -f "$file_path" ] ; then
         received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
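
The new branch in inno_GetFile resumes a partial download whenever the remote size was learned by the header probe, and only falls back to a fresh fetch when it was not. A minimal standalone sketch of the same idea, using plain curl, placeholder paths, and quoted variables:

# Illustrative sketch (not part of the diff): resume when the remote size is known.
download_url="https://example.com/file.bin"    # placeholder
file_path="./downloads/file.bin"               # placeholder

if [ -n "$file_size_bytes" ]; then
    # --continue-at - lets curl pick the resume offset from the existing partial file.
    curl "$download_url" --continue-at - --output "$file_path"
else
    # Size unknown: no reliable way to verify a resumed file, so start clean.
    curl "$download_url" --output "$file_path"
fi

# Compare what landed on disk with the advertised size, as the script does afterwards.
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
echo "received ${received_file_size} of ${file_size_bytes:-unknown} bytes"
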