# 2025.01.03 - [gofile] Detect "Bulk download is a Premium feature" response (no children)
# 2025.01.02 - [up_axfc] Update PUT response check to handle kanji chars (remove)
# 2025.01.02 - [dashfile] Add response 'This file reached max downloads limit'. New cookie on captcha fail
# 2024.12.28 - [dashfile] Update captcha code check
# 2024.12.28 - [anonfile] Add new download link href response
# 2024.12.28 - [fileblade] Add additional response handling (subsequent downloads, unknown warnings)
# 2024.12.28 - [eternalhosting] Update eternal.onion to handle landing page (eternal.onion/file/)
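Each of these entries follows the pattern the host scripts already use: fetch the page over the tor curl wrapper and grep the response body for a host-specific marker string before deciding how to proceed. A minimal sketch of that kind of check, using a made-up response body and the dashfile message quoted above (none of this code is taken from the commit itself):

    #!/bin/bash
    # Sketch only -- hypothetical example, not code from this commit.
    # "$response" stands in for a page body fetched via tor_curl_request.
    response='<div class="alert">This file reached max downloads limit</div>'
    if grep -Eqi 'This file reached max downloads limit' <<< "$response"; then
        echo "| File reached max downloads limit. Skipping."
    fi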
parent e6804e01e1
commit 30eedaf567
10 changed files with 766 additions and 687 deletions
28  hosts/anonfile.sh  (Normal file → Executable file)
@@ -1,6 +1,6 @@
 #! Name: anonfile.sh
 #! Author: kittykat
-#! Version: 2024.12.26
+#! Version: 2024.12.28
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -137,7 +137,7 @@ anon_FetchFileInfo() {
 fi
 if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then
 printf "\\n"
-echo -e "${GREEN}| Captcha found.${NC}"
+echo -e "${GREEN}| Post link found.${NC}"
 post_op=$(grep -oP '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response")
 post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response")
 post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$response")
@@ -327,8 +327,8 @@ anon_FetchFileInfo() {
 fi
 fi
 done
-echo -e "| Captcha countdown (10s)…"
-sleep 10s
+echo -e "| Captcha countdown (5s)…"
+sleep 5s
 maxfetchretries=1
 echo -e "${GREEN}# Fetching download url…${NC}"
 for ((i=1; i<=$maxfetchretries; i++)); do
@@ -413,7 +413,12 @@ anon_FetchFileInfo() {
 fi
 if grep -Eqi '<a class="stretched-link" href="https://anonfile.de' <<< "$response"; then
 printf "\\n"
-echo -e "${GREEN}| Download url found.${NC}"
+echo -e "${GREEN}| Download url found [1]${NC}"
 download_url=$(grep -oP '(?<=<a class="stretched-link" href=").*?(?=">.*$)' <<< "$response")
 download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
+elif grep -Eqi '<a class="stretched-link" href="' <<< "$response"; then
+printf "\\n"
+echo -e "${GREEN}| Download url found [2]${NC}"
+download_url=$(grep -oP '(?<=<a class="stretched-link" href=").*?(?=">.*$)' <<< "$response")
+download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
 fi
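For reference, the grep -oP lookbehind/lookahead pair used in both branches above extracts only the href value of the matching anchor. A standalone illustration with a made-up tag and URL (requires GNU grep built with PCRE support):

    # Illustration only; the tag and URL are placeholders, not data from the host.
    sample='<a class="stretched-link" href="https://anonfile.de/dl/abc123">Download</a>'
    grep -oP '(?<=<a class="stretched-link" href=").*?(?=">.*$)' <<< "$sample"
    # prints: https://anonfile.de/dl/abc123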
@@ -531,12 +536,15 @@ anon_FetchFileInfo() {
 #! ----------- (3) Fetch File / Download File Function --------------- #
 #!
 anon_GetFile() {
-echo -e "${GREEN}# Downloading…"
+echo -e "${GREEN}# Downloading… ${BLUE}(No Resume)${NC}"
 echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
 fileCnt=$1
 retryCnt=$2
 finalAttempt=$3
 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+if [ -f "$file_path" ]; then
+rm -f "$file_path"
+fi
 for ((j=1; j<=$MaxDownloadRetries; j++)); do
 pd_presize=0
 if [ -f "$file_path" ] ; then
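The new rm -f above works together with the curl changes in the hunks below: curl's --continue-at - flag resumes from the current size of the output file, so dropping it (and deleting any leftover partial file first) makes every retry restart from byte 0, which is what the new "(No Resume)" label advertises. Illustrated with plain curl rather than the script's tor_curl_request wrapper; $download_url and file.part are placeholders:

    # Illustration only -- plain curl, placeholder names.
    curl --output file.part --continue-at - "$download_url"   # resume from wherever file.part left off
    curl --output file.part "$download_url"                   # no resume: always start from byte 0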
@@ -552,12 +560,12 @@ anon_GetFile() {
 --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
 -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
 --referer "${fixed_url}" \
-"$download_url" --continue-at - --output "$file_path"
+"$download_url" --output "$file_path"
 else
 tor_curl_request --insecure \
 -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
 --referer "${fixed_url}" \
-"$download_url" --continue-at - --output "$file_path"
+"$download_url" --output "$file_path"
 fi
 else
 if [ "${RateMonitorEnabled}" == "true" ]; then
@@ -576,7 +584,7 @@ anon_GetFile() {
 -H "Sec-Fetch-User: ?1" \
 -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
 --referer "${fixed_url}" \
-"$download_url" --continue-at - --output "$file_path"
+"$download_url" --output "$file_path"
 else
 tor_curl_request --insecure \
 -H "User-Agent: $RandomUA" \
@@ -592,7 +600,7 @@ anon_GetFile() {
 -H "Sec-Fetch-User: ?1" \
 -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
 --referer "${fixed_url}" \
-"$download_url" --continue-at - --output "$file_path"
+"$download_url" --output "$file_path"
 fi
 fi
 received_file_size=0