# 2025.02.25 - [mad + allhosts] Re-engineer BadHtml scan to only scan the first 10kb of downloaded partials
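A minimal sketch of the idea behind this entry, with illustrative names and patterns (not the script's actual BadHtml/CheckNoHtml code): instead of scanning an entire partial download for an HTML error page, only its first 10 KB is inspected; the diff below additionally passes the newly downloaded byte count into CheckNoHtml.

```bash
# Illustrative only: decide whether a partial download is really an HTML
# error page by inspecting just its first 10 KB. Function name and marker
# patterns are assumptions for this sketch, not the real implementation.
looks_like_bad_html() {
	local file_path="$1"
	# Limit the scan to the first 10240 bytes so large partials stay cheap.
	head -c 10240 "$file_path" 2>/dev/null | grep -Eqi '<!doctype html|<html|<title>'
}

if looks_like_bad_html "$file_path"; then
	containsHtml=true
else
	containsHtml=false
fi
```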

# 2025.02.24 - [pixeldrain] Update "The file is IP limited" response handling retry
# 2025.02.22 - [blackcloud_onion] Add bcloud.onion download handling (url fixing)
# 2025.02.21 - [anonfile] Update cdn link parsing to handle new subdomains
# 2025.02.21 - [anonfile] Add download limit reached response handling
# 2025.02.21 - [anonfile] Update file info retrieval (head no longer responds)
# 2025.02.21 - [sendspace] Add sendspace.com as download host
# 2025.02.21 - [oshi / up_oshi] Revert /nossl/ changes for oshi.at (clearnet)
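For the pixeldrain entry above, the pattern being updated is: if the body saved to disk is the "The file is IP limited" notice rather than file data, discard it and retry later. A hedged sketch of that loop follows; the attempt limit, delay, and helper usage are made up for illustration and are not the script's actual pixeldrain handler.

```bash
# Illustrative retry loop only; attempt count, sleep interval, and the way
# tor_curl_request is invoked are assumptions, not the real handling.
attempts=0
while (( attempts < 5 )); do
	tor_curl_request "$download_url" --output "$file_path"
	# A small body containing the IP-limit phrase means we got the notice,
	# not the file: drop it, wait, and try again (ideally on a new circuit).
	if head -c 4096 "$file_path" 2>/dev/null | grep -qi 'The file is IP limited'; then
		rm -f "$file_path"
		attempts=$((attempts + 1))
		sleep 30
		continue
	fi
	break
done
```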
Commit 83d17967d6 by kittykat, 2025-02-26 12:00:57 +00:00 (parent 6f338dec65)
Signed by: kittykat, GPG key ID: E3F1556620F70C3C
54 changed files with 1316 additions and 822 deletions

oshi.sh

@@ -1,6 +1,6 @@
 #! Name: oshi.sh
 #! Author: kittykat
-#! Version: 2025.02.17
+#! Version: 2025.02.21
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -96,11 +96,10 @@ oshi_FetchFileInfo() {
 elif [[ "${OshiBaseUrlOverride}" == "oshionion" ]]; then
 	download_url=${remote_url//oshi\.at/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion}
 fi
-if ! grep -Eqi '/nossl/' <<< "$download_url"; then
-	download_url=${download_url//oshi\.at/oshi\.at\/nossl}
+if ! grep -Eqi '/nossl/' <<< "$download_url" && grep -Eqi '5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd/' <<< "$download_url" ; then
+	download_url=${download_url//5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqdi\.onion\/nossl}
 fi
-if grep -Eqi '^https' <<< "$download_url"; then
+if grep -Eqi '^https' <<< "$download_url" && grep -Eqi '5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd/' <<< "$download_url" ; then
 	download_url=${download_url//https:/http:}
 fi
 download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
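A worked example of what the rewritten hunk above does to a download URL (hostnames abbreviated, values illustrative rather than captured from a run): the /nossl/ path insertion and the https-to-http downgrade now apply only when the URL points at the oshi onion host, so clearnet oshi.at links keep their original https form.

```bash
# Illustrative before/after for the hunk above (not script output):
# clearnet URL, now left untouched:
#   https://oshi.at/abc123/file.bin
#   https://oshi.at/abc123/file.bin
# onion URL, still rewritten to /nossl/ and downgraded to plain http:
#   https://<oshi-onion-host>.onion/abc123/file.bin
#   http://<oshi-onion-host>.onion/nossl/abc123/file.bin
```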
@@ -199,15 +198,15 @@ oshi_GetFile() {
 CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
 trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
 if [[ "${RateMonitorEnabled}" == "true" ]]; then
-	tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+	tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
 else
-	tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+	tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
 fi
 received_file_size=0
 if [[ -f "$file_path" ]] ; then
 	received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
 fi
-if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
 	containsHtml=false
 else
 	containsHtml=true