diff --git a/.audit/mad-audit-curl.log b/.audit/mad-audit-curl.log index 455f759..f552149 100755 --- a/.audit/mad-audit-curl.log +++ b/.audit/mad-audit-curl.log @@ -1,4 +1,4 @@ -DateTime: 25.02.20 +DateTime: 25.02.24 Files: ./hosts/1fichier.sh @@ -11,6 +11,7 @@ Files: ./hosts/ateasystems.sh ./hosts/bedrive.sh ./hosts/biteblob.sh +./hosts/blackcloud_onion.sh ./hosts/bowfile.sh ./hosts/click.sh ./hosts/cyssoux.sh @@ -63,6 +64,7 @@ Files: ./hosts/ramsgaard.sh ./hosts/ranoz.sh ./hosts/sendnow.sh +./hosts/sendspace.sh ./hosts/shareonline.sh ./hosts/skrepr.sh ./hosts/soyjak.sh @@ -177,12 +179,11 @@ _________________________________________________________________________ ./hosts/anonfile.sh:186: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/anonfile.sh:240: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" ./hosts/anonfile.sh:340: response=$(tor_curl_request --insecure -L -s -X POST \ -./hosts/anonfile.sh:453: file_header=$(tor_curl_request -i -s --head \ -./hosts/anonfile.sh:557: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./hosts/anonfile.sh:559: tor_curl_request --insecure \ -./hosts/anonfile.sh:565: tor_curl_request --insecure \ -./hosts/anonfile.sh:572: tor_curl_request --insecure \ -./hosts/anonfile.sh:589: tor_curl_request --insecure \ +./hosts/anonfile.sh:517: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./hosts/anonfile.sh:519: tor_curl_request --insecure \ +./hosts/anonfile.sh:525: tor_curl_request --insecure \ +./hosts/anonfile.sh:532: tor_curl_request --insecure \ +./hosts/anonfile.sh:549: tor_curl_request --insecure \ ./hosts/anonsharing.sh:91: response=$(tor_curl_request --insecure -i -s \ ./hosts/anonsharing.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s \ ./hosts/anonsharing.sh:158: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -s -i \ @@ -204,8 +205,8 @@ _________________________________________________________________________ ./hosts/biteblob.sh:96: response=$(tor_curl_request --insecure -L -s "${fixed_url}") ./hosts/biteblob.sh:144: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") ./hosts/biteblob.sh:227: tor_curl_request --insecure --referer "$file_url" "$download_url" --output "$file_path" -./hosts/biteblob.sh:271: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./hosts/biteblob.sh:273: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/biteblob.sh:284: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/biteblob.sh:286: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" ./hosts/bowfile.sh:91: response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ ./hosts/bowfile.sh:143: response=$(tor_curl_request --insecure -s --head \ ./hosts/bowfile.sh:182: file_header=$(tor_curl_request --insecure -L -sS -i --head \ @@ -326,8 +327,8 @@ _________________________________________________________________________ ./hosts/isupload.sh:251: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | ./hosts/isupload.sh:263: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url") ./hosts/isupload.sh:355: tor_curl_request_extended 
--insecure -L "$download_url" --output "$file_path" -./hosts/isupload.sh:399: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./hosts/isupload.sh:401: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path" +./hosts/isupload.sh:403: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/isupload.sh:405: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path" ./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}") ./hosts/kraken.sh:169: down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}") ./hosts/kraken.sh:186: file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url") @@ -341,16 +342,16 @@ _________________________________________________________________________ ./hosts/nippy.sh:188: file_header=$(tor_curl_request --insecure -L --head -s \ ./hosts/nippy.sh:299: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/nippy.sh:302: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" -./hosts/oshi.sh:108: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") -./hosts/oshi.sh:202: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./hosts/oshi.sh:204: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:107: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") +./hosts/oshi.sh:201: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:203: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" ./hosts/pixeldrain.sh:94: response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid") -./hosts/pixeldrain.sh:256: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") -./hosts/pixeldrain.sh:322: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./hosts/pixeldrain.sh:324: tor_curl_request --insecure \ -./hosts/pixeldrain.sh:328: tor_curl_request --insecure \ -./hosts/pixeldrain.sh:333: tor_curl_request --insecure \ -./hosts/pixeldrain.sh:342: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:259: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") +./hosts/pixeldrain.sh:325: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./hosts/pixeldrain.sh:327: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:331: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:336: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:345: tor_curl_request --insecure \ ./hosts/quax.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") ./hosts/quax.sh:176: tor_curl_request --insecure 
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" ./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" @@ -361,14 +362,21 @@ _________________________________________________________________________ ./hosts/ranoz.sh:276: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \ -./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url") -./hosts/sendnow.sh:160: response=$(tor_curl_request --insecure -L -svo. -X POST \ -./hosts/sendnow.sh:204: file_header=$(tor_curl_request_extended --insecure --head -Lis \ -./hosts/sendnow.sh:325: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./hosts/sendnow.sh:327: tor_curl_request_extended --insecure -L --no-alpn \ -./hosts/sendnow.sh:345: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:364: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:383: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:89: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url") +./hosts/sendnow.sh:162: response=$(tor_curl_request --insecure -L -svo. -X POST \ +./hosts/sendnow.sh:206: file_header=$(tor_curl_request_extended --insecure --head -Lis \ +./hosts/sendnow.sh:327: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./hosts/sendnow.sh:329: tor_curl_request_extended --insecure -L --no-alpn \ +./hosts/sendnow.sh:347: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:366: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:385: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendspace.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ss_cookie_jar}" -c "${ss_cookie_jar}" "$remote_url") +./hosts/sendspace.sh:157: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/sendspace.sh:273: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./hosts/sendspace.sh:275: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/sendspace.sh:277: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" +./hosts/sendspace.sh:281: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/sendspace.sh:296: tor_curl_request --insecure \ ./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") ./hosts/syspro.sh:186: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/syspro.sh:188: tor_curl_request --insecure -L \ @@ -487,119 +495,119 @@ _________________________________________________________________________ ./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") ./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" ./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" -./mad.sh:98:UseTorCurlImpersonate=false -./mad.sh:394:tor_curl_request() { -./mad.sh:395: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./mad.sh:396: "${curl_impersonate[@]}" --proxy 
"socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" -./mad.sh:398: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" -./mad.sh:401:tor_curl_request_extended() { +./mad.sh:91:UseTorCurlImpersonate=false +./mad.sh:387:tor_curl_request() { +./mad.sh:388: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:389: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" +./mad.sh:391: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" +./mad.sh:394:tor_curl_request_extended() { +./mad.sh:396: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:397: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" +./mad.sh:399: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" +./mad.sh:402:tor_curl_upload() { ./mad.sh:403: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./mad.sh:404: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" -./mad.sh:406: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" -./mad.sh:409:tor_curl_upload() { -./mad.sh:410: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./mad.sh:412: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@" -./mad.sh:414: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@" -./mad.sh:418: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" -./mad.sh:420: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" -./mad.sh:1421:install_curl_impersonate() { -./mad.sh:1423: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive." 
-./mad.sh:1424: echo -e "- Currently uses curl v8.1.1." -./mad.sh:1428: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." -./mad.sh:1429: echo -e "+ Currently uses curl v8.7.1" -./mad.sh:1433: PS3='Please select which curl_impersonate to install: ' -./mad.sh:1441: install_curl_impersonate_lwthiker_orig -./mad.sh:1445: install_curl_impersonate_lexiforest_fork -./mad.sh:1455:install_curl_impersonate_lwthiker_orig() { -./mad.sh:1459: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate." -./mad.sh:1460: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates" -./mad.sh:1463: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}" -./mad.sh:1466: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) -./mad.sh:1468: debugHtml "github" "lbf_inst_curlimp$j" "$response" -./mad.sh:1471: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") -./mad.sh:1481: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { -./mad.sh:1483: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1486: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") -./mad.sh:1488: debugHtml "github" "head_inst_curlimp$j" "${file_header}" -./mad.sh:1536: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" -./mad.sh:1565: echo -e "| Extracting curl_impersonate..." -./mad.sh:1567: rm -f "${ScriptDir}"/curl* -./mad.sh:1568: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/" -./mad.sh:1569: mv "$extract_location/curl_ff109" "${ScriptDir}/" -./mad.sh:1570: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." -./mad.sh:1578:install_curl_impersonate_lexiforest_fork() { -./mad.sh:1582: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." -./mad.sh:1583: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs" -./mad.sh:1586: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}" -./mad.sh:1589: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) -./mad.sh:1591: debugHtml "github" "lbf_inst_curlimp$j" "$response" -./mad.sh:1594: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") -./mad.sh:1604: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { -./mad.sh:1606: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1609: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") -./mad.sh:1611: debugHtml "github" "head_inst_curlimp$j" "${file_header}" -./mad.sh:1659: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" -./mad.sh:1688: echo -e "| Extracting curl_impersonate..." 
-./mad.sh:1690: rm -f "${ScriptDir}"/curl* -./mad.sh:1691: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/" -./mad.sh:1692: mv "$extract_location/curl_chrome131" "${ScriptDir}/" -./mad.sh:1693: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." -./mad.sh:1855: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :" -./mad.sh:1863: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1864: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1873: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1875: echo -e "$maud_curl" -./mad.sh:1877: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1879: echo -e "$maud_torcurl" -./mad.sh:1891: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1892: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1901: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" -./mad.sh:1903: echo -e "$maud_curl" -./mad.sh:1905: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1907: echo -e "$maud_torcurl" -./mad.sh:1913: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1914: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1923: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1925: echo -e "$maud_curl" -./mad.sh:1927: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1929: echo -e "$maud_torcurl" -./mad.sh:2876: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./mad.sh:2877: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" -./mad.sh:2879: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" -./mad.sh:3051: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./mad.sh:3052: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" -./mad.sh:3054: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" -./mad.sh:3252: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ -./mad.sh:3259: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | -./mad.sh:3396: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" -./mad.sh:3449: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./mad.sh:3451: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./mad.sh:3649: response=$(tor_curl_upload --insecure -i \ -./mad.sh:3656: response=$(tor_curl_upload --insecure -i \ -./mad.sh:3727:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./mad.sh:3728: curl_impersonate=() -./mad.sh:3729: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1) -./mad.sh:3730: bFoundCurlHeader=false -./mad.sh:3734: curl_impersonate=($fil) -./mad.sh:3735: bFoundCurlHeader=true -./mad.sh:3739: if [[ "$bFoundCurlHeader" == "false" ]]; then -./mad.sh:3740: echo -e "${RED}[ERROR] Missing 
dependency \"curl-impersonate\"!${NC}" -./mad.sh:3743: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}." -./mad.sh:3746: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\"" -./mad.sh:3748: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL." -./mad.sh:3752: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}." -./mad.sh:3753: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script." -./mad.sh:3756: echo -e "run $0 install_curl_impersonate\\n" -./mad.sh:3758: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && { -./mad.sh:3759: UseTorCurlImpersonate=false -./mad.sh:3760: install_curl_impersonate -./mad.sh:3844: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)" -./mad.sh:3845: printf " %s install_curl_impersonate\\n" "$0" -./mad.sh:3923:elif [[ "$arg1" == "install_curl_impersonate" ]]; then -./mad.sh:3924: install_curl_impersonate -./mad.sh:3955:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then -./mad.sh:3956: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" -./mad.sh:3958: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./mad.sh:405: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@" +./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@" +./mad.sh:411: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" +./mad.sh:413: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" +./mad.sh:1415:install_curl_impersonate() { +./mad.sh:1417: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive." +./mad.sh:1418: echo -e "- Currently uses curl v8.1.1." +./mad.sh:1422: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." 
+./mad.sh:1423: echo -e "+ Currently uses curl v8.7.1" +./mad.sh:1427: PS3='Please select which curl_impersonate to install: ' +./mad.sh:1435: install_curl_impersonate_lwthiker_orig +./mad.sh:1439: install_curl_impersonate_lexiforest_fork +./mad.sh:1449:install_curl_impersonate_lwthiker_orig() { +./mad.sh:1453: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate." +./mad.sh:1454: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates" +./mad.sh:1457: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}" +./mad.sh:1460: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) +./mad.sh:1462: debugHtml "github" "lbf_inst_curlimp$j" "$response" +./mad.sh:1465: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") +./mad.sh:1475: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { +./mad.sh:1477: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1480: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") +./mad.sh:1482: debugHtml "github" "head_inst_curlimp$j" "${file_header}" +./mad.sh:1530: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1559: echo -e "| Extracting curl_impersonate..." +./mad.sh:1561: rm -f "${ScriptDir}"/curl* +./mad.sh:1562: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/" +./mad.sh:1563: mv "$extract_location/curl_ff109" "${ScriptDir}/" +./mad.sh:1564: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." +./mad.sh:1572:install_curl_impersonate_lexiforest_fork() { +./mad.sh:1576: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." +./mad.sh:1577: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs" +./mad.sh:1580: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}" +./mad.sh:1583: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1585: debugHtml "github" "lbf_inst_curlimp$j" "$response" +./mad.sh:1588: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") +./mad.sh:1598: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { +./mad.sh:1600: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1603: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") +./mad.sh:1605: debugHtml "github" "head_inst_curlimp$j" "${file_header}" +./mad.sh:1653: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1682: echo -e "| Extracting curl_impersonate..." +./mad.sh:1684: rm -f "${ScriptDir}"/curl* +./mad.sh:1685: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/" +./mad.sh:1686: mv "$extract_location/curl_chrome131" "${ScriptDir}/" +./mad.sh:1687: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." 
+./mad.sh:1849: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :" +./mad.sh:1857: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1858: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1867: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1869: echo -e "$maud_curl" +./mad.sh:1871: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1873: echo -e "$maud_torcurl" +./mad.sh:1885: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1886: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1895: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" +./mad.sh:1897: echo -e "$maud_curl" +./mad.sh:1899: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1901: echo -e "$maud_torcurl" +./mad.sh:1907: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1908: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1917: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1919: echo -e "$maud_curl" +./mad.sh:1921: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1923: echo -e "$maud_torcurl" +./mad.sh:2878: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:2879: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:2881: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./mad.sh:3053: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:3054: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:3056: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./mad.sh:3254: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ +./mad.sh:3261: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./mad.sh:3398: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" +./mad.sh:3455: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./mad.sh:3457: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./mad.sh:3655: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3662: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3733:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:3734: curl_impersonate=() +./mad.sh:3735: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1) +./mad.sh:3736: bFoundCurlHeader=false +./mad.sh:3740: curl_impersonate=($fil) +./mad.sh:3741: bFoundCurlHeader=true +./mad.sh:3745: if [[ "$bFoundCurlHeader" == "false" ]]; then +./mad.sh:3746: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}" +./mad.sh:3749: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}." +./mad.sh:3752: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\"" +./mad.sh:3754: echo -e " 1. 
Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL." +./mad.sh:3758: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}." +./mad.sh:3759: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script." +./mad.sh:3762: echo -e "run $0 install_curl_impersonate\\n" +./mad.sh:3764: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && { +./mad.sh:3765: UseTorCurlImpersonate=false +./mad.sh:3766: install_curl_impersonate +./mad.sh:3850: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)" +./mad.sh:3851: printf " %s install_curl_impersonate\\n" "$0" +./mad.sh:3929:elif [[ "$arg1" == "install_curl_impersonate" ]]; then +./mad.sh:3930: install_curl_impersonate +./mad.sh:3961:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:3962: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:3964: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" ./plugins/pjscloud.sh:44: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \ ./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \ diff --git a/.audit/mad-audit-http.log b/.audit/mad-audit-http.log index b091eef..9c39085 100755 --- a/.audit/mad-audit-http.log +++ b/.audit/mad-audit-http.log @@ -1,4 +1,4 @@ -DateTime: 25.02.20 +DateTime: 25.02.24 Files: ./hosts/1fichier.sh @@ -11,6 +11,7 @@ Files: ./hosts/ateasystems.sh ./hosts/bedrive.sh ./hosts/biteblob.sh +./hosts/blackcloud_onion.sh ./hosts/bowfile.sh ./hosts/click.sh ./hosts/cyssoux.sh @@ -63,6 +64,7 @@ Files: ./hosts/ramsgaard.sh ./hosts/ranoz.sh ./hosts/sendnow.sh +./hosts/sendspace.sh ./hosts/shareonline.sh ./hosts/skrepr.sh ./hosts/soyjak.sh @@ -163,8 +165,8 @@ _________________________________________________________________________ ./hosts/1fichier.sh:166: if ! 
grep -Eqi "https://" <<< "${target_file_link}" > /dev/null ; then ./hosts/9saves.sh:141: --data "$form_data" "https://9saves.com/") ./hosts/anonfile.sh:230: if grep -Eqi 'img src="https://anonfile.de/captchas/' <<< "$response" ; then -./hosts/anonfile.sh:414: if grep -Eqi '^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed) -./mad.sh:719: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https -./mad.sh:722: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:724: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:745: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed) -./mad.sh:747: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https -./mad.sh:750: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:752: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:773: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed) -./mad.sh:775: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https -./mad.sh:778: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:780: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:802: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed) -./mad.sh:804: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https -./mad.sh:807: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:809: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:833: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed) -./mad.sh:835: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https -./mad.sh:838: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:840: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:866: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed) -./mad.sh:868: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https -./mad.sh:888: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed) -./mad.sh:909: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed) -./mad.sh:911: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https -./mad.sh:914: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:916: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:932: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed) -./mad.sh:934: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https -./mad.sh:937: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:939: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" 
#direct url https -./mad.sh:958: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed) -./mad.sh:960: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https -./mad.sh:963: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:965: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:985: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed) -./mad.sh:987: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https -./mad.sh:990: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:992: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:1010: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed) -./mad.sh:1012: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https -./mad.sh:1015: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:1017: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:1036: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed) -./mad.sh:1038: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https -./mad.sh:1041: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:1043: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:1466: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) -./mad.sh:1483: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1589: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) -./mad.sh:1606: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1869: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1897: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1919: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:3235: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then -./mad.sh:3766:arg2="$2" # auto, filelist, -./mad.sh:3863: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar" -./mad.sh:3865: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z" -./mad.sh:3867: echo -e ' - ie. 
direct=http://somehost.onion/abcD|filename.part1.rar' -./mad.sh:4086: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4087: remote_url=${remote_url/http:/https:} -./mad.sh:4108: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4109: remote_url=${remote_url/http:/https:} -./mad.sh:4475: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4476: remote_url=${remote_url/http:/https:} -./mad.sh:4534: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4535: remote_url=${remote_url/http:/https:} -./mad.sh:4561: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4562: remote_url=${remote_url/http:/https:} +./mad.sh:711: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed) +./mad.sh:713: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https +./mad.sh:716: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:718: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:739: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed) +./mad.sh:741: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https +./mad.sh:744: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:746: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:767: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed) +./mad.sh:769: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https +./mad.sh:772: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:774: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:796: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed) +./mad.sh:798: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https +./mad.sh:801: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:803: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:827: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed) +./mad.sh:829: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https +./mad.sh:832: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:834: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:860: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed) +./mad.sh:862: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https +./mad.sh:882: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed) +./mad.sh:903: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed) +./mad.sh:905: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https +./mad.sh:908: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:910: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:926: sed -i -e 
"s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed) +./mad.sh:928: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https +./mad.sh:931: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:933: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:952: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed) +./mad.sh:954: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https +./mad.sh:957: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:959: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:979: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed) +./mad.sh:981: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https +./mad.sh:984: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:986: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1004: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed) +./mad.sh:1006: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https +./mad.sh:1009: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:1011: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1030: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed) +./mad.sh:1032: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https +./mad.sh:1035: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:1037: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1460: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) +./mad.sh:1477: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1583: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1600: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1863: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1891: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1913: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:3237: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then +./mad.sh:3772:arg2="$2" # auto, filelist, +./mad.sh:3869: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar" +./mad.sh:3871: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z" +./mad.sh:3873: echo -e ' - ie. 
direct=http://somehost.onion/abcD|filename.part1.rar' +./mad.sh:4092: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4093: remote_url=${remote_url/http:/https:} +./mad.sh:4114: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4115: remote_url=${remote_url/http:/https:} +./mad.sh:4481: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4482: remote_url=${remote_url/http:/https:} +./mad.sh:4540: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4541: remote_url=${remote_url/http:/https:} +./mad.sh:4567: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4568: remote_url=${remote_url/http:/https:} ./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null) ./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null) diff --git a/.audit/mad-audit-tor_curl-details.log b/.audit/mad-audit-tor_curl-details.log index 695327c..168a821 100755 --- a/.audit/mad-audit-tor_curl-details.log +++ b/.audit/mad-audit-tor_curl-details.log @@ -1,4 +1,4 @@ -DateTime: 25.02.20 +DateTime: 25.02.24 Files: ./hosts/1fichier.sh @@ -11,6 +11,7 @@ Files: ./hosts/ateasystems.sh ./hosts/bedrive.sh ./hosts/biteblob.sh +./hosts/blackcloud_onion.sh ./hosts/bowfile.sh ./hosts/click.sh ./hosts/cyssoux.sh @@ -63,6 +64,7 @@ Files: ./hosts/ramsgaard.sh ./hosts/ranoz.sh ./hosts/sendnow.sh +./hosts/sendspace.sh ./hosts/shareonline.sh ./hosts/skrepr.sh ./hosts/soyjak.sh @@ -340,54 +342,42 @@ _________________________________________________________________________ ./hosts/anonfile.sh:349: printf "\\n" ./hosts/anonfile.sh:350: echo -e "${RED}| Failed to extract download link [3].${NC}" -- -./hosts/anonfile.sh:453: file_header=$(tor_curl_request -i -s --head \ -./hosts/anonfile.sh:454: --referer "${fixed_url}" \ -./hosts/anonfile.sh:455: "$download_url") -./hosts/anonfile.sh:456: if [[ "${DebugAllEnabled}" == "true" ]] ; then -./hosts/anonfile.sh:457: debugHtml "${remote_url##*/}" "anon_head$j" "download_url: ${download_url}"$'\n'"${file_header}" -./hosts/anonfile.sh:458: fi -./hosts/anonfile.sh:459: if [[ -z $file_header ]] ; then -./hosts/anonfile.sh:460: if [[ $j == $maxfetchretries ]] ; then -./hosts/anonfile.sh:461: rm -f "${anon_cookie_jar}"; -./hosts/anonfile.sh:462: printf "\\n" -./hosts/anonfile.sh:463: echo -e "${RED}| Failed to extract file info${NC}" +./hosts/anonfile.sh:519: tor_curl_request --insecure \ +./hosts/anonfile.sh:520: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/anonfile.sh:521: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ +./hosts/anonfile.sh:522: --referer "${fixed_url}" \ +./hosts/anonfile.sh:523: "$download_url" --output "$file_path" +./hosts/anonfile.sh:524: else +./hosts/anonfile.sh:525: tor_curl_request --insecure \ +./hosts/anonfile.sh:526: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ +./hosts/anonfile.sh:527: --referer "${fixed_url}" \ +./hosts/anonfile.sh:528: "$download_url" --output "$file_path" +./hosts/anonfile.sh:529: fi +./hosts/anonfile.sh:530: else +./hosts/anonfile.sh:531: if [[ "${RateMonitorEnabled}" == "true" ]]; then +./hosts/anonfile.sh:532: tor_curl_request --insecure \ +./hosts/anonfile.sh:533: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/anonfile.sh:534: -H "User-Agent: $RandomUA" \ +./hosts/anonfile.sh:535: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/anonfile.sh:536: -H "Accept-Language: en-US,en;q=0.5" \ 
+./hosts/anonfile.sh:537: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/anonfile.sh:538: -H "Connection: keep-alive" \ +./hosts/anonfile.sh:539: -H "Cookie: lng=eng" \ +./hosts/anonfile.sh:540: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/anonfile.sh:541: -H "Sec-Fetch-Dest: document" \ +./hosts/anonfile.sh:542: -H "Sec-Fetch-Mode: navigate" \ -- -./hosts/anonfile.sh:559: tor_curl_request --insecure \ -./hosts/anonfile.sh:560: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/anonfile.sh:561: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ -./hosts/anonfile.sh:562: --referer "${fixed_url}" \ -./hosts/anonfile.sh:563: "$download_url" --output "$file_path" -./hosts/anonfile.sh:564: else -./hosts/anonfile.sh:565: tor_curl_request --insecure \ -./hosts/anonfile.sh:566: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ -./hosts/anonfile.sh:567: --referer "${fixed_url}" \ -./hosts/anonfile.sh:568: "$download_url" --output "$file_path" -./hosts/anonfile.sh:569: fi -./hosts/anonfile.sh:570: else -./hosts/anonfile.sh:571: if [[ "${RateMonitorEnabled}" == "true" ]]; then -./hosts/anonfile.sh:572: tor_curl_request --insecure \ -./hosts/anonfile.sh:573: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/anonfile.sh:574: -H "User-Agent: $RandomUA" \ -./hosts/anonfile.sh:575: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/anonfile.sh:576: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/anonfile.sh:577: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/anonfile.sh:578: -H "Connection: keep-alive" \ -./hosts/anonfile.sh:579: -H "Cookie: lng=eng" \ -./hosts/anonfile.sh:580: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/anonfile.sh:581: -H "Sec-Fetch-Dest: document" \ -./hosts/anonfile.sh:582: -H "Sec-Fetch-Mode: navigate" \ --- -./hosts/anonfile.sh:589: tor_curl_request --insecure \ -./hosts/anonfile.sh:590: -H "User-Agent: $RandomUA" \ -./hosts/anonfile.sh:591: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/anonfile.sh:592: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/anonfile.sh:593: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/anonfile.sh:594: -H "Connection: keep-alive" \ -./hosts/anonfile.sh:595: -H "Cookie: lng=eng" \ -./hosts/anonfile.sh:596: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/anonfile.sh:597: -H "Sec-Fetch-Dest: document" \ -./hosts/anonfile.sh:598: -H "Sec-Fetch-Mode: navigate" \ -./hosts/anonfile.sh:599: -H "Sec-Fetch-Site: same-origin" \ +./hosts/anonfile.sh:549: tor_curl_request --insecure \ +./hosts/anonfile.sh:550: -H "User-Agent: $RandomUA" \ +./hosts/anonfile.sh:551: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/anonfile.sh:552: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/anonfile.sh:553: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/anonfile.sh:554: -H "Connection: keep-alive" \ +./hosts/anonfile.sh:555: -H "Cookie: lng=eng" \ +./hosts/anonfile.sh:556: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/anonfile.sh:557: -H "Sec-Fetch-Dest: document" \ +./hosts/anonfile.sh:558: -H "Sec-Fetch-Mode: navigate" \ +./hosts/anonfile.sh:559: -H "Sec-Fetch-Site: same-origin" \ -- ./hosts/anonsharing.sh:91: response=$(tor_curl_request --insecure -i -s \ ./hosts/anonsharing.sh:92: -b "${ansh_cookie_jar}" -c "${ansh_cookie_jar}" \ @@ -429,7 +419,7 @@ _________________________________________________________________________ 
./hosts/anonsharing.sh:278: if [[ -f "$file_path" ]] ; then ./hosts/anonsharing.sh:279: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/anonsharing.sh:280: fi -./hosts/anonsharing.sh:281: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/anonsharing.sh:281: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then ./hosts/anonsharing.sh:282: containsHtml=false ./hosts/anonsharing.sh:283: else ./hosts/anonsharing.sh:284: containsHtml=true @@ -470,7 +460,7 @@ _________________________________________________________________________ ./hosts/ateasystems.sh:240: --output "$file_path" --output "$file_path" ./hosts/ateasystems.sh:241: fi ./hosts/ateasystems.sh:242: fi -./hosts/ateasystems.sh:243: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/ateasystems.sh:243: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - 0))" ; then ./hosts/ateasystems.sh:244: containsHtml=false ./hosts/ateasystems.sh:245: else ./hosts/ateasystems.sh:246: containsHtml=true @@ -572,19 +562,19 @@ _________________________________________________________________________ ./hosts/biteblob.sh:236: printf "\n\n" ./hosts/biteblob.sh:237: fi -- -./hosts/biteblob.sh:271: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./hosts/biteblob.sh:272: else -./hosts/biteblob.sh:273: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./hosts/biteblob.sh:274: fi -./hosts/biteblob.sh:275: received_file_size=0 -./hosts/biteblob.sh:276: if [[ -f "$file_path" ]] ; then -./hosts/biteblob.sh:277: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./hosts/biteblob.sh:278: fi -./hosts/biteblob.sh:279: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./hosts/biteblob.sh:280: containsHtml=false -./hosts/biteblob.sh:281: else -./hosts/biteblob.sh:282: containsHtml=true -./hosts/biteblob.sh:283: fi +./hosts/biteblob.sh:284: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/biteblob.sh:285: else +./hosts/biteblob.sh:286: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/biteblob.sh:287: fi +./hosts/biteblob.sh:288: received_file_size=0 +./hosts/biteblob.sh:289: if [[ -f "$file_path" ]] ; then +./hosts/biteblob.sh:290: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/biteblob.sh:291: fi +./hosts/biteblob.sh:292: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then +./hosts/biteblob.sh:293: containsHtml=false +./hosts/biteblob.sh:294: else +./hosts/biteblob.sh:295: containsHtml=true +./hosts/biteblob.sh:296: fi -- ./hosts/bowfile.sh:91: response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ ./hosts/bowfile.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ @@ -1202,7 +1192,7 @@ _________________________________________________________________________ ./hosts/examples/ExampleNewHost.sh:204: if [[ -f "$file_path" ]] ; then ./hosts/examples/ExampleNewHost.sh:205: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/examples/ExampleNewHost.sh:206: fi 
-./hosts/examples/ExampleNewHost.sh:207: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/examples/ExampleNewHost.sh:207: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/examples/ExampleNewHost.sh:208: containsHtml=false
./hosts/examples/ExampleNewHost.sh:209: else
./hosts/examples/ExampleNewHost.sh:210: containsHtml=true
@@ -1321,7 +1311,7 @@ _________________________________________________________________________
./hosts/fileditch.sh:192: if [[ -f "$file_path" ]] ; then
./hosts/fileditch.sh:193: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/fileditch.sh:194: fi
-./hosts/fileditch.sh:195: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/fileditch.sh:195: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/fileditch.sh:196: containsHtml=false
./hosts/fileditch.sh:197: else
./hosts/fileditch.sh:198: containsHtml=true
@@ -1395,7 +1385,7 @@ _________________________________________________________________________
./hosts/filedot.sh:504: if [[ -f "$file_path" ]] ; then
./hosts/filedot.sh:505: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/filedot.sh:506: fi
-./hosts/filedot.sh:507: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/filedot.sh:507: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/filedot.sh:508: containsHtml=false
./hosts/filedot.sh:509: else
./hosts/filedot.sh:510: containsHtml=true
@@ -1421,7 +1411,7 @@ _________________________________________________________________________
./hosts/filehaus.sh:196: if [[ -f "$file_path" ]] ; then
./hosts/filehaus.sh:197: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/filehaus.sh:198: fi
-./hosts/filehaus.sh:199: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/filehaus.sh:199: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/filehaus.sh:200: containsHtml=false
./hosts/filehaus.sh:201: else
./hosts/filehaus.sh:202: containsHtml=true
@@ -1459,7 +1449,7 @@ _________________________________________________________________________
./hosts/firestorage.sh:340: if [[ -f "$file_path" ]] ; then
./hosts/firestorage.sh:341: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/firestorage.sh:342: fi
-./hosts/firestorage.sh:343: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/firestorage.sh:343: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/firestorage.sh:344: containsHtml=false
./hosts/firestorage.sh:345: else
./hosts/firestorage.sh:346: containsHtml=true
@@ -1571,7 +1561,7 @@ _________________________________________________________________________
./hosts/hexload.sh:326: if [[ -f "$file_path" ]] ; then
./hosts/hexload.sh:327: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/hexload.sh:328: fi
-./hosts/hexload.sh:329: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/hexload.sh:329: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/hexload.sh:330: containsHtml=false
./hosts/hexload.sh:331: else
./hosts/hexload.sh:332: containsHtml=true
@@ -1620,7 +1610,7 @@ _________________________________________________________________________
./hosts/innocent.sh:217: if [[ -f "$file_path" ]] ; then
./hosts/innocent.sh:218: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/innocent.sh:219: fi
-./hosts/innocent.sh:220: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/innocent.sh:220: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/innocent.sh:221: containsHtml=false
./hosts/innocent.sh:222: else
./hosts/innocent.sh:223: containsHtml=true
@@ -1696,19 +1686,19 @@ _________________________________________________________________________
./hosts/isupload.sh:364: printf "\n\n"
./hosts/isupload.sh:365: fi
--
-./hosts/isupload.sh:399: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
-./hosts/isupload.sh:400: else
-./hosts/isupload.sh:401: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
-./hosts/isupload.sh:402: fi
-./hosts/isupload.sh:403: received_file_size=0
-./hosts/isupload.sh:404: if [[ -f "$file_path" ]] ; then
-./hosts/isupload.sh:405: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+./hosts/isupload.sh:403: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./hosts/isupload.sh:404: else
+./hosts/isupload.sh:405: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:406: fi
-./hosts/isupload.sh:407: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
-./hosts/isupload.sh:408: containsHtml=false
-./hosts/isupload.sh:409: else
-./hosts/isupload.sh:410: containsHtml=true
-./hosts/isupload.sh:411: fi
+./hosts/isupload.sh:407: received_file_size=0
+./hosts/isupload.sh:408: if [[ -f "$file_path" ]] ; then
+./hosts/isupload.sh:409: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+./hosts/isupload.sh:410: fi
+./hosts/isupload.sh:411: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
+./hosts/isupload.sh:412: containsHtml=false
+./hosts/isupload.sh:413: else
+./hosts/isupload.sh:414: containsHtml=true
+./hosts/isupload.sh:415: fi
--
./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}")
./hosts/kraken.sh:105: if [[ "${DebugAllEnabled}" == "true" ]] ; then
@@ -1754,7 +1744,7 @@ _________________________________________________________________________
./hosts/kraken.sh:291: if [[ -f "$file_path" ]] ; then
./hosts/kraken.sh:292: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/kraken.sh:293: fi
-./hosts/kraken.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/kraken.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/kraken.sh:295: containsHtml=false
./hosts/kraken.sh:296: else
./hosts/kraken.sh:297: containsHtml=true
@@ -1793,7 +1783,7 @@ _________________________________________________________________________
./hosts/mediafire.sh:282: if [[ -f "$file_path" ]] ; then
./hosts/mediafire.sh:283: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/mediafire.sh:284: fi
-./hosts/mediafire.sh:285: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/mediafire.sh:285: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/mediafire.sh:286: containsHtml=false
./hosts/mediafire.sh:287: else
./hosts/mediafire.sh:288: containsHtml=true
@@ -1832,37 +1822,37 @@ _________________________________________________________________________
./hosts/nippy.sh:305: if [[ -f "$file_path" ]] ; then
./hosts/nippy.sh:306: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/nippy.sh:307: fi
-./hosts/nippy.sh:308: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/nippy.sh:308: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/nippy.sh:309: containsHtml=false
./hosts/nippy.sh:310: else
./hosts/nippy.sh:311: containsHtml=true
./hosts/nippy.sh:312: fi
--
-./hosts/oshi.sh:108: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url")
-./hosts/oshi.sh:109: if [[ "${DebugAllEnabled}" == "true" ]] ; then
-./hosts/oshi.sh:110: debugHtml "${remote_url##*/}" "oshi_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
-./hosts/oshi.sh:111: fi
-./hosts/oshi.sh:112: if [[ ! -z "$file_header" ]] ; then
-./hosts/oshi.sh:113: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
-./hosts/oshi.sh:114: echo -e "${RED}| O shi-, (404). The file has been removed.${NC}"
-./hosts/oshi.sh:115: removedDownload "${remote_url}"
-./hosts/oshi.sh:116: exitDownloadNotAvailable=true
-./hosts/oshi.sh:117: return 1
-./hosts/oshi.sh:118: fi
+./hosts/oshi.sh:107: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url")
+./hosts/oshi.sh:108: if [[ "${DebugAllEnabled}" == "true" ]] ; then
+./hosts/oshi.sh:109: debugHtml "${remote_url##*/}" "oshi_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
+./hosts/oshi.sh:110: fi
+./hosts/oshi.sh:111: if [[ ! -z "$file_header" ]] ; then
+./hosts/oshi.sh:112: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
+./hosts/oshi.sh:113: echo -e "${RED}| O shi-, (404). The file has been removed.${NC}"
+./hosts/oshi.sh:114: removedDownload "${remote_url}"
+./hosts/oshi.sh:115: exitDownloadNotAvailable=true
+./hosts/oshi.sh:116: return 1
+./hosts/oshi.sh:117: fi
--
-./hosts/oshi.sh:202: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/oshi.sh:203: else
-./hosts/oshi.sh:204: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/oshi.sh:205: fi
-./hosts/oshi.sh:206: received_file_size=0
-./hosts/oshi.sh:207: if [[ -f "$file_path" ]] ; then
-./hosts/oshi.sh:208: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
-./hosts/oshi.sh:209: fi
-./hosts/oshi.sh:210: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
-./hosts/oshi.sh:211: containsHtml=false
-./hosts/oshi.sh:212: else
-./hosts/oshi.sh:213: containsHtml=true
-./hosts/oshi.sh:214: fi
+./hosts/oshi.sh:201: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/oshi.sh:202: else
+./hosts/oshi.sh:203: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/oshi.sh:204: fi
+./hosts/oshi.sh:205: received_file_size=0
+./hosts/oshi.sh:206: if [[ -f "$file_path" ]] ; then
+./hosts/oshi.sh:207: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+./hosts/oshi.sh:208: fi
+./hosts/oshi.sh:209: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
+./hosts/oshi.sh:210: containsHtml=false
+./hosts/oshi.sh:211: else
+./hosts/oshi.sh:212: containsHtml=true
+./hosts/oshi.sh:213: fi
--
./hosts/pixeldrain.sh:94: response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid")
./hosts/pixeldrain.sh:95: if [[ "${DebugAllEnabled}" == "true" ]] ; then
@@ -1876,47 +1866,47 @@ _________________________________________________________________________
./hosts/pixeldrain.sh:103: if ((i >= 5)) ; then
./hosts/pixeldrain.sh:104: printf "\\n"
--
-./hosts/pixeldrain.sh:256: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl")
-./hosts/pixeldrain.sh:257: if [[ "${DebugAllEnabled}" == "true" ]] ; then
-./hosts/pixeldrain.sh:258: debugHtml "${remote_url##*/}" "pd_head$i" "url: ${pdheadurl}"$'\n'"${file_header}"
-./hosts/pixeldrain.sh:259: fi
-./hosts/pixeldrain.sh:260: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
-./hosts/pixeldrain.sh:261: if ((i < 6)); then
-./hosts/pixeldrain.sh:262: continue
-./hosts/pixeldrain.sh:263: else
-./hosts/pixeldrain.sh:264: echo -e "${YELLOW}| Filesize not found.${NC}"
-./hosts/pixeldrain.sh:265: return 1
-./hosts/pixeldrain.sh:266: fi
+./hosts/pixeldrain.sh:259: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl")
+./hosts/pixeldrain.sh:260: if [[ "${DebugAllEnabled}" == "true" ]] ; then
+./hosts/pixeldrain.sh:261: debugHtml "${remote_url##*/}" "pd_head$i" "url: ${pdheadurl}"$'\n'"${file_header}"
+./hosts/pixeldrain.sh:262: fi
+./hosts/pixeldrain.sh:263: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
+./hosts/pixeldrain.sh:264: if ((i < 6)); then
+./hosts/pixeldrain.sh:265: continue
+./hosts/pixeldrain.sh:266: else
+./hosts/pixeldrain.sh:267: echo -e "${YELLOW}| Filesize not found.${NC}"
+./hosts/pixeldrain.sh:268: return 1
+./hosts/pixeldrain.sh:269: fi
--
-./hosts/pixeldrain.sh:324: tor_curl_request --insecure \
-./hosts/pixeldrain.sh:325: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-./hosts/pixeldrain.sh:326: --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/pixeldrain.sh:327: else
-./hosts/pixeldrain.sh:328: tor_curl_request --insecure \
+./hosts/pixeldrain.sh:327: tor_curl_request --insecure \
+./hosts/pixeldrain.sh:328: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/pixeldrain.sh:329: --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/pixeldrain.sh:330: fi
-./hosts/pixeldrain.sh:331: else
-./hosts/pixeldrain.sh:332: if [[ "${RateMonitorEnabled}" == "true" ]]; then
-./hosts/pixeldrain.sh:333: tor_curl_request --insecure \
-./hosts/pixeldrain.sh:334: -H "User-Agent: $RandomUA" \
-./hosts/pixeldrain.sh:335: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \
-./hosts/pixeldrain.sh:336: -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' \
-./hosts/pixeldrain.sh:337: -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' \
-./hosts/pixeldrain.sh:338: -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \
-./hosts/pixeldrain.sh:339: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-./hosts/pixeldrain.sh:340: --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/pixeldrain.sh:341: else
-./hosts/pixeldrain.sh:342: tor_curl_request --insecure \
-./hosts/pixeldrain.sh:343: -H "User-Agent: $RandomUA" \
-./hosts/pixeldrain.sh:344: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \
-./hosts/pixeldrain.sh:345: -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' \
-./hosts/pixeldrain.sh:346: -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' \
-./hosts/pixeldrain.sh:347: -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \
-./hosts/pixeldrain.sh:348: --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/pixeldrain.sh:349: fi
-./hosts/pixeldrain.sh:350: fi
-./hosts/pixeldrain.sh:351: received_file_size=0
-./hosts/pixeldrain.sh:352: if [[ -f "$file_path" ]] ; then
+./hosts/pixeldrain.sh:330: else
+./hosts/pixeldrain.sh:331: tor_curl_request --insecure \
+./hosts/pixeldrain.sh:332: --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/pixeldrain.sh:333: fi
+./hosts/pixeldrain.sh:334: else
+./hosts/pixeldrain.sh:335: if [[ "${RateMonitorEnabled}" == "true" ]]; then
+./hosts/pixeldrain.sh:336: tor_curl_request --insecure \
+./hosts/pixeldrain.sh:337: -H "User-Agent: $RandomUA" \
+./hosts/pixeldrain.sh:338: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \
+./hosts/pixeldrain.sh:339: -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' \
+./hosts/pixeldrain.sh:340: -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' \
+./hosts/pixeldrain.sh:341: -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \
+./hosts/pixeldrain.sh:342: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+./hosts/pixeldrain.sh:343: --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/pixeldrain.sh:344: else
+./hosts/pixeldrain.sh:345: tor_curl_request --insecure \
+./hosts/pixeldrain.sh:346: -H "User-Agent: $RandomUA" \
+./hosts/pixeldrain.sh:347: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \
+./hosts/pixeldrain.sh:348: -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' \
+./hosts/pixeldrain.sh:349: -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' \
+./hosts/pixeldrain.sh:350: -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \
+./hosts/pixeldrain.sh:351: --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/pixeldrain.sh:352: fi
+./hosts/pixeldrain.sh:353: fi
+./hosts/pixeldrain.sh:354: received_file_size=0
+./hosts/pixeldrain.sh:355: if [[ -f "$file_path" ]] ; then
--
./hosts/quax.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/quax.sh:86: if [[ "${DebugAllEnabled}" == "true" ]] ; then
@@ -1938,7 +1928,7 @@ _________________________________________________________________________
./hosts/quax.sh:181: if [[ -f "$file_path" ]] ; then
./hosts/quax.sh:182: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/quax.sh:183: fi
-./hosts/quax.sh:184: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/quax.sh:184: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/quax.sh:185: containsHtml=false
./hosts/quax.sh:186: else
./hosts/quax.sh:187: containsHtml=true
@@ -2001,89 +1991,143 @@ _________________________________________________________________________
./hosts/ranoz.sh:305: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:306: -H "Sec-Fetch-Site: same-origin" \
--
-./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
-./hosts/sendnow.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then
-./hosts/sendnow.sh:92: debugHtml "${remote_url##*/}" "snow_dwnpage$i" "${response}"
-./hosts/sendnow.sh:93: fi
-./hosts/sendnow.sh:94: if [[ -z $response ]] ; then
+./hosts/sendnow.sh:89: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
+./hosts/sendnow.sh:90: if [[ "${DebugAllEnabled}" == "true" ]] ; then
+./hosts/sendnow.sh:91: debugHtml "${remote_url##*/}" "snow_dwnpage$i" "${response}"
+./hosts/sendnow.sh:92: fi
+./hosts/sendnow.sh:93: if [[ -z $response ]] ; then
+./hosts/sendnow.sh:94: printf " ."
./hosts/sendnow.sh:95: rm -f "${snow_cookie_jar}";
./hosts/sendnow.sh:96: if [[ $i == $maxfetchretries ]] ; then
./hosts/sendnow.sh:97: printf "\\n"
./hosts/sendnow.sh:98: echo -e "${RED}| Failed to extract download link.${NC}"
./hosts/sendnow.sh:99: warnAndRetryUnknownError=true
-./hosts/sendnow.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then
--
-./hosts/sendnow.sh:160: response=$(tor_curl_request --insecure -L -svo. -X POST \
-./hosts/sendnow.sh:161: -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \
-./hosts/sendnow.sh:162: --data-raw "$form_data" "$remote_url" 2>&1)
-./hosts/sendnow.sh:163: if [[ "${DebugAllEnabled}" == "true" ]] ; then
-./hosts/sendnow.sh:164: debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
-./hosts/sendnow.sh:165: fi
-./hosts/sendnow.sh:166: if [[ -z $response ]] ; then
-./hosts/sendnow.sh:167: echo -e "${RED}| Failed to extract download link [2]${NC}"
-./hosts/sendnow.sh:168: warnAndRetryUnknownError=true
-./hosts/sendnow.sh:169: if [[ "${finalAttempt}" == "true" ]] ; then
-./hosts/sendnow.sh:170: rm -f "${snow_cookie_jar}";
+./hosts/sendnow.sh:162: response=$(tor_curl_request --insecure -L -svo. -X POST \
+./hosts/sendnow.sh:163: -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \
+./hosts/sendnow.sh:164: --data-raw "$form_data" "$remote_url" 2>&1)
+./hosts/sendnow.sh:165: if [[ "${DebugAllEnabled}" == "true" ]] ; then
+./hosts/sendnow.sh:166: debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
+./hosts/sendnow.sh:167: fi
+./hosts/sendnow.sh:168: if [[ -z $response ]] ; then
+./hosts/sendnow.sh:169: echo -e "${RED}| Failed to extract download link [2]${NC}"
+./hosts/sendnow.sh:170: warnAndRetryUnknownError=true
+./hosts/sendnow.sh:171: if [[ "${finalAttempt}" == "true" ]] ; then
+./hosts/sendnow.sh:172: rm -f "${snow_cookie_jar}";
--
-./hosts/sendnow.sh:204: file_header=$(tor_curl_request_extended --insecure --head -Lis \
-./hosts/sendnow.sh:205: -H "Host: $fshost" \
-./hosts/sendnow.sh:206: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
-./hosts/sendnow.sh:207: -H "Accept-Language: en-US,en;q=0.5" \
-./hosts/sendnow.sh:208: -H "Accept-Encoding: gzip, deflate, br, zstd" \
-./hosts/sendnow.sh:209: -H "Referer: https://send.now/" \
-./hosts/sendnow.sh:210: -H "Sec-GPC: 1" \
-./hosts/sendnow.sh:211: -H "Connection: keep-alive" \
-./hosts/sendnow.sh:212: -H "Upgrade-Insecure-Requests: 1" \
-./hosts/sendnow.sh:213: -H "Sec-Fetch-Dest: document" \
-./hosts/sendnow.sh:214: -H "Sec-Fetch-Mode: navigate" \
+./hosts/sendnow.sh:206: file_header=$(tor_curl_request_extended --insecure --head -Lis \
+./hosts/sendnow.sh:207: -H "Host: $fshost" \
+./hosts/sendnow.sh:208: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
+./hosts/sendnow.sh:209: -H "Accept-Language: en-US,en;q=0.5" \
+./hosts/sendnow.sh:210: -H "Accept-Encoding: gzip, deflate, br, zstd" \
+./hosts/sendnow.sh:211: -H "Referer: https://send.now/" \
+./hosts/sendnow.sh:212: -H "Sec-GPC: 1" \
+./hosts/sendnow.sh:213: -H "Connection: keep-alive" \
+./hosts/sendnow.sh:214: -H "Upgrade-Insecure-Requests: 1" \
+./hosts/sendnow.sh:215: -H "Sec-Fetch-Dest: document" \
+./hosts/sendnow.sh:216: -H "Sec-Fetch-Mode: navigate" \
--
-./hosts/sendnow.sh:327: tor_curl_request_extended --insecure -L --no-alpn \
-./hosts/sendnow.sh:328: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-./hosts/sendnow.sh:329: -H "Host: $fshost" \
-./hosts/sendnow.sh:330: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
-./hosts/sendnow.sh:331: -H "Accept-Language: en-US,en;q=0.5" \
-./hosts/sendnow.sh:332: -H "Accept-Encoding: gzip, deflate, br, zstd" \
-./hosts/sendnow.sh:333: -H "Referer: https://send.now/" \
-./hosts/sendnow.sh:334: -H "Sec-GPC: 1" \
-./hosts/sendnow.sh:335: -H "Connection: keep-alive" \
-./hosts/sendnow.sh:336: -H "Upgrade-Insecure-Requests: 1" \
-./hosts/sendnow.sh:337: -H "Sec-Fetch-Dest: document" \
+./hosts/sendnow.sh:329: tor_curl_request_extended --insecure -L --no-alpn \
+./hosts/sendnow.sh:330: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+./hosts/sendnow.sh:331: -H "Host: $fshost" \
+./hosts/sendnow.sh:332: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
+./hosts/sendnow.sh:333: -H "Accept-Language: en-US,en;q=0.5" \
+./hosts/sendnow.sh:334: -H "Accept-Encoding: gzip, deflate, br, zstd" \
+./hosts/sendnow.sh:335: -H "Referer: https://send.now/" \
+./hosts/sendnow.sh:336: -H "Sec-GPC: 1" \
+./hosts/sendnow.sh:337: -H "Connection: keep-alive" \
+./hosts/sendnow.sh:338: -H "Upgrade-Insecure-Requests: 1" \
+./hosts/sendnow.sh:339: -H "Sec-Fetch-Dest: document" \
--
-./hosts/sendnow.sh:345: tor_curl_request --insecure -L --no-alpn \
-./hosts/sendnow.sh:346: -H "Host: $fshost" \
-./hosts/sendnow.sh:347: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
-./hosts/sendnow.sh:348: -H "Accept-Language: en-US,en;q=0.5" \
-./hosts/sendnow.sh:349: -H "Accept-Encoding: gzip, deflate, br, zstd" \
-./hosts/sendnow.sh:350: -H "Referer: https://send.now/" \
-./hosts/sendnow.sh:351: -H "Sec-GPC: 1" \
-./hosts/sendnow.sh:352: -H "Connection: keep-alive" \
-./hosts/sendnow.sh:353: -H "Upgrade-Insecure-Requests: 1" \
-./hosts/sendnow.sh:354: -H "Sec-Fetch-Dest: document" \
-./hosts/sendnow.sh:355: -H "Sec-Fetch-Mode: navigate" \
+./hosts/sendnow.sh:347: tor_curl_request --insecure -L --no-alpn \
+./hosts/sendnow.sh:348: -H "Host: $fshost" \
+./hosts/sendnow.sh:349: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
+./hosts/sendnow.sh:350: -H "Accept-Language: en-US,en;q=0.5" \
+./hosts/sendnow.sh:351: -H "Accept-Encoding: gzip, deflate, br, zstd" \
+./hosts/sendnow.sh:352: -H "Referer: https://send.now/" \
+./hosts/sendnow.sh:353: -H "Sec-GPC: 1" \
+./hosts/sendnow.sh:354: -H "Connection: keep-alive" \
+./hosts/sendnow.sh:355: -H "Upgrade-Insecure-Requests: 1" \
+./hosts/sendnow.sh:356: -H "Sec-Fetch-Dest: document" \
+./hosts/sendnow.sh:357: -H "Sec-Fetch-Mode: navigate" \
--
-./hosts/sendnow.sh:364: tor_curl_request --insecure -L --no-alpn \
-./hosts/sendnow.sh:365: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-./hosts/sendnow.sh:366: -H "User-Agent: $RandomUA" \
-./hosts/sendnow.sh:367: -H "Host: $fshost" \
-./hosts/sendnow.sh:368: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
-./hosts/sendnow.sh:369: -H "Accept-Language: en-US,en;q=0.5" \
-./hosts/sendnow.sh:370: -H "Accept-Encoding: gzip, deflate, br, zstd" \
-./hosts/sendnow.sh:371: -H "Referer: https://send.now/" \
-./hosts/sendnow.sh:372: -H "Sec-GPC: 1" \
-./hosts/sendnow.sh:373: -H "Connection: keep-alive" \
-./hosts/sendnow.sh:374: -H "Upgrade-Insecure-Requests: 1" \
+./hosts/sendnow.sh:366: tor_curl_request --insecure -L --no-alpn \
+./hosts/sendnow.sh:367: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+./hosts/sendnow.sh:368: -H "User-Agent: $RandomUA" \
+./hosts/sendnow.sh:369: -H "Host: $fshost" \
+./hosts/sendnow.sh:370: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
+./hosts/sendnow.sh:371: -H "Accept-Language: en-US,en;q=0.5" \
+./hosts/sendnow.sh:372: -H "Accept-Encoding: gzip, deflate, br, zstd" \
+./hosts/sendnow.sh:373: -H "Referer: https://send.now/" \
+./hosts/sendnow.sh:374: -H "Sec-GPC: 1" \
"Sec-GPC: 1" \ +./hosts/sendnow.sh:375: -H "Connection: keep-alive" \ +./hosts/sendnow.sh:376: -H "Upgrade-Insecure-Requests: 1" \ -- -./hosts/sendnow.sh:383: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:384: -H "User-Agent: $RandomUA" \ -./hosts/sendnow.sh:385: -H "Host: $fshost" \ -./hosts/sendnow.sh:386: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ -./hosts/sendnow.sh:387: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/sendnow.sh:388: -H "Accept-Encoding: gzip, deflate, br, zstd" \ -./hosts/sendnow.sh:389: -H "Referer: https://send.now/" \ -./hosts/sendnow.sh:390: -H "Sec-GPC: 1" \ -./hosts/sendnow.sh:391: -H "Connection: keep-alive" \ -./hosts/sendnow.sh:392: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/sendnow.sh:393: -H "Sec-Fetch-Dest: document" \ +./hosts/sendnow.sh:385: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:386: -H "User-Agent: $RandomUA" \ +./hosts/sendnow.sh:387: -H "Host: $fshost" \ +./hosts/sendnow.sh:388: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ +./hosts/sendnow.sh:389: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/sendnow.sh:390: -H "Accept-Encoding: gzip, deflate, br, zstd" \ +./hosts/sendnow.sh:391: -H "Referer: https://send.now/" \ +./hosts/sendnow.sh:392: -H "Sec-GPC: 1" \ +./hosts/sendnow.sh:393: -H "Connection: keep-alive" \ +./hosts/sendnow.sh:394: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/sendnow.sh:395: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/sendspace.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ss_cookie_jar}" -c "${ss_cookie_jar}" "$remote_url") +./hosts/sendspace.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./hosts/sendspace.sh:92: debugHtml "${remote_url##*/}" "ss_dwnpage$i" "${response}" +./hosts/sendspace.sh:93: fi +./hosts/sendspace.sh:94: if [[ -z $response ]] ; then +./hosts/sendspace.sh:95: rm -f "${ss_cookie_jar}"; +./hosts/sendspace.sh:96: if [[ $i == $maxfetchretries ]] ; then +./hosts/sendspace.sh:97: printf "\\n" +./hosts/sendspace.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/sendspace.sh:99: warnAndRetryUnknownError=true +./hosts/sendspace.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then +-- +./hosts/sendspace.sh:157: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/sendspace.sh:158: -b "${ss_cookie_jar}" -c "${ss_cookie_jar}" \ +./hosts/sendspace.sh:159: "$download_url" | tr -d '\0') +./hosts/sendspace.sh:160: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./hosts/sendspace.sh:161: debugHtml "${remote_url##*/}" "ss_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" +./hosts/sendspace.sh:162: fi +./hosts/sendspace.sh:163: if [[ -z $file_header ]] ; then +./hosts/sendspace.sh:164: if ((j == maxfetchretries)) ; then +./hosts/sendspace.sh:165: rm -f "${ss_cookie_jar}"; +./hosts/sendspace.sh:166: printf "\\n" +./hosts/sendspace.sh:167: echo -e "${RED}| Failed to extract file info [1]${NC}" +-- +./hosts/sendspace.sh:275: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/sendspace.sh:276: else +./hosts/sendspace.sh:277: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" +./hosts/sendspace.sh:278: fi +./hosts/sendspace.sh:279: else +./hosts/sendspace.sh:280: if [[ "${RateMonitorEnabled}" == "true" ]]; then +./hosts/sendspace.sh:281: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time 
+./hosts/sendspace.sh:282: -H "User-Agent: $RandomUA" \
+./hosts/sendspace.sh:283: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
+./hosts/sendspace.sh:284: -H "Accept-Language: en-US,en;q=0.5" \
+./hosts/sendspace.sh:285: -H "Accept-Encoding: gzip, deflate, br" \
+./hosts/sendspace.sh:286: -H "Connection: keep-alive" \
+./hosts/sendspace.sh:287: -H "Cookie: lng=eng" \
+./hosts/sendspace.sh:288: -H "Upgrade-Insecure-Requests: 1" \
+./hosts/sendspace.sh:289: -H "Sec-Fetch-Dest: document" \
+./hosts/sendspace.sh:290: -H "Sec-Fetch-Mode: navigate" \
+./hosts/sendspace.sh:291: -H "Sec-Fetch-Site: same-origin" \
+--
+./hosts/sendspace.sh:296: tor_curl_request --insecure \
+./hosts/sendspace.sh:297: -H "User-Agent: $RandomUA" \
+./hosts/sendspace.sh:298: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
+./hosts/sendspace.sh:299: -H "Accept-Language: en-US,en;q=0.5" \
+./hosts/sendspace.sh:300: -H "Accept-Encoding: gzip, deflate, br" \
+./hosts/sendspace.sh:301: -H "Connection: keep-alive" \
+./hosts/sendspace.sh:302: -H "Cookie: lng=eng" \
+./hosts/sendspace.sh:303: -H "Upgrade-Insecure-Requests: 1" \
+./hosts/sendspace.sh:304: -H "Sec-Fetch-Dest: document" \
+./hosts/sendspace.sh:305: -H "Sec-Fetch-Mode: navigate" \
+./hosts/sendspace.sh:306: -H "Sec-Fetch-Site: same-origin" \
--
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:89: if [[ "${DebugAllEnabled}" == "true" ]] ; then
@@ -2120,7 +2164,7 @@ _________________________________________________________________________
./hosts/syspro.sh:208: --output "$file_path" --output "$file_path"
./hosts/syspro.sh:209: fi
./hosts/syspro.sh:210: fi
-./hosts/syspro.sh:211: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/syspro.sh:211: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/syspro.sh:212: containsHtml=false
./hosts/syspro.sh:213: else
./hosts/syspro.sh:214: containsHtml=true
@@ -2531,7 +2575,7 @@ _________________________________________________________________________
./hosts/uploadflix.sh:291: if [[ -f "$file_path" ]] ; then
./hosts/uploadflix.sh:292: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/uploadflix.sh:293: fi
-./hosts/uploadflix.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/uploadflix.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/uploadflix.sh:295: containsHtml=false
./hosts/uploadflix.sh:296: else
./hosts/uploadflix.sh:297: containsHtml=true
@@ -2569,7 +2613,7 @@ _________________________________________________________________________
./hosts/uploadhive.sh:252: if [[ -f "$file_path" ]] ; then
./hosts/uploadhive.sh:253: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/uploadhive.sh:254: fi
-./hosts/uploadhive.sh:255: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/uploadhive.sh:255: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/uploadhive.sh:256: containsHtml=false
./hosts/uploadhive.sh:257: else
./hosts/uploadhive.sh:258: containsHtml=true
@@ -3291,241 +3335,241 @@ _________________________________________________________________________
./hosts/youdbox.sh:281: if [[ -f "$file_path" ]] ; then
./hosts/youdbox.sh:282: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/youdbox.sh:283: fi
-./hosts/youdbox.sh:284: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+./hosts/youdbox.sh:284: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/youdbox.sh:285: containsHtml=false
./hosts/youdbox.sh:286: else
./hosts/youdbox.sh:287: containsHtml=true
./hosts/youdbox.sh:288: fi
--
-./mad.sh:394:tor_curl_request() {
-./mad.sh:395: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:396: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:397: else
-./mad.sh:398: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:399: fi
-./mad.sh:400:}
-./mad.sh:401:tor_curl_request_extended() {
-./mad.sh:402: randomtimeout=$((30 + RANDOM % (60 - 30)))
+./mad.sh:387:tor_curl_request() {
+./mad.sh:388: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:389: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:390: else
+./mad.sh:391: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:392: fi
+./mad.sh:393:}
+./mad.sh:394:tor_curl_request_extended() {
+./mad.sh:395: randomtimeout=$((30 + RANDOM % (60 - 30)))
+./mad.sh:396: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:397: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:398: else
+./mad.sh:399: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:400: fi
+./mad.sh:401:}
+./mad.sh:402:tor_curl_upload() {
./mad.sh:403: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:404: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
-./mad.sh:405: else
-./mad.sh:406: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
-./mad.sh:407: fi
-./mad.sh:408:}
-./mad.sh:409:tor_curl_upload() {
-./mad.sh:410: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:411: if [[ "${RateMonitorEnabled}" == "true" ]]; then
-./mad.sh:412: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
-./mad.sh:413: else
-./mad.sh:414: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
-./mad.sh:415: fi
-./mad.sh:416: else
-./mad.sh:417: if [[ "${RateMonitorEnabled}" == "true" ]]; then
-./mad.sh:418: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
-./mad.sh:419: else
+./mad.sh:404: if [[ "${RateMonitorEnabled}" == "true" ]]; then
+./mad.sh:405: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
+./mad.sh:406: else
+./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
+./mad.sh:408: fi
+./mad.sh:409: else
+./mad.sh:410: if [[ "${RateMonitorEnabled}" == "true" ]]; then
+./mad.sh:411: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
+./mad.sh:412: else
--
-./mad.sh:1466: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
-./mad.sh:1467: if [[ "${DebugAllEnabled}" == "true" ]] ; then
-./mad.sh:1468: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1469: fi
-./mad.sh:1470: if [[ ! -z "$response" ]]; then
-./mad.sh:1471: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
-./mad.sh:1472: latestBinaryDate=$(grep -oPi -m 1 '(?<== MaxDownloadRetries)) ; then
-./mad.sh:1545: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
-./mad.sh:1546: exit 1
+./mad.sh:1530: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
+./mad.sh:1531: received_file_size=0
+./mad.sh:1532: if [[ -f "$file_path" ]] ; then
+./mad.sh:1533: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+./mad.sh:1534: fi
+./mad.sh:1535: if ((received_file_size == file_size_bytes)) ; then
+./mad.sh:1536: break
+./mad.sh:1537: elif ((received_file_size < file_size_bytes)) ; then
+./mad.sh:1538: if ((j >= MaxDownloadRetries)) ; then
+./mad.sh:1539: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
+./mad.sh:1540: exit 1
--
-./mad.sh:1589: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
-./mad.sh:1590: if [[ "${DebugAllEnabled}" == "true" ]] ; then
-./mad.sh:1591: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1592: fi
-./mad.sh:1593: if [[ ! -z "$response" ]]; then
-z "$response" ]]; then -./mad.sh:1594: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") -./mad.sh:1595: latestBinaryDate=$(grep -oPi -m 1 '(?<== MaxDownloadRetries)) ; then -./mad.sh:1668: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" -./mad.sh:1669: exit 1 +./mad.sh:1653: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1654: received_file_size=0 +./mad.sh:1655: if [[ -f "$file_path" ]] ; then +./mad.sh:1656: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:1657: fi +./mad.sh:1658: if ((received_file_size == file_size_bytes)) ; then +./mad.sh:1659: break +./mad.sh:1660: elif ((received_file_size < file_size_bytes)) ; then +./mad.sh:1661: if ((j >= MaxDownloadRetries)) ; then +./mad.sh:1662: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" +./mad.sh:1663: exit 1 -- -./mad.sh:1864: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1865: echo -e "Files:" -./mad.sh:1866: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1867: echo -e "" -./mad.sh:1868: echo -e "" -./mad.sh:1869: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1870: echo -e "_________________________________________________________________________" -./mad.sh:1871: echo -e "$maud_http" -./mad.sh:1872: echo -e "" -./mad.sh:1873: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1874: echo -e "_________________________________________________________________________" +./mad.sh:1858: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1859: echo -e "Files:" +./mad.sh:1860: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1861: echo -e "" +./mad.sh:1862: echo -e "" +./mad.sh:1863: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1864: echo -e "_________________________________________________________________________" +./mad.sh:1865: echo -e "$maud_http" +./mad.sh:1866: echo -e "" +./mad.sh:1867: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1868: echo -e "_________________________________________________________________________" -- -./mad.sh:1877: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1878: echo -e "_________________________________________________________________________" -./mad.sh:1879: echo -e "$maud_torcurl" -./mad.sh:1880: echo -e "" -./mad.sh:1881: echo -e "" -./mad.sh:1882: done -./mad.sh:1883: else -./mad.sh:1884: cd "$ScriptDir" -./mad.sh:1885: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) -./mad.sh:1886: cd "$WorkDir" -./mad.sh:1887: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +./mad.sh:1871: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1872: echo -e "_________________________________________________________________________" +./mad.sh:1873: echo -e "$maud_torcurl" +./mad.sh:1874: echo -e "" +./mad.sh:1875: echo -e "" +./mad.sh:1876: done +./mad.sh:1877: else +./mad.sh:1878: cd "$ScriptDir" +./mad.sh:1879: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +./mad.sh:1880: cd "$WorkDir" +./mad.sh:1881: readarray -d $'' arrFiles2 < <(find . 
-name "*.sh" -printf '%p\n' | sort -Vk1) -- -./mad.sh:1892: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1893: echo -e "Files:" -./mad.sh:1894: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1895: echo -e "" -./mad.sh:1896: echo -e "" -./mad.sh:1897: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1898: echo -e "_________________________________________________________________________" -./mad.sh:1899: echo -e "$maud_http" -./mad.sh:1900: echo -e "" -./mad.sh:1901: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" -./mad.sh:1902: echo -e "_________________________________________________________________________" +./mad.sh:1886: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1887: echo -e "Files:" +./mad.sh:1888: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1889: echo -e "" +./mad.sh:1890: echo -e "" +./mad.sh:1891: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1892: echo -e "_________________________________________________________________________" +./mad.sh:1893: echo -e "$maud_http" +./mad.sh:1894: echo -e "" +./mad.sh:1895: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" +./mad.sh:1896: echo -e "_________________________________________________________________________" -- -./mad.sh:1905: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1906: echo -e "_________________________________________________________________________" -./mad.sh:1907: echo -e "$maud_torcurl" -./mad.sh:1908: echo -e "" -./mad.sh:1909: done -./mad.sh:1910: for fil in "${arrFiles2[@]}"; -./mad.sh:1911: do -./mad.sh:1912: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):') -./mad.sh:1913: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1914: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1915: echo -e "Files:" -./mad.sh:1916: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1917: echo -e "" -./mad.sh:1918: echo -e "" -./mad.sh:1919: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1920: echo -e "_________________________________________________________________________" -./mad.sh:1921: echo -e "$maud_http" -./mad.sh:1922: echo -e "" -./mad.sh:1923: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1924: echo -e "_________________________________________________________________________" +./mad.sh:1899: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1900: echo -e "_________________________________________________________________________" +./mad.sh:1901: echo -e "$maud_torcurl" +./mad.sh:1902: echo -e "" +./mad.sh:1903: done +./mad.sh:1904: for fil in "${arrFiles2[@]}"; +./mad.sh:1905: do +./mad.sh:1906: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):') +./mad.sh:1907: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1908: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
+./mad.sh:1909: echo -e "Files:"
+./mad.sh:1910: echo -e "${BLUE}${fil}${NC}"
+./mad.sh:1911: echo -e ""
+./mad.sh:1912: echo -e ""
+./mad.sh:1913: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
+./mad.sh:1914: echo -e "_________________________________________________________________________"
+./mad.sh:1915: echo -e "$maud_http"
+./mad.sh:1916: echo -e ""
+./mad.sh:1917: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
+./mad.sh:1918: echo -e "_________________________________________________________________________"
--
-./mad.sh:1927: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1928: echo -e "_________________________________________________________________________"
-./mad.sh:1929: echo -e "$maud_torcurl"
-./mad.sh:1930: echo -e ""
-./mad.sh:1931: done
-./mad.sh:1932: fi
-./mad.sh:1933:}
-./mad.sh:1934:madStatus() {
-./mad.sh:1935: local InputFile="$1"
-./mad.sh:1936: if [[ "$arg1" == "status" ]] ; then
-./mad.sh:1937: clear
+./mad.sh:1921: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1922: echo -e "_________________________________________________________________________"
+./mad.sh:1923: echo -e "$maud_torcurl"
+./mad.sh:1924: echo -e ""
+./mad.sh:1925: done
+./mad.sh:1926: fi
+./mad.sh:1927:}
+./mad.sh:1928:madStatus() {
+./mad.sh:1929: local InputFile="$1"
+./mad.sh:1930: if [[ "$arg1" == "status" ]] ; then
+./mad.sh:1931: clear
--
-./mad.sh:3252: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
-./mad.sh:3253: -H "Connection: keep-alive" \
-./mad.sh:3254: -w 'EffectiveUrl=%{url_effective}' \
-./mad.sh:3255: "$download_url")
-./mad.sh:3256: else
-./mad.sh:3257: printf "| Retrieving Head: attempt #$j"
-./mad.sh:3258: rm -f "${WorkDir}/.temp/directhead"
-./mad.sh:3259: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
-./mad.sh:3260: tee "${WorkDir}/.temp/directhead" &
-./mad.sh:3261: sleep 6
-./mad.sh:3262: [ -s "${WorkDir}/.temp/directhead" ]
-./mad.sh:3263: kill $! 2>/dev/null
-./mad.sh:3264: )
-./mad.sh:3265: if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then
-./mad.sh:3266: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
-./mad.sh:3267: fi
-./mad.sh:3268: rm -f "${WorkDir}/.temp/directhead"
-./mad.sh:3269: fi
+./mad.sh:3254: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
+./mad.sh:3255: -H "Connection: keep-alive" \
+./mad.sh:3256: -w 'EffectiveUrl=%{url_effective}' \
+./mad.sh:3257: "$download_url")
+./mad.sh:3258: else
+./mad.sh:3259: printf "| Retrieving Head: attempt #$j"
+./mad.sh:3260: rm -f "${WorkDir}/.temp/directhead"
+./mad.sh:3261: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
+./mad.sh:3262: tee "${WorkDir}/.temp/directhead" &
+./mad.sh:3263: sleep 6
+./mad.sh:3264: [ -s "${WorkDir}/.temp/directhead" ]
+./mad.sh:3265: kill $! 2>/dev/null
+./mad.sh:3266: )
+./mad.sh:3267: if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then
+./mad.sh:3268: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+./mad.sh:3269: fi
+./mad.sh:3270: rm -f "${WorkDir}/.temp/directhead"
+./mad.sh:3271: fi
--
-./mad.sh:3396: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
-./mad.sh:3397: rc=$?
-./mad.sh:3398: if ((rc != 0 )) ; then
-./mad.sh:3399: printf "${RED}Download Failed (bad exit status).${NC}"
-./mad.sh:3400: if [[ -f ${file_path} ]]; then
-./mad.sh:3401: printf "${YELLOW} Partial removed...${NC}"
-./mad.sh:3402: printf "\n\n"
-./mad.sh:3403: rm -f "${file_path}"
-./mad.sh:3404: else
-./mad.sh:3405: printf "\n\n"
-./mad.sh:3406: fi
+./mad.sh:3398: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
+./mad.sh:3399: rc=$?
+./mad.sh:3400: if ((rc != 0 )) ; then
+./mad.sh:3401: printf "${RED}Download Failed (bad exit status).${NC}"
+./mad.sh:3402: if [[ -f ${file_path} ]]; then
+./mad.sh:3403: printf "${YELLOW} Partial removed...${NC}"
+./mad.sh:3404: printf "\n\n"
+./mad.sh:3405: rm -f "${file_path}"
+./mad.sh:3406: else
+./mad.sh:3407: printf "\n\n"
+./mad.sh:3408: fi
--
-./mad.sh:3449: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3450: else
-./mad.sh:3451: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3452: fi
-./mad.sh:3453: received_file_size=0
-./mad.sh:3454: if [[ -f "$file_path" ]] ; then
-./mad.sh:3455: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
-./mad.sh:3456: fi
-./mad.sh:3457: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
-./mad.sh:3458: containsHtml=false
-./mad.sh:3459: else
-./mad.sh:3460: containsHtml=true
-./mad.sh:3461: fi
+./mad.sh:3455: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3456: else
+./mad.sh:3457: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3458: fi
+./mad.sh:3459: received_file_size=0
+./mad.sh:3460: if [[ -f "$file_path" ]] ; then
+./mad.sh:3461: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+./mad.sh:3462: fi
+./mad.sh:3463: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
+./mad.sh:3464: containsHtml=false
+./mad.sh:3465: else
+./mad.sh:3466: containsHtml=true
+./mad.sh:3467: fi
--
-./mad.sh:3649: response=$(tor_curl_upload --insecure -i \
-./mad.sh:3650: -H "Content-Type: multipart/form-data" \
-./mad.sh:3651: -F "key=" \
-./mad.sh:3652: -F "time=$jira_timeval" \
-./mad.sh:3653: -F "file=@${filepath}" \
-./mad.sh:3654: "${jira_PostUrlHost}")
-./mad.sh:3655: else
-./mad.sh:3656: response=$(tor_curl_upload --insecure -i \
-./mad.sh:3657: -H "Content-Type: multipart/form-data" \
-./mad.sh:3658: -F "key=" \
-./mad.sh:3659: -F "time=$jira_timeval" \
-./mad.sh:3660: -F "files[]=@${arrFiles[@]}" \
-./mad.sh:3661: "${jira_PostUrlHost}")
-./mad.sh:3662: fi
-./mad.sh:3663: if [[ "${DebugAllEnabled}" == "true" ]] ; then
-./mad.sh:3664: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
-./mad.sh:3665: fi
-./mad.sh:3666: if grep -Eqi ' 200 ' <<< "${response}" ; then
+./mad.sh:3655: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3656: -H "Content-Type: multipart/form-data" \
+./mad.sh:3657: -F "key=" \
+./mad.sh:3658: -F "time=$jira_timeval" \
+./mad.sh:3659: -F "file=@${filepath}" \
+./mad.sh:3660: "${jira_PostUrlHost}")
+./mad.sh:3661: else
+./mad.sh:3662: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3663: -H "Content-Type: multipart/form-data" \
+./mad.sh:3664: -F "key=" \
+./mad.sh:3665: -F "time=$jira_timeval" \
+./mad.sh:3666: -F "files[]=@${arrFiles[@]}" \
+./mad.sh:3667: "${jira_PostUrlHost}")
+./mad.sh:3668: fi
+./mad.sh:3669: if [[ "${DebugAllEnabled}" == "true" ]] ; then
+./mad.sh:3670: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
+./mad.sh:3671: fi
+./mad.sh:3672: if grep -Eqi ' 200 ' <<< "${response}" ; then
diff --git a/documentation/!Changelog (Historical).txt b/documentation/!Changelog (Historical).txt
index f02973c..7cba523 100755
--- a/documentation/!Changelog (Historical).txt
+++ b/documentation/!Changelog (Historical).txt
@@ -3,6 +3,23 @@
#
# ---------- Initial release with MAD Uploader functionality ----------
+# 2025.01.30 - [isupload] Add handling of 404 Not Found on initial page fetch
+# 2025.01.23 - [mad] Do not check for supported host on "direct=" lines
+# 2025.01.19 - [fileditch] Add direct download url processing fileditchfiles.me (though they block Tor now)
+# 2025.01.18 - [up_nantes] Update the post retention to "week" (host removed "month" option)
+# 2025.01.18 - [mad] Updates to url_encode function and addition of conversion of utf8 to ascii function
+# 2025.01.17 - [ranoz] Servers response to resume changed, set as no resume type for now
+# 2025.01.17 - [uwabaki] Add download handling for uwabaki onion address urls
+# 2025.01.16 - [ranoz] Fix filenames with unicode chars in the download url
+# 2025.01.16 - [up_axfc] Move convert utf8 to ascii to mad function
+# 2025.01.16 - [up_uwabaki] Add uwabaki.party as upload host (1GB, no expiration, no DMCA, no logs)
+# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host
+# 2025.01.14 - [uwabaki] Add uwabaki.party as download host
+# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor ips (until alternative)
+# 2025.01.13 - [ocr_captcha] Create imagemagick OCR function for testing without tesseract
+# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function
+# 2025.01.13 - [up_anonfile] Modify to use new upload url
+# 2025.01.12 - [ateasystems] Update 404 Not found response
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
diff --git a/hosts/9saves.sh b/hosts/9saves.sh
index 096de88..2692298 100644
--- a/hosts/9saves.sh
+++ b/hosts/9saves.sh
@@ -339,7 +339,7 @@ ns_GetFile() {
if [[ -f "$file_path" ]] ; then
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
fi
- if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+ if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
containsHtml=false
else
containsHtml=true
diff --git a/hosts/anonfile.sh b/hosts/anonfile.sh
index ef2d49e..5455d2b 100644
--- a/hosts/anonfile.sh
+++ b/hosts/anonfile.sh
@@ -1,6 +1,6 @@
#! Name: anonfile.sh
#! Author: kittykat
-#! Version: 2025.01.13
+#! Version: 2025.02.21
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -381,6 +381,21 @@ anon_FetchFileInfo() { continue fi fi + if grep -Eqi 'You have reached the download-limit' <<< "$response"; then + if [[ $i == $maxfetchretries ]] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [limit].${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [limit]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi if grep -Eqi 'Just a moment...' <<< "$response"; then if [[ $i == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; @@ -411,18 +426,25 @@ anon_FetchFileInfo() { continue fi fi - if grep -Eqi '' <<< "$response" ; then + file_size_bytes=$(grep -oPi -m 1 '(?<= \().*?(?= bytes\).*$)' <<< "$response") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + fi + if [[ -z "$file_size_bytes" ]]; then + printf "\\n" + echo -e "${RED}| Failed to extract file info [3]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info [3]" "" + fi + return 1 + fi + if grep -Eqi '.*$)' <<< "$response") filename="${download_url##*\/}" download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") - elif grep -Eqi '.*$)' <<< "$response") - filename="${download_url##*\/}" - download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") fi if [[ -z "$download_url" ]] ; then if [[ $i == $maxfetchretries ]] ; then @@ -443,68 +465,6 @@ anon_FetchFileInfo() { fi done rm -f "${anon_cookie_jar}"; - echo -e "${GREEN}# Fetching file info…${NC}" - maxfetchretries=3 - for ((j=1; j<=$maxfetchretries; j++)); do - printf " ." - CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - GetRandomUA - file_header=$(tor_curl_request -i -s --head \ - --referer "${fixed_url}" \ - "$download_url") - if [[ "${DebugAllEnabled}" == "true" ]] ; then - debugHtml "${remote_url##*/}" "anon_head$j" "download_url: ${download_url}"$'\n'"${file_header}" - fi - if [[ -z $file_header ]] ; then - if [[ $j == $maxfetchretries ]] ; then - rm -f "${anon_cookie_jar}"; - printf "\\n" - echo -e "${RED}| Failed to extract file info${NC}" - warnAndRetryUnknownError=true - if [[ "${finalAttempt}" == "true" ]] ; then - failedRetryDownload "${remote_url}" "Failed to extract file info" "" - fi - return 1 - else - tor_identity="${RANDOM}" - continue - fi - fi - if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [[ $j == $maxfetchretries ]] ; then - rm -f "${anon_cookie_jar}"; - printf "\\n" - echo -e "${RED}| Failed to extract file info${NC}" - warnAndRetryUnknownError=true - if [[ "${finalAttempt}" == "true" ]] ; then - failedRetryDownload "${remote_url}" "" "" - fi - return 1 - else - tor_identity="${RANDOM}" - continue - fi - fi - file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") - file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [[ -z "$file_size_bytes" ]]; then - if [[ $j == $maxfetchretries ]] ; then - rm -f "${anon_cookie_jar}"; - printf "\\n" - echo -e "${RED}| Failed to extract file size.${NC}" - warnAndRetryUnknownError=true - if [[ "${finalAttempt}" == "true" ]] ; then - failedRetryDownload "${remote_url}" "" "" - fi - return 1 - else - tor_identity="${RANDOM}" - continue - fi - fi - break #Good to go here - done touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" @@ -607,7 +567,7 @@ anon_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/anonsharing.sh b/hosts/anonsharing.sh index 1f2bc8f..9170bbe 100644 --- a/hosts/anonsharing.sh +++ b/hosts/anonsharing.sh @@ -278,7 +278,7 @@ ansh_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/ateasystems.sh b/hosts/ateasystems.sh index 8e6e92c..dcf9ae5 100644 --- a/hosts/ateasystems.sh +++ b/hosts/ateasystems.sh @@ -240,7 +240,7 @@ atea_GetFile() { --output "$file_path" --output "$file_path" fi fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - 0))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/bedrive.sh b/hosts/bedrive.sh index 56b807a..42a1744 100644 --- a/hosts/bedrive.sh +++ b/hosts/bedrive.sh @@ -318,7 +318,7 @@ bd_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/biteblob.sh b/hosts/biteblob.sh index 335da60..d54e621 100644 --- a/hosts/biteblob.sh +++ b/hosts/biteblob.sh @@ -245,12 +245,25 @@ bite_GetFile() { continue fi fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + received_file_size=0 + if [[ -f "$file_path" ]] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true fi if [[ "$containsHtml" == "true" ]]; then + if grep -Eqi 'was removed|no such file|was deleted|not found|banned' < "$file_path" ; then + printf "\\n" + echo -e "${RED}| The file was not found or has been 
removed.${NC}" + rm -f "${file_path}" + rm -f "$flockDownload"; + removedDownload "${remote_url}" "The file was not found or has been removed." + exitDownloadNotAvailable=true + return 1 + fi echo -e "${YELLOW}Download Failed (contains html)${NC} partial removed..." rm -f "${file_path}" if ((j >= $MaxDownloadRetries)) ; then @@ -276,7 +289,7 @@ bite_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/blackcloud_onion.sh b/hosts/blackcloud_onion.sh new file mode 100644 index 0000000..ebc4ed4 --- /dev/null +++ b/hosts/blackcloud_onion.sh @@ -0,0 +1,38 @@ +#! Name: blackcloud_onion.sh +#! Author: kittykat +#! Version: 2025.02.22 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='bcloud' +HostNick='blackcloud' +HostFuncPrefix='bcloud' +HostUrls='bcloudwenjxgcxjh6uheyt72a5isimzgg4kv5u74jb2s22y3hzpwh6id.onion' +HostDomainRegex='^(http|https)://bcloudwenjxgcxjh6uheyt72a5isimzgg4kv5u74jb2s22y3hzpwh6id\.onion/(d1|dl)/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! This is a direct= download host, so all the functions are already in mad.sh +#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will +#! 
call the direct_DownloadFile() function already in mad.sh +bcloud_DownloadFile() { + local pUrl="$1" + local pFileCnt="$2" + local pFileUrl="${pUrl//bcloudwenjxgcxjh6uheyt72a5isimzgg4kv5u74jb2s22y3hzpwh6id\.onion\/d1/bcloudwenjxgcxjh6uheyt72a5isimzgg4kv5u74jb2s22y3hzpwh6id\.onion\/dl}" + echo -e "[${BLUE}ModifiedUrl${NC}]: ${pFileUrl}" + direct_DownloadFile "$pUrl" "$pFileCnt" "$pFileUrl" +} diff --git a/hosts/bowfile.sh b/hosts/bowfile.sh index 1daba6e..2ab250d 100644 --- a/hosts/bowfile.sh +++ b/hosts/bowfile.sh @@ -347,7 +347,7 @@ bow_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/click.sh b/hosts/click.sh index f37b717..a83966c 100644 --- a/hosts/click.sh +++ b/hosts/click.sh @@ -587,7 +587,7 @@ click_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/dailyuploads.sh b/hosts/dailyuploads.sh index 6ddac96..ded48f1 100644 --- a/hosts/dailyuploads.sh +++ b/hosts/dailyuploads.sh @@ -546,7 +546,7 @@ daily_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/dashfile.sh b/hosts/dashfile.sh index ecd4a49..4cb34b0 100644 --- a/hosts/dashfile.sh +++ b/hosts/dashfile.sh @@ -541,7 +541,7 @@ dash_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/dataupload.sh b/hosts/dataupload.sh index 27e8544..2489ea2 100644 --- a/hosts/dataupload.sh +++ b/hosts/dataupload.sh @@ -399,7 +399,7 @@ dup_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/desiupload.sh b/hosts/desiupload.sh index 21c36fb..b2686f8 100644 --- a/hosts/desiupload.sh +++ b/hosts/desiupload.sh @@ -450,7 +450,7 @@ desi_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/dosya.sh b/hosts/dosya.sh index 66d4103..5557f75 100644 --- a/hosts/dosya.sh +++ b/hosts/dosya.sh @@ -433,7 +433,7 @@ dosya_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d 
'[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/downloadgg.sh b/hosts/downloadgg.sh index f078982..e11958e 100644 --- a/hosts/downloadgg.sh +++ b/hosts/downloadgg.sh @@ -315,7 +315,7 @@ dgg_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/examples/ExampleNewHost.sh b/hosts/examples/ExampleNewHost.sh old mode 100644 new mode 100755 index 15facd9..a8f5a28 --- a/hosts/examples/ExampleNewHost.sh +++ b/hosts/examples/ExampleNewHost.sh @@ -204,7 +204,7 @@ fh_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/examples/up_example.sh b/hosts/examples/up_example.sh old mode 100644 new mode 100755 diff --git a/hosts/fileblade.sh b/hosts/fileblade.sh index 525a517..5e9497c 100644 --- a/hosts/fileblade.sh +++ b/hosts/fileblade.sh @@ -492,7 +492,7 @@ fb_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/fileditch.sh b/hosts/fileditch.sh index 309da9d..020cfa1 100644 --- a/hosts/fileditch.sh +++ b/hosts/fileditch.sh @@ -192,7 +192,7 @@ fd_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/filedot.sh b/hosts/filedot.sh index 335df2c..253c33f 100644 --- a/hosts/filedot.sh +++ b/hosts/filedot.sh @@ -504,7 +504,7 @@ fdot_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/filehaus.sh b/hosts/filehaus.sh index 80a2788..4ee80fc 100644 --- a/hosts/filehaus.sh +++ b/hosts/filehaus.sh @@ -196,7 +196,7 @@ fh_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/firestorage.sh b/hosts/firestorage.sh index c22a386..33e3534 100644 --- a/hosts/firestorage.sh +++ b/hosts/firestorage.sh @@ -340,7 +340,7 @@ fs_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat 
--format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/gofile.sh b/hosts/gofile.sh index 41df85a..37f56da 100644 --- a/hosts/gofile.sh +++ b/hosts/gofile.sh @@ -422,7 +422,7 @@ gofile_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/hexload.sh b/hosts/hexload.sh index c32e8fa..a8e967e 100644 --- a/hosts/hexload.sh +++ b/hosts/hexload.sh @@ -326,7 +326,7 @@ hex_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/innocent.sh b/hosts/innocent.sh index 6156065..ad0aac7 100644 --- a/hosts/innocent.sh +++ b/hosts/innocent.sh @@ -217,7 +217,7 @@ inno_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/isupload.sh b/hosts/isupload.sh index 146f4e7..68d2069 100644 --- a/hosts/isupload.sh +++ b/hosts/isupload.sh @@ -373,7 +373,11 @@ isup_GetFile() { continue fi fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + received_file_size=0 + if [[ -f "$file_path" ]] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true @@ -404,7 +408,7 @@ isup_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/kraken.sh b/hosts/kraken.sh index 4e56cf9..69635bb 100644 --- a/hosts/kraken.sh +++ b/hosts/kraken.sh @@ -291,7 +291,7 @@ kraken_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/mediafire.sh b/hosts/mediafire.sh index ed86788..2798a34 100644 --- a/hosts/mediafire.sh +++ b/hosts/mediafire.sh @@ -282,7 +282,7 @@ mfire_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git 
a/hosts/nippy.sh b/hosts/nippy.sh index f40f3a7..c3f8784 100644 --- a/hosts/nippy.sh +++ b/hosts/nippy.sh @@ -305,7 +305,7 @@ nippy_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/oshi.sh b/hosts/oshi.sh index 0b06430..e48367f 100644 --- a/hosts/oshi.sh +++ b/hosts/oshi.sh @@ -1,6 +1,6 @@ #! Name: oshi.sh #! Author: kittykat -#! Version: 2025.02.17 +#! Version: 2025.02.21 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! @@ -96,11 +96,10 @@ oshi_FetchFileInfo() { elif [[ "${OshiBaseUrlOverride}" == "oshionion" ]]; then download_url=${remote_url//oshi\.at/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion} fi - if ! grep -Eqi '/nossl/' <<< "$download_url"; then - download_url=${download_url//oshi\.at/oshi\.at\/nossl} + if ! grep -Eqi '/nossl/' <<< "$download_url" && grep -Eqi '5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd/' <<< "$download_url" ; then download_url=${download_url//5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion\/nossl} fi - if grep -Eqi '^https' <<< "$download_url"; then + if grep -Eqi '^https' <<< "$download_url" && grep -Eqi '5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd/' <<< "$download_url" ; then download_url=${download_url//https:/http:} fi download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") @@ -199,15 +198,15 @@ oshi_GetFile() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if [[ "${RateMonitorEnabled}" == "true" ]]; then - tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" + tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" else - tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" + tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/pixeldrain.sh b/hosts/pixeldrain.sh index 178dd73..aae27eb 100644 --- a/hosts/pixeldrain.sh +++ b/hosts/pixeldrain.sh @@ -1,6 +1,6 @@ #! Name: pixeldrain.sh #! Author: kittykat -#! Version: 2024.09.13 +#! Version: 2025.02.24 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! 
@@ -194,13 +194,16 @@ pd_FetchFileInfo() { return 1 fi elif grep -i -Eq "ip_download_limited_captcha_required" <<< "$response"; then - if ((i > 1)) ; then + if ((i >= 5)) ; then printf "\\n" + echo -e "${RED}| Failed: The file is IP limited… (used max bandwidth/48hr, try again later)${NC}" + exitDownloadNotAvailable=true + failedRetryDownload "${remote_url}" "Captcha IP Limited (used max bandwidth/48hr, try again later)" "" + return 1 + else + printf " ." + continue fi - echo -e "${RED}| Failed: The file is IP limited… (used max bandwidth/48hr, try again later)${NC}" - exitDownloadNotAvailable=true - failedRetryDownload "${remote_url}" "Captcha IP Limited (used max bandwidth/48hr, try again later)" "" - return 1 elif ! grep -q -Eqi '"availability":""' <<< "$response"; then pd_message=$(grep -o -P '(?<="availability":").+?(?=")' <<< "$response") if ((i > 1)) ; then @@ -352,7 +355,7 @@ pd_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/quax.sh b/hosts/quax.sh index cfa183b..a769a07 100644 --- a/hosts/quax.sh +++ b/hosts/quax.sh @@ -181,7 +181,7 @@ qx_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/ranoz.sh b/hosts/ranoz.sh index 7aa4d91..a70c869 100644 --- a/hosts/ranoz.sh +++ b/hosts/ranoz.sh @@ -312,7 +312,7 @@ rz_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/sendnow.sh b/hosts/sendnow.sh index b153cac..a8ae95c 100644 --- a/hosts/sendnow.sh +++ b/hosts/sendnow.sh @@ -405,7 +405,7 @@ snow_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/sendspace.sh b/hosts/sendspace.sh new file mode 100755 index 0000000..cf77eb6 --- /dev/null +++ b/hosts/sendspace.sh @@ -0,0 +1,401 @@ +#! Name: sendspace.sh +#! Author: kittykat +#! Version: 2025.02.21 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! 
* Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='ss' +HostNick='sendspace' +HostFuncPrefix='ss' +HostUrls='sendspace.com' +HostDomainRegex='^(http|https)://(.*\.)?sendspace\.com/file/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! _DownloadFile() +ss_DownloadFile() { + local remote_url=${1} + local file_url=${1} + local filecnt=${2} + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + download_inflight_path="${WorkDir}/.inflight/" + mkdir -p "$download_inflight_path" + completed_location="${WorkDir}/downloads/" + tor_identity="${RANDOM}" + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [[ $z -eq $MaxUrlRetries ]] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if ss_FetchFileInfo $finalAttempt && ss_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then + return 0 + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then + debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then + debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! +ss_FetchFileInfo() { + finalAttempt=$1 + maxfetchretries=5 + ss_cookie_jar="" + echo -e "${GREEN}# Fetching download link…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + ss_cookie_jar=$(mktemp "${WorkDir}/.temp/ss_cookies""${instance_no}"".XXXXXX") + printf " ." 
+ tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${ss_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s -b "${ss_cookie_jar}" -c "${ss_cookie_jar}" "$remote_url") + if [[ "${DebugAllEnabled}" == "true" ]] ; then + debugHtml "${remote_url##*/}" "ss_dwnpage$i" "${response}" + fi + if [[ -z $response ]] ; then + rm -f "${ss_cookie_jar}"; + if [[ $i == $maxfetchretries ]] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link.${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi "banned your IP|IP has been banned|you are banned" <<< "$response"; then + rm -f "${ss_cookie_jar}"; + if [[ $i == $maxfetchretries ]] ; then + printf "\\n" + echo -e "${RED}| Blocked ip${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Blocked ip" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi "no such file|File was deleted|File not found" <<< "$response"; then + rm -f "${ss_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'class="download_page_button button1" href="https://' <<< "$response"; then + download_url=$(grep -oPi '(?<=class="download_page_button button1" href=").*?(?=" onclick=".*$)' <<< "$response") + filename="${download_url##*/}" + filename=${filename//[$'\t\r\n']} + download_url=$(urlencode_literal_grouped_case_urlendingonly ${download_url}) + printf "\\n" + break + else + rm -f "${ss_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to find download link [1]${NC}" + exitDownloadError=true + failedRetryDownload "${remote_url}" "Failed to find download link [1]" "" + return 1 + fi + done + echo -e "${GREEN}# Fetching file info…${NC}" + filename="" + file_size_bytes="" + if [[ ! "$filename_override" == "" ]] ; then + filename="$filename_override" + fi + maxfetchretries=2 + for ((j=1; j<=maxfetchretries; j++)); do + printf " ." + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ss_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + file_header=$(tor_curl_request --insecure -L --head -s \ + -b "${ss_cookie_jar}" -c "${ss_cookie_jar}" \ + "$download_url" | tr -d '\0') + if [[ "${DebugAllEnabled}" == "true" ]] ; then + debugHtml "${remote_url##*/}" "ss_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" + fi + if [[ -z $file_header ]] ; then + if ((j == maxfetchretries)) ; then + rm -f "${ss_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info [1]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info [1]" "" + fi + return 1 + else + continue + fi + fi + if ! 
grep -Eqi 'HTTP.* 200' <<< $file_header ; then + if ((j == maxfetchretries)) ; then + rm -f "${ss_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info [2]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info [2]" "" + fi + return 1 + else + continue + fi + fi + if [[ -z $filename ]]; then + filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") + if [[ -z $filename ]]; then + filename=$(grep -oPi '(?<=filename[*]=).*' <<< "$file_header") + filename=${filename//[$'\t\r\n']} + fi + fi + if [[ -z $file_size_bytes ]] ; then + file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + fi + if [[ -z $filename ]] || [[ -z $file_size_bytes ]] ; then + if ((j == maxfetchretries)) ; then + rm -f "${ss_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info [3]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info [3]" "" + fi + return 1 + else + continue + fi + fi + break #Good to go here + done + rm -f "${ss_cookie_jar}"; + if [[ -z $filename ]] || [[ -z $file_size_bytes ]] ; then + printf "\\n" + echo -e "${RED}| Failed to extract file info [3]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info [3]" "" + fi + return 1 + fi + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then + filename="$filename_override" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + printf "\\n" + echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Filesize not found!" "" + fi + echo -e "${YELLOW}| Filesize not found… retry${NC}" + return 1 + else + file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" + fi + echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}" + file_path="${download_inflight_path}${filename}" + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then + return 1 + fi + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! 
+ss_GetFile() { + echo -e "${GREEN}# Downloading…${NC}" + echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n" + fileCnt=$1 + retryCnt=$2 + finalAttempt=$3 + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + for ((j=1; j<=$MaxDownloadRetries; j++)); do + pd_presize=0 + if [[ -f "$file_path" ]] ; then + pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + GetRandomUA + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${ss_cookie_jar}"; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then + tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" + else + tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" + fi + else + if [[ "${RateMonitorEnabled}" == "true" ]]; then + tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + --referer "$remote_url" \ + --continue-at - --output "$file_path" + else + tor_curl_request --insecure \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + --referer "$remote_url" \ + --continue-at - --output "$file_path" + fi + fi + received_file_size=0 + if [[ -f "$file_path" ]] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then + containsHtml=false + else + containsHtml=true + fi + downDelta=$(( received_file_size - pd_presize )) + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
+ rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [[ "${finalAttempt}" == "true" ]] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [[ "${finalAttempt}" == "true" ]] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then + rm -rf "$file_path" + fi + echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [[ "${finalAttempt}" == "true" ]] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then + echo -e "\n${RED}Download failed, file is incomplete.${NC}" + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [[ "${finalAttempt}" == "true" ]] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + else + break + fi + done + rm -f "$flockDownload"; + rm -f "${ss_cookie_jar}"; + ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" + return 0 +} +#! +#! --------------- Host Extra Functions ------------------- # +#! 
diff --git a/hosts/syspro.sh b/hosts/syspro.sh index ea852fe..c9b7fc3 100644 --- a/hosts/syspro.sh +++ b/hosts/syspro.sh @@ -208,7 +208,7 @@ sysp_GetFile() { --output "$file_path" --output "$file_path" fi fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/tempfileme.sh b/hosts/tempfileme.sh index 7606dd0..f5899cb 100644 --- a/hosts/tempfileme.sh +++ b/hosts/tempfileme.sh @@ -342,7 +342,7 @@ tmpme_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/tempsh.sh b/hosts/tempsh.sh index 51bce81..1bfb999 100644 --- a/hosts/tempsh.sh +++ b/hosts/tempsh.sh @@ -265,7 +265,7 @@ tmpsh_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/torup.sh b/hosts/torup.sh index 133c11a..2318f2e 100644 --- a/hosts/torup.sh +++ b/hosts/torup.sh @@ -236,7 +236,7 @@ torp_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/up2share.sh b/hosts/up2share.sh index a8cb364..23c6661 100644 --- a/hosts/up2share.sh +++ b/hosts/up2share.sh @@ -366,7 +366,7 @@ up2share_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/up_oshi.sh b/hosts/up_oshi.sh index 868ab40..4b3c8d1 100644 --- a/hosts/up_oshi.sh +++ b/hosts/up_oshi.sh @@ -1,6 +1,6 @@ #! Name: up_oshi.sh #! Author: kittykat -#! Version: 2025.02.17 +#! Version: 2025.02.21 #! Desc: Add support for uploading files to a new host #! Info: Files are accessible at https://oshi.at/ #! MaxSize: 5GB @@ -101,7 +101,7 @@ oshi_PostFile() { UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" tor_identity="${RANDOM}" - PostUrlHost='http://oshi.at/nossl/' + PostUrlHost='http://oshi.at/' if [[ "$OshiUploadHostChoice" == "oshionion" ]]; then PostUrlHost='http://5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd.onion/nossl/' fi diff --git a/hosts/up_ranoz.sh b/hosts/up_ranoz.sh index 6e5a00d..0386138 100644 --- a/hosts/up_ranoz.sh +++ b/hosts/up_ranoz.sh @@ -1,6 +1,6 @@ #! Name: up_ranoz.sh #! Author: kittykat -#! Version: 2025.02.20 +#! Version: 2025.02.24 #! Desc: Add support for uploading files to bedrive.ru #! Info: Files are accessible at https://ranoz.gg/file/ #! 
MaxSize: 20GB @@ -166,7 +166,7 @@ rz_PostFile() { echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" echo -e "| Link: ${YELLOW}${downloadLink}${NC}" if [[ "$RanozRandomizeExt" == "true" ]]; then - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" "[rand ext, rename to $filename or use MAD v2025.02.13+]" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" "[rename to $filename or use MAD script]" else successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" fi diff --git a/hosts/up_sendnow.sh b/hosts/up_sendnow.sh old mode 100755 new mode 100644 index fbfacfa..701fc54 --- a/hosts/up_sendnow.sh +++ b/hosts/up_sendnow.sh @@ -94,7 +94,7 @@ snow_PostFile() { local finalAttempt=$6 local pline=${7} UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Finding good Tor node{NC}" + echo -e "[${YELLOW}${_hostCode}${NC}] Finding good Tor node${NC}" for ((i=0; i<=15; i++)); do tor_identity="${RANDOM}" trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 diff --git a/hosts/up_uploadhive.sh b/hosts/up_uploadhive.sh index b7e25f0..29d0b26 100644 --- a/hosts/up_uploadhive.sh +++ b/hosts/up_uploadhive.sh @@ -1,6 +1,6 @@ #! Name: up_uploadhive.sh #! Author: kittykat -#! Version: 2025.02.20 +#! Version: 2025.02.24 #! Desc: Add support for uploading files to uploadhive.com #! Info: Files are accessible at https://uploadhive.com/ #! MaxSize: 5GB @@ -154,7 +154,7 @@ uhive_PostFile() { echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" echo -e "| Link: ${YELLOW}${downloadLink}${NC}" if [[ "$UploadHiveRandomizeExt" == "true" ]]; then - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" "[rand ext, rename to $filename or use MAD v2025.02.13+]" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" "[rename to $filename or use MAD script]" else successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" fi diff --git a/hosts/uploadee.sh b/hosts/uploadee.sh index 65dfb35..d1bf791 100644 --- a/hosts/uploadee.sh +++ b/hosts/uploadee.sh @@ -301,7 +301,7 @@ upee_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/uploadev.sh b/hosts/uploadev.sh index b292bdd..b76ef68 100644 --- a/hosts/uploadev.sh +++ b/hosts/uploadev.sh @@ -413,7 +413,7 @@ upev_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/uploadflix.sh b/hosts/uploadflix.sh index 33a798a..bb7ba9a 100644 --- a/hosts/uploadflix.sh +++ b/hosts/uploadflix.sh @@ -291,7 +291,7 @@ uflix_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" 
"$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/uploadhive.sh b/hosts/uploadhive.sh index 896065a..f0da9c0 100644 --- a/hosts/uploadhive.sh +++ b/hosts/uploadhive.sh @@ -252,7 +252,7 @@ uhive_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/hosts/youdbox.sh b/hosts/youdbox.sh index b8e524f..81aa0f1 100644 --- a/hosts/youdbox.sh +++ b/hosts/youdbox.sh @@ -281,7 +281,7 @@ youd_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true diff --git a/mad.sh b/mad.sh index 10d6781..198c54e 100644 --- a/mad.sh +++ b/mad.sh @@ -30,9 +30,17 @@ # # * Everyone who provided feedback and helped test.. and those who wish to remain anonymous -ScriptVersion=2025.02.20 +ScriptVersion=2025.02.25 #================================================= # Recent Additions +# 2025.02.25 - [mad + allhosts] Re-engineer BadHtml scan to only scan the first 10kb of downloaded partials +# 2025.02.24 - [pixeldrain] Update "The file is IP limited" response handling retry +# 2025.02.22 - [blackcloud_onion] Add bcloud.onion download handling (url fixing) +# 2025.02.21 - [anonfile] Update cdn link parsing to handle new subdomains +# 2025.02.21 - [anonfile] Add download limit reached response handling +# 2025.02.21 - [anonfile] Update file info retrieval (head no longer responds) +# 2025.02.21 - [sendspace] Add sendspace.com as download host +# 2025.02.21 - [oshi / up_oshi] Revert /nossl/ changes for oshi.at (clearnet) # 2025.02.20 - [up_ranoz] Fixed parsing of ranoz upload link (cloudflare) # 2025.02.20 - [sendnow] Better handling of sendnow new Tor ip blocking # 2025.02.20 - [up_ranoz / up_uploadhive] Add obfuscation of .7z in multipart filename that was missing @@ -62,23 +70,6 @@ ScriptVersion=2025.02.20 # 2025.02.02 - [mad] Add function to handle urlencode of cyrillic / kanji / latin / etc # 2025.02.02 - [ranoz] Fix handling filenames containing cyrillic / kanji / latin chars # 2025.02.02 - [all] Reduced character processing for urlencode to special url characters -# 2025.01.30 - [isupload] Add handling of 404 Not Found on initial page fetch -# 2025.01.23 - [mad] Do not check for supported host on "direct=" lines -# 2025.01.19 - [fileditch] Add direct download url processing fileditchfiles.me (though they block Tor now) -# 2025.01.18 - [up_nantes] Update the post retention to "week" (host removed "month" option) -# 2025.01.18 - [mad] Updates to url_encode function and addition of conversion of utf8 to ascii function -# 2025.01.17 - [ranoz] Servers response to resume changed, set as no resume type for now -# 2025.01.17 - [uwabaki] Add download handling for uwabaki onion address urls -# 2025.01.16 - [ranoz] Fix filenames with unicode chars in the download url -# 2025.01.16 - [up_axfc] Move convert utf8 to ascii to mad function -# 2025.01.16 - [up_uwabaki] Add uwabaki.party as upload host (1GB, no expiration, no DMCA, no logs) -# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host -# 
2025.01.14 - [uwabaki] Add uwabaki.party as download host -# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor ips (until alternative) -# 2025.01.13 - [ocr_captcha] Create imagemagick OCR function for testing without tesseract -# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function -# 2025.01.13 - [up_anonfile] Modify to use new upload url -# 2025.01.12 - [ateasystems] Update 404 Not found response # -- See ./documentation/!Changelog (Historical).txt for further changes -- # @@ -452,7 +443,8 @@ SetEnabledDownloadHosts() { lstEnabledDownloadHosts+="moocloud,nantes,netlib,offshorecat,oshi,pixeldrain,quax,ranoz,skrepr," lstEnabledDownloadHosts+="tempfileme,tempsh,torup,turboonion,up2share,uploadee,uploadev,uploadhive," lstEnabledDownloadHosts+="youdbox,herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload," - lstEnabledDownloadHosts+="fileland,fireget,euromussels,ramsgaard,uwabaki,gagneux,sendnow" + lstEnabledDownloadHosts+="fileland,fireget,euromussels,ramsgaard,uwabaki,gagneux,sendnow,sendspace," + lstEnabledDownloadHosts+="blackcloud_onion" elif [[ "$EnabledDownloadHosts" == "online" ]] ; then lstEnabledDownloadHosts="1fichier,anonsharing,bedrive,biteblob,bowfile,click,cyssoux," lstEnabledDownloadHosts+="dailyuploads,dataupload,depotkaz,dictvm,dosya,downloadgg,eddowding,eternalhosting," @@ -462,7 +454,7 @@ SetEnabledDownloadHosts() { lstEnabledDownloadHosts+="oshi,pixeldrain,quax,ranoz,shareonline,skrepr,tempfileme,tempsh,torup," lstEnabledDownloadHosts+="turboonion,up2share,uploadee,uploadev,uploadhive,yolobit,youdbox,herbolistique," lstEnabledDownloadHosts+="uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,fileland,fireget," - lstEnabledDownloadHosts+="euromussels,ramsgaard,uwabaki,gagneux,sendnow" + lstEnabledDownloadHosts+="euromussels,ramsgaard,uwabaki,gagneux,sendnow,sendspace,blackcloud_onion" fi } GetRandomFiledotUser() { @@ -2783,13 +2775,19 @@ CheckNoHtml() { local cde_remote_url=$1 local cde_filename=$2 local cde_file_path=$3 - if [[ -f "${cde_file_path}" ]] ; then - badHtml=$(grep -aoPi '^.*(||content-type|:error|not found|too many connections).*$' "${cde_file_path}") + local partial_size=$4 + local tmp_partial="${WorkDir}/.temp/${cde_filename}_${partial_size}.10kb.part" + local curr_filesize=$(stat --format="%s" "${cde_file_path}" | tr -d '[:space:]') + mkdir -p "${WorkDir}/.temp" + head -c "10240" > "$tmp_partial" < <(exec tail -c "$partial_size" "$cde_file_path") + if [[ -f "${cde_file_path}" ]] ; then + badHtml=$(grep -aoPi '^.*(||content-type|:error|not found|too many connections).*$' "${tmp_partial}") if [[ "$badHtml" == "" ]]; then + rm -f "${tmp_partial}" return 0 else if [[ "${DebugAllEnabled}" == "true" ]] ; then - cp "$cde_file_path" "${WorkDir}/.debug/$cde_filename.htmldebug.txt" + cp "$tmp_partial" "${WorkDir}/.debug/$cde_filename.htmldebug.txt" fi if [[ "${DebugAllEnabled}" == "true" ]] ; then echo -e "" @@ -2804,9 +2802,11 @@ CheckNoHtml() { echo -e "BadHtml: ${badHtml}" >> "${WorkDir}/.debug/_err.log" echo -e "" fi + rm -f "${tmp_partial}" return 1 fi else + rm -f "${tmp_partial}" return 0 fi } @@ -3416,7 +3416,11 @@ direct_GetFile() { continue fi fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + received_file_size=0 + if [[ -f "$file_path" ]] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else 
containsHtml=true @@ -3456,7 +3460,7 @@ direct_GetFile() { if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then containsHtml=false else containsHtml=true
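Note: the re-engineered CheckNoHtml above (see the 2025.02.25 changelog entry) is why every *_GetFile caller now passes "$((received_file_size - pd_presize))" as a fourth argument: only the bytes added by the most recent transfer are scanned, and at most 10kb of them. A rough standalone equivalent of the windowing step, assuming bash with GNU coreutils and a positive delta; the name tmp_window is illustrative and the grep pattern is abbreviated from the one in mad.sh:

  # Bytes appended by the last transfer (the only data that could be a fresh error page).
  partial_size=$(( received_file_size - pd_presize ))
  tmp_window=$(mktemp)
  # Take the last $partial_size bytes of the partial, keep only the first 10kb of them.
  tail -c "$partial_size" "$file_path" | head -c 10240 > "$tmp_window"
  if grep -aqiE 'content-type|:error|not found|too many connections' "$tmp_window" ; then
    echo "tainted partial: new data looks like an HTML/error page"
  fi
  rm -f "$tmp_window"

On a multi-gigabyte partial this turns each retry's BadHtml scan from a full-file grep into a constant 10kb read, which is the point of the change.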