# 2025.02.14 - [mad] Add helpful verbiage for user on MAD Randomized Extension upload urls

# 2025.02.14 - [up_ranoz] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.14 - [up_uploadhive] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.13 - [mad] Add "RanozRandomizeExt" MAD randomized extension configurable variable
# 2025.02.13 - [up_ranoz] Add MAD randomized extension upload handling
# 2025.02.13 - [ranoz] Add MAD randomized extension download handling
# 2025.02.13 - [sendnow] Extend request timeout for head / get (server response time lag)
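
The entries above cover two behaviour changes visible in the diffs below: uploads to ranoz and uploadhive can use MAD's randomized-extension handling (the new `RanozRandomizeExt` variable), uploading under a temporary randomized extension and appending the "[rand ext, rename to <filename> or use MAD v2025.02.13+]" hint to the reported URL; and send.now head/get requests now go through `tor_curl_request_extended`, which picks a random 30-59 s connect timeout (see the `mad.sh:382` hunk). Below is a minimal bash sketch of the randomized-extension upload flow; only `RanozRandomizeExt`, the temp-file rename, and the URL hint come from the diff, while the wrapper function and suffix generator are illustrative assumptions.

```bash
# Sketch only: RanozRandomizeExt, the rename-back step, and the URL hint are taken
# from the up_ranoz.sh hunks below; the wrapper and suffix generator are assumptions,
# not the actual MAD code.
RanozRandomizeExt=true

upload_with_random_ext() {
  local filepath="$1"
  local filename="${filepath##*/}"
  local tmpfilepath="$filepath"

  if [[ "$RanozRandomizeExt" == "true" ]]; then
    # Upload under a temporary randomized extension; the real name is restored afterwards.
    tmpfilepath="${filepath}.$(tr -dc 'a-z0-9' < /dev/urandom | head -c 4)"
    mv "$filepath" "$tmpfilepath"
  fi

  # ... request an upload_url for "$tmpfilepath" and PUT it (see up_ranoz.sh:130-170);
  # that elided step would also set downloadLink from the host response ...

  if [[ "$RanozRandomizeExt" == "true" ]]; then
    # Restore the original local name and tell the user what to rename the download to.
    mv "$tmpfilepath" "$filepath" 2> /dev/null
    echo "${downloadLink} [rand ext, rename to ${filename} or use MAD v2025.02.13+]"
  fi
}
```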
kittykat 2025-02-17 22:54:05 +00:00
parent 0face871aa
commit d48116dbe3
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
14 changed files with 880 additions and 552 deletions

@@ -1,4 +1,4 @@
DateTime: 25.02.11
DateTime: 25.02.13
Files:
./hosts/1fichier.sh
@@ -361,13 +361,13 @@ _________________________________________________________________________
./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \
./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
./hosts/sendnow.sh:158: response=$(tor_curl_request --insecure -L -svo. -X POST \
./hosts/sendnow.sh:200: file_header=$(tor_curl_request --insecure --head -Lis \
./hosts/sendnow.sh:321: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/sendnow.sh:323: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:341: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:360: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:379: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:160: response=$(tor_curl_request --insecure -L -svo. -X POST \
./hosts/sendnow.sh:203: file_header=$(tor_curl_request_extended --insecure --head -Lis \
./hosts/sendnow.sh:324: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/sendnow.sh:326: tor_curl_request_extended --insecure -L --no-alpn \
./hosts/sendnow.sh:344: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:363: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:382: tor_curl_request --insecure -L --no-alpn \
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/syspro.sh:188: tor_curl_request --insecure -L \
@@ -462,8 +462,8 @@ _________________________________________________________________________
./hosts/up_oshi.sh:110: response=$(tor_curl_upload --insecure \
./hosts/up_pixeldrain.sh:112: response=$(tor_curl_upload --insecure -X PUT \
./hosts/up_quax.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_ranoz.sh:102: response=$(tor_curl_upload --insecure -L -i -s \
./hosts/up_ranoz.sh:129: response=$(tor_curl_upload --insecure -i -X PUT \
./hosts/up_ranoz.sh:130: response=$(tor_curl_upload --insecure -L -i -s \
./hosts/up_ranoz.sh:160: response=$(tor_curl_upload --insecure -i -X PUT \
./hosts/up_shareonline.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_syspro.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_tempfileme.sh:102: response=$(tor_curl_upload --insecure -i \
@@ -476,7 +476,7 @@ _________________________________________________________________________
./hosts/up_uploadee.sh:176: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_uploadev.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadflix.sh:106: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadhive.sh:128: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadhive.sh:129: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadraja.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_uwabaki.sh:102: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_yolobit.sh:102: response=$(tor_curl_upload --insecure -i \
@@ -485,119 +485,119 @@ _________________________________________________________________________
./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
./mad.sh:80:UseTorCurlImpersonate=false
./mad.sh:375:tor_curl_request() {
./mad.sh:376: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:377: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:379: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:382:tor_curl_request_extended() {
./mad.sh:384: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:385: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:387: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:390:tor_curl_upload() {
./mad.sh:391: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:393: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:395: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:399: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:401: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1397:install_curl_impersonate() {
./mad.sh:1399: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1400: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1404: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1405: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1409: PS3='Please select which curl_impersonate to install: '
./mad.sh:1417: install_curl_impersonate_lwthiker_orig
./mad.sh:1421: install_curl_impersonate_lexiforest_fork
./mad.sh:1431:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1435: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1436: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1439: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1442: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1444: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1447: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1457: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1459: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1462: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1464: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1512: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1541: echo -e "| Extracting curl_impersonate..."
./mad.sh:1543: rm -f "${ScriptDir}"/curl*
./mad.sh:1544: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1545: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1546: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1554:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1558: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1559: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1562: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1565: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1567: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1570: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1580: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1582: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1585: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1587: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1635: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1664: echo -e "| Extracting curl_impersonate..."
./mad.sh:1666: rm -f "${ScriptDir}"/curl*
./mad.sh:1667: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1668: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1669: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1831: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1839: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1840: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1849: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1851: echo -e "$maud_curl"
./mad.sh:1853: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1855: echo -e "$maud_torcurl"
./mad.sh:1867: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1868: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1877: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1879: echo -e "$maud_curl"
./mad.sh:1881: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1883: echo -e "$maud_torcurl"
./mad.sh:1889: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1890: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1899: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1901: echo -e "$maud_curl"
./mad.sh:1903: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1905: echo -e "$maud_torcurl"
./mad.sh:2852: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2853: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2855: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3027: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3028: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3030: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3228: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3235: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3372: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3425: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3427: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3625: response=$(tor_curl_upload --insecure -i \
./mad.sh:3632: response=$(tor_curl_upload --insecure -i \
./mad.sh:3703:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3704: curl_impersonate=()
./mad.sh:3705: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3706: bFoundCurlHeader=false
./mad.sh:3710: curl_impersonate=($fil)
./mad.sh:3711: bFoundCurlHeader=true
./mad.sh:3715: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3716: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3719: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3722: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3724: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3728: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3729: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3732: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3734: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3735: UseTorCurlImpersonate=false
./mad.sh:3736: install_curl_impersonate
./mad.sh:3820: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3821: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3899:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3900: install_curl_impersonate
./mad.sh:3931:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3932: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3934: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:4:UseTorCurlImpersonate=false
./mad.sh:87:tor_curl_request() {
./mad.sh:88: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:89: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:91: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:94:tor_curl_request_extended() {
./mad.sh:96: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:97: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:99: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:102:tor_curl_upload() {
./mad.sh:103: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:105: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:107: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:111: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:113: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1114:install_curl_impersonate() {
./mad.sh:1116: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1117: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1121: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1122: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1126: PS3='Please select which curl_impersonate to install: '
./mad.sh:1134: install_curl_impersonate_lwthiker_orig
./mad.sh:1138: install_curl_impersonate_lexiforest_fork
./mad.sh:1148:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1152: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1153: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1156: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1159: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1161: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1164: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1174: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1176: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1179: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1181: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1229: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1258: echo -e "| Extracting curl_impersonate..."
./mad.sh:1260: rm -f "${ScriptDir}"/curl*
./mad.sh:1261: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1262: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1263: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1271:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1275: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1276: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1279: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1282: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1284: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1287: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1297: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1299: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1302: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1304: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1352: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1381: echo -e "| Extracting curl_impersonate..."
./mad.sh:1383: rm -f "${ScriptDir}"/curl*
./mad.sh:1384: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1385: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1386: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1548: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1556: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1557: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1566: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1568: echo -e "$maud_curl"
./mad.sh:1570: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1572: echo -e "$maud_torcurl"
./mad.sh:1584: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1585: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1594: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1596: echo -e "$maud_curl"
./mad.sh:1598: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1600: echo -e "$maud_torcurl"
./mad.sh:1606: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1607: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1616: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1618: echo -e "$maud_curl"
./mad.sh:1620: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1622: echo -e "$maud_torcurl"
./mad.sh:2569: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2570: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2572: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:2744: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2745: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2747: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:2945: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:2952: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3089: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3142: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3144: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3342: response=$(tor_curl_upload --insecure -i \
./mad.sh:3349: response=$(tor_curl_upload --insecure -i \
./mad.sh:3420:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3421: curl_impersonate=()
./mad.sh:3422: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3423: bFoundCurlHeader=false
./mad.sh:3427: curl_impersonate=($fil)
./mad.sh:3428: bFoundCurlHeader=true
./mad.sh:3432: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3433: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3436: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3439: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3441: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3445: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3446: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3449: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3451: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3452: UseTorCurlImpersonate=false
./mad.sh:3453: install_curl_impersonate
./mad.sh:3537: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3538: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3616:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3617: install_curl_impersonate
./mad.sh:3648:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3649: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3651: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \

@@ -1,4 +1,4 @@
DateTime: 25.02.11
DateTime: 25.02.13
Files:
./hosts/1fichier.sh
@@ -230,13 +230,13 @@ _________________________________________________________________________
./hosts/pixeldrain.sh:250: pdheadurl="https://pixeldrain.com/api/file/${fileid}"
./hosts/pixeldrain.sh:252: pdheadurl="https://pd.cybar.xyz/$fileid"
./hosts/pixeldrain.sh:272: download_url="https://pixeldrain.com/api/file/${fileid}"
./hosts/sendnow.sh:180: if grep -Eqi 'location: https://' <<< "$response"; then
./hosts/sendnow.sh:195: fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/d/)' <<< "$download_url")
./hosts/sendnow.sh:205: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:329: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:346: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:367: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:385: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:182: if grep -Eqi 'location: https://' <<< "$response"; then
./hosts/sendnow.sh:197: fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/d/)' <<< "$download_url")
./hosts/sendnow.sh:208: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:332: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:349: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:370: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:388: -H "Referer: https://send.now/" \
./hosts/tempfileme.sh:144: if grep -Eqi 'id="download-button" href="http://tempfile.me/file/' <<< "$response"; then
./hosts/tempfileme.sh:146: download_url="${download_url/http:/https:}"
./hosts/tempfileme.sh:186: if grep -Eqi 'location: https://' <<< "${file_header}" ; then
@@ -403,7 +403,7 @@ _________________________________________________________________________
./hosts/up_ramsgaard.sh:37: jira_PostUrlHost='https://data.ramsgaard.me/script.php'
./hosts/up_ramsgaard.sh:40: jira_downloadLinkPrefix='https://data.ramsgaard.me/f.php?h='
./hosts/up_ranoz.sh:99: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
./hosts/up_ranoz.sh:111: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_ranoz.sh:139: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_shareonline.sh:99: PostUrlHost='https://ns07.zipcluster.com/upload.php'
./hosts/up_skrepr.sh:37: jira_PostUrlHost='https://transfer.skrepr.com/script.php'
./hosts/up_skrepr.sh:40: jira_downloadLinkPrefix='https://transfer.skrepr.com/f.php?h='
@@ -434,82 +434,82 @@ _________________________________________________________________________
./hosts/up_uploadflix.sh:99: local ar_HUP[0]='https://fs50.uploadflix.cyou/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_uploadflix.sh:125: downloadLink="https://uploadflix.cc/${hash}"
./hosts/up_uploadhive.sh:99: PostUrlHost='https://fs430.uploadhive.com/cgi-bin/upload.cgi'
./hosts/up_uploadhive.sh:149: downloadLink="https://uploadhive.com/${hash}"
./hosts/up_uploadhive.sh:150: downloadLink="https://uploadhive.com/${hash}"
./hosts/up_uploadraja.sh:99: PostUrlHost='https://awsaisiaposisition69.kalpstudio.xyz/cgi-bin/upload.cgi?upload_type=file&utype=anon'
./hosts/up_uploadraja.sh:119: downloadLink="https://uploadraja.com/$hash"
./hosts/up_uwabaki.sh:99: PostUrlHost="https://files.uwabaki.party/index.php"
./hosts/up_uwabaki.sh:111: if grep -Eqi 'File uploaded: <a href="https://files.uwabaki.party/' <<< "${response}" ; then
./hosts/up_uwabaki.sh:123: downloadLink="https://files.uwabaki.party${url}"
./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
./mad.sh:698: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:700: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:703: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:705: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:726: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:728: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:731: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:733: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:754: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:756: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:759: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:761: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:783: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:785: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:788: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:790: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:814: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:816: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:819: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:821: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:847: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:849: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:869: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:890: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:892: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:895: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:897: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:913: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:915: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:918: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:920: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:939: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:941: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:944: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:946: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:966: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:968: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:971: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:973: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:991: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:993: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:996: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:998: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1017: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1019: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1022: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1024: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1442: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1459: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1565: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1582: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1845: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1873: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1895: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3211: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3742:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3839: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3841: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3843: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4062: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4063: remote_url=${remote_url/http:/https:}
./mad.sh:4084: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4085: remote_url=${remote_url/http:/https:}
./mad.sh:4451: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4452: remote_url=${remote_url/http:/https:}
./mad.sh:4510: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4511: remote_url=${remote_url/http:/https:}
./mad.sh:4537: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4538: remote_url=${remote_url/http:/https:}
./mad.sh:410: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:412: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:415: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:417: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:438: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:440: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:443: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:445: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:466: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:468: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:471: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:473: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:495: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:497: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:500: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:502: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:526: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:528: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:531: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:533: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:559: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:561: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:581: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:602: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:604: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:607: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:609: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:625: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:627: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:630: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:632: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:651: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:653: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:656: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:658: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:678: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:680: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:683: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:685: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:703: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:705: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:708: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:710: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:729: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:731: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:734: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:736: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1159: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1176: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1282: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1299: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1562: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1590: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1612: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:2928: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3459:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3556: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3558: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3560: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:3779: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:3780: remote_url=${remote_url/http:/https:}
./mad.sh:3801: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:3802: remote_url=${remote_url/http:/https:}
./mad.sh:4168: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4169: remote_url=${remote_url/http:/https:}
./mad.sh:4227: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4228: remote_url=${remote_url/http:/https:}
./mad.sh:4254: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4255: remote_url=${remote_url/http:/https:}
./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)

@@ -1,4 +1,4 @@
DateTime: 25.02.11
DateTime: 25.02.13
Files:
./hosts/1fichier.sh
@@ -2012,77 +2012,77 @@ _________________________________________________________________________
./hosts/sendnow.sh:99: warnAndRetryUnknownError=true
./hosts/sendnow.sh:100: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/sendnow.sh:158: response=$(tor_curl_request --insecure -L -svo. -X POST \
./hosts/sendnow.sh:159: -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \
./hosts/sendnow.sh:160: --data-raw "$form_data" "$remote_url" 2>&1)
./hosts/sendnow.sh:161: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/sendnow.sh:162: debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/sendnow.sh:163: fi
./hosts/sendnow.sh:164: if [[ -z $response ]] ; then
./hosts/sendnow.sh:165: echo -e "${RED}| Failed to extract download link [2]${NC}"
./hosts/sendnow.sh:166: warnAndRetryUnknownError=true
./hosts/sendnow.sh:167: if [ "${finalAttempt}" == "true" ] ; then
./hosts/sendnow.sh:168: rm -f "${snow_cookie_jar}";
./hosts/sendnow.sh:160: response=$(tor_curl_request --insecure -L -svo. -X POST \
./hosts/sendnow.sh:161: -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \
./hosts/sendnow.sh:162: --data-raw "$form_data" "$remote_url" 2>&1)
./hosts/sendnow.sh:163: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/sendnow.sh:164: debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/sendnow.sh:165: fi
./hosts/sendnow.sh:166: if [[ -z $response ]] ; then
./hosts/sendnow.sh:167: echo -e "${RED}| Failed to extract download link [2]${NC}"
./hosts/sendnow.sh:168: warnAndRetryUnknownError=true
./hosts/sendnow.sh:169: if [ "${finalAttempt}" == "true" ] ; then
./hosts/sendnow.sh:170: rm -f "${snow_cookie_jar}";
--
./hosts/sendnow.sh:200: file_header=$(tor_curl_request --insecure --head -Lis \
./hosts/sendnow.sh:201: -H "Host: $fshost" \
./hosts/sendnow.sh:202: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:203: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:204: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:205: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:206: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:207: -H "Connection: keep-alive" \
./hosts/sendnow.sh:208: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:209: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:210: -H "Sec-Fetch-Mode: navigate" \
./hosts/sendnow.sh:203: file_header=$(tor_curl_request_extended --insecure --head -Lis \
./hosts/sendnow.sh:204: -H "Host: $fshost" \
./hosts/sendnow.sh:205: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:206: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:207: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:208: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:209: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:210: -H "Connection: keep-alive" \
./hosts/sendnow.sh:211: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:212: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:213: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/sendnow.sh:323: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:324: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/sendnow.sh:325: -H "Host: $fshost" \
./hosts/sendnow.sh:326: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:327: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:328: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:329: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:330: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:331: -H "Connection: keep-alive" \
./hosts/sendnow.sh:332: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:333: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:326: tor_curl_request_extended --insecure -L --no-alpn \
./hosts/sendnow.sh:327: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/sendnow.sh:328: -H "Host: $fshost" \
./hosts/sendnow.sh:329: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:330: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:331: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:332: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:333: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:334: -H "Connection: keep-alive" \
./hosts/sendnow.sh:335: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:336: -H "Sec-Fetch-Dest: document" \
--
./hosts/sendnow.sh:341: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:342: -H "Host: $fshost" \
./hosts/sendnow.sh:343: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:344: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:345: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:346: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:347: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:348: -H "Connection: keep-alive" \
./hosts/sendnow.sh:349: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:350: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:351: -H "Sec-Fetch-Mode: navigate" \
./hosts/sendnow.sh:344: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:345: -H "Host: $fshost" \
./hosts/sendnow.sh:346: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:347: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:348: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:349: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:350: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:351: -H "Connection: keep-alive" \
./hosts/sendnow.sh:352: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:353: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:354: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/sendnow.sh:360: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:361: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/sendnow.sh:362: -H "User-Agent: $RandomUA" \
./hosts/sendnow.sh:363: -H "Host: $fshost" \
./hosts/sendnow.sh:364: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:365: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:366: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:367: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:368: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:369: -H "Connection: keep-alive" \
./hosts/sendnow.sh:370: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:363: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:364: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/sendnow.sh:365: -H "User-Agent: $RandomUA" \
./hosts/sendnow.sh:366: -H "Host: $fshost" \
./hosts/sendnow.sh:367: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:368: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:369: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:370: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:371: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:372: -H "Connection: keep-alive" \
./hosts/sendnow.sh:373: -H "Upgrade-Insecure-Requests: 1" \
--
./hosts/sendnow.sh:379: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:380: -H "User-Agent: $RandomUA" \
./hosts/sendnow.sh:381: -H "Host: $fshost" \
./hosts/sendnow.sh:382: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:383: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:384: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:385: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:386: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:387: -H "Connection: keep-alive" \
./hosts/sendnow.sh:388: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:389: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:382: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:383: -H "User-Agent: $RandomUA" \
./hosts/sendnow.sh:384: -H "Host: $fshost" \
./hosts/sendnow.sh:385: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:386: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:387: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:388: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:389: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:390: -H "Connection: keep-alive" \
./hosts/sendnow.sh:391: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:392: -H "Sec-Fetch-Dest: document" \
--
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then
@@ -3018,29 +3018,29 @@ _________________________________________________________________________
./hosts/up_quax.sh:111: url=$(grep -oPi '(?<="url": ").*?(?=".*$)' <<< "$response")
./hosts/up_quax.sh:112: filesize=$(GetFileSize "$filepath" "false")
--
./hosts/up_ranoz.sh:102: response=$(tor_curl_upload --insecure -L -i -s \
./hosts/up_ranoz.sh:103: "$PostUrlHost" \
./hosts/up_ranoz.sh:104: -H "Content-Type: application/json" \
./hosts/up_ranoz.sh:105: -d "{ \
./hosts/up_ranoz.sh:106: \"filename\": \"$filename\", \
./hosts/up_ranoz.sh:107: \"size\": $fsize}")
./hosts/up_ranoz.sh:108: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/up_ranoz.sh:109: debugHtml "${filepath##*/}" "${_hostCode}_ticket" "post_url: ${PostUrlHost}"$'\n'"data: ${filename}, ${fsize}"$'\n'"${response}"
./hosts/up_ranoz.sh:110: fi
./hosts/up_ranoz.sh:111: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_ranoz.sh:112: PostUrlHost=$(grep -oPi '(?<="upload_url":").*?(?=".*$)' <<< "$response")
./hosts/up_ranoz.sh:130: response=$(tor_curl_upload --insecure -L -i -s \
./hosts/up_ranoz.sh:131: "$PostUrlHost" \
./hosts/up_ranoz.sh:132: -H "Content-Type: application/json" \
./hosts/up_ranoz.sh:133: -d "{ \
./hosts/up_ranoz.sh:134: \"filename\": \"$tmpfilename\", \
./hosts/up_ranoz.sh:135: \"size\": $fsize}")
./hosts/up_ranoz.sh:136: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/up_ranoz.sh:137: debugHtml "${filepath##*/}" "${_hostCode}_ticket" "post_url: ${PostUrlHost}"$'\n'"data: ${tmpfilepath}, ${fsize}"$'\n'"${response}"
./hosts/up_ranoz.sh:138: fi
./hosts/up_ranoz.sh:139: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_ranoz.sh:140: PostUrlHost=$(grep -oPi '(?<="upload_url":").*?(?=".*$)' <<< "$response")
--
./hosts/up_ranoz.sh:129: response=$(tor_curl_upload --insecure -i -X PUT \
./hosts/up_ranoz.sh:130: "${PostUrlHost}" \
./hosts/up_ranoz.sh:131: --upload-file "$filepath" \
./hosts/up_ranoz.sh:132: -H "Content-Length: $fsize")
./hosts/up_ranoz.sh:133: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/up_ranoz.sh:134: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_ranoz.sh:135: fi
./hosts/up_ranoz.sh:136: if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then
./hosts/up_ranoz.sh:137: filesize=$(GetFileSize "$filepath" "false")
./hosts/up_ranoz.sh:138: echo -e "${GREEN}| Upload Success${NC}"
./hosts/up_ranoz.sh:139: echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
./hosts/up_ranoz.sh:160: response=$(tor_curl_upload --insecure -i -X PUT \
./hosts/up_ranoz.sh:161: "${PostUrlHost}" \
./hosts/up_ranoz.sh:162: --upload-file "$tmpfilepath" \
./hosts/up_ranoz.sh:163: -H "Content-Length: $fsize")
./hosts/up_ranoz.sh:164: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/up_ranoz.sh:165: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_ranoz.sh:166: fi
./hosts/up_ranoz.sh:167: if [[ "$RanozRandomizeExt" == "true" ]]; then
./hosts/up_ranoz.sh:168: mv $tmpfilepath $filepath 2> /dev/null
./hosts/up_ranoz.sh:169: fi
./hosts/up_ranoz.sh:170: if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then
--
./hosts/up_shareonline.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_shareonline.sh:103: -H "Content-Type: multipart/form-data" \
@@ -3186,17 +3186,17 @@ _________________________________________________________________________
./hosts/up_uploadflix.sh:115: -F "upload=Start upload" \
./hosts/up_uploadflix.sh:116: -F "keepalive=1" \
--
./hosts/up_uploadhive.sh:128: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadhive.sh:129: -H "Content-Type: multipart/form-data" \
./hosts/up_uploadhive.sh:130: -F "sess_id=" \
./hosts/up_uploadhive.sh:131: -F "utype=anon" \
./hosts/up_uploadhive.sh:132: -F "link_rcpt=" \
./hosts/up_uploadhive.sh:133: -F "link_pass=" \
./hosts/up_uploadhive.sh:134: -F "to_folder=" \
./hosts/up_uploadhive.sh:135: -F "file_descr=" \
./hosts/up_uploadhive.sh:136: -F "file_public=1" \
./hosts/up_uploadhive.sh:137: -F "upload=Start upload" \
./hosts/up_uploadhive.sh:138: -F "file_0=@$tmpfilepath" \
./hosts/up_uploadhive.sh:129: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadhive.sh:130: -H "Content-Type: multipart/form-data" \
./hosts/up_uploadhive.sh:131: -F "sess_id=" \
./hosts/up_uploadhive.sh:132: -F "utype=anon" \
./hosts/up_uploadhive.sh:133: -F "link_rcpt=" \
./hosts/up_uploadhive.sh:134: -F "link_pass=" \
./hosts/up_uploadhive.sh:135: -F "to_folder=" \
./hosts/up_uploadhive.sh:136: -F "file_descr=" \
./hosts/up_uploadhive.sh:137: -F "file_public=1" \
./hosts/up_uploadhive.sh:138: -F "upload=Start upload" \
./hosts/up_uploadhive.sh:139: -F "file_0=@$tmpfilepath" \
--
./hosts/up_uploadraja.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadraja.sh:103: -H "Content-Type: multipart/form-data" \
@ -3284,235 +3284,235 @@ _________________________________________________________________________
./hosts/youdbox.sh:287: containsHtml=true
./hosts/youdbox.sh:288: fi
--
./mad.sh:375:tor_curl_request() {
./mad.sh:376: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:377: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:378: else
./mad.sh:379: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:380: fi
./mad.sh:381:}
./mad.sh:382:tor_curl_request_extended() {
./mad.sh:383: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:384: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:385: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:386: else
./mad.sh:387: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:388: fi
./mad.sh:389:}
./mad.sh:390:tor_curl_upload() {
./mad.sh:391: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:392: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:393: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:394: else
./mad.sh:395: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:396: fi
./mad.sh:397: else
./mad.sh:398: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:399: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:400: else
./mad.sh:87:tor_curl_request() {
./mad.sh:88: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:89: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:90: else
./mad.sh:91: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:92: fi
./mad.sh:93:}
./mad.sh:94:tor_curl_request_extended() {
./mad.sh:95: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:96: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:97: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:98: else
./mad.sh:99: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:100: fi
./mad.sh:101:}
./mad.sh:102:tor_curl_upload() {
./mad.sh:103: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:104: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:105: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:106: else
./mad.sh:107: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:108: fi
./mad.sh:109: else
./mad.sh:110: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:111: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:112: else
--
./mad.sh:1442: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1443: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1444: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1445: fi
./mad.sh:1446: if [ ! -z "$response" ]; then
./mad.sh:1447: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1448: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1449: break
./mad.sh:1450: fi
./mad.sh:1451: done
./mad.sh:1452: if [ -z $latestTag ]; then
./mad.sh:1159: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1160: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1161: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1162: fi
./mad.sh:1163: if [ ! -z "$response" ]; then
./mad.sh:1164: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1165: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1166: break
./mad.sh:1167: fi
./mad.sh:1168: done
./mad.sh:1169: if [ -z $latestTag ]; then
--
./mad.sh:1462: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1463: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1464: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1465: fi
./mad.sh:1466: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1467: if ((j == 8)) ; then
./mad.sh:1468: return 1
./mad.sh:1469: else
./mad.sh:1470: continue
./mad.sh:1471: fi
./mad.sh:1472: fi
./mad.sh:1179: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1180: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1181: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1182: fi
./mad.sh:1183: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1184: if ((j == 8)) ; then
./mad.sh:1185: return 1
./mad.sh:1186: else
./mad.sh:1187: continue
./mad.sh:1188: fi
./mad.sh:1189: fi
--
./mad.sh:1512: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1513: received_file_size=0
./mad.sh:1514: if [ -f "$file_path" ] ; then
./mad.sh:1515: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1516: fi
./mad.sh:1517: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1518: break
./mad.sh:1519: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1520: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1521: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1522: exit 1
./mad.sh:1229: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1230: received_file_size=0
./mad.sh:1231: if [ -f "$file_path" ] ; then
./mad.sh:1232: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1233: fi
./mad.sh:1234: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1235: break
./mad.sh:1236: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1237: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1238: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1239: exit 1
--
./mad.sh:1565: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1566: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1567: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1568: fi
./mad.sh:1569: if [ ! -z "$response" ]; then
./mad.sh:1570: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1571: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1572: break
./mad.sh:1573: fi
./mad.sh:1574: done
./mad.sh:1575: if [ -z $latestTag ]; then
./mad.sh:1282: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1283: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1284: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1285: fi
./mad.sh:1286: if [ ! -z "$response" ]; then
./mad.sh:1287: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1288: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1289: break
./mad.sh:1290: fi
./mad.sh:1291: done
./mad.sh:1292: if [ -z $latestTag ]; then
--
./mad.sh:1585: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1586: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1587: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1588: fi
./mad.sh:1589: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1590: if ((j == 8)) ; then
./mad.sh:1591: return 1
./mad.sh:1592: else
./mad.sh:1593: continue
./mad.sh:1594: fi
./mad.sh:1595: fi
./mad.sh:1302: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1303: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1304: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1305: fi
./mad.sh:1306: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1307: if ((j == 8)) ; then
./mad.sh:1308: return 1
./mad.sh:1309: else
./mad.sh:1310: continue
./mad.sh:1311: fi
./mad.sh:1312: fi
--
./mad.sh:1635: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1636: received_file_size=0
./mad.sh:1637: if [ -f "$file_path" ] ; then
./mad.sh:1638: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1639: fi
./mad.sh:1640: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1641: break
./mad.sh:1642: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1643: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1644: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1645: exit 1
./mad.sh:1352: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1353: received_file_size=0
./mad.sh:1354: if [ -f "$file_path" ] ; then
./mad.sh:1355: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1356: fi
./mad.sh:1357: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1358: break
./mad.sh:1359: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1360: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1361: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1362: exit 1
--
./mad.sh:1840: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1841: echo -e "Files:"
./mad.sh:1842: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1843: echo -e ""
./mad.sh:1844: echo -e ""
./mad.sh:1845: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1846: echo -e "_________________________________________________________________________"
./mad.sh:1847: echo -e "$maud_http"
./mad.sh:1848: echo -e ""
./mad.sh:1849: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1850: echo -e "_________________________________________________________________________"
./mad.sh:1557: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1558: echo -e "Files:"
./mad.sh:1559: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1560: echo -e ""
./mad.sh:1561: echo -e ""
./mad.sh:1562: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1563: echo -e "_________________________________________________________________________"
./mad.sh:1564: echo -e "$maud_http"
./mad.sh:1565: echo -e ""
./mad.sh:1566: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1567: echo -e "_________________________________________________________________________"
--
./mad.sh:1853: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1854: echo -e "_________________________________________________________________________"
./mad.sh:1855: echo -e "$maud_torcurl"
./mad.sh:1856: echo -e ""
./mad.sh:1857: echo -e ""
./mad.sh:1858: done
./mad.sh:1859: else
./mad.sh:1860: cd "$ScriptDir"
./mad.sh:1861: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1862: cd "$WorkDir"
./mad.sh:1863: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1570: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1571: echo -e "_________________________________________________________________________"
./mad.sh:1572: echo -e "$maud_torcurl"
./mad.sh:1573: echo -e ""
./mad.sh:1574: echo -e ""
./mad.sh:1575: done
./mad.sh:1576: else
./mad.sh:1577: cd "$ScriptDir"
./mad.sh:1578: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1579: cd "$WorkDir"
./mad.sh:1580: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
--
./mad.sh:1868: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1869: echo -e "Files:"
./mad.sh:1870: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1871: echo -e ""
./mad.sh:1872: echo -e ""
./mad.sh:1873: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1874: echo -e "_________________________________________________________________________"
./mad.sh:1875: echo -e "$maud_http"
./mad.sh:1876: echo -e ""
./mad.sh:1877: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1878: echo -e "_________________________________________________________________________"
./mad.sh:1585: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1586: echo -e "Files:"
./mad.sh:1587: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1588: echo -e ""
./mad.sh:1589: echo -e ""
./mad.sh:1590: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1591: echo -e "_________________________________________________________________________"
./mad.sh:1592: echo -e "$maud_http"
./mad.sh:1593: echo -e ""
./mad.sh:1594: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1595: echo -e "_________________________________________________________________________"
--
./mad.sh:1881: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1882: echo -e "_________________________________________________________________________"
./mad.sh:1883: echo -e "$maud_torcurl"
./mad.sh:1884: echo -e ""
./mad.sh:1885: done
./mad.sh:1886: for fil in "${arrFiles2[@]}";
./mad.sh:1887: do
./mad.sh:1888: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1889: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1890: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1891: echo -e "Files:"
./mad.sh:1892: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1893: echo -e ""
./mad.sh:1894: echo -e ""
./mad.sh:1895: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1896: echo -e "_________________________________________________________________________"
./mad.sh:1897: echo -e "$maud_http"
./mad.sh:1898: echo -e ""
./mad.sh:1899: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1900: echo -e "_________________________________________________________________________"
./mad.sh:1598: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1599: echo -e "_________________________________________________________________________"
./mad.sh:1600: echo -e "$maud_torcurl"
./mad.sh:1601: echo -e ""
./mad.sh:1602: done
./mad.sh:1603: for fil in "${arrFiles2[@]}";
./mad.sh:1604: do
./mad.sh:1605: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1606: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1607: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1608: echo -e "Files:"
./mad.sh:1609: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1610: echo -e ""
./mad.sh:1611: echo -e ""
./mad.sh:1612: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1613: echo -e "_________________________________________________________________________"
./mad.sh:1614: echo -e "$maud_http"
./mad.sh:1615: echo -e ""
./mad.sh:1616: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1617: echo -e "_________________________________________________________________________"
--
./mad.sh:1903: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1904: echo -e "_________________________________________________________________________"
./mad.sh:1905: echo -e "$maud_torcurl"
./mad.sh:1906: echo -e ""
./mad.sh:1907: done
./mad.sh:1908: fi
./mad.sh:1909:}
./mad.sh:1910:madStatus() {
./mad.sh:1911: local InputFile="$1"
./mad.sh:1912: if [ "$arg1" == "status" ] ; then
./mad.sh:1913: clear
./mad.sh:1620: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1621: echo -e "_________________________________________________________________________"
./mad.sh:1622: echo -e "$maud_torcurl"
./mad.sh:1623: echo -e ""
./mad.sh:1624: done
./mad.sh:1625: fi
./mad.sh:1626:}
./mad.sh:1627:madStatus() {
./mad.sh:1628: local InputFile="$1"
./mad.sh:1629: if [ "$arg1" == "status" ] ; then
./mad.sh:1630: clear
--
./mad.sh:3228: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3229: -H "Connection: keep-alive" \
./mad.sh:3230: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3231: "$download_url")
./mad.sh:3232: else
./mad.sh:3233: printf "| Retrieving Head: attempt #$j"
./mad.sh:3234: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3235: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3236: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3237: sleep 6
./mad.sh:3238: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3239: kill $! 2>/dev/null
./mad.sh:3240: )
./mad.sh:3241: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3242: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3243: fi
./mad.sh:3244: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3245: fi
./mad.sh:2945: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:2946: -H "Connection: keep-alive" \
./mad.sh:2947: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:2948: "$download_url")
./mad.sh:2949: else
./mad.sh:2950: printf "| Retrieving Head: attempt #$j"
./mad.sh:2951: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:2952: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:2953: tee "${WorkDir}/.temp/directhead" &
./mad.sh:2954: sleep 6
./mad.sh:2955: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:2956: kill $! 2>/dev/null
./mad.sh:2957: )
./mad.sh:2958: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:2959: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:2960: fi
./mad.sh:2961: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:2962: fi
--
./mad.sh:3372: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3373: rc=$?
./mad.sh:3374: if [ $rc -ne 0 ] ; then
./mad.sh:3375: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3376: if [ -f ${file_path} ]; then
./mad.sh:3377: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3378: printf "\n\n"
./mad.sh:3379: rm -f "${file_path}"
./mad.sh:3380: else
./mad.sh:3381: printf "\n\n"
./mad.sh:3382: fi
./mad.sh:3089: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3090: rc=$?
./mad.sh:3091: if [ $rc -ne 0 ] ; then
./mad.sh:3092: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3093: if [ -f ${file_path} ]; then
./mad.sh:3094: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3095: printf "\n\n"
./mad.sh:3096: rm -f "${file_path}"
./mad.sh:3097: else
./mad.sh:3098: printf "\n\n"
./mad.sh:3099: fi
--
./mad.sh:3425: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3426: else
./mad.sh:3427: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3428: fi
./mad.sh:3429: received_file_size=0
./mad.sh:3430: if [ -f "$file_path" ] ; then
./mad.sh:3431: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3432: fi
./mad.sh:3433: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3434: containsHtml=false
./mad.sh:3435: else
./mad.sh:3436: containsHtml=true
./mad.sh:3437: fi
./mad.sh:3142: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3143: else
./mad.sh:3144: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3145: fi
./mad.sh:3146: received_file_size=0
./mad.sh:3147: if [ -f "$file_path" ] ; then
./mad.sh:3148: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3149: fi
./mad.sh:3150: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3151: containsHtml=false
./mad.sh:3152: else
./mad.sh:3153: containsHtml=true
./mad.sh:3154: fi
--
./mad.sh:3625: response=$(tor_curl_upload --insecure -i \
./mad.sh:3626: -H "Content-Type: multipart/form-data" \
./mad.sh:3627: -F "key=" \
./mad.sh:3628: -F "time=$jira_timeval" \
./mad.sh:3629: -F "file=@${filepath}" \
./mad.sh:3630: "${jira_PostUrlHost}")
./mad.sh:3631: else
./mad.sh:3632: response=$(tor_curl_upload --insecure -i \
./mad.sh:3633: -H "Content-Type: multipart/form-data" \
./mad.sh:3634: -F "key=" \
./mad.sh:3635: -F "time=$jira_timeval" \
./mad.sh:3636: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3637: "${jira_PostUrlHost}")
./mad.sh:3638: fi
./mad.sh:3639: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3640: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3641: fi
./mad.sh:3642: if grep -Eqi ' 200 ' <<< "${response}" ; then
./mad.sh:3342: response=$(tor_curl_upload --insecure -i \
./mad.sh:3343: -H "Content-Type: multipart/form-data" \
./mad.sh:3344: -F "key=" \
./mad.sh:3345: -F "time=$jira_timeval" \
./mad.sh:3346: -F "file=@${filepath}" \
./mad.sh:3347: "${jira_PostUrlHost}")
./mad.sh:3348: else
./mad.sh:3349: response=$(tor_curl_upload --insecure -i \
./mad.sh:3350: -H "Content-Type: multipart/form-data" \
./mad.sh:3351: -F "key=" \
./mad.sh:3352: -F "time=$jira_timeval" \
./mad.sh:3353: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3354: "${jira_PostUrlHost}")
./mad.sh:3355: fi
./mad.sh:3356: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3357: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3358: fi
./mad.sh:3359: if grep -Eqi ' 200 ' <<< "${response}" ; then


@ -6,13 +6,14 @@ Max Size . HostCode . Nickname . Notes
# ---------------------------------------------------------------------------------------
300GB 1f 1fichier.com 15d expiry free accounts
- 300GB fh filehaus.top (.su) ?? expiry
100GB snow send.now 30 days
40GB isup isupload.com ?? expiry
20GB rz ranoz.gg ?? expiry
20GB pd pixeldrain 120d expiry
20GB atea ateasystems.com ?? expiry
10GB gofile gofile.io ?? expiry
10GB tmpme tempfile.me 3mo expiry (tend to ban 7z faster)
5GB uhive uploadhive
5GB uhive uploadhive ??
- 5GB uflix uploadflix.cc 7d inactive expiry
5GB fd fileditch.com (.me) ??
5GB oshi oshi.at (.onion) 1000 file hits


@ -1,6 +1,6 @@
#! Name: filehaus.sh
#! Author: kittykat
#! Version: 2024.09.13
#! Version: 2025.02.17
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -93,6 +93,7 @@ fh_FetchFileInfo() {
echo -e "Trying .top domain..."
download_url=${download_url/\.su/\.top}
fi
filename=${download_url##*/} # Requires unique filename.ext but good for multipart files
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
tor_identity="${RANDOM}"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
@ -139,8 +140,6 @@ fh_FetchFileInfo() {
touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
if [ ! "$filename_override" == "" ] ; then
filename="$filename_override"
else
filename=${download_url##*/} # Requires unique filename.ext but good for multipart files
fi
filename=$(sanitize_file_or_folder_name "${filename}")
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
@ -181,7 +180,6 @@ fh_GetFile() {
if (( splitnum == 0)); then
splitnum=1
fi
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
pd_presize=0
if [ -f "$file_path" ] ; then
pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')


@ -1,6 +1,6 @@
#! Name: ranoz.sh
#! Author: kittykat
#! Version: 2025.02.11
#! Version: 2025.02.13
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -392,6 +392,63 @@ rz_GetFile() {
done
rm -f "$flockDownload";
rm -f "${rz_cookie_jar}";
if grep -Eqi '^.*--7_\....$' <<< "$filename" ; then
echo -e ""
echo -e "${BLUE}| Found mad upload random extension file (renaming 7z)...${NC}"
origext="7z"
mv "$file_path" "${file_path%\--7_*}.$origext"
filename="${filename%\--7_*}.$origext"
file_path="${file_path%\--7_*}.$origext"
elif grep -Eqi '^.*--z_\....$' <<< "$filename" ; then
echo -e ""
echo -e "${BLUE}| Found mad upload random extension file (renaming zip)...${NC}"
origext="zip"
mv "$file_path" "${file_path%\--z_*}.$origext"
filename="${filename%\--z_*}.$origext"
file_path="${file_path%\--z_*}.$origext"
elif grep -Eqi '^.*_-p.*--r_\....$' <<< "$filename" ; then
echo -e ""
echo -e "${BLUE}| Found mad upload random extension file (renaming mp rar)...${NC}"
origext="rar"
partnum="${filename##*_-p}"
partnum="${partnum%--r_*}"
newfilepath="${file_path%_-p*}.part${partnum}.$origext"
mv "$file_path" "$newfilepath"
filename="${newfilepath##*/}"
file_path="${newfilepath}"
elif grep -Eqi '^.*--r_\....$' <<< "$filename" ; then
echo -e ""
echo -e "${BLUE}| Found mad upload random extension file (renaming rar)...${NC}"
origext="rar"
mv "$file_path" "${file_path%--r_*}.$origext"
filename="${filename%--r_*}.$origext"
file_path="${file_path%--r_*}.$origext"
elif grep -Eqi '^.*--t_\....$' <<< "$filename" ; then
echo -e ""
echo -e "${BLUE}| Found mad upload random extension file (renaming tar)...${NC}"
origext="tar"
mv "$file_path" "${file_path%--t_*}.$origext"
filename="${filename%--t_*}.$origext"
file_path="${file_path%--t_*}.$origext"
elif grep -Eqi '^.*_-7--..._\....$' <<< "$filename" ; then
echo -e ""
echo -e "${BLUE}| Found mad upload random extension file (renaming)...${NC}"
origext=${filename##*--}
origext=${origext%_*}
newfilepath="${file_path%--*}.$origext"
newfilepath="${newfilepath//_-7/.7z}"
mv "$file_path" "$newfilepath"
filename="${newfilepath##*/}"
file_path="${newfilepath}"
elif grep -Eqi '^.*--..._\....$' <<< "$filename" ; then
echo -e ""
echo -e "${BLUE}| Found mad upload random extension file (renaming)...${NC}"
origext=${filename##*--}
origext=${origext%_*}
mv "$file_path" "${file_path%--*}.$origext"
filename="${filename%--*}.$origext"
file_path="${file_path%--*}.$origext"
fi
ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
return 0
}
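For reference, a minimal sketch of how the marker suffixes above decode back to the original extensions (filenames are made up; every branch uses the same parameter-expansion pattern):

# Decode examples for the rename branches above (hypothetical names):
#   backup--7_.pdf        -> backup.7z
#   backup--z_.mp3        -> backup.zip
#   backup_-p003--r_.gif  -> backup.part003.rar
#   backup--r_.png        -> backup.rar
#   backup--t_.csv        -> backup.tar
#   backup_-7--001_.txt   -> backup.7z.001
#   backup--iso_.jpg      -> backup.iso
# e.g. the plain-rar branch in shell:
f="backup--r_.png"
echo "${f%--r_*}.rar"   # prints: backup.rar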


@ -1,6 +1,6 @@
#! Name: sendnow.sh
#! Author: kittykat
#! Version: 2025.02.12
#! Version: 2025.02.15
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -79,7 +79,7 @@ snow_FetchFileInfo() {
finalAttempt=$1
maxfetchretries=5
snow_cookie_jar=""
echo -e "${GREEN}# Fetching download link${NC}"
echo -e "${GREEN}# Fetching post info${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
mkdir -p "${WorkDir}/.temp"
snow_cookie_jar=$(mktemp "${WorkDir}/.temp/snow_cookies""${instance_no}"".XXXXXX")
@ -105,7 +105,7 @@ snow_FetchFileInfo() {
continue
fi
fi
if grep -Eqi "Sorry, you are banned" <<< "$response"; then
if grep -Eqi "Your IP has been banned|you are banned" <<< "$response"; then
rm -f "${snow_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
printf "\\n"
@ -135,6 +135,7 @@ snow_FetchFileInfo() {
post_rand=$(grep -oPi '(?<=input type="hidden" name="rand" value=").*(?=">)' <<< "$response")
post_referer=$(grep -oPi '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response")
fi
file_size_readable=$(grep -oPi '(?<=</i> Download \[).*?(?=\]</button>.*$)' <<< "$response")
if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then
rm -f "${snow_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
@ -152,6 +153,7 @@ snow_FetchFileInfo() {
break
fi
done
echo -e "${GREEN}# Fetching download url…${NC}"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $snow_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15
form_data="op=$post_op&id=$post_id&rand=$post_rand&referer=&method_free=&method_premium="
@ -182,6 +184,7 @@ snow_FetchFileInfo() {
download_url=${download_url//[$'\t\r\n']}
filename="${download_url##*/}"
filename=${filename//[$'\t\r\n']}
download_url=$(urlencode_literal_grouped_case_urlendingonly ${download_url})
else
echo -e "${RED}| Failed to extract download link [3]${NC}"
warnAndRetryUnknownError=true
@ -193,11 +196,12 @@ snow_FetchFileInfo() {
fi
echo -e "${GREEN}# Fetching file info…${NC}"
fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/d/)' <<< "$download_url")
fshost=${fshost//[$'\t\r\n']}
for ((j=1; j<=$maxfetchretries; j++)); do
printf " ."
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${snow_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
file_header=$(tor_curl_request --insecure --head -Lis \
file_header=$(tor_curl_request_extended --insecure --head -Lis \
-H "Host: $fshost" \
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
-H "Accept-Language: en-US,en;q=0.5" \
@ -284,7 +288,7 @@ snow_FetchFileInfo() {
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Filesize not found!" ""
fi
echo -e "${YELLOW}| Filesize not found… retry${NC}"
echo -e "${YELLOW}| Filesize not found…${NC}"
return 1
else
file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
@ -320,7 +324,7 @@ snow_GetFile() {
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${snow_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if [ "${UseTorCurlImpersonate}" == "true" ]; then
if [ "${RateMonitorEnabled}" == "true" ]; then
tor_curl_request --insecure -L --no-alpn \
tor_curl_request_extended --insecure -L --no-alpn \
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-H "Host: $fshost" \
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \

hosts/up_ranoz.sh Normal file → Executable file

@ -1,6 +1,6 @@
#! Name: up_ranoz.sh
#! Author: kittykat
#! Version: 2024.11.27
#! Version: 2025.02.14
#! Desc: Add support for uploading files to bedrive.ru
#! Info: Files are accessible at https://ranoz.gg/file/<file_code>
#! MaxSize: 20GB
@ -97,16 +97,42 @@ rz_PostFile() {
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
tor_identity="${RANDOM}"
PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
if [[ "$RanozRandomizeExt" == "true" ]] && [[ ! $filename == *.rar ]]; then
randomext=$(GetSemiRandomExt)
echo -e "${BLUE}MAD Randomized Extension: $randomext${NC}"
origext=${filepath##*.}
if [[ "$origext" == "7z" ]]; then
tmpfilepath="${filepath%.*}--7_.${randomext}"
elif [[ "$origext" == "zip" ]]; then
tmpfilepath="${filepath%.*}--z_.${randomext}"
elif grep -Eqi '\.part.*\.rar' <<< "${filepath##*/}" ; then
partnum="${filepath##*.part}"
partnum="${partnum%.rar*}"
echo -e "$partnum"
tmpfilepath="${filepath%.part*}_-p${partnum}--r_.${randomext}"
elif [[ "$origext" == "rar" ]]; then
tmpfilepath="${filepath%.*}--r_.${randomext}"
elif [[ "$origext" == "tar" ]]; then
tmpfilepath="${filepath%.*}--t_.${randomext}"
elif [[ "${filepath##*/}" == *".7z."* ]]; then
tmpfilepath="${filepath%.*}_-7--${origext}_.${randomext}"
else
tmpfilepath="${filepath%.*}--${origext}_.${randomext}"
fi
tmpfilename="${tmpfilepath##*/}"
else
tmpfilepath=$filepath
tmpfilename="${tmpfilepath##*/}"
fi
local fsize=$(stat -c%s "$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -L -i -s \
"$PostUrlHost" \
-H "Content-Type: application/json" \
-d "{ \
\"filename\": \"$filename\", \
\"filename\": \"$tmpfilename\", \
\"size\": $fsize}")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_ticket" "post_url: ${PostUrlHost}"$'\n'"data: ${filename}, ${fsize}"$'\n'"${response}"
debugHtml "${filepath##*/}" "${_hostCode}_ticket" "post_url: ${PostUrlHost}"$'\n'"data: ${filepath}, ${fsize}"$'\n'"${response}"
fi
if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
PostUrlHost=$(grep -oPi '(?<="upload_url":").*?(?=".*$)' <<< "$response")
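The branch chain above is the upload-side counterpart of the rename logic added to ranoz.sh; a minimal sketch of the mapping with made-up paths and a sample random extension. Note that the outer "! $filename == *.rar" guard means .rar inputs skip randomization for this host, so the rar branches in the chain are not reached here.

# Encode examples (hypothetical paths; randomext comes from GetSemiRandomExt):
#   /data/backup.7z   -> /data/backup--7_.pdf
#   /data/backup.zip  -> /data/backup--z_.mp3
#   /data/backup.tar  -> /data/backup--t_.csv
#   /data/backup.iso  -> /data/backup--iso_.jpg
# e.g. the generic branch in shell:
filepath="/data/backup.iso"; origext=${filepath##*.}; randomext="jpg"
echo "${filepath%.*}--${origext}_.${randomext}"   # prints: /data/backup--iso_.jpg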
@ -138,7 +164,11 @@ rz_PostFile() {
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
if [[ "$RanozRandomizeExt" == "true" ]]; then
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" "[rand ext, rename to $filename or use MAD v2025.02.13+]"
else
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
fi
return 0
else
err=$(grep -oPi '(?<=HTTP/.*).*?(?=$)' <<< "$response")
@ -156,3 +186,42 @@ rz_PostFile() {
#!
#! --------------- Host Extra Functions ------------------- #
#!
GetSemiRandomExt() {
local ar_rEXT[0]='pdf'
local ar_rEXT[1]='doc'
local ar_rEXT[2]='xls'
local ar_rEXT[3]='mdb'
local ar_rEXT[4]='xml'
local ar_rEXT[5]='xsd'
local ar_rEXT[6]='svg'
local ar_rEXT[7]='img'
local ar_rEXT[8]='iso'
local ar_rEXT[9]='svg'
local ar_rEXT[10]='ico'
local ar_rEXT[11]='bmp'
local ar_rEXT[12]='eps'
local ar_rEXT[13]='3gp'
local ar_rEXT[14]='png'
local ar_rEXT[15]='wav'
local ar_rEXT[16]='mp3'
local ar_rEXT[17]='tif'
local ar_rEXT[18]='swf'
local ar_rEXT[19]='rtf'
local ar_rEXT[20]='ppt'
local ar_rEXT[21]='png'
local ar_rEXT[22]='jpg'
local ar_rEXT[23]='mpg'
local ar_rEXT[24]='mp4'
local ar_rEXT[25]='aac'
local ar_rEXT[26]='mid'
local ar_rEXT[27]='ics'
local ar_rEXT[28]='gif'
local ar_rEXT[29]='csv'
local ar_rEXT[30]='ogg'
local ar_rEXT[31]='txt'
local ar_rEXT[32]='css'
local ar_rEXT[33]='htm'
local arrSize=${#ar_rEXT[@]}
local index=$(($RANDOM % $arrSize))
printf "%s" "${ar_rEXT[$index]}"
}
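A short usage sketch for the helper above (output value illustrative); the list repeats a couple of entries (png, svg), which simply weights them slightly more in the $RANDOM modulo pick:

# e.g. as called from rz_PostFile:
randomext=$(GetSemiRandomExt)
echo "$randomext"   # prints something like: csv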

hosts/up_sendnow.sh Normal file

@ -0,0 +1,178 @@
#! Name: up_sendnow.sh
#! Author: kittykat
#! Version: 2025.02.15
#! Desc: Add support for uploading files to a new host
#! Info: Files are accessible at https://send.now/<hash>
#! MaxSize: 100GB
#! Expire: 30 days
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='snow'
HostNick='send.now'
HostFuncPrefix='snow'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
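With the values above this module appends the segment /snow/send.now/snow@; a sketch of what the accumulated string can look like after several upload modules have been sourced (the other segments are illustrative):

# ListUploadHosts after sourcing a few upload host files (illustrative):
#   /rz/ranoz.gg/rz@/uhive/uploadhive/uhive@/snow/send.now/snow@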
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
snow_UploadFile() {
local _hostCode=${1}
local filepath=${2}
local filecnt=${3}
local pline=${4}
local filename="${filepath##*/}"
warnAndRetryUnknownError=false
exitUploadError=false
exitUploadNotAvailable=false
fileAlreadyDone=false
tor_identity="${RANDOM}"
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
MaxUploadSizeInBytes=107374182400
fsize=$(GetFileSize "$filepath" "false")
if ((fsize > MaxUploadSizeInBytes)); then
rm -f "${UploadTicket}"
echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
return 1
fi
finalAttempt="false"
for ((z=0; z<=$MaxUploadRetries; z++)); do
if [ $z -eq $MaxUploadRetries ] ; then
finalAttempt="true"
fi
trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if snow_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
return 0
elif [ $z -lt $MaxUploadRetries ]; then
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
fi
fi
if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${UploadTicket}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
sleep 3
fi
done
rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
snow_PostFile() {
local filepath=$1
local _hostCode=$2
local filename=$3
local fileCnt=$4
local retryCnt=$5
local finalAttempt=$6
local pline=${7}
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
echo -e "[${YELLOW}${_hostCode}${NC}] Finding good Tor node{NC}"
for ((i=0; i<=15; i++)); do
tor_identity="${RANDOM}"
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_request --insecure -L -s 'https://send.now/upload')
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_fetch" "${response}"
fi
if grep -Eqi "Your IP has been banned|you are banned" <<< "$response"; then
if [ "${finalAttempt}" == "true" ] ; then
printf "\\n"
echo -e "${RED}| Failed to upload file: Ip blocked or banned${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Ip blocked or banned"
exitUploadError=true
return 1
else
continue
fi
elif grep -Eqi 'action="https://.*send\.now/cgi-bin/upload\.cgi\?upload_type\=' <<< "$response"; then
echo -e "${GREEN}| Node found${NC}"
break
else
if [ "${finalAttempt}" == "true" ] ; then
printf "\\n"
echo -e "${RED}| Failed to upload file: unable to find a good Tor node${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Unable to find a good Tor node"
exitUploadError=true
return 1
else
continue
fi
fi
done
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
local ar_HUP[0]="https://u7324.send.now/cgi-bin/upload.cgi?upload_type=file&utype=anon"
local arrSize=${#ar_HUP[@]}
local index=$(($RANDOM % $arrSize))
local RandomHostUploadUrl=${ar_HUP[$index]}
PostUrlHost="$RandomHostUploadUrl"
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i \
-H "Content-Type: multipart/form-data" \
-F "sess_id=" \
-F "utype=anon" \
-F "file_descr=" \
-F "file_public=1" \
-F "link_rcpt=" \
-F "link_pass=" \
-F "to_folder=" \
-F "upload=Start upload" \
-F "keepalive=1" \
-F "file_0=@$filepath" \
"${PostUrlHost}")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then
hash=$(grep -oPi '(?<="file_code":").*?(?=".*$)' <<< "$response")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="https://send.now/${hash}"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
else
err=$(grep -oPi '(?<="file_status":").*?(?=")' <<< "$response")
if [ "${finalAttempt}" == "true" ] ; then
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
exitUploadError=true
return 1
else
return 1
fi
fi
}
#!
#! --------------- Host Extra Functions ------------------- #
#!

hosts/up_uploadhive.sh Normal file → Executable file

@ -1,6 +1,6 @@
#! Name: up_uploadhive.sh
#! Author: kittykat
#! Version: 2024.12.25
#! Version: 2025.02.14
#! Desc: Add support for uploading files to uploadhive.com
#! Info: Files are accessible at https://uploadhive.com/<file_code>
#! MaxSize: 5GB
@ -100,6 +100,7 @@ uhive_PostFile() {
if [[ "$UploadHiveRandomizeExt" == "true" ]]; then
randomext=$(mktemp -u XXX)
randomext=${randomext,,}
echo -e "${BLUE}MAD Randomized Extension: $randomext${NC}"
origext=${filepath##*.}
if [[ "$origext" == "7z" ]]; then
tmpfilepath="${filepath%.*}--7_.${randomext}"
@ -119,12 +120,13 @@ uhive_PostFile() {
else
tmpfilepath="${filepath%.*}--${origext}_.${randomext}"
fi
mv $filepath $tmpfilepath
echo -e "Creating temp random ext file..."
cp $filepath $tmpfilepath
else
tmpfilepath=$filepath
fi
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
arrFiles=("$tmpfilepath")
trap "rm -f ${UploadTicket}; echo ""; rm -f $tmpfilepath; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i \
-H "Content-Type: multipart/form-data" \
-F "sess_id=" \
@ -141,7 +143,7 @@ uhive_PostFile() {
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
if [[ "$UploadHiveRandomizeExt" == "true" ]]; then
mv $tmpfilepath $filepath
rm -f $tmpfilepath;
fi
if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then
hash=$(grep -oPi '(?<=file_code":").*?(?=".*$)' <<< "$response")
@ -150,7 +152,11 @@ uhive_PostFile() {
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
if [[ "$UploadHiveRandomizeExt" == "true" ]]; then
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" "[rand ext, rename to $filename or use MAD v2025.02.13+]"
else
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
fi
return 0
else
err=$(grep -oPi '(?<="file_status":").*?(?=".*$)' <<< "$response")


@ -1,6 +1,6 @@
#! Name: uploadbay.sh
#! Author: kittykat
#! Version: 2024.12.23
#! Version: 2025.02.15
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -17,7 +17,7 @@ HostCode='ubay'
HostNick='uploadbay'
HostFuncPrefix='direct'
HostUrls='uploadbay.net'
HostDomainRegex='^(http|https)://(.*\.)?uploadbay\.net/uploads/'
HostDomainRegex='^(http|https)://(.*\.)?uploadbay\.net(/|/uploads/)'
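A quick illustration of what the widened regex now accepts (URLs are made up):

# matched by both the old and the new pattern:
#   https://uploadbay.net/uploads/abc123/file.rar
# matched only by the new pattern:
#   https://www.uploadbay.net/dl/abc123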
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh

mad.sh

@ -30,9 +30,16 @@
#
# * Everyone who provided feedback and helped test.. and those who wish to remain anonymous
ScriptVersion=2025.02.12
ScriptVersion=2025.02.14
#=================================================
# Recent Additions
# 2025.02.14 - [mad] Add helpful verbiage for user on MAD Randomized Extension upload urls
# 2025.02.14 - [up_ranoz] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.14 - [up_uploadhive] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.13 - [mad] Add "RanozRandomizeExt" MAD randomized extension configurable variable
# 2025.02.13 - [up_ranoz] Add MAD randomized extension upload handling
# 2025.02.13 - [ranoz] Add MAD randomized extension download handling
# 2025.02.13 - [sendnow] Extend request timeout for head / get (server response time lag)
# 2025.02.12 - [sendnow] Add send.now as download host
# 2025.02.11 - [ranoz] Fix filename (to handle fileid added to download urls)
# 2025.02.10 - [mad] Add detection of custom "Removed" response on cdn get from direct links
@ -227,7 +234,7 @@ AutoCommentOnCompletion=true
# Whonix-exo
# Linux-xterm
# Linux-gnome
# @Default=Linux-exo
# @Default=Whonix-exo
OsType="Whonix-exo"
# Auto show "Mad Status" after complete
@ -247,9 +254,10 @@ CatnapDuration=1
# HOST SPECIFIC SECTION
#-------------------
# [uploadhive]: Randomize extension (bypass 7z, zip, tar block)
# [{"file_code":"undef","file_status":"unallowed extension"}]
# Mad Randomized extension (Uploads): Bypass host specific extension blocks (7z, zip, tar block)
# Change ext to random 3 letter extension and obfuscate original in filename
UploadHiveRandomizeExt=true
RanozRandomizeExt=true
# [Oshi]: Control BaseUrl Override (none, oshiat, oshionion)
# none: Will download from whatever url base is passed in
@ -1053,13 +1061,14 @@ successUpload() {
local filesize=$(literalize_string "$4")
local downloadLink="$5"
local responseHtml=$6
local message=$7
local filename="${filepath##*/}"
mkdir -p "${WorkDir}/uploads"
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
echo -e "[OK] ${filename}, ${HostCode}, ${downloadLink}" >> "${WorkDir}/uploads/results.txt"
echo -e "[OK] ${filename}, ${HostCode}, ${downloadLink}, ${message}" >> "${WorkDir}/uploads/results.txt"
mkdir -p "${WorkDir}/uploads"
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
echo -e "${filename}, ${HostCode}, ${downloadLink}" >> "${WorkDir}/uploads/result-links.txt"
echo -e "${filename}, ${HostCode}, ${downloadLink} ${message}" >> "${WorkDir}/uploads/result-links.txt"
mkdir -p "${WorkDir}/uploads/_tickets"
cTicket="${WorkDir}/uploads/_tickets/`date +%y%m%d-%H%M%S`_${filename}_${HostCode}_upload.txt"
echo -e "${downloadLink}\\n\\nResponse:\\n${responseHtml}" > "$cTicket"
@ -1068,7 +1077,11 @@ successUpload() {
mkdir -p "${WorkDir}/data"
echo -e "$dateStamp [OK] file: ${filename}, host: ${HostCode}, dl: ${downloadLink}, ticket: ${cTicket}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/data/uploads_completed.txt"
if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
sed -i -e "s>^${pLine}.*>#& #OK# ${downloadLink//&/\\&}>g" "${InputFile}" #processed line
if [ ! -z "$message" ]; then
sed -i -e "s>^${pLine}.*>#& #OK# ${downloadLink//&/\\&} ${message}>g" "${InputFile}" #processed line
else
sed -i -e "s>^${pLine}.*>#& #OK# ${downloadLink//&/\\&}>g" "${InputFile}" #processed line
fi
fi
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
mkdir -p "${WorkDir}/uploads"
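A quick illustration of the in-place annotation performed by the sed expressions above (path and link are made up). The "&" in the replacement echoes the matched line, which is why any "&" inside the link is escaped first via ${downloadLink//&/\\&}:

# line in the upload InputFile before:
#   /data/backup.7z
# same line after successUpload runs with a rand-ext message:
#   #/data/backup.7z #OK# https://ranoz.gg/file/abc123 [rand ext, rename to backup.7z or use MAD v2025.02.13+]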


@ -64,6 +64,7 @@ CatnapDuration=1
#-------------------
UploadHiveRandomizeExt=true
RanozRandomizeExt=true
OshiBaseUrlOverride="oshiat"
UsePixeldrainBypass=false
EnableFiledotProcessing=false


@ -12,6 +12,7 @@
# 40GB isup isupload.com 100MB fb fileblade.com 500MB fland fileland.io
# 100MB ubay uploadbay.net 2GB sysp syspro.com.br 1GB uwab uwabaki.party
# 512MB anon anonfile.de 100MB fget fireget.com 1GB lain pomf2.lain.la
# 100GB snow send.now
# Jirafeau hosts (recommended upload 100MB splits as many host only support that)
# 10GB anarc anarchaserver 1GB kaz depot.kaz.bzh 5GB squid filesquid
# 10GB nant nantes.cloud 500MB soy soyjak.download 512MB linx linxx.net