# 2025.02.02 - [mad] Add function to handle urlencoding of Cyrillic / kanji / Latin / etc. characters

# 2025.02.02 - [ranoz] Fix handling of filenames containing Cyrillic / kanji / Latin characters
# 2025.02.02 - [all] Reduce urlencode character processing to special URL characters only
# 2025.01.30 - [isupload] Add handling of 404 Not Found on initial page fetch
# 2025.01.23 - [mad] Do not check for supported host on "direct=" lines
# 2025.01.19 - [fileditch] Add direct download URL processing for fileditchfiles.me (though they block Tor now)
kittykat 2025-02-02 08:20:42 +00:00
parent 0ba636a488
commit fd4723eb24
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
7 changed files with 519 additions and 460 deletions
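
The urlencode helper referenced by the [mad] and [ranoz] entries above is not itself shown in these audit diffs. For orientation only, here is a minimal sketch of the usual byte-wise approach in Bash; the function name, character set and every detail below are assumptions, not the actual mad.sh code:

# Hypothetical sketch, not the mad.sh implementation: percent-encode a string
# byte by byte so multibyte UTF-8 names (Cyrillic, kanji, etc.) survive in URLs,
# while RFC 3986 unreserved ASCII passes through untouched.
urlencode() {
  local LC_ALL=C                # index raw bytes, not multibyte characters
  local s="$1" c i
  for (( i = 0; i < ${#s}; i++ )); do
    c="${s:i:1}"
    case "$c" in
      [a-zA-Z0-9.~_-]) printf '%s' "$c" ;;      # unreserved: copy as-is
      *)               printf '%%%02X' "'$c" ;; # everything else: %XX per byte
    esac
  done
  printf '\n'
}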

@@ -1,4 +1,4 @@
DateTime: 25.01.18
DateTime: 25.02.02
Files:
./hosts/1fichier.sh
@@ -352,12 +352,12 @@ _________________________________________________________________________
./hosts/quax.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/ranoz.sh:157: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:267: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:269: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:273: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:278: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:293: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:158: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:268: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:274: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:294: tor_curl_request --insecure -L -G --no-alpn \
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/syspro.sh:188: tor_curl_request --insecure -L \
@@ -474,119 +474,119 @@ _________________________________________________________________________
./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
./mad.sh:122:UseTorCurlImpersonate=false
./mad.sh:413:tor_curl_request() {
./mad.sh:414: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:420:tor_curl_request_extended() {
./mad.sh:422: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:423: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:425: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:428:tor_curl_upload() {
./mad.sh:429: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:431: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:433: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:437: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:439: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1395:install_curl_impersonate() {
./mad.sh:1397: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1398: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1402: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1403: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1407: PS3='Please select which curl_impersonate to install: '
./mad.sh:1415: install_curl_impersonate_lwthiker_orig
./mad.sh:1419: install_curl_impersonate_lexiforest_fork
./mad.sh:1429:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1433: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1434: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1437: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1440: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1442: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1445: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1455: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1457: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1460: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1462: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1510: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1539: echo -e "| Extracting curl_impersonate..."
./mad.sh:1541: rm -f "${ScriptDir}"/curl*
./mad.sh:1542: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1543: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1544: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1552:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1556: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1557: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1560: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1563: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1565: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1568: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1578: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1580: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1583: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1585: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1633: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1662: echo -e "| Extracting curl_impersonate..."
./mad.sh:1664: rm -f "${ScriptDir}"/curl*
./mad.sh:1665: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1666: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1667: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1829: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1837: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1838: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1847: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1849: echo -e "$maud_curl"
./mad.sh:1851: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1853: echo -e "$maud_torcurl"
./mad.sh:1865: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1866: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1875: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1877: echo -e "$maud_curl"
./mad.sh:1879: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1881: echo -e "$maud_torcurl"
./mad.sh:1887: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1888: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1897: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1899: echo -e "$maud_curl"
./mad.sh:1901: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1903: echo -e "$maud_torcurl"
./mad.sh:2850: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2851: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2853: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3025: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3026: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3028: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3226: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3233: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3363: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3407: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3409: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3607: response=$(tor_curl_upload --insecure -i \
./mad.sh:3614: response=$(tor_curl_upload --insecure -i \
./mad.sh:3685:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3686: curl_impersonate=()
./mad.sh:3687: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3688: bFoundCurlHeader=false
./mad.sh:3692: curl_impersonate=($fil)
./mad.sh:3693: bFoundCurlHeader=true
./mad.sh:3697: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3698: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3701: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3704: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3706: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3710: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3711: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3714: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3716: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3717: UseTorCurlImpersonate=false
./mad.sh:3718: install_curl_impersonate
./mad.sh:3802: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3803: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3881:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3882: install_curl_impersonate
./mad.sh:3913:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3914: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3916: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:128:UseTorCurlImpersonate=false
./mad.sh:419:tor_curl_request() {
./mad.sh:420: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:421: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:423: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:426:tor_curl_request_extended() {
./mad.sh:428: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:429: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:431: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:434:tor_curl_upload() {
./mad.sh:435: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:437: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:439: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:443: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:445: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1441:install_curl_impersonate() {
./mad.sh:1443: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1444: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1448: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1449: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1453: PS3='Please select which curl_impersonate to install: '
./mad.sh:1461: install_curl_impersonate_lwthiker_orig
./mad.sh:1465: install_curl_impersonate_lexiforest_fork
./mad.sh:1475:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1479: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1480: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1483: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1486: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1488: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1491: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1501: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1503: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1506: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1508: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1556: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1585: echo -e "| Extracting curl_impersonate..."
./mad.sh:1587: rm -f "${ScriptDir}"/curl*
./mad.sh:1588: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1589: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1590: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1598:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1602: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1603: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1606: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1609: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1611: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1614: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1624: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1626: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1629: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1631: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1679: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1708: echo -e "| Extracting curl_impersonate..."
./mad.sh:1710: rm -f "${ScriptDir}"/curl*
./mad.sh:1711: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1712: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1713: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1875: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1883: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1884: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1893: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1895: echo -e "$maud_curl"
./mad.sh:1897: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1899: echo -e "$maud_torcurl"
./mad.sh:1911: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1912: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1921: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1923: echo -e "$maud_curl"
./mad.sh:1925: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1927: echo -e "$maud_torcurl"
./mad.sh:1933: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1934: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1943: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1945: echo -e "$maud_curl"
./mad.sh:1947: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1949: echo -e "$maud_torcurl"
./mad.sh:2896: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2897: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2899: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3071: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3072: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3074: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3272: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3279: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3409: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3453: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3455: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3653: response=$(tor_curl_upload --insecure -i \
./mad.sh:3660: response=$(tor_curl_upload --insecure -i \
./mad.sh:3731:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3732: curl_impersonate=()
./mad.sh:3733: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3734: bFoundCurlHeader=false
./mad.sh:3738: curl_impersonate=($fil)
./mad.sh:3739: bFoundCurlHeader=true
./mad.sh:3743: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3744: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3747: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3750: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3752: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3756: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3757: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3760: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3762: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3763: UseTorCurlImpersonate=false
./mad.sh:3764: install_curl_impersonate
./mad.sh:3848: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3849: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3927:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3928: install_curl_impersonate
./mad.sh:3959:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3960: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3962: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
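
Every host script in the listing above goes through the same tor_curl_request wrapper (mad.sh:413 old / 419 new). Purely as a usage illustration, a hedged sketch with placeholder values; the variable names come from the listings, the values shown are not the project defaults:

TorIp=127.0.0.1               # placeholder values for illustration only
torPort=9050
tor_identity="mad$RANDOM"     # distinct SOCKS credentials let Tor isolate the stream on its own circuit
ConnectTimeout=20
UseTorCurlImpersonate=false   # false = plain curl; true = the curl_impersonate binary

remote_url="https://example.com/file/abcd"                      # made-up URL
response=$(tor_curl_request --insecure -L -s "$remote_url")     # same call shape as ranoz.sh:90 / syspro.sh:88 above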

@@ -1,4 +1,4 @@
DateTime: 25.01.18
DateTime: 25.02.02
Files:
./hosts/1fichier.sh
@@ -429,75 +429,75 @@ _________________________________________________________________________
./hosts/up_uwabaki.sh:108: if grep -Eqi 'File uploaded: <a href="https://files.uwabaki.party/' <<< "${response}" ; then
./hosts/up_uwabaki.sh:120: downloadLink="https://files.uwabaki.party${url}"
./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
./mad.sh:696: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:698: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:701: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:703: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:724: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:726: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:729: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:731: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:752: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:754: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:757: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:759: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:781: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:783: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:786: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:788: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:812: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:814: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:817: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:819: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:845: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:847: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:867: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:888: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:890: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:893: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:895: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:911: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:913: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:916: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:918: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:937: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:939: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:942: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:944: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:964: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:966: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:969: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:971: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:989: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:991: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:994: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:996: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1015: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1017: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1020: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1022: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1440: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1457: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1563: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1580: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1843: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1871: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1893: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3209: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3724:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3821: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3823: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3825: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4044: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4045: remote_url=${remote_url/http:/https:}
./mad.sh:4066: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4067: remote_url=${remote_url/http:/https:}
./mad.sh:4433: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4434: remote_url=${remote_url/http:/https:}
./mad.sh:4492: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4493: remote_url=${remote_url/http:/https:}
./mad.sh:4518: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4519: remote_url=${remote_url/http:/https:}
./mad.sh:742: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:744: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:747: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:749: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:770: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:772: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:775: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:777: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:798: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:800: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:803: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:805: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:827: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:829: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:832: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:834: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:858: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:860: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:863: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:865: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:891: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:893: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:913: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:934: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:936: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:939: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:941: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:957: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:959: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:962: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:964: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:983: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:985: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:988: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:990: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1010: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:1012: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:1015: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1017: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1035: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:1037: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:1040: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1042: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1061: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1063: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1066: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1068: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1486: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1503: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1609: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1626: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1889: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1917: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1939: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3255: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3770:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3867: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3869: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3871: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4090: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4091: remote_url=${remote_url/http:/https:}
./mad.sh:4112: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4113: remote_url=${remote_url/http:/https:}
./mad.sh:4479: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4480: remote_url=${remote_url/http:/https:}
./mad.sh:4538: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4539: remote_url=${remote_url/http:/https:}
./mad.sh:4565: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4566: remote_url=${remote_url/http:/https:}
./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
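
Most of the file above is the sed bookkeeping that comments out finished or failed entries in the input list. The recurring ${url/https:/http:} piece is ordinary Bash pattern substitution; a small illustration with a made-up URL:

url="https://example.com/f/abcd"     # made-up example
echo "${url/https:/http:}"           # prints: http://example.com/f/abcd
# The sed anchor "^${url/https:/http:}" is built from the http: form, which
# (per the "#http (if changed)" comments above) appears intended to catch a
# list entry whose scheme differs from the URL the script ended up using.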

@@ -1,4 +1,4 @@
DateTime: 25.01.18
DateTime: 25.02.02
Files:
./hosts/1fichier.sh
@@ -1675,7 +1675,7 @@ _________________________________________________________________________
./hosts/isupload.sh:266: debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/isupload.sh:267: fi
./hosts/isupload.sh:268: if [ ! -z "$file_header" ] ; then
./hosts/isupload.sh:269: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
./hosts/isupload.sh:269: if grep -Eqi '404 Not Found|' <<< "${file_header}" ; then
./hosts/isupload.sh:270: printf "\\n"
./hosts/isupload.sh:271: echo -e "${RED}| Not Found (404). The file has been removed.${NC}"
./hosts/isupload.sh:272: removedDownload "${remote_url}"
@@ -1953,50 +1953,50 @@ _________________________________________________________________________
./hosts/ranoz.sh:99: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" ""
--
./hosts/ranoz.sh:157: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:158: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:159: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:160: fi
./hosts/ranoz.sh:161: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:162: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:163: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:164: printf "\\n"
./hosts/ranoz.sh:165: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:166: warnAndRetryUnknownError=true
./hosts/ranoz.sh:167: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:158: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:159: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:160: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:161: fi
./hosts/ranoz.sh:162: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:163: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:164: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:165: printf "\\n"
./hosts/ranoz.sh:166: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:167: warnAndRetryUnknownError=true
./hosts/ranoz.sh:168: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/ranoz.sh:269: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:270: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:271: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:272: else
./hosts/ranoz.sh:273: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:274: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:275: fi
./hosts/ranoz.sh:276: else
./hosts/ranoz.sh:277: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:278: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:279: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:280: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:281: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:282: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:283: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:284: -H "Connection: keep-alive" \
./hosts/ranoz.sh:285: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:286: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:287: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:288: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:271: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:272: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:273: else
./hosts/ranoz.sh:274: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:275: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:276: fi
./hosts/ranoz.sh:277: else
./hosts/ranoz.sh:278: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:280: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:281: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:282: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:283: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:284: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:285: -H "Connection: keep-alive" \
./hosts/ranoz.sh:286: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:287: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:288: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:289: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/ranoz.sh:293: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:294: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:295: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:296: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:297: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:298: -H "Connection: keep-alive" \
./hosts/ranoz.sh:299: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:300: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:301: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:302: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:303: -H "Sec-Fetch-Site: same-origin" \
./hosts/ranoz.sh:294: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:295: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:296: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:297: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:298: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:299: -H "Connection: keep-alive" \
./hosts/ranoz.sh:300: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:301: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:302: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:303: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:304: -H "Sec-Fetch-Site: same-origin" \
--
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then
@@ -3186,235 +3186,235 @@ _________________________________________________________________________
./hosts/youdbox.sh:287: containsHtml=true
./hosts/youdbox.sh:288: fi
--
./mad.sh:413:tor_curl_request() {
./mad.sh:414: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:416: else
./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:418: fi
./mad.sh:419:}
./mad.sh:420:tor_curl_request_extended() {
./mad.sh:421: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:422: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:423: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:424: else
./mad.sh:425: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:426: fi
./mad.sh:427:}
./mad.sh:428:tor_curl_upload() {
./mad.sh:429: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:430: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:431: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:432: else
./mad.sh:433: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:434: fi
./mad.sh:435: else
./mad.sh:419:tor_curl_request() {
./mad.sh:420: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:421: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:422: else
./mad.sh:423: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:424: fi
./mad.sh:425:}
./mad.sh:426:tor_curl_request_extended() {
./mad.sh:427: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:428: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:429: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:430: else
./mad.sh:431: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:432: fi
./mad.sh:433:}
./mad.sh:434:tor_curl_upload() {
./mad.sh:435: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:436: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:437: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:437: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:438: else
./mad.sh:439: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:440: fi
./mad.sh:441: else
./mad.sh:442: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:443: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:444: else
--
./mad.sh:1440: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1441: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1442: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1443: fi
./mad.sh:1444: if [ ! -z "$response" ]; then
./mad.sh:1445: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1446: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1447: break
./mad.sh:1448: fi
./mad.sh:1449: done
./mad.sh:1450: if [ -z $latestTag ]; then
./mad.sh:1486: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1487: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1488: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1489: fi
./mad.sh:1490: if [ ! -z "$response" ]; then
./mad.sh:1491: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1492: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1493: break
./mad.sh:1494: fi
./mad.sh:1495: done
./mad.sh:1496: if [ -z $latestTag ]; then
--
./mad.sh:1460: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1461: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1462: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1463: fi
./mad.sh:1464: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1465: if ((j == 8)) ; then
./mad.sh:1466: return 1
./mad.sh:1467: else
./mad.sh:1468: continue
./mad.sh:1469: fi
./mad.sh:1470: fi
./mad.sh:1506: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1507: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1508: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1509: fi
./mad.sh:1510: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1511: if ((j == 8)) ; then
./mad.sh:1512: return 1
./mad.sh:1513: else
./mad.sh:1514: continue
./mad.sh:1515: fi
./mad.sh:1516: fi
--
./mad.sh:1510: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1511: received_file_size=0
./mad.sh:1512: if [ -f "$file_path" ] ; then
./mad.sh:1513: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1514: fi
./mad.sh:1515: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1516: break
./mad.sh:1517: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1518: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1519: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1520: exit 1
./mad.sh:1556: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1557: received_file_size=0
./mad.sh:1558: if [ -f "$file_path" ] ; then
./mad.sh:1559: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1560: fi
./mad.sh:1561: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1562: break
./mad.sh:1563: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1564: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1565: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1566: exit 1
--
./mad.sh:1563: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1564: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1565: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1566: fi
./mad.sh:1567: if [ ! -z "$response" ]; then
./mad.sh:1568: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1569: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1570: break
./mad.sh:1571: fi
./mad.sh:1572: done
./mad.sh:1573: if [ -z $latestTag ]; then
./mad.sh:1609: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1610: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1611: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1612: fi
./mad.sh:1613: if [ ! -z "$response" ]; then
./mad.sh:1614: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1615: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1616: break
./mad.sh:1617: fi
./mad.sh:1618: done
./mad.sh:1619: if [ -z $latestTag ]; then
--
./mad.sh:1583: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1584: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1585: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1586: fi
./mad.sh:1587: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1588: if ((j == 8)) ; then
./mad.sh:1589: return 1
./mad.sh:1590: else
./mad.sh:1591: continue
./mad.sh:1592: fi
./mad.sh:1593: fi
./mad.sh:1629: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1630: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1631: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1632: fi
./mad.sh:1633: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1634: if ((j == 8)) ; then
./mad.sh:1635: return 1
./mad.sh:1636: else
./mad.sh:1637: continue
./mad.sh:1638: fi
./mad.sh:1639: fi
--
./mad.sh:1633: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1634: received_file_size=0
./mad.sh:1635: if [ -f "$file_path" ] ; then
./mad.sh:1636: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1637: fi
./mad.sh:1638: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1639: break
./mad.sh:1640: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1641: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1642: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1643: exit 1
./mad.sh:1679: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1680: received_file_size=0
./mad.sh:1681: if [ -f "$file_path" ] ; then
./mad.sh:1682: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1683: fi
./mad.sh:1684: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1685: break
./mad.sh:1686: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1687: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1688: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1689: exit 1
--
./mad.sh:1838: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1839: echo -e "Files:"
./mad.sh:1840: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1841: echo -e ""
./mad.sh:1842: echo -e ""
./mad.sh:1843: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1844: echo -e "_________________________________________________________________________"
./mad.sh:1845: echo -e "$maud_http"
./mad.sh:1846: echo -e ""
./mad.sh:1847: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1848: echo -e "_________________________________________________________________________"
--
./mad.sh:1851: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1852: echo -e "_________________________________________________________________________"
./mad.sh:1853: echo -e "$maud_torcurl"
./mad.sh:1854: echo -e ""
./mad.sh:1855: echo -e ""
./mad.sh:1856: done
./mad.sh:1857: else
./mad.sh:1858: cd "$ScriptDir"
./mad.sh:1859: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1860: cd "$WorkDir"
./mad.sh:1861: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
--
./mad.sh:1866: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1867: echo -e "Files:"
./mad.sh:1868: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1869: echo -e ""
./mad.sh:1870: echo -e ""
./mad.sh:1871: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1872: echo -e "_________________________________________________________________________"
./mad.sh:1873: echo -e "$maud_http"
./mad.sh:1874: echo -e ""
./mad.sh:1875: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1876: echo -e "_________________________________________________________________________"
--
./mad.sh:1879: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1880: echo -e "_________________________________________________________________________"
./mad.sh:1881: echo -e "$maud_torcurl"
./mad.sh:1882: echo -e ""
./mad.sh:1883: done
./mad.sh:1884: for fil in "${arrFiles2[@]}";
./mad.sh:1885: do
./mad.sh:1886: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1887: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1888: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1889: echo -e "Files:"
./mad.sh:1890: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1891: echo -e ""
./mad.sh:1884: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1885: echo -e "Files:"
./mad.sh:1886: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1887: echo -e ""
./mad.sh:1888: echo -e ""
./mad.sh:1889: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1890: echo -e "_________________________________________________________________________"
./mad.sh:1891: echo -e "$maud_http"
./mad.sh:1892: echo -e ""
./mad.sh:1893: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1893: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1894: echo -e "_________________________________________________________________________"
./mad.sh:1895: echo -e "$maud_http"
./mad.sh:1896: echo -e ""
./mad.sh:1897: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
--
./mad.sh:1897: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1898: echo -e "_________________________________________________________________________"
./mad.sh:1899: echo -e "$maud_torcurl"
./mad.sh:1900: echo -e ""
./mad.sh:1901: echo -e ""
./mad.sh:1902: done
./mad.sh:1903: else
./mad.sh:1904: cd "$ScriptDir"
./mad.sh:1905: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1906: cd "$WorkDir"
./mad.sh:1907: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
--
./mad.sh:1901: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1902: echo -e "_________________________________________________________________________"
./mad.sh:1903: echo -e "$maud_torcurl"
./mad.sh:1904: echo -e ""
./mad.sh:1905: done
./mad.sh:1906: fi
./mad.sh:1907:}
./mad.sh:1908:madStatus() {
./mad.sh:1909: local InputFile="$1"
./mad.sh:1910: if [ "$arg1" == "status" ] ; then
./mad.sh:1911: clear
./mad.sh:1912: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1913: echo -e "Files:"
./mad.sh:1914: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1915: echo -e ""
./mad.sh:1916: echo -e ""
./mad.sh:1917: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1918: echo -e "_________________________________________________________________________"
./mad.sh:1919: echo -e "$maud_http"
./mad.sh:1920: echo -e ""
./mad.sh:1921: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1922: echo -e "_________________________________________________________________________"
--
./mad.sh:3226: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3227: -H "Connection: keep-alive" \
./mad.sh:3228: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3229: "$download_url")
./mad.sh:3230: else
./mad.sh:3231: printf "| Retrieving Head: attempt #$j"
./mad.sh:3232: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3233: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3234: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3235: sleep 6
./mad.sh:3236: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3237: kill $! 2>/dev/null
./mad.sh:3238: )
./mad.sh:3239: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3240: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3241: fi
./mad.sh:3242: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3243: fi
./mad.sh:1925: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1926: echo -e "_________________________________________________________________________"
./mad.sh:1927: echo -e "$maud_torcurl"
./mad.sh:1928: echo -e ""
./mad.sh:1929: done
./mad.sh:1930: for fil in "${arrFiles2[@]}";
./mad.sh:1931: do
./mad.sh:1932: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1933: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1934: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1935: echo -e "Files:"
./mad.sh:1936: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1937: echo -e ""
./mad.sh:1938: echo -e ""
./mad.sh:1939: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1940: echo -e "_________________________________________________________________________"
./mad.sh:1941: echo -e "$maud_http"
./mad.sh:1942: echo -e ""
./mad.sh:1943: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1944: echo -e "_________________________________________________________________________"
--
./mad.sh:3363: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3364: rc=$?
./mad.sh:3365: if [ $rc -ne 0 ] ; then
./mad.sh:3366: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3367: if [ -f ${file_path} ]; then
./mad.sh:3368: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3369: printf "\n\n"
./mad.sh:3370: rm -f "${file_path}"
./mad.sh:3371: else
./mad.sh:3372: printf "\n\n"
./mad.sh:3373: fi
./mad.sh:1947: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1948: echo -e "_________________________________________________________________________"
./mad.sh:1949: echo -e "$maud_torcurl"
./mad.sh:1950: echo -e ""
./mad.sh:1951: done
./mad.sh:1952: fi
./mad.sh:1953:}
./mad.sh:1954:madStatus() {
./mad.sh:1955: local InputFile="$1"
./mad.sh:1956: if [ "$arg1" == "status" ] ; then
./mad.sh:1957: clear
--
./mad.sh:3407: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3408: else
./mad.sh:3409: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3410: fi
./mad.sh:3411: received_file_size=0
./mad.sh:3412: if [ -f "$file_path" ] ; then
./mad.sh:3413: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3414: fi
./mad.sh:3415: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3416: containsHtml=false
./mad.sh:3417: else
./mad.sh:3418: containsHtml=true
./mad.sh:3419: fi
./mad.sh:3272: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3273: -H "Connection: keep-alive" \
./mad.sh:3274: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3275: "$download_url")
./mad.sh:3276: else
./mad.sh:3277: printf "| Retrieving Head: attempt #$j"
./mad.sh:3278: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3279: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3280: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3281: sleep 6
./mad.sh:3282: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3283: kill $! 2>/dev/null
./mad.sh:3284: )
./mad.sh:3285: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3286: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3287: fi
./mad.sh:3288: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3289: fi
--
./mad.sh:3607: response=$(tor_curl_upload --insecure -i \
./mad.sh:3608: -H "Content-Type: multipart/form-data" \
./mad.sh:3609: -F "key=" \
./mad.sh:3610: -F "time=$jira_timeval" \
./mad.sh:3611: -F "file=@${filepath}" \
./mad.sh:3612: "${jira_PostUrlHost}")
./mad.sh:3613: else
./mad.sh:3614: response=$(tor_curl_upload --insecure -i \
./mad.sh:3615: -H "Content-Type: multipart/form-data" \
./mad.sh:3616: -F "key=" \
./mad.sh:3617: -F "time=$jira_timeval" \
./mad.sh:3618: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3619: "${jira_PostUrlHost}")
./mad.sh:3620: fi
./mad.sh:3621: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3622: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3623: fi
./mad.sh:3624: if grep -Eqi ' 200 ' <<< "${response}" ; then
./mad.sh:3409: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3410: rc=$?
./mad.sh:3411: if [ $rc -ne 0 ] ; then
./mad.sh:3412: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3413: if [ -f ${file_path} ]; then
./mad.sh:3414: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3415: printf "\n\n"
./mad.sh:3416: rm -f "${file_path}"
./mad.sh:3417: else
./mad.sh:3418: printf "\n\n"
./mad.sh:3419: fi
--
./mad.sh:3453: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3454: else
./mad.sh:3455: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3456: fi
./mad.sh:3457: received_file_size=0
./mad.sh:3458: if [ -f "$file_path" ] ; then
./mad.sh:3459: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3460: fi
./mad.sh:3461: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3462: containsHtml=false
./mad.sh:3463: else
./mad.sh:3464: containsHtml=true
./mad.sh:3465: fi
--
./mad.sh:3653: response=$(tor_curl_upload --insecure -i \
./mad.sh:3654: -H "Content-Type: multipart/form-data" \
./mad.sh:3655: -F "key=" \
./mad.sh:3656: -F "time=$jira_timeval" \
./mad.sh:3657: -F "file=@${filepath}" \
./mad.sh:3658: "${jira_PostUrlHost}")
./mad.sh:3659: else
./mad.sh:3660: response=$(tor_curl_upload --insecure -i \
./mad.sh:3661: -H "Content-Type: multipart/form-data" \
./mad.sh:3662: -F "key=" \
./mad.sh:3663: -F "time=$jira_timeval" \
./mad.sh:3664: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3665: "${jira_PostUrlHost}")
./mad.sh:3666: fi
./mad.sh:3667: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3668: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3669: fi
./mad.sh:3670: if grep -Eqi ' 200 ' <<< "${response}" ; then


@ -1,6 +1,6 @@
#! Name: fileditch.sh
#! Author: kittykat
#! Version: 2024.11.18
#! Version: 2025.01.19
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -17,7 +17,18 @@ HostCode='fd'
HostNick='fileditch'
HostFuncPrefix='fd'
HostUrls='fileditch.com, fileditch.me'
HostDomainRegex='^(http|https)://(.*\.)?(fileditch\.com|fileditchstuff.me)/'
HostDomainRegex='^(http|https)://(.*\.)?(fileditch\.com|fileditchstuff\.me)/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
HostCode='fd'
HostNick='fileditch'
HostFuncPrefix='direct'
HostUrls='fileditch.com, fileditch.me'
HostDomainRegex='^(http|https)://(.*\.)?fileditchfiles\.me/file\.php\?f=/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
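For reference, a minimal sketch of what the two registration blocks above contribute, assuming ListHostAndDomainRegexes starts out empty: each block appends one '@'-terminated record of the form /HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex.

# Sketch only; this is the string the two blocks above would leave in the accumulator.
ListHostAndDomainRegexes='/fd/fileditch/fd/fileditch.com, fileditch.me:^(http|https)://(.*\.)?(fileditch\.com|fileditchstuff\.me)/@/fd/fileditch/direct/fileditch.com, fileditch.me:^(http|https)://(.*\.)?fileditchfiles\.me/file\.php\?f=/@'
# Splitting on '@' (safe here, since none of these fields contain one) recovers one record per block:
IFS='@' read -ra host_records <<< "$ListHostAndDomainRegexes"
printf '%s\n' "${host_records[@]}"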


@ -1,6 +1,6 @@
#! Name: isupload.sh
#! Author: kittykat
#! Version: 2025.01.05
#! Version: 2025.01.30
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -119,7 +119,7 @@ isup_FetchFileInfo() {
continue
fi
fi
if grep -Eqi "File was removed|There is no such file|File was deleted|File not found" <<< "$response"; then
if grep -Eqi "404 Not Found|File was removed|There is no such file|File was deleted|File not found" <<< "$response"; then
rm -f "${isup_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
@ -182,7 +182,7 @@ isup_FetchFileInfo() {
continue
fi
fi
if grep -Eqi 'No such file with this filename|File was deleted|File not found' <<< "$response"; then
if grep -Eqi '404 Not Found|No such file with this filename|File was deleted|File not found' <<< "$response"; then
rm -f "${isup_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
@ -266,7 +266,7 @@ isup_FetchFileInfo() {
debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
fi
if [ ! -z "$file_header" ] ; then
if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
if grep -Eqi '404 Not Found|' <<< "${file_header}" ; then
printf "\\n"
echo -e "${RED}| Not Found (404). The file has been removed.${NC}"
removedDownload "${remote_url}"


@ -1,6 +1,6 @@
#! Name: ranoz.sh
#! Author: kittykat
#! Version: 2025.01.18
#! Version: 2025.02.02
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -132,6 +132,7 @@ rz_FetchFileInfo() {
url_token=${url_postfix##*\?}
url_enc_fname=$(urlencode_literal_grouped_case ${url_fname})
download_url="${url_prefix}/${url_enc_fname}?${url_token}"
filename=$url_fname
break
else
if [ $i == $maxfetchretries ] ; then
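A rough sketch of what this block now yields when the link's filename contains non-ASCII characters; the url_prefix / url_fname / url_postfix values below are hypothetical stand-ins for whatever the surrounding parsing code (not shown in this hunk) extracted:

# Hypothetical values, for illustration only:
url_prefix='https://dl.example-ranoz-cdn.net/files'   # assumed server/path prefix
url_fname='отчёт (final).pdf'                          # filename portion of the link
url_postfix='отчёт (final).pdf?token=abc123'           # filename plus query string
url_token=${url_postfix##*\?}                          # -> token=abc123
url_enc_fname=$(urlencode_literal_grouped_case "$url_fname")
download_url="${url_prefix}/${url_enc_fname}?${url_token}"
filename=$url_fname                                    # the fix above: keep the unencoded name in $filename
# download_url -> https://dl.example-ranoz-cdn.net/files/отчёт%20%28final%29%2epdf?token=abc123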

mad.sh

@ -30,9 +30,15 @@
#
# * Everyone who provided feedback and helped test.. and those who wish to remain anonymous
ScriptVersion=2025.01.18
ScriptVersion=2025.02.02
#=================================================
# Recent Additions
# 2025.02.02 - [mad] Add function to handle urlencode of cyrillic / kanji / latin / etc
# 2025.02.02 - [ranoz] Fix handling filenames containing cyrillic / kanji / latin chars
# 2025.02.02 - [all] Reduced character processing for urlencode to special url characters
# 2025.01.30 - [isupload] Add handling of 404 Not Found on initial page fetch
# 2025.01.23 - [mad] Do not check for supported host on "direct=" lines
# 2025.01.19 - [fileditch] Add direct download url processing fileditchfiles.me (though they block Tor now)
# 2025.01.18 - [up_nantes] Update the post retention to "week" (host removed "month" option)
# 2025.01.18 - [mad] Updates to url_encode function and addition of conversion of utf8 to ascii function
# 2025.01.17 - [ranoz] Servers response to resume changed, set as no resume type for now
@ -517,7 +523,44 @@ mconvert_utf8_to_ascii() {
local response_ascii=$(echo "$1" | iconv -c -f UTF-8 -t ASCII//TRANSLIT)
printf "%s" "$response_ascii"
}
urlencode_literal_grouped_case () {
urlencode_literal_grouped_case() {
local inputCleaned=$(echo -en "$@")
local out=$(echo "$inputCleaned" \
| sed \
-e 's/%/%25/g' \
-e 's/ /%20/g' \
-e 's/!/%21/g' \
-e 's/"/%22/g' \
-e "s/'/%27/g" \
-e 's/#/%23/g' \
-e 's/(/%28/g' \
-e 's/)/%29/g' \
-e 's/+/%2b/g' \
-e 's/,/%2c/g' \
-e 's/-/%2d/g' \
-e 's/:/%3a/g' \
-e 's/;/%3b/g' \
-e 's/?/%3f/g' \
-e 's/@/%40/g' \
-e 's/\$/%24/g' \
-e 's/\&/%26/g' \
-e 's/\*/%2a/g' \
-e 's/\./%2e/g' \
-e 's/\//%2f/g' \
-e 's/\[/%5b/g' \
-e 's/\\/%5c/g' \
-e 's/\]/%5d/g' \
-e 's/\^/%5e/g' \
-e 's/_/%5f/g' \
-e 's/`/%60/g' \
-e 's/{/%7b/g' \
-e 's/|/%7c/g' \
-e 's/}/%7d/g' \
-e 's/~/%7e/g'
)
echo $out
}
urlencode_literal_grouped_case_advanced() {
local inputCleaned=$(echo -en "$1")
string=$inputCleaned; format=; set --
while
@ -540,9 +583,12 @@ urlencode_literal_grouped_case () {
done
printf "$format\\n" "$@"
}
urlencode_literal_grouped_case_urlendingonly () {
urlencode_literal_grouped_case_urlendingonly() {
echo "${1%/*}""/""$(urlencode_literal_grouped_case ${1##*/})"
}
urlencode_literal_grouped_case_advanced_urlendingonly() {
echo "${1%/*}""/""$(urlencode_literal_grouped_case_advanced ${1##*/})"
}
urldecode() {
: "${*//+/ }"; echo -e "${_//%/\\x}";
}
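A quick usage sketch of the new helper, assuming a shell in which the function above has been sourced: only the listed URL specials get percent-encoded, while multi-byte characters (cyrillic, kanji, etc.) pass through untouched, which is what the ranoz filename fix relies on.

# Sketch: exercise the sed-based encoder on a mixed-script filename.
fname='レポート отчёт (v2) #1.txt'
urlencode_literal_grouped_case "$fname"
# -> レポート%20отчёт%20%28v2%29%20%231%2etxt
# The *_urlendingonly wrapper encodes only the final path segment (host name below is hypothetical):
urlencode_literal_grouped_case_urlendingonly "https://host.example/files/$fname"
# -> https://host.example/files/レポート%20отчёт%20%28v2%29%20%231%2etxt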
@ -4513,6 +4559,7 @@ do
mkdir -p "${WorkDir}/.inflight"
direct_DownloadFile "${remote_url}" "${fileCount}"
fileCount=$((fileCount + 1))
continue
elif [[ ${line} =~ ^http ]] ; then
remote_url=${line}
if [[ ${remote_url} =~ ^http: ]] ; then
@ -4563,13 +4610,13 @@ do
mkdir -p "${WorkDir}/.inflight"
${_hostfuncprefix}_DownloadFile "${remote_url}" "${fileCount}"
fileCount=$((fileCount + 1))
break
break
fi
done
if [ "$isSkipOkay" == "true" ]; then
continue
elif [ "$isHostMatchFound" == "false" ]; then
printf "${RED}Invalid url (bad format or unsupported host [m*]):${NC} \\n%s\\n" $remote_url
printf "${RED}Invalid url or disabled host (bad format or unsupported host [m*]):${NC} \\n%s\\n" $remote_url
badUrlDownload "${remote_url}"
continue
fi
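A small sketch of an input list that exercises the two paths above (the list file name and URLs are hypothetical): the "direct=" line is handed to direct_DownloadFile and then skipped past the host-match loop by the new "continue", the second line matches the fileditch HostDomainRegex and is dispatched to its handler, and the last line matches nothing and triggers the "Invalid url or disabled host" message via badUrlDownload.

# links.txt is a hypothetical list file name, used here for illustration only.
cat > links.txt <<'EOF'
direct=https://fileditchfiles.me/file.php?f=/s22/example_video.mp4
https://fileditch.com/s22/example_video.mp4
https://unsupported.example/whatever.bin
EOF
# line 1: direct_DownloadFile, then 'continue' (no supported-host check)
# line 2: matches '^(http|https)://(.*\.)?(fileditch\.com|fileditchstuff\.me)/' -> fd handler
# line 3: no regex match -> "Invalid url or disabled host ..." + badUrlDownload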