# 2025.02.12 - [sendnow] Add send.now as download host
# 2025.02.11 - [ranoz] Fix filename (to handle fileid added to download urls)
# 2025.02.10 - [mad] Add detection of custom "Removed" response on cdn get from direct links
# 2025.02.06 - [ranoz] Add UNAVAILABLE_FOR_LEGAL_REASONS response handling
# 2025.02.04 - [mad] Add ConnectTimeoutUpload to separate configurable up/down timeouts
This commit is contained in:
parent fd4723eb24
commit 0face871aa

12 changed files with 1359 additions and 561 deletions
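The most mechanical change above is the timeout split: `tor_curl_upload` now reads `--connect-timeout` from its own `ConnectTimeoutUpload` variable (see the new `mad.sh:393`, `mad.sh:395`, `mad.sh:399`, and `mad.sh:401` hits below), while the download wrappers keep using `ConnectTimeout`. A minimal sketch of the resulting shape; the values here are illustrative, and the real wrappers also branch on `UseTorCurlImpersonate` and send a full set of browser headers:

```bash
#!/usr/bin/env bash
# Sketch only: timeout values are illustrative, not the defaults shipped
# in mad.sh, and the real wrappers also branch on UseTorCurlImpersonate
# and set Firefox-style request headers.
ConnectTimeout=30         # downloads: tor_curl_request / tor_curl_request_extended
ConnectTimeoutUpload=120  # uploads: tor_curl_upload (split out in 2025.02.04)

tor_curl_request() {
  # Download path: short handshake window so dead mirrors fail fast.
  curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 \
    --connect-timeout "${ConnectTimeout}" --compressed --globoff "$@"
}

tor_curl_upload() {
  # Upload path: its own, typically longer, handshake window plus the
  # existing minimum-speed watchdog.
  curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 \
    --connect-timeout "${ConnectTimeoutUpload}" \
    --speed-limit "$UploadSpeedMin" --speed-time "$UploadTimeoutInterval" \
    --compressed --globoff "$@"
}
```

With the two knobs separated, a slow upload host can be given a generous connect window without making unreachable download mirrors take equally long to time out.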
@ -1,4 +1,4 @@
DateTime: 25.02.02
DateTime: 25.02.11

Files:
./hosts/1fichier.sh
@ -62,6 +62,7 @@ Files:
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/sendnow.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
./hosts/soyjak.sh
@ -114,6 +115,7 @@ Files:
./hosts/up_isupload.sh
./hosts/up_kouploader.sh
./hosts/up_kraken.sh
./hosts/up_lainsafe.sh
./hosts/up_linxx.sh
./hosts/up_moocloud.sh
./hosts/up_nantes.sh
@ -282,9 +284,9 @@ _________________________________________________________________________
./hosts/fileblade.sh:456: tor_curl_request --insecure -L \
./hosts/fileblade.sh:461: tor_curl_request --insecure \
./hosts/fileblade.sh:476: tor_curl_request --insecure \
./hosts/fileditch.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/fileditch.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/fileditch.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/fileditch.sh:96: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/fileditch.sh:187: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/fileditch.sh:189: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/filedot.sh:112: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
./hosts/filedot.sh:153: resp_login=$(tor_curl_request --insecure -L -s \
./hosts/filedot.sh:240: response=$(tor_curl_request --insecure -L -s \
@ -352,12 +354,20 @@ _________________________________________________________________________
./hosts/quax.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/ranoz.sh:158: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:268: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:274: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:294: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:160: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:270: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:272: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:276: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \
./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
./hosts/sendnow.sh:158: response=$(tor_curl_request --insecure -L -svo. -X POST \
./hosts/sendnow.sh:200: file_header=$(tor_curl_request --insecure --head -Lis \
./hosts/sendnow.sh:321: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/sendnow.sh:323: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:341: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:360: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:379: tor_curl_request --insecure -L --no-alpn \
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/syspro.sh:188: tor_curl_request --insecure -L \
@ -445,6 +455,7 @@ _________________________________________________________________________
./hosts/up_kouploader.sh:108: response=$(tor_curl_request --insecure -L -s -b "${ko_cookie_jar}" -c "${ko_cookie_jar}" "$PostUrlHost")
./hosts/up_kouploader.sh:132: response=$(tor_curl_upload --insecure -L -i \
./hosts/up_kraken.sh:115: response=$(tor_curl_upload --insecure -i \
./hosts/up_lainsafe.sh:111: response=$(tor_curl_upload --insecure -i \
./hosts/up_nippy.sh:125: response=$(tor_curl_upload --insecure -i \
./hosts/up_nofile.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_offshorecat.sh:104: response=$(tor_curl_upload --insecure -i \
@ -467,126 +478,126 @@ _________________________________________________________________________
./hosts/up_uploadflix.sh:106: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadhive.sh:128: response=$(tor_curl_upload --insecure -i \
./hosts/up_uploadraja.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_uwabaki.sh:101: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_uwabaki.sh:102: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_yolobit.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/youdbox.sh:95: response=$(tor_curl_request --insecure -L -i -s "${fixed_url}")
./hosts/youdbox.sh:141: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "${fixed_url}")
./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
./mad.sh:128:UseTorCurlImpersonate=false
./mad.sh:419:tor_curl_request() {
./mad.sh:420: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:421: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:423: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:426:tor_curl_request_extended() {
./mad.sh:428: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:429: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:431: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:434:tor_curl_upload() {
./mad.sh:435: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:437: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:439: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:443: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:445: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1441:install_curl_impersonate() {
./mad.sh:1443: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1444: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1448: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1449: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1453: PS3='Please select which curl_impersonate to install: '
./mad.sh:1461: install_curl_impersonate_lwthiker_orig
./mad.sh:1465: install_curl_impersonate_lexiforest_fork
./mad.sh:1475:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1479: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1480: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1483: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1486: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1488: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1491: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1501: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1503: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1506: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1508: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1556: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1585: echo -e "| Extracting curl_impersonate..."
./mad.sh:1587: rm -f "${ScriptDir}"/curl*
./mad.sh:1588: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1589: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1590: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1598:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1602: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1603: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1606: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1609: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1611: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1614: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1624: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1626: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1629: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1631: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1679: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1708: echo -e "| Extracting curl_impersonate..."
./mad.sh:1710: rm -f "${ScriptDir}"/curl*
./mad.sh:1711: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1712: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1713: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1875: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1883: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1884: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1893: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1895: echo -e "$maud_curl"
./mad.sh:1897: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1899: echo -e "$maud_torcurl"
./mad.sh:1911: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1912: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1921: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1923: echo -e "$maud_curl"
./mad.sh:1925: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1927: echo -e "$maud_torcurl"
./mad.sh:1933: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1934: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1943: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1945: echo -e "$maud_curl"
./mad.sh:1947: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1949: echo -e "$maud_torcurl"
./mad.sh:2896: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2897: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2899: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3071: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3072: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3074: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3272: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3279: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3409: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3453: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3455: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3653: response=$(tor_curl_upload --insecure -i \
./mad.sh:3660: response=$(tor_curl_upload --insecure -i \
./mad.sh:3731:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3732: curl_impersonate=()
./mad.sh:3733: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3734: bFoundCurlHeader=false
./mad.sh:3738: curl_impersonate=($fil)
./mad.sh:3739: bFoundCurlHeader=true
./mad.sh:3743: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3744: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3747: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3750: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3752: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3756: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3757: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3760: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3762: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3763: UseTorCurlImpersonate=false
./mad.sh:3764: install_curl_impersonate
./mad.sh:3848: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3849: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3927:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3928: install_curl_impersonate
./mad.sh:3959:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3960: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3962: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:80:UseTorCurlImpersonate=false
./mad.sh:375:tor_curl_request() {
./mad.sh:376: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:377: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:379: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:382:tor_curl_request_extended() {
./mad.sh:384: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:385: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:387: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:390:tor_curl_upload() {
./mad.sh:391: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:393: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:395: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:399: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:401: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1397:install_curl_impersonate() {
./mad.sh:1399: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1400: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1404: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1405: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1409: PS3='Please select which curl_impersonate to install: '
./mad.sh:1417: install_curl_impersonate_lwthiker_orig
./mad.sh:1421: install_curl_impersonate_lexiforest_fork
./mad.sh:1431:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1435: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1436: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1439: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1442: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1444: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1447: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1457: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1459: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1462: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1464: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1512: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1541: echo -e "| Extracting curl_impersonate..."
./mad.sh:1543: rm -f "${ScriptDir}"/curl*
./mad.sh:1544: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1545: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1546: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1554:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1558: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1559: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1562: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1565: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1567: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1570: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1580: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1582: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1585: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1587: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1635: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1664: echo -e "| Extracting curl_impersonate..."
./mad.sh:1666: rm -f "${ScriptDir}"/curl*
./mad.sh:1667: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1668: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1669: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1831: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1839: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1840: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1849: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1851: echo -e "$maud_curl"
./mad.sh:1853: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1855: echo -e "$maud_torcurl"
./mad.sh:1867: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1868: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1877: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1879: echo -e "$maud_curl"
./mad.sh:1881: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1883: echo -e "$maud_torcurl"
./mad.sh:1889: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1890: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1899: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1901: echo -e "$maud_curl"
./mad.sh:1903: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1905: echo -e "$maud_torcurl"
./mad.sh:2852: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2853: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2855: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3027: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3028: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3030: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3228: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3235: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3372: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3425: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3427: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3625: response=$(tor_curl_upload --insecure -i \
./mad.sh:3632: response=$(tor_curl_upload --insecure -i \
./mad.sh:3703:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3704: curl_impersonate=()
./mad.sh:3705: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3706: bFoundCurlHeader=false
./mad.sh:3710: curl_impersonate=($fil)
./mad.sh:3711: bFoundCurlHeader=true
./mad.sh:3715: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3716: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3719: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3722: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3724: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3728: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3729: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3732: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3734: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3735: UseTorCurlImpersonate=false
./mad.sh:3736: install_curl_impersonate
./mad.sh:3820: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3821: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3899:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3900: install_curl_impersonate
./mad.sh:3931:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3932: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3934: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \

@ -1,4 +1,4 @@
DateTime: 25.02.02
DateTime: 25.02.11

Files:
./hosts/1fichier.sh
@ -62,6 +62,7 @@ Files:
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/sendnow.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
./hosts/soyjak.sh
@ -114,6 +115,7 @@ Files:
./hosts/up_isupload.sh
./hosts/up_kouploader.sh
./hosts/up_kraken.sh
./hosts/up_lainsafe.sh
./hosts/up_linxx.sh
./hosts/up_moocloud.sh
./hosts/up_nantes.sh
@ -228,6 +230,13 @@ _________________________________________________________________________
./hosts/pixeldrain.sh:250: pdheadurl="https://pixeldrain.com/api/file/${fileid}"
./hosts/pixeldrain.sh:252: pdheadurl="https://pd.cybar.xyz/$fileid"
./hosts/pixeldrain.sh:272: download_url="https://pixeldrain.com/api/file/${fileid}"
./hosts/sendnow.sh:180: if grep -Eqi 'location: https://' <<< "$response"; then
./hosts/sendnow.sh:195: fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/d/)' <<< "$download_url")
./hosts/sendnow.sh:205: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:329: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:346: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:367: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:385: -H "Referer: https://send.now/" \
./hosts/tempfileme.sh:144: if grep -Eqi 'id="download-button" href="http://tempfile.me/file/' <<< "$response"; then
./hosts/tempfileme.sh:146: download_url="${download_url/http:/https:}"
./hosts/tempfileme.sh:186: if grep -Eqi 'location: https://' <<< "${file_header}" ; then
@ -360,6 +369,9 @@ _________________________________________________________________________
./hosts/up_kraken.sh:107: local ar_HUP[8]='https://uploads9.krakenfiles.com/_uploader/gallery/upload'
./hosts/up_kraken.sh:108: local ar_HUP[9]='https://uploads10.krakenfiles.com/_uploader/gallery/upload'
./hosts/up_kraken.sh:126: downloadLink="https://krakenfiles.com/view/${hash}/file.html"
./hosts/up_lainsafe.sh:108: PostUrlHost='https://pomf.lain.la/upload.php?output=html'
./hosts/up_lainsafe.sh:129: elif grep -Eqi 'a href="https://pomf2.lain.la/f/' <<< "${response}" ; then
./hosts/up_lainsafe.sh:130: url=$(grep -oPi -m 1 '(?<=a href=").*?(?=">https:)' <<< "$response")
./hosts/up_linxx.sh:37: jira_PostUrlHost='https://linxx.net/upload/script.php'
./hosts/up_linxx.sh:40: jira_downloadLinkPrefix='https://linxx.net/upload/f.php?h='
./hosts/up_moocloud.sh:37: jira_PostUrlHost='https://file.tools.moocloud.ch/script.php'
@ -425,79 +437,79 @@ _________________________________________________________________________
./hosts/up_uploadhive.sh:149: downloadLink="https://uploadhive.com/${hash}"
./hosts/up_uploadraja.sh:99: PostUrlHost='https://awsaisiaposisition69.kalpstudio.xyz/cgi-bin/upload.cgi?upload_type=file&utype=anon'
./hosts/up_uploadraja.sh:119: downloadLink="https://uploadraja.com/$hash"
./hosts/up_uwabaki.sh:98: PostUrlHost="https://files.uwabaki.party/upload.php"
./hosts/up_uwabaki.sh:108: if grep -Eqi 'File uploaded: <a href="https://files.uwabaki.party/' <<< "${response}" ; then
./hosts/up_uwabaki.sh:120: downloadLink="https://files.uwabaki.party${url}"
./hosts/up_uwabaki.sh:99: PostUrlHost="https://files.uwabaki.party/index.php"
./hosts/up_uwabaki.sh:111: if grep -Eqi 'File uploaded: <a href="https://files.uwabaki.party/' <<< "${response}" ; then
./hosts/up_uwabaki.sh:123: downloadLink="https://files.uwabaki.party${url}"
./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
./mad.sh:742: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:744: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:747: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:749: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:770: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:772: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:775: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:777: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:798: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:800: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:803: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:805: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:827: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:829: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:832: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:834: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:858: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:860: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:863: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:865: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:891: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:893: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:913: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:934: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:936: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:939: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:941: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:957: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:959: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:962: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:964: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:983: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:985: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:988: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:990: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1010: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:1012: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:1015: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1017: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1035: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:1037: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:1040: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1042: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1061: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1063: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1066: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1068: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1486: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1503: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1609: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1626: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1889: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1917: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1939: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3255: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3770:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3867: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3869: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3871: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4090: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4091: remote_url=${remote_url/http:/https:}
./mad.sh:4112: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4113: remote_url=${remote_url/http:/https:}
./mad.sh:4479: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4480: remote_url=${remote_url/http:/https:}
./mad.sh:4538: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4539: remote_url=${remote_url/http:/https:}
./mad.sh:4565: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4566: remote_url=${remote_url/http:/https:}
./mad.sh:698: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:700: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:703: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:705: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:726: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:728: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:731: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:733: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:754: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:756: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:759: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:761: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:783: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:785: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:788: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:790: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:814: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:816: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:819: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:821: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:847: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:849: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:869: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:890: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:892: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:895: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:897: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:913: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:915: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:918: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:920: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:939: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:941: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:944: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:946: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:966: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:968: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:971: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:973: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:991: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:993: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:996: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:998: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1017: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1019: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1022: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1024: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1442: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1459: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1565: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1582: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1845: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1873: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1895: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3211: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3742:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3839: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3841: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3843: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4062: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4063: remote_url=${remote_url/http:/https:}
./mad.sh:4084: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4085: remote_url=${remote_url/http:/https:}
./mad.sh:4451: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4452: remote_url=${remote_url/http:/https:}
./mad.sh:4510: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4511: remote_url=${remote_url/http:/https:}
./mad.sh:4537: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4538: remote_url=${remote_url/http:/https:}
./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)

@ -1,4 +1,4 @@
DateTime: 25.02.02
DateTime: 25.02.11

Files:
./hosts/1fichier.sh
@ -62,6 +62,7 @@ Files:
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/sendnow.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
./hosts/soyjak.sh
@ -114,6 +115,7 @@ Files:
./hosts/up_isupload.sh
./hosts/up_kouploader.sh
./hosts/up_kraken.sh
./hosts/up_lainsafe.sh
./hosts/up_linxx.sh
./hosts/up_moocloud.sh
./hosts/up_nantes.sh
@ -1298,31 +1300,31 @@ _________________________________________________________________________
./hosts/fileblade.sh:485: -H "Sec-Fetch-Mode: navigate" \
./hosts/fileblade.sh:486: -H "Sec-Fetch-Site: same-origin" \
--
./hosts/fileditch.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/fileditch.sh:86: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileditch.sh:87: debugHtml "${remote_url##*/}" "fd_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/fileditch.sh:88: fi
./hosts/fileditch.sh:89: if [ ! -z "$file_header" ] ; then
./hosts/fileditch.sh:90: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
./hosts/fileditch.sh:91: echo -e "${RED}| The file has been removed (404).${NC}"
./hosts/fileditch.sh:92: removedDownload "${remote_url}"
./hosts/fileditch.sh:93: exitDownloadNotAvailable=true
./hosts/fileditch.sh:94: return 1
./hosts/fileditch.sh:95: fi
./hosts/fileditch.sh:96: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/fileditch.sh:97: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileditch.sh:98: debugHtml "${remote_url##*/}" "fd_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/fileditch.sh:99: fi
./hosts/fileditch.sh:100: if [ ! -z "$file_header" ] ; then
./hosts/fileditch.sh:101: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
./hosts/fileditch.sh:102: echo -e "${RED}| The file has been removed (404).${NC}"
./hosts/fileditch.sh:103: removedDownload "${remote_url}"
./hosts/fileditch.sh:104: exitDownloadNotAvailable=true
./hosts/fileditch.sh:105: return 1
./hosts/fileditch.sh:106: fi
--
./hosts/fileditch.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/fileditch.sh:177: else
./hosts/fileditch.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/fileditch.sh:179: fi
./hosts/fileditch.sh:180: received_file_size=0
./hosts/fileditch.sh:181: if [ -f "$file_path" ] ; then
./hosts/fileditch.sh:182: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/fileditch.sh:183: fi
./hosts/fileditch.sh:184: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./hosts/fileditch.sh:185: containsHtml=false
./hosts/fileditch.sh:186: else
./hosts/fileditch.sh:187: containsHtml=true
./hosts/fileditch.sh:188: fi
./hosts/fileditch.sh:187: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/fileditch.sh:188: else
./hosts/fileditch.sh:189: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/fileditch.sh:190: fi
./hosts/fileditch.sh:191: received_file_size=0
./hosts/fileditch.sh:192: if [ -f "$file_path" ] ; then
./hosts/fileditch.sh:193: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/fileditch.sh:194: fi
./hosts/fileditch.sh:195: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./hosts/fileditch.sh:196: containsHtml=false
./hosts/fileditch.sh:197: else
./hosts/fileditch.sh:198: containsHtml=true
./hosts/fileditch.sh:199: fi
--
./hosts/filedot.sh:112: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
./hosts/filedot.sh:113: -H "User-Agent: $RandomUA" \
@ -1953,50 +1955,134 @@ _________________________________________________________________________
|
|||
./hosts/ranoz.sh:99: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" ""
--
./hosts/ranoz.sh:158: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:159: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:160: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:161: fi
./hosts/ranoz.sh:162: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:163: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:164: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:165: printf "\\n"
./hosts/ranoz.sh:166: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:167: warnAndRetryUnknownError=true
./hosts/ranoz.sh:168: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:160: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:161: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:162: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:163: fi
./hosts/ranoz.sh:164: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:165: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:166: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:167: printf "\\n"
./hosts/ranoz.sh:168: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:169: warnAndRetryUnknownError=true
./hosts/ranoz.sh:170: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:271: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:272: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:273: else
./hosts/ranoz.sh:274: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:275: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:276: fi
./hosts/ranoz.sh:277: else
./hosts/ranoz.sh:278: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:280: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:281: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:282: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:283: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:284: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:285: -H "Connection: keep-alive" \
./hosts/ranoz.sh:286: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:287: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:288: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:289: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:272: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:273: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:274: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:275: else
./hosts/ranoz.sh:276: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:277: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:278: fi
./hosts/ranoz.sh:279: else
./hosts/ranoz.sh:280: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:282: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:283: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:284: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:285: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:286: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:287: -H "Connection: keep-alive" \
./hosts/ranoz.sh:288: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:289: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:290: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:291: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/ranoz.sh:294: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:295: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:296: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:297: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:298: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:299: -H "Connection: keep-alive" \
./hosts/ranoz.sh:300: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:301: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:302: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:303: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:304: -H "Sec-Fetch-Site: same-origin" \
./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:297: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:298: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:299: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:300: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:301: -H "Connection: keep-alive" \
./hosts/ranoz.sh:302: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:303: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:304: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:305: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:306: -H "Sec-Fetch-Site: same-origin" \
--
./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
./hosts/sendnow.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/sendnow.sh:92: debugHtml "${remote_url##*/}" "snow_dwnpage$i" "${response}"
./hosts/sendnow.sh:93: fi
./hosts/sendnow.sh:94: if [[ -z $response ]] ; then
./hosts/sendnow.sh:95: rm -f "${snow_cookie_jar}";
./hosts/sendnow.sh:96: if [ $i == $maxfetchretries ] ; then
./hosts/sendnow.sh:97: printf "\\n"
./hosts/sendnow.sh:98: echo -e "${RED}| Failed to extract download link.${NC}"
./hosts/sendnow.sh:99: warnAndRetryUnknownError=true
./hosts/sendnow.sh:100: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/sendnow.sh:158: response=$(tor_curl_request --insecure -L -svo. -X POST \
./hosts/sendnow.sh:159: -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \
./hosts/sendnow.sh:160: --data-raw "$form_data" "$remote_url" 2>&1)
./hosts/sendnow.sh:161: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/sendnow.sh:162: debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/sendnow.sh:163: fi
./hosts/sendnow.sh:164: if [[ -z $response ]] ; then
./hosts/sendnow.sh:165: echo -e "${RED}| Failed to extract download link [2]${NC}"
./hosts/sendnow.sh:166: warnAndRetryUnknownError=true
./hosts/sendnow.sh:167: if [ "${finalAttempt}" == "true" ] ; then
./hosts/sendnow.sh:168: rm -f "${snow_cookie_jar}";
--
./hosts/sendnow.sh:200: file_header=$(tor_curl_request --insecure --head -Lis \
./hosts/sendnow.sh:201: -H "Host: $fshost" \
./hosts/sendnow.sh:202: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:203: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:204: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:205: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:206: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:207: -H "Connection: keep-alive" \
./hosts/sendnow.sh:208: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:209: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:210: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/sendnow.sh:323: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:324: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/sendnow.sh:325: -H "Host: $fshost" \
./hosts/sendnow.sh:326: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:327: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:328: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:329: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:330: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:331: -H "Connection: keep-alive" \
./hosts/sendnow.sh:332: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:333: -H "Sec-Fetch-Dest: document" \
--
./hosts/sendnow.sh:341: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:342: -H "Host: $fshost" \
./hosts/sendnow.sh:343: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:344: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:345: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:346: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:347: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:348: -H "Connection: keep-alive" \
./hosts/sendnow.sh:349: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:350: -H "Sec-Fetch-Dest: document" \
./hosts/sendnow.sh:351: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/sendnow.sh:360: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:361: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/sendnow.sh:362: -H "User-Agent: $RandomUA" \
./hosts/sendnow.sh:363: -H "Host: $fshost" \
./hosts/sendnow.sh:364: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:365: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:366: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:367: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:368: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:369: -H "Connection: keep-alive" \
./hosts/sendnow.sh:370: -H "Upgrade-Insecure-Requests: 1" \
--
./hosts/sendnow.sh:379: tor_curl_request --insecure -L --no-alpn \
./hosts/sendnow.sh:380: -H "User-Agent: $RandomUA" \
./hosts/sendnow.sh:381: -H "Host: $fshost" \
./hosts/sendnow.sh:382: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
./hosts/sendnow.sh:383: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/sendnow.sh:384: -H "Accept-Encoding: gzip, deflate, br, zstd" \
./hosts/sendnow.sh:385: -H "Referer: https://send.now/" \
./hosts/sendnow.sh:386: -H "Sec-GPC: 1" \
./hosts/sendnow.sh:387: -H "Connection: keep-alive" \
./hosts/sendnow.sh:388: -H "Upgrade-Insecure-Requests: 1" \
./hosts/sendnow.sh:389: -H "Sec-Fetch-Dest: document" \
--
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then
@@ -2848,6 +2934,18 @@ _________________________________________________________________________
./hosts/up_kraken.sh:124: hash=$(grep -oPi '(?<="hash":").*?(?=")' <<< "$response")
./hosts/up_kraken.sh:125: filesize=$(GetFileSize "$filepath" "false")
--
./hosts/up_lainsafe.sh:111: response=$(tor_curl_upload --insecure -i \
./hosts/up_lainsafe.sh:112: -H "Content-Type: multipart/form-data" \
./hosts/up_lainsafe.sh:113: -F "files[]=@${arrFiles[@]}" \
./hosts/up_lainsafe.sh:114: "${PostUrlHost}")
./hosts/up_lainsafe.sh:115: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/up_lainsafe.sh:116: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_lainsafe.sh:117: fi
./hosts/up_lainsafe.sh:118: if [[ -z $response ]] || grep -Eqi 'HTTP/2 403|403 Forbidden' <<< "${response}" ; then
./hosts/up_lainsafe.sh:119: if [ "${finalAttempt}" == "true" ] ; then
./hosts/up_lainsafe.sh:120: err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
./hosts/up_lainsafe.sh:121: printf "\\n"
--
./hosts/up_nippy.sh:125: response=$(tor_curl_upload --insecure -i \
./hosts/up_nippy.sh:126: -H "Content-Type: multipart/form-data" \
./hosts/up_nippy.sh:127: -F "file[]=@${arrFiles[@]}" \
@@ -3112,17 +3210,17 @@ _________________________________________________________________________
./hosts/up_uploadraja.sh:111: -F "file_0=@${filepath}" \
./hosts/up_uploadraja.sh:112: "${PostUrlHost}")
--
./hosts/up_uwabaki.sh:101: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_uwabaki.sh:102: -H "Content-Type: multipart/form-data" \
./hosts/up_uwabaki.sh:103: -F "files[]=@${arrFiles[@]}" \
./hosts/up_uwabaki.sh:104: "${PostUrlHost}")
./hosts/up_uwabaki.sh:105: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/up_uwabaki.sh:106: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_uwabaki.sh:107: fi
./hosts/up_uwabaki.sh:108: if grep -Eqi 'File uploaded: <a href="https://files.uwabaki.party/' <<< "${response}" ; then
./hosts/up_uwabaki.sh:109: url=$(grep -oPi '(?<=File uploaded: <a href=").*?(?=">.*$)' <<< "$response")
./hosts/up_uwabaki.sh:110: filesize=$(GetFileSize "$filepath" "false")
./hosts/up_uwabaki.sh:111: downloadLink="${url}"
./hosts/up_uwabaki.sh:102: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_uwabaki.sh:103: -H "Content-Type: multipart/form-data" \
./hosts/up_uwabaki.sh:104: -F "formatted=true" \
./hosts/up_uwabaki.sh:105: -F "encryption=off" \
./hosts/up_uwabaki.sh:106: -F "files[]=@$filepath" \
./hosts/up_uwabaki.sh:107: "${PostUrlHost}")
./hosts/up_uwabaki.sh:108: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/up_uwabaki.sh:109: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_uwabaki.sh:110: fi
./hosts/up_uwabaki.sh:111: if grep -Eqi 'File uploaded: <a href="https://files.uwabaki.party/' <<< "${response}" ; then
./hosts/up_uwabaki.sh:112: url=$(grep -oPi '(?<=File uploaded: <a href=").*?(?=">.*$)' <<< "$response")
--
./hosts/up_yolobit.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_yolobit.sh:103: -H "Content-Type: multipart/form-data" \
@@ -3186,235 +3284,235 @@ _________________________________________________________________________
./hosts/youdbox.sh:287: containsHtml=true
./hosts/youdbox.sh:288: fi
--
./mad.sh:419:tor_curl_request() {
./mad.sh:420: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:421: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:422: else
./mad.sh:423: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:424: fi
./mad.sh:425:}
./mad.sh:426:tor_curl_request_extended() {
./mad.sh:427: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:428: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:429: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:430: else
./mad.sh:431: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:432: fi
./mad.sh:433:}
./mad.sh:434:tor_curl_upload() {
./mad.sh:435: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:436: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:437: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:438: else
./mad.sh:439: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:440: fi
./mad.sh:441: else
./mad.sh:442: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:443: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:444: else
./mad.sh:375:tor_curl_request() {
./mad.sh:376: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:377: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:378: else
./mad.sh:379: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:380: fi
./mad.sh:381:}
./mad.sh:382:tor_curl_request_extended() {
./mad.sh:383: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:384: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:385: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:386: else
./mad.sh:387: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:388: fi
./mad.sh:389:}
./mad.sh:390:tor_curl_upload() {
./mad.sh:391: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:392: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:393: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:394: else
./mad.sh:395: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:396: fi
./mad.sh:397: else
./mad.sh:398: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:399: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:400: else
--
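With the wrappers above, uploads read their connect timeout from ConnectTimeoutUpload while downloads keep ConnectTimeout, and tor_curl_request_extended randomizes its timeout in the 30-59s range. A minimal sketch of the two knobs, assuming they are plain config variables sourced by mad.sh (the values below are illustrative, not the project's defaults):

# Hypothetical config excerpt: slow multipart POSTs over Tor get more
# connection headroom than ordinary download requests.
ConnectTimeout=30          # seconds, used by tor_curl_request
ConnectTimeoutUpload=120   # seconds, used by tor_curl_upload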
./mad.sh:1486: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1487: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1488: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1489: fi
./mad.sh:1490: if [ ! -z "$response" ]; then
./mad.sh:1491: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1492: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1493: break
./mad.sh:1494: fi
./mad.sh:1495: done
./mad.sh:1496: if [ -z $latestTag ]; then
./mad.sh:1442: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1443: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1444: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1445: fi
./mad.sh:1446: if [ ! -z "$response" ]; then
./mad.sh:1447: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1448: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1449: break
./mad.sh:1450: fi
./mad.sh:1451: done
./mad.sh:1452: if [ -z $latestTag ]; then
--
./mad.sh:1506: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1507: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1508: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1509: fi
./mad.sh:1510: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1511: if ((j == 8)) ; then
./mad.sh:1512: return 1
./mad.sh:1513: else
./mad.sh:1514: continue
./mad.sh:1515: fi
./mad.sh:1516: fi
./mad.sh:1462: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1463: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1464: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1465: fi
./mad.sh:1466: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1467: if ((j == 8)) ; then
./mad.sh:1468: return 1
./mad.sh:1469: else
./mad.sh:1470: continue
./mad.sh:1471: fi
./mad.sh:1472: fi
--
./mad.sh:1556: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1557: received_file_size=0
./mad.sh:1558: if [ -f "$file_path" ] ; then
./mad.sh:1559: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1560: fi
./mad.sh:1561: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1562: break
./mad.sh:1563: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1564: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1565: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1566: exit 1
./mad.sh:1512: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1513: received_file_size=0
./mad.sh:1514: if [ -f "$file_path" ] ; then
./mad.sh:1515: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1516: fi
./mad.sh:1517: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1518: break
./mad.sh:1519: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1520: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1521: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1522: exit 1
--
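The resume loops above reduce to a small verify-and-retry pattern: download with --continue-at -, compare the on-disk size against the expected byte count, and only fail once the retry budget is spent. A hedged sketch, assuming file_size_bytes was taken from an earlier Content-Length header:

# Sketch: resume until the received size matches the expected size.
for ((j=1; j<=MaxDownloadRetries; j++)); do
  curl -L "$download_url" --continue-at - --output "$file_path"
  received=$(stat --format="%s" "$file_path" 2>/dev/null || echo 0)
  ((received == file_size_bytes)) && break            # complete
  ((j >= MaxDownloadRetries)) && { echo "FAILED: size mismatch" >&2; exit 1; }
done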
./mad.sh:1609: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1610: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1611: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1612: fi
./mad.sh:1613: if [ ! -z "$response" ]; then
./mad.sh:1614: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1615: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1616: break
./mad.sh:1617: fi
./mad.sh:1618: done
./mad.sh:1619: if [ -z $latestTag ]; then
./mad.sh:1565: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1566: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1567: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1568: fi
./mad.sh:1569: if [ ! -z "$response" ]; then
./mad.sh:1570: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1571: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1572: break
./mad.sh:1573: fi
./mad.sh:1574: done
./mad.sh:1575: if [ -z $latestTag ]; then
--
./mad.sh:1629: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1630: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1631: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1632: fi
./mad.sh:1633: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1634: if ((j == 8)) ; then
./mad.sh:1635: return 1
./mad.sh:1636: else
./mad.sh:1637: continue
./mad.sh:1638: fi
./mad.sh:1639: fi
./mad.sh:1585: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1586: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1587: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1588: fi
./mad.sh:1589: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1590: if ((j == 8)) ; then
./mad.sh:1591: return 1
./mad.sh:1592: else
./mad.sh:1593: continue
./mad.sh:1594: fi
./mad.sh:1595: fi
--
./mad.sh:1679: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1680: received_file_size=0
./mad.sh:1681: if [ -f "$file_path" ] ; then
./mad.sh:1682: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1683: fi
./mad.sh:1684: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1685: break
./mad.sh:1686: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1687: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1688: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1689: exit 1
./mad.sh:1635: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1636: received_file_size=0
./mad.sh:1637: if [ -f "$file_path" ] ; then
./mad.sh:1638: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1639: fi
./mad.sh:1640: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1641: break
./mad.sh:1642: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1643: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1644: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1645: exit 1
--
./mad.sh:1884: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1885: echo -e "Files:"
./mad.sh:1886: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1887: echo -e ""
./mad.sh:1888: echo -e ""
./mad.sh:1889: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1890: echo -e "_________________________________________________________________________"
./mad.sh:1891: echo -e "$maud_http"
./mad.sh:1892: echo -e ""
./mad.sh:1893: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1894: echo -e "_________________________________________________________________________"
./mad.sh:1840: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1841: echo -e "Files:"
./mad.sh:1842: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1843: echo -e ""
./mad.sh:1844: echo -e ""
./mad.sh:1845: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1846: echo -e "_________________________________________________________________________"
./mad.sh:1847: echo -e "$maud_http"
./mad.sh:1848: echo -e ""
./mad.sh:1849: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1850: echo -e "_________________________________________________________________________"
--
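The audit output above is built from one reusable pipeline: grep -n -vxE '[[:blank:]]*([#].*)?' prints every non-blank, non-comment line with its line number, and a second grep filters (and colors) the survivors. A standalone demo of the same pipeline, with the target file name hypothetical:

# Show each tor_curl call site plus 12 lines of trailing context.
grep -n -vxE '[[:blank:]]*([#].*)?' hosts/example.sh | grep -A 12 -Ei 'tor_curl'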
./mad.sh:1897: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1898: echo -e "_________________________________________________________________________"
./mad.sh:1899: echo -e "$maud_torcurl"
./mad.sh:1900: echo -e ""
./mad.sh:1901: echo -e ""
./mad.sh:1902: done
./mad.sh:1903: else
./mad.sh:1904: cd "$ScriptDir"
./mad.sh:1905: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1906: cd "$WorkDir"
./mad.sh:1907: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1853: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1854: echo -e "_________________________________________________________________________"
./mad.sh:1855: echo -e "$maud_torcurl"
./mad.sh:1856: echo -e ""
./mad.sh:1857: echo -e ""
./mad.sh:1858: done
./mad.sh:1859: else
./mad.sh:1860: cd "$ScriptDir"
./mad.sh:1861: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1862: cd "$WorkDir"
./mad.sh:1863: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
--
./mad.sh:1912: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1913: echo -e "Files:"
./mad.sh:1914: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1915: echo -e ""
./mad.sh:1916: echo -e ""
./mad.sh:1917: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1918: echo -e "_________________________________________________________________________"
./mad.sh:1919: echo -e "$maud_http"
./mad.sh:1920: echo -e ""
./mad.sh:1921: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1922: echo -e "_________________________________________________________________________"
./mad.sh:1868: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1869: echo -e "Files:"
./mad.sh:1870: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1871: echo -e ""
./mad.sh:1872: echo -e ""
./mad.sh:1873: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1874: echo -e "_________________________________________________________________________"
./mad.sh:1875: echo -e "$maud_http"
./mad.sh:1876: echo -e ""
./mad.sh:1877: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1878: echo -e "_________________________________________________________________________"
--
./mad.sh:1925: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1926: echo -e "_________________________________________________________________________"
./mad.sh:1927: echo -e "$maud_torcurl"
./mad.sh:1928: echo -e ""
./mad.sh:1929: done
./mad.sh:1930: for fil in "${arrFiles2[@]}";
./mad.sh:1931: do
./mad.sh:1932: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1933: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1934: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1935: echo -e "Files:"
./mad.sh:1936: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1937: echo -e ""
./mad.sh:1938: echo -e ""
./mad.sh:1939: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1940: echo -e "_________________________________________________________________________"
./mad.sh:1941: echo -e "$maud_http"
./mad.sh:1942: echo -e ""
./mad.sh:1943: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1944: echo -e "_________________________________________________________________________"
./mad.sh:1881: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1882: echo -e "_________________________________________________________________________"
./mad.sh:1883: echo -e "$maud_torcurl"
./mad.sh:1884: echo -e ""
./mad.sh:1885: done
./mad.sh:1886: for fil in "${arrFiles2[@]}";
./mad.sh:1887: do
./mad.sh:1888: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1889: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1890: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1891: echo -e "Files:"
./mad.sh:1892: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1893: echo -e ""
./mad.sh:1894: echo -e ""
./mad.sh:1895: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1896: echo -e "_________________________________________________________________________"
./mad.sh:1897: echo -e "$maud_http"
./mad.sh:1898: echo -e ""
./mad.sh:1899: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1900: echo -e "_________________________________________________________________________"
--
./mad.sh:1947: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1948: echo -e "_________________________________________________________________________"
./mad.sh:1949: echo -e "$maud_torcurl"
./mad.sh:1950: echo -e ""
./mad.sh:1951: done
./mad.sh:1952: fi
./mad.sh:1953:}
./mad.sh:1954:madStatus() {
./mad.sh:1955: local InputFile="$1"
./mad.sh:1956: if [ "$arg1" == "status" ] ; then
./mad.sh:1957: clear
./mad.sh:1903: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1904: echo -e "_________________________________________________________________________"
./mad.sh:1905: echo -e "$maud_torcurl"
./mad.sh:1906: echo -e ""
./mad.sh:1907: done
./mad.sh:1908: fi
./mad.sh:1909:}
./mad.sh:1910:madStatus() {
./mad.sh:1911: local InputFile="$1"
./mad.sh:1912: if [ "$arg1" == "status" ] ; then
./mad.sh:1913: clear
--
./mad.sh:3272: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3273: -H "Connection: keep-alive" \
./mad.sh:3274: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3275: "$download_url")
./mad.sh:3276: else
./mad.sh:3277: printf "| Retrieving Head: attempt #$j"
./mad.sh:3278: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3279: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3280: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3281: sleep 6
./mad.sh:3282: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3283: kill $! 2>/dev/null
./mad.sh:3284: )
./mad.sh:3285: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3286: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3287: fi
./mad.sh:3288: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3289: fi
./mad.sh:3228: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3229: -H "Connection: keep-alive" \
./mad.sh:3230: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3231: "$download_url")
./mad.sh:3232: else
./mad.sh:3233: printf "| Retrieving Head: attempt #$j"
./mad.sh:3234: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3235: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3236: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3237: sleep 6
./mad.sh:3238: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3239: kill $! 2>/dev/null
./mad.sh:3240: )
./mad.sh:3241: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3242: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3243: fi
./mad.sh:3244: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3245: fi
--
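The direct-head branch above uses a watchdog idiom: stream the HEAD response through tee in the background, allow a fixed window, then kill the pipeline and keep whatever was captured. A minimal sketch of the same idiom (the temp path is hypothetical):

# Give the HEAD request at most ~6 seconds to produce output; $! is the
# backgrounded pipeline, and the substitution's value is whatever tee echoed.
file_header=$(curl -sI "$download_url" | tee /tmp/directhead.capture &
  sleep 6
  [ -s /tmp/directhead.capture ]   # succeed only if headers actually arrived
  kill $! 2>/dev/null
)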
./mad.sh:3409: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3410: rc=$?
./mad.sh:3411: if [ $rc -ne 0 ] ; then
./mad.sh:3412: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3413: if [ -f ${file_path} ]; then
./mad.sh:3414: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3415: printf "\n\n"
./mad.sh:3416: rm -f "${file_path}"
./mad.sh:3417: else
./mad.sh:3418: printf "\n\n"
./mad.sh:3419: fi
./mad.sh:3372: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3373: rc=$?
./mad.sh:3374: if [ $rc -ne 0 ] ; then
./mad.sh:3375: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3376: if [ -f ${file_path} ]; then
./mad.sh:3377: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3378: printf "\n\n"
./mad.sh:3379: rm -f "${file_path}"
./mad.sh:3380: else
./mad.sh:3381: printf "\n\n"
./mad.sh:3382: fi
--
./mad.sh:3453: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3454: else
./mad.sh:3455: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3456: fi
./mad.sh:3457: received_file_size=0
./mad.sh:3458: if [ -f "$file_path" ] ; then
./mad.sh:3459: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3460: fi
./mad.sh:3461: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3462: containsHtml=false
./mad.sh:3463: else
./mad.sh:3464: containsHtml=true
./mad.sh:3465: fi
./mad.sh:3425: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3426: else
./mad.sh:3427: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3428: fi
./mad.sh:3429: received_file_size=0
./mad.sh:3430: if [ -f "$file_path" ] ; then
./mad.sh:3431: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3432: fi
./mad.sh:3433: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3434: containsHtml=false
./mad.sh:3435: else
./mad.sh:3436: containsHtml=true
./mad.sh:3437: fi
--
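When RateMonitorEnabled is on, the calls above lean on curl's low-speed abort: --speed-limit N --speed-time T terminates a transfer whose throughput stays below N bytes/second for T consecutive seconds, and the enclosing retry loop then resumes from the partial file. A sketch with the usual MAD config names:

# Abort stalled transfers instead of hanging forever; resume on the next pass.
curl -L --speed-limit "$DownloadSpeedMin" --speed-time "$DownloadTimeoutInterval" \
  "$download_url" --continue-at - --output "$file_path"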
./mad.sh:3653: response=$(tor_curl_upload --insecure -i \
./mad.sh:3654: -H "Content-Type: multipart/form-data" \
./mad.sh:3655: -F "key=" \
./mad.sh:3656: -F "time=$jira_timeval" \
./mad.sh:3657: -F "file=@${filepath}" \
./mad.sh:3658: "${jira_PostUrlHost}")
./mad.sh:3659: else
./mad.sh:3660: response=$(tor_curl_upload --insecure -i \
./mad.sh:3661: -H "Content-Type: multipart/form-data" \
./mad.sh:3662: -F "key=" \
./mad.sh:3663: -F "time=$jira_timeval" \
./mad.sh:3664: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3665: "${jira_PostUrlHost}")
./mad.sh:3666: fi
./mad.sh:3667: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3668: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3669: fi
./mad.sh:3670: if grep -Eqi ' 200 ' <<< "${response}" ; then
./mad.sh:3625: response=$(tor_curl_upload --insecure -i \
./mad.sh:3626: -H "Content-Type: multipart/form-data" \
./mad.sh:3627: -F "key=" \
./mad.sh:3628: -F "time=$jira_timeval" \
./mad.sh:3629: -F "file=@${filepath}" \
./mad.sh:3630: "${jira_PostUrlHost}")
./mad.sh:3631: else
./mad.sh:3632: response=$(tor_curl_upload --insecure -i \
./mad.sh:3633: -H "Content-Type: multipart/form-data" \
./mad.sh:3634: -F "key=" \
./mad.sh:3635: -F "time=$jira_timeval" \
./mad.sh:3636: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3637: "${jira_PostUrlHost}")
./mad.sh:3638: fi
./mad.sh:3639: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3640: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3641: fi
./mad.sh:3642: if grep -Eqi ' 200 ' <<< "${response}" ; then
@@ -3,6 +3,60 @@
#
# ---------- Initial release with MAD Uploader functionality ----------
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
# 2025.01.03 - [gofile] Detect "Bulk download is a Premium feature" response (no children)
# 2025.01.02 - [up_axfc] Update PUT response check to handle kanji chars (remove)
# 2025.01.02 - [dashfile] Add response 'This file reached max downloads limit'. New cookie on captcha fail
# 2024.12.28 - [dashfile] Update captcha code check
# 2024.12.28 - [anonfile] Add new download link href response
# 2024.12.28 - [fileblade] Add additional response handling (subsequent downloads, unknown warnings)
# 2024.12.28 - [eternalhosting] Update eternal.onion to handle landing page (eternal.onion/file/)
# 2024.12.26 - [up_kouploader / up_axfc / up_torup] Fixed failedRetryUpload (was using download logging)
# * Thanks Belky
# 2024.12.26 - [anonfile / up_anonfile] Add anonfile.de as upload / download host
# 2024.12.25 - [dashfile / up_dashfile] Add dashfile.net as upload / download host
# 2024.12.25 - [isupload] Change to use tor_curl_request_extended (server response is often slow)
# - Accept 200 OK response to continue (do not require filename / filesize)
# 2024.12.25 - [fileblade] Add response handling for free file download disallowed > 100MB.
# "The file owner does not allow FREE users to download files which are over 100 MB"
# 2024.12.25 - [mad] Add "UploadHiveRandomizeExt" option to config with default=true
# 2024.12.25 - [uploadhive / up_uploadhive] Update renaming random ext files to their original names
# - Handle multipart 7z & rar (abc.7z.###, abc.part#.rar)
# 2024.12.25 - [syspro / up_syspro] Add share.syspro.com.br as upload / download host
# 2024.12.24 - [mad] Add EnabledUploadHosts / EnabledDownloadHosts setting to fine-tune which hosts to use
# ** Options: recommended, online, all (Default=recommended)
# - recommended: loads all hosts verified working with MAD
# - online: loads all hosts available online / working (includes captcha / js restricted)
# - all: loads all hosts in hosts folder
# 2024.12.24 - [up_ateasystems / ateasystems] Add share.ateasystems.com as upload / download host
# 2024.12.23 - [up_uploadbay / uploadbay] Add uploadbay.net as upload / download host
# 2024.12.23 - [up_herbolistique / herbolistique] Add transfert.herbolistique.com as upload / download host
# 2024.12.23 - [uploadhive] Auto-rename random extension downloads
# 2024.12.23 - [up_uploadhive] Change upload file extension to random 3 letters (uhive blocks .7z, .zip, .rar now)
# 2024.12.23 - [up_offshorecat] Fixed upload. Updated apikey.
# 2024.12.23 - [up_fileditch] Fixed upload. Added response handling for Tor Blocked node (retries)
# 2024.12.23 - [up_freesocial / freesocial] Add files.freesocial.co as upload / download host
# 2024.12.23 - [up_cyssoux / cyssoux] Add partage.cyssoux.fr as upload / download host
# 2024.12.22 - [mad] Add jira_Upload function -- used for all jirafeau hosts
# 2024.12.22 - [up_*AllJiraHosts*] Consolidated / moved all hosts upload functions to mad.sh
# - Minimized jira host code (~6000 lines of duplicates removed)
# - Jira hosts: acid, anarchaserver, depotkaz, dictvm, eddowding, familleflender, filesquid,
# free4e, harrault, linxx, moocloud, nantes, netlib, skrepr, soyjak
# 2024.12.20 - [fileblade / up_fileblade] Add fileblade.com as upload / download host
# 2024.12.20 - [isupload / up_isupload] Add isupload.com as upload / download host
# 2024.12.15 - [mediafire] Add mediafire download link processing
# 2024.12.12 - [dictvm / up_dictvm] Add dictvm.org as upload / download host
# 2024.12.12 - [eddowding / up_eddowding] Add eddowding.com as upload / download host
# 2024.12.12 - [up_pixeldrain] Modify upload to use PUT
@@ -25,6 +25,7 @@ Max Size . HostCode . Nickname . Notes
2GB sysp syspro.com.br ?? expiry
* 2GB axfc axfc.net 90d+ inactive expiry
1GB uwab uwabaki.party no expiry, no DMCA, no logs
1GB lain pomf2.lain.la ??
- 1GB kraken krakenfiles.com 90d inactive expiry
1GB ansh anonsharing.com 6mo expiry
512MB anon anonfile.de ?? expiry
@@ -1,6 +1,6 @@
#! Name: ranoz.sh
#! Author: kittykat
#! Version: 2025.02.02
#! Version: 2025.02.11
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@@ -104,7 +104,7 @@ rz_FetchFileInfo() {
continue
fi
fi
if grep -Eqi "There is no such file|File was deleted because" <<< "$response"; then
if grep -Eqi "There is no such file|UNAVAILABLE_FOR_LEGAL_REASONS|File was deleted because" <<< "$response"; then
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true
@@ -119,7 +119,7 @@ rz_FetchFileInfo() {
return 1
fi
if [ "$filename_override" == "" ] ; then
filename=$(grep -oP '(?<=<div class\="page_box_category__.....">Name</div><div class\="page_box_value__.....">).*?(?=</div>.*$)' <<< "$file_header")
filename=$(grep -oP '(?<=\[\\"\$\\",\\"title\\",\\"2\\",\{\\"children\\":\\").*?(?=\\"\}\],.*$)' <<< "$response")
fi
if grep -Eqi '\\"props\\":\{\}\},\\"href\\":\\"' <<< "$response"; then
printf "\\n"
@@ -132,7 +132,9 @@ rz_FetchFileInfo() {
url_token=${url_postfix##*\?}
url_enc_fname=$(urlencode_literal_grouped_case ${url_fname})
download_url="${url_prefix}/${url_enc_fname}?${url_token}"
filename=$url_fname
if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then
filename=$url_fname
fi
break
else
if [ $i == $maxfetchretries ] ; then
487 hosts/sendnow.sh Normal file
@@ -0,0 +1,487 @@
#! Name: sendnow.sh
#! Author: kittykat
#! Version: 2025.02.12
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='snow'
HostNick='send.now'
HostFuncPrefix='snow'
HostUrls='send.now'
HostDomainRegex='^(http|https)://(.*\.)?send\.now'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
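#! For illustration, with the values above the appended record expands to:
#!   /snow/send.now/snow/send.now:^(http|https)://(.*\.)?send\.now@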
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#! <HostFuncPrefix>_DownloadFile()
snow_DownloadFile() {
local remote_url=${1}
local file_url=${1}
local filecnt=${2}
warnAndRetryUnknownError=false
exitDownloadError=false
exitDownloadNotAvailable=false
fileAlreadyDone=false
download_inflight_path="${WorkDir}/.inflight/"
mkdir -p "$download_inflight_path"
completed_location="${WorkDir}/downloads/"
tor_identity="${RANDOM}"
finalAttempt="false"
for ((z=0; z<=$MaxUrlRetries; z++)); do
if [ $z -eq $MaxUrlRetries ] ; then
finalAttempt="true"
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if snow_FetchFileInfo $finalAttempt && snow_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
return 0
elif [ $z -lt $MaxUrlRetries ]; then
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
fi
fi
if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
sleep 3
fi
done
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
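# Note on the flock names used above: stripping every non-alphanumeric character
# with ${remote_url//[^a-zA-Z0-9]/} turns, e.g., the hypothetical URL
# "https://send.now/abc123/file.7z" into "httpssendnowabc123file7z".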
#!
#! ------------- (2) Fetch File Info Function ----------------- #
#!
snow_FetchFileInfo() {
finalAttempt=$1
maxfetchretries=5
snow_cookie_jar=""
echo -e "${GREEN}# Fetching download link…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
mkdir -p "${WorkDir}/.temp"
snow_cookie_jar=$(mktemp "${WorkDir}/.temp/snow_cookies""${instance_no}"".XXXXXX")
printf " ."
tor_identity="${RANDOM}"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${snow_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "snow_dwnpage$i" "${response}"
fi
if [[ -z $response ]] ; then
rm -f "${snow_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download link.${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
else
continue
fi
fi
if grep -Eqi "Sorry, you are banned" <<< "$response"; then
rm -f "${snow_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download link.${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
else
continue
fi
fi
if grep -Eqi "404|not found|no such file|File was deleted|File not found|File was removed" <<< "$response"; then
rm -f "${snow_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then
printf "\\n"
echo -e "${GREEN}| Post link found${NC}"
post_op=$(grep -oPi '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response")
post_id=$(grep -oPi '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response")
post_rand=$(grep -oPi '(?<=input type="hidden" name="rand" value=").*(?=">)' <<< "$response")
post_referer=$(grep -oPi '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response")
fi
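# The greps above pair a PCRE lookbehind with a lookahead, so against a
# hypothetical line like <input type="hidden" name="id" value="xyz789">
# the "id" pattern captures just xyz789.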
if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then
rm -f "${snow_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download link [1]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [1]" ""
fi
return 1
else
continue
fi
else
break
fi
done
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $snow_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15
form_data="op=$post_op&id=$post_id&rand=$post_rand&referer=&method_free=&method_premium="
response=$(tor_curl_request --insecure -L -svo. -X POST \
-b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \
--data-raw "$form_data" "$remote_url" 2>&1)
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
fi
if [[ -z $response ]] ; then
echo -e "${RED}| Failed to extract download link [2]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
rm -f "${snow_cookie_jar}";
failedRetryDownload "${remote_url}" "Failed to extract download link [2]" ""
fi
return 1
fi
if grep -Eqi "File Not Found" <<< "$response"; then
echo -e "${RED}| The file was not found. It could be deleted or expired${NC}"
rm -f "${snow_cookie_jar}";
exitDownloadError=true
removedDownload "${remote_url}" "The file was not found. It could be deleted or expired"
return 1
fi
if grep -Eqi 'location: https://' <<< "$response"; then
download_url=$(grep -oPi '(?<=location: ).*$' <<< "$response")
download_url=${download_url//[$'\t\r\n']}
filename="${download_url##*/}"
filename=${filename//[$'\t\r\n']}
else
echo -e "${RED}| Failed to extract download link [3]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
rm -f "${snow_cookie_jar}";
failedRetryDownload "${remote_url}" "Failed to extract download link [3]" ""
fi
return 1
fi
echo -e "${GREEN}# Fetching file info…${NC}"
fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/d/)' <<< "$download_url")
for ((j=1; j<=$maxfetchretries; j++)); do
printf " ."
|
||||
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
|
||||
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${snow_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
|
||||
file_header=$(tor_curl_request --insecure --head -Lis \
|
||||
-H "Host: $fshost" \
|
||||
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
|
||||
-H "Accept-Language: en-US,en;q=0.5" \
|
||||
-H "Accept-Encoding: gzip, deflate, br, zstd" \
|
||||
-H "Referer: https://send.now/" \
|
||||
-H "Sec-GPC: 1" \
|
||||
-H "Connection: keep-alive" \
|
||||
-H "Upgrade-Insecure-Requests: 1" \
|
||||
-H "Sec-Fetch-Dest: document" \
|
||||
-H "Sec-Fetch-Mode: navigate" \
|
||||
-H "Sec-Fetch-Site: cross-site" \
|
||||
-H "Sec-Fetch-User: ?1" \
|
||||
-H "Priority: u=0, i" \
|
||||
"$download_url")
|
||||
if [ "${DebugAllEnabled}" == "true" ] ; then
|
||||
debugHtml "${remote_url##*/}" "snow_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
|
||||
fi
|
||||
if [[ -z $file_header ]] ; then
|
||||
if [ $j == $maxfetchretries ] ; then
|
||||
rm -f "${snow_cookie_jar}";
|
||||
printf "\\n"
|
||||
echo -e "${RED}| Failed to extract file info${NC}"
|
||||
warnAndRetryUnknownError=true
|
||||
if [ "${finalAttempt}" == "true" ] ; then
|
||||
failedRetryDownload "${remote_url}" "Failed to extract file info" ""
|
||||
fi
|
||||
return 1
|
||||
else
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
if grep -Eqi 'HTTP.* 404|404 Not Found' <<< "$file_header"; then
|
||||
rm -f "${snow_cookie_jar}";
|
||||
printf "\\n"
|
||||
echo -e "${RED}| The file appears to be gone [404]${NC}"
|
||||
exitDownloadError=true
|
||||
removedDownload "${remote_url}" "The file appears to be gone [404]"
|
||||
return 1
|
||||
fi
|
||||
if ! grep -Eqi 'HTTP.* 200' <<< "$file_header" ; then
if [ $j == $maxfetchretries ] ; then
rm -f "${snow_cookie_jar}";
printf "\\n"
echo -e "${RED}| Bad server response${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Bad server response" ""
fi
return 1
else
continue
fi
fi
if [ -z "$filename" ]; then
filename=$(grep -oP '(?<=filename\*\=).*?(?=$)' <<< "$file_header")
filename="${filename//[$'\t\r\n\0']}"
filename="${filename//UTF-8\'\'}"
fi
if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then
if [ $j == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| Failed to extract file name${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract file name" ""
fi
return 1
else
continue
fi
fi
break #Good to go here
done
touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
if [ ! "$filename_override" == "" ] ; then
filename="$filename_override"
fi
filename=$(sanitize_file_or_folder_name "${filename}")
printf "\\n"
echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
if [ -z "$file_size_bytes" ] ; then
if [ "${finalAttempt}" == "true" ] ; then
|
||||
failedRetryDownload "${remote_url}" "Filesize not found!" ""
|
||||
fi
|
||||
echo -e "${YELLOW}| Filesize not found… retry${NC}"
|
||||
return 1
|
||||
else
|
||||
file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
|
||||
fi
|
||||
echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
|
||||
file_path="${download_inflight_path}${filename}"
|
||||
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
|
||||
if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then
|
||||
return 1
|
||||
fi
|
||||
if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
|
||||
return 1
|
||||
fi
|
||||
echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
|
||||
}
|
||||
#!
|
||||
#! ----------- (3) Fetch File / Download File Function --------------- #
|
||||
#!
|
||||
snow_GetFile() {
|
||||
echo -e "${GREEN}# Downloading…${NC}"
|
||||
echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
|
||||
fileCnt=$1
|
||||
retryCnt=$2
|
||||
finalAttempt=$3
|
||||
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
|
||||
for ((j=1; j<=$MaxDownloadRetries; j++)); do
|
||||
pd_presize=0
|
||||
if [ -f "$file_path" ] ; then
|
||||
pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
|
||||
fi
|
||||
GetRandomUA
|
||||
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
|
||||
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${snow_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
|
||||
if [ "${UseTorCurlImpersonate}" == "true" ]; then
|
||||
if [ "${RateMonitorEnabled}" == "true" ]; then
|
||||
tor_curl_request --insecure -L --no-alpn \
|
||||
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
|
||||
-H "Host: $fshost" \
|
||||
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
|
||||
-H "Accept-Language: en-US,en;q=0.5" \
|
||||
-H "Accept-Encoding: gzip, deflate, br, zstd" \
|
||||
-H "Referer: https://send.now/" \
|
||||
-H "Sec-GPC: 1" \
|
||||
-H "Connection: keep-alive" \
|
||||
-H "Upgrade-Insecure-Requests: 1" \
|
||||
-H "Sec-Fetch-Dest: document" \
|
||||
-H "Sec-Fetch-Mode: navigate" \
|
||||
-H "Sec-Fetch-Site: cross-site" \
|
||||
-H "Sec-Fetch-User: ?1" \
|
||||
-H "Priority: u=0, i" \
|
||||
"$download_url" \
|
||||
--continue-at - --output "$file_path"
|
||||
else
|
||||
tor_curl_request --insecure -L --no-alpn \
|
||||
-H "Host: $fshost" \
|
||||
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
|
||||
-H "Accept-Language: en-US,en;q=0.5" \
|
||||
-H "Accept-Encoding: gzip, deflate, br, zstd" \
|
||||
-H "Referer: https://send.now/" \
|
||||
-H "Sec-GPC: 1" \
|
||||
-H "Connection: keep-alive" \
|
||||
-H "Upgrade-Insecure-Requests: 1" \
|
||||
-H "Sec-Fetch-Dest: document" \
|
||||
-H "Sec-Fetch-Mode: navigate" \
|
||||
-H "Sec-Fetch-Site: cross-site" \
|
||||
-H "Sec-Fetch-User: ?1" \
|
||||
-H "Priority: u=0, i" \
|
||||
"$download_url" \
|
||||
--continue-at - --output "$file_path"
|
||||
fi
|
||||
else
|
||||
if [ "${RateMonitorEnabled}" == "true" ]; then
|
||||
tor_curl_request --insecure -L --no-alpn \
|
||||
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
|
||||
-H "User-Agent: $RandomUA" \
|
||||
-H "Host: $fshost" \
|
||||
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
|
||||
-H "Accept-Language: en-US,en;q=0.5" \
|
||||
-H "Accept-Encoding: gzip, deflate, br, zstd" \
|
||||
-H "Referer: https://send.now/" \
|
||||
-H "Sec-GPC: 1" \
|
||||
-H "Connection: keep-alive" \
|
||||
-H "Upgrade-Insecure-Requests: 1" \
|
||||
-H "Sec-Fetch-Dest: document" \
|
||||
-H "Sec-Fetch-Mode: navigate" \
|
||||
-H "Sec-Fetch-Site: cross-site" \
|
||||
-H "Sec-Fetch-User: ?1" \
|
||||
-H "Priority: u=0, i" \
|
||||
"$download_url" \
|
||||
--continue-at - --output "$file_path"
|
||||
else
|
||||
tor_curl_request --insecure -L --no-alpn \
|
||||
-H "User-Agent: $RandomUA" \
|
||||
-H "Host: $fshost" \
|
||||
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \
|
||||
-H "Accept-Language: en-US,en;q=0.5" \
|
||||
-H "Accept-Encoding: gzip, deflate, br, zstd" \
|
||||
-H "Referer: https://send.now/" \
|
||||
-H "Sec-GPC: 1" \
|
||||
-H "Connection: keep-alive" \
|
||||
-H "Upgrade-Insecure-Requests: 1" \
|
||||
-H "Sec-Fetch-Dest: document" \
|
||||
-H "Sec-Fetch-Mode: navigate" \
|
||||
-H "Sec-Fetch-Site: cross-site" \
|
||||
-H "Sec-Fetch-User: ?1" \
|
||||
-H "Priority: u=0, i" \
|
||||
"$download_url" \
|
||||
--continue-at - --output "$file_path"
|
||||
fi
|
||||
fi
|
||||
received_file_size=0
|
||||
if [ -f "$file_path" ] ; then
|
||||
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
|
||||
fi
|
||||
if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
|
||||
containsHtml=false
|
||||
else
|
||||
containsHtml=true
|
||||
fi
|
||||
downDelta=$(( received_file_size - pd_presize ))
|
||||
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
|
||||
if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
|
||||
if [ -f "${file_path}" ] ; then
|
||||
if ((pd_presize > 0)); then
|
||||
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
|
||||
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
|
||||
truncate -s $pd_presize "${file_path}"
|
||||
else
|
||||
echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
|
||||
rm -f "${file_path}"
|
||||
fi
|
||||
fi
|
||||
if ((j >= $MaxDownloadRetries)) ; then
|
||||
rm -f "$flockDownload";
|
||||
if [ "${finalAttempt}" == "true" ] ; then
|
||||
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
|
||||
fi
|
||||
return 1
|
||||
else
|
||||
continue
|
||||
fi
|
||||
elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
|
||||
if [ -f "${file_path}" ] ; then
|
||||
if ((pd_presize > 0)); then
|
||||
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
|
||||
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
|
||||
truncate -s $pd_presize "${file_path}"
|
||||
else
|
||||
echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
|
||||
rm -f "${file_path}"
|
||||
fi
|
||||
fi
|
||||
if ((j >= $MaxDownloadRetries)) ; then
|
||||
rm -f "$flockDownload";
|
||||
if [ "${finalAttempt}" == "true" ] ; then
|
||||
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
|
||||
fi
|
||||
return 1
|
||||
else
|
||||
continue
|
||||
fi
|
||||
elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
|
||||
if [ -f "$file_path" ] ; then
|
||||
rm -rf "$file_path"
|
||||
fi
|
||||
echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
|
||||
if ((j >= $MaxDownloadRetries)) ; then
|
||||
rm -f "$flockDownload";
|
||||
if [ "${finalAttempt}" == "true" ] ; then
|
||||
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
|
||||
fi
|
||||
return 1
|
||||
else
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
|
||||
echo -e "\n${RED}Download failed, file is incomplete.${NC}"
|
||||
if ((j >= $MaxDownloadRetries)) ; then
|
||||
rm -f "$flockDownload";
|
||||
if [ "${finalAttempt}" == "true" ] ; then
|
||||
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
|
||||
fi
|
||||
return 1
|
||||
else
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
else
|
||||
break
|
||||
fi
|
||||
done
|
||||
rm -f "$flockDownload";
|
||||
rm -f "${snow_cookie_jar}";
|
||||
ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
|
||||
return 0
|
||||
}
|
||||
#!
|
||||
#! --------------- Host Extra Functions ------------------- #
|
||||
#!
|
||||
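Both send.now transfer branches above end in the same integrity pass: compare the bytes received against the advertised content-length, sniff for an HTML error body, then either truncate back to the pre-download size or discard the partial. A condensed, hypothetical sketch of that pattern (check_html stands in for MAD's CheckNoHtml; it is not a real MAD function):

pd_presize=0
[ -f "$file_path" ] && pd_presize=$(stat --format="%s" "$file_path")
# ... the resumed download appends to "$file_path" here ...
received=$(stat --format="%s" "$file_path")
delta=$(( received - pd_presize ))
if (( delta > 0 && delta < 1024 )) || check_html "$file_path"; then
    if (( pd_presize > 0 )); then
        truncate -s "$pd_presize" "$file_path"   # roll back to the last known-good byte
    else
        rm -f "$file_path"                       # nothing salvageable; retry clean
    fi
fi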
157 hosts/up_lainsafe.sh Normal file
@ -0,0 +1,157 @@
#! Name: up_lainsafe.sh
#! Author: kittykat
#! Version: 2025.02.03
#! Desc: Add support for uploading files to pomf2.lain.la (lainsafe)
#! Info: https://pomf2.lain.la/<filehash>
#! MaxSize: 1GB
#! Expire: ??
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='lain'
HostNick='lainsafe'
HostFuncPrefix='lain'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
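For illustration only: each record appended above can be recovered by splitting the merged string on '@' and its fields on '/'. This reader loop is a hypothetical sketch of that convention, not mad.sh's actual parser:

ListUploadHosts='/lain/lainsafe/lain@/uhive/uploadhive/uhive@'
IFS='@' read -ra records <<< "$ListUploadHosts"
for rec in "${records[@]}"; do
    IFS='/' read -r _ code nick prefix <<< "$rec"   # leading '/' yields an empty first field
    echo "code=$code nick=$nick entry=${prefix}_UploadFile"
done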
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
lain_UploadFile() {
local _hostCode=${1}
local filepath=${2}
local filecnt=${3}
local pline=${4}
local filename="${filepath##*/}"
warnAndRetryUnknownError=false
exitUploadError=false
exitUploadNotAvailable=false
fileAlreadyDone=false
tor_identity="${RANDOM}"
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
MaxUploadSizeInBytes=1073741824
fsize=$(GetFileSize "$filepath" "false")
if ((fsize > MaxUploadSizeInBytes)); then
rm -f "${UploadTicket}"
echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
|
||||
failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
|
||||
return 1
fi
finalAttempt="false"
for ((z=0; z<=$MaxUploadRetries; z++)); do
if [ $z -eq $MaxUploadRetries ] ; then
finalAttempt="true"
fi
trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if lain_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
return 0
elif [ $z -lt $MaxUploadRetries ]; then
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
fi
fi
if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${UploadTicket}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
sleep 3
fi
done
rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
lain_PostFile() {
local filepath=$1
local _hostCode=$2
local filename=$3
local fileCnt=$4
local retryCnt=$5
local finalAttempt=$6
local pline=${7}
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
for ((i=0; i<=20; i++)); do
if ((i > 0)); then
printf "\033[1A\r"
printf "\033[1A\r"
printf "\033[1A\r"
printf "\033[1A\r"
printf "\33[2K\r"
fi
echo -e "${BLUE}| Attempt:${NC} $((i+1))${NC}"
tor_identity="${RANDOM}"
PostUrlHost='https://pomf.lain.la/upload.php?output=html'
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i \
-H "Content-Type: multipart/form-data" \
-F "files[]=@${arrFiles[@]}" \
"${PostUrlHost}")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
if [[ -z $response ]] || grep -Eqi 'HTTP/2 403|403 Forbidden' <<< "${response}" ; then
if [ "${finalAttempt}" == "true" ] ; then
err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
exitUploadError=true
return 1
else
continue
fi
elif grep -Eqi 'a href="https://pomf2.lain.la/f/' <<< "${response}" ; then
url=$(grep -oPi -m 1 '(?<=a href=").*?(?=">https:)' <<< "$response")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="$url"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "${response}"
return 0
else
err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
if [ "${finalAttempt}" == "true" ] ; then
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
exitUploadError=true
return 1
else
return 1
fi
fi
done
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
return 1
}
#!
#! --------------- Host Extra Functions ------------------- #
#!
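For reference, the exchange above can be reproduced outside MAD: pomf-style endpoints take a multipart files[] field, and with output=html the response body carries the stored file's link as an anchor tag. A direct curl probe of that flow (no Tor wrapper; ./test.bin is a placeholder file, and the grep mirrors the handler above):

curl -si -F "files[]=@./test.bin" 'https://pomf.lain.la/upload.php?output=html' \
    | grep -oPi -m 1 '(?<=a href=").*?(?=">https:)'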
@ -95,12 +95,15 @@ uwab_PostFile() {
local finalAttempt=$6
local pline=${7}
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
PostUrlHost="https://files.uwabaki.party/upload.php"
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
PostUrlHost="https://files.uwabaki.party/index.php"
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i -L \
-H "Content-Type: multipart/form-data" \
-F "files[]=@${arrFiles[@]}" \
-F "formatted=true" \
-F "encryption=off" \
-F "files[]=@$filepath" \
"${PostUrlHost}")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
98 mad.sh
@ -30,9 +30,15 @@
#
# * Everyone who provided feedback and helped test.. and those who wish to remain anonymous

ScriptVersion=2025.02.02
ScriptVersion=2025.02.12
#=================================================
# Recent Additions
# 2025.02.12 - [sendnow] Add send.now as download host
# 2025.02.11 - [ranoz] Fix filename (to handle fileid added to download urls)
# 2025.02.10 - [mad] Add detection of custom "Removed" response on cdn get from direct links
# 2025.02.06 - [ranoz] Add UNAVAILABLE_FOR_LEGAL_REASONS response handling
# 2025.02.04 - [mad] Add ConnectTimeoutUpload to separate configurable up/down timeouts
# 2025.02.03 - [up_lainsafe] Add pomf2.lain.la as upload host (1GB)
# 2025.02.02 - [mad] Add function to handle urlencode of cyrillic / kanji / latin / etc
# 2025.02.02 - [ranoz] Fix handling filenames containing cyrillic / kanji / latin chars
# 2025.02.02 - [all] Reduced character processing for urlencode to special url characters
@ -53,60 +59,6 @@ ScriptVersion=2025.02.02
# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function
# 2025.01.13 - [up_anonfile] Modify to use new upload url
# 2025.01.12 - [ateasystems] Update 404 Not found response
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
# 2025.01.03 - [gofile] Detect "Bulk download is a Premium feature" response (no children)
# 2025.01.02 - [up_axfc] Update PUT response check to handle kanji chars (remove)
# 2025.01.02 - [dashfile] Add response 'This file reached max downloads limit'. New cookie on captcha fail
# 2024.12.28 - [dashfile] Update captcha code check
# 2024.12.28 - [anonfile] Add new download link href response
# 2024.12.28 - [fileblade] Add additional response handling (subsequent downloads, unknown warnings)
# 2024.12.28 - [eternalhosting] Update eternal.onion to handle landing page (eternal.onion/file/)
# 2024.12.26 - [up_kouploader / up_axfc / up_torup] Fixed failedRetryUpload (was using download logging)
# * Thanks Belky
# 2024.12.26 - [anonfile / up_anonfile] Add anonfile.de as upload / download host
# 2024.12.25 - [dashfile / up_dashfile] Add dashfile.net as upload / download host
# 2024.12.25 - [isupload] Change to use tor_curl_request_extended (server response is often slow)
# - Accept 200 OK response to continue (do not require filename / filesize)
# 2024.12.25 - [fileblade] Add response handling for free file download disallowed > 100MB.
# "The file owner does not allow FREE users to download files which are over 100 MB"
|
||||
# 2024.12.25 - [mad] Add "UploadHiveRandomizeExt" option to config with default=true
|
||||
# 2024.12.25 - [uploadhive / up_uploadhive] Update renaming random ext files to their original names
|
||||
# - Handle multipart 7z & rar (abc.7z.###, abc.part#.rar)
|
||||
# 2024.12.25 - [syspro / up_syspro] Add share.syspro.com.br as upload / download host
|
||||
# 2024.12.24 - [mad] Add EnabledUploadHosts / EnabledDownloadHosts setting to fine-tune which hosts to use
|
||||
# ** Options: recommended, online, all (Default=recommended)
|
||||
# - recommended: loads all hosts verified working with MAD
|
||||
# - online: loads all hosts available online / working (includes captcha / js restricted)
|
||||
# - all: loads all hosts in hosts folder
|
||||
# 2024.12.24 - [up_ateasystems / ateasystems] Add share.ateasystems.com as upload / download host
|
||||
# 2024.12.23 - [up_uploadbay / uploadbay] Add uploadbay.net as upload / download host
|
||||
# 2024.12.23 - [up_herbolistique / herbolistique] Add transfert.herbolistique.com as upload / download host
|
||||
# 2024.12.23 - [uploadhive] Auto-rename random extension downloads
|
||||
# 2024.12.23 - [up_uploadhive] Change upload file extension to random 3 letters (uhive blocks .7z, .zip, .rar now)
|
||||
# 2024.12.23 - [up_offshorecat] Fixed upload. Updated apikey.
|
||||
# 2024.12.23 - [up_fileditch] Fixed upload. Added response handling for Tor Blocked node (retries)
|
||||
# 2024.12.23 - [up_freesocial / freesocial] Add files.freesocial.co as upload / download host
|
||||
# 2024.12.23 - [up_cyssoux / cyssoux] Add partage.cyssoux.fr as upload / download host
|
||||
# 2024.12.22 - [mad] Add jira_Upload function -- used for all jirafeau hosts
|
||||
# 2024.12.22 - [up_*AllJiraHosts*] Consolidated / moved all hosts upload functions to mad.sh
|
||||
# - Minimized jira host code (~6000 lines of duplicates removed)
|
||||
# - Jira hosts: acid, anarchaserver, depotkaz, dictvm, eddowding, familleflender, filesquid,
|
||||
# free4e, harrault, linxx, moocloud, nantes, netlib, skrepr, soyjak
|
||||
# 2024.12.20 - [fileblade / up_fileblade] Add fileblade.com as upload / download host
|
||||
# 2024.12.20 - [isupload / up_isupload] Add isupload.com as upload / download host
|
||||
# 2024.12.15 - [mediafire] Add mediafire download link processing
|
||||
|
||||
# -- See ./documentation/!Changelog (Historical).txt for further changes -- #
|
||||
|
||||
|
|
@ -179,6 +131,10 @@ EnabledDownloadHosts="recommended"
|
|||
# UPLOAD SECTION
|
||||
#-------------------
|
||||
|
||||
# Connection attempt timeout (tor_curl/tor_curl_impersonate)
|
||||
# @Default=15
|
||||
ConnectTimeoutUpload=15
|
||||
|
||||
# Upload file retries per host when uploading
|
||||
# @Default=4
|
||||
MaxUploadRetries=4
|
||||
|
|
@ -434,15 +390,15 @@ tor_curl_request_extended() {
|
|||
tor_curl_upload() {
|
||||
if [ "${UseTorCurlImpersonate}" == "true" ]; then
|
||||
if [ "${RateMonitorEnabled}" == "true" ]; then
|
||||
"${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
|
||||
"${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
|
||||
else
|
||||
"${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
|
||||
"${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
|
||||
fi
|
||||
else
|
||||
if [ "${RateMonitorEnabled}" == "true" ]; then
|
||||
curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
|
||||
curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
|
||||
else
|
||||
curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
|
||||
curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
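Since tor_curl_upload now reads its own knob, a slow upload host can be given a longer connect window without touching download behavior. A hedged mad.conf sketch (the 60 is illustrative, not a shipped default; both settings ship as 15):

ConnectTimeout=15          # downloads: tor_curl_request / tor_curl_request_extended
ConnectTimeoutUpload=60    # uploads: tor_curl_upload, e.g. for sluggish Tor circuits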
@ -454,7 +410,7 @@ SetEnabledUploadHosts() {
lstEnabledUploadHosts+="up_pixeldrain,up_quax,up_ranoz,up_skrepr,up_torup,up_turboonion,up_uploadee,"
lstEnabledUploadHosts+="up_uploadhive,up_uploadraja,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
lstEnabledUploadHosts+="up_gagneux,up_uwabaki"
lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe"
elif [[ "$EnabledUploadHosts" == "online" ]] ; then
lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bedrive,up_bowfile,up_depotkaz,"
lstEnabledUploadHosts+="up_familleflender,up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile,"
@ -463,7 +419,7 @@ SetEnabledUploadHosts() {
lstEnabledUploadHosts+="up_shareonline,up_skrepr,up_torup,up_turboonion,up_uploadee,up_uploadhive,"
lstEnabledUploadHosts+="up_uploadraja,up_yolobit,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
lstEnabledUploadHosts+="up_gagneux,up_uwabaki"
lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe"
fi
}
SetEnabledDownloadHosts() {
@ -475,7 +431,7 @@ SetEnabledDownloadHosts() {
lstEnabledDownloadHosts+="moocloud,nantes,netlib,offshorecat,oshi,pixeldrain,quax,ranoz,skrepr,"
lstEnabledDownloadHosts+="tempfileme,tempsh,torup,turboonion,up2share,uploadee,uploadev,uploadhive,"
lstEnabledDownloadHosts+="youdbox,herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,"
lstEnabledDownloadHosts+="fileland,fireget,euromussels,ramsgaard,uwabaki,gagneux"
lstEnabledDownloadHosts+="fileland,fireget,euromussels,ramsgaard,uwabaki,gagneux,sendnow"
elif [[ "$EnabledDownloadHosts" == "online" ]] ; then
lstEnabledDownloadHosts="1fichier,anonsharing,bedrive,biteblob,bowfile,click,cyssoux,"
lstEnabledDownloadHosts+="dailyuploads,dataupload,depotkaz,dictvm,dosya,downloadgg,eddowding,eternalhosting,"
@ -485,7 +441,7 @@ SetEnabledDownloadHosts() {
lstEnabledDownloadHosts+="oshi,pixeldrain,quax,ranoz,shareonline,skrepr,tempfileme,tempsh,torup,"
lstEnabledDownloadHosts+="turboonion,up2share,uploadee,uploadev,uploadhive,yolobit,youdbox,herbolistique,"
lstEnabledDownloadHosts+="uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,fileland,fireget,"
lstEnabledDownloadHosts+="euromussels,ramsgaard,uwabaki,gagneux"
lstEnabledDownloadHosts+="euromussels,ramsgaard,uwabaki,gagneux,sendnow"
fi
}
GetRandomFiledotUser() {
@ -3303,6 +3259,13 @@ direct_FetchFileInfo() {
exitDownloadNotAvailable=true
return 1
fi
if grep -Eqi 'was removed|no such file|was deleted|not found|banned' <<< "${file_header}" ; then
printf "\\n"
echo -e "${RED}| 404. The file was not found or has been removed.${NC}"
removedDownload "${remote_url}"
exitDownloadNotAvailable=true
return 1
fi
if ! grep -Eqi 'HTTP/.*200|HTTP/.*302' <<< "${file_header}" ; then
hResponse=$(grep -oPi 'HTTP/.*? \K.*$' <<< "${file_header}")
if ((j>=maxretries)); then
@ -3433,6 +3396,15 @@ direct_GetFile() {
containsHtml=true
fi
if [ "$containsHtml" == "true" ]; then
if grep -Eqi 'was removed|no such file|was deleted|not found|banned' < "$file_path" ; then
printf "\\n"
echo -e "${RED}| The file was not found or has been removed.${NC}"
rm -f "${file_path}"
rm -f "$flockDownload";
removedDownload "${remote_url}" "The file was not found or has been removed."
exitDownloadNotAvailable=true
return 1
fi
echo -e "${YELLOW}Download Failed (contains html)${NC} partial removed..."
rm -f "${file_path}"
if ((j >= $MaxDownloadRetries)) ; then
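Both direct_* hunks key removal detection on the same small phrase set: the HEAD response in direct_FetchFileInfo and the downloaded body in direct_GetFile. A quick standalone check of the pattern (the sample body text is made up):

printf 'Sorry, this file was deleted by the uploader\n' > /tmp/body.html
if grep -Eqi 'was removed|no such file|was deleted|not found|banned' < /tmp/body.html; then
    echo "flagged as removed"
fi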
@ -24,6 +24,7 @@ LoadPlugins=""
# UPLOAD SECTION
#-------------------

ConnectTimeoutUpload=15
MaxUploadRetries=4
DefaultUploadHosts='1f,uhive,oshi'
UploadSpeedMin=10
@ -11,7 +11,7 @@
# 100MB upee upload.ee 5GB fd fileditch.com 400MB dash dashfile.net
# 40GB isup isupload.com 100MB fb fileblade.com 500MB fland fileland.io
# 100MB ubay uploadbay.net 2GB sysp syspro.com.br 1GB uwab uwabaki.party
# 512MB anon anonfile.de 100MB fget fireget.com
# 512MB anon anonfile.de 100MB fget fireget.com 1GB lain pomf2.lain.la
# Jirafeau hosts (recommended upload 100MB splits as many host only support that)
# 10GB anarc anarchaserver 1GB kaz depot.kaz.bzh 5GB squid filesquid
# 10GB nant nantes.cloud 500MB soy soyjak.download 512MB linx linxx.net