# 2025.03.30 - [ranoz] Fix to handle new cookie requirements
# 2025.03.28 - [up_ranoz] Fix to handle new cookie requirements
# 2025.03.28 - [up_anonfileio] Add anonfile.io as upload host
# 2025.03.28 - [anonfileio] Add anonfile.io as download host
# 2025.03.25 - [up_ranoz] Disable MAD randomized extension on uploads (7z block disabled)
# 2025.03.21 - [mad] Update random user agents 2025.03
# 2025.03.20 - [jira hosts] Update 3 jira hosts (retention and maxsize)
# 2025.03.16 - [torup] Fix torup cookies
# 2025.03.15 - [1fichier] Get new node prior to cdn download (greater possibility of a faster node)
# 2025.03.10 - [uploadscloud] Add uploadscloud.com as download host
This commit is contained in:
parent a62ac882de
commit 270e378e08
18 changed files with 1338 additions and 630 deletions
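The cookie fixes in this changelog ([ranoz], [up_ranoz], [torup]) follow one pattern, visible in the ranoz.sh and torup.sh hunks below: the first page fetch now writes a per-download cookie jar, and every follow-up request replays it with -b/-c. A minimal standalone sketch of that flow (the URL, the jar path, and the use of plain curl are illustrative stand-ins, not values from mad.sh):

    #!/usr/bin/env bash
    # Hedged sketch of the cookie-jar flow behind the ranoz/torup fixes;
    # example.com and the jar location are placeholders, not script values.
    set -euo pipefail
    remote_url='https://example.com/file/abc123'
    cookie_jar="$(mktemp)"
    trap 'rm -f "$cookie_jar"' EXIT

    # First request: -c stores whatever Set-Cookie values the host now requires.
    response=$(curl --insecure -L -i -s -c "$cookie_jar" "$remote_url")

    # Follow-up request: -b replays the stored cookies, -c keeps them current.
    file_header=$(curl --insecure --head -L -i -s -b "$cookie_jar" -c "$cookie_jar" "$remote_url")
    printf '%s\n' "$file_header"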
@@ -1,4 +1,4 @@
-DateTime: 25.03.08
+DateTime: 25.03.30
 
 Files:
 ./hosts/1fichier.sh
@@ -6,6 +6,7 @@ Files:
 ./hosts/acid.sh
 ./hosts/anarchaserver.sh
 ./hosts/anonfile.sh
+./hosts/anonfileio.sh
 ./hosts/anonsharing.sh
 ./hosts/archived/nekofile.sh
 ./hosts/ateasystems.sh
@@ -79,10 +80,12 @@ Files:
 ./hosts/uploadev.sh
 ./hosts/uploadflix.sh
 ./hosts/uploadhive.sh
+./hosts/uploadscloud.sh
 ./hosts/up_1fichier.sh
 ./hosts/up_acid.sh
 ./hosts/up_anarchaserver.sh
 ./hosts/up_anonfile.sh
+./hosts/up_anonfileio.sh
 ./hosts/up_anonsharing.sh
 ./hosts/up_ateasystems.sh
 ./hosts/up_axfc.sh
@@ -166,8 +169,8 @@ _________________________________________________________________________
 ./hosts/1fichier.sh:48: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s "${remote_url}")
 ./hosts/1fichier.sh:160: cdn_request=$(tor_curl_request --insecure -s -L -b "${fich_cookie_jar}" -c "${fich_cookie_jar}" -F "submit=Download" -F "pass=${fich_user_provided_password}" -F "adz=${fich_adz_parameter}" "${remote_url}")
 ./hosts/1fichier.sh:194: file_header=$(tor_curl_request --insecure -sSIL -e "${remote_url}" "${target_file_link}")
-./hosts/1fichier.sh:261: tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}"
-./hosts/1fichier.sh:354: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -c "${fich_cookie_jar}" -s "${remote_url}")
+./hosts/1fichier.sh:262: tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}"
+./hosts/1fichier.sh:355: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -c "${fich_cookie_jar}" -s "${remote_url}")
 ./hosts/9saves.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url")
 ./hosts/9saves.sh:139: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/9saves.sh:188: file_header=$(tor_curl_request --insecure --head -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$download_url")
@@ -356,13 +359,13 @@ _________________________________________________________________________
 ./hosts/quax.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
 ./hosts/quax.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
-./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
-./hosts/ranoz.sh:160: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
-./hosts/ranoz.sh:270: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./hosts/ranoz.sh:272: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/ranoz.sh:276: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:92: response=$(tor_curl_request --insecure -L -i -s \
+./hosts/ranoz.sh:168: file_header=$(tor_curl_request --insecure --head -L -i -s \
+./hosts/ranoz.sh:281: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./hosts/ranoz.sh:283: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:287: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:292: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:307: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/sendnow.sh:89: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
 ./hosts/sendnow.sh:162: response=$(tor_curl_request --insecure -L -svo. -X POST \
 ./hosts/sendnow.sh:206: file_header=$(tor_curl_request_extended --insecure --head -Lis \
@@ -398,11 +401,11 @@ _________________________________________________________________________
 ./hosts/tempsh.sh:236: tor_curl_request --insecure -X POST \
 ./hosts/tempsh.sh:250: tor_curl_request --insecure -X POST \
 ./hosts/torup.sh:92: response=$(tor_curl_request --insecure -L -s \
-./hosts/torup.sh:191: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./hosts/torup.sh:193: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/torup.sh:199: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/torup.sh:206: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/torup.sh:222: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/torup.sh:192: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./hosts/torup.sh:194: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/torup.sh:201: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/torup.sh:209: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/torup.sh:226: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/up2share.sh:91: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
 ./hosts/up2share.sh:144: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
 ./hosts/up2share.sh:195: file_header=$(tor_curl_request --insecure -L -s --head \
@@ -435,9 +438,15 @@ _________________________________________________________________________
 ./hosts/uploadhive.sh:135: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url" --connect-to uploadhive.com::172.67.130.243)
 ./hosts/uploadhive.sh:249: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/uploadhive.sh:251: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
+./hosts/uploadscloud.sh:90: response=$(tor_curl_request --insecure -L -s -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" "$remote_url")
+./hosts/uploadscloud.sh:142: response=$(tor_curl_request --insecure -svo. -X POST \
+./hosts/uploadscloud.sh:183: file_header=$(tor_curl_request --insecure --head -L -s -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" --referer "$remote_url" "$download_url")
+./hosts/uploadscloud.sh:306: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./hosts/uploadscloud.sh:308: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
 ./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/")
 ./hosts/up_1fichier.sh:180: response=$(tor_curl_upload --insecure -L \
 ./hosts/up_anonfile.sh:102: response=$(tor_curl_upload --insecure -i \
+./hosts/up_anonfileio.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_anonsharing.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_ateasystems.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_axfc.sh:109: response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url")
@@ -471,8 +480,9 @@ _________________________________________________________________________
 ./hosts/up_oshi.sh:110: response=$(tor_curl_upload --insecure \
 ./hosts/up_pixeldrain.sh:112: response=$(tor_curl_upload --insecure -X PUT \
 ./hosts/up_quax.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_ranoz.sh:129: response=$(tor_curl_upload --insecure -L -i -s \
-./hosts/up_ranoz.sh:156: response=$(tor_curl_upload --insecure -i -X PUT \
+./hosts/up_ranoz.sh:133: response=$(tor_curl_upload --insecure -L -i -s \
+./hosts/up_ranoz.sh:140: response=$(tor_curl_upload --insecure -Lis \
+./hosts/up_ranoz.sh:169: response=$(tor_curl_upload --insecure -i -X PUT \
 ./hosts/up_sendnow.sh:101: response=$(tor_curl_request --insecure -L -s 'https://send.now/upload')
 ./hosts/up_sendnow.sh:138: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_sendspace.sh:106: response=$(tor_curl_request --insecure -L -s -b "${ss_cookie_jar}" -c "${ss_cookie_jar}" "https://sendspace.com")
@@ -498,119 +508,119 @@ _________________________________________________________________________
 ./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
 ./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
-./mad.sh:101:UseTorCurlImpersonate=false
-./mad.sh:397:tor_curl_request() {
-./mad.sh:398: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:399: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:401: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:404:tor_curl_request_extended() {
-./mad.sh:406: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
-./mad.sh:409: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
-./mad.sh:412:tor_curl_upload() {
-./mad.sh:413: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
-./mad.sh:417: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
-./mad.sh:421: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
-./mad.sh:423: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
-./mad.sh:1425:install_curl_impersonate() {
-./mad.sh:1427: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
-./mad.sh:1428: echo -e "- Currently uses curl v8.1.1."
-./mad.sh:1432: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
-./mad.sh:1433: echo -e "+ Currently uses curl v8.7.1"
-./mad.sh:1437: PS3='Please select which curl_impersonate to install: '
-./mad.sh:1445: install_curl_impersonate_lwthiker_orig
-./mad.sh:1449: install_curl_impersonate_lexiforest_fork
-./mad.sh:1459:install_curl_impersonate_lwthiker_orig() {
-./mad.sh:1463: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
-./mad.sh:1464: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
-./mad.sh:1467: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
-./mad.sh:1470: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
-./mad.sh:1472: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1475: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
-./mad.sh:1485: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
-./mad.sh:1487: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1490: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
-./mad.sh:1492: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
-./mad.sh:1540: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
-./mad.sh:1569: echo -e "| Extracting curl_impersonate..."
-./mad.sh:1571: rm -f "${ScriptDir}"/curl*
-./mad.sh:1572: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
-./mad.sh:1573: mv "$extract_location/curl_ff109" "${ScriptDir}/"
-./mad.sh:1574: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
-./mad.sh:1582:install_curl_impersonate_lexiforest_fork() {
-./mad.sh:1586: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
-./mad.sh:1587: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
-./mad.sh:1590: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
-./mad.sh:1593: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
-./mad.sh:1595: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1598: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
-./mad.sh:1608: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
-./mad.sh:1610: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1613: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
-./mad.sh:1615: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
-./mad.sh:1663: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
-./mad.sh:1692: echo -e "| Extracting curl_impersonate..."
-./mad.sh:1694: rm -f "${ScriptDir}"/curl*
-./mad.sh:1695: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
-./mad.sh:1696: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
-./mad.sh:1697: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
-./mad.sh:1859: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
-./mad.sh:1867: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1868: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1877: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
-./mad.sh:1879: echo -e "$maud_curl"
-./mad.sh:1881: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1883: echo -e "$maud_torcurl"
-./mad.sh:1895: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1896: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1905: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
-./mad.sh:1907: echo -e "$maud_curl"
-./mad.sh:1909: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1911: echo -e "$maud_torcurl"
-./mad.sh:1917: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1918: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1927: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
-./mad.sh:1929: echo -e "$maud_curl"
-./mad.sh:1931: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1933: echo -e "$maud_torcurl"
-./mad.sh:2888: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:2889: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:2891: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
-./mad.sh:3063: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:3064: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:3066: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
-./mad.sh:3264: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
-./mad.sh:3271: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
-./mad.sh:3408: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
-./mad.sh:3465: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3467: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3665: response=$(tor_curl_upload --insecure -i \
-./mad.sh:3672: response=$(tor_curl_upload --insecure -i \
-./mad.sh:3743:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:3744: curl_impersonate=()
-./mad.sh:3745: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -type f -name "curl_*" -printf '%p\n' | sort -Vk1)
-./mad.sh:3746: bFoundCurlHeader=false
-./mad.sh:3750: curl_impersonate=($fil)
-./mad.sh:3751: bFoundCurlHeader=true
-./mad.sh:3755: if [[ "$bFoundCurlHeader" == "false" ]]; then
-./mad.sh:3756: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
-./mad.sh:3759: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
-./mad.sh:3762: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
-./mad.sh:3764: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
-./mad.sh:3768: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
-./mad.sh:3769: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
-./mad.sh:3772: echo -e "run $0 install_curl_impersonate\\n"
-./mad.sh:3774: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
-./mad.sh:3775: UseTorCurlImpersonate=false
-./mad.sh:3776: install_curl_impersonate
-./mad.sh:3860: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
-./mad.sh:3861: printf " %s install_curl_impersonate\\n" "$0"
-./mad.sh:3939:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
-./mad.sh:3940: install_curl_impersonate
-./mad.sh:3971:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
-./mad.sh:3972: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:3974: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:71:UseTorCurlImpersonate=false
+./mad.sh:367:tor_curl_request() {
+./mad.sh:368: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:369: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:371: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:374:tor_curl_request_extended() {
+./mad.sh:376: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:377: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:379: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:382:tor_curl_upload() {
+./mad.sh:383: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:385: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
+./mad.sh:387: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
+./mad.sh:391: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
+./mad.sh:393: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
+./mad.sh:1392:install_curl_impersonate() {
+./mad.sh:1394: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
+./mad.sh:1395: echo -e "- Currently uses curl v8.1.1."
+./mad.sh:1399: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
+./mad.sh:1400: echo -e "+ Currently uses curl v8.7.1"
+./mad.sh:1404: PS3='Please select which curl_impersonate to install: '
+./mad.sh:1412: install_curl_impersonate_lwthiker_orig
+./mad.sh:1416: install_curl_impersonate_lexiforest_fork
+./mad.sh:1426:install_curl_impersonate_lwthiker_orig() {
+./mad.sh:1430: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
+./mad.sh:1431: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
+./mad.sh:1434: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
+./mad.sh:1437: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
+./mad.sh:1439: debugHtml "github" "lbf_inst_curlimp$j" "$response"
+./mad.sh:1442: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
+./mad.sh:1452: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
+./mad.sh:1454: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1457: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
+./mad.sh:1459: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
+./mad.sh:1507: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
+./mad.sh:1536: echo -e "| Extracting curl_impersonate..."
+./mad.sh:1538: rm -f "${ScriptDir}"/curl*
+./mad.sh:1539: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
+./mad.sh:1540: mv "$extract_location/curl_ff109" "${ScriptDir}/"
+./mad.sh:1541: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
+./mad.sh:1549:install_curl_impersonate_lexiforest_fork() {
+./mad.sh:1553: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
+./mad.sh:1554: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
+./mad.sh:1557: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
+./mad.sh:1560: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
+./mad.sh:1562: debugHtml "github" "lbf_inst_curlimp$j" "$response"
+./mad.sh:1565: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
+./mad.sh:1575: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
+./mad.sh:1577: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1580: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
+./mad.sh:1582: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
+./mad.sh:1630: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
+./mad.sh:1659: echo -e "| Extracting curl_impersonate..."
+./mad.sh:1661: rm -f "${ScriptDir}"/curl*
+./mad.sh:1662: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
+./mad.sh:1663: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
+./mad.sh:1664: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
+./mad.sh:1826: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
+./mad.sh:1834: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1835: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1844: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
+./mad.sh:1846: echo -e "$maud_curl"
+./mad.sh:1848: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1850: echo -e "$maud_torcurl"
+./mad.sh:1862: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1863: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1872: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
+./mad.sh:1874: echo -e "$maud_curl"
+./mad.sh:1876: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1878: echo -e "$maud_torcurl"
+./mad.sh:1884: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1885: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1894: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
+./mad.sh:1896: echo -e "$maud_curl"
+./mad.sh:1898: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1900: echo -e "$maud_torcurl"
+./mad.sh:2855: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:2856: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:2858: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:3030: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:3031: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:3033: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:3231: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
+./mad.sh:3238: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
+./mad.sh:3375: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
+./mad.sh:3432: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3434: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3632: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3639: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3710:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:3711: curl_impersonate=()
+./mad.sh:3712: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -type f -name "curl_*" -printf '%p\n' | sort -Vk1)
+./mad.sh:3713: bFoundCurlHeader=false
+./mad.sh:3717: curl_impersonate=($fil)
+./mad.sh:3718: bFoundCurlHeader=true
+./mad.sh:3722: if [[ "$bFoundCurlHeader" == "false" ]]; then
+./mad.sh:3723: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
+./mad.sh:3726: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
+./mad.sh:3729: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
+./mad.sh:3731: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
+./mad.sh:3735: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
+./mad.sh:3736: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
+./mad.sh:3739: echo -e "run $0 install_curl_impersonate\\n"
+./mad.sh:3741: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
+./mad.sh:3742: UseTorCurlImpersonate=false
+./mad.sh:3743: install_curl_impersonate
+./mad.sh:3827: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
+./mad.sh:3828: printf " %s install_curl_impersonate\\n" "$0"
+./mad.sh:3906:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
+./mad.sh:3907: install_curl_impersonate
+./mad.sh:3938:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:3939: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:3941: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
 ./plugins/pjscloud.sh:44: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
 ./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
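Every entry in the audit diff above funnels through three wrappers in mad.sh (tor_curl_request, tor_curl_request_extended, tor_curl_upload) that route traffic over the Tor SOCKS proxy and optionally swap plain curl for curl-impersonate. A condensed, self-contained sketch of that dispatch, with the configuration collapsed to placeholder values:

    #!/usr/bin/env bash
    # Condensed sketch of mad.sh's tor_curl_request dispatch; the values below
    # are placeholders, only the branch structure mirrors the audited lines.
    UseTorCurlImpersonate=false
    TorIp=127.0.0.1; torPort=9050; tor_identity='madid'; ConnectTimeout=20
    curl_impersonate=(./curl_chrome131)   # normally filled by the curl_* binary scan

    tor_curl_request() {
      if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
        "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
          -4 --connect-timeout "${ConnectTimeout}" --compressed --globoff "$@"
      else
        curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
          -4 --connect-timeout "${ConnectTimeout}" --compressed --globoff "$@"
      fi
    }

    tor_curl_request --insecure -L -s 'https://example.com/'   # illustrative call

The socks5h:// scheme keeps DNS resolution inside Tor, and varying the tor_identity username presumably leans on Tor's SOCKS-auth isolation to obtain a separate circuit per identity string.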
@@ -1,4 +1,4 @@
-DateTime: 25.03.08
+DateTime: 25.03.30
 
 Files:
 ./hosts/1fichier.sh
@@ -6,6 +6,7 @@ Files:
 ./hosts/acid.sh
 ./hosts/anarchaserver.sh
 ./hosts/anonfile.sh
+./hosts/anonfileio.sh
 ./hosts/anonsharing.sh
 ./hosts/archived/nekofile.sh
 ./hosts/ateasystems.sh
@@ -79,10 +80,12 @@ Files:
 ./hosts/uploadev.sh
 ./hosts/uploadflix.sh
 ./hosts/uploadhive.sh
+./hosts/uploadscloud.sh
 ./hosts/up_1fichier.sh
 ./hosts/up_acid.sh
 ./hosts/up_anarchaserver.sh
 ./hosts/up_anonfile.sh
+./hosts/up_anonfileio.sh
 ./hosts/up_anonsharing.sh
 ./hosts/up_ateasystems.sh
 ./hosts/up_axfc.sh
@@ -168,6 +171,8 @@ _________________________________________________________________________
 ./hosts/anonfile.sh:230: if grep -Eqi 'img src="https://anonfile.de/captchas/' <<< "$response" ; then
 ./hosts/anonfile.sh:442: if grep -Eqi '<a class="stretched-link" href="https:' <<< "$response"; then
 ./hosts/anonfile.sh:514: anon_host=$(grep -oPi '(?<=https://).*(?=/)' <<< "$fixed_url")
+./hosts/anonfileio.sh:37: if ! grep -Eqi '^https://anonfile.io/api/download/' <<< "${pUrlMod}" ; then
+./hosts/anonfileio.sh:39: pUrlMod="https://anonfile.io/api/download/$filecode"
 ./hosts/anonsharing.sh:95: "https://anonsharing.com/account/ajax/file_details")
 ./hosts/anonsharing.sh:121: if grep -Eqi 'openUrl\('"'"'https:\\/\\/anonsharing.com\\/' <<< "$response"; then
 ./hosts/anonsharing.sh:124: download_url='https://anonsharing.com/'$(grep -oPi '(?<=openUrl\('"'"'https:\\/\\/anonsharing.com\\/).*?(?='"'"'.*$)' <<< "$response" | head -1)
@@ -264,6 +269,8 @@ _________________________________________________________________________
 ./hosts/up_anarchaserver.sh:40: jira_downloadLinkPrefix='https://transitional.anarchaserver.org/jirafeau/f.php?h='
 ./hosts/up_anonfile.sh:99: PostUrlHost='https://file-01.anonfile.de/cgi-bin/upload.cgi?upload_type=file&utype=anon'
 ./hosts/up_anonfile.sh:121: downloadLink="https://anonfile.de/$hash"
+./hosts/up_anonfileio.sh:99: PostUrlHost='https://anonfile.io/api/upload'
+./hosts/up_anonfileio.sh:113: downloadLink="https://anonfile.io/f/$hash"
 ./hosts/up_anonsharing.sh:99: PostUrlHost='https://anonsharing.com/ajax/file_upload_handler?r=anonsharing.com'
 ./hosts/up_anonsharing.sh:109: if grep -Eqi '"error":null,"url":"https:\\/\\/anonsharing.com\\/' <<< "${response}" ; then
 ./hosts/up_anonsharing.sh:112: downloadLink="https://anonsharing.com/fileid=${fileid}"
@@ -408,8 +415,9 @@ _________________________________________________________________________
 ./hosts/up_quax.sh:99: PostUrlHost='https://qu.ax/upload.php'
 ./hosts/up_ramsgaard.sh:37: jira_PostUrlHost='https://data.ramsgaard.me/script.php'
 ./hosts/up_ramsgaard.sh:40: jira_downloadLinkPrefix='https://data.ramsgaard.me/f.php?h='
-./hosts/up_ranoz.sh:99: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
-./hosts/up_ranoz.sh:138: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
+./hosts/up_ranoz.sh:101: PrePostUrl='https://finer-yeti-69.clerk.accounts.dev/v1/client/handshake?redirect_url=https%3A%2F%2Franoz.gg%2Fapi%2Fv1%2Ffiles%2Fupload_url&suffixed_cookies=false&__clerk_hs_reason=dev-browser-missing'
+./hosts/up_ranoz.sh:102: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
+./hosts/up_ranoz.sh:150: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
 ./hosts/up_sendnow.sh:101: response=$(tor_curl_request --insecure -L -s 'https://send.now/upload')
 ./hosts/up_sendnow.sh:115: elif grep -Eqi 'action="https://.*send\.now/cgi-bin/upload\.cgi\?upload_type\=' <<< "$response"; then
 ./hosts/up_sendnow.sh:131: local ar_HUP[0]="https://u7324.send.now/cgi-bin/upload.cgi?upload_type=file&utype=anon"
@@ -453,75 +461,75 @@ _________________________________________________________________________
 ./hosts/up_uwabaki.sh:111: if grep -Eqi 'File uploaded: <a href="https://files.uwabaki.party/' <<< "${response}" ; then
 ./hosts/up_uwabaki.sh:123: downloadLink="https://files.uwabaki.party${url}"
 ./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
-./mad.sh:721: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
-./mad.sh:723: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
-./mad.sh:726: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:728: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:749: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
-./mad.sh:751: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
-./mad.sh:754: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:756: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:777: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
-./mad.sh:779: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
+./mad.sh:692: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
+./mad.sh:694: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
+./mad.sh:697: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:699: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:720: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
+./mad.sh:722: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
+./mad.sh:725: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:727: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:748: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
+./mad.sh:750: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
+./mad.sh:753: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:755: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:777: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
+./mad.sh:779: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
+./mad.sh:782: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:784: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:806: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
-./mad.sh:808: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
-./mad.sh:811: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:813: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:837: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
-./mad.sh:839: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
-./mad.sh:842: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:844: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:870: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
-./mad.sh:872: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
-./mad.sh:892: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
-./mad.sh:913: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
-./mad.sh:915: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
-./mad.sh:918: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:920: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:936: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
-./mad.sh:938: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
-./mad.sh:941: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:943: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:962: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
-./mad.sh:964: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
-./mad.sh:967: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:969: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:989: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
-./mad.sh:991: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
-./mad.sh:994: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:996: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:1014: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
-./mad.sh:1016: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
-./mad.sh:1019: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:1021: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:1040: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
-./mad.sh:1042: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
-./mad.sh:1045: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:1047: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:1470: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
-./mad.sh:1487: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1593: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
-./mad.sh:1610: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1873: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
-./mad.sh:1901: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
-./mad.sh:1923: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
-./mad.sh:3247: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
-./mad.sh:3782:arg2="$2" # auto, filelist, <https://url>
-./mad.sh:3879: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
-./mad.sh:3881: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
-./mad.sh:3883: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
-./mad.sh:4102: if [[ ${remote_url} =~ ^http: ]] ; then
-./mad.sh:4103: remote_url=${remote_url/http:/https:}
-./mad.sh:4124: if [[ ${remote_url} =~ ^http: ]] ; then
-./mad.sh:4125: remote_url=${remote_url/http:/https:}
-./mad.sh:4491: if [[ ${remote_url} =~ ^http: ]] ; then
-./mad.sh:4492: remote_url=${remote_url/http:/https:}
-./mad.sh:4550: if [[ ${remote_url} =~ ^http: ]] ; then
-./mad.sh:4551: remote_url=${remote_url/http:/https:}
-./mad.sh:4577: if [[ ${remote_url} =~ ^http: ]] ; then
-./mad.sh:4578: remote_url=${remote_url/http:/https:}
+./mad.sh:808: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
+./mad.sh:810: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
+./mad.sh:813: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:815: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:841: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
+./mad.sh:843: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
+./mad.sh:863: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
+./mad.sh:884: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
+./mad.sh:886: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
+./mad.sh:889: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:891: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:907: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
+./mad.sh:909: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
+./mad.sh:912: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:914: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:933: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
+./mad.sh:935: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
+./mad.sh:938: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:940: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:960: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
+./mad.sh:962: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
+./mad.sh:965: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:967: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:985: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
+./mad.sh:987: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
+./mad.sh:990: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:992: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:1011: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
+./mad.sh:1013: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
+./mad.sh:1016: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
+./mad.sh:1018: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
+./mad.sh:1437: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
+./mad.sh:1454: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1560: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
+./mad.sh:1577: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1840: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
+./mad.sh:1868: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
+./mad.sh:1890: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
+./mad.sh:3214: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
+./mad.sh:3749:arg2="$2" # auto, filelist, <https://url>
+./mad.sh:3846: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
+./mad.sh:3848: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
+./mad.sh:3850: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
+./mad.sh:4069: if [[ ${remote_url} =~ ^http: ]] ; then
+./mad.sh:4070: remote_url=${remote_url/http:/https:}
+./mad.sh:4091: if [[ ${remote_url} =~ ^http: ]] ; then
+./mad.sh:4092: remote_url=${remote_url/http:/https:}
+./mad.sh:4458: if [[ ${remote_url} =~ ^http: ]] ; then
+./mad.sh:4459: remote_url=${remote_url/http:/https:}
+./mad.sh:4517: if [[ ${remote_url} =~ ^http: ]] ; then
+./mad.sh:4518: remote_url=${remote_url/http:/https:}
+./mad.sh:4544: if [[ ${remote_url} =~ ^http: ]] ; then
+./mad.sh:4545: remote_url=${remote_url/http:/https:}
 ./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
 ./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
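This second audit file tracks raw http/https usage; most of its churn is mad.sh's list bookkeeping, which comments a finished or failed URL out of the input file and appends a status tag. The idiom uses '>' as the sed delimiter (URLs are full of '/'), '#&' to re-emit the matched line commented out, and ${url/https:/http:} to keep matching an entry that was stored with http: before the script upgraded its in-memory copy. A throwaway sketch of that round trip (the file, URL, and filename are examples; only the sed and parameter-expansion idiom comes from the lines above):

    #!/usr/bin/env bash
    # Sketch of mad.sh's in-place URL bookkeeping; the list file, URL, and
    # filename are throwaway examples.
    set -euo pipefail
    InputFile="$(mktemp)"
    printf '%s\n' 'http://example.com/f/abc123' > "$InputFile"   # stored http form

    url='http://example.com/f/abc123'
    if [[ ${url} =~ ^http: ]]; then        # in-memory scheme upgrade, as in
      url=${url/http:/https:}              # the remote_url lines above
    fi
    filename='archive.part1.rar'

    # '>' is the delimiter; '#&' rewrites the matched line as '#<old line>',
    # with the status tag appended. The downgrade matches the line as stored.
    sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "$InputFile"
    cat "$InputFile"   # -> #http://example.com/f/abc123 #OK# archive.part1.rar

mad.sh repeats the same substitution for direct=-prefixed entries and for the other status tags (#RETRY#, #FAIL#, #REMOVED#, #PASSWORD#, #BAD-URL#); GNU sed is assumed for -i without a backup suffix.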
@@ -1,4 +1,4 @@
-DateTime: 25.03.08
+DateTime: 25.03.30
 
 Files:
 ./hosts/1fichier.sh
@@ -6,6 +6,7 @@ Files:
 ./hosts/acid.sh
 ./hosts/anarchaserver.sh
 ./hosts/anonfile.sh
+./hosts/anonfileio.sh
 ./hosts/anonsharing.sh
 ./hosts/archived/nekofile.sh
 ./hosts/ateasystems.sh
@@ -79,10 +80,12 @@ Files:
 ./hosts/uploadev.sh
 ./hosts/uploadflix.sh
 ./hosts/uploadhive.sh
+./hosts/uploadscloud.sh
 ./hosts/up_1fichier.sh
 ./hosts/up_acid.sh
 ./hosts/up_anarchaserver.sh
 ./hosts/up_anonfile.sh
+./hosts/up_anonfileio.sh
 ./hosts/up_anonsharing.sh
 ./hosts/up_ateasystems.sh
 ./hosts/up_axfc.sh
@@ -199,29 +202,29 @@ _________________________________________________________________________
 ./hosts/1fichier.sh:203: if [[ -z "$file_header" ]] || [[ -z "$file_size_bytes" ]]; then
 ./hosts/1fichier.sh:204: continue
 --
-./hosts/1fichier.sh:261: tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}"
-./hosts/1fichier.sh:262: rm -f "$flockDownload";
-./hosts/1fichier.sh:263: received_file_size=0
-./hosts/1fichier.sh:264: if [[ -f "$file_path" ]] ; then
-./hosts/1fichier.sh:265: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
-./hosts/1fichier.sh:266: fi
-./hosts/1fichier.sh:267: if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
-./hosts/1fichier.sh:268: echo -e "${RED}ERROR: Size mismatch after downloading${NC}\nPerhaps you or 1fichier lost connection for a while?"
-./hosts/1fichier.sh:269: if [[ "${finalAttempt}" == "true" ]] ; then
-./hosts/1fichier.sh:270: droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
-./hosts/1fichier.sh:271: fi
+./hosts/1fichier.sh:262: tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}"
+./hosts/1fichier.sh:263: rm -f "$flockDownload";
+./hosts/1fichier.sh:264: received_file_size=0
+./hosts/1fichier.sh:265: if [[ -f "$file_path" ]] ; then
+./hosts/1fichier.sh:266: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+./hosts/1fichier.sh:267: fi
+./hosts/1fichier.sh:268: if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
+./hosts/1fichier.sh:269: echo -e "${RED}ERROR: Size mismatch after downloading${NC}\nPerhaps you or 1fichier lost connection for a while?"
+./hosts/1fichier.sh:270: if [[ "${finalAttempt}" == "true" ]] ; then
+./hosts/1fichier.sh:271: droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+./hosts/1fichier.sh:272: fi
 --
-./hosts/1fichier.sh:354: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -c "${fich_cookie_jar}" -s "${remote_url}")
-./hosts/1fichier.sh:355: if [[ -z ${PAGE} ]]; then
-./hosts/1fichier.sh:356: rm -f "${fich_cookie_jar}"
-./hosts/1fichier.sh:357: continue
-./hosts/1fichier.sh:358: fi
-./hosts/1fichier.sh:359: if grep -Eqi '<span style="color:red">Warning !</span>|<span style="color:red">Attention !</span>' <<< "${PAGE}"; then
-./hosts/1fichier.sh:360: rm -f "${fich_cookie_jar}"
-./hosts/1fichier.sh:361: continue
-./hosts/1fichier.sh:362: else
-./hosts/1fichier.sh:363: fich_adz_parameter=$(grep -oPi 'name="adz" value="\K[^"]+' <<< "${PAGE}")
-./hosts/1fichier.sh:364: if [[ $fich_adz_parameter ]]; then
+./hosts/1fichier.sh:355: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -c "${fich_cookie_jar}" -s "${remote_url}")
+./hosts/1fichier.sh:356: if [[ -z ${PAGE} ]]; then
+./hosts/1fichier.sh:357: rm -f "${fich_cookie_jar}"
+./hosts/1fichier.sh:358: continue
+./hosts/1fichier.sh:359: fi
+./hosts/1fichier.sh:360: if grep -Eqi '<span style="color:red">Warning !</span>|<span style="color:red">Attention !</span>' <<< "${PAGE}"; then
+./hosts/1fichier.sh:361: rm -f "${fich_cookie_jar}"
+./hosts/1fichier.sh:362: continue
+./hosts/1fichier.sh:363: else
+./hosts/1fichier.sh:364: fich_adz_parameter=$(grep -oPi 'name="adz" value="\K[^"]+' <<< "${PAGE}")
+./hosts/1fichier.sh:365: if [[ $fich_adz_parameter ]]; then
 --
 ./hosts/9saves.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url")
 ./hosts/9saves.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then
@@ -1935,62 +1938,62 @@ _________________________________________________________________________
./hosts/quax.sh:187: containsHtml=true
./hosts/quax.sh:188: fi
--
./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/ranoz.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/ranoz.sh:92: debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}"
./hosts/ranoz.sh:93: fi
./hosts/ranoz.sh:94: if [[ -z $response ]] ; then
./hosts/ranoz.sh:95: if [[ $i == $maxfetchretries ]] ; then
./hosts/ranoz.sh:96: printf "\\n"
./hosts/ranoz.sh:97: echo -e "${RED}| Failed to extract download url [1]${NC}"
./hosts/ranoz.sh:98: warnAndRetryUnknownError=true
./hosts/ranoz.sh:99: if [[ "${finalAttempt}" == "true" ]] ; then
./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" ""
./hosts/ranoz.sh:92: response=$(tor_curl_request --insecure -L -i -s \
./hosts/ranoz.sh:93: -b "${rz_cookie_jar}" -c "${rz_cookie_jar}" \
./hosts/ranoz.sh:94: "$remote_url")
./hosts/ranoz.sh:95: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/ranoz.sh:96: debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}"
./hosts/ranoz.sh:97: fi
./hosts/ranoz.sh:98: if [[ -z $response ]] ; then
./hosts/ranoz.sh:99: if [[ $i == $maxfetchretries ]] ; then
./hosts/ranoz.sh:100: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:101: printf "\\n"
./hosts/ranoz.sh:102: echo -e "${RED}| Failed to extract download url [1]${NC}"
--
./hosts/ranoz.sh:160: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:161: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/ranoz.sh:162: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:163: fi
./hosts/ranoz.sh:164: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:165: if [[ $j == $maxfetchretries ]] ; then
./hosts/ranoz.sh:166: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:167: printf "\\n"
./hosts/ranoz.sh:168: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:169: warnAndRetryUnknownError=true
./hosts/ranoz.sh:170: if [[ "${finalAttempt}" == "true" ]] ; then
./hosts/ranoz.sh:168: file_header=$(tor_curl_request --insecure --head -L -i -s \
./hosts/ranoz.sh:169: -b "${rz_cookie_jar}" -c "${rz_cookie_jar}" \
./hosts/ranoz.sh:170: "$download_url")
./hosts/ranoz.sh:171: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/ranoz.sh:172: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:173: fi
./hosts/ranoz.sh:174: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:175: if [[ $j == $maxfetchretries ]] ; then
./hosts/ranoz.sh:176: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:177: printf "\\n"
./hosts/ranoz.sh:178: echo -e "${RED}| Failed to extract file info${NC}"
--
./hosts/ranoz.sh:272: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:273: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:274: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:275: else
./hosts/ranoz.sh:276: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:277: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:278: fi
./hosts/ranoz.sh:279: else
./hosts/ranoz.sh:280: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:282: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:283: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:284: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:285: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:286: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:287: -H "Connection: keep-alive" \
./hosts/ranoz.sh:288: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:289: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:290: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:291: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:283: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:284: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:285: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:286: else
./hosts/ranoz.sh:287: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:288: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:289: fi
./hosts/ranoz.sh:290: else
./hosts/ranoz.sh:291: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./hosts/ranoz.sh:292: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:293: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:294: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:295: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:296: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:297: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:298: -H "Connection: keep-alive" \
./hosts/ranoz.sh:299: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:300: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:301: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:302: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:297: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:298: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:299: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:300: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:301: -H "Connection: keep-alive" \
./hosts/ranoz.sh:302: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:303: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:304: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:305: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:306: -H "Sec-Fetch-Site: same-origin" \
./hosts/ranoz.sh:307: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:308: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:309: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:310: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:311: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:312: -H "Connection: keep-alive" \
./hosts/ranoz.sh:313: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:314: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:315: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:316: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:317: -H "Sec-Fetch-Site: same-origin" \
--
./hosts/sendnow.sh:89: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
./hosts/sendnow.sh:90: if [[ "${DebugAllEnabled}" == "true" ]] ; then
@@ -2276,7 +2279,7 @@ _________________________________________________________________________
./hosts/tempsh.sh:260: -H "Sec-Fetch-User: ?1" \
--
./hosts/torup.sh:92: response=$(tor_curl_request --insecure -L -s \
./hosts/torup.sh:93: -c "${fdot_cookie_jar}" \
./hosts/torup.sh:93: -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
./hosts/torup.sh:94: "$fixed_url")
./hosts/torup.sh:95: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/torup.sh:96: debugHtml "${remote_url##*/}" "torp_fetch$i" "${response}"
@@ -2287,42 +2290,44 @@ _________________________________________________________________________
./hosts/torup.sh:101: printf "\\n"
./hosts/torup.sh:102: echo -e "${RED}| Failed to extract download url [1]${NC}"
--
./hosts/torup.sh:193: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:194: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/torup.sh:195: -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
./hosts/torup.sh:196: -H "Referer: $fixed_url" \
./hosts/torup.sh:197: "$download_url" --output "$file_path"
./hosts/torup.sh:198: else
./hosts/torup.sh:199: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:200: -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
./hosts/torup.sh:201: -H "Referer: $fixed_url" \
./hosts/torup.sh:202: "$download_url" --output "$file_path"
./hosts/torup.sh:203: fi
./hosts/torup.sh:204: else
./hosts/torup.sh:205: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./hosts/torup.sh:206: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:207: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/torup.sh:208: -H "User-Agent: $RandomUA" \
./hosts/torup.sh:209: -H "Referer: $fixed_url" \
./hosts/torup.sh:210: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/torup.sh:211: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/torup.sh:212: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/torup.sh:213: -H "Connection: keep-alive" \
./hosts/torup.sh:214: -H "Upgrade-Insecure-Requests: 1" \
./hosts/torup.sh:215: -H "Sec-Fetch-Dest: document" \
./hosts/torup.sh:216: -H "Sec-Fetch-Mode: navigate" \
./hosts/torup.sh:194: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:195: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/torup.sh:196: -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
./hosts/torup.sh:197: -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
./hosts/torup.sh:198: -H "Referer: $fixed_url" \
./hosts/torup.sh:199: "$download_url" --output "$file_path"
./hosts/torup.sh:200: else
./hosts/torup.sh:201: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:202: -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
./hosts/torup.sh:203: -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
./hosts/torup.sh:204: -H "Referer: $fixed_url" \
./hosts/torup.sh:205: "$download_url" --output "$file_path"
./hosts/torup.sh:206: fi
./hosts/torup.sh:207: else
./hosts/torup.sh:208: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./hosts/torup.sh:209: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:210: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/torup.sh:211: -H "User-Agent: $RandomUA" \
./hosts/torup.sh:212: -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
./hosts/torup.sh:213: -H "Referer: $fixed_url" \
./hosts/torup.sh:214: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/torup.sh:215: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/torup.sh:216: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/torup.sh:217: -H "Connection: keep-alive" \
./hosts/torup.sh:218: -H "Upgrade-Insecure-Requests: 1" \
./hosts/torup.sh:219: -H "Sec-Fetch-Dest: document" \
--
./hosts/torup.sh:222: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:223: -H "User-Agent: $RandomUA" \
./hosts/torup.sh:224: -H "Referer: $fixed_url" \
./hosts/torup.sh:225: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/torup.sh:226: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/torup.sh:227: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/torup.sh:228: -H "Connection: keep-alive" \
./hosts/torup.sh:229: -H "Upgrade-Insecure-Requests: 1" \
./hosts/torup.sh:230: -H "Sec-Fetch-Dest: document" \
./hosts/torup.sh:231: -H "Sec-Fetch-Mode: navigate" \
./hosts/torup.sh:232: -H "Sec-Fetch-Site: same-origin" \
./hosts/torup.sh:226: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:227: -H "User-Agent: $RandomUA" \
./hosts/torup.sh:228: -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
./hosts/torup.sh:229: -H "Referer: $fixed_url" \
./hosts/torup.sh:230: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/torup.sh:231: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/torup.sh:232: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/torup.sh:233: -H "Connection: keep-alive" \
./hosts/torup.sh:234: -H "Upgrade-Insecure-Requests: 1" \
./hosts/torup.sh:235: -H "Sec-Fetch-Dest: document" \
./hosts/torup.sh:236: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/up2share.sh:91: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
./hosts/up2share.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \
@@ -2620,6 +2625,56 @@ _________________________________________________________________________
./hosts/uploadhive.sh:260: containsHtml=true
./hosts/uploadhive.sh:261: fi
--
./hosts/uploadscloud.sh:90: response=$(tor_curl_request --insecure -L -s -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" "$remote_url")
./hosts/uploadscloud.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/uploadscloud.sh:92: debugHtml "${remote_url##*/}" "upsc_dwnpage$i" "${response}"
./hosts/uploadscloud.sh:93: fi
./hosts/uploadscloud.sh:94: if [[ -z $response ]] ; then
./hosts/uploadscloud.sh:95: rm -f "${upsc_cookie_jar}";
./hosts/uploadscloud.sh:96: if [[ $i == $maxfetchretries ]] ; then
./hosts/uploadscloud.sh:97: printf "\\n"
./hosts/uploadscloud.sh:98: echo -e "${RED}| Failed to extract download link.${NC}"
./hosts/uploadscloud.sh:99: warnAndRetryUnknownError=true
./hosts/uploadscloud.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then
--
./hosts/uploadscloud.sh:142: response=$(tor_curl_request --insecure -svo. -X POST \
./hosts/uploadscloud.sh:143: -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" \
./hosts/uploadscloud.sh:144: --data-raw "$form_data" "$remote_url" 2>&1)
./hosts/uploadscloud.sh:145: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/uploadscloud.sh:146: debugHtml "${remote_url##*/}" "upsc_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/uploadscloud.sh:147: fi
./hosts/uploadscloud.sh:148: if [[ -z $response ]] ; then
./hosts/uploadscloud.sh:149: echo -e "${RED}| Failed to extract download link [1]${NC}"
./hosts/uploadscloud.sh:150: warnAndRetryUnknownError=true
./hosts/uploadscloud.sh:151: if [[ "${finalAttempt}" == "true" ]] ; then
./hosts/uploadscloud.sh:152: rm -f "${upsc_cookie_jar}";
--
./hosts/uploadscloud.sh:183: file_header=$(tor_curl_request --insecure --head -L -s -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" --referer "$remote_url" "$download_url")
./hosts/uploadscloud.sh:184: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/uploadscloud.sh:185: debugHtml "${remote_url##*/}" "upsc_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/uploadscloud.sh:186: fi
./hosts/uploadscloud.sh:187: if [[ -z $file_header ]] ; then
./hosts/uploadscloud.sh:188: if [[ $j == $maxfetchretries ]] ; then
./hosts/uploadscloud.sh:189: rm -f "${upsc_cookie_jar}";
./hosts/uploadscloud.sh:190: printf "\\n"
./hosts/uploadscloud.sh:191: echo -e "${RED}| Failed to extract file info.${NC}"
./hosts/uploadscloud.sh:192: warnAndRetryUnknownError=true
./hosts/uploadscloud.sh:193: if [[ "${finalAttempt}" == "true" ]] ; then
--
./hosts/uploadscloud.sh:306: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/uploadscloud.sh:307: else
./hosts/uploadscloud.sh:308: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
./hosts/uploadscloud.sh:309: fi
./hosts/uploadscloud.sh:310: received_file_size=0
./hosts/uploadscloud.sh:311: if [[ -f "$file_path" ]] ; then
./hosts/uploadscloud.sh:312: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/uploadscloud.sh:313: fi
./hosts/uploadscloud.sh:314: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./hosts/uploadscloud.sh:315: containsHtml=false
./hosts/uploadscloud.sh:316: else
./hosts/uploadscloud.sh:317: containsHtml=true
./hosts/uploadscloud.sh:318: fi
--
./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/")
./hosts/up_1fichier.sh:108: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up_1fichier.sh:109: debugHtml "${filepath##*/}" "${_hostCode}_up_getid_$i" "url: https://1fichier.com/"$'\n'"${response}"
@@ -2656,6 +2711,18 @@ _________________________________________________________________________
./hosts/up_anonfile.sh:111: -F "upload=Start upload" \
./hosts/up_anonfile.sh:112: -F "keepalive=1" \
--
./hosts/up_anonfileio.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_anonfileio.sh:103: -H "Content-Type: multipart/form-data" \
./hosts/up_anonfileio.sh:104: -F "keepalive=1" \
./hosts/up_anonfileio.sh:105: -F "file=@${filepath}" \
./hosts/up_anonfileio.sh:106: "${PostUrlHost}")
./hosts/up_anonfileio.sh:107: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up_anonfileio.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_anonfileio.sh:109: fi
./hosts/up_anonfileio.sh:110: if grep -Eqi '"success":true,"code":"' <<< "${response}" ; then
./hosts/up_anonfileio.sh:111: hash=$(grep -oPi -m 1 '(?<="code":").*?(?=".*$)' <<< "$response")
./hosts/up_anonfileio.sh:112: filesize=$(GetFileSize "$filepath" "false")
--
./hosts/up_anonsharing.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_anonsharing.sh:103: -H "Content-Type: multipart/form-data" \
./hosts/up_anonsharing.sh:104: -F "files[]=@${arrFiles[@]}" \
@@ -3052,29 +3119,36 @@ _________________________________________________________________________
./hosts/up_quax.sh:111: url=$(grep -oPi '(?<="url": ").*?(?=".*$)' <<< "$response")
./hosts/up_quax.sh:112: filesize=$(GetFileSize "$filepath" "false")
--
./hosts/up_ranoz.sh:129: response=$(tor_curl_upload --insecure -L -i -s \
./hosts/up_ranoz.sh:130: "$PostUrlHost" \
./hosts/up_ranoz.sh:131: -H "Content-Type: application/json" \
./hosts/up_ranoz.sh:132: -d "{ \
./hosts/up_ranoz.sh:133: \"filename\": \"$tmpfilename\", \
./hosts/up_ranoz.sh:134: \"size\": $fsize}")
./hosts/up_ranoz.sh:135: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up_ranoz.sh:136: debugHtml "${filepath##*/}" "${_hostCode}_ticket" "post_url: ${PostUrlHost}"$'\n'"data: ${filepath}, ${fsize}"$'\n'"${response}"
./hosts/up_ranoz.sh:137: fi
./hosts/up_ranoz.sh:138: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_ranoz.sh:139: PostUrlHost=$(grep -oPi '(?<="upload_url":").*?(?=".*$)' <<< "$response")
./hosts/up_ranoz.sh:133: response=$(tor_curl_upload --insecure -L -i -s \
./hosts/up_ranoz.sh:134: -b "${up_rz_cookie_jar}" -c "${up_rz_cookie_jar}" \
./hosts/up_ranoz.sh:135: "$PrePostUrl")
./hosts/up_ranoz.sh:136: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up_ranoz.sh:137: debugHtml "${filepath##*/}" "${_hostCode}_prepost" "prepost_url: ${PrePostUrl}"$'\n'"${response}"
./hosts/up_ranoz.sh:138: fi
./hosts/up_ranoz.sh:139: trap "rm -f ${UploadTicket}; rm -f ${up_rz_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
./hosts/up_ranoz.sh:140: response=$(tor_curl_upload --insecure -Lis \
./hosts/up_ranoz.sh:141: "$PostUrlHost" \
./hosts/up_ranoz.sh:142: -b "${up_rz_cookie_jar}" -c "${up_rz_cookie_jar}" \
./hosts/up_ranoz.sh:143: -H "Content-Type: application/json" \
./hosts/up_ranoz.sh:144: -d "{ \
./hosts/up_ranoz.sh:145: \"filename\": \"$tmpfilename\", \
./hosts/up_ranoz.sh:146: \"size\": $fsize}")
./hosts/up_ranoz.sh:147: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up_ranoz.sh:148: debugHtml "${filepath##*/}" "${_hostCode}_ticket" "post_url: ${PostUrlHost}"$'\n'"data: ${filepath}, ${fsize}"$'\n'"${response}"
./hosts/up_ranoz.sh:149: fi
./hosts/up_ranoz.sh:150: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
--
./hosts/up_ranoz.sh:156: response=$(tor_curl_upload --insecure -i -X PUT \
./hosts/up_ranoz.sh:157: "${PostUrlHost}" \
./hosts/up_ranoz.sh:158: --upload-file "$filepath" \
./hosts/up_ranoz.sh:159: -H "Content-Length: $fsize")
./hosts/up_ranoz.sh:160: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up_ranoz.sh:161: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_ranoz.sh:162: fi
./hosts/up_ranoz.sh:163: if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then
./hosts/up_ranoz.sh:164: filesize=$(GetFileSize "$filepath" "false")
./hosts/up_ranoz.sh:165: echo -e "${GREEN}| Upload Success${NC}"
./hosts/up_ranoz.sh:166: echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
./hosts/up_ranoz.sh:169: response=$(tor_curl_upload --insecure -i -X PUT \
./hosts/up_ranoz.sh:170: "${PostUrlHost}" \
./hosts/up_ranoz.sh:171: --upload-file "$filepath" \
./hosts/up_ranoz.sh:172: -b "${up_rz_cookie_jar}" -c "${up_rz_cookie_jar}" \
./hosts/up_ranoz.sh:173: -H "Content-Length: $fsize")
./hosts/up_ranoz.sh:174: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up_ranoz.sh:175: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
./hosts/up_ranoz.sh:176: fi
./hosts/up_ranoz.sh:177: rm -f ${up_rz_cookie_jar};
./hosts/up_ranoz.sh:178: if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then
./hosts/up_ranoz.sh:179: filesize=$(GetFileSize "$filepath" "false")
--
./hosts/up_sendnow.sh:101: response=$(tor_curl_request --insecure -L -s 'https://send.now/upload')
./hosts/up_sendnow.sh:102: if [[ "${DebugAllEnabled}" == "true" ]] ; then
@@ -3366,235 +3440,235 @@ _________________________________________________________________________
./hosts/youdbox.sh:287: containsHtml=true
./hosts/youdbox.sh:288: fi
--
./mad.sh:397:tor_curl_request() {
./mad.sh:398: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
./mad.sh:399: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:400: else
./mad.sh:401: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:402: fi
./mad.sh:403:}
./mad.sh:404:tor_curl_request_extended() {
./mad.sh:405: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:406: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:408: else
./mad.sh:409: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:410: fi
./mad.sh:411:}
./mad.sh:412:tor_curl_upload() {
./mad.sh:413: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
./mad.sh:414: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:416: else
./mad.sh:417: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:418: fi
./mad.sh:419: else
./mad.sh:420: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./mad.sh:421: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:422: else
./mad.sh:367:tor_curl_request() {
./mad.sh:368: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
./mad.sh:369: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:370: else
./mad.sh:371: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:372: fi
./mad.sh:373:}
./mad.sh:374:tor_curl_request_extended() {
./mad.sh:375: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:376: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
./mad.sh:377: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:378: else
./mad.sh:379: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:380: fi
./mad.sh:381:}
./mad.sh:382:tor_curl_upload() {
./mad.sh:383: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
./mad.sh:384: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./mad.sh:385: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:386: else
./mad.sh:387: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
./mad.sh:388: fi
./mad.sh:389: else
./mad.sh:390: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./mad.sh:391: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --expect100-timeout 10 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:392: else
--
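The tor_curl_request / tor_curl_upload wrappers in the block above are the single path every host script goes through instead of calling bare curl. As a minimal usage sketch, assuming mad.sh has been sourced so tor_identity, TorIp, torPort and ConnectTimeout are set (the url and cookie-jar path below are placeholders, not from any host script):

# Sketch only: fetch a page through the Tor wrapper the way the host scripts do.
cookie_jar="${WorkDir}/.temp/example_cookie_jar"   # placeholder jar path
response=$(tor_curl_request --insecure -L -s -b "${cookie_jar}" -c "${cookie_jar}" "https://example.com/f/abc123")
if [[ -z "$response" ]]; then
  tor_identity="${RANDOM}"   # rotate to a fresh Tor circuit before retrying
fi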
./mad.sh:1470: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1471: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1472: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1473: fi
./mad.sh:1474: if [[ ! -z "$response" ]]; then
./mad.sh:1475: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1476: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1477: break
./mad.sh:1478: fi
./mad.sh:1479: done
./mad.sh:1480: if [[ -z $latestTag ]]; then
./mad.sh:1437: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1438: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1439: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1440: fi
./mad.sh:1441: if [[ ! -z "$response" ]]; then
./mad.sh:1442: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1443: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1444: break
./mad.sh:1445: fi
./mad.sh:1446: done
./mad.sh:1447: if [[ -z $latestTag ]]; then
--
./mad.sh:1490: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1491: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1492: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1493: fi
./mad.sh:1494: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1495: if ((j == 8)) ; then
./mad.sh:1496: return 1
./mad.sh:1497: else
./mad.sh:1498: continue
./mad.sh:1499: fi
./mad.sh:1500: fi
./mad.sh:1457: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1458: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1459: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1460: fi
./mad.sh:1461: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1462: if ((j == 8)) ; then
./mad.sh:1463: return 1
./mad.sh:1464: else
./mad.sh:1465: continue
./mad.sh:1466: fi
./mad.sh:1467: fi
--
./mad.sh:1540: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1541: received_file_size=0
./mad.sh:1542: if [[ -f "$file_path" ]] ; then
./mad.sh:1543: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1544: fi
./mad.sh:1545: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1546: break
./mad.sh:1547: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1548: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1549: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1550: exit 1
./mad.sh:1507: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1508: received_file_size=0
./mad.sh:1509: if [[ -f "$file_path" ]] ; then
./mad.sh:1510: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1511: fi
./mad.sh:1512: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1513: break
./mad.sh:1514: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1515: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1516: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1517: exit 1
--
./mad.sh:1593: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1594: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1595: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1596: fi
./mad.sh:1597: if [[ ! -z "$response" ]]; then
./mad.sh:1598: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1599: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1600: break
./mad.sh:1601: fi
./mad.sh:1602: done
./mad.sh:1603: if [[ -z $latestTag ]]; then
./mad.sh:1560: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1561: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1562: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1563: fi
./mad.sh:1564: if [[ ! -z "$response" ]]; then
./mad.sh:1565: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1566: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1567: break
./mad.sh:1568: fi
./mad.sh:1569: done
./mad.sh:1570: if [[ -z $latestTag ]]; then
--
./mad.sh:1613: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1614: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1615: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1616: fi
./mad.sh:1617: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1618: if ((j == 8)) ; then
./mad.sh:1619: return 1
./mad.sh:1620: else
./mad.sh:1621: continue
./mad.sh:1622: fi
./mad.sh:1623: fi
./mad.sh:1580: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1581: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:1582: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1583: fi
./mad.sh:1584: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1585: if ((j == 8)) ; then
./mad.sh:1586: return 1
./mad.sh:1587: else
./mad.sh:1588: continue
./mad.sh:1589: fi
./mad.sh:1590: fi
--
./mad.sh:1663: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1664: received_file_size=0
./mad.sh:1665: if [[ -f "$file_path" ]] ; then
./mad.sh:1666: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1667: fi
./mad.sh:1668: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1669: break
./mad.sh:1670: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1671: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1672: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1673: exit 1
./mad.sh:1630: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1631: received_file_size=0
./mad.sh:1632: if [[ -f "$file_path" ]] ; then
./mad.sh:1633: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1634: fi
./mad.sh:1635: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1636: break
./mad.sh:1637: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1638: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1639: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1640: exit 1
--
./mad.sh:1868: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1869: echo -e "Files:"
./mad.sh:1870: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1835: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1836: echo -e "Files:"
./mad.sh:1837: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1838: echo -e ""
./mad.sh:1839: echo -e ""
./mad.sh:1840: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1841: echo -e "_________________________________________________________________________"
./mad.sh:1842: echo -e "$maud_http"
./mad.sh:1843: echo -e ""
./mad.sh:1844: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1845: echo -e "_________________________________________________________________________"
--
./mad.sh:1848: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1849: echo -e "_________________________________________________________________________"
./mad.sh:1850: echo -e "$maud_torcurl"
./mad.sh:1851: echo -e ""
./mad.sh:1852: echo -e ""
./mad.sh:1853: done
./mad.sh:1854: else
./mad.sh:1855: cd "$ScriptDir"
./mad.sh:1856: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1857: cd "$WorkDir"
./mad.sh:1858: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
--
./mad.sh:1863: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1864: echo -e "Files:"
./mad.sh:1865: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1866: echo -e ""
./mad.sh:1867: echo -e ""
./mad.sh:1868: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1869: echo -e "_________________________________________________________________________"
./mad.sh:1870: echo -e "$maud_http"
./mad.sh:1871: echo -e ""
./mad.sh:1872: echo -e ""
./mad.sh:1873: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1874: echo -e "_________________________________________________________________________"
./mad.sh:1875: echo -e "$maud_http"
./mad.sh:1876: echo -e ""
./mad.sh:1877: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1878: echo -e "_________________________________________________________________________"
./mad.sh:1872: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1873: echo -e "_________________________________________________________________________"
--
./mad.sh:1881: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1882: echo -e "_________________________________________________________________________"
./mad.sh:1883: echo -e "$maud_torcurl"
./mad.sh:1884: echo -e ""
./mad.sh:1885: echo -e ""
./mad.sh:1886: done
./mad.sh:1887: else
./mad.sh:1888: cd "$ScriptDir"
./mad.sh:1889: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1890: cd "$WorkDir"
./mad.sh:1891: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1876: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1877: echo -e "_________________________________________________________________________"
./mad.sh:1878: echo -e "$maud_torcurl"
./mad.sh:1879: echo -e ""
./mad.sh:1880: done
./mad.sh:1881: for fil in "${arrFiles2[@]}";
./mad.sh:1882: do
./mad.sh:1883: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1884: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1885: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1886: echo -e "Files:"
./mad.sh:1887: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1888: echo -e ""
./mad.sh:1889: echo -e ""
./mad.sh:1890: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1891: echo -e "_________________________________________________________________________"
./mad.sh:1892: echo -e "$maud_http"
./mad.sh:1893: echo -e ""
./mad.sh:1894: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1895: echo -e "_________________________________________________________________________"
--
./mad.sh:1896: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1897: echo -e "Files:"
./mad.sh:1898: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1899: echo -e ""
./mad.sh:1900: echo -e ""
./mad.sh:1901: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1902: echo -e "_________________________________________________________________________"
./mad.sh:1903: echo -e "$maud_http"
./mad.sh:1904: echo -e ""
./mad.sh:1905: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1906: echo -e "_________________________________________________________________________"
./mad.sh:1898: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1899: echo -e "_________________________________________________________________________"
./mad.sh:1900: echo -e "$maud_torcurl"
./mad.sh:1901: echo -e ""
./mad.sh:1902: done
./mad.sh:1903: fi
./mad.sh:1904:}
./mad.sh:1905:madStatus() {
./mad.sh:1906: local InputFile="$1"
./mad.sh:1907: if [[ "$arg1" == "status" ]] ; then
./mad.sh:1908: clear
--
./mad.sh:1909: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1910: echo -e "_________________________________________________________________________"
./mad.sh:1911: echo -e "$maud_torcurl"
./mad.sh:1912: echo -e ""
./mad.sh:1913: done
./mad.sh:1914: for fil in "${arrFiles2[@]}";
./mad.sh:1915: do
./mad.sh:1916: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1917: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1918: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1919: echo -e "Files:"
./mad.sh:1920: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1921: echo -e ""
./mad.sh:1922: echo -e ""
./mad.sh:1923: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1924: echo -e "_________________________________________________________________________"
./mad.sh:1925: echo -e "$maud_http"
./mad.sh:1926: echo -e ""
./mad.sh:1927: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1928: echo -e "_________________________________________________________________________"
./mad.sh:3231: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3232: -H "Connection: keep-alive" \
./mad.sh:3233: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3234: "$download_url")
./mad.sh:3235: else
./mad.sh:3236: printf "| Retrieving Head: attempt #$j"
./mad.sh:3237: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3238: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3239: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3240: sleep 6
./mad.sh:3241: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3242: kill $! 2>/dev/null
./mad.sh:3243: )
./mad.sh:3244: if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then
./mad.sh:3245: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3246: fi
./mad.sh:3247: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3248: fi
--
./mad.sh:1931: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1932: echo -e "_________________________________________________________________________"
./mad.sh:1933: echo -e "$maud_torcurl"
./mad.sh:1934: echo -e ""
./mad.sh:1935: done
./mad.sh:1936: fi
./mad.sh:1937:}
./mad.sh:1938:madStatus() {
./mad.sh:1939: local InputFile="$1"
./mad.sh:1940: if [[ "$arg1" == "status" ]] ; then
./mad.sh:1941: clear
./mad.sh:3375: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3376: rc=$?
./mad.sh:3377: if ((rc != 0 )) ; then
./mad.sh:3378: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3379: if [[ -f ${file_path} ]]; then
./mad.sh:3380: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3381: printf "\n\n"
./mad.sh:3382: rm -f "${file_path}"
./mad.sh:3383: else
./mad.sh:3384: printf "\n\n"
./mad.sh:3385: fi
--
./mad.sh:3264: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3265: -H "Connection: keep-alive" \
./mad.sh:3266: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3267: "$download_url")
./mad.sh:3268: else
./mad.sh:3269: printf "| Retrieving Head: attempt #$j"
./mad.sh:3270: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3271: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3272: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3273: sleep 6
./mad.sh:3274: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3275: kill $! 2>/dev/null
./mad.sh:3276: )
./mad.sh:3277: if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then
./mad.sh:3278: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3279: fi
./mad.sh:3280: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3281: fi
./mad.sh:3432: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3433: else
./mad.sh:3434: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3435: fi
./mad.sh:3436: received_file_size=0
./mad.sh:3437: if [[ -f "$file_path" ]] ; then
./mad.sh:3438: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3439: fi
./mad.sh:3440: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./mad.sh:3441: containsHtml=false
./mad.sh:3442: else
./mad.sh:3443: containsHtml=true
./mad.sh:3444: fi
--
./mad.sh:3408: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3409: rc=$?
./mad.sh:3410: if ((rc != 0 )) ; then
./mad.sh:3411: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3412: if [[ -f ${file_path} ]]; then
./mad.sh:3413: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3414: printf "\n\n"
./mad.sh:3415: rm -f "${file_path}"
./mad.sh:3416: else
./mad.sh:3417: printf "\n\n"
./mad.sh:3418: fi
--
./mad.sh:3465: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3466: else
./mad.sh:3467: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3468: fi
./mad.sh:3469: received_file_size=0
./mad.sh:3470: if [[ -f "$file_path" ]] ; then
./mad.sh:3471: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3472: fi
./mad.sh:3473: if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
./mad.sh:3474: containsHtml=false
./mad.sh:3475: else
./mad.sh:3476: containsHtml=true
./mad.sh:3477: fi
--
./mad.sh:3665: response=$(tor_curl_upload --insecure -i \
./mad.sh:3666: -H "Content-Type: multipart/form-data" \
./mad.sh:3667: -F "key=" \
./mad.sh:3668: -F "time=$jira_timeval" \
./mad.sh:3669: -F "file=@${filepath}" \
./mad.sh:3670: "${jira_PostUrlHost}")
./mad.sh:3671: else
./mad.sh:3672: response=$(tor_curl_upload --insecure -i \
./mad.sh:3673: -H "Content-Type: multipart/form-data" \
./mad.sh:3674: -F "key=" \
./mad.sh:3675: -F "time=$jira_timeval" \
./mad.sh:3676: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3677: "${jira_PostUrlHost}")
./mad.sh:3678: fi
./mad.sh:3679: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:3680: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3681: fi
./mad.sh:3682: if grep -Eqi ' 200 ' <<< "${response}" ; then
./mad.sh:3632: response=$(tor_curl_upload --insecure -i \
./mad.sh:3633: -H "Content-Type: multipart/form-data" \
./mad.sh:3634: -F "key=" \
./mad.sh:3635: -F "time=$jira_timeval" \
./mad.sh:3636: -F "file=@${filepath}" \
./mad.sh:3637: "${jira_PostUrlHost}")
./mad.sh:3638: else
./mad.sh:3639: response=$(tor_curl_upload --insecure -i \
./mad.sh:3640: -H "Content-Type: multipart/form-data" \
./mad.sh:3641: -F "key=" \
./mad.sh:3642: -F "time=$jira_timeval" \
./mad.sh:3643: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3644: "${jira_PostUrlHost}")
./mad.sh:3645: fi
./mad.sh:3646: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./mad.sh:3647: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3648: fi
./mad.sh:3649: if grep -Eqi ' 200 ' <<< "${response}" ; then
@@ -3,6 +3,45 @@
#
# ---------- Initial release with MAD Uploader functionality ----------
# 2025.02.26 - [uploadhive] Fix "Wrong IP" error -- use uploadhive.com IP4 address to connect for post
# 2025.02.26 - [up_lainsafe] Fix retry terminal output
# 2025.02.25 - [mad + allhosts] Re-engineer BadHtml scan to only scan the first 10kb of downloaded partials
# 2025.02.24 - [pixeldrain] Update "The file is IP limited" response handling retry
# 2025.02.22 - [blackcloud_onion] Add bcloud.onion download handling (url fixing)
# 2025.02.21 - [anonfile] Update cdn link parsing to handle new subdomains
# 2025.02.21 - [anonfile] Add download limit reached response handling
# 2025.02.21 - [anonfile] Update file info retrieval (head no longer responds)
# 2025.02.21 - [sendspace] Add sendspace.com as download host
# 2025.02.21 - [oshi / up_oshi] Revert /nossl/ changes for oshi.at (clearnet)
# 2025.02.20 - [up_ranoz] Fix parsing of ranoz upload link (cloudflare)
# 2025.02.20 - [sendnow] Better handling of sendnow new Tor ip blocking
# 2025.02.20 - [up_ranoz / up_uploadhive] Add obfuscation of .7z in multipart filename that was missing
# 2025.02.18 - [uploadhive] Add handling of the new /cgi-bin/dl.cgi/ url tickets (WIP)
#              (unfortunately, this is tied to the requesting ip, so downloads get "Wrong IP")
# 2025.02.18 - [up_oshi] Add Manage url as comment on uploads
# 2025.02.18 - [up_oshi / oshi] Use /nossl/ url and http
# 2025.02.17 - [gofile] Add a random sleep if 429 response detected (too many requests)
# 2025.02.17 - [*ALL] Audit and update all single bracket operations
# 2025.02.17 - [filehaus] Fix downloading from fh
# 2025.02.15 - [uploadbay] Update urls regex for acceptable alternate
# 2025.02.15 - [up_sendnow] Add send.now as upload host
# 2025.02.15 - [sendnow] Fix handling of filenames with special characters in url
# 2025.02.14 - [mad] Add helpful verbiage for user on MAD Randomized Extension upload urls
# 2025.02.14 - [up_ranoz] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.14 - [up_uploadhive] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.13 - [mad] Add "RanozRandomizeExt" MAD randomized extension configurable variable
# 2025.02.13 - [up_ranoz] Add MAD randomized extension upload handling
# 2025.02.13 - [ranoz] Add MAD randomized extension download handling
# 2025.02.13 - [sendnow] Extend request timeout for head / get (server response time lag)
# 2025.02.12 - [sendnow] Add send.now as download host
# 2025.02.11 - [ranoz] Fix filename (to handle fileid added to download urls)
# 2025.02.10 - [mad] Add detection of custom "Removed" response on cdn get from direct links
# 2025.02.06 - [ranoz] Add UNAVAILABLE_FOR_LEGAL_REASONS response handling
# 2025.02.04 - [mad] Add ConnectTimeoutUpload to separate configurable up/down timeouts
# 2025.02.03 - [up_lainsafe] Add pomf2.lain.la as upload host (1GB)
# 2025.02.02 - [mad] Add function to handle urlencode of cyrillic / kanji / latin / etc
# 2025.02.02 - [ranoz] Fix handling filenames containing cyrillic / kanji / latin chars
# 2025.02.02 - [all] Reduce character processing for urlencode to special url characters
# 2025.01.30 - [isupload] Add handling of 404 Not Found on initial page fetch
# 2025.01.23 - [mad] Do not check for supported host on "direct=" lines
# 2025.01.19 - [fileditch] Add direct download url processing fileditchfiles.me (though they block Tor now)
@ -13,6 +13,7 @@ Max Size . HostCode . Nickname . Notes
20GB    atea    ateasystems.com        ?? expiry
10GB    gofile  gofile.io              ?? expiry
10GB    tmpme   tempfile.me            3mo expiry (tend to ban 7z faster)
5GB     afio    anonfile.io            ?? expiry
5GB     uhive   uploadhive             ??
- 5GB   uflix   uploadflix.cc          7d inactive expiry
5GB     fd      fileditch.com (.me)    ??

@ -59,9 +60,9 @@ Max Size . HostCode . Nickname . Notes
5GB     moo     moocloud.ch            ~1mo expiry, jirafeau
- 5GB   frso    freesocial.co          ~1mo expiry, jirafeau
- 5GB   squid   filesquid.net          ~1mo expiry, jirafeau
5GB     edd     eddowding.com          ~1mo expiry, jirafeau
4GB     tmpsh   temp.sh                3d expiry
2GB     dict    dictvm.org             ~1mo expiry, jirafeau
1GB     edd     eddowding.com          ~1mo expiry, jirafeau
1GB     kaz     depot.kaz.bzh          ~1mo expiry, jirafeau
512MB   gagn    fichier.gagneux.info   ~1mo expiry, jirafeau
512MB   herb    herbolistique.com      ~1mo expiry, jirafeau
@ -1,6 +1,6 @@
#! Name: 1fichier.sh
#! Author: kittykat
#! Version: 2024.10.06
#! Version: 2025.03.15
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!

@ -256,6 +256,7 @@ fich_GetFile() {
  fi
  echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
  touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
  tor_identity="${RANDOM}"
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}"
43 hosts/anonfileio.sh Normal file

@ -0,0 +1,43 @@
#! Name: anonfileio.sh
#! Author: kittykat
#! Version: 2025.03.28
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='afio'
HostNick='anonfile.io'
HostFuncPrefix='afio'
HostUrls='anonfile.io'
HostDomainRegex='^(http|https)://anonfile\.io/(f/|api/download/)'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#!    <HostFuncPrefix>_DownloadFile()
afio_DownloadFile() {
  local pUrl="$1"
  local pFileCnt="$2"
  local pUrlMod="$pUrl"
  local filecode="${pUrlMod##*/}"
  if ! grep -Eqi '^https://anonfile.io/api/download/' <<< "${pUrlMod}" ; then
    # Modify to api/download
    pUrlMod="https://anonfile.io/api/download/$filecode"
    echo -e "[${BLUE}ModifiedUrl${NC}]: ${pUrlMod}"
  fi
  direct_DownloadFile "$pUrl" "$pFileCnt" "$pUrlMod"
}
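Editor's note: mad.sh's parser for the registration string is not part of this diff, so the following is only a minimal sketch, assuming the format the merge line above actually produces (entries end in '@', fields are '/'-separated, the domain regex follows the first ':'; note the merge line also carries a HostUrls field that the Format comment omits).

#!/usr/bin/env bash
# Hypothetical splitter for the merged registration string; illustrative only.
ListHostAndDomainRegexes='/afio/anonfile.io/afio/anonfile.io:^(http|https)://anonfile\.io/(f/|api/download/)@'
while IFS= read -r -d '@' entry; do
  [[ -z "$entry" ]] && continue
  meta="${entry%%:*}"    # '/HostCode/HostNick/HostFuncPrefix/HostUrls'
  regex="${entry#*:}"    # HostDomainRegex
  IFS='/' read -r _ code nick prefix urls <<< "$meta"
  echo "code=$code nick=$nick prefix=$prefix urls=$urls regex=$regex"
done <<< "$ListHostAndDomainRegexes"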
@ -1,6 +1,6 @@
#! Name: gofile.sh
#! Author: kittykat
#! Version: 2025.02.17
#! Version: 2025.03.28
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -1,6 +1,6 @@
#! Name: ranoz.sh
#! Author: kittykat
#! Version: 2025.02.13
#! Version: 2025.03.30
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!

@ -78,6 +78,8 @@ rz_DownloadFile() {
rz_FetchFileInfo() {
  finalAttempt=$1
  maxfetchretries=5
  mkdir -p "${WorkDir}/.temp"
  rz_cookie_jar=$(mktemp "${WorkDir}/.temp/rz_cookies""${instance_no}"".XXXXXX")
  download_url="$remote_url"
  if grep -Eqi 'ranoz.gg/file/' <<< "$remote_url"; then
    echo -e "${GREEN}# Fetching download url…${NC}"

@ -86,13 +88,16 @@ rz_FetchFileInfo() {
    printf " ."
    tor_identity="${RANDOM}"
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    response=$(tor_curl_request --insecure -L -s "$remote_url")
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${rz_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    response=$(tor_curl_request --insecure -L -i -s \
      -b "${rz_cookie_jar}" -c "${rz_cookie_jar}" \
      "$remote_url")
    if [[ "${DebugAllEnabled}" == "true" ]] ; then
      debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}"
    fi
    if [[ -z $response ]] ; then
      if [[ $i == $maxfetchretries ]] ; then
        rm -f "${rz_cookie_jar}";
        printf "\\n"
        echo -e "${RED}| Failed to extract download url [1]${NC}"
        warnAndRetryUnknownError=true

@ -105,6 +110,7 @@ rz_FetchFileInfo() {
      fi
    fi
    if grep -Eqi "There is no such file|UNAVAILABLE_FOR_LEGAL_REASONS|File was deleted because" <<< "$response"; then
      rm -f "${rz_cookie_jar}";
      printf "\\n"
      echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
      exitDownloadError=true

@ -112,6 +118,7 @@ rz_FetchFileInfo() {
      return 1
    fi
    if grep -Eqi 'NEXT_NOT_FOUND' <<< "$response"; then
      rm -f "${rz_cookie_jar}";
      printf "\\n"
      echo -e "${RED}| The file appears to be gone (NEXT_NOT_FOUND)${NC}"
      exitDownloadError=true

@ -138,6 +145,7 @@ rz_FetchFileInfo() {
      break
    else
      if [[ $i == $maxfetchretries ]] ; then
        rm -f "${rz_cookie_jar}";
        printf "\\n"
        echo -e "${RED}| Failed to extract download url [2]${NC}"
        warnAndRetryUnknownError=true

@ -157,7 +165,9 @@ rz_FetchFileInfo() {
    printf " ."
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${rz_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
    file_header=$(tor_curl_request --insecure --head -L -i -s \
      -b "${rz_cookie_jar}" -c "${rz_cookie_jar}" \
      "$download_url")
    if [[ "${DebugAllEnabled}" == "true" ]] ; then
      debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
    fi

@ -217,6 +227,7 @@ rz_FetchFileInfo() {
    fi
    break #Good to go here
  done
  rm -f "${rz_cookie_jar}";
  touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
  if [[ ! "$filename_override" == "" ]] ; then
    filename="$filename_override"
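The cookie fix above amounts to threading one cookie jar through every request in the fetch sequence. A minimal standalone sketch of the pattern, assuming plain curl in place of the script's tor_curl_request wrapper (the file id and $download_url here are illustrative):

jar=$(mktemp "${TMPDIR:-/tmp}/rz_cookies.XXXXXX")
trap 'rm -f "$jar"' EXIT
# The first request stores whatever Set-Cookie headers ranoz.gg now issues...
page=$(curl --insecure -L -i -s -b "$jar" -c "$jar" "https://ranoz.gg/file/FILEID")
# ...and the follow-up HEAD probe replays them from the same jar, so the
# server sees one consistent session instead of two anonymous hits.
# $download_url stands in for the cdn link previously parsed out of $page.
curl --insecure --head -L -s -b "$jar" -c "$jar" "$download_url"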
@ -1,6 +1,6 @@
#! Name: torup.sh
#! Author: kittykat
#! Version: 2025.03.05
#! Version: 2025.03.16
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!

@ -90,7 +90,7 @@ torp_FetchFileInfo() {
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${torp_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    response=$(tor_curl_request --insecure -L -s \
      -c "${fdot_cookie_jar}" \
      -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
      "$fixed_url")
    if [[ "${DebugAllEnabled}" == "true" ]] ; then
      debugHtml "${remote_url##*/}" "torp_fetch$i" "${response}"

@ -125,6 +125,7 @@ torp_FetchFileInfo() {
    if grep -Eqi '/file" class\="btn btn-primary btn-lg">Download Now</a>' <<< "$response"; then
      printf "\\n"
      echo -e "${GREEN}| Download url found${NC}"
      download_url=$(grep -oPi '(?<=\\"props\\":\{\}\},\\"href\\":\\").*?(?=\\"}.*$)' <<< "$response")
      download_url="${fixed_url}/file"
      break
    else

@ -193,11 +194,13 @@ torp_GetFile() {
      tor_curl_request --insecure -L -G --no-alpn \
        --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
        -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
        -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
        -H "Referer: $fixed_url" \
        "$download_url" --output "$file_path"
    else
      tor_curl_request --insecure -L -G --no-alpn \
        -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
        -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
        -H "Referer: $fixed_url" \
        "$download_url" --output "$file_path"
    fi

@ -206,6 +209,7 @@ torp_GetFile() {
      tor_curl_request --insecure -L -G --no-alpn \
        --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
        -H "User-Agent: $RandomUA" \
        -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
        -H "Referer: $fixed_url" \
        -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
        -H "Accept-Language: en-US,en;q=0.5" \

@ -221,6 +225,7 @@ torp_GetFile() {
    else
      tor_curl_request --insecure -L -G --no-alpn \
        -H "User-Agent: $RandomUA" \
        -H "Host: ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion" \
        -H "Referer: $fixed_url" \
        -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
        -H "Accept-Language: en-US,en;q=0.5" \
134 hosts/up_anonfileio.sh Normal file

@ -0,0 +1,134 @@
#! Name: up_anonfileio.sh
#! Author: kittykat
#! Version: 2025.03.28
#! Desc: Add support for uploading files to anonfile.io
#! Info: https://anonfile.io/f/<filehash>
#! MaxSize: 5GB
#! Expire: ??
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='afio'
HostNick='anonfile.io'
HostFuncPrefix='afio'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#!    <HostFuncPrefix>_UploadFile()
afio_UploadFile() {
  local _hostCode=${1}
  local filepath=${2}
  local filecnt=${3}
  local pline=${4}
  local filename="${filepath##*/}"
  warnAndRetryUnknownError=false
  exitUploadError=false
  exitUploadNotAvailable=false
  fileAlreadyDone=false
  tor_identity="${RANDOM}"
  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  MaxUploadSizeInBytes=5368709120
  fsize=$(GetFileSize "$filepath" "false")
  if ((fsize > MaxUploadSizeInBytes)); then
    rm -f "${UploadTicket}"
    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
    return 1
  fi
  finalAttempt="false"
  for ((z=0; z<=$MaxUploadRetries; z++)); do
    if [[ $z -eq $MaxUploadRetries ]] ; then
      finalAttempt="true"
    fi
    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    if afio_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
      return 0
    elif [[ $z -lt $MaxUploadRetries ]]; then
      if [[ "${fileAlreadyDone}" == "true" ]] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
        fi
      fi
      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${UploadTicket}"
        break
      fi
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
      sleep 3
    fi
  done
  rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
afio_PostFile() {
  local filepath=$1
  local _hostCode=$2
  local filename=$3
  local fileCnt=$4
  local retryCnt=$5
  local finalAttempt=$6
  local pline=${7}
  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
  tor_identity="${RANDOM}"
  PostUrlHost='https://anonfile.io/api/upload'
  arrFiles=("$filepath")
  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  response=$(tor_curl_upload --insecure -i \
    -H "Content-Type: multipart/form-data" \
    -F "keepalive=1" \
    -F "file=@${filepath}" \
    "${PostUrlHost}")
  if [[ "${DebugAllEnabled}" == "true" ]] ; then
    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
  fi
  if grep -Eqi '"success":true,"code":"' <<< "${response}" ; then
    hash=$(grep -oPi -m 1 '(?<="code":").*?(?=".*$)' <<< "$response")
    filesize=$(GetFileSize "$filepath" "false")
    downloadLink="https://anonfile.io/f/$hash"
    echo -e "${GREEN}| Upload Success${NC}"
    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
    return 0
  else
    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
    if [[ "${finalAttempt}" == "true" ]] ; then
      printf "\\n"
      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
      exitUploadError=true
      return 1
    else
      return 1
    fi
  fi
}
#!
#! --------------- Host Extra Functions ------------------- #
#!
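For reference, the whole anonfile.io upload reduces to one multipart POST. A minimal standalone sketch outside MAD, assuming plain curl and jq (the script itself greps the raw response rather than using jq; the endpoint, form fields, and link format are taken from the code above):

# Hypothetical standalone helper; not MAD's code path.
upload_anonfileio() {
  local filepath=$1 response code
  response=$(curl --insecure -s \
    -F "keepalive=1" \
    -F "file=@${filepath}" \
    "https://anonfile.io/api/upload")
  code=$(jq -r '.code // empty' <<< "$response")
  if [[ -n "$code" ]]; then
    echo "https://anonfile.io/f/${code}"   # download link format per the script
  else
    echo "upload failed: $response" >&2
    return 1
  fi
}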
@ -1,9 +1,9 @@
#! Name: up_eddowding.sh
#! Author: kittykat
#! Version: 2024.12.12
#! Version: 2025.03.20
#! Desc: Add support for uploading files to files.eddowding.com
#! Info: Files are accessible at https://address/f.php?h=<file_code>&p=1
#! MaxSize: 5GB
#! MaxSize: 1GB
#! Expire: 1 Month
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!

@ -33,7 +33,7 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi
#! Must be named specifically as such:
#!    <HostFuncPrefix>_UploadFile()
edd_UploadFile() {
  jira_MaxUploadSizeInBytes=5368709120
  jira_MaxUploadSizeInBytes=1073741824
  jira_PostUrlHost='https://files.eddowding.com/script.php'
  jira_filetype=1
  jira_timeval="month"
@ -1,10 +1,10 @@
#! Name: up_nantes.sh
#! Author: kittykat
#! Version: 2025.01.18
#! Version: 2025.03.20
#! Desc: Add support for uploading files to fichiers.nantes.cloud
#! Info: Files are accessible at https://address/f.php?h=<file_code>&p=1
#! MaxSize: 10GB
#! Expire: 1 Month
#! Expire: 1 Week
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
@ -1,6 +1,6 @@
#! Name: up_ranoz.sh
#! Author: kittykat
#! Version: 2025.02.24
#! Version: 2025.03.30
#! Desc: Add support for uploading files to ranoz.gg
#! Info: Files are accessible at https://ranoz.gg/file/<file_code>
#! MaxSize: 20GB

@ -96,7 +96,11 @@ rz_PostFile() {
  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
  tor_identity="${RANDOM}"
  mkdir -p "${WorkDir}/.temp"
  up_rz_cookie_jar=$(mktemp "${WorkDir}/.temp/up_rz_cookies""${instance_no}"".XXXXXX")
  PrePostUrl='https://finer-yeti-69.clerk.accounts.dev/v1/client/handshake?redirect_url=https%3A%2F%2Franoz.gg%2Fapi%2Fv1%2Ffiles%2Fupload_url&suffixed_cookies=false&__clerk_hs_reason=dev-browser-missing'
  PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
  RanozRandomizeExt="false"
  if [[ "$RanozRandomizeExt" == "true" ]] && [[ ! $filename == *.rar ]]; then
    randomext=$(GetSemiRandomExt)
    echo -e "${BLUE}MAD Randomized Extension: $randomext${NC}"

@ -122,12 +126,20 @@ rz_PostFile() {
    fi
    tmpfilename="${tmpfilepath##*/}"
  else
    tmpfilename="${tmpfilepath##*/}"
    tmpfilename="${filepath##*/}"
  fi
  local fsize=$(stat -c%s "$filepath")
  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  trap "rm -f ${UploadTicket}; rm -f ${up_rz_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  response=$(tor_curl_upload --insecure -L -i -s \
    -b "${up_rz_cookie_jar}" -c "${up_rz_cookie_jar}" \
    "$PrePostUrl")
  if [[ "${DebugAllEnabled}" == "true" ]] ; then
    debugHtml "${filepath##*/}" "${_hostCode}_prepost" "prepost_url: ${PrePostUrl}"$'\n'"${response}"
  fi
  trap "rm -f ${UploadTicket}; rm -f ${up_rz_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  response=$(tor_curl_upload --insecure -Lis \
    "$PostUrlHost" \
    -b "${up_rz_cookie_jar}" -c "${up_rz_cookie_jar}" \
    -H "Content-Type: application/json" \
    -d "{ \
      \"filename\": \"$tmpfilename\", \

@ -140,6 +152,7 @@ rz_PostFile() {
    downloadLink=$(grep -oPi '(?<=,"url":").*?(?=".*$)' <<< "$response")
    echo -e "${GREEN}| Upload url obtained...${NC}"
  else
    rm -f ${up_rz_cookie_jar};
    err=$(grep -oPi '(?<="errors":\[\{"message":").*?(?=".*$)' <<< "$response")
    if [[ "${finalAttempt}" == "true" ]] ; then
      printf "\\n"

@ -152,14 +165,16 @@ rz_PostFile() {
    fi
  fi
  arrFiles=("$filepath")
  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
  trap "rm -f ${UploadTicket}; echo ""; rm -f ${up_rz_cookie_jar}; tput cnorm; exit" 0 1 2 3 6 15
  response=$(tor_curl_upload --insecure -i -X PUT \
    "${PostUrlHost}" \
    --upload-file "$filepath" \
    -b "${up_rz_cookie_jar}" -c "${up_rz_cookie_jar}" \
    -H "Content-Length: $fsize")
  if [[ "${DebugAllEnabled}" == "true" ]] ; then
    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
  fi
  rm -f ${up_rz_cookie_jar};
  if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then
    filesize=$(GetFileSize "$filepath" "false")
    echo -e "${GREEN}| Upload Success${NC}"
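The up_ranoz change implies a three-step flow: a Clerk dev-browser handshake that issues the cookies ranoz.gg now requires, a JSON request for an upload slot, then a PUT of the file bytes. A minimal sketch with plain curl and jq; the handshake URL and endpoint are verbatim from the script, but the '.url' field name and the PUT target are inferred (the script only greps for ,"url":" and PUTs to $PostUrlHost, which may be reassigned in lines this diff does not show):

jar=$(mktemp); f="archive.bin"   # illustrative file name
trap 'rm -f "$jar"' EXIT
# 1) Handshake: collect the Clerk session cookies.
curl --insecure -L -s -o /dev/null -b "$jar" -c "$jar" \
  'https://finer-yeti-69.clerk.accounts.dev/v1/client/handshake?redirect_url=https%3A%2F%2Franoz.gg%2Fapi%2Fv1%2Ffiles%2Fupload_url&suffixed_cookies=false&__clerk_hs_reason=dev-browser-missing'
# 2) Request an upload slot (the real script sends more JSON fields than this).
resp=$(curl --insecure -L -s -b "$jar" -c "$jar" \
  -H "Content-Type: application/json" \
  -d "{\"filename\": \"$f\"}" \
  'https://ranoz.gg/api/v1/files/upload_url')
upload_url=$(jq -r '.url' <<< "$resp")   # assumed response field name
# 3) PUT the bytes against the slot, replaying the same cookies.
curl --insecure -i -X PUT "$upload_url" \
  -b "$jar" -c "$jar" \
  --upload-file "$f" \
  -H "Content-Length: $(stat -c%s "$f")"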
@ -1,10 +1,10 @@
#! Name: up_skrepr.sh
#! Author: kittykat
#! Version: 2024.11.14
#! Version: 2025.03.20
#! Desc: Add support for uploading files to transfer.skrepr.com
#! Info: Files are accessible at https://address/f.php?h=<file_code>&p=1
#! MaxSize: ??
#! Expire: 1 Week
#! Expire: 1 Month
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!

@ -36,7 +36,7 @@ skpr_UploadFile() {
  jira_MaxUploadSizeInBytes=104857600
  jira_PostUrlHost='https://transfer.skrepr.com/script.php'
  jira_filetype=1
  jira_timeval="week"
  jira_timeval="month"
  jira_downloadLinkPrefix='https://transfer.skrepr.com/f.php?h='
  jira_UploadFile ${1} ${2} ${3} ${4}
}
399 hosts/uploadscloud.sh Normal file

@ -0,0 +1,399 @@
#! Name: uploadscloud.sh
#! Author: kittykat
#! Version: 2025.03.10
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='upsc'
HostNick='uploadscloud'
HostFuncPrefix='upsc'
HostUrls='uploadscloud.com'
HostDomainRegex='^(http|https)://(.*\.)?uploadscloud\.com'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#!    <HostFuncPrefix>_DownloadFile()
upsc_DownloadFile() {
  local remote_url=${1}
  local file_url=${1}
  local filecnt=${2}
  warnAndRetryUnknownError=false
  exitDownloadError=false
  exitDownloadNotAvailable=false
  fileAlreadyDone=false
  download_inflight_path="${WorkDir}/.inflight/"
  mkdir -p "$download_inflight_path"
  completed_location="${WorkDir}/downloads/"
  tor_identity="${RANDOM}"
  finalAttempt="false"
  for ((z=0; z<=$MaxUrlRetries; z++)); do
    if [[ $z -eq $MaxUrlRetries ]] ; then
      finalAttempt="true"
    fi
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    if upsc_FetchFileInfo $finalAttempt && upsc_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
      return 0
    elif [[ $z -lt $MaxUrlRetries ]]; then
      if [[ "${fileAlreadyDone}" == "true" ]] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
        fi
      fi
      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
        break
      fi
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
      sleep 3
    fi
  done
  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
#!
#! ------------- (2) Fetch File Info Function ----------------- #
#!
upsc_FetchFileInfo() {
  finalAttempt=$1
  maxfetchretries=5
  upsc_cookie_jar=""
  echo -e "${GREEN}# Fetching download link…${NC}"
  for ((i=1; i<=$maxfetchretries; i++)); do
    mkdir -p "${WorkDir}/.temp"
    upsc_cookie_jar=$(mktemp "${WorkDir}/.temp/upsc_cookies""${instance_no}"".XXXXXX")
    printf " ."
    tor_identity="${RANDOM}"
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${upsc_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    response=$(tor_curl_request --insecure -L -s -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" "$remote_url")
    if [[ "${DebugAllEnabled}" == "true" ]] ; then
      debugHtml "${remote_url##*/}" "upsc_dwnpage$i" "${response}"
    fi
    if [[ -z $response ]] ; then
      rm -f "${upsc_cookie_jar}";
      if [[ $i == $maxfetchretries ]] ; then
        printf "\\n"
        echo -e "${RED}| Failed to extract download link.${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "" ""
        fi
        return 1
      else
        continue
      fi
    fi
    if grep -Eqi "There is no such file|File was deleted because|FILE NOT FOUND OR DELETED" <<< "$response"; then
      rm -f "${upsc_cookie_jar}";
      printf "\\n"
      echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
      exitDownloadError=true
      removedDownload "${remote_url}"
      return 1
    fi
    if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then
      printf "\\n"
      echo -e "${GREEN}| Post link found.${NC}"
      post_op=$(grep -oPi -m 1 '(?<=input type="hidden" name="op" value=").*(?=">.*$)' <<< "$response")
      post_id=$(grep -oPi -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response")
    fi
    if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then
      rm -f "${upsc_cookie_jar}";
      if [[ $i == $maxfetchretries ]] ; then
        printf "\\n"
        echo -e "${RED}| Failed to extract download link.${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "" ""
        fi
        return 1
      else
        continue
      fi
    else
      break
    fi
  done
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $upsc_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15
  form_data="op=$post_op&id=$post_id&rand=&referer=&method_free=&method_premium="
  response=$(tor_curl_request --insecure -svo. -X POST \
    -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" \
    --data-raw "$form_data" "$remote_url" 2>&1)
  if [[ "${DebugAllEnabled}" == "true" ]] ; then
    debugHtml "${remote_url##*/}" "upsc_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
  fi
  if [[ -z $response ]] ; then
    echo -e "${RED}| Failed to extract download link [1]${NC}"
    warnAndRetryUnknownError=true
    if [[ "${finalAttempt}" == "true" ]] ; then
      rm -f "${upsc_cookie_jar}";
      failedRetryDownload "${remote_url}" "Failed to extract download link [1]" ""
    fi
    return 1
  fi
  if grep -Eqi "File Not Found" <<< "$response"; then
    echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
    rm -f "${upsc_cookie_jar}";
    exitDownloadError=true
    removedDownload "${remote_url}"
    return 1
  fi
  if grep -Eqi 'Location: http' <<< "$response"; then
    download_url=$(grep -oPi '(?<=Location: ).*$' <<< "$response")
    download_url=${download_url//[$'\t\r\n']}
    filename=${download_url##*/}
  fi
  if [[ -z "$download_url" ]]; then
    echo -e "${RED}| Failed to extract download link [2]${NC}"
    warnAndRetryUnknownError=true
    if [[ "${finalAttempt}" == "true" ]] ; then
      rm -f "${upsc_cookie_jar}";
      failedRetryDownload "${remote_url}" "Failed to extract download link [2]" ""
    fi
    return 1
  fi
  echo -e "${GREEN}# Fetching file info…${NC}"
  for ((j=1; j<=$maxfetchretries; j++)); do
    printf " ."
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${upsc_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    file_header=$(tor_curl_request --insecure --head -L -s -b "${upsc_cookie_jar}" -c "${upsc_cookie_jar}" --referer "$remote_url" "$download_url")
    if [[ "${DebugAllEnabled}" == "true" ]] ; then
      debugHtml "${remote_url##*/}" "upsc_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
    fi
    if [[ -z $file_header ]] ; then
      if [[ $j == $maxfetchretries ]] ; then
        rm -f "${upsc_cookie_jar}";
        printf "\\n"
        echo -e "${RED}| Failed to extract file info.${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "" ""
        fi
        return 1
      else
        continue
      fi
    fi
    if grep -Eqi '404 Not Found' <<< "$file_header"; then
      rm -f "${upsc_cookie_jar}";
      printf "\\n"
      echo -e "${RED}| Failed to extract file info.${NC}"
      warnAndRetryUnknownError=true
      if [[ "${finalAttempt}" == "true" ]] ; then
        failedRetryDownload "${remote_url}" "" ""
      fi
      return 1
    fi
    if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
      if [[ $j == $maxfetchretries ]] ; then
        rm -f "${upsc_cookie_jar}";
        printf "\\n"
        echo -e "${RED}| Failed to extract file info${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "" ""
        fi
        return 1
      else
        continue
      fi
    fi
    if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then
      if [[ $j == $maxfetchretries ]] ; then
        rm -f "${upsc_cookie_jar}";
        printf "\\n"
        echo -e "${RED}| Failed to extract file name${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "Failed to extract file name" ""
        fi
        return 1
      else
        continue
      fi
    fi
    file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
    file_size_bytes=${file_size_bytes//[$'\t\r\n']}
    if [[ -z "$file_size_bytes" ]]; then
      if [[ $j == $maxfetchretries ]] ; then
        rm -f "${upsc_cookie_jar}";
        printf "\\n"
        echo -e "${RED}| Failed to extract file size${NC}"
        warnAndRetryUnknownError=true
        if [[ "${finalAttempt}" == "true" ]] ; then
          failedRetryDownload "${remote_url}" "Failed to extract file size" ""
        fi
        return 1
      else
        tor_identity="${RANDOM}"
        continue
      fi
    fi
    break #Good to go here
  done
  rm -f "${upsc_cookie_jar}";
  touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
  if [[ ! "$filename_override" == "" ]] ; then
    filename="$filename_override"
  fi
  filename=$(sanitize_file_or_folder_name "${filename}")
  printf "\\n"
  echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
  if [[ -z $file_size_bytes ]] ; then
    if [[ "${finalAttempt}" == "true" ]] ; then
      failedRetryDownload "${remote_url}" "Filesize not found!" ""
    fi
    echo -e "${YELLOW}| Filesize not found… retry${NC}"
    return 1
  else
    file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
  fi
  echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
  file_path="${download_inflight_path}${filename}"
  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then
    return 1
  fi
  if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
    return 1
  fi
  echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
}
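uploadscloud.com follows the common XFileSharing pattern: scrape the hidden op/id form fields from the landing page, replay them as a POST, and read the direct link out of the redirect's Location header. A minimal sketch with plain curl, using -D - to capture response headers where the script uses its -svo. ... 2>&1 trick ($jar and $remote_url as above; the grep expressions are taken from the script):

page=$(curl --insecure -L -s -b "$jar" -c "$jar" "$remote_url")
post_op=$(grep -oPi -m 1 '(?<=input type="hidden" name="op" value=").*(?=">.*$)' <<< "$page")
post_id=$(grep -oPi -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$page")
# Replay the form without following redirects; the server answers with a
# Location header pointing at the cdn file.
headers=$(curl --insecure -s -D - -o /dev/null -X POST \
  -b "$jar" -c "$jar" \
  --data-raw "op=${post_op}&id=${post_id}&rand=&referer=&method_free=&method_premium=" \
  "$remote_url")
download_url=$(grep -oPi '(?<=Location: ).*$' <<< "$headers" | tr -d '\r')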
#!
#! ----------- (3) Fetch File / Download File Function --------------- #
#!
upsc_GetFile() {
  echo -e "${GREEN}# Downloading…${NC}"
  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
  fileCnt=$1
  retryCnt=$2
  finalAttempt=$3
  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  for ((j=1; j<=$MaxDownloadRetries; j++)); do
    pd_presize=0
    if [[ -f "$file_path" ]] ; then
      pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
    fi
    GetRandomUA
    tor_identity="${RANDOM}" # CANNOT CHANGE FOR UPEE -- will fail
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    if [[ "${RateMonitorEnabled}" == "true" ]]; then
      tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
    else
      tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
    fi
    received_file_size=0
    if [[ -f "$file_path" ]] ; then
      received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
    fi
    if CheckNoHtml "$remote_url" "$filename" "$file_path" "$((received_file_size - pd_presize))" ; then
      containsHtml=false
    else
      containsHtml=true
    fi
    downDelta=$(( received_file_size - pd_presize ))
    if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then
      if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then
        if [[ -f "${file_path}" ]] ; then
          if ((pd_presize > 0)); then
            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
            truncate -s $pd_presize "${file_path}"
          else
            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
            rm -f "${file_path}"
          fi
        fi
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then
        if [[ -f "${file_path}" ]] ; then
          if ((pd_presize > 0)); then
            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
            truncate -s $pd_presize "${file_path}"
          else
            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
            rm -f "${file_path}"
          fi
        fi
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then
        if [[ -f "$file_path" ]] ; then
          rm -rf "$file_path"
        fi
        echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi
      if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
        echo -e "\n${RED}Download failed, file is incomplete.${NC}"
        if ((j >= $MaxDownloadRetries)) ; then
          rm -f "$flockDownload";
          if [[ "${finalAttempt}" == "true" ]] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi
    else
      break
    fi
  done
  rm -f "$flockDownload";
  rm -f "${upsc_cookie_jar}";
  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
  return 0
}
#!
#! --------------- Host Extra Functions ------------------- #
#!
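The AutoRepairBadPartials branch above is worth isolating: when a bad exit node appends a short HTML error page to a resumable partial, the script rolls the file back instead of discarding it. A minimal sketch of the idea, assuming plain curl and GNU coreutils ($download_url and $file_path as above):

pre=$(stat -c%s "$file_path" 2>/dev/null || echo 0)    # size before this attempt
curl -L -C - -o "$file_path" "$download_url" || true   # resume into the partial
post=$(stat -c%s "$file_path" 2>/dev/null || echo 0)
delta=$((post - pre))
if ((delta > 0 && delta < 1024)); then                 # suspiciously small tail
  if ((pre > 0)); then
    truncate -s "$pre" "$file_path"   # revert to the last known-good size
  else
    rm -f "$file_path"                # nothing good to keep
  fi
fi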
80 mad.sh
@ -30,9 +30,19 @@
#
# * Everyone who provided feedback and helped test.. and those who wish to remain anonymous

ScriptVersion=2025.03.08
ScriptVersion=2025.03.30
#=================================================
# Recent Additions
# 2025.03.30 - [ranoz] Fix to handle new cookie requirements
# 2025.03.28 - [up_ranoz] Fix to handle new cookie requirement
# 2025.03.28 - [up_anonfileio] Add anonfile.io as upload host
# 2025.03.28 - [anonfileio] Add anonfile.io as download host
# 2025.03.25 - [up_ranoz] Disable MAD randomized extension on uploads (7z block disabled)
# 2025.03.21 - [mad] Update random user agents 2025.03
# 2025.03.20 - [jira hosts] Update 3 jira hosts (retention and maxsize)
# 2025.03.16 - [torup] Fix torup cookies
# 2025.03.15 - [1fichier] Get new node prior to cdn download (greater possibility of a faster node)
# 2025.03.10 - [uploadscloud] Add uploadscloud.com as download host
# 2025.03.08 - [mad] Only check files in determining which curl header
# 2025.03.06 - [uploadhive] Disable global resume for servers without resume
# 2025.03.05 - [torup] Fix torup removed response change from prior

@ -41,45 +51,6 @@ ScriptVersion=2025.03.08
# 2025.03.03 - [up_sendspace] Add sendspace.com as upload host (300MB)
# 2025.03.01 - [filedot] Fix filename parsing. Add 3 second wait for Free Download post.
# 2025.03.01 - [torup] Update file removed response
# 2025.02.26 - [uploadhive] Fix "Wrong IP" error -- use uploadhive.com IP4 address to connect for post
# 2025.02.26 - [up_lainsafe] Fix retry terminal output
# 2025.02.25 - [mad + allhosts] Re-engineer BadHtml scan to only scan the first 10kb of downloaded partials
# 2025.02.24 - [pixeldrain] Update "The file is IP limited" response handling retry
# 2025.02.22 - [blackcloud_onion] Add bcloud.onion download handling (url fixing)
# 2025.02.21 - [anonfile] Update cdn link parsing to handle new subdomains
# 2025.02.21 - [anonfile] Add download limit reached response handling
# 2025.02.21 - [anonfile] Update file info retrieval (head no longer responds)
# 2025.02.21 - [sendspace] Add sendspace.com as download host
# 2025.02.21 - [oshi / up_oshi] Revert /nossl/ changes for oshi.at (clearnet)
# 2025.02.20 - [up_ranoz] Fixed parsing of ranoz upload link (cloudflare)
# 2025.02.20 - [sendnow] Better handling of sendnow new Tor ip blocking
# 2025.02.20 - [up_ranoz / up_uploadhive] Add obfuscation of .7z in multipart filename that was missing
# 2025.02.18 - [uploadhive] Add handling of the new /cgi-bin/dl.cgi/ url tickets (WIP)
#              (unfortunately, this is tied to the requesting ip, so downloads get "Wrong IP")
# 2025.02.18 - [up_oshi] Add Manage url as comment on uploads
# 2025.02.18 - [up_oshi / oshi] use /nossl/ url and http
# 2025.02.17 - [gofile] Add a random sleep if 429 response detected (too many requests)
# 2025.02.17 - [*ALL] Audit and update all single bracket operations
# 2025.02.17 - [filehaus] Fix downloading from fh
# 2025.02.15 - [uploadbay] Update urls regex for acceptable alternate
# 2025.02.15 - [up_sendnow] Add send.now as upload host
# 2025.02.15 - [sendnow] Fix handling of filenames with special characters in url
# 2025.02.14 - [mad] Add helpful verbiage for user on MAD Randomized Extension upload urls
# 2025.02.14 - [up_ranoz] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.14 - [up_uploadhive] Add help "[rand ext, rename to <filename> or use MAD v2025.02.13+]" to url
# 2025.02.13 - [mad] Add "RanozRandomizeExt" MAD randomized extension configurable variable
# 2025.02.13 - [up_ranoz] Add MAD randomized extension upload handling
# 2025.02.13 - [ranoz] Add MAD randomized extension download handling
# 2025.02.13 - [sendnow] Extend request timeout for head / get (server response time lag)
# 2025.02.12 - [sendnow] Add send.now as download host
# 2025.02.11 - [ranoz] Fix filename (to handle fileid added to download urls)
# 2025.02.10 - [mad] Add detection of custom "Removed" response on cdn get from direct links
# 2025.02.06 - [ranoz] Add UNAVAILABLE_FOR_LEGAL_REASONS response handling
# 2025.02.04 - [mad] Add ConnectTimeoutUpload to separate configurable up/down timeouts
# 2025.02.03 - [up_lainsafe] Add pomf2.lain.la as upload host (1GB)
# 2025.02.02 - [mad] Add function to handle urlencode of cyrillic / kanji / latin / etc
# 2025.02.02 - [ranoz] Fix handling filenames containing cyrillic / kanji / latin chars
# 2025.02.02 - [all] Reduced character processing for urlencode to special url characters

# -- See ./documentation/!Changelog (Historical).txt for further changes -- #
@ -432,16 +403,16 @@ SetEnabledUploadHosts() {
    lstEnabledUploadHosts+="up_pixeldrain,up_quax,up_ranoz,up_skrepr,up_torup,up_turboonion,up_uploadee,"
    lstEnabledUploadHosts+="up_uploadflix,up_uploadhive,up_uploadraja,up_herbolistique,up_uploadbay,up_ateasystems,"
    lstEnabledUploadHosts+="up_syspro,up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
    lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe,up_sendnow,up_sendspace"
    lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe,up_sendnow,up_sendspace,up_anonfileio"
  elif [[ "$EnabledUploadHosts" == "online" ]] ; then
    lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bedrive,up_bowfile,up_depotkaz,"
    lstEnabledUploadHosts+="up_familleflender,up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile,"
    lstEnabledUploadHosts+="up_harrault,up_hexload,up_isupload,up_kouploader,up_kraken,up_moocloud,up_nantes,"
    lstEnabledUploadHosts+="up_nippy,up_nofile,up_offshorecat,up_oshi,up_pixeldrain,up_quax,up_ranoz,"
    lstEnabledUploadHosts+="up_nippy,up_nofile,up_offshorecat,up_oshi,up_pixeldrain,up_quax,up_ranoz,up_eddowding,"
    lstEnabledUploadHosts+="up_shareonline,up_skrepr,up_torup,up_turboonion,up_uploadee,up_uploadflix,up_uploadhive,"
    lstEnabledUploadHosts+="up_uploadraja,up_yolobit,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
    lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
    lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe,up_sendnow,up_sendspace"
    lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe,up_sendnow,up_sendspace,up_anonfileio"
  fi
}
SetEnabledDownloadHosts() {

@ -454,7 +425,7 @@ SetEnabledDownloadHosts() {
    lstEnabledDownloadHosts+="tempfileme,tempsh,torup,turboonion,up2share,uploadee,uploadev,uploadflix,uploadhive,"
    lstEnabledDownloadHosts+="youdbox,herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,"
    lstEnabledDownloadHosts+="fileland,fireget,euromussels,ramsgaard,uwabaki,gagneux,sendnow,sendspace,"
    lstEnabledDownloadHosts+="blackcloud_onion"
    lstEnabledDownloadHosts+="blackcloud_onion,uploadscloud,anonfileio"
  elif [[ "$EnabledDownloadHosts" == "online" ]] ; then
    lstEnabledDownloadHosts="1fichier,anonsharing,bedrive,biteblob,bowfile,click,cyssoux,"
    lstEnabledDownloadHosts+="dailyuploads,dataupload,depotkaz,dictvm,dosya,downloadgg,eddowding,eternalhosting,"

@ -464,7 +435,8 @@ SetEnabledDownloadHosts() {
    lstEnabledDownloadHosts+="oshi,pixeldrain,quax,ranoz,shareonline,skrepr,tempfileme,tempsh,torup,"
    lstEnabledDownloadHosts+="turboonion,up2share,uploadee,uploadev,uploadflix,uploadhive,yolobit,youdbox,"
    lstEnabledDownloadHosts+="herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,fileland,"
    lstEnabledDownloadHosts+="fireget,euromussels,ramsgaard,uwabaki,gagneux,sendnow,sendspace,blackcloud_onion"
    lstEnabledDownloadHosts+="fireget,euromussels,ramsgaard,uwabaki,gagneux,sendnow,sendspace,blackcloud_onion,"
    lstEnabledDownloadHosts+="uploadscloud,anonfileio"
  fi
}
GetRandomFiledotUser() {
@ -1239,17 +1211,13 @@ sleepRandomMins() {
  sleep $((minWait + RANDOM % (maxWait - minWait)))m
}
GetRandomUA() {
  ar_UA[0]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.3"
  ar_UA[1]="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.3"
  ar_UA[2]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36 Edg/124.0.0.0"
  ar_UA[3]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:125.0) Gecko/20100101 Firefox/125.0"
  ar_UA[4]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.3"
  ar_UA[5]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.4"
  ar_UA[6]="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0"
  ar_UA[7]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36 Edg/122.0.0.0"
  ar_UA[8]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 Edg/123.0.0.0"
  ar_UA[9]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0"
  ar_UA[10]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:115.0) Gecko/20100101 Firefox/115.0"
  ar_UA[0]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.3"
  ar_UA[1]="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.1.1 Safari/605.1.1"
  ar_UA[2]="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Safari/605.1.1"
  ar_UA[3]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36 Edg/132.0.0.0"
  ar_UA[4]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 Edg/128.0.0.0"
  ar_UA[5]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:134.0) Gecko/20100101 Firefox/134.0"
  ar_UA[6]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:128.0) Gecko/20100101 Firefox/128.0"
  arrSize=${#ar_UA[@]}
  index=$(($RANDOM % $arrSize))
  RandomUA=${ar_UA[$index]}
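Usage is unchanged by the UA refresh: callers such as torp_GetFile above invoke GetRandomUA once per attempt and pass the result as an explicit header. A minimal sketch ($some_url is illustrative):

GetRandomUA
curl -s -H "User-Agent: $RandomUA" "$some_url"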
@ -12,7 +12,7 @@
# 40GB  isup   isupload.com     100MB  fb    fileblade.com     500MB  fland  fileland.io
# 100MB ubay   uploadbay.net    2GB    sysp  syspro.com.br     1GB    uwab   uwabaki.party
# 512MB anon   anonfile.de      100MB  fget  fireget.com       1GB    lain   pomf2.lain.la
# 100GB snow   send.now         300MB  ss    sendspace.com
# 100GB snow   send.now         300MB  ss    sendspace.com     5GB    afio   anonfile.io
# Jirafeau hosts (recommended upload 100MB splits as many hosts only support that)
# 10GB  anarc  anarchaserver    1GB    kaz   depot.kaz.bzh     5GB    squid  filesquid
# 10GB  nant   nantes.cloud     500MB  soy   soyjak.download   512MB  linx   linxx.net