# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host

# 2025.01.14 - [uwabaki] Add uwabaki.party as download host
# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor IPs (until an alternative is found)
# 2025.01.13 - [ocr_captcha] Create imagemagick OCR function for testing without tesseract
# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function
# 2025.01.13 - [up_anonfile] Modify to use new upload url
# 2025.01.12 - [ateasystems] Update 404 Not found response
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
kittykat 2025-01-16 07:54:05 +00:00
parent 30eedaf567
commit eeb8054960
Signed by: kittykat
GPG key ID: E3F1556620F70C3C
29 changed files with 1951 additions and 634 deletions
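
The gagneux, ramsgaard and euromussels entries above all add Jirafeau-backed mirrors; the audit diff below shows that each new up_*.sh pins exactly two values, jira_PostUrlHost and jira_downloadLinkPrefix (see up_gagneux.sh:37/40, up_ramsgaard.sh:37/40, up_euromussels.sh:37/40). A minimal sketch of that pattern, assuming Jirafeau's stock script.php form fields and its plain-text reply; the repo's shared Jirafeau handler is not part of this diff:

  jira_PostUrlHost='https://data.ramsgaard.me/script.php'       # up_ramsgaard.sh:37
  jira_downloadLinkPrefix='https://data.ramsgaard.me/f.php?h='  # up_ramsgaard.sh:40

  upload_jirafeau_sketch() {   # illustrative name, not the repo's helper
    local file_path="$1" response hash
    # "file" and "time" are Jirafeau's usual script.php fields (assumption);
    # tor_curl_upload is the Tor wrapper defined in mad.sh
    response=$(tor_curl_upload --insecure -F "file=@${file_path}" -F "time=month" "$jira_PostUrlHost")
    hash=$(head -n 1 <<< "$response" | tr -d '\r')   # Jirafeau normally replies with the file id on the first line
    [ -n "$hash" ] && echo "${jira_downloadLinkPrefix}${hash}"
  }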


@ -1,4 +1,4 @@
DateTime: 25.01.04
DateTime: 25.01.15
Files:
./hosts/1fichier.sh
@ -19,12 +19,14 @@ Files:
./hosts/dataupload.sh
./hosts/dbree.sh
./hosts/depotkaz.sh
./hosts/desiupload.sh
./hosts/dictvm.sh
./hosts/discreetshare.sh
./hosts/dosya.sh
./hosts/downloadgg.sh
./hosts/eddowding.sh
./hosts/eternalhosting.sh
./hosts/euromussels.sh
./hosts/examples/ExampleNewHost.sh
./hosts/examples/up_example.sh
./hosts/familleflender.sh
@ -37,6 +39,7 @@ Files:
./hosts/firestorage.sh
./hosts/free4e.sh
./hosts/freesocial.sh
./hosts/gagneux.sh
./hosts/gofile.sh
./hosts/harrault.sh
./hosts/herbolistique.sh
@ -57,6 +60,7 @@ Files:
./hosts/oshi.sh
./hosts/pixeldrain.sh
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
@ -90,14 +94,18 @@ Files:
./hosts/up_dictvm.sh
./hosts/up_dosya.sh
./hosts/up_eddowding.sh
./hosts/up_euromussels.sh
./hosts/up_familleflender.sh
./hosts/up_fileblade.sh
./hosts/up_fileditch.sh
./hosts/up_filehaus.sh
./hosts/up_fileland.sh
./hosts/up_filesquid.sh
./hosts/up_fireget.sh
./hosts/up_firestorage.sh
./hosts/up_free4e.sh
./hosts/up_freesocial.sh
./hosts/up_gagneux.sh
./hosts/up_gofile.sh
./hosts/up_harrault.sh
./hosts/up_herbolistique.sh
@ -116,6 +124,7 @@ Files:
./hosts/up_oshi.sh
./hosts/up_pixeldrain.sh
./hosts/up_quax.sh
./hosts/up_ramsgaard.sh
./hosts/up_ranoz.sh
./hosts/up_shareonline.sh
./hosts/up_skrepr.sh
@ -132,6 +141,7 @@ Files:
./hosts/up_uploadhive.sh
./hosts/up_uploadraja.sh
./hosts/up_yolobit.sh
./hosts/uwabaki.sh
./hosts/yolobit.sh
./hosts/youdbox.sh
./mad.sh
@ -235,6 +245,14 @@ _________________________________________________________________________
./hosts/dataupload.sh:357: tor_curl_request --insecure \
./hosts/dataupload.sh:364: tor_curl_request --insecure \
./hosts/dataupload.sh:381: tor_curl_request --insecure \
./hosts/desiupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url")
./hosts/desiupload.sh:202: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/desiupload.sh:306: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/desiupload.sh:404: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/desiupload.sh:406: tor_curl_request --insecure \
./hosts/desiupload.sh:411: tor_curl_request --insecure \
./hosts/desiupload.sh:417: tor_curl_request --insecure \
./hosts/desiupload.sh:433: tor_curl_request --insecure \
./hosts/dosya.sh:108: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
./hosts/dosya.sh:113: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
@ -255,8 +273,8 @@ _________________________________________________________________________
./hosts/examples/ExampleNewHost.sh:201: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
./hosts/examples/up_example.sh:112: response=$(tor_curl_upload --insecure \
./hosts/fileblade.sh:90: response=$(tor_curl_request --insecure -L -s -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" "$remote_url")
./hosts/fileblade.sh:164: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:266: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:165: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:281: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:335: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
./hosts/fileblade.sh:450: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/fileblade.sh:452: tor_curl_request --insecure -L \
@ -299,13 +317,13 @@ _________________________________________________________________________
./hosts/innocent.sh:214: tor_curl_request_extended --insecure "$download_url" --output "$file_path"
./hosts/isupload.sh:90: response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url")
./hosts/isupload.sh:164: response=$(tor_curl_request_extended --insecure -L -s -X POST \
./hosts/isupload.sh:238: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
./hosts/isupload.sh:248: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:260: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:352: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:396: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:398: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./hosts/isupload.sh:247: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:251: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:263: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:355: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:399: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:401: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}")
./hosts/kraken.sh:169: down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}")
./hosts/kraken.sh:186: file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url")
@ -333,12 +351,12 @@ _________________________________________________________________________
./hosts/quax.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/ranoz.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:259: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:261: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:265: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:157: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:264: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:266: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:285: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:275: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:290: tor_curl_request --insecure -L -G --no-alpn \
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/syspro.sh:188: tor_curl_request --insecure -L \
@ -415,6 +433,8 @@ _________________________________________________________________________
./hosts/up_fileblade.sh:104: response=$(tor_curl_upload --insecure -i \
./hosts/up_fileditch.sh:107: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_filehaus.sh:106: response=$(tor_curl_upload --insecure -i \
./hosts/up_fileland.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_fireget.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_firestorage.sh:113: response=$(tor_curl_upload --insecure -i \
./hosts/up_gofile.sh:102: response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers")
./hosts/up_gofile.sh:121: response=$(tor_curl_upload --insecure -i \
@ -452,119 +472,119 @@ _________________________________________________________________________
./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
./mad.sh:107:UseTorCurlImpersonate=false
./mad.sh:398:tor_curl_request() {
./mad.sh:399: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:400: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:402: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:405:tor_curl_request_extended() {
./mad.sh:407: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:408: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:410: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:413:tor_curl_upload() {
./mad.sh:114:UseTorCurlImpersonate=false
./mad.sh:405:tor_curl_request() {
./mad.sh:406: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:409: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:412:tor_curl_request_extended() {
./mad.sh:414: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:416: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:418: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:422: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:424: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1368:install_curl_impersonate() {
./mad.sh:1370: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1371: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1375: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1376: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1380: PS3='Please select which curl_impersonate to install: '
./mad.sh:1388: install_curl_impersonate_lwthiker_orig
./mad.sh:1392: install_curl_impersonate_lexiforest_fork
./mad.sh:1402:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1406: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1407: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1410: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1413: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1415: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1418: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1428: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1430: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1433: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1435: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1483: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1512: echo -e "| Extracting curl_impersonate..."
./mad.sh:1514: rm -f "${ScriptDir}"/curl*
./mad.sh:1515: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1516: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1517: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1525:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1529: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1530: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1533: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1536: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1538: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1541: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1551: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1553: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1556: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1558: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1606: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1635: echo -e "| Extracting curl_impersonate..."
./mad.sh:1637: rm -f "${ScriptDir}"/curl*
./mad.sh:1638: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1639: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1640: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1802: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1810: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1811: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1820: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1822: echo -e "$maud_curl"
./mad.sh:1824: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1826: echo -e "$maud_torcurl"
./mad.sh:1838: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1839: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1848: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1850: echo -e "$maud_curl"
./mad.sh:1852: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1854: echo -e "$maud_torcurl"
./mad.sh:1860: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1861: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1870: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1872: echo -e "$maud_curl"
./mad.sh:1874: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1876: echo -e "$maud_torcurl"
./mad.sh:2823: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2824: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2826: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:2998: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2999: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3001: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3199: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3206: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3334: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3378: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3380: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3578: response=$(tor_curl_upload --insecure -i \
./mad.sh:3585: response=$(tor_curl_upload --insecure -i \
./mad.sh:3656:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3657: curl_impersonate=()
./mad.sh:3658: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3659: bFoundCurlHeader=false
./mad.sh:3663: curl_impersonate=($fil)
./mad.sh:3664: bFoundCurlHeader=true
./mad.sh:3668: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3669: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3672: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3675: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3677: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3681: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3682: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3685: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3687: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3688: UseTorCurlImpersonate=false
./mad.sh:3689: install_curl_impersonate
./mad.sh:3773: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3774: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3852:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3853: install_curl_impersonate
./mad.sh:3884:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3885: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3887: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:420:tor_curl_upload() {
./mad.sh:421: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:423: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:425: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:429: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:431: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1382:install_curl_impersonate() {
./mad.sh:1384: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1385: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1389: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1390: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1394: PS3='Please select which curl_impersonate to install: '
./mad.sh:1402: install_curl_impersonate_lwthiker_orig
./mad.sh:1406: install_curl_impersonate_lexiforest_fork
./mad.sh:1416:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1420: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1421: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1424: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1427: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1429: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1432: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1442: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1444: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1447: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1449: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1497: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1526: echo -e "| Extracting curl_impersonate..."
./mad.sh:1528: rm -f "${ScriptDir}"/curl*
./mad.sh:1529: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1530: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1531: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1539:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1543: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1544: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1547: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1550: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1552: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1555: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1565: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1567: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1570: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1572: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1620: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1649: echo -e "| Extracting curl_impersonate..."
./mad.sh:1651: rm -f "${ScriptDir}"/curl*
./mad.sh:1652: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1653: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1654: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1816: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1824: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1825: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1834: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1836: echo -e "$maud_curl"
./mad.sh:1838: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1840: echo -e "$maud_torcurl"
./mad.sh:1852: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1853: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1862: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1864: echo -e "$maud_curl"
./mad.sh:1866: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1868: echo -e "$maud_torcurl"
./mad.sh:1874: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1875: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1884: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1886: echo -e "$maud_curl"
./mad.sh:1888: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1890: echo -e "$maud_torcurl"
./mad.sh:2837: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2838: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2840: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3012: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3013: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3015: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3213: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3220: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3350: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3394: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3396: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3594: response=$(tor_curl_upload --insecure -i \
./mad.sh:3601: response=$(tor_curl_upload --insecure -i \
./mad.sh:3672:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3673: curl_impersonate=()
./mad.sh:3674: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3675: bFoundCurlHeader=false
./mad.sh:3679: curl_impersonate=($fil)
./mad.sh:3680: bFoundCurlHeader=true
./mad.sh:3684: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3685: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3688: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3691: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3693: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3697: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3698: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3701: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3703: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3704: UseTorCurlImpersonate=false
./mad.sh:3705: install_curl_impersonate
./mad.sh:3789: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3790: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3868:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3869: install_curl_impersonate
./mad.sh:3900:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3901: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3903: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
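
The mad.sh:405-409 lines above are the choke point every host script goes through: a single wrapper that runs either the curl_impersonate binary or stock curl, always through the Tor SOCKS proxy. Reconstructed here for readability; the else/fi placement is inferred, since the audit only prints matching lines:

  tor_curl_request() {
    if [ "${UseTorCurlImpersonate}" == "true" ]; then
      # curl_impersonate is an array populated at startup from the curl_* files next to mad.sh
      "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
        --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
    else
      curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
        --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
    fi
  }
  # typical call from a host script (e.g. ranoz.sh:90 above):
  # response=$(tor_curl_request --insecure -L -s "$remote_url")

tor_curl_request_extended and tor_curl_upload follow the same shape, adding a randomized connect timeout and upload speed limits respectively.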


@ -1,4 +1,4 @@
DateTime: 25.01.04
DateTime: 25.01.15
Files:
./hosts/1fichier.sh
@ -19,12 +19,14 @@ Files:
./hosts/dataupload.sh
./hosts/dbree.sh
./hosts/depotkaz.sh
./hosts/desiupload.sh
./hosts/dictvm.sh
./hosts/discreetshare.sh
./hosts/dosya.sh
./hosts/downloadgg.sh
./hosts/eddowding.sh
./hosts/eternalhosting.sh
./hosts/euromussels.sh
./hosts/examples/ExampleNewHost.sh
./hosts/examples/up_example.sh
./hosts/familleflender.sh
@ -37,6 +39,7 @@ Files:
./hosts/firestorage.sh
./hosts/free4e.sh
./hosts/freesocial.sh
./hosts/gagneux.sh
./hosts/gofile.sh
./hosts/harrault.sh
./hosts/herbolistique.sh
@ -57,6 +60,7 @@ Files:
./hosts/oshi.sh
./hosts/pixeldrain.sh
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
@ -90,14 +94,18 @@ Files:
./hosts/up_dictvm.sh
./hosts/up_dosya.sh
./hosts/up_eddowding.sh
./hosts/up_euromussels.sh
./hosts/up_familleflender.sh
./hosts/up_fileblade.sh
./hosts/up_fileditch.sh
./hosts/up_filehaus.sh
./hosts/up_fileland.sh
./hosts/up_filesquid.sh
./hosts/up_fireget.sh
./hosts/up_firestorage.sh
./hosts/up_free4e.sh
./hosts/up_freesocial.sh
./hosts/up_gagneux.sh
./hosts/up_gofile.sh
./hosts/up_harrault.sh
./hosts/up_herbolistique.sh
@ -116,6 +124,7 @@ Files:
./hosts/up_oshi.sh
./hosts/up_pixeldrain.sh
./hosts/up_quax.sh
./hosts/up_ramsgaard.sh
./hosts/up_ranoz.sh
./hosts/up_shareonline.sh
./hosts/up_skrepr.sh
@ -132,6 +141,7 @@ Files:
./hosts/up_uploadhive.sh
./hosts/up_uploadraja.sh
./hosts/up_yolobit.sh
./hosts/uwabaki.sh
./hosts/yolobit.sh
./hosts/youdbox.sh
./mad.sh
@ -181,9 +191,9 @@ _________________________________________________________________________
./hosts/downloadgg.sh:297: -H "Origin: https://download.gg" \
./hosts/eternalhosting.sh:36: if grep -Eqi '\.onion' <<< "$pUrlMod" && grep -Eqi 'https://' <<< "$pUrlMod" ; then
./hosts/examples/up_example.sh:105: local ar_HUP[0]='https://oshi.at'
./hosts/fileblade.sh:310: if ! grep -Eqi '<a href="https://de6.fileblade.com/files/' <<< "$response"; then
./hosts/fileblade.sh:322: download_url=$(grep -oP -m 1 '(?<=a href="https://de6.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
./hosts/fileblade.sh:324: download_url='https://de6.fileblade.com/files/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/fileblade.sh:310: if ! grep -oPi '(?=href="https://.*?\.fileblade.com/files/.*?" class=.*$)' <<< "$response"; then
./hosts/fileblade.sh:322: download_url=$(grep -oP -m 1 '(?<=a href="https://).*?(?=\.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
./hosts/fileblade.sh:324: download_url='https://'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/filedot.sh:119: "https://filedot.to/login.html")
./hosts/filedot.sh:160: -H "Origin: https://filedot.to" \
./hosts/filedot.sh:162: -H "Referer: https://filedot.to/login.html" \
@ -206,9 +216,9 @@ _________________________________________________________________________
./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
./hosts/innocent.sh:48: download_url="${download_url/https:/http:}"
./hosts/isupload.sh:133: post_action="${remote_url//https:/http:}"
./hosts/isupload.sh:208: if ! grep -Eqi '<a href="http://isupload.com/cgi-bin/dl.cgi/' <<< "$response"; then
./hosts/isupload.sh:220: download_url=$(grep -oP -m 1 '(?<=a href="http://isupload.com/cgi-bin/dl.cgi/).*?(?=">.*$)' <<< "$response")
./hosts/isupload.sh:222: download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/isupload.sh:211: if ! grep -Eqi '<a href="http://isupload.com/cgi-bin/dl.cgi/' <<< "$response"; then
./hosts/isupload.sh:223: download_url=$(grep -oP -m 1 '(?<=a href="http://isupload.com/cgi-bin/dl.cgi/).*?(?=">.*$)' <<< "$response")
./hosts/isupload.sh:225: download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/kraken.sh:155: kraken_action="https://krakenfiles.com/download/${kraken_action##*/}"
./hosts/nippy.sh:160: download_url="https:"$(grep -oP '(?<=<h2><a href='\'').*(?='\'' class=)' <<< "$response")
./hosts/nippy.sh:229: cdn_url="https:"$(grep -oP '(?<=location: ).*$' <<< "$file_header")
@ -236,7 +246,7 @@ _________________________________________________________________________
./hosts/up_acid.sh:40: jira_downloadLinkPrefix='https://dl.acid.fr/f.php?h='
./hosts/up_anarchaserver.sh:37: jira_PostUrlHost='https://transitional.anarchaserver.org/jirafeau/script.php'
./hosts/up_anarchaserver.sh:40: jira_downloadLinkPrefix='https://transitional.anarchaserver.org/jirafeau/f.php?h='
./hosts/up_anonfile.sh:99: PostUrlHost='https://anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_anonfile.sh:99: PostUrlHost='https://file-01.anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_anonfile.sh:121: downloadLink="https://anonfile.de/$hash"
./hosts/up_anonsharing.sh:99: PostUrlHost='https://anonsharing.com/ajax/file_upload_handler?r=anonsharing.com'
./hosts/up_anonsharing.sh:109: if grep -Eqi '"error":null,"url":"https:\\/\\/anonsharing.com\\/' <<< "${response}" ; then
@ -282,6 +292,8 @@ _________________________________________________________________________
./hosts/up_dosya.sh:100: local ar_HUP[1]='https://dl3.lim1.dosyaupload.com/core/page/ajax/file_upload_handler.ajax.php?r=www.dosyaupload.com'
./hosts/up_eddowding.sh:37: jira_PostUrlHost='https://files.eddowding.com/script.php'
./hosts/up_eddowding.sh:40: jira_downloadLinkPrefix='https://files.eddowding.com/f.php?h='
./hosts/up_euromussels.sh:37: jira_PostUrlHost='https://uploads.euromussels.eu/script.php'
./hosts/up_euromussels.sh:40: jira_downloadLinkPrefix='https://uploads.euromussels.eu/f.php?h='
./hosts/up_familleflender.sh:37: jira_PostUrlHost='https://famille-flender.fr/jirafeau/script.php'
./hosts/up_familleflender.sh:40: jira_downloadLinkPrefix='https://famille-flender.fr/f.php?h='
./hosts/up_fileblade.sh:97: local ar_HUP[0]="https://de6.fileblade.com/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon"
@ -291,8 +303,14 @@ _________________________________________________________________________
./hosts/up_filehaus.sh:114: if grep -Eqi 'HTTP/.* 200|https://cdn' <<< "${response}" ; then
./hosts/up_filehaus.sh:115: url=$(grep -oPi '(?<=https://).*(?=\.filehaus\.su).*?(?=$)' <<< "$response")
./hosts/up_filehaus.sh:117: downloadLink="https://${url}"
./hosts/up_fileland.sh:99: PostUrlHost='https://fs300.fileland.io/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_fileland.sh:121: downloadLink="https://fileland.io/$hash"
./hosts/up_filesquid.sh:37: jira_PostUrlHost='https://filesquid.net/script.php'
./hosts/up_filesquid.sh:40: jira_downloadLinkPrefix='https://filesquid.net/f.php?h='
./hosts/up_fireget.sh:99: PostUrlHost='https://s22.fireget.com/cgi-bin/upload.cgi'
./hosts/up_fireget.sh:118: if grep -Eqi 'Location: https://fireget.com/\?&fn=' <<< "${response}" ; then
./hosts/up_fireget.sh:119: hash=$(grep -oPi -m 1 '(?<=Location: https://fireget.com/\?&fn=).*?(?=&st=OK.*$)' <<< "$response")
./hosts/up_fireget.sh:121: downloadLink="https://fireget.com/$hash"
./hosts/up_firestorage.sh:99: local ar_HUP[0]='https://server65.firestorage.jp/upload.cgi'
./hosts/up_firestorage.sh:100: local ar_HUP[1]='https://server62.firestorage.jp/upload.cgi'
./hosts/up_firestorage.sh:101: local ar_HUP[2]='https://server39.firestorage.jp/upload.cgi'
@ -306,6 +324,8 @@ _________________________________________________________________________
./hosts/up_free4e.sh:40: jira_downloadLinkPrefix='https://send.free4e.com/f.php?h='
./hosts/up_freesocial.sh:37: jira_PostUrlHost='https://files.freesocial.co/script.php'
./hosts/up_freesocial.sh:40: jira_downloadLinkPrefix='https://files.freesocial.co/f.php?h='
./hosts/up_gagneux.sh:37: jira_PostUrlHost='https://fichier.gagneux.info/script.php'
./hosts/up_gagneux.sh:40: jira_downloadLinkPrefix='https://fichier.gagneux.info/f.php?h='
./hosts/up_gofile.sh:102: response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers")
./hosts/up_gofile.sh:114: local ar_HUP[0]="https://$gofileStoreServer.gofile.io/contents/uploadFile"
./hosts/up_gofile.sh:138: hash=$(grep -oPi '(?<=https://gofile.io/d/).*?(?=")' <<< "$response")
@ -367,6 +387,8 @@ _________________________________________________________________________
./hosts/up_pixeldrain.sh:107: PostUrlHost='https://pixeldrain.com/api/file/'
./hosts/up_pixeldrain.sh:136: downloadLink="https://pixeldrain.com/u/${hash}"
./hosts/up_quax.sh:99: PostUrlHost='https://qu.ax/upload.php'
./hosts/up_ramsgaard.sh:37: jira_PostUrlHost='https://data.ramsgaard.me/script.php'
./hosts/up_ramsgaard.sh:40: jira_downloadLinkPrefix='https://data.ramsgaard.me/f.php?h='
./hosts/up_ranoz.sh:99: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
./hosts/up_ranoz.sh:111: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_shareonline.sh:99: PostUrlHost='https://ns07.zipcluster.com/upload.php'
@ -403,75 +425,75 @@ _________________________________________________________________________
./hosts/up_uploadraja.sh:99: PostUrlHost='https://awsaisiaposisition69.kalpstudio.xyz/cgi-bin/upload.cgi?upload_type=file&utype=anon'
./hosts/up_uploadraja.sh:119: downloadLink="https://uploadraja.com/$hash"
./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
./mad.sh:672: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:674: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:677: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:679: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:700: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:702: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:705: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:707: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:728: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:730: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:733: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:735: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:757: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:759: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:762: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:764: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:785: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #http (if changed)
./mad.sh:787: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #direct url https
./mad.sh:790: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:792: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:818: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:820: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:840: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:861: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:863: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:866: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:868: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:884: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:886: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:889: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:891: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:910: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:912: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:915: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:917: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:937: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:939: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:942: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:944: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:962: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:964: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:967: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:969: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:988: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:990: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:993: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:995: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1413: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1430: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1536: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1553: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1816: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1844: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1866: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3182: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3695:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3792: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3794: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3796: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4015: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4016: remote_url=${remote_url/http:/https:}
./mad.sh:4037: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4038: remote_url=${remote_url/http:/https:}
./mad.sh:4404: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4405: remote_url=${remote_url/http:/https:}
./mad.sh:4463: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4464: remote_url=${remote_url/http:/https:}
./mad.sh:4489: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4490: remote_url=${remote_url/http:/https:}
./mad.sh:683: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:685: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:688: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:690: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:711: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:713: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:716: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:718: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:739: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:741: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:744: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:746: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:768: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:770: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:773: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:775: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:799: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:801: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:804: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:806: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:832: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:834: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:854: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:875: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:877: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:880: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:882: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:898: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:900: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:903: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:905: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:924: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:926: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:929: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:931: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:951: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:953: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:956: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:958: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:976: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:978: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:981: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:983: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1002: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1004: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1007: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1009: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1427: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1444: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1550: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1567: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1830: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1858: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1880: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3196: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3711:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3808: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3810: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3812: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4031: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4032: remote_url=${remote_url/http:/https:}
./mad.sh:4053: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4054: remote_url=${remote_url/http:/https:}
./mad.sh:4420: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4421: remote_url=${remote_url/http:/https:}
./mad.sh:4479: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4480: remote_url=${remote_url/http:/https:}
./mad.sh:4505: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4506: remote_url=${remote_url/http:/https:}
./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
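
The long run of mad.sh sed lines above (mad.sh:683-1009) is the input-list bookkeeping: once a URL has been processed, its line in ${InputFile} is commented out and tagged with #OK#, #RETRY#, #FAIL#, #REMOVED#, #PASSWORD# or #BAD-URL#. In the sed expression, '>' serves as the delimiter so URLs need no escaping, '&' re-inserts the matched line behind the leading '#', and the ${url/https:/http:} expansion lets the same pattern match entries that were written with http:. A reduced sketch of the idiom (function name and argument order are illustrative, not the repo's):

  mark_url_status() {
    local url="$1" tag="$2" listfile="$3"
    # comment the matching line out and append the status tag; mad.sh repeats this
    # for the plain and the direct= form of each URL
    sed -i -e "s>^${url}.*>#& ${tag}>g" "$listfile"
  }
  # mark_url_status 'https://example.host/abcd' '#OK# file.part1.rar' "$InputFile"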


@ -1,4 +1,4 @@
DateTime: 25.01.04
DateTime: 25.01.15
Files:
./hosts/1fichier.sh
@ -19,12 +19,14 @@ Files:
./hosts/dataupload.sh
./hosts/dbree.sh
./hosts/depotkaz.sh
./hosts/desiupload.sh
./hosts/dictvm.sh
./hosts/discreetshare.sh
./hosts/dosya.sh
./hosts/downloadgg.sh
./hosts/eddowding.sh
./hosts/eternalhosting.sh
./hosts/euromussels.sh
./hosts/examples/ExampleNewHost.sh
./hosts/examples/up_example.sh
./hosts/familleflender.sh
@ -37,6 +39,7 @@ Files:
./hosts/firestorage.sh
./hosts/free4e.sh
./hosts/freesocial.sh
./hosts/gagneux.sh
./hosts/gofile.sh
./hosts/harrault.sh
./hosts/herbolistique.sh
@ -57,6 +60,7 @@ Files:
./hosts/oshi.sh
./hosts/pixeldrain.sh
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
@ -90,14 +94,18 @@ Files:
./hosts/up_dictvm.sh
./hosts/up_dosya.sh
./hosts/up_eddowding.sh
./hosts/up_euromussels.sh
./hosts/up_familleflender.sh
./hosts/up_fileblade.sh
./hosts/up_fileditch.sh
./hosts/up_filehaus.sh
./hosts/up_fileland.sh
./hosts/up_filesquid.sh
./hosts/up_fireget.sh
./hosts/up_firestorage.sh
./hosts/up_free4e.sh
./hosts/up_freesocial.sh
./hosts/up_gagneux.sh
./hosts/up_gofile.sh
./hosts/up_harrault.sh
./hosts/up_herbolistique.sh
@ -116,6 +124,7 @@ Files:
./hosts/up_oshi.sh
./hosts/up_pixeldrain.sh
./hosts/up_quax.sh
./hosts/up_ramsgaard.sh
./hosts/up_ranoz.sh
./hosts/up_shareonline.sh
./hosts/up_skrepr.sh
@ -132,6 +141,7 @@ Files:
./hosts/up_uploadhive.sh
./hosts/up_uploadraja.sh
./hosts/up_yolobit.sh
./hosts/uwabaki.sh
./hosts/yolobit.sh
./hosts/youdbox.sh
./mad.sh
@ -303,7 +313,7 @@ _________________________________________________________________________
./hosts/anonfile.sh:196: printf "\\n"
--
./hosts/anonfile.sh:240: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
./hosts/anonfile.sh:241: captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/anonfile.sh:241: captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/anonfile.sh:242: if [ "${DebugPluginsEnabled}" == "true" ]; then
./hosts/anonfile.sh:243: printf "\\n"
./hosts/anonfile.sh:244: echo -e "$captcha_ocr_output"
@ -745,7 +755,7 @@ _________________________________________________________________________
./hosts/dailyuploads.sh:107: echo -e "${RED}| Failed to extract download link [1].${NC}"
--
./hosts/dailyuploads.sh:139: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
./hosts/dailyuploads.sh:140: captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/dailyuploads.sh:140: captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/dailyuploads.sh:141: if [ "${DebugPluginsEnabled}" == "true" ]; then
./hosts/dailyuploads.sh:142: printf "\\n"
./hosts/dailyuploads.sh:143: echo -e "$captcha_ocr_output"
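
Both hunks above only swap the helper name from CaptchaOcrImage to CaptchaOcrImageTesseract; the helper body is not part of this report. A minimal sketch of a digits-only captcha OCR step of that shape, assuming imagemagick and tesseract are installed (function name, preprocessing values and the mapping of "ContrastStretch_5x90,Brightness_130" are illustrative, not the project's actual implementation):

CaptchaOcrDigitsSketch() {
  local img="$1" prep="${img%.*}_prep.png"
  # stretch contrast and brighten before OCR, roughly matching the hints passed above
  convert "$img" -contrast-stretch 5x90% -modulate 130 "$prep"
  # single text line, digits only, result printed to stdout
  tesseract "$prep" stdout --psm 7 -c tessedit_char_whitelist=0123456789 2>/dev/null | tr -cd '0-9'
  rm -f "$prep"
}
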
@ -973,6 +983,77 @@ _________________________________________________________________________
./hosts/dataupload.sh:390: -H "Upgrade-Insecure-Requests: 1" \
./hosts/dataupload.sh:391: -H "Sec-Fetch-Dest: document" \
--
./hosts/desiupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url")
./hosts/desiupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/desiupload.sh:92: debugHtml "${remote_url##*/}" "desi_fetch$i" "${response}"
./hosts/desiupload.sh:93: fi
./hosts/desiupload.sh:94: if [[ -z $response ]] ; then
./hosts/desiupload.sh:95: rm -f "${desi_cookie_jar}";
./hosts/desiupload.sh:96: if [ $i == $maxfetchretries ] ; then
./hosts/desiupload.sh:97: printf "\\n"
./hosts/desiupload.sh:98: echo -e "${RED}| Failed to extract download link [1]${NC}"
./hosts/desiupload.sh:99: warnAndRetryUnknownError=true
./hosts/desiupload.sh:100: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/desiupload.sh:202: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/desiupload.sh:203: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:204: --data "$form_data" "$remote_url")
./hosts/desiupload.sh:205: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/desiupload.sh:206: debugHtml "${remote_url##*/}" "desi_post_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/desiupload.sh:207: fi
./hosts/desiupload.sh:208: if [[ -z $response ]] ; then
./hosts/desiupload.sh:209: if [ $i == $maxfetchretries ] ; then
./hosts/desiupload.sh:210: rm -f "${desi_cookie_jar}";
./hosts/desiupload.sh:211: printf "\\n"
./hosts/desiupload.sh:212: echo -e "${RED}| Failed to extract download link [7]${NC}"
--
./hosts/desiupload.sh:306: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/desiupload.sh:307: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/desiupload.sh:308: debugHtml "${remote_url##*/}" "desi_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/desiupload.sh:309: fi
./hosts/desiupload.sh:310: if [[ -z $file_header ]] ; then
./hosts/desiupload.sh:311: if [ $j == $maxfetchretries ] ; then
./hosts/desiupload.sh:312: rm -f "${desi_cookie_jar}";
./hosts/desiupload.sh:313: printf "\\n"
./hosts/desiupload.sh:314: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/desiupload.sh:315: warnAndRetryUnknownError=true
./hosts/desiupload.sh:316: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/desiupload.sh:406: tor_curl_request --insecure \
./hosts/desiupload.sh:407: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/desiupload.sh:408: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:409: "$download_url" --continue-at - --output "$file_path"
./hosts/desiupload.sh:410: else
./hosts/desiupload.sh:411: tor_curl_request --insecure \
./hosts/desiupload.sh:412: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:413: "$download_url" --continue-at - --output "$file_path"
./hosts/desiupload.sh:414: fi
./hosts/desiupload.sh:415: else
./hosts/desiupload.sh:416: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/desiupload.sh:417: tor_curl_request --insecure \
./hosts/desiupload.sh:418: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/desiupload.sh:419: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:420: -H "User-Agent: $RandomUA" \
./hosts/desiupload.sh:421: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/desiupload.sh:422: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/desiupload.sh:423: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/desiupload.sh:424: -H "Connection: keep-alive" \
./hosts/desiupload.sh:425: -H "Cookie: lng=eng" \
./hosts/desiupload.sh:426: -H "Upgrade-Insecure-Requests: 1" \
./hosts/desiupload.sh:427: -H "Sec-Fetch-Dest: document" \
--
./hosts/desiupload.sh:433: tor_curl_request --insecure \
./hosts/desiupload.sh:434: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:435: -H "User-Agent: $RandomUA" \
./hosts/desiupload.sh:436: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/desiupload.sh:437: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/desiupload.sh:438: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/desiupload.sh:439: -H "Connection: keep-alive" \
./hosts/desiupload.sh:440: -H "Cookie: lng=eng" \
./hosts/desiupload.sh:441: -H "Upgrade-Insecure-Requests: 1" \
./hosts/desiupload.sh:442: -H "Sec-Fetch-Dest: document" \
./hosts/desiupload.sh:443: -H "Sec-Fetch-Mode: navigate" \
--
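
The desiupload excerpts above follow one retry shape: every fetch reuses a single cookie jar, and an empty response discards the jar and retries up to a limit. A condensed sketch of that loop, with plain curl standing in for tor_curl_request (host URL and retry count are illustrative):

desi_cookie_jar="cookies.txt"
maxfetchretries=3
for ((i = 1; i <= maxfetchretries; i++)); do
  response=$(curl -L -s -b "$desi_cookie_jar" -c "$desi_cookie_jar" "https://example.host/file/abc123")
  [[ -n $response ]] && break      # got a page, stop retrying
  rm -f "$desi_cookie_jar"         # empty reply: drop the cookie jar before the next attempt
  ((i == maxfetchretries)) && echo "Failed to extract download link" >&2
done
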
./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
./hosts/dosya.sh:110: -c "${dosya_cookie_jar}" \
./hosts/dosya.sh:111: "${remote_url}")
@ -1147,29 +1228,29 @@ _________________________________________________________________________
./hosts/fileblade.sh:99: warnAndRetryUnknownError=true
./hosts/fileblade.sh:100: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/fileblade.sh:164: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:165: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:166: --data "$form_data" "$post_action")
./hosts/fileblade.sh:167: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:168: debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:169: fi
./hosts/fileblade.sh:170: if [[ -z $response ]] ; then
./hosts/fileblade.sh:171: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:172: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:173: printf "\\n"
./hosts/fileblade.sh:174: echo -e "${RED}| Failed to extract download link [3]${NC}"
./hosts/fileblade.sh:165: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:166: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:167: --data "$form_data" "$post_action")
./hosts/fileblade.sh:168: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:169: debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:170: fi
./hosts/fileblade.sh:171: if [[ -z $response ]] ; then
./hosts/fileblade.sh:172: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:173: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:174: printf "\\n"
./hosts/fileblade.sh:175: echo -e "${RED}| Failed to extract download link [3]${NC}"
--
./hosts/fileblade.sh:266: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:267: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:268: --data "$form_data" "$post_action")
./hosts/fileblade.sh:269: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:270: debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:271: fi
./hosts/fileblade.sh:272: if [[ -z $response ]] ; then
./hosts/fileblade.sh:273: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:274: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:275: printf "\\n"
./hosts/fileblade.sh:276: echo -e "${RED}| Failed to extract download link [4].${NC}"
./hosts/fileblade.sh:281: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:282: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:283: --data "$form_data" "$post_action")
./hosts/fileblade.sh:284: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:285: debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:286: fi
./hosts/fileblade.sh:287: if [[ -z $response ]] ; then
./hosts/fileblade.sh:288: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:289: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:290: printf "\\n"
./hosts/fileblade.sh:291: echo -e "${RED}| Failed to extract download link [4].${NC}"
--
./hosts/fileblade.sh:335: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
./hosts/fileblade.sh:336: if [ "${DebugAllEnabled}" == "true" ] ; then
@ -1565,65 +1646,65 @@ _________________________________________________________________________
./hosts/isupload.sh:173: printf "\\n"
./hosts/isupload.sh:174: echo -e "${RED}| Failed to extract download link [3].${NC}"
--
./hosts/isupload.sh:238: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:239: elif ((j % 2 == 0)); then
./hosts/isupload.sh:240: printf "| Retrieving Head (Get): attempt #$j"
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./hosts/isupload.sh:242: -H "Connection: keep-alive" \
./hosts/isupload.sh:243: -w 'EffectiveUrl=%{url_effective}' \
./hosts/isupload.sh:244: "$download_url")
./hosts/isupload.sh:245: elif ((j % 3 == 0)); then
./hosts/isupload.sh:246: printf "| Retrieving Head (hack): attempt #$j"
./hosts/isupload.sh:247: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:248: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:249: tee "${WorkDir}/.temp/directhead" &
./hosts/isupload.sh:250: sleep 6
./hosts/isupload.sh:251: [ -s "${WorkDir}/.temp/directhead" ]
./hosts/isupload.sh:252: kill $! 2>/dev/null
./hosts/isupload.sh:253: )
./hosts/isupload.sh:254: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./hosts/isupload.sh:255: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./hosts/isupload.sh:256: fi
./hosts/isupload.sh:257: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:258: else
./hosts/isupload.sh:245: elif ((j % 2 == 0)); then
./hosts/isupload.sh:246: printf "| Retrieving Head: attempt #$j"
./hosts/isupload.sh:247: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:248: elif ((j % 3 == 0)); then
./hosts/isupload.sh:249: printf "| Retrieving Head (hack): attempt #$j"
./hosts/isupload.sh:250: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:251: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:252: tee "${WorkDir}/.temp/directhead" &
./hosts/isupload.sh:253: sleep 6
./hosts/isupload.sh:254: [ -s "${WorkDir}/.temp/directhead" ]
./hosts/isupload.sh:255: kill $! 2>/dev/null
./hosts/isupload.sh:256: )
./hosts/isupload.sh:257: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./hosts/isupload.sh:258: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./hosts/isupload.sh:259: fi
./hosts/isupload.sh:260: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:261: else
--
./hosts/isupload.sh:260: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:261: fi
./hosts/isupload.sh:262: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/isupload.sh:263: debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/isupload.sh:263: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:264: fi
./hosts/isupload.sh:265: if [ ! -z "$file_header" ] ; then
./hosts/isupload.sh:266: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
./hosts/isupload.sh:267: printf "\\n"
./hosts/isupload.sh:268: echo -e "${RED}| Not Found (404). The file has been removed.${NC}"
./hosts/isupload.sh:269: removedDownload "${remote_url}"
./hosts/isupload.sh:270: exitDownloadNotAvailable=true
./hosts/isupload.sh:265: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/isupload.sh:266: debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/isupload.sh:267: fi
./hosts/isupload.sh:268: if [ ! -z "$file_header" ] ; then
./hosts/isupload.sh:269: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
./hosts/isupload.sh:270: printf "\\n"
./hosts/isupload.sh:271: echo -e "${RED}| Not Found (404). The file has been removed.${NC}"
./hosts/isupload.sh:272: removedDownload "${remote_url}"
./hosts/isupload.sh:273: exitDownloadNotAvailable=true
--
./hosts/isupload.sh:352: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:353: rc=$?
./hosts/isupload.sh:354: if [ $rc -ne 0 ] ; then
./hosts/isupload.sh:355: printf "${RED}Download Failed (bad exit status).${NC}"
./hosts/isupload.sh:356: if [ -f ${file_path} ]; then
./hosts/isupload.sh:357: printf "${YELLOW} Partial removed...${NC}"
./hosts/isupload.sh:358: printf "\n\n"
./hosts/isupload.sh:359: rm -f "${file_path}"
./hosts/isupload.sh:360: else
./hosts/isupload.sh:355: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:356: rc=$?
./hosts/isupload.sh:357: if [ $rc -ne 0 ] ; then
./hosts/isupload.sh:358: printf "${RED}Download Failed (bad exit status).${NC}"
./hosts/isupload.sh:359: if [ -f ${file_path} ]; then
./hosts/isupload.sh:360: printf "${YELLOW} Partial removed...${NC}"
./hosts/isupload.sh:361: printf "\n\n"
./hosts/isupload.sh:362: fi
./hosts/isupload.sh:362: rm -f "${file_path}"
./hosts/isupload.sh:363: else
./hosts/isupload.sh:364: printf "\n\n"
./hosts/isupload.sh:365: fi
--
./hosts/isupload.sh:396: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:397: else
./hosts/isupload.sh:398: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:399: fi
./hosts/isupload.sh:400: received_file_size=0
./hosts/isupload.sh:401: if [ -f "$file_path" ] ; then
./hosts/isupload.sh:402: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/isupload.sh:403: fi
./hosts/isupload.sh:404: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./hosts/isupload.sh:405: containsHtml=false
./hosts/isupload.sh:406: else
./hosts/isupload.sh:407: containsHtml=true
./hosts/isupload.sh:408: fi
./hosts/isupload.sh:399: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:400: else
./hosts/isupload.sh:401: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:402: fi
./hosts/isupload.sh:403: received_file_size=0
./hosts/isupload.sh:404: if [ -f "$file_path" ] ; then
./hosts/isupload.sh:405: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/isupload.sh:406: fi
./hosts/isupload.sh:407: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./hosts/isupload.sh:408: containsHtml=false
./hosts/isupload.sh:409: else
./hosts/isupload.sh:410: containsHtml=true
./hosts/isupload.sh:411: fi
--
./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}")
./hosts/kraken.sh:105: if [ "${DebugAllEnabled}" == "true" ] ; then
@ -1871,50 +1952,50 @@ _________________________________________________________________________
./hosts/ranoz.sh:99: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" ""
--
./hosts/ranoz.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:151: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:152: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:153: fi
./hosts/ranoz.sh:154: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:155: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:156: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:157: printf "\\n"
./hosts/ranoz.sh:158: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:159: warnAndRetryUnknownError=true
./hosts/ranoz.sh:160: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:157: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:158: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:159: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:160: fi
./hosts/ranoz.sh:161: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:162: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:163: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:164: printf "\\n"
./hosts/ranoz.sh:165: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:166: warnAndRetryUnknownError=true
./hosts/ranoz.sh:167: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/ranoz.sh:261: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:262: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:263: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:264: else
./hosts/ranoz.sh:265: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:266: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:267: fi
./hosts/ranoz.sh:268: else
./hosts/ranoz.sh:269: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:266: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:267: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:268: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:269: else
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:271: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:272: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:273: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:274: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:275: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:276: -H "Connection: keep-alive" \
./hosts/ranoz.sh:277: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:278: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:279: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:280: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:271: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:272: fi
./hosts/ranoz.sh:273: else
./hosts/ranoz.sh:274: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:275: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:276: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:277: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:278: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:279: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:280: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:281: -H "Connection: keep-alive" \
./hosts/ranoz.sh:282: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:283: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:284: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:285: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/ranoz.sh:285: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:286: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:287: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:288: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:289: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:290: -H "Connection: keep-alive" \
./hosts/ranoz.sh:291: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:292: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:293: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:294: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:295: -H "Sec-Fetch-Site: same-origin" \
./hosts/ranoz.sh:290: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:291: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:292: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:293: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:294: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:295: -H "Connection: keep-alive" \
./hosts/ranoz.sh:296: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:297: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:298: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:299: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:300: -H "Sec-Fetch-Site: same-origin" \
--
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then
@ -2634,6 +2715,30 @@ _________________________________________________________________________
./hosts/up_filehaus.sh:115: url=$(grep -oPi '(?<=https://).*(?=\.filehaus\.su).*?(?=$)' <<< "$response")
./hosts/up_filehaus.sh:116: filesize=$(GetFileSize "$filepath" "false")
--
./hosts/up_fileland.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_fileland.sh:103: -H "Content-Type: multipart/form-data" \
./hosts/up_fileland.sh:104: -F "sess_id=" \
./hosts/up_fileland.sh:105: -F "utype=anon" \
./hosts/up_fileland.sh:106: -F "file_descr=" \
./hosts/up_fileland.sh:107: -F "file_public=1" \
./hosts/up_fileland.sh:108: -F "link_rcpt=" \
./hosts/up_fileland.sh:109: -F "link_pass=" \
./hosts/up_fileland.sh:110: -F "to_folder=" \
./hosts/up_fileland.sh:111: -F "upload=Start upload" \
./hosts/up_fileland.sh:112: -F "keepalive=1" \
--
./hosts/up_fireget.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_fireget.sh:103: -H "Content-Type: multipart/form-data" \
./hosts/up_fireget.sh:104: -H "Host: fireget.com" \
./hosts/up_fireget.sh:105: -F "sess_id=" \
./hosts/up_fireget.sh:106: -F "srv_tmp_url=" \
./hosts/up_fireget.sh:107: -F "link_rcpt=" \
./hosts/up_fireget.sh:108: -F "link_pass=" \
./hosts/up_fireget.sh:109: -F "tos=1" \
./hosts/up_fireget.sh:110: -F "submit_btn=Upload!" \
./hosts/up_fireget.sh:111: -F "upload_type=file" \
./hosts/up_fireget.sh:112: -F "file_1=@${filepath}" \
--
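
The up_fileland and up_fireget excerpts above post what looks like an XFileSharing-style multipart form: mostly empty session fields plus the file itself. A minimal sketch of such an upload, with plain curl standing in for tor_curl_upload (host URL and field set are illustrative; curl supplies the multipart Content-Type and boundary on its own, so that header is not set by hand here):

filepath="archive.part1.rar"
# -i keeps the response headers so the caller can inspect the status line afterwards
curl --insecure -i \
  -F "sess_id=" \
  -F "utype=anon" \
  -F "link_pass=" \
  -F "tos=1" \
  -F "submit_btn=Upload!" \
  -F "file_1=@${filepath}" \
  "https://example.host/cgi-bin/upload.cgi"
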
./hosts/up_firestorage.sh:113: response=$(tor_curl_upload --insecure -i \
./hosts/up_firestorage.sh:114: -H "Content-Type: multipart/form-data" \
./hosts/up_firestorage.sh:115: -F "jqueryupload=1" \
@ -3068,235 +3173,235 @@ _________________________________________________________________________
./hosts/youdbox.sh:287: containsHtml=true
./hosts/youdbox.sh:288: fi
--
./mad.sh:398:tor_curl_request() {
./mad.sh:399: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:400: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:401: else
./mad.sh:402: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:403: fi
./mad.sh:404:}
./mad.sh:405:tor_curl_request_extended() {
./mad.sh:406: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:407: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:408: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:409: else
./mad.sh:410: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:411: fi
./mad.sh:412:}
./mad.sh:413:tor_curl_upload() {
./mad.sh:405:tor_curl_request() {
./mad.sh:406: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:408: else
./mad.sh:409: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:410: fi
./mad.sh:411:}
./mad.sh:412:tor_curl_request_extended() {
./mad.sh:413: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:414: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:415: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:416: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:417: else
./mad.sh:418: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:419: fi
./mad.sh:420: else
./mad.sh:421: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:422: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:423: else
./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:416: else
./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:418: fi
./mad.sh:419:}
./mad.sh:420:tor_curl_upload() {
./mad.sh:421: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:422: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:423: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:424: else
./mad.sh:425: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:426: fi
./mad.sh:427: else
./mad.sh:428: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:429: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:430: else
--
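
The wrappers above route every request through Tor's SOCKS port, pass a per-job identity in the proxy credentials so Tor isolates that job onto its own circuit, and optionally substitute a curl-impersonate binary; the upload variant adds --speed-limit/--speed-time stall detection when the rate monitor is enabled. A stripped-down sketch of the core idea (port, timeout and identity values are illustrative):

TorIp="127.0.0.1"; torPort=9050        # assumes a local Tor SocksPort
ConnectTimeout=20
tor_identity="job$RANDOM"              # arbitrary SOCKS credentials put this job on its own circuit

tor_curl_sketch() {
  # socks5h:// so hostname resolution also happens through Tor
  curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
       --connect-timeout "${ConnectTimeout}" --compressed --globoff "$@"
}

tor_curl_sketch -s -o /dev/null -w '%{http_code}\n' "https://example.com/"
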
./mad.sh:1413: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1414: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1415: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1416: fi
./mad.sh:1417: if [ ! -z "$response" ]; then
./mad.sh:1418: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1419: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1420: break
./mad.sh:1421: fi
./mad.sh:1422: done
./mad.sh:1423: if [ -z $latestTag ]; then
./mad.sh:1427: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1428: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1429: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1430: fi
./mad.sh:1431: if [ ! -z "$response" ]; then
./mad.sh:1432: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1433: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1434: break
./mad.sh:1435: fi
./mad.sh:1436: done
./mad.sh:1437: if [ -z $latestTag ]; then
--
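
The installer above finds the newest curl-impersonate build by fetching the releases/latest page, grepping the first /releases/tag/ link out of the HTML, and assembling the tarball URL from that tag. The same extraction in isolation, with plain curl standing in for the Tor wrapper:

response=$(curl -L -s "https://github.com/lexiforest/curl-impersonate/releases/latest")
# the first "/curl-impersonate/releases/tag/<tag>" link on the page names the latest release
latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
download_url="https://github.com/lexiforest/curl-impersonate/releases/download/${latestTag}/curl-impersonate-${latestTag}.x86_64-linux-gnu.tar.gz"
echo "$download_url"
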
./mad.sh:1433: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1434: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1435: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1436: fi
./mad.sh:1437: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1438: if ((j == 8)) ; then
./mad.sh:1439: return 1
./mad.sh:1440: else
./mad.sh:1441: continue
./mad.sh:1442: fi
./mad.sh:1443: fi
./mad.sh:1447: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1448: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1449: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1450: fi
./mad.sh:1451: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1452: if ((j == 8)) ; then
./mad.sh:1453: return 1
./mad.sh:1454: else
./mad.sh:1455: continue
./mad.sh:1456: fi
./mad.sh:1457: fi
--
./mad.sh:1483: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1484: received_file_size=0
./mad.sh:1485: if [ -f "$file_path" ] ; then
./mad.sh:1486: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1487: fi
./mad.sh:1488: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1489: break
./mad.sh:1490: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1491: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1492: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1493: exit 1
./mad.sh:1497: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1498: received_file_size=0
./mad.sh:1499: if [ -f "$file_path" ] ; then
./mad.sh:1500: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1501: fi
./mad.sh:1502: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1503: break
./mad.sh:1504: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1505: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1506: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1507: exit 1
--
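
The retry loop above resumes the transfer with --continue-at - and then compares the on-disk size against the byte count learned from the earlier HEAD request, looping until they match or the retry budget is spent. A condensed sketch of that check, with plain curl standing in for the Tor wrapper (URL, size and retry count are illustrative):

download_url="https://example.com/curl-impersonate.tar.gz"
file_path="curl-impersonate.tar.gz"
file_size_bytes=123456789            # from the Content-Length of the HEAD response
MaxDownloadRetries=8
for ((j = 1; j <= MaxDownloadRetries; j++)); do
  curl -L "$download_url" --continue-at - --output "$file_path"
  received_file_size=0
  [ -f "$file_path" ] && received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
  ((received_file_size == file_size_bytes)) && break
  if ((j >= MaxDownloadRetries)); then
    echo "FAILED: Size mismatch after downloading" >&2
    exit 1
  fi
done
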
./mad.sh:1536: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1537: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1538: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1539: fi
./mad.sh:1540: if [ ! -z "$response" ]; then
./mad.sh:1541: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1542: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1543: break
./mad.sh:1544: fi
./mad.sh:1545: done
./mad.sh:1546: if [ -z $latestTag ]; then
./mad.sh:1550: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1551: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1552: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1553: fi
./mad.sh:1554: if [ ! -z "$response" ]; then
./mad.sh:1555: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1556: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1557: break
./mad.sh:1558: fi
./mad.sh:1559: done
./mad.sh:1560: if [ -z $latestTag ]; then
--
./mad.sh:1556: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1557: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1558: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1559: fi
./mad.sh:1560: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1561: if ((j == 8)) ; then
./mad.sh:1562: return 1
./mad.sh:1563: else
./mad.sh:1564: continue
./mad.sh:1565: fi
./mad.sh:1566: fi
./mad.sh:1570: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1571: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1572: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1573: fi
./mad.sh:1574: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1575: if ((j == 8)) ; then
./mad.sh:1576: return 1
./mad.sh:1577: else
./mad.sh:1578: continue
./mad.sh:1579: fi
./mad.sh:1580: fi
--
./mad.sh:1606: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1607: received_file_size=0
./mad.sh:1608: if [ -f "$file_path" ] ; then
./mad.sh:1609: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1610: fi
./mad.sh:1611: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1612: break
./mad.sh:1613: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1614: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1615: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1616: exit 1
./mad.sh:1620: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1621: received_file_size=0
./mad.sh:1622: if [ -f "$file_path" ] ; then
./mad.sh:1623: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1624: fi
./mad.sh:1625: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1626: break
./mad.sh:1627: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1628: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1629: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1630: exit 1
--
./mad.sh:1811: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1812: echo -e "Files:"
./mad.sh:1813: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1814: echo -e ""
./mad.sh:1815: echo -e ""
./mad.sh:1816: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1817: echo -e "_________________________________________________________________________"
./mad.sh:1818: echo -e "$maud_http"
./mad.sh:1819: echo -e ""
./mad.sh:1820: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1821: echo -e "_________________________________________________________________________"
--
./mad.sh:1824: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1825: echo -e "_________________________________________________________________________"
./mad.sh:1826: echo -e "$maud_torcurl"
./mad.sh:1827: echo -e ""
./mad.sh:1825: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1826: echo -e "Files:"
./mad.sh:1827: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1828: echo -e ""
./mad.sh:1829: done
./mad.sh:1830: else
./mad.sh:1831: cd "$ScriptDir"
./mad.sh:1832: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1833: cd "$WorkDir"
./mad.sh:1834: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1829: echo -e ""
./mad.sh:1830: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1831: echo -e "_________________________________________________________________________"
./mad.sh:1832: echo -e "$maud_http"
./mad.sh:1833: echo -e ""
./mad.sh:1834: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1835: echo -e "_________________________________________________________________________"
--
./mad.sh:1839: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1840: echo -e "Files:"
./mad.sh:1841: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1838: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1839: echo -e "_________________________________________________________________________"
./mad.sh:1840: echo -e "$maud_torcurl"
./mad.sh:1841: echo -e ""
./mad.sh:1842: echo -e ""
./mad.sh:1843: echo -e ""
./mad.sh:1844: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1845: echo -e "_________________________________________________________________________"
./mad.sh:1846: echo -e "$maud_http"
./mad.sh:1847: echo -e ""
./mad.sh:1848: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1849: echo -e "_________________________________________________________________________"
./mad.sh:1843: done
./mad.sh:1844: else
./mad.sh:1845: cd "$ScriptDir"
./mad.sh:1846: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1847: cd "$WorkDir"
./mad.sh:1848: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
--
./mad.sh:1852: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1853: echo -e "_________________________________________________________________________"
./mad.sh:1854: echo -e "$maud_torcurl"
./mad.sh:1855: echo -e ""
./mad.sh:1856: done
./mad.sh:1857: for fil in "${arrFiles2[@]}";
./mad.sh:1858: do
./mad.sh:1859: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1860: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1861: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1862: echo -e "Files:"
./mad.sh:1863: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1864: echo -e ""
./mad.sh:1865: echo -e ""
./mad.sh:1866: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1853: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1854: echo -e "Files:"
./mad.sh:1855: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1856: echo -e ""
./mad.sh:1857: echo -e ""
./mad.sh:1858: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1859: echo -e "_________________________________________________________________________"
./mad.sh:1860: echo -e "$maud_http"
./mad.sh:1861: echo -e ""
./mad.sh:1862: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1863: echo -e "_________________________________________________________________________"
--
./mad.sh:1866: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1867: echo -e "_________________________________________________________________________"
./mad.sh:1868: echo -e "$maud_http"
./mad.sh:1868: echo -e "$maud_torcurl"
./mad.sh:1869: echo -e ""
./mad.sh:1870: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1871: echo -e "_________________________________________________________________________"
./mad.sh:1870: done
./mad.sh:1871: for fil in "${arrFiles2[@]}";
./mad.sh:1872: do
./mad.sh:1873: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1874: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1875: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1876: echo -e "Files:"
./mad.sh:1877: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1878: echo -e ""
./mad.sh:1879: echo -e ""
./mad.sh:1880: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1881: echo -e "_________________________________________________________________________"
./mad.sh:1882: echo -e "$maud_http"
./mad.sh:1883: echo -e ""
./mad.sh:1884: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1885: echo -e "_________________________________________________________________________"
--
./mad.sh:1874: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1875: echo -e "_________________________________________________________________________"
./mad.sh:1876: echo -e "$maud_torcurl"
./mad.sh:1877: echo -e ""
./mad.sh:1878: done
./mad.sh:1879: fi
./mad.sh:1880:}
./mad.sh:1881:madStatus() {
./mad.sh:1882: local InputFile="$1"
./mad.sh:1883: if [ "$arg1" == "status" ] ; then
./mad.sh:1884: clear
./mad.sh:1888: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1889: echo -e "_________________________________________________________________________"
./mad.sh:1890: echo -e "$maud_torcurl"
./mad.sh:1891: echo -e ""
./mad.sh:1892: done
./mad.sh:1893: fi
./mad.sh:1894:}
./mad.sh:1895:madStatus() {
./mad.sh:1896: local InputFile="$1"
./mad.sh:1897: if [ "$arg1" == "status" ] ; then
./mad.sh:1898: clear
--
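
The audit routine above first drops blank and comment-only lines (grep -n -vxE keeps the original line numbers), then greps what remains for http(s) URLs, for curl calls, and for tor_curl calls with 12 lines of trailing context; those three result blocks are what this report file is built from. The filter chain in isolation (file name is illustrative):

fil="./hosts/example.sh"
# strip blank lines and comment-only lines while keeping original line numbers
code_only() { grep -n -vxE '[[:blank:]]*([#].*)?' "$fil"; }
maud_http=$(code_only | grep --color='always' -Ei '(http|https):')
maud_curl=$(code_only | grep --color='always' -Ei 'curl')
maud_torcurl=$(code_only | grep -A 12 --color='always' -Ei 'tor_curl')
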
./mad.sh:3199: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3200: -H "Connection: keep-alive" \
./mad.sh:3201: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3202: "$download_url")
./mad.sh:3203: else
./mad.sh:3204: printf "| Retrieving Head: attempt #$j"
./mad.sh:3205: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3206: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3207: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3208: sleep 6
./mad.sh:3209: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3210: kill $! 2>/dev/null
./mad.sh:3211: )
./mad.sh:3212: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3213: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3214: fi
./mad.sh:3215: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3216: fi
./mad.sh:3213: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3214: -H "Connection: keep-alive" \
./mad.sh:3215: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3216: "$download_url")
./mad.sh:3217: else
./mad.sh:3218: printf "| Retrieving Head: attempt #$j"
./mad.sh:3219: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3220: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3221: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3222: sleep 6
./mad.sh:3223: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3224: kill $! 2>/dev/null
./mad.sh:3225: )
./mad.sh:3226: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3227: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3228: fi
./mad.sh:3229: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3230: fi
--
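
The "hack" branch above works around direct hosts whose HEAD requests hang: the header fetch is piped into a backgrounded tee, the subshell waits a fixed number of seconds, and the background job is killed so the command substitution returns with whatever headers arrived in time. A minimal sketch of that pattern (URL, temp path and wait time are illustrative):

tmp="./directhead.tmp"
rm -f "$tmp"
file_header=$(curl --insecure --head -H "Connection: keep-alive" -L -s -i "https://example.com/big.bin" |
  tee "$tmp" &        # stream any received headers into a temp file in the background
  sleep 6             # fixed window for the host to answer
  [ -s "$tmp" ]       # note whether anything arrived (exit status unused in this sketch)
  kill $! 2>/dev/null # kill the backgrounded tee so the capture returns even if curl is stuck
)
rm -f "$tmp"
echo "$file_header"
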
./mad.sh:3334: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3335: rc=$?
./mad.sh:3336: if [ $rc -ne 0 ] ; then
./mad.sh:3337: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3338: if [ -f ${file_path} ]; then
./mad.sh:3339: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3340: printf "\n\n"
./mad.sh:3341: rm -f "${file_path}"
./mad.sh:3342: else
./mad.sh:3343: printf "\n\n"
./mad.sh:3344: fi
./mad.sh:3350: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3351: rc=$?
./mad.sh:3352: if [ $rc -ne 0 ] ; then
./mad.sh:3353: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3354: if [ -f ${file_path} ]; then
./mad.sh:3355: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3356: printf "\n\n"
./mad.sh:3357: rm -f "${file_path}"
./mad.sh:3358: else
./mad.sh:3359: printf "\n\n"
./mad.sh:3360: fi
--
./mad.sh:3378: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3379: else
./mad.sh:3380: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3381: fi
./mad.sh:3382: received_file_size=0
./mad.sh:3383: if [ -f "$file_path" ] ; then
./mad.sh:3384: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3385: fi
./mad.sh:3386: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3387: containsHtml=false
./mad.sh:3388: else
./mad.sh:3389: containsHtml=true
./mad.sh:3390: fi
./mad.sh:3394: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3395: else
./mad.sh:3396: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3397: fi
./mad.sh:3398: received_file_size=0
./mad.sh:3399: if [ -f "$file_path" ] ; then
./mad.sh:3400: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3401: fi
./mad.sh:3402: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3403: containsHtml=false
./mad.sh:3404: else
./mad.sh:3405: containsHtml=true
./mad.sh:3406: fi
--
./mad.sh:3578: response=$(tor_curl_upload --insecure -i \
./mad.sh:3579: -H "Content-Type: multipart/form-data" \
./mad.sh:3580: -F "key=" \
./mad.sh:3581: -F "time=$jira_timeval" \
./mad.sh:3582: -F "file=@${filepath}" \
./mad.sh:3583: "${jira_PostUrlHost}")
./mad.sh:3584: else
./mad.sh:3585: response=$(tor_curl_upload --insecure -i \
./mad.sh:3586: -H "Content-Type: multipart/form-data" \
./mad.sh:3587: -F "key=" \
./mad.sh:3588: -F "time=$jira_timeval" \
./mad.sh:3589: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3590: "${jira_PostUrlHost}")
./mad.sh:3591: fi
./mad.sh:3592: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3593: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3594: fi
./mad.sh:3595: if grep -Eqi ' 200 ' <<< "${response}" ; then
./mad.sh:3594: response=$(tor_curl_upload --insecure -i \
./mad.sh:3595: -H "Content-Type: multipart/form-data" \
./mad.sh:3596: -F "key=" \
./mad.sh:3597: -F "time=$jira_timeval" \
./mad.sh:3598: -F "file=@${filepath}" \
./mad.sh:3599: "${jira_PostUrlHost}")
./mad.sh:3600: else
./mad.sh:3601: response=$(tor_curl_upload --insecure -i \
./mad.sh:3602: -H "Content-Type: multipart/form-data" \
./mad.sh:3603: -F "key=" \
./mad.sh:3604: -F "time=$jira_timeval" \
./mad.sh:3605: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3606: "${jira_PostUrlHost}")
./mad.sh:3607: fi
./mad.sh:3608: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3609: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3610: fi
./mad.sh:3611: if grep -Eqi ' 200 ' <<< "${response}" ; then
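
The generic upload above posts what looks like a Jirafeau-style form (empty key, an expiry/time field, the file itself) and treats any " 200 " in the captured output as success, which works because -i keeps the response status line. A minimal sketch of that call and check, with plain curl standing in for tor_curl_upload (host URL and expiry value are illustrative; curl supplies the multipart Content-Type and boundary itself):

filepath="archive.part1.rar"
jira_timeval="month"                       # expiry value sent alongside the file
post_url="https://example.host/script.php" # hypothetical upload endpoint
response=$(curl --insecure -i \
  -F "key=" \
  -F "time=${jira_timeval}" \
  -F "file=@${filepath}" \
  "$post_url")
if grep -Eqi ' 200 ' <<< "$response" ; then
  echo "upload accepted"
fi
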