# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host

# 2025.01.14 - [uwabaki] Add uwabaki.party as download host
# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor IPs (until an alternative is found)
# 2025.01.13 - [ocr_captcha] Create ImageMagick OCR function for testing without Tesseract
# 2025.01.13 - [anonfile, dailyuploads] Update OCR call to use Tesseract function
# 2025.01.13 - [up_anonfile] Modify to use new upload url
# 2025.01.12 - [ateasystems] Update 404 Not Found response
# 2025.01.11 - [mad] Update direct HEAD response handling
# 2025.01.11 - [ranoz] Add 404 Not Found handling on HEAD
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
Author: kittykat
Date: 2025-01-16 07:54:05 +00:00
Commit: eeb8054960 (parent 30eedaf567)
Signed by: kittykat, GPG key ID: E3F1556620F70C3C
29 changed files with 1951 additions and 634 deletions
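Context for the diffs below: the changed files shown here are generated audit reports (MAD Audit), so most hunks are line-number shifts in grep output rather than hand-written edits. The gagneux, ramsgaard, and euromussels hosts added in this commit are all Jirafeau instances, which the second diff configures with just two variables each. A minimal sketch of that pattern for a hypothetical new instance (the example.org URL, the "time=month" option, and the helper name are assumptions, not from this commit; Jirafeau's script.php conventionally returns the file hash on the first line of its response):

  # Hypothetical Jirafeau host definition, following the up_gagneux /
  # up_ramsgaard / up_euromussels pattern visible in the diff below.
  jira_PostUrlHost='https://files.example.org/script.php'       # upload endpoint (assumed host)
  jira_downloadLinkPrefix='https://files.example.org/f.php?h='  # download-link prefix (assumed host)

  upload_to_jirafeau() {  # assumed helper name; the repo inlines this per host
    local file_path="$1" response hash
    response=$(tor_curl_upload --insecure \
      -F "file=@${file_path}" \
      -F "time=month" \
      "$jira_PostUrlHost")
    # First response line is the file hash; a delete key follows on line two.
    hash=$(head -n 1 <<< "$response")
    [ -n "$hash" ] && echo "${jira_downloadLinkPrefix}${hash}"
  }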

View file

@@ -1,4 +1,4 @@
DateTime: 25.01.04
DateTime: 25.01.15
Files:
./hosts/1fichier.sh
@@ -19,12 +19,14 @@ Files:
./hosts/dataupload.sh
./hosts/dbree.sh
./hosts/depotkaz.sh
./hosts/desiupload.sh
./hosts/dictvm.sh
./hosts/discreetshare.sh
./hosts/dosya.sh
./hosts/downloadgg.sh
./hosts/eddowding.sh
./hosts/eternalhosting.sh
./hosts/euromussels.sh
./hosts/examples/ExampleNewHost.sh
./hosts/examples/up_example.sh
./hosts/familleflender.sh
@@ -37,6 +39,7 @@ Files:
./hosts/firestorage.sh
./hosts/free4e.sh
./hosts/freesocial.sh
./hosts/gagneux.sh
./hosts/gofile.sh
./hosts/harrault.sh
./hosts/herbolistique.sh
@@ -57,6 +60,7 @@ Files:
./hosts/oshi.sh
./hosts/pixeldrain.sh
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
@@ -90,14 +94,18 @@ Files:
./hosts/up_dictvm.sh
./hosts/up_dosya.sh
./hosts/up_eddowding.sh
./hosts/up_euromussels.sh
./hosts/up_familleflender.sh
./hosts/up_fileblade.sh
./hosts/up_fileditch.sh
./hosts/up_filehaus.sh
./hosts/up_fileland.sh
./hosts/up_filesquid.sh
./hosts/up_fireget.sh
./hosts/up_firestorage.sh
./hosts/up_free4e.sh
./hosts/up_freesocial.sh
./hosts/up_gagneux.sh
./hosts/up_gofile.sh
./hosts/up_harrault.sh
./hosts/up_herbolistique.sh
@@ -116,6 +124,7 @@ Files:
./hosts/up_oshi.sh
./hosts/up_pixeldrain.sh
./hosts/up_quax.sh
./hosts/up_ramsgaard.sh
./hosts/up_ranoz.sh
./hosts/up_shareonline.sh
./hosts/up_skrepr.sh
@@ -132,6 +141,7 @@ Files:
./hosts/up_uploadhive.sh
./hosts/up_uploadraja.sh
./hosts/up_yolobit.sh
./hosts/uwabaki.sh
./hosts/yolobit.sh
./hosts/youdbox.sh
./mad.sh
@@ -235,6 +245,14 @@ _________________________________________________________________________
./hosts/dataupload.sh:357: tor_curl_request --insecure \
./hosts/dataupload.sh:364: tor_curl_request --insecure \
./hosts/dataupload.sh:381: tor_curl_request --insecure \
./hosts/desiupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url")
./hosts/desiupload.sh:202: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/desiupload.sh:306: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/desiupload.sh:404: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/desiupload.sh:406: tor_curl_request --insecure \
./hosts/desiupload.sh:411: tor_curl_request --insecure \
./hosts/desiupload.sh:417: tor_curl_request --insecure \
./hosts/desiupload.sh:433: tor_curl_request --insecure \
./hosts/dosya.sh:108: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
./hosts/dosya.sh:113: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
@@ -255,8 +273,8 @@ _________________________________________________________________________
./hosts/examples/ExampleNewHost.sh:201: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
./hosts/examples/up_example.sh:112: response=$(tor_curl_upload --insecure \
./hosts/fileblade.sh:90: response=$(tor_curl_request --insecure -L -s -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" "$remote_url")
./hosts/fileblade.sh:164: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:266: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:165: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:281: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:335: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
./hosts/fileblade.sh:450: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/fileblade.sh:452: tor_curl_request --insecure -L \
@@ -299,13 +317,13 @@ _________________________________________________________________________
./hosts/innocent.sh:214: tor_curl_request_extended --insecure "$download_url" --output "$file_path"
./hosts/isupload.sh:90: response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url")
./hosts/isupload.sh:164: response=$(tor_curl_request_extended --insecure -L -s -X POST \
./hosts/isupload.sh:238: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
./hosts/isupload.sh:248: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:260: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:352: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:396: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:398: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./hosts/isupload.sh:247: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:251: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:263: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:355: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:399: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:401: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}")
./hosts/kraken.sh:169: down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}")
./hosts/kraken.sh:186: file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url")
@@ -333,12 +351,12 @@ _________________________________________________________________________
./hosts/quax.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/ranoz.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:259: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:261: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:265: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:157: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:264: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/ranoz.sh:266: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:285: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:275: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:290: tor_curl_request --insecure -L -G --no-alpn \
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./hosts/syspro.sh:188: tor_curl_request --insecure -L \
@@ -415,6 +433,8 @@ _________________________________________________________________________
./hosts/up_fileblade.sh:104: response=$(tor_curl_upload --insecure -i \
./hosts/up_fileditch.sh:107: response=$(tor_curl_upload --insecure -i -L \
./hosts/up_filehaus.sh:106: response=$(tor_curl_upload --insecure -i \
./hosts/up_fileland.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_fireget.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_firestorage.sh:113: response=$(tor_curl_upload --insecure -i \
./hosts/up_gofile.sh:102: response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers")
./hosts/up_gofile.sh:121: response=$(tor_curl_upload --insecure -i \
@@ -452,119 +472,119 @@ _________________________________________________________________________
./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
./mad.sh:107:UseTorCurlImpersonate=false
./mad.sh:398:tor_curl_request() {
./mad.sh:399: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:400: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:402: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:405:tor_curl_request_extended() {
./mad.sh:407: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:408: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:410: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:413:tor_curl_upload() {
./mad.sh:114:UseTorCurlImpersonate=false
./mad.sh:405:tor_curl_request() {
./mad.sh:406: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:409: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:412:tor_curl_request_extended() {
./mad.sh:414: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:416: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:418: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:422: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:424: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1368:install_curl_impersonate() {
./mad.sh:1370: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1371: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1375: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1376: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1380: PS3='Please select which curl_impersonate to install: '
./mad.sh:1388: install_curl_impersonate_lwthiker_orig
./mad.sh:1392: install_curl_impersonate_lexiforest_fork
./mad.sh:1402:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1406: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1407: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1410: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1413: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1415: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1418: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1428: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1430: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1433: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1435: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1483: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1512: echo -e "| Extracting curl_impersonate..."
./mad.sh:1514: rm -f "${ScriptDir}"/curl*
./mad.sh:1515: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1516: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1517: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1525:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1529: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1530: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1533: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1536: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1538: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1541: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1551: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1553: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1556: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1558: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1606: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1635: echo -e "| Extracting curl_impersonate..."
./mad.sh:1637: rm -f "${ScriptDir}"/curl*
./mad.sh:1638: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1639: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1640: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1802: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1810: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1811: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1820: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1822: echo -e "$maud_curl"
./mad.sh:1824: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1826: echo -e "$maud_torcurl"
./mad.sh:1838: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1839: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1848: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1850: echo -e "$maud_curl"
./mad.sh:1852: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1854: echo -e "$maud_torcurl"
./mad.sh:1860: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1861: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1870: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1872: echo -e "$maud_curl"
./mad.sh:1874: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1876: echo -e "$maud_torcurl"
./mad.sh:2823: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2824: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2826: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:2998: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2999: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3001: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3199: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3206: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3334: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3378: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3380: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3578: response=$(tor_curl_upload --insecure -i \
./mad.sh:3585: response=$(tor_curl_upload --insecure -i \
./mad.sh:3656:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3657: curl_impersonate=()
./mad.sh:3658: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3659: bFoundCurlHeader=false
./mad.sh:3663: curl_impersonate=($fil)
./mad.sh:3664: bFoundCurlHeader=true
./mad.sh:3668: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3669: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3672: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3675: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3677: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3681: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3682: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3685: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3687: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3688: UseTorCurlImpersonate=false
./mad.sh:3689: install_curl_impersonate
./mad.sh:3773: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3774: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3852:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3853: install_curl_impersonate
./mad.sh:3884:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3885: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3887: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:420:tor_curl_upload() {
./mad.sh:421: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:423: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:425: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:429: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:431: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:1382:install_curl_impersonate() {
./mad.sh:1384: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
./mad.sh:1385: echo -e "- Currently uses curl v8.1.1."
./mad.sh:1389: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1390: echo -e "+ Currently uses curl v8.7.1"
./mad.sh:1394: PS3='Please select which curl_impersonate to install: '
./mad.sh:1402: install_curl_impersonate_lwthiker_orig
./mad.sh:1406: install_curl_impersonate_lexiforest_fork
./mad.sh:1416:install_curl_impersonate_lwthiker_orig() {
./mad.sh:1420: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
./mad.sh:1421: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
./mad.sh:1424: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
./mad.sh:1427: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1429: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1432: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1442: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1444: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1447: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1449: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1497: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1526: echo -e "| Extracting curl_impersonate..."
./mad.sh:1528: rm -f "${ScriptDir}"/curl*
./mad.sh:1529: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
./mad.sh:1530: mv "$extract_location/curl_ff109" "${ScriptDir}/"
./mad.sh:1531: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1539:install_curl_impersonate_lexiforest_fork() {
./mad.sh:1543: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
./mad.sh:1544: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
./mad.sh:1547: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
./mad.sh:1550: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1552: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1555: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1565: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
./mad.sh:1567: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1570: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1572: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1620: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1649: echo -e "| Extracting curl_impersonate..."
./mad.sh:1651: rm -f "${ScriptDir}"/curl*
./mad.sh:1652: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
./mad.sh:1653: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
./mad.sh:1654: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
./mad.sh:1816: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
./mad.sh:1824: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1825: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1834: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1836: echo -e "$maud_curl"
./mad.sh:1838: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1840: echo -e "$maud_torcurl"
./mad.sh:1852: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1853: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1862: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1864: echo -e "$maud_curl"
./mad.sh:1866: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1868: echo -e "$maud_torcurl"
./mad.sh:1874: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1875: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1884: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1886: echo -e "$maud_curl"
./mad.sh:1888: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1890: echo -e "$maud_torcurl"
./mad.sh:2837: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:2838: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:2840: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3012: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3013: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3015: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./mad.sh:3213: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3220: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3350: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3394: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3396: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3594: response=$(tor_curl_upload --insecure -i \
./mad.sh:3601: response=$(tor_curl_upload --insecure -i \
./mad.sh:3672:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3673: curl_impersonate=()
./mad.sh:3674: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
./mad.sh:3675: bFoundCurlHeader=false
./mad.sh:3679: curl_impersonate=($fil)
./mad.sh:3680: bFoundCurlHeader=true
./mad.sh:3684: if [ "$bFoundCurlHeader" == "false" ]; then
./mad.sh:3685: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
./mad.sh:3688: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
./mad.sh:3691: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
./mad.sh:3693: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
./mad.sh:3697: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
./mad.sh:3698: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
./mad.sh:3701: echo -e "run $0 install_curl_impersonate\\n"
./mad.sh:3703: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
./mad.sh:3704: UseTorCurlImpersonate=false
./mad.sh:3705: install_curl_impersonate
./mad.sh:3789: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
./mad.sh:3790: printf " %s install_curl_impersonate\\n" "$0"
./mad.sh:3868:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
./mad.sh:3869: install_curl_impersonate
./mad.sh:3900:if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:3901: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
./mad.sh:3903: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
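For orientation on the tor_curl_* calls listed above: every request in mad.sh goes through a small family of wrapper functions that choose between plain curl and curl-impersonate, always proxied over Tor. Reassembled from the grep excerpts (an abridged sketch, not a verbatim copy; tor_curl_request_extended only swaps in a randomized $randomtimeout, and tor_curl_upload adds --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval plus a fixed Firefox header set on the plain-curl path):

  tor_curl_request() {
    if [ "${UseTorCurlImpersonate}" == "true" ]; then
      # curl_impersonate is an array holding the wrapper binary found next to the script
      "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
        --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
    else
      curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
        --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
    fi
  }

Using ${tor_identity} as the SOCKS username is Tor's standard stream-isolation mechanism: requests carrying different usernames are placed on different circuits, and socks5h:// resolves DNS through the proxy rather than locally.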

View file

@@ -1,4 +1,4 @@
DateTime: 25.01.04
DateTime: 25.01.15
Files:
./hosts/1fichier.sh
@@ -19,12 +19,14 @@ Files:
./hosts/dataupload.sh
./hosts/dbree.sh
./hosts/depotkaz.sh
./hosts/desiupload.sh
./hosts/dictvm.sh
./hosts/discreetshare.sh
./hosts/dosya.sh
./hosts/downloadgg.sh
./hosts/eddowding.sh
./hosts/eternalhosting.sh
./hosts/euromussels.sh
./hosts/examples/ExampleNewHost.sh
./hosts/examples/up_example.sh
./hosts/familleflender.sh
@@ -37,6 +39,7 @@ Files:
./hosts/firestorage.sh
./hosts/free4e.sh
./hosts/freesocial.sh
./hosts/gagneux.sh
./hosts/gofile.sh
./hosts/harrault.sh
./hosts/herbolistique.sh
@@ -57,6 +60,7 @@ Files:
./hosts/oshi.sh
./hosts/pixeldrain.sh
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
@@ -90,14 +94,18 @@ Files:
./hosts/up_dictvm.sh
./hosts/up_dosya.sh
./hosts/up_eddowding.sh
./hosts/up_euromussels.sh
./hosts/up_familleflender.sh
./hosts/up_fileblade.sh
./hosts/up_fileditch.sh
./hosts/up_filehaus.sh
./hosts/up_fileland.sh
./hosts/up_filesquid.sh
./hosts/up_fireget.sh
./hosts/up_firestorage.sh
./hosts/up_free4e.sh
./hosts/up_freesocial.sh
./hosts/up_gagneux.sh
./hosts/up_gofile.sh
./hosts/up_harrault.sh
./hosts/up_herbolistique.sh
@@ -116,6 +124,7 @@ Files:
./hosts/up_oshi.sh
./hosts/up_pixeldrain.sh
./hosts/up_quax.sh
./hosts/up_ramsgaard.sh
./hosts/up_ranoz.sh
./hosts/up_shareonline.sh
./hosts/up_skrepr.sh
@@ -132,6 +141,7 @@ Files:
./hosts/up_uploadhive.sh
./hosts/up_uploadraja.sh
./hosts/up_yolobit.sh
./hosts/uwabaki.sh
./hosts/yolobit.sh
./hosts/youdbox.sh
./mad.sh
@@ -181,9 +191,9 @@ _________________________________________________________________________
./hosts/downloadgg.sh:297: -H "Origin: https://download.gg" \
./hosts/eternalhosting.sh:36: if grep -Eqi '\.onion' <<< "$pUrlMod" && grep -Eqi 'https://' <<< "$pUrlMod" ; then
./hosts/examples/up_example.sh:105: local ar_HUP[0]='https://oshi.at'
./hosts/fileblade.sh:310: if ! grep -Eqi '<a href="https://de6.fileblade.com/files/' <<< "$response"; then
./hosts/fileblade.sh:322: download_url=$(grep -oP -m 1 '(?<=a href="https://de6.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
./hosts/fileblade.sh:324: download_url='https://de6.fileblade.com/files/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/fileblade.sh:310: if ! grep -oPi '(?=href="https://.*?\.fileblade.com/files/.*?" class=.*$)' <<< "$response"; then
./hosts/fileblade.sh:322: download_url=$(grep -oP -m 1 '(?<=a href="https://).*?(?=\.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
./hosts/fileblade.sh:324: download_url='https://'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/filedot.sh:119: "https://filedot.to/login.html")
./hosts/filedot.sh:160: -H "Origin: https://filedot.to" \
./hosts/filedot.sh:162: -H "Referer: https://filedot.to/login.html" \
@@ -206,9 +216,9 @@ _________________________________________________________________________
./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
./hosts/innocent.sh:48: download_url="${download_url/https:/http:}"
./hosts/isupload.sh:133: post_action="${remote_url//https:/http:}"
./hosts/isupload.sh:208: if ! grep -Eqi '<a href="http://isupload.com/cgi-bin/dl.cgi/' <<< "$response"; then
./hosts/isupload.sh:220: download_url=$(grep -oP -m 1 '(?<=a href="http://isupload.com/cgi-bin/dl.cgi/).*?(?=">.*$)' <<< "$response")
./hosts/isupload.sh:222: download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/isupload.sh:211: if ! grep -Eqi '<a href="http://isupload.com/cgi-bin/dl.cgi/' <<< "$response"; then
./hosts/isupload.sh:223: download_url=$(grep -oP -m 1 '(?<=a href="http://isupload.com/cgi-bin/dl.cgi/).*?(?=">.*$)' <<< "$response")
./hosts/isupload.sh:225: download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/kraken.sh:155: kraken_action="https://krakenfiles.com/download/${kraken_action##*/}"
./hosts/nippy.sh:160: download_url="https:"$(grep -oP '(?<=<h2><a href='\'').*(?='\'' class=)' <<< "$response")
./hosts/nippy.sh:229: cdn_url="https:"$(grep -oP '(?<=location: ).*$' <<< "$file_header")
@@ -236,7 +246,7 @@ _________________________________________________________________________
./hosts/up_acid.sh:40: jira_downloadLinkPrefix='https://dl.acid.fr/f.php?h='
./hosts/up_anarchaserver.sh:37: jira_PostUrlHost='https://transitional.anarchaserver.org/jirafeau/script.php'
./hosts/up_anarchaserver.sh:40: jira_downloadLinkPrefix='https://transitional.anarchaserver.org/jirafeau/f.php?h='
./hosts/up_anonfile.sh:99: PostUrlHost='https://anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_anonfile.sh:99: PostUrlHost='https://file-01.anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_anonfile.sh:121: downloadLink="https://anonfile.de/$hash"
./hosts/up_anonsharing.sh:99: PostUrlHost='https://anonsharing.com/ajax/file_upload_handler?r=anonsharing.com'
./hosts/up_anonsharing.sh:109: if grep -Eqi '"error":null,"url":"https:\\/\\/anonsharing.com\\/' <<< "${response}" ; then
@@ -282,6 +292,8 @@ _________________________________________________________________________
./hosts/up_dosya.sh:100: local ar_HUP[1]='https://dl3.lim1.dosyaupload.com/core/page/ajax/file_upload_handler.ajax.php?r=www.dosyaupload.com'
./hosts/up_eddowding.sh:37: jira_PostUrlHost='https://files.eddowding.com/script.php'
./hosts/up_eddowding.sh:40: jira_downloadLinkPrefix='https://files.eddowding.com/f.php?h='
./hosts/up_euromussels.sh:37: jira_PostUrlHost='https://uploads.euromussels.eu/script.php'
./hosts/up_euromussels.sh:40: jira_downloadLinkPrefix='https://uploads.euromussels.eu/f.php?h='
./hosts/up_familleflender.sh:37: jira_PostUrlHost='https://famille-flender.fr/jirafeau/script.php'
./hosts/up_familleflender.sh:40: jira_downloadLinkPrefix='https://famille-flender.fr/f.php?h='
./hosts/up_fileblade.sh:97: local ar_HUP[0]="https://de6.fileblade.com/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon"
@@ -291,8 +303,14 @@ _________________________________________________________________________
./hosts/up_filehaus.sh:114: if grep -Eqi 'HTTP/.* 200|https://cdn' <<< "${response}" ; then
./hosts/up_filehaus.sh:115: url=$(grep -oPi '(?<=https://).*(?=\.filehaus\.su).*?(?=$)' <<< "$response")
./hosts/up_filehaus.sh:117: downloadLink="https://${url}"
./hosts/up_fileland.sh:99: PostUrlHost='https://fs300.fileland.io/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_fileland.sh:121: downloadLink="https://fileland.io/$hash"
./hosts/up_filesquid.sh:37: jira_PostUrlHost='https://filesquid.net/script.php'
./hosts/up_filesquid.sh:40: jira_downloadLinkPrefix='https://filesquid.net/f.php?h='
./hosts/up_fireget.sh:99: PostUrlHost='https://s22.fireget.com/cgi-bin/upload.cgi'
./hosts/up_fireget.sh:118: if grep -Eqi 'Location: https://fireget.com/\?&fn=' <<< "${response}" ; then
./hosts/up_fireget.sh:119: hash=$(grep -oPi -m 1 '(?<=Location: https://fireget.com/\?&fn=).*?(?=&st=OK.*$)' <<< "$response")
./hosts/up_fireget.sh:121: downloadLink="https://fireget.com/$hash"
./hosts/up_firestorage.sh:99: local ar_HUP[0]='https://server65.firestorage.jp/upload.cgi'
./hosts/up_firestorage.sh:100: local ar_HUP[1]='https://server62.firestorage.jp/upload.cgi'
./hosts/up_firestorage.sh:101: local ar_HUP[2]='https://server39.firestorage.jp/upload.cgi'
@@ -306,6 +324,8 @@ _________________________________________________________________________
./hosts/up_free4e.sh:40: jira_downloadLinkPrefix='https://send.free4e.com/f.php?h='
./hosts/up_freesocial.sh:37: jira_PostUrlHost='https://files.freesocial.co/script.php'
./hosts/up_freesocial.sh:40: jira_downloadLinkPrefix='https://files.freesocial.co/f.php?h='
./hosts/up_gagneux.sh:37: jira_PostUrlHost='https://fichier.gagneux.info/script.php'
./hosts/up_gagneux.sh:40: jira_downloadLinkPrefix='https://fichier.gagneux.info/f.php?h='
./hosts/up_gofile.sh:102: response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers")
./hosts/up_gofile.sh:114: local ar_HUP[0]="https://$gofileStoreServer.gofile.io/contents/uploadFile"
./hosts/up_gofile.sh:138: hash=$(grep -oPi '(?<=https://gofile.io/d/).*?(?=")' <<< "$response")
@@ -367,6 +387,8 @@ _________________________________________________________________________
./hosts/up_pixeldrain.sh:107: PostUrlHost='https://pixeldrain.com/api/file/'
./hosts/up_pixeldrain.sh:136: downloadLink="https://pixeldrain.com/u/${hash}"
./hosts/up_quax.sh:99: PostUrlHost='https://qu.ax/upload.php'
./hosts/up_ramsgaard.sh:37: jira_PostUrlHost='https://data.ramsgaard.me/script.php'
./hosts/up_ramsgaard.sh:40: jira_downloadLinkPrefix='https://data.ramsgaard.me/f.php?h='
./hosts/up_ranoz.sh:99: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
./hosts/up_ranoz.sh:111: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_shareonline.sh:99: PostUrlHost='https://ns07.zipcluster.com/upload.php'
@@ -403,75 +425,75 @@ _________________________________________________________________________
./hosts/up_uploadraja.sh:99: PostUrlHost='https://awsaisiaposisition69.kalpstudio.xyz/cgi-bin/upload.cgi?upload_type=file&utype=anon'
./hosts/up_uploadraja.sh:119: downloadLink="https://uploadraja.com/$hash"
./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
./mad.sh:672: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:674: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:677: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:679: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:700: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:702: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:705: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:707: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:728: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:730: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:733: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:735: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:757: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:759: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:762: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:764: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:785: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #http (if changed)
./mad.sh:787: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #direct url https
./mad.sh:790: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:792: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:818: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:820: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:840: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:861: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:863: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:866: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:868: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:884: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:886: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:889: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:891: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:910: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:912: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:915: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:917: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:937: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:939: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:942: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:944: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:962: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:964: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:967: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:969: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:988: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:990: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:993: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:995: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1413: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1430: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1536: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1553: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1816: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1844: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1866: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3182: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3695:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3792: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3794: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3796: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4015: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4016: remote_url=${remote_url/http:/https:}
./mad.sh:4037: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4038: remote_url=${remote_url/http:/https:}
./mad.sh:4404: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4405: remote_url=${remote_url/http:/https:}
./mad.sh:4463: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4464: remote_url=${remote_url/http:/https:}
./mad.sh:4489: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4490: remote_url=${remote_url/http:/https:}
./mad.sh:683: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:685: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:688: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:690: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:711: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:713: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:716: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:718: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:739: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:741: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:744: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:746: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:768: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:770: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:773: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:775: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:799: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:801: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:804: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:806: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:832: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:834: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:854: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:875: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:877: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:880: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:882: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:898: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:900: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:903: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:905: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:924: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:926: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:929: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:931: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:951: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:953: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:956: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:958: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:976: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:978: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:981: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:983: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1002: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1004: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1007: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1009: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1427: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1444: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1550: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1567: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1830: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1858: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1880: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3196: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3711:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3808: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3810: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3812: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4031: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4032: remote_url=${remote_url/http:/https:}
./mad.sh:4053: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4054: remote_url=${remote_url/http:/https:}
./mad.sh:4420: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4421: remote_url=${remote_url/http:/https:}
./mad.sh:4479: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4480: remote_url=${remote_url/http:/https:}
./mad.sh:4505: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4506: remote_url=${remote_url/http:/https:}
./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
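The long run of sed lines above is one idiom repeated once per status tag: '>' (or '%') serves as the s/// delimiter so URLs need no slash-escaping, ^${url/https:/http:} re-matches the input line even after the script has upgraded its scheme to https, and the '&' back-reference prefixed with '#' comments the matched line out in ${InputFile} while appending the tag. Factored into a single hypothetical helper (mad.sh actually inlines every call, as the excerpts show):

  mark_url() {
    local url="$1" tag="$2"   # e.g. tag='#OK# file.rar' or '#RETRY# (No CDN found)'
    # '&' reproduces the whole matched line; the leading '#' comments it out.
    sed -i -e "s>^${url/https:/http:}.*>#& ${tag}>g" "${InputFile}"
    sed -i -e "s>^direct=${url/https:/http:}.*>#& ${tag}>g" "${InputFile}"
  }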

View file

@@ -1,4 +1,4 @@
DateTime: 25.01.04
DateTime: 25.01.15
Files:
./hosts/1fichier.sh
@@ -19,12 +19,14 @@ Files:
./hosts/dataupload.sh
./hosts/dbree.sh
./hosts/depotkaz.sh
./hosts/desiupload.sh
./hosts/dictvm.sh
./hosts/discreetshare.sh
./hosts/dosya.sh
./hosts/downloadgg.sh
./hosts/eddowding.sh
./hosts/eternalhosting.sh
./hosts/euromussels.sh
./hosts/examples/ExampleNewHost.sh
./hosts/examples/up_example.sh
./hosts/familleflender.sh
@@ -37,6 +39,7 @@ Files:
./hosts/firestorage.sh
./hosts/free4e.sh
./hosts/freesocial.sh
./hosts/gagneux.sh
./hosts/gofile.sh
./hosts/harrault.sh
./hosts/herbolistique.sh
@@ -57,6 +60,7 @@ Files:
./hosts/oshi.sh
./hosts/pixeldrain.sh
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
@ -90,14 +94,18 @@ Files:
./hosts/up_dictvm.sh
./hosts/up_dosya.sh
./hosts/up_eddowding.sh
./hosts/up_euromussels.sh
./hosts/up_familleflender.sh
./hosts/up_fileblade.sh
./hosts/up_fileditch.sh
./hosts/up_filehaus.sh
./hosts/up_fileland.sh
./hosts/up_filesquid.sh
./hosts/up_fireget.sh
./hosts/up_firestorage.sh
./hosts/up_free4e.sh
./hosts/up_freesocial.sh
./hosts/up_gagneux.sh
./hosts/up_gofile.sh
./hosts/up_harrault.sh
./hosts/up_herbolistique.sh
@ -116,6 +124,7 @@ Files:
./hosts/up_oshi.sh
./hosts/up_pixeldrain.sh
./hosts/up_quax.sh
./hosts/up_ramsgaard.sh
./hosts/up_ranoz.sh
./hosts/up_shareonline.sh
./hosts/up_skrepr.sh
@ -132,6 +141,7 @@ Files:
./hosts/up_uploadhive.sh
./hosts/up_uploadraja.sh
./hosts/up_yolobit.sh
./hosts/uwabaki.sh
./hosts/yolobit.sh
./hosts/youdbox.sh
./mad.sh
@ -303,7 +313,7 @@ _________________________________________________________________________
./hosts/anonfile.sh:196: printf "\\n"
--
./hosts/anonfile.sh:240: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
./hosts/anonfile.sh:241: captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/anonfile.sh:241: captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/anonfile.sh:242: if [ "${DebugPluginsEnabled}" == "true" ]; then
./hosts/anonfile.sh:243: printf "\\n"
./hosts/anonfile.sh:244: echo -e "$captcha_ocr_output"
@ -745,7 +755,7 @@ _________________________________________________________________________
./hosts/dailyuploads.sh:107: echo -e "${RED}| Failed to extract download link [1].${NC}"
--
./hosts/dailyuploads.sh:139: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
./hosts/dailyuploads.sh:140: captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/dailyuploads.sh:140: captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
./hosts/dailyuploads.sh:141: if [ "${DebugPluginsEnabled}" == "true" ]; then
./hosts/dailyuploads.sh:142: printf "\\n"
./hosts/dailyuploads.sh:143: echo -e "$captcha_ocr_output"
@ -973,6 +983,77 @@ _________________________________________________________________________
./hosts/dataupload.sh:390: -H "Upgrade-Insecure-Requests: 1" \
./hosts/dataupload.sh:391: -H "Sec-Fetch-Dest: document" \
--
./hosts/desiupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url")
./hosts/desiupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/desiupload.sh:92: debugHtml "${remote_url##*/}" "desi_fetch$i" "${response}"
./hosts/desiupload.sh:93: fi
./hosts/desiupload.sh:94: if [[ -z $response ]] ; then
./hosts/desiupload.sh:95: rm -f "${desi_cookie_jar}";
./hosts/desiupload.sh:96: if [ $i == $maxfetchretries ] ; then
./hosts/desiupload.sh:97: printf "\\n"
./hosts/desiupload.sh:98: echo -e "${RED}| Failed to extract download link [1]${NC}"
./hosts/desiupload.sh:99: warnAndRetryUnknownError=true
./hosts/desiupload.sh:100: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/desiupload.sh:202: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/desiupload.sh:203: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:204: --data "$form_data" "$remote_url")
./hosts/desiupload.sh:205: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/desiupload.sh:206: debugHtml "${remote_url##*/}" "desi_post_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/desiupload.sh:207: fi
./hosts/desiupload.sh:208: if [[ -z $response ]] ; then
./hosts/desiupload.sh:209: if [ $i == $maxfetchretries ] ; then
./hosts/desiupload.sh:210: rm -f "${desi_cookie_jar}";
./hosts/desiupload.sh:211: printf "\\n"
./hosts/desiupload.sh:212: echo -e "${RED}| Failed to extract download link [7]${NC}"
--
./hosts/desiupload.sh:306: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
./hosts/desiupload.sh:307: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/desiupload.sh:308: debugHtml "${remote_url##*/}" "desi_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/desiupload.sh:309: fi
./hosts/desiupload.sh:310: if [[ -z $file_header ]] ; then
./hosts/desiupload.sh:311: if [ $j == $maxfetchretries ] ; then
./hosts/desiupload.sh:312: rm -f "${desi_cookie_jar}";
./hosts/desiupload.sh:313: printf "\\n"
./hosts/desiupload.sh:314: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/desiupload.sh:315: warnAndRetryUnknownError=true
./hosts/desiupload.sh:316: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/desiupload.sh:406: tor_curl_request --insecure \
./hosts/desiupload.sh:407: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/desiupload.sh:408: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:409: "$download_url" --continue-at - --output "$file_path"
./hosts/desiupload.sh:410: else
./hosts/desiupload.sh:411: tor_curl_request --insecure \
./hosts/desiupload.sh:412: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:413: "$download_url" --continue-at - --output "$file_path"
./hosts/desiupload.sh:414: fi
./hosts/desiupload.sh:415: else
./hosts/desiupload.sh:416: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/desiupload.sh:417: tor_curl_request --insecure \
./hosts/desiupload.sh:418: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/desiupload.sh:419: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:420: -H "User-Agent: $RandomUA" \
./hosts/desiupload.sh:421: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/desiupload.sh:422: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/desiupload.sh:423: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/desiupload.sh:424: -H "Connection: keep-alive" \
./hosts/desiupload.sh:425: -H "Cookie: lng=eng" \
./hosts/desiupload.sh:426: -H "Upgrade-Insecure-Requests: 1" \
./hosts/desiupload.sh:427: -H "Sec-Fetch-Dest: document" \
--
./hosts/desiupload.sh:433: tor_curl_request --insecure \
./hosts/desiupload.sh:434: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
./hosts/desiupload.sh:435: -H "User-Agent: $RandomUA" \
./hosts/desiupload.sh:436: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/desiupload.sh:437: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/desiupload.sh:438: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/desiupload.sh:439: -H "Connection: keep-alive" \
./hosts/desiupload.sh:440: -H "Cookie: lng=eng" \
./hosts/desiupload.sh:441: -H "Upgrade-Insecure-Requests: 1" \
./hosts/desiupload.sh:442: -H "Sec-Fetch-Dest: document" \
./hosts/desiupload.sh:443: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
./hosts/dosya.sh:110: -c "${dosya_cookie_jar}" \
./hosts/dosya.sh:111: "${remote_url}")
@ -1147,29 +1228,29 @@ _________________________________________________________________________
./hosts/fileblade.sh:99: warnAndRetryUnknownError=true
./hosts/fileblade.sh:100: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/fileblade.sh:164: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:165: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:166: --data "$form_data" "$post_action")
./hosts/fileblade.sh:167: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:168: debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:169: fi
./hosts/fileblade.sh:170: if [[ -z $response ]] ; then
./hosts/fileblade.sh:171: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:172: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:173: printf "\\n"
./hosts/fileblade.sh:174: echo -e "${RED}| Failed to extract download link [3]${NC}"
./hosts/fileblade.sh:165: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:166: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:167: --data "$form_data" "$post_action")
./hosts/fileblade.sh:168: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:169: debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:170: fi
./hosts/fileblade.sh:171: if [[ -z $response ]] ; then
./hosts/fileblade.sh:172: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:173: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:174: printf "\\n"
./hosts/fileblade.sh:175: echo -e "${RED}| Failed to extract download link [3]${NC}"
--
./hosts/fileblade.sh:266: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:267: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:268: --data "$form_data" "$post_action")
./hosts/fileblade.sh:269: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:270: debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:271: fi
./hosts/fileblade.sh:272: if [[ -z $response ]] ; then
./hosts/fileblade.sh:273: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:274: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:275: printf "\\n"
./hosts/fileblade.sh:276: echo -e "${RED}| Failed to extract download link [4].${NC}"
./hosts/fileblade.sh:281: response=$(tor_curl_request --insecure -L -s -X POST \
./hosts/fileblade.sh:282: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
./hosts/fileblade.sh:283: --data "$form_data" "$post_action")
./hosts/fileblade.sh:284: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/fileblade.sh:285: debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
./hosts/fileblade.sh:286: fi
./hosts/fileblade.sh:287: if [[ -z $response ]] ; then
./hosts/fileblade.sh:288: if [ $i == $maxfetchretries ] ; then
./hosts/fileblade.sh:289: rm -f "${fb_cookie_jar}";
./hosts/fileblade.sh:290: printf "\\n"
./hosts/fileblade.sh:291: echo -e "${RED}| Failed to extract download link [4].${NC}"
--
./hosts/fileblade.sh:335: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
./hosts/fileblade.sh:336: if [ "${DebugAllEnabled}" == "true" ] ; then
@ -1565,65 +1646,65 @@ _________________________________________________________________________
./hosts/isupload.sh:173: printf "\\n"
./hosts/isupload.sh:174: echo -e "${RED}| Failed to extract download link [3].${NC}"
--
./hosts/isupload.sh:238: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:239: elif ((j % 2 == 0)); then
./hosts/isupload.sh:240: printf "| Retrieving Head (Get): attempt #$j"
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./hosts/isupload.sh:242: -H "Connection: keep-alive" \
./hosts/isupload.sh:243: -w 'EffectiveUrl=%{url_effective}' \
./hosts/isupload.sh:244: "$download_url")
./hosts/isupload.sh:245: elif ((j % 3 == 0)); then
./hosts/isupload.sh:246: printf "| Retrieving Head (hack): attempt #$j"
./hosts/isupload.sh:247: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:248: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:249: tee "${WorkDir}/.temp/directhead" &
./hosts/isupload.sh:250: sleep 6
./hosts/isupload.sh:251: [ -s "${WorkDir}/.temp/directhead" ]
./hosts/isupload.sh:252: kill $! 2>/dev/null
./hosts/isupload.sh:253: )
./hosts/isupload.sh:254: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./hosts/isupload.sh:255: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./hosts/isupload.sh:256: fi
./hosts/isupload.sh:257: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:258: else
./hosts/isupload.sh:245: elif ((j % 2 == 0)); then
./hosts/isupload.sh:246: printf "| Retrieving Head: attempt #$j"
./hosts/isupload.sh:247: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
./hosts/isupload.sh:248: elif ((j % 3 == 0)); then
./hosts/isupload.sh:249: printf "| Retrieving Head (hack): attempt #$j"
./hosts/isupload.sh:250: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:251: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./hosts/isupload.sh:252: tee "${WorkDir}/.temp/directhead" &
./hosts/isupload.sh:253: sleep 6
./hosts/isupload.sh:254: [ -s "${WorkDir}/.temp/directhead" ]
./hosts/isupload.sh:255: kill $! 2>/dev/null
./hosts/isupload.sh:256: )
./hosts/isupload.sh:257: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./hosts/isupload.sh:258: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./hosts/isupload.sh:259: fi
./hosts/isupload.sh:260: rm -f "${WorkDir}/.temp/directhead"
./hosts/isupload.sh:261: else
--
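
Note: the "Head (hack)" branch above handles servers that accept a HEAD
request but never close the connection: the curl pipeline is backgrounded
while tee copies arriving headers into a scratch file, the subshell sleeps 6
seconds, and kill then reaps the pipeline, so whatever headers arrived in time
become the captured result. A reduced sketch (url and scratch path are
hypothetical):

  rm -f /tmp/directhead
  file_header=$(curl --head -L -s -i "https://example.com/file" |
    tee /tmp/directhead &
    sleep 6
    [ -s /tmp/directhead ]
    kill $! 2>/dev/null
  )
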
./hosts/isupload.sh:260: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:261: fi
./hosts/isupload.sh:262: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/isupload.sh:263: debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/isupload.sh:263: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
./hosts/isupload.sh:264: fi
./hosts/isupload.sh:265: if [ ! -z "$file_header" ] ; then
./hosts/isupload.sh:266: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
./hosts/isupload.sh:267: printf "\\n"
./hosts/isupload.sh:268: echo -e "${RED}| Not Found (404). The file has been removed.${NC}"
./hosts/isupload.sh:269: removedDownload "${remote_url}"
./hosts/isupload.sh:270: exitDownloadNotAvailable=true
./hosts/isupload.sh:265: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/isupload.sh:266: debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/isupload.sh:267: fi
./hosts/isupload.sh:268: if [ ! -z "$file_header" ] ; then
./hosts/isupload.sh:269: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
./hosts/isupload.sh:270: printf "\\n"
./hosts/isupload.sh:271: echo -e "${RED}| Not Found (404). The file has been removed.${NC}"
./hosts/isupload.sh:272: removedDownload "${remote_url}"
./hosts/isupload.sh:273: exitDownloadNotAvailable=true
--
./hosts/isupload.sh:352: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:353: rc=$?
./hosts/isupload.sh:354: if [ $rc -ne 0 ] ; then
./hosts/isupload.sh:355: printf "${RED}Download Failed (bad exit status).${NC}"
./hosts/isupload.sh:356: if [ -f ${file_path} ]; then
./hosts/isupload.sh:357: printf "${YELLOW} Partial removed...${NC}"
./hosts/isupload.sh:358: printf "\n\n"
./hosts/isupload.sh:359: rm -f "${file_path}"
./hosts/isupload.sh:360: else
./hosts/isupload.sh:355: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
./hosts/isupload.sh:356: rc=$?
./hosts/isupload.sh:357: if [ $rc -ne 0 ] ; then
./hosts/isupload.sh:358: printf "${RED}Download Failed (bad exit status).${NC}"
./hosts/isupload.sh:359: if [ -f ${file_path} ]; then
./hosts/isupload.sh:360: printf "${YELLOW} Partial removed...${NC}"
./hosts/isupload.sh:361: printf "\n\n"
./hosts/isupload.sh:362: fi
./hosts/isupload.sh:362: rm -f "${file_path}"
./hosts/isupload.sh:363: else
./hosts/isupload.sh:364: printf "\n\n"
./hosts/isupload.sh:365: fi
--
./hosts/isupload.sh:396: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:397: else
./hosts/isupload.sh:398: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:399: fi
./hosts/isupload.sh:400: received_file_size=0
./hosts/isupload.sh:401: if [ -f "$file_path" ] ; then
./hosts/isupload.sh:402: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/isupload.sh:403: fi
./hosts/isupload.sh:404: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./hosts/isupload.sh:405: containsHtml=false
./hosts/isupload.sh:406: else
./hosts/isupload.sh:407: containsHtml=true
./hosts/isupload.sh:408: fi
./hosts/isupload.sh:399: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:400: else
./hosts/isupload.sh:401: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
./hosts/isupload.sh:402: fi
./hosts/isupload.sh:403: received_file_size=0
./hosts/isupload.sh:404: if [ -f "$file_path" ] ; then
./hosts/isupload.sh:405: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./hosts/isupload.sh:406: fi
./hosts/isupload.sh:407: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./hosts/isupload.sh:408: containsHtml=false
./hosts/isupload.sh:409: else
./hosts/isupload.sh:410: containsHtml=true
./hosts/isupload.sh:411: fi
--
./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}")
./hosts/kraken.sh:105: if [ "${DebugAllEnabled}" == "true" ] ; then
@ -1871,50 +1952,50 @@ _________________________________________________________________________
./hosts/ranoz.sh:99: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" ""
--
./hosts/ranoz.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:151: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:152: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:153: fi
./hosts/ranoz.sh:154: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:155: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:156: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:157: printf "\\n"
./hosts/ranoz.sh:158: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:159: warnAndRetryUnknownError=true
./hosts/ranoz.sh:160: if [ "${finalAttempt}" == "true" ] ; then
./hosts/ranoz.sh:157: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
./hosts/ranoz.sh:158: if [ "${DebugAllEnabled}" == "true" ] ; then
./hosts/ranoz.sh:159: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/ranoz.sh:160: fi
./hosts/ranoz.sh:161: if [[ -z $file_header ]] ; then
./hosts/ranoz.sh:162: if [ $j == $maxfetchretries ] ; then
./hosts/ranoz.sh:163: rm -f "${rz_cookie_jar}";
./hosts/ranoz.sh:164: printf "\\n"
./hosts/ranoz.sh:165: echo -e "${RED}| Failed to extract file info${NC}"
./hosts/ranoz.sh:166: warnAndRetryUnknownError=true
./hosts/ranoz.sh:167: if [ "${finalAttempt}" == "true" ] ; then
--
./hosts/ranoz.sh:261: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:262: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:263: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:264: else
./hosts/ranoz.sh:265: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:266: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:267: fi
./hosts/ranoz.sh:268: else
./hosts/ranoz.sh:269: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:266: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:267: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:268: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:269: else
./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:271: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:272: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:273: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:274: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:275: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:276: -H "Connection: keep-alive" \
./hosts/ranoz.sh:277: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:278: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:279: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:280: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:271: "$download_url" --continue-at - --output "$file_path"
./hosts/ranoz.sh:272: fi
./hosts/ranoz.sh:273: else
./hosts/ranoz.sh:274: if [ "${RateMonitorEnabled}" == "true" ]; then
./hosts/ranoz.sh:275: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:276: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/ranoz.sh:277: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:278: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:279: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:280: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:281: -H "Connection: keep-alive" \
./hosts/ranoz.sh:282: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:283: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:284: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:285: -H "Sec-Fetch-Mode: navigate" \
--
./hosts/ranoz.sh:285: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:286: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:287: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:288: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:289: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:290: -H "Connection: keep-alive" \
./hosts/ranoz.sh:291: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:292: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:293: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:294: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:295: -H "Sec-Fetch-Site: same-origin" \
./hosts/ranoz.sh:290: tor_curl_request --insecure -L -G --no-alpn \
./hosts/ranoz.sh:291: -H "User-Agent: $RandomUA" \
./hosts/ranoz.sh:292: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
./hosts/ranoz.sh:293: -H "Accept-Language: en-US,en;q=0.5" \
./hosts/ranoz.sh:294: -H "Accept-Encoding: gzip, deflate, br" \
./hosts/ranoz.sh:295: -H "Connection: keep-alive" \
./hosts/ranoz.sh:296: -H "Cookie: lng=eng" \
./hosts/ranoz.sh:297: -H "Upgrade-Insecure-Requests: 1" \
./hosts/ranoz.sh:298: -H "Sec-Fetch-Dest: document" \
./hosts/ranoz.sh:299: -H "Sec-Fetch-Mode: navigate" \
./hosts/ranoz.sh:300: -H "Sec-Fetch-Site: same-origin" \
--
./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
./hosts/syspro.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then
@ -2634,6 +2715,30 @@ _________________________________________________________________________
./hosts/up_filehaus.sh:115: url=$(grep -oPi '(?<=https://).*(?=\.filehaus\.su).*?(?=$)' <<< "$response")
./hosts/up_filehaus.sh:116: filesize=$(GetFileSize "$filepath" "false")
--
./hosts/up_fileland.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_fileland.sh:103: -H "Content-Type: multipart/form-data" \
./hosts/up_fileland.sh:104: -F "sess_id=" \
./hosts/up_fileland.sh:105: -F "utype=anon" \
./hosts/up_fileland.sh:106: -F "file_descr=" \
./hosts/up_fileland.sh:107: -F "file_public=1" \
./hosts/up_fileland.sh:108: -F "link_rcpt=" \
./hosts/up_fileland.sh:109: -F "link_pass=" \
./hosts/up_fileland.sh:110: -F "to_folder=" \
./hosts/up_fileland.sh:111: -F "upload=Start upload" \
./hosts/up_fileland.sh:112: -F "keepalive=1" \
--
./hosts/up_fireget.sh:102: response=$(tor_curl_upload --insecure -i \
./hosts/up_fireget.sh:103: -H "Content-Type: multipart/form-data" \
./hosts/up_fireget.sh:104: -H "Host: fireget.com" \
./hosts/up_fireget.sh:105: -F "sess_id=" \
./hosts/up_fireget.sh:106: -F "srv_tmp_url=" \
./hosts/up_fireget.sh:107: -F "link_rcpt=" \
./hosts/up_fireget.sh:108: -F "link_pass=" \
./hosts/up_fireget.sh:109: -F "tos=1" \
./hosts/up_fireget.sh:110: -F "submit_btn=Upload!" \
./hosts/up_fireget.sh:111: -F "upload_type=file" \
./hosts/up_fireget.sh:112: -F "file_1=@${filepath}" \
--
./hosts/up_firestorage.sh:113: response=$(tor_curl_upload --insecure -i \
./hosts/up_firestorage.sh:114: -H "Content-Type: multipart/form-data" \
./hosts/up_firestorage.sh:115: -F "jqueryupload=1" \
@ -3068,235 +3173,235 @@ _________________________________________________________________________
./hosts/youdbox.sh:287: containsHtml=true
./hosts/youdbox.sh:288: fi
--
./mad.sh:398:tor_curl_request() {
./mad.sh:399: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:400: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:401: else
./mad.sh:402: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:403: fi
./mad.sh:404:}
./mad.sh:405:tor_curl_request_extended() {
./mad.sh:406: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:407: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:408: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:409: else
./mad.sh:410: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:411: fi
./mad.sh:412:}
./mad.sh:413:tor_curl_upload() {
./mad.sh:405:tor_curl_request() {
./mad.sh:406: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:407: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:408: else
./mad.sh:409: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:410: fi
./mad.sh:411:}
./mad.sh:412:tor_curl_request_extended() {
./mad.sh:413: randomtimeout=$((30 + RANDOM % (60 - 30)))
./mad.sh:414: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:415: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:416: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:417: else
./mad.sh:418: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:419: fi
./mad.sh:420: else
./mad.sh:421: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:422: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:423: else
./mad.sh:415: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:416: else
./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
./mad.sh:418: fi
./mad.sh:419:}
./mad.sh:420:tor_curl_upload() {
./mad.sh:421: if [ "${UseTorCurlImpersonate}" == "true" ]; then
./mad.sh:422: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:423: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
./mad.sh:424: else
./mad.sh:425: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
./mad.sh:426: fi
./mad.sh:427: else
./mad.sh:428: if [ "${RateMonitorEnabled}" == "true" ]; then
./mad.sh:429: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
./mad.sh:430: else
--
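
Note: these wrappers are the core transport. Every request rides Tor via
socks5h:// (so DNS also resolves at the proxy), and the SOCKS username is the
throwaway ${tor_identity} value; Tor isolates streams by SOCKS credentials, so
a new username requests a fresh circuit. tor_curl_request_extended also draws
its connect timeout from $((30 + RANDOM % (60 - 30))), i.e. 30-59 seconds.
A standalone sketch of the same idea (the 127.0.0.1:9050 proxy address and
url are assumptions, not values from mad.sh):

  tor_identity="${RANDOM}"
  curl --proxy "socks5h://${tor_identity}@127.0.0.1:9050" \
    --connect-timeout 15 --compressed --globoff "https://example.com/"
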
./mad.sh:1413: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1414: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1415: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1416: fi
./mad.sh:1417: if [ ! -z "$response" ]; then
./mad.sh:1418: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1419: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1420: break
./mad.sh:1421: fi
./mad.sh:1422: done
./mad.sh:1423: if [ -z $latestTag ]; then
./mad.sh:1427: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1428: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1429: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1430: fi
./mad.sh:1431: if [ ! -z "$response" ]; then
./mad.sh:1432: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1433: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1434: break
./mad.sh:1435: fi
./mad.sh:1436: done
./mad.sh:1437: if [ -z $latestTag ]; then
--
./mad.sh:1433: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1434: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1435: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1436: fi
./mad.sh:1437: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1438: if ((j == 8)) ; then
./mad.sh:1439: return 1
./mad.sh:1440: else
./mad.sh:1441: continue
./mad.sh:1442: fi
./mad.sh:1443: fi
./mad.sh:1447: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1448: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1449: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1450: fi
./mad.sh:1451: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1452: if ((j == 8)) ; then
./mad.sh:1453: return 1
./mad.sh:1454: else
./mad.sh:1455: continue
./mad.sh:1456: fi
./mad.sh:1457: fi
--
./mad.sh:1483: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1484: received_file_size=0
./mad.sh:1485: if [ -f "$file_path" ] ; then
./mad.sh:1486: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1487: fi
./mad.sh:1488: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1489: break
./mad.sh:1490: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1491: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1492: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1493: exit 1
./mad.sh:1497: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1498: received_file_size=0
./mad.sh:1499: if [ -f "$file_path" ] ; then
./mad.sh:1500: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1501: fi
./mad.sh:1502: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1503: break
./mad.sh:1504: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1505: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1506: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1507: exit 1
--
./mad.sh:1536: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1537: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1538: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1539: fi
./mad.sh:1540: if [ ! -z "$response" ]; then
./mad.sh:1541: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1542: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1543: break
./mad.sh:1544: fi
./mad.sh:1545: done
./mad.sh:1546: if [ -z $latestTag ]; then
./mad.sh:1550: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1551: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1552: debugHtml "github" "lbf_inst_curlimp$j" "$response"
./mad.sh:1553: fi
./mad.sh:1554: if [ ! -z "$response" ]; then
./mad.sh:1555: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
./mad.sh:1556: latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
./mad.sh:1557: break
./mad.sh:1558: fi
./mad.sh:1559: done
./mad.sh:1560: if [ -z $latestTag ]; then
--
./mad.sh:1556: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1557: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1558: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1559: fi
./mad.sh:1560: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1561: if ((j == 8)) ; then
./mad.sh:1562: return 1
./mad.sh:1563: else
./mad.sh:1564: continue
./mad.sh:1565: fi
./mad.sh:1566: fi
./mad.sh:1570: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
./mad.sh:1571: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:1572: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
./mad.sh:1573: fi
./mad.sh:1574: if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
./mad.sh:1575: if ((j == 8)) ; then
./mad.sh:1576: return 1
./mad.sh:1577: else
./mad.sh:1578: continue
./mad.sh:1579: fi
./mad.sh:1580: fi
--
./mad.sh:1606: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1607: received_file_size=0
./mad.sh:1608: if [ -f "$file_path" ] ; then
./mad.sh:1609: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1610: fi
./mad.sh:1611: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1612: break
./mad.sh:1613: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1614: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1615: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1616: exit 1
./mad.sh:1620: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
./mad.sh:1621: received_file_size=0
./mad.sh:1622: if [ -f "$file_path" ] ; then
./mad.sh:1623: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:1624: fi
./mad.sh:1625: if ((received_file_size == file_size_bytes)) ; then
./mad.sh:1626: break
./mad.sh:1627: elif ((received_file_size < file_size_bytes)) ; then
./mad.sh:1628: if ((j >= MaxDownloadRetries)) ; then
./mad.sh:1629: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
./mad.sh:1630: exit 1
--
./mad.sh:1811: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1812: echo -e "Files:"
./mad.sh:1813: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1814: echo -e ""
./mad.sh:1815: echo -e ""
./mad.sh:1816: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1817: echo -e "_________________________________________________________________________"
./mad.sh:1818: echo -e "$maud_http"
./mad.sh:1819: echo -e ""
./mad.sh:1820: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1821: echo -e "_________________________________________________________________________"
--
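
Note: throughout madAudit, grep -n -vxE '[[:blank:]]*([#].*)?' first drops
blank and comment-only lines (keeping source line numbers via -n), so
commented-out requests never reach the pattern greps. Reduced sketch against a
hypothetical plugin file:

  grep -n -vxE '[[:blank:]]*([#].*)?' some_host.sh | grep --color='always' -Ei 'curl'
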
./mad.sh:1824: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1825: echo -e "_________________________________________________________________________"
./mad.sh:1826: echo -e "$maud_torcurl"
./mad.sh:1827: echo -e ""
./mad.sh:1825: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1826: echo -e "Files:"
./mad.sh:1827: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1828: echo -e ""
./mad.sh:1829: done
./mad.sh:1830: else
./mad.sh:1831: cd "$ScriptDir"
./mad.sh:1832: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1833: cd "$WorkDir"
./mad.sh:1834: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1829: echo -e ""
./mad.sh:1830: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1831: echo -e "_________________________________________________________________________"
./mad.sh:1832: echo -e "$maud_http"
./mad.sh:1833: echo -e ""
./mad.sh:1834: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1835: echo -e "_________________________________________________________________________"
--
./mad.sh:1839: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1840: echo -e "Files:"
./mad.sh:1841: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1838: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1839: echo -e "_________________________________________________________________________"
./mad.sh:1840: echo -e "$maud_torcurl"
./mad.sh:1841: echo -e ""
./mad.sh:1842: echo -e ""
./mad.sh:1843: echo -e ""
./mad.sh:1844: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1845: echo -e "_________________________________________________________________________"
./mad.sh:1846: echo -e "$maud_http"
./mad.sh:1847: echo -e ""
./mad.sh:1848: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1849: echo -e "_________________________________________________________________________"
./mad.sh:1843: done
./mad.sh:1844: else
./mad.sh:1845: cd "$ScriptDir"
./mad.sh:1846: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
./mad.sh:1847: cd "$WorkDir"
./mad.sh:1848: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
--
./mad.sh:1852: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1853: echo -e "_________________________________________________________________________"
./mad.sh:1854: echo -e "$maud_torcurl"
./mad.sh:1855: echo -e ""
./mad.sh:1856: done
./mad.sh:1857: for fil in "${arrFiles2[@]}";
./mad.sh:1858: do
./mad.sh:1859: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1860: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1861: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1862: echo -e "Files:"
./mad.sh:1863: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1864: echo -e ""
./mad.sh:1865: echo -e ""
./mad.sh:1866: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1853: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1854: echo -e "Files:"
./mad.sh:1855: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1856: echo -e ""
./mad.sh:1857: echo -e ""
./mad.sh:1858: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1859: echo -e "_________________________________________________________________________"
./mad.sh:1860: echo -e "$maud_http"
./mad.sh:1861: echo -e ""
./mad.sh:1862: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
./mad.sh:1863: echo -e "_________________________________________________________________________"
--
./mad.sh:1866: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1867: echo -e "_________________________________________________________________________"
./mad.sh:1868: echo -e "$maud_http"
./mad.sh:1868: echo -e "$maud_torcurl"
./mad.sh:1869: echo -e ""
./mad.sh:1870: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1871: echo -e "_________________________________________________________________________"
./mad.sh:1870: done
./mad.sh:1871: for fil in "${arrFiles2[@]}";
./mad.sh:1872: do
./mad.sh:1873: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):')
./mad.sh:1874: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
./mad.sh:1875: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
./mad.sh:1876: echo -e "Files:"
./mad.sh:1877: echo -e "${BLUE}${fil}${NC}"
./mad.sh:1878: echo -e ""
./mad.sh:1879: echo -e ""
./mad.sh:1880: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1881: echo -e "_________________________________________________________________________"
./mad.sh:1882: echo -e "$maud_http"
./mad.sh:1883: echo -e ""
./mad.sh:1884: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
./mad.sh:1885: echo -e "_________________________________________________________________________"
--
./mad.sh:1874: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1875: echo -e "_________________________________________________________________________"
./mad.sh:1876: echo -e "$maud_torcurl"
./mad.sh:1877: echo -e ""
./mad.sh:1878: done
./mad.sh:1879: fi
./mad.sh:1880:}
./mad.sh:1881:madStatus() {
./mad.sh:1882: local InputFile="$1"
./mad.sh:1883: if [ "$arg1" == "status" ] ; then
./mad.sh:1884: clear
./mad.sh:1888: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
./mad.sh:1889: echo -e "_________________________________________________________________________"
./mad.sh:1890: echo -e "$maud_torcurl"
./mad.sh:1891: echo -e ""
./mad.sh:1892: done
./mad.sh:1893: fi
./mad.sh:1894:}
./mad.sh:1895:madStatus() {
./mad.sh:1896: local InputFile="$1"
./mad.sh:1897: if [ "$arg1" == "status" ] ; then
./mad.sh:1898: clear
--
./mad.sh:3199: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3200: -H "Connection: keep-alive" \
./mad.sh:3201: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3202: "$download_url")
./mad.sh:3203: else
./mad.sh:3204: printf "| Retrieving Head: attempt #$j"
./mad.sh:3205: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3206: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3207: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3208: sleep 6
./mad.sh:3209: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3210: kill $! 2>/dev/null
./mad.sh:3211: )
./mad.sh:3212: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3213: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3214: fi
./mad.sh:3215: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3216: fi
./mad.sh:3213: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
./mad.sh:3214: -H "Connection: keep-alive" \
./mad.sh:3215: -w 'EffectiveUrl=%{url_effective}' \
./mad.sh:3216: "$download_url")
./mad.sh:3217: else
./mad.sh:3218: printf "| Retrieving Head: attempt #$j"
./mad.sh:3219: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3220: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
./mad.sh:3221: tee "${WorkDir}/.temp/directhead" &
./mad.sh:3222: sleep 6
./mad.sh:3223: [ -s "${WorkDir}/.temp/directhead" ]
./mad.sh:3224: kill $! 2>/dev/null
./mad.sh:3225: )
./mad.sh:3226: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
./mad.sh:3227: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
./mad.sh:3228: fi
./mad.sh:3229: rm -f "${WorkDir}/.temp/directhead"
./mad.sh:3230: fi
--
./mad.sh:3334: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3335: rc=$?
./mad.sh:3336: if [ $rc -ne 0 ] ; then
./mad.sh:3337: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3338: if [ -f ${file_path} ]; then
./mad.sh:3339: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3340: printf "\n\n"
./mad.sh:3341: rm -f "${file_path}"
./mad.sh:3342: else
./mad.sh:3343: printf "\n\n"
./mad.sh:3344: fi
./mad.sh:3350: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
./mad.sh:3351: rc=$?
./mad.sh:3352: if [ $rc -ne 0 ] ; then
./mad.sh:3353: printf "${RED}Download Failed (bad exit status).${NC}"
./mad.sh:3354: if [ -f ${file_path} ]; then
./mad.sh:3355: printf "${YELLOW} Partial removed...${NC}"
./mad.sh:3356: printf "\n\n"
./mad.sh:3357: rm -f "${file_path}"
./mad.sh:3358: else
./mad.sh:3359: printf "\n\n"
./mad.sh:3360: fi
--
./mad.sh:3378: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3379: else
./mad.sh:3380: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3381: fi
./mad.sh:3382: received_file_size=0
./mad.sh:3383: if [ -f "$file_path" ] ; then
./mad.sh:3384: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3385: fi
./mad.sh:3386: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3387: containsHtml=false
./mad.sh:3388: else
./mad.sh:3389: containsHtml=true
./mad.sh:3390: fi
./mad.sh:3394: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
./mad.sh:3395: else
./mad.sh:3396: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
./mad.sh:3397: fi
./mad.sh:3398: received_file_size=0
./mad.sh:3399: if [ -f "$file_path" ] ; then
./mad.sh:3400: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
./mad.sh:3401: fi
./mad.sh:3402: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
./mad.sh:3403: containsHtml=false
./mad.sh:3404: else
./mad.sh:3405: containsHtml=true
./mad.sh:3406: fi
--
./mad.sh:3578: response=$(tor_curl_upload --insecure -i \
./mad.sh:3579: -H "Content-Type: multipart/form-data" \
./mad.sh:3580: -F "key=" \
./mad.sh:3581: -F "time=$jira_timeval" \
./mad.sh:3582: -F "file=@${filepath}" \
./mad.sh:3583: "${jira_PostUrlHost}")
./mad.sh:3584: else
./mad.sh:3585: response=$(tor_curl_upload --insecure -i \
./mad.sh:3586: -H "Content-Type: multipart/form-data" \
./mad.sh:3587: -F "key=" \
./mad.sh:3588: -F "time=$jira_timeval" \
./mad.sh:3589: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3590: "${jira_PostUrlHost}")
./mad.sh:3591: fi
./mad.sh:3592: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3593: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3594: fi
./mad.sh:3595: if grep -Eqi ' 200 ' <<< "${response}" ; then
./mad.sh:3594: response=$(tor_curl_upload --insecure -i \
./mad.sh:3595: -H "Content-Type: multipart/form-data" \
./mad.sh:3596: -F "key=" \
./mad.sh:3597: -F "time=$jira_timeval" \
./mad.sh:3598: -F "file=@${filepath}" \
./mad.sh:3599: "${jira_PostUrlHost}")
./mad.sh:3600: else
./mad.sh:3601: response=$(tor_curl_upload --insecure -i \
./mad.sh:3602: -H "Content-Type: multipart/form-data" \
./mad.sh:3603: -F "key=" \
./mad.sh:3604: -F "time=$jira_timeval" \
./mad.sh:3605: -F "files[]=@${arrFiles[@]}" \
./mad.sh:3606: "${jira_PostUrlHost}")
./mad.sh:3607: fi
./mad.sh:3608: if [ "${DebugAllEnabled}" == "true" ] ; then
./mad.sh:3609: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
./mad.sh:3610: fi
./mad.sh:3611: if grep -Eqi ' 200 ' <<< "${response}" ; then
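
Note: the block above is the shared jirafeau-style upload path: a multipart
POST with an empty "key" (no password), a "time" retention value, and either a
single "file=" field or a "files[]=" batch. Reduced sketch over Tor; the
endpoint and retention value below are placeholders, not taken from mad.sh:

  jira_timeval='month'                                # hypothetical retention
  jira_PostUrlHost='https://host.example/script.php'  # hypothetical endpoint
  curl --proxy "socks5h://${RANDOM}@127.0.0.1:9050" --insecure -i \
    -H "Content-Type: multipart/form-data" \
    -F "key=" -F "time=${jira_timeval}" -F "file=@/path/to/file" \
    "${jira_PostUrlHost}"
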

View file

@ -3,6 +3,17 @@
#
# ---------- Initial release with MAD Uploader functionality ----------
# 2024.12.12 - [dictvm / up_dictvm] Add dictvm.org as upload / download host
# 2024.12.12 - [eddowding / up_eddowding] Add eddowding.com as upload / download host
# 2024.12.12 - [up_pixeldrain] Modify upload to use PUT
# 2024.12.12 - [mad] Update pixeldrain api key
# 2024.12.09 - [ranoz] Fix filenames with spaces
# 2024.11.29 - [innocent] Update to use tor_curl_request_extended for head/get
# 2024.11.29 - [quax] Update 404 Not found response handling
# 2024.11.27 - [up_ranoz] Modify download link to not use the upload url ticket link
# 2024.11.26 - [filehaus] Handle "404 Not found" on first instance
# 2024.11.25 - [up_moocloud / moocloud] Add moocloud.ch as an upload and download host
# 2024.11.24 - [uploadhive] Handle "Error creating download link" response -- do not mark Removed
# 2024.11.23 - [filehaus] Use tor_curl_request_extended for head / get for filehaus urls
# 2024.11.23 - [mad] Make tor_curl_request_extended a random timeout between 30-60 seconds
# 2024.11.22 - [up_quax, quax] Add qu.ax as an upload and download host

View file

@ -66,20 +66,20 @@ RELOAD!
Example:
-----------
folder=New folder 01
# pw: **1234567890$$
# ref: http://reference.source.url/here.html
#ref=http://urlToThreadOrPost
#pw=**1234567890$$
https://1fichier.com/?123456789abcdefghijk
http://hexload.com/123456789abc
folder=New folder 02
# pw: 4444555551-1
#pw=4444555551-1
http://5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd.onion/ZaZa/12az.rar
http://oshi.at/AAzz/11ZZ.rar|File - Set 001 (2001).7z
http://oshi.at/AAyy/11YY.rar|File - Set 002 (2001).7z
http://pixeldrain.com/u/ZZaa0011
folder=Direct link fun
# pw= 2022234092
#pw=2022234092
direct=http://pomf2.lain.la/f/abcd123456789.7z
direct=http://pomf2.lain.la/f/ABCD998877000.rar|This is it [2022].rar

View file

@ -27,12 +27,14 @@ Max Size . HostCode . Nickname . Notes
- 1GB kraken krakenfiles.com 90d inactive expiry
1GB ansh anonsharing.com 6mo expiry
512MB anon anonfile.de ?? expiry
500MB fland fileland.io 180d expiry
400MB dash dashfile.net ?? expiry
300MB trbo turbo.onion ~40d expiry
256MB qx qu.ax ?? expiry
- 250MB upev uploadev.org 90d inactive expiry
* 240MB ko kouploader.jp 5mo expiry (240MB max)
150MB torp TorUp.onion 30d inactive expiry
100MB fget fireget.com ??
100MB fb fileblade.com ?? expiry
100MB ubay uploadbay.net ?? expiry
100MB upee upload.ee 50d expiry
@ -57,7 +59,8 @@ Max Size . HostCode . Nickname . Notes
4GB tmpsh temp.sh 3d expiry
2GB dict dictvm.org ~1mo expiry, jirafeau
1GB kaz depot.kaz.bzh ~1mo expiry, jirafeau
512MB herb herbolistique.com ~1mo expiry, jirafeau
512MB gagn fichier.gagneux.info ~1mo expiry, jirafeau
512MB herb herbolistique.com ~1mo expiry, jirafeau
- 512MB linx linxx.net ~1mo expiry, jirafeau
- 500MB soy soyjak.download ~1mo expiry, jirafeau
195MB dup dataupload.net ?? expiry

hosts/anonfile.sh Executable file → Normal file
View file

@ -1,6 +1,6 @@
#! Name: anonfile.sh
#! Author: kittykat
#! Version: 2024.12.28
#! Version: 2025.01.13
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -238,7 +238,7 @@ anon_FetchFileInfo() {
mkdir -p "$WorkDir/.temp"
tmp_captcha_img="$WorkDir/.temp/${remote_url//[^a-zA-Z0-9]/}.jpg"
tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
if [ "${DebugPluginsEnabled}" == "true" ]; then
printf "\\n"
echo -e "$captcha_ocr_output"

View file

@ -1,6 +1,6 @@
#! Name: ateasystems.sh
#! Author: kittykat
#! Version: 2024.12.24
#! Version: 2025.01.11
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -102,7 +102,7 @@ atea_FetchFileInfo() {
continue
fi
fi
if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|<table id="error_message"' <<< "$response"; then
if grep -Eqi 'File Not Found|404 Not Found|was not found on this server|No such file with this filename|File was deleted' <<< "$response"; then
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true

View file

@ -1,6 +1,6 @@
#! Name: dailyuploads.sh
#! Author: kittykat
#! Version: 2024.10.13
#! Version: 2025.01.13
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -137,7 +137,7 @@ daily_FetchFileInfo() {
mkdir -p "$WorkDir/.temp"
tmp_captcha_img="$WorkDir/.temp/${remote_url//[^a-zA-Z0-9]/}.jpg"
tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130")
if [ "${DebugPluginsEnabled}" == "true" ]; then
printf "\\n"
echo -e "$captcha_ocr_output"

hosts/desiupload.sh Normal file
View file

@ -0,0 +1,538 @@
#! Name: desiupload.sh
#! Author: kittykat
#! Version: 2025.01.06
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostUrls: The host's plain domain name(s) used for url matching (ie. 'desiupload.co')
#! HostDomainRegex: The regex used to verify matching urls
HostCode='desi'
HostNick='desiupload'
HostFuncPrefix='desi'
HostUrls='desiupload.co'
HostDomainRegex='^(http|https)://(.*\.)?desiupload\.co/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
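#!
#! For illustration only -- a minimal sketch (not code from mad.sh) of one way
#! the merged registry could be unpacked, assuming entries are '@'-separated
#! and the domain regex follows the first ':' of each entry:
#!   while IFS= read -r entry; do
#!     [ -z "$entry" ] && continue
#!     meta=${entry%%:*}; regex=${entry#*:}
#!     IFS='/' read -r _ code nick prefix urls <<< "$meta"
#!     echo "code=$code nick=$nick prefix=$prefix urls=$urls regex=$regex"
#!   done < <(tr '@' '\n' <<< "$ListHostAndDomainRegexes")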
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#! <HostFuncPrefix>_DownloadFile()
desi_DownloadFile() {
local remote_url=${1}
local file_url=${1}
local filecnt=${2}
warnAndRetryUnknownError=false
exitDownloadError=false
exitDownloadNotAvailable=false
fileAlreadyDone=false
download_inflight_path="${WorkDir}/.inflight/"
mkdir -p "$download_inflight_path"
completed_location="${WorkDir}/downloads/"
tor_identity="${RANDOM}"
finalAttempt="false"
for ((z=0; z<=$MaxUrlRetries; z++)); do
if [ $z -eq $MaxUrlRetries ] ; then
finalAttempt="true"
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if desi_FetchFileInfo $finalAttempt && desi_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
return 0
elif [ $z -lt $MaxUrlRetries ]; then
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
fi
fi
if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
sleep 3
fi
done
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
#!
#! ------------- (2) Fetch File Info Function ----------------- #
#!
desi_FetchFileInfo() {
finalAttempt=$1
maxfetchretries=6
desi_cookie_jar=""
echo -e "${GREEN}# Fetching post info…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
mkdir -p "${WorkDir}/.temp"
desi_cookie_jar=$(mktemp "${WorkDir}/.temp/desi_cookies""${instance_no}"".XXXXXX")
printf " ."
tor_identity="${RANDOM}"
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "desi_fetch$i" "${response}"
fi
if [[ -z $response ]] ; then
rm -f "${desi_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download link [1]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [1]" ""
fi
return 1
else
continue
fi
fi
if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|<table id="error_message"' <<< "$response"; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'name="code" class="captcha_code"' <<< "$response"; then
printf "\\n"
echo -e "${GREEN}| Captcha found.${NC}"
post_sc=$(grep -oP '(?<=input type="hidden" name="sc" value=").*(?=">.*$)' <<< "$response")
codeline=$(grep -oP -m 1 '(?<=<td align=right><div style='"'"'width:80px;height:26px;).*(?=</div></td>)' <<< "$response")
pval1=$(grep -oP -m 1 '<span style='"'"'position:absolute;padding-left:[0-9]px;padding-top:[0-9]+px;'"'"'>&#\K.*?(?=;</span>)' <<< "$codeline" )
if ((pval1 <= 0)); then
pval1=$(grep -oP -m 1 '<span style='"'"'position:absolute;padding-left:1[0-9]px;padding-top:[0-9]+px;'"'"'>&#\K.*?(?=;</span>)' <<< "$codeline" )
fi
pval2=$(grep -oP -m 1 '<span style='"'"'position:absolute;padding-left:2[0-9]px;padding-top:[0-9]+px;'"'"'>&#\K.*?(?=;</span>)' <<< "$codeline" )
pval3=$(grep -oP -m 1 '<span style='"'"'position:absolute;padding-left:4[0-9]px;padding-top:[0-9]+px;'"'"'>&#\K.*?(?=;</span>)' <<< "$codeline" )
pval4=$(grep -oP -m 1 '<span style='"'"'position:absolute;padding-left:6[0-9]px;padding-top:[0-9]+px;'"'"'>&#\K.*?(?=;</span>)' <<< "$codeline" )
val1=$((pval1-48)); val2=$((pval2-48)); val3=$((pval3-48)); val4=$((pval4-48))
captcha_code="${val1}${val2}${val3}${val4}"
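# Each captcha digit is an absolutely-positioned HTML entity (e.g. &#52; at
# padding-left:40px). The decimal entity code minus 48 yields the digit
# (52 - 48 = 4), and the padding-left bucket (~0-19 / 20s / 40s / 60s px)
# fixes the digit order.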
if grep -Eqi '-' <<< "$captcha_code"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Bad captcha code [2]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Bad captcha code [2]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
fi
if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then
echo -e "${GREEN}| Post link found.${NC}"
post_action=$(grep -oP '(?<=form name="F1" method="POST" action=").*(?=".*$)' <<< "$response")
post_op=$(grep -oP '(?<=input type="hidden" name="op" value=").*(?=">.*$)' <<< "$response")
post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response")
post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">.*$)' <<< "$response")
post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">.*$)' <<< "$response")
else
rm -f "${desi_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download link [2]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [2]" ""
fi
return 1
else
continue
fi
fi
if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] || [[ -z "$captcha_code" ]] ; then
rm -f "${desi_cookie_jar}";
if [ $i == $maxfetchretries ] ; then
printf "\\n"
echo -e "${RED}| Failed to extract download link [3]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [3]" ""
fi
return 1
else
continue
fi
else
break
fi
done
form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&method_free=&method_premium=&adblock_detected=&code=${captcha_code}"
echo -e "| Captcha countdown (15s)…"
printf " "
for ((i=1; i<=3; i++)); do
sleep 5s
if ((i % 2 == 0)); then
printf "$((i * 5))"
else
printf ".."
fi
done
echo -e ""
echo -e "${GREEN}# Fetching download url…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
printf " _"
download_url=""
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${desi_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_request --insecure -L -s -X POST \
-b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
--data "$form_data" "$remote_url")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "desi_post_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
fi
if [[ -z $response ]] ; then
if [ $i == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [7]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [7]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if grep -Eqi 'div class="err">Wrong IP address</div>' <<< "$response"; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Wrong IP address [1]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Wrong IP address [1]" ""
fi
return 1
fi
if grep -Eqi 'No such file with this filename|File was deleted' <<< "$response"; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Rate limited [2]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Rate limited [2]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if grep -Eqi 'Just a moment...' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [8]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [8]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if grep -Eqi '<nobr>Filename: <b>' <<< "$response"; then
filename=$(grep -oP '(?<=<nobr>Filename: <b>).*(?=</b>.*$)' <<< "$response")
fi
if grep -Eqi '<span id="direct_link">' <<< "$response"; then
printf "\\n"
echo -e "${GREEN}| Download url found.${NC}"
subSearch=$(awk '/<span id="direct_link">/,/function player_start(evt)/' <<< "$response")
download_url=$(grep -oP '(?<=<a href=").*(?="><img src="/images/dirct-lnk.png".*$)' <<< "$subSearch")
download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url")
fi
if [[ -z "$download_url" ]] ; then
if [ $i == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [9]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [9]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
else
break
fi
done
rm -f "${desi_cookie_jar}";
echo -e "${GREEN}# Fetching file info…${NC}"
for ((j=1; j<=$maxfetchretries; j++)); do
printf " ."
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${desi_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
GetRandomUA
file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "desi_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
fi
if [[ -z $file_header ]] ; then
if [ $j == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract file info${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
if [ $j == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract file info${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
if [[ -z "$file_size_bytes" ]]; then
if [ $j == $maxfetchretries ] ; then
rm -f "${desi_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract file size.${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
break #Good to go here
done
touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
if [ ! "$filename_override" == "" ] ; then
filename="$filename_override"
fi
filename=$(sanitize_file_or_folder_name "${filename}")
printf "\\n"
echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
if [ -z "$file_size_bytes" ] ; then
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Filesize not found!" ""
fi
echo -e "${YELLOW}| Filesize not found… retry${NC}"
return 1
else
file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
fi
echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
file_path="${download_inflight_path}${filename}"
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then
return 1
fi
if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
return 1
fi
echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
}
#!
#! ----------- (3) Fetch File / Download File Function --------------- #
#!
desi_GetFile() {
echo -e "${GREEN}# Downloading…"
echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
fileCnt=$1
retryCnt=$2
finalAttempt=$3
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
for ((j=1; j<=$MaxDownloadRetries; j++)); do
pd_presize=0
if [ -f "$file_path" ] ; then
pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
fi
GetRandomUA
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${desi_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if [ "${UseTorCurlImpersonate}" == "true" ]; then
if [ "${RateMonitorEnabled}" == "true" ]; then
tor_curl_request --insecure \
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
"$download_url" --continue-at - --output "$file_path"
else
tor_curl_request --insecure \
-b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
"$download_url" --continue-at - --output "$file_path"
fi
else
if [ "${RateMonitorEnabled}" == "true" ]; then
tor_curl_request --insecure \
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
-H "User-Agent: $RandomUA" \
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
-H "Accept-Language: en-US,en;q=0.5" \
-H "Accept-Encoding: gzip, deflate, br" \
-H "Connection: keep-alive" \
-H "Cookie: lng=eng" \
-H "Upgrade-Insecure-Requests: 1" \
-H "Sec-Fetch-Dest: document" \
-H "Sec-Fetch-Mode: navigate" \
-H "Sec-Fetch-Site: same-origin" \
-H "Sec-Fetch-User: ?1" \
"$download_url" --continue-at - --output "$file_path"
else
tor_curl_request --insecure \
-b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \
-H "User-Agent: $RandomUA" \
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
-H "Accept-Language: en-US,en;q=0.5" \
-H "Accept-Encoding: gzip, deflate, br" \
-H "Connection: keep-alive" \
-H "Cookie: lng=eng" \
-H "Upgrade-Insecure-Requests: 1" \
-H "Sec-Fetch-Dest: document" \
-H "Sec-Fetch-Mode: navigate" \
-H "Sec-Fetch-Site: same-origin" \
-H "Sec-Fetch-User: ?1" \
"$download_url" --continue-at - --output "$file_path"
fi
fi
received_file_size=0
if [ -f "$file_path" ] ; then
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
fi
if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
containsHtml=false
else
containsHtml=true
fi
downDelta=$(( received_file_size - pd_presize ))
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
if [ -f "${file_path}" ] ; then
if ((pd_presize > 0)); then
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
truncate -s $pd_presize "${file_path}"
else
echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
rm -f "${file_path}"
fi
fi
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
if [ -f "${file_path}" ] ; then
if ((pd_presize > 0)); then
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
truncate -s $pd_presize "${file_path}"
else
echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
rm -f "${file_path}"
fi
fi
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
if [ -f "$file_path" ] ; then
rm -rf "$file_path"
fi
echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
fi
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
echo -e "\n${RED}Download failed, file is incomplete.${NC}"
if ((j >= $MaxDownloadRetries)) ; then
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
fi
else
break
fi
done
rm -f "$flockDownload";
rm -f "${desi_cookie_jar}";
ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
return 0
}
#!
#! --------------- Host Extra Functions ------------------- #
#!

31
hosts/euromussels.sh Normal file
View file

@ -0,0 +1,31 @@
#! Name: euromussels.sh
#! Author: kittykat
#! Version: 2025.01.08
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='euro'
HostNick='euromussels'
HostFuncPrefix='direct'
HostUrls='uploads.euromussels.eu'
HostDomainRegex='^(http|https)://(.*\.)?uploads\.euromussels\.eu/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! This is a direct= download host, so all the functions are already in mad.sh
#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
#! call the direct_DownloadFile() function already in mad.sh
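#! Example (illustrative): a urls.txt line such as
#!   https://uploads.euromussels.eu/f.php?h=<file_code>&p=1
#! matches HostDomainRegex above and is handed to direct_DownloadFile().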

56
hosts/fileblade.sh Executable file → Normal file
View file

@ -1,6 +1,6 @@
#! Name: fileblade.sh
#! Author: kittykat
#! Version: 2024.12.28
#! Version: 2025.01.14
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -77,7 +77,7 @@ fb_DownloadFile() {
#!
fb_FetchFileInfo() {
finalAttempt=$1
maxfetchretries=5
maxfetchretries=10
fb_cookie_jar=""
echo -e "${GREEN}# Fetching download1…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
@ -105,14 +105,14 @@ fb_FetchFileInfo() {
continue
fi
fi
if grep -Eqi "Sorry, you are banned" <<< "$response"; then
rm -f "${fb_cookie_jar}";
if grep -Eqi "Sorry, you are banned|Sorry, you have been blocked" <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${fb_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link.${NC}"
echo -e "${RED}| Failed to extract download link [blocked ip]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
failedRetryDownload "${remote_url}" "Failed to extract download link [blocked ip]" ""
fi
return 1
else
@ -154,6 +154,7 @@ fb_FetchFileInfo() {
break
fi
done
maxfetchretries=36
echo -e "${GREEN}# Fetching download2…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
printf " _"
@ -178,7 +179,6 @@ fb_FetchFileInfo() {
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
@ -190,13 +190,28 @@ fb_FetchFileInfo() {
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'The file owner does not allow FREE users to download files which are over 100 MB' <<< "$response"; then
if grep -Eqi 'file owner does not allow FREE users to download files which are over 100 MB' <<< "$response"; then
rm -f "${fb_cookie_jar}";
printf "\\n"
echo -e "${RED}| Pro download only. (Free users not allowed download > 100MB)${NC}"
echo -e "${RED}| Pro download only. (Free users not allowed downloads over 100MB)${NC}"
exitDownloadError=true
failedRetryDownload "${remote_url}" "Pro download only. [Free users not allowed download over 100MB]" ""
failedRetryDownload "${remote_url}" "Pro download only. [Free users not allowed downloads over 100MB]" ""
return 1
fi
if grep -Eqi "Sorry, you are banned|Sorry, you have been blocked" <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${fb_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [blocked ip]${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [blocked ip]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if grep -Eqi 'Your subsequent download will be started in' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
@ -292,22 +307,7 @@ fb_FetchFileInfo() {
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'Just a moment...' <<< "$response"; then
if [ $i == $maxfetchretries ] ; then
rm -f "${fb_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract download link [5].${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract download link [5]" ""
fi
return 1
else
tor_identity="${RANDOM}"
continue
fi
fi
if ! grep -Eqi '<a href="https://de6.fileblade.com/files/' <<< "$response"; then
if ! grep -qPi '(?=href="https://.*?\.fileblade.com/files/.*?" class=.*$)' <<< "$response"; then
printf "\\n"
echo -e "${RED}| Failed to extract download link [6]${NC}"
warnAndRetryUnknownError=true
@ -319,9 +319,9 @@ fb_FetchFileInfo() {
else
printf "\\n"
echo -e "${GREEN}| Download url found.${NC}"
download_url=$(grep -oP -m 1 '(?<=a href="https://de6.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
download_url=$(grep -oP -m 1 '(?<=a href="https://).*?(?=\.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
download_url="${download_url//[$'\t\r\n']}"
download_url='https://de6.fileblade.com/files/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
download_url='https://'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
break
fi
done

31
hosts/gagneux.sh Normal file
View file

@ -0,0 +1,31 @@
#! Name: gagneux.sh
#! Author: kittykat
#! Version: 2025.01.14
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='gagn'
HostNick='gagneux.info'
HostFuncPrefix='direct'
HostUrls='fichier.gagneux.info'
HostDomainRegex='^(http|https)://(.*\.)?fichier\.gagneux\.info/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! This is a direct= download host, so all the functions are already in mad.sh
#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
#! call the direct_DownloadFile() function already in mad.sh

View file

@ -1,6 +1,6 @@
#! Name: isupload.sh
#! Author: kittykat
#! Version: 2024.12.20
#! Version: 2025.01.05
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -205,6 +205,9 @@ isup_FetchFileInfo() {
continue
fi
fi
if grep -Eqi '<nobr>Filename: <b>' <<< "$response"; then
filename=$(grep -oP -m 1 '(?<=<nobr>Filename: <b>).*?(?=<\/b><\/nobr><br>.*$)' <<< "$response")
fi
if ! grep -Eqi '<a href="http://isupload.com/cgi-bin/dl.cgi/' <<< "$response"; then
printf "\\n"
echo -e "${RED}| Failed to extract download link [4]${NC}"
@ -234,14 +237,14 @@ isup_FetchFileInfo() {
tput sc
tor_identity="${RANDOM}"
if ((j % 1 == 0)); then
printf "| Retrieving Head: attempt #$j"
file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
elif ((j % 2 == 0)); then
printf "| Retrieving Head (Get): attempt #$j"
file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
-H "Connection: keep-alive" \
-w 'EffectiveUrl=%{url_effective}' \
"$download_url")
elif ((j % 2 == 0)); then
printf "| Retrieving Head: attempt #$j"
file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
elif ((j % 3 == 0)); then
printf "| Retrieving Head (hack): attempt #$j"
rm -f "${WorkDir}/.temp/directhead"

31
hosts/ramsgaard.sh Normal file
View file

@ -0,0 +1,31 @@
#! Name: ramsgaard.sh
#! Author: kittykat
#! Version: 2025.01.08
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='rmsg'
HostNick='ramsgaard'
HostFuncPrefix='direct'
HostUrls='data.ramsgaard.me'
HostDomainRegex='^(http|https)://(.*\.)?data\.ramsgaard\.me/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! This is a direct= download host, so all the functions are already in mad.sh
#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
#! call the direct_DownloadFile() function already in mad.sh

View file

@ -1,6 +1,6 @@
#! Name: ranoz.sh
#! Author: kittykat
#! Version: 2024.12.09
#! Version: 2025.01.11
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -111,6 +111,13 @@ rz_FetchFileInfo() {
removedDownload "${remote_url}"
return 1
fi
if grep -Eqi 'NEXT_NOT_FOUND' <<< "$response"; then
printf "\\n"
echo -e "${RED}| The file appears to be gone (NEXT_NOT_FOUND)${NC}"
exitDownloadError=true
removedDownload "${remote_url}" "The file appears to be gone [NEXT_NOT_FOUND]"
return 1
fi
if [ "$filename_override" == "" ] ; then
filename=$(grep -oP '(?<=<div class\="page_box_category__.....">Name</div><div class\="page_box_value__.....">).*?(?=</div>.*$)' <<< "$file_header")
fi
@ -165,14 +172,12 @@ rz_FetchFileInfo() {
continue
fi
fi
if grep -Eqi '404 Not Found' <<< "$file_header"; then
if grep -Eqi 'HTTP.* 404|404 Not Found' <<< "$file_header"; then
rm -f "${rz_cookie_jar}";
printf "\\n"
echo -e "${RED}| Failed to extract file info${NC}"
warnAndRetryUnknownError=true
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Failed to extract file info" ""
fi
echo -e "${RED}| The file appears to be gone [404]${NC}"
exitDownloadError=true
removedDownload "${remote_url}" "The file appears to be gone [404]"
return 1
fi
if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then

View file

@ -1,6 +1,6 @@
#! Name: up_anonfile.sh
#! Author: kittykat
#! Version: 2024.10.26
#! Version: 2025.01.13
#! Desc: Add support for uploading files to anonfile.de
#! Info: https://anonfile.de/<filehash>
#! MaxSize: 512MB
@ -96,7 +96,7 @@ anon_PostFile() {
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
tor_identity="${RANDOM}"
PostUrlHost='https://anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
PostUrlHost='https://file-01.anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i \

42
hosts/up_euromussels.sh Normal file
View file

@ -0,0 +1,42 @@
#! Name: up_euromussels.sh
#! Author: kittykat
#! Version: 2025.01.08
#! Desc: Add support for uploading files to uploads.euromussels.eu
#! Info: Files are accessible at https://address/f.php?h=<file_code>&p=1
#! MaxSize: 512MB
#! Expire: 1 Month
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='euro'
HostNick='euromussels.eu'
HostFuncPrefix='euro'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Jirafeau Host
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
euro_UploadFile() {
jira_MaxUploadSizeInBytes=536870912
jira_PostUrlHost='https://uploads.euromussels.eu/script.php'
jira_filetype=1
jira_timeval="month"
jira_downloadLinkPrefix='https://uploads.euromussels.eu/f.php?h='
jira_UploadFile ${1} ${2} ${3} ${4}
}
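#! A minimal sketch of the Jirafeau wrapper pattern: each host file only sets
#! the size cap, script.php endpoint, file type, retention, and link prefix,
#! then delegates the actual post to the shared jira_UploadFile() helper
#! (assumed to live in mad.sh alongside the other common functions).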

142
hosts/up_fileland.sh Normal file
View file

@ -0,0 +1,142 @@
#! Name: up_fileland.sh
#! Author: kittykat
#! Version: 2025.01.07
#! Desc: Add support for uploading files to fileland.io
#! Info: https://fileland.io/<filehash>
#! MaxSize: 500MB
#! Expire: ??
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='fland'
HostNick='fileland'
HostFuncPrefix='fland'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
fland_UploadFile() {
local _hostCode=${1}
local filepath=${2}
local filecnt=${3}
local pline=${4}
local filename="${filepath##*/}"
warnAndRetryUnknownError=false
exitUploadError=false
exitUploadNotAvailable=false
fileAlreadyDone=false
tor_identity="${RANDOM}"
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
MaxUploadSizeInBytes=524288000
fsize=$(GetFileSize "$filepath" "false")
if ((fsize > MaxUploadSizeInBytes)); then
rm -f "${UploadTicket}"
echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
return 1
fi
finalAttempt="false"
for ((z=0; z<=$MaxUploadRetries; z++)); do
if [ $z -eq $MaxUploadRetries ] ; then
finalAttempt="true"
fi
trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if fland_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
return 0
elif [ $z -lt $MaxUploadRetries ]; then
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
fi
fi
if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${UploadTicket}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
sleep 3
fi
done
rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
fland_PostFile() {
local filepath=$1
local _hostCode=$2
local filename=$3
local fileCnt=$4
local retryCnt=$5
local finalAttempt=$6
local pline=${7}
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
tor_identity="${RANDOM}"
PostUrlHost='https://fs300.fileland.io/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i \
-H "Content-Type: multipart/form-data" \
-F "sess_id=" \
-F "utype=anon" \
-F "file_descr=" \
-F "file_public=1" \
-F "link_rcpt=" \
-F "link_pass=" \
-F "to_folder=" \
-F "upload=Start upload" \
-F "keepalive=1" \
-F "file_0=@${filepath}" \
"${PostUrlHost}")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then
hash=$(grep -oPi -m 1 '(?<="file_code":").*?(?=",".*$)' <<< "$response")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="https://fileland.io/$hash"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
else
err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
if [ "${finalAttempt}" == "true" ] ; then
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
exitUploadError=true
return 1
else
return 1
fi
fi
}
#!
#! --------------- Host Extra Functions ------------------- #
#!

142
hosts/up_fireget.sh Normal file
View file

@ -0,0 +1,142 @@
#! Name: up_fireget.sh
#! Author: kittykat
#! Version: 2025.01.07
#! Desc: Add support for uploading files to fireget.com
#! Info: https://fireget.com/<filehash>
#! MaxSize: 100MB
#! Expire: ??
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='fget'
HostNick='fireget'
HostFuncPrefix='fget'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
fget_UploadFile() {
local _hostCode=${1}
local filepath=${2}
local filecnt=${3}
local pline=${4}
local filename="${filepath##*/}"
warnAndRetryUnknownError=false
exitUploadError=false
exitUploadNotAvailable=false
fileAlreadyDone=false
tor_identity="${RANDOM}"
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
MaxUploadSizeInBytes=104857600
fsize=$(GetFileSize "$filepath" "false")
if ((fsize > MaxUploadSizeInBytes)); then
rm -f "${UploadTicket}"
echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
return 1
fi
finalAttempt="false"
for ((z=0; z<=$MaxUploadRetries; z++)); do
if [ $z -eq $MaxUploadRetries ] ; then
finalAttempt="true"
fi
trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if fget_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
return 0
elif [ $z -lt $MaxUploadRetries ]; then
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
fi
fi
if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${UploadTicket}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
sleep 3
fi
done
rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
fget_PostFile() {
local filepath=$1
local _hostCode=$2
local filename=$3
local fileCnt=$4
local retryCnt=$5
local finalAttempt=$6
local pline=${7}
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
tor_identity="${RANDOM}"
PostUrlHost='https://s22.fireget.com/cgi-bin/upload.cgi'
arrFiles=("$filepath")
trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -i \
-H "Content-Type: multipart/form-data" \
-H "Host: fireget.com" \
-F "sess_id=" \
-F "srv_tmp_url=" \
-F "link_rcpt=" \
-F "link_pass=" \
-F "tos=1" \
-F "submit_btn=Upload!" \
-F "upload_type=file" \
-F "file_1=@${filepath}" \
--cookie "lang=english" \
"${PostUrlHost}")
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
if grep -Eqi 'Location: https://fireget.com/\?&fn=' <<< "${response}" ; then
hash=$(grep -oPi -m 1 '(?<=Location: https://fireget.com/\?&fn=).*?(?=&st=OK.*$)' <<< "$response")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="https://fireget.com/$hash"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
else
err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
if [ "${finalAttempt}" == "true" ] ; then
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
exitUploadError=true
return 1
else
return 1
fi
fi
}
#!
#! --------------- Host Extra Functions ------------------- #
#!

42
hosts/up_gagneux.sh Normal file
View file

@ -0,0 +1,42 @@
#! Name: up_gagneux.sh
#! Author: kittykat
#! Version: 2025.01.14
#! Desc: Add support for uploading files to gagneux.info
#! Info: Files are accessible at https://address/f.php?h=<file_code>&p=1
#! MaxSize: 512MB
#! Expire: 1 Month
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='gagn'
HostNick='gagneux'
HostFuncPrefix='gagn'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Jirafeau Host
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
gagn_UploadFile() {
jira_MaxUploadSizeInBytes=536870912
jira_PostUrlHost='https://fichier.gagneux.info/script.php'
jira_filetype=1
jira_timeval="month"
jira_downloadLinkPrefix='https://fichier.gagneux.info/f.php?h='
jira_UploadFile ${1} ${2} ${3} ${4}
}

View file

@ -1,6 +1,6 @@
#! Name: up_pixeldrain.sh
#! Author: kittykat
#! Version: 2024.11.13
#! Version: 2025.01.08
#! Desc: Add support for uploading files to pixeldrain.com
#! Info: Files are accessible at https://pixeldrain.com/u/<file_code>
#! MaxSize: 20GB
@ -130,8 +130,8 @@ pd_PostFile() {
fi
break
done
if grep -Eqi '"success":true,"id":"' <<< "${response}" ; then
hash=$(grep -oPi '(?<="success":true,"id":").*?(?=".*$)' <<< "$response")
if grep -Eqi '"id":"' <<< "${response}" ; then
hash=$(grep -oPi '(?<="id":").*?(?=".*$)' <<< "$response")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="https://pixeldrain.com/u/${hash}"
echo -e "${GREEN}| Upload Success${NC}"

42
hosts/up_ramsgaard.sh Normal file
View file

@ -0,0 +1,42 @@
#! Name: up_ramsgaard.sh
#! Author: kittykat
#! Version: 2025.01.08
#! Desc: Add support for uploading files to data.ramsgaard.me
#! Info: Files are accessible at https://address/f.php?h=<file_code>&p=1
#! MaxSize: 512MB
#! Expire: 1 Month
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='rmsg'
HostNick='ramsgaard'
HostFuncPrefix='rmsg'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
#!
#!
#! Jirafeau Host
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
rmsg_UploadFile() {
jira_MaxUploadSizeInBytes=536870912
jira_PostUrlHost='https://data.ramsgaard.me/script.php'
jira_filetype=1
jira_timeval="month"
jira_downloadLinkPrefix='https://data.ramsgaard.me/f.php?h='
jira_UploadFile ${1} ${2} ${3} ${4}
}

View file

@ -1,6 +1,6 @@
#! Name: uploadhive.sh
#! Author: kittykat
#! Version: 2024.12.25
#! Version: 2025.01.06
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
@ -102,7 +102,7 @@ uhive_FetchFileInfo() {
continue
fi
fi
if grep -Eqi "File Not Found|The file you were looking for could not be found|The file was removed by administrator" <<< "$response"; then
if grep -Eqi "File Not Found|could not be found|file was removed|404 Not Found" <<< "$response"; then
printf "\\n"
echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
exitDownloadError=true

31
hosts/uwabaki.sh Normal file
View file

@ -0,0 +1,31 @@
#! Name: uwabaki.sh
#! Author: kittykat
#! Version: 2025.01.14
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostDomainRegex: The regex used to verify matching urls
HostCode='uwab'
HostNick='uwabaki'
HostFuncPrefix='direct'
HostUrls='uwabaki.party'
HostDomainRegex='^(http|https)://(.*\.)?files\.uwabaki\.party/'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! This is a direct= download host, so all the functions are already in mad.sh
#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
#! call the direct_DownloadFile() function already in mad.sh

86
mad.sh
View file

@ -21,19 +21,37 @@
# Special thanks for contributions and collaboration:
# * beautfar - code fixes, bad html detection, several areas of code updates, and ideas
# * MisterFL - unzip after completion code and ideas
#
# Additional thanks for contributions:
# * stacktrough - click captcha workarounds and pixel websock code
# * oldfart - code suggestions, helpful feedback, clipmon implementation, WorkDir, fixes
# * zaire - feedback, suggestions, and encouragement
# * Rexmaxx - testing and bug reports
# * TinyPanties56 - feedback and suggestions
# * klonkerz - feedback and suggestions, url only processing
#
# Thanks for testing, feedback, bug reports, suggestions, and encouragement:
# * zaire, Rexmaxx, TinyPanties56, klonkerz, Stifflove
#
# * Everyone who provided feedback and helped test.. and those who wish to remain anonymous
ScriptVersion=2025.01.04
ScriptVersion=2025.01.14
#=================================================
# Recent Additions
# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host
# 2025.01.14 - [uwabaki] Add uwabaki.party as download host
# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor ips (until alternative)
# 2025.01.13 - [ocr_captcha] Create imagemagick OCR function for testing without tesseract
# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function
# 2025.01.13 - [up_anonfile] Modify to use new upload url
# 2025.01.12 - [ateasystems] Update 404 Not found response
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
# 2025.01.03 - [gofile] Detect "Bulk download is a Premium feature" response (no children)
# 2025.01.02 - [up_axfc] Update PUT response check to handle kanji chars (remove)
# 2025.01.02 - [dashfile] Add response 'This file reached max downloads limit'. New cookie on captcha fail
@ -75,17 +93,6 @@ ScriptVersion=2025.01.04
# 2024.12.20 - [fileblade / up_fileblade] Add fileblade.com as upload / download host
# 2024.12.20 - [isupload / up_isupload] Add isupload.com as upload / download host
# 2024.12.15 - [mediafire] Add mediafire download link processing
# 2024.12.12 - [dictvm / up_dictvm] Add dictvm.org as upload / download host
# 2024.12.12 - [eddowding / up_eddowding] Add eddowding.com as upload / download host
# 2024.12.12 - [up_pixeldrain] Modify upload to use PUT
# 2024.12.12 - [mad] Update pixeldrain api key
# 2024.12.09 - [ranoz] Fix filenames with spaces
# 2024.11.29 - [innocent] Update to use tor_curl_request_extended for head/get
# 2024.11.29 - [quax] Update 404 Not found response handling
# 2024.11.27 - [up_ranoz] Modify download link to not use the upload url ticket link
# 2024.11.26 - [filehaus] Handle "404 Not found" on first instance
# 2024.11.25 - [up_moocloud / moocloud] Add moocloud.ch as an upload and download host
# 2024.11.24 - [uploadhive] Handle "Error creating download link" response -- do not mark Removed
# -- See ./documentation/!Changelog (Historical).txt for further changes -- #
@ -432,7 +439,8 @@ SetEnabledUploadHosts() {
lstEnabledUploadHosts+="up_isupload,up_kouploader,up_moocloud,up_nantes,up_offshorecat,up_oshi,"
lstEnabledUploadHosts+="up_pixeldrain,up_quax,up_ranoz,up_skrepr,up_torup,up_turboonion,up_uploadee,"
lstEnabledUploadHosts+="up_uploadhive,up_uploadraja,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
lstEnabledUploadHosts+="up_dashfile,up_anonfile"
lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
lstEnabledUploadHosts+="up_gagneux"
elif [[ "$EnabledUploadHosts" == "online" ]] ; then
lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bedrive,up_bowfile,up_depotkaz,"
lstEnabledUploadHosts+="up_familleflender,up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile,"
@ -440,7 +448,8 @@ SetEnabledUploadHosts() {
lstEnabledUploadHosts+="up_nippy,up_nofile,up_offshorecat,up_oshi,up_pixeldrain,up_quax,up_ranoz,"
lstEnabledUploadHosts+="up_shareonline,up_skrepr,up_torup,up_turboonion,up_uploadee,up_uploadhive,"
lstEnabledUploadHosts+="up_uploadraja,up_yolobit,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
lstEnabledUploadHosts+="up_dashfile,up_anonfile"
lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
lstEnabledUploadHosts+="up_gagneux"
fi
}
SetEnabledDownloadHosts() {
@ -451,7 +460,8 @@ SetEnabledDownloadHosts() {
lstEnabledDownloadHosts+="gofile,harrault,innocent,isupload,lainsafe,lainsafe_onion,linxx,mediafire,"
lstEnabledDownloadHosts+="moocloud,nantes,netlib,offshorecat,oshi,pixeldrain,quax,ranoz,skrepr,"
lstEnabledDownloadHosts+="tempfileme,tempsh,torup,turboonion,up2share,uploadee,uploadev,uploadhive,"
lstEnabledDownloadHosts+="youdbox,herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile"
lstEnabledDownloadHosts+="youdbox,herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,"
lstEnabledDownloadHosts+="fileland,fireget,euromussels,ramsgaard,uwabaki,gagneux"
elif [[ "$EnabledDownloadHosts" == "online" ]] ; then
lstEnabledDownloadHosts="1fichier,anonsharing,bedrive,biteblob,bowfile,click,cyssoux,"
lstEnabledDownloadHosts+="dailyuploads,dataupload,depotkaz,dictvm,dosya,downloadgg,eddowding,eternalhosting,"
@ -460,7 +470,8 @@ SetEnabledDownloadHosts() {
lstEnabledDownloadHosts+="lainsafe_onion,mediafire,moocloud,nantes,netlib,nippy,nofile,offshorecat,"
lstEnabledDownloadHosts+="oshi,pixeldrain,quax,ranoz,shareonline,skrepr,tempfileme,tempsh,torup,"
lstEnabledDownloadHosts+="turboonion,up2share,uploadee,uploadev,uploadhive,yolobit,youdbox,herbolistique,"
lstEnabledDownloadHosts+="uploadbay,ateasystems,syspro,dashfile,anonfile"
lstEnabledDownloadHosts+="uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,fileland,fireget,"
lstEnabledDownloadHosts+="euromussels,ramsgaard,uwabaki,gagneux"
fi
}
GetRandomFiledotUser() {
@ -773,18 +784,21 @@ failedDownload() {
removedDownload() {
local url="${1//[$'\t\r\n']}"
local message=$(literalize_string "$2")
if [ -n "$message" ]; then
message=" $message"
fi
mkdir -p "${WorkDir}/downloads"
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
echo -e "$dateStamp [REMOVED] ${url}" >> "${WorkDir}/downloads/results.txt"
echo -e "$dateStamp [REMOVED] ${url}${message}" >> "${WorkDir}/downloads/results.txt"
mkdir -p "${WorkDir}/data"
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
echo -e "$dateStamp [REMOVED] url: ${url}, message: $message" >> "${WorkDir}/data/downloads_completed.txt"
echo -e "$dateStamp [REMOVED] url: ${url}, message:$message" >> "${WorkDir}/data/downloads_completed.txt"
if [ ! "$UrlOnly" == "true" ]; then
if [ "${AutoCommentOnCompletion}" == "true" ] ; then
sed -i -e "s>^$url.*>#& #REMOVED#>g" "${InputFile}" #processed url
sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #http (if changed)
sed -i -e "s>^direct=$url.*>#& #REMOVED#>g" "${InputFile}" #direct url http
sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #direct url https
sed -i -e "s>^$url.*>#& #REMOVED#${message}>g" "${InputFile}" #processed url
sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
sed -i -e "s>^direct=$url.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url http
sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
else
sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
@ -930,7 +944,7 @@ badUrlDownload() {
fi
mkdir -p "${WorkDir}/downloads"
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
echo -e "$dateStamp [BADURL] ${url}" >> "${WorkDir}/downloads/results.txt"
echo -e "$dateStamp [BADURL] ${url}${message}" >> "${WorkDir}/downloads/results.txt"
if [ ! "$UrlOnly" == "true" ]; then
if [ "${AutoCommentOnCompletion}" == "true" ] ; then
sed -i -e "s>^${url}.*>#& #BAD-URL#${message}>g" "${InputFile}" #processed url
@ -3184,7 +3198,7 @@ direct_FetchFileInfo() {
download_url="${download_url/https/http}"
fi
file_id=$(grep -oP '[^\/]*$' <<< "$file_url")
maxretries=2
maxretries=4
for ((j=1; j<=$maxretries; j++)); do
mkdir -p "${WorkDir}/.temp"
if ((j > 1)); then
@ -3231,14 +3245,16 @@ direct_FetchFileInfo() {
return 1
fi
if ! grep -Eqi 'HTTP/.*200|HTTP/.*302' <<< "${file_header}" ; then
hResponse=$(grep -oPi 'HTTP/.* \K.*$' <<< "${file_header}")
if ((j>=$maxretries)); then
hResponse=$(grep -oPi 'HTTP/.*? \K.*$' <<< "${file_header}")
if ((j>=maxretries)); then
printf "\\n"
echo -e "${RED}| Unexpected header response ($hResponse).${NC}"
echo -e "${RED}| Unexpected header response: ${hResponse}${NC}"
failedRetryDownload "${remote_url}" "Unexpected header response: ${hResponse}" ""
exitDownloadNotAvailable=true
return 1
else
printf "\\n"
echo -e "${YELLOW}| Unexpected header response ($hResponse). Retrying...${NC}"
echo -e "${YELLOW}| Unexpected header response: ${hResponse}. Retrying...${NC}"
continue
fi
fi
@ -3298,7 +3314,7 @@ direct_FetchFileInfo() {
filename=$(sanitize_file_or_folder_name "${filename}")
if [ -z "$filename" ]; then
printf "\\n"
echo -e "${RED}| Unexpected or no header response.${NC}"
echo -e "${RED}| Unexpected or no header response [no filename]${NC}"
return 1
fi
if [ -z "$file_size_bytes" ] ; then

@ -4,4 +4,6 @@ How to setup tesseract-ocr traineddata:
https://github.com/tesseract-ocr/tessdata_best/raw/main/eng.traineddata
(SHA256: 8280AED0782FE27257A68EA10FE7EF324CA0F8D85BD2FD145D1C2B560BCB66BA)
* And then extracted to ./plugins/ocr/tessdata/ folder (15,400,601 bytes)
!! Rename "eng.traineddata" to "eng_best.traineddata"
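To verify and stage the traineddata per this note (the path and checksum are the ones pinned above):

cd ./plugins/ocr/tessdata
sha256sum eng.traineddata                  # must match the SHA256 listed above
mv eng.traineddata eng_best.traineddata    # matches the -l eng_best flag used by the plugin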

plugins/ocr_captcha.sh Executable file → Normal file

@ -1,6 +1,6 @@
#! Name: ocr_captcha.sh
#! Author: kittykat
#! Version: 2024.10.13
#! Version: 2025.01.14
#! Desc: Script to extract captcha from image using tesseract-ocr and imagemagick
#! Usage: Edit LoadPlugin="" line in mad.sh or mad.config
#! LoadPlugin="ocr_captcha.sh"
@ -31,14 +31,71 @@
#! * PostSuccessfulUpload(): occurs after an upload success (after upload completed ticket is created in ./downloads/).
#! * PostFailedUpload(): occurs after an upload fails definitively -- #FAIL# in the temp_upload_handler.txt
#! * PostFailRetryUpload(): occurs after an upload fails with a retry (network drop, unexpected result)
#! * DoneProcessingAllUploads: occurs after alll the files have finished processing
#! * DoneProcessingAllUploads: occurs after all the files have finished processing
#!
#!
#! CaptchaOcrImage: Uses imagemagick only to read 4-digit horizontal captchas, for testing without tesseract (WIP)
CaptchaOcrImage() {
local plugName='ocr_captcha'
local plugFunc='CaptchaOcrImage'
if [ "${DebugPluginsEnabled}" == "true" ]; then
echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}"
fi
DEPENDENCIES=(convert)
for DEPENDENCY in "${DEPENDENCIES[@]}" ; do
if [ -z "$(which $DEPENDENCY)" ] ; then
if [ "$DEPENDENCY" == "convert" ]; then
echo "imagemagick not installed. Aborting"
else
echo "$DEPENDENCY not installed. Aborting"
fi
return 1
fi
done
captcha_image_filepath="$1"
data_type="$2"
imagemagick_extra_params="$3"
local captcha_image_filename="${captcha_image_filepath##*/}"
if [ ! -f "$captcha_image_filepath" ]; then
echo -e "Image not found."
return 1
fi
local digitschars="data:image/webp;base64,$(base64 -w 0 "$captcha_image_filepath")"
if grep -Eqi "NUMBERONLY" <<< "$data_type" ; then
local i e r
for i in {0..3}; do
e=$(compare -metric NCC -subimage-search "$digitschars" \( "$1" -crop 8x10+$((22+9*i))+8 \) null: 2>&1)
[[ $e =~ @\ ([0-9]+) ]] && r+=$((1+BASH_REMATCH[1]/8))
done
echo "$r" > "$WorkDir/.temp/ocr_final.txt"
captcha="$r"
elif grep -Eqi "ALPHAONLY" <<< "$data_type" ; then
local i e r
for i in {0..3}; do
e=$(compare -metric NCC -subimage-search "$digitschars" \( "$1" -crop 8x10+$((22+9*i))+8 \) null: 2>&1)
[[ $e =~ @\ ([0-9]+) ]] && r+=$((1+BASH_REMATCH[1]/8)) # compare reports numeric x,y offsets whatever the charset
done
echo "$r" > "$WorkDir/.temp/ocr_final.txt"
captcha="$r"
else
local i e r
for i in {0..3}; do
e=$(compare -metric NCC -subimage-search "$digitschars" \( "$1" -crop 8x10+$((22+9*i))+8 \) null: 2>&1)
[[ $e =~ @\ ([0-9]+) ]] && r+=$((1+BASH_REMATCH[1]/8))
done
echo "$r" > "$WorkDir/.temp/ocr_final.txt"
captcha="$r"
fi
echo -e "[CAPTCHA_CODE:${captcha}]"
}
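compare -metric NCC -subimage-search prints its best match as "score @ x,y" on stderr; the BASH_REMATCH arithmetic above turns that x offset into a glyph-cell index. An isolated sketch of the parse (the sample output string is illustrative):

e='0.97731 @ 54,8'                    # typical compare -subimage-search output
if [[ $e =~ @\ ([0-9]+) ]]; then
  echo $((1 + BASH_REMATCH[1] / 8))   # 8px cells: offset 54 -> cell 6, +1 -> 7
fi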
#!
#! CaptchaOcrImageTesseract: Uses imagemagick to alter, and Tesseract OCR to process captchas
CaptchaOcrImageTesseract() {
local plugName='ocr_captcha'
local plugFunc='CaptchaOcrImageTesseract'
if [ "${DebugPluginsEnabled}" == "true" ]; then
echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}"
fi
DEPENDENCIES=(tesseract convert)
for DEPENDENCY in "${DEPENDENCIES[@]}" ; do
if [ -z "$(which $DEPENDENCY)" ] ; then
@ -53,7 +110,7 @@ CaptchaOcrImage() {
TESSERACT_CMD=$(which tesseract)
export TESSDATA_PREFIX="${ScriptDir}/plugins/ocr/tessdata"
captcha_image_filepath="$1"
tessdata_type="$2"
data_type="$2"
imagemagick_extra_params="$3"
local captcha_image_filename="${captcha_image_filepath##*/}"
if [ ! -f "$captcha_image_filepath" ]; then
@ -110,13 +167,13 @@ CaptchaOcrImage() {
elif grep -Eqi "Brightness_160" <<< "$imagemagick_extra_params" ; then
convert "$IMGtemp" -modulate 160 "$IMGtemp"
fi
if grep -Eqi "NUMBERONLY" <<< "$tessdata_type" ; then
if grep -Eqi "NUMBERONLY" <<< "$data_type" ; then
captcha=$($TESSERACT_CMD --psm 8 --oem 1 -l eng_best --dpi 70 -c tessedit_char_whitelist=0123456789 "$IMGtemp" stdout | tr -d " " | xargs)
captcha=${captcha//[!0-9]/}
elif grep -Eqi "ALPHAONLY" <<< "$tessdata_type" ; then
elif grep -Eqi "ALPHAONLY" <<< "$data_type" ; then
captcha=$($TESSERACT_CMD --psm 8 --oem 1 -l eng_best --dpi 70 -c tessedit_char_whitelist=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ "$IMGtemp" stdout | tr -d " " | xargs)
captcha=${captcha//[!a-zA-Z]/}
elif grep -Eqi "ALPHANUMERIC" <<< "$tessdata_type" ; then
elif grep -Eqi "ALPHANUMERIC" <<< "$data_type" ; then
captcha=$($TESSERACT_CMD --psm 8 --oem 1 -l eng_best --dpi 70 -c tessedit_char_whitelist=0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ "$IMGtemp" stdout | tr -d " " | xargs)
captcha=${captcha//[!0-9a-zA-Z]/}
else
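The whitelist keeps Tesseract inside the expected alphabet, and the parameter expansion strips any stragglers it emits anyway. A digits-only pass in isolation (the image path is illustrative; eng_best per the rename note above):

captcha=$(tesseract --psm 8 --oem 1 -l eng_best --dpi 70 \
  -c tessedit_char_whitelist=0123456789 captcha_prepped.png stdout | tr -d ' ' | xargs)
captcha=${captcha//[!0-9]/}   # belt and braces: drop anything non-numeric
echo "$captcha"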

@ -13,7 +13,7 @@
# 100MB upee upload.ee 5GB fd fileditch.com 256MB qx qu.ax
# 40GB isup isupload.com 100MB fb fileblade.com 20GB atea ateasystems.com
# 100MB ubay uploadbay.net 2GB sysp syspro.com.br 400MB dash dashfile.net
# 512MB anon anonfile.de
# 512MB anon anonfile.de 100MB fget fireget.com 500MB fland fileland.io
# Jirafeau hosts (recommended: upload in 100MB splits, as many hosts only support that; see the split example after this table)
# 10GB anarc anarchaserver 1GB kaz depot.kaz.bzh 5GB squid filesquid
# 10GB nant nantes.cloud 500MB soy soyjak.download 512MB linx linxx.net
@ -21,6 +21,7 @@
# ?? fr4e sendfree4e.fr 100MB harr harrault.fr 100MB acid dl.acid.fr
# ?? skpr skrepr.com 5GB edd eddowding.com 2GB dict dictvm.org
# 10GB cyx cyssoux.fr 5GB frso freesocial.co 512MB herb herbolistique.com
# 512MB gagn gagneux.info
# (Require js -- do not use)
# 4GB daily dailyuploads 1GB kraken krakenfiles 2GB hex hexload
# 4GB bd bedrive.ru 5GB uflix uploadflix
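For the 100MB-split recommendation above, coreutils split covers both directions (file names are illustrative):

split -b 100M -d archive.7z archive.7z.part   # -> archive.7z.part00, part01, ...
cat archive.7z.part* > archive.7z             # reassemble after downloading all parts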