# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host (Jirafeau; see the sketch after this changelog)

# 2025.01.14 - [uwabaki] Add uwabaki.party as download host
# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor IPs (until an alternative is found)
# 2025.01.13 - [ocr_captcha] Create ImageMagick OCR function for testing without tesseract
# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function
# 2025.01.13 - [up_anonfile] Modify to use new upload url
# 2025.01.12 - [ateasystems] Update 404 Not found response
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
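
Several of the new mirrors in this commit (fichier.gagneux.info, data.ramsgaard.me, uploads.euromussels.eu) are Jirafeau instances and are wired in through the same two-variable pattern that appears in the audit diff below. The sketch that follows is illustrative only: the jira_* values are copied from up_gagneux.sh in this commit, while the upload_to_jirafeau function and its plain curl call are an assumed rendering of the generic Jirafeau script.php upload API, not the project's actual upload helper (which is not shown in this diff).

  # Illustrative sketch only: the jira_* values come from up_gagneux.sh in
  # this commit; the function and the curl call are assumptions about the
  # generic Jirafeau script.php API, not the project's real upload helper.
  jira_PostUrlHost='https://fichier.gagneux.info/script.php'
  jira_downloadLinkPrefix='https://fichier.gagneux.info/f.php?h='

  upload_to_jirafeau() {   # hypothetical helper name
      local file="$1" response hash
      # script.php answers in plain text; the first line is the file hash,
      # which f.php?h=<hash> turns into the public download page.
      response=$(curl -s -F "file=@${file}" -F "time=month" "$jira_PostUrlHost")
      hash=$(head -n 1 <<< "$response")
      echo "${jira_downloadLinkPrefix}${hash}"
  }

Adding another Jirafeau mirror under this pattern is mostly a matter of pointing the two jira_* variables at the new instance, which is consistent with gagneux, ramsgaard and euromussels each contributing exactly two URL lines to the audit output below.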
kittykat committed 2025-01-16 07:54:05 +00:00
parent 30eedaf567, commit eeb8054960
Signed by: kittykat (GPG key ID: E3F1556620F70C3C)
29 changed files with 1951 additions and 634 deletions

@@ -1,4 +1,4 @@
DateTime: 25.01.04
DateTime: 25.01.15
Files:
./hosts/1fichier.sh
@@ -19,12 +19,14 @@ Files:
./hosts/dataupload.sh
./hosts/dbree.sh
./hosts/depotkaz.sh
./hosts/desiupload.sh
./hosts/dictvm.sh
./hosts/discreetshare.sh
./hosts/dosya.sh
./hosts/downloadgg.sh
./hosts/eddowding.sh
./hosts/eternalhosting.sh
./hosts/euromussels.sh
./hosts/examples/ExampleNewHost.sh
./hosts/examples/up_example.sh
./hosts/familleflender.sh
@@ -37,6 +39,7 @@ Files:
./hosts/firestorage.sh
./hosts/free4e.sh
./hosts/freesocial.sh
./hosts/gagneux.sh
./hosts/gofile.sh
./hosts/harrault.sh
./hosts/herbolistique.sh
@@ -57,6 +60,7 @@ Files:
./hosts/oshi.sh
./hosts/pixeldrain.sh
./hosts/quax.sh
./hosts/ramsgaard.sh
./hosts/ranoz.sh
./hosts/shareonline.sh
./hosts/skrepr.sh
@@ -90,14 +94,18 @@ Files:
./hosts/up_dictvm.sh
./hosts/up_dosya.sh
./hosts/up_eddowding.sh
./hosts/up_euromussels.sh
./hosts/up_familleflender.sh
./hosts/up_fileblade.sh
./hosts/up_fileditch.sh
./hosts/up_filehaus.sh
./hosts/up_fileland.sh
./hosts/up_filesquid.sh
./hosts/up_fireget.sh
./hosts/up_firestorage.sh
./hosts/up_free4e.sh
./hosts/up_freesocial.sh
./hosts/up_gagneux.sh
./hosts/up_gofile.sh
./hosts/up_harrault.sh
./hosts/up_herbolistique.sh
@@ -116,6 +124,7 @@ Files:
./hosts/up_oshi.sh
./hosts/up_pixeldrain.sh
./hosts/up_quax.sh
./hosts/up_ramsgaard.sh
./hosts/up_ranoz.sh
./hosts/up_shareonline.sh
./hosts/up_skrepr.sh
@@ -132,6 +141,7 @@ Files:
./hosts/up_uploadhive.sh
./hosts/up_uploadraja.sh
./hosts/up_yolobit.sh
./hosts/uwabaki.sh
./hosts/yolobit.sh
./hosts/youdbox.sh
./mad.sh
@@ -181,9 +191,9 @@ _________________________________________________________________________
./hosts/downloadgg.sh:297: -H "Origin: https://download.gg" \
./hosts/eternalhosting.sh:36: if grep -Eqi '\.onion' <<< "$pUrlMod" && grep -Eqi 'https://' <<< "$pUrlMod" ; then
./hosts/examples/up_example.sh:105: local ar_HUP[0]='https://oshi.at'
./hosts/fileblade.sh:310: if ! grep -Eqi '<a href="https://de6.fileblade.com/files/' <<< "$response"; then
./hosts/fileblade.sh:322: download_url=$(grep -oP -m 1 '(?<=a href="https://de6.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
./hosts/fileblade.sh:324: download_url='https://de6.fileblade.com/files/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/fileblade.sh:310: if ! grep -oPi '(?=href="https://.*?\.fileblade.com/files/.*?" class=.*$)' <<< "$response"; then
./hosts/fileblade.sh:322: download_url=$(grep -oP -m 1 '(?<=a href="https://).*?(?=\.fileblade.com/files/).*?(?=" class=.*$)' <<< "$response")
./hosts/fileblade.sh:324: download_url='https://'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/filedot.sh:119: "https://filedot.to/login.html")
./hosts/filedot.sh:160: -H "Origin: https://filedot.to" \
./hosts/filedot.sh:162: -H "Referer: https://filedot.to/login.html" \
@@ -206,9 +216,9 @@ _________________________________________________________________________
./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
./hosts/innocent.sh:48: download_url="${download_url/https:/http:}"
./hosts/isupload.sh:133: post_action="${remote_url//https:/http:}"
./hosts/isupload.sh:208: if ! grep -Eqi '<a href="http://isupload.com/cgi-bin/dl.cgi/' <<< "$response"; then
./hosts/isupload.sh:220: download_url=$(grep -oP -m 1 '(?<=a href="http://isupload.com/cgi-bin/dl.cgi/).*?(?=">.*$)' <<< "$response")
./hosts/isupload.sh:222: download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/isupload.sh:211: if ! grep -Eqi '<a href="http://isupload.com/cgi-bin/dl.cgi/' <<< "$response"; then
./hosts/isupload.sh:223: download_url=$(grep -oP -m 1 '(?<=a href="http://isupload.com/cgi-bin/dl.cgi/).*?(?=">.*$)' <<< "$response")
./hosts/isupload.sh:225: download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
./hosts/kraken.sh:155: kraken_action="https://krakenfiles.com/download/${kraken_action##*/}"
./hosts/nippy.sh:160: download_url="https:"$(grep -oP '(?<=<h2><a href='\'').*(?='\'' class=)' <<< "$response")
./hosts/nippy.sh:229: cdn_url="https:"$(grep -oP '(?<=location: ).*$' <<< "$file_header")
@@ -236,7 +246,7 @@ _________________________________________________________________________
./hosts/up_acid.sh:40: jira_downloadLinkPrefix='https://dl.acid.fr/f.php?h='
./hosts/up_anarchaserver.sh:37: jira_PostUrlHost='https://transitional.anarchaserver.org/jirafeau/script.php'
./hosts/up_anarchaserver.sh:40: jira_downloadLinkPrefix='https://transitional.anarchaserver.org/jirafeau/f.php?h='
./hosts/up_anonfile.sh:99: PostUrlHost='https://anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_anonfile.sh:99: PostUrlHost='https://file-01.anonfile.de/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_anonfile.sh:121: downloadLink="https://anonfile.de/$hash"
./hosts/up_anonsharing.sh:99: PostUrlHost='https://anonsharing.com/ajax/file_upload_handler?r=anonsharing.com'
./hosts/up_anonsharing.sh:109: if grep -Eqi '"error":null,"url":"https:\\/\\/anonsharing.com\\/' <<< "${response}" ; then
@@ -282,6 +292,8 @@ _________________________________________________________________________
./hosts/up_dosya.sh:100: local ar_HUP[1]='https://dl3.lim1.dosyaupload.com/core/page/ajax/file_upload_handler.ajax.php?r=www.dosyaupload.com'
./hosts/up_eddowding.sh:37: jira_PostUrlHost='https://files.eddowding.com/script.php'
./hosts/up_eddowding.sh:40: jira_downloadLinkPrefix='https://files.eddowding.com/f.php?h='
./hosts/up_euromussels.sh:37: jira_PostUrlHost='https://uploads.euromussels.eu/script.php'
./hosts/up_euromussels.sh:40: jira_downloadLinkPrefix='https://uploads.euromussels.eu/f.php?h='
./hosts/up_familleflender.sh:37: jira_PostUrlHost='https://famille-flender.fr/jirafeau/script.php'
./hosts/up_familleflender.sh:40: jira_downloadLinkPrefix='https://famille-flender.fr/f.php?h='
./hosts/up_fileblade.sh:97: local ar_HUP[0]="https://de6.fileblade.com/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon"
@@ -291,8 +303,14 @@ _________________________________________________________________________
./hosts/up_filehaus.sh:114: if grep -Eqi 'HTTP/.* 200|https://cdn' <<< "${response}" ; then
./hosts/up_filehaus.sh:115: url=$(grep -oPi '(?<=https://).*(?=\.filehaus\.su).*?(?=$)' <<< "$response")
./hosts/up_filehaus.sh:117: downloadLink="https://${url}"
./hosts/up_fileland.sh:99: PostUrlHost='https://fs300.fileland.io/cgi-bin/upload.cgi?upload_type=file&amp;utype=anon'
./hosts/up_fileland.sh:121: downloadLink="https://fileland.io/$hash"
./hosts/up_filesquid.sh:37: jira_PostUrlHost='https://filesquid.net/script.php'
./hosts/up_filesquid.sh:40: jira_downloadLinkPrefix='https://filesquid.net/f.php?h='
./hosts/up_fireget.sh:99: PostUrlHost='https://s22.fireget.com/cgi-bin/upload.cgi'
./hosts/up_fireget.sh:118: if grep -Eqi 'Location: https://fireget.com/\?&fn=' <<< "${response}" ; then
./hosts/up_fireget.sh:119: hash=$(grep -oPi -m 1 '(?<=Location: https://fireget.com/\?&fn=).*?(?=&st=OK.*$)' <<< "$response")
./hosts/up_fireget.sh:121: downloadLink="https://fireget.com/$hash"
./hosts/up_firestorage.sh:99: local ar_HUP[0]='https://server65.firestorage.jp/upload.cgi'
./hosts/up_firestorage.sh:100: local ar_HUP[1]='https://server62.firestorage.jp/upload.cgi'
./hosts/up_firestorage.sh:101: local ar_HUP[2]='https://server39.firestorage.jp/upload.cgi'
@@ -306,6 +324,8 @@ _________________________________________________________________________
./hosts/up_free4e.sh:40: jira_downloadLinkPrefix='https://send.free4e.com/f.php?h='
./hosts/up_freesocial.sh:37: jira_PostUrlHost='https://files.freesocial.co/script.php'
./hosts/up_freesocial.sh:40: jira_downloadLinkPrefix='https://files.freesocial.co/f.php?h='
./hosts/up_gagneux.sh:37: jira_PostUrlHost='https://fichier.gagneux.info/script.php'
./hosts/up_gagneux.sh:40: jira_downloadLinkPrefix='https://fichier.gagneux.info/f.php?h='
./hosts/up_gofile.sh:102: response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers")
./hosts/up_gofile.sh:114: local ar_HUP[0]="https://$gofileStoreServer.gofile.io/contents/uploadFile"
./hosts/up_gofile.sh:138: hash=$(grep -oPi '(?<=https://gofile.io/d/).*?(?=")' <<< "$response")
@@ -367,6 +387,8 @@ _________________________________________________________________________
./hosts/up_pixeldrain.sh:107: PostUrlHost='https://pixeldrain.com/api/file/'
./hosts/up_pixeldrain.sh:136: downloadLink="https://pixeldrain.com/u/${hash}"
./hosts/up_quax.sh:99: PostUrlHost='https://qu.ax/upload.php'
./hosts/up_ramsgaard.sh:37: jira_PostUrlHost='https://data.ramsgaard.me/script.php'
./hosts/up_ramsgaard.sh:40: jira_downloadLinkPrefix='https://data.ramsgaard.me/f.php?h='
./hosts/up_ranoz.sh:99: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
./hosts/up_ranoz.sh:111: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
./hosts/up_shareonline.sh:99: PostUrlHost='https://ns07.zipcluster.com/upload.php'
@@ -403,75 +425,75 @@ _________________________________________________________________________
./hosts/up_uploadraja.sh:99: PostUrlHost='https://awsaisiaposisition69.kalpstudio.xyz/cgi-bin/upload.cgi?upload_type=file&utype=anon'
./hosts/up_uploadraja.sh:119: downloadLink="https://uploadraja.com/$hash"
./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
./mad.sh:672: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:674: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:677: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:679: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:700: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:702: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:705: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:707: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:728: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:730: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:733: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:735: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:757: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:759: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:762: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:764: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:785: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #http (if changed)
./mad.sh:787: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #direct url https
./mad.sh:790: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:792: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:818: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:820: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:840: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:861: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:863: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:866: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:868: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:884: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:886: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:889: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:891: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:910: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:912: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:915: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:917: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:937: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:939: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:942: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:944: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:962: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:964: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:967: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:969: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:988: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:990: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:993: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:995: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1413: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1430: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1536: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1553: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1816: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1844: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1866: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3182: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3695:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3792: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3794: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3796: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4015: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4016: remote_url=${remote_url/http:/https:}
./mad.sh:4037: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4038: remote_url=${remote_url/http:/https:}
./mad.sh:4404: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4405: remote_url=${remote_url/http:/https:}
./mad.sh:4463: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4464: remote_url=${remote_url/http:/https:}
./mad.sh:4489: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4490: remote_url=${remote_url/http:/https:}
./mad.sh:683: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
./mad.sh:685: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
./mad.sh:688: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:690: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:711: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
./mad.sh:713: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
./mad.sh:716: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:718: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:739: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
./mad.sh:741: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
./mad.sh:744: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:746: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:768: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
./mad.sh:770: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
./mad.sh:773: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:775: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:799: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:801: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
./mad.sh:804: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:806: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:832: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
./mad.sh:834: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
./mad.sh:854: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
./mad.sh:875: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
./mad.sh:877: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
./mad.sh:880: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:882: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:898: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
./mad.sh:900: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
./mad.sh:903: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:905: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:924: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
./mad.sh:926: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
./mad.sh:929: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:931: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:951: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
./mad.sh:953: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
./mad.sh:956: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:958: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:976: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
./mad.sh:978: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
./mad.sh:981: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:983: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1002: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
./mad.sh:1004: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
./mad.sh:1007: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
./mad.sh:1009: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
./mad.sh:1427: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
./mad.sh:1444: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1550: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
./mad.sh:1567: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
./mad.sh:1830: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1858: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:1880: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
./mad.sh:3196: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
./mad.sh:3711:arg2="$2" # auto, filelist, <https://url>
./mad.sh:3808: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
./mad.sh:3810: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
./mad.sh:3812: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar'
./mad.sh:4031: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4032: remote_url=${remote_url/http:/https:}
./mad.sh:4053: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4054: remote_url=${remote_url/http:/https:}
./mad.sh:4420: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4421: remote_url=${remote_url/http:/https:}
./mad.sh:4479: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4480: remote_url=${remote_url/http:/https:}
./mad.sh:4505: if [[ ${remote_url} =~ ^http: ]] ; then
./mad.sh:4506: remote_url=${remote_url/http:/https:}
./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null)
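
Most of the mad.sh entries in this audit are variations of a single sed idiom: when a download finishes, fails, is removed, or needs a retry, the matching line in the input list is commented out in place and tagged with a status marker (#OK#, #FAIL#, #RETRY#, #REMOVED#, and so on), and the substitution is repeated with the http: form of the URL so an entry that was later upgraded to https: still matches. Below is a minimal standalone sketch of that idiom, using hypothetical url, filename and InputFile values as stand-ins for what mad.sh derives at run time.

  # Hypothetical example values, for illustration only.
  url='https://example.host/f/abcd'
  filename='archive.part1.rar'
  InputFile='links.txt'

  # '>' is used as the sed delimiter because the patterns contain '/'.
  # In the replacement, '#&' keeps the whole matched line (&) behind a '#'.
  sed -i -e "s>^${url}.*>#& #OK# ${filename}>g" "${InputFile}"
  # Repeat with the http: variant so a line that was entered as http: and
  # later rewritten to https: is still caught.
  sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}"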