diff --git a/.audit/mad-audit-curl.log b/.audit/mad-audit-curl.log
index 620a0de..6399644 100755
--- a/.audit/mad-audit-curl.log
+++ b/.audit/mad-audit-curl.log
@@ -1,27 +1,34 @@
-DateTime: 24.11.18
+DateTime: 24.12.26
 Files:
 ./hosts/1fichier.sh
 ./hosts/9saves.sh
 ./hosts/acid.sh
 ./hosts/anarchaserver.sh
+./hosts/anonfile.sh
 ./hosts/anonsharing.sh
 ./hosts/archived/nekofile.sh
+./hosts/ateasystems.sh
 ./hosts/bedrive.sh
 ./hosts/biteblob.sh
 ./hosts/bowfile.sh
 ./hosts/click.sh
+./hosts/cyssoux.sh
 ./hosts/dailyuploads.sh
+./hosts/dashfile.sh
 ./hosts/dataupload.sh
 ./hosts/dbree.sh
 ./hosts/depotkaz.sh
+./hosts/dictvm.sh
 ./hosts/discreetshare.sh
 ./hosts/dosya.sh
 ./hosts/downloadgg.sh
+./hosts/eddowding.sh
 ./hosts/eternalhosting.sh
 ./hosts/examples/ExampleNewHost.sh
 ./hosts/examples/up_example.sh
 ./hosts/familleflender.sh
+./hosts/fileblade.sh
 ./hosts/fileditch.sh
 ./hosts/filedoge.sh
 ./hosts/filedot.sh
@@ -29,14 +36,19 @@ Files:
 ./hosts/filesquid.sh
 ./hosts/firestorage.sh
 ./hosts/free4e.sh
+./hosts/freesocial.sh
 ./hosts/gofile.sh
 ./hosts/harrault.sh
+./hosts/herbolistique.sh
 ./hosts/hexload.sh
 ./hosts/innocent.sh
+./hosts/isupload.sh
 ./hosts/kraken.sh
 ./hosts/lainsafe.sh
 ./hosts/lainsafe_onion.sh
 ./hosts/linxx.sh
+./hosts/mediafire.sh
+./hosts/moocloud.sh
 ./hosts/nantes.sh
 ./hosts/netlib.sh
 ./hosts/nippy.sh
@@ -44,15 +56,18 @@ Files:
 ./hosts/offshorecat.sh
 ./hosts/oshi.sh
 ./hosts/pixeldrain.sh
+./hosts/quax.sh
 ./hosts/ranoz.sh
 ./hosts/shareonline.sh
 ./hosts/skrepr.sh
 ./hosts/soyjak.sh
+./hosts/syspro.sh
 ./hosts/tempfileme.sh
 ./hosts/tempsh.sh
 ./hosts/torup.sh
 ./hosts/turboonion.sh
 ./hosts/up2share.sh
+./hosts/uploadbay.sh
 ./hosts/uploadee.sh
 ./hosts/uploadev.sh
 ./hosts/uploadflix.sh
@@ -60,28 +75,39 @@ Files:
 ./hosts/up_1fichier.sh
 ./hosts/up_acid.sh
 ./hosts/up_anarchaserver.sh
+./hosts/up_anonfile.sh
 ./hosts/up_anonsharing.sh
+./hosts/up_ateasystems.sh
 ./hosts/up_axfc.sh
 ./hosts/up_bedrive.sh
 ./hosts/up_bowfile.sh
+./hosts/up_cyssoux.sh
 ./hosts/up_dailyuploads.sh
+./hosts/up_dashfile.sh
 ./hosts/up_dataupload.sh
 ./hosts/up_dbree.sh
 ./hosts/up_depotkaz.sh
+./hosts/up_dictvm.sh
 ./hosts/up_dosya.sh
+./hosts/up_eddowding.sh
 ./hosts/up_familleflender.sh
+./hosts/up_fileblade.sh
 ./hosts/up_fileditch.sh
 ./hosts/up_filehaus.sh
 ./hosts/up_filesquid.sh
 ./hosts/up_firestorage.sh
 ./hosts/up_free4e.sh
+./hosts/up_freesocial.sh
 ./hosts/up_gofile.sh
 ./hosts/up_harrault.sh
+./hosts/up_herbolistique.sh
 ./hosts/up_hexload.sh
 ./hosts/up_innocent.sh
+./hosts/up_isupload.sh
 ./hosts/up_kouploader.sh
 ./hosts/up_kraken.sh
 ./hosts/up_linxx.sh
+./hosts/up_moocloud.sh
 ./hosts/up_nantes.sh
 ./hosts/up_netlib.sh
 ./hosts/up_nippy.sh
@@ -89,14 +115,17 @@ Files:
 ./hosts/up_offshorecat.sh
 ./hosts/up_oshi.sh
 ./hosts/up_pixeldrain.sh
+./hosts/up_quax.sh
 ./hosts/up_ranoz.sh
 ./hosts/up_shareonline.sh
 ./hosts/up_skrepr.sh
 ./hosts/up_soyjak.sh
+./hosts/up_syspro.sh
 ./hosts/up_tempfileme.sh
 ./hosts/up_tempsh.sh
 ./hosts/up_torup.sh
 ./hosts/up_turboonion.sh
+./hosts/up_uploadbay.sh
 ./hosts/up_uploadee.sh
 ./hosts/up_uploadev.sh
 ./hosts/up_uploadflix.sh
@@ -130,11 +159,27 @@ _________________________________________________________________________
 ./hosts/9saves.sh:297: tor_curl_request --insecure \
 ./hosts/9saves.sh:304: tor_curl_request --insecure \
 ./hosts/9saves.sh:321: tor_curl_request --insecure \
+./hosts/anonfile.sh:96: response=$(tor_curl_request --insecure -L -s -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \
+./hosts/anonfile.sh:186: response=$(tor_curl_request --insecure -L -s -X POST \
+./hosts/anonfile.sh:240: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
+./hosts/anonfile.sh:340: response=$(tor_curl_request --insecure -L -s -X POST \
+./hosts/anonfile.sh:446: file_header=$(tor_curl_request -i -s --head \
+./hosts/anonfile.sh:549: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/anonfile.sh:551: tor_curl_request --insecure \
+./hosts/anonfile.sh:557: tor_curl_request --insecure \
+./hosts/anonfile.sh:564: tor_curl_request --insecure \
+./hosts/anonfile.sh:581: tor_curl_request --insecure \
 ./hosts/anonsharing.sh:91: response=$(tor_curl_request --insecure -i -s \
 ./hosts/anonsharing.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s \
 ./hosts/anonsharing.sh:158: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -s -i \
 ./hosts/anonsharing.sh:273: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --output "$file_path"
 ./hosts/anonsharing.sh:275: tor_curl_request --insecure "$download_url" --output "$file_path"
+./hosts/ateasystems.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
+./hosts/ateasystems.sh:218: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/ateasystems.sh:220: tor_curl_request --insecure \
+./hosts/ateasystems.sh:225: tor_curl_request --insecure \
+./hosts/ateasystems.sh:231: tor_curl_request --insecure \
+./hosts/ateasystems.sh:237: tor_curl_request --insecure \
 ./hosts/bedrive.sh:90: response=$(tor_curl_request --insecure -L -s \
 ./hosts/bedrive.sh:149: file_header=$(tor_curl_request --insecure --head -L -i -s \
 ./hosts/bedrive.sh:270: if [ "${UseTorCurlImpersonate}" == "true" ]; then
@@ -173,6 +218,15 @@ _________________________________________________________________________
 ./hosts/dailyuploads.sh:504: tor_curl_request --insecure \
 ./hosts/dailyuploads.sh:511: tor_curl_request --insecure \
 ./hosts/dailyuploads.sh:528: tor_curl_request --insecure \
+./hosts/dashfile.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \
+./hosts/dashfile.sh:169: response=$(tor_curl_request --insecure -L -s -X POST \
+./hosts/dashfile.sh:297: response=$(tor_curl_request --insecure -L -s -X POST \
+./hosts/dashfile.sh:386: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
+./hosts/dashfile.sh:484: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/dashfile.sh:486: tor_curl_request --insecure \
+./hosts/dashfile.sh:491: tor_curl_request --insecure \
+./hosts/dashfile.sh:497: tor_curl_request --insecure \
+./hosts/dashfile.sh:513: tor_curl_request --insecure \
 ./hosts/dataupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url")
 ./hosts/dataupload.sh:166: response=$(tor_curl_request --insecure -svo. -X POST \
 ./hosts/dataupload.sh:234: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
@@ -200,6 +254,15 @@ _________________________________________________________________________
 ./hosts/examples/ExampleNewHost.sh:199: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
 ./hosts/examples/ExampleNewHost.sh:201: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
 ./hosts/examples/up_example.sh:112: response=$(tor_curl_upload --insecure \
+./hosts/fileblade.sh:90: response=$(tor_curl_request --insecure -L -s -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" "$remote_url")
+./hosts/fileblade.sh:167: response=$(tor_curl_request --insecure -L -s -X POST \
+./hosts/fileblade.sh:254: response=$(tor_curl_request --insecure -L -s -X POST \
+./hosts/fileblade.sh:323: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
+./hosts/fileblade.sh:438: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/fileblade.sh:440: tor_curl_request --insecure -L \
+./hosts/fileblade.sh:444: tor_curl_request --insecure -L \
+./hosts/fileblade.sh:449: tor_curl_request --insecure \
+./hosts/fileblade.sh:464: tor_curl_request --insecure \
 ./hosts/fileditch.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
 ./hosts/fileditch.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/fileditch.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
@@ -210,35 +273,48 @@ _________________________________________________________________________
 ./hosts/filedot.sh:406: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
 ./hosts/filedot.sh:499: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/filedot.sh:501: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
-./hosts/filehaus.sh:100: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url")
-./hosts/filehaus.sh:197: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/filehaus.sh:199: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/filehaus.sh:100: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url")
+./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/filehaus.sh:195: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
 ./hosts/firestorage.sh:98: response=$(tor_curl_request --insecure -L -s "${fixed_url}")
 ./hosts/firestorage.sh:226: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
 ./hosts/firestorage.sh:335: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/firestorage.sh:337: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
 ./hosts/gofile.sh:97: response=$(tor_curl_request --insecure -s -X POST \
 ./hosts/gofile.sh:170: response=$(tor_curl_request --insecure -G -L -s \
-./hosts/gofile.sh:241: file_header=$(tor_curl_request --insecure -L --head -s \
-./hosts/gofile.sh:359: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-./hosts/gofile.sh:373: tor_curl_request --insecure -G \
+./hosts/gofile.sh:250: file_header=$(tor_curl_request --insecure -L --head -s \
+./hosts/gofile.sh:369: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+./hosts/gofile.sh:383: tor_curl_request --insecure -G \
 ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:254: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$download_url")
 ./hosts/hexload.sh:321: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
 ./hosts/hexload.sh:323: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/innocent.sh:97: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
-./hosts/innocent.sh:100: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
+./hosts/innocent.sh:97: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
+./hosts/innocent.sh:100: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
 ./hosts/innocent.sh:107: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
 ./hosts/innocent.sh:119: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
-./hosts/innocent.sh:211: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
-./hosts/innocent.sh:214: tor_curl_request --insecure "$download_url" --output "$file_path"
+./hosts/innocent.sh:211: tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path"
+./hosts/innocent.sh:214: tor_curl_request_extended --insecure "$download_url" --output "$file_path"
+./hosts/isupload.sh:90: response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url")
+./hosts/isupload.sh:164: response=$(tor_curl_request_extended --insecure -L -s -X POST \
+./hosts/isupload.sh:238: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
+./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
+./hosts/isupload.sh:248: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
+./hosts/isupload.sh:260: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
+./hosts/isupload.sh:352: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
+./hosts/isupload.sh:396: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./hosts/isupload.sh:398: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
 ./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}")
 ./hosts/kraken.sh:169: down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}")
 ./hosts/kraken.sh:186: file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url")
 ./hosts/kraken.sh:286: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path"
 ./hosts/kraken.sh:288: tor_curl_request --insecure --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path"
+./hosts/mediafire.sh:94: response=$(tor_curl_request --insecure -L -s \
+./hosts/mediafire.sh:157: file_header=$(tor_curl_request --insecure -L --head -s \
+./hosts/mediafire.sh:276: tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path"
+./hosts/mediafire.sh:279: tor_curl_request_extended --insecure "$download_url" --output "$file_path"
 ./hosts/nippy.sh:119: response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url")
 ./hosts/nippy.sh:188: file_header=$(tor_curl_request --insecure -L --head -s \
 ./hosts/nippy.sh:299: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
@@ -253,13 +329,22 @@ _________________________________________________________________________
 ./hosts/pixeldrain.sh:328: tor_curl_request --insecure \
 ./hosts/pixeldrain.sh:333: tor_curl_request --insecure \
 ./hosts/pixeldrain.sh:342: tor_curl_request --insecure \
+./hosts/quax.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
+./hosts/quax.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
 ./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
-./hosts/ranoz.sh:144: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
-./hosts/ranoz.sh:253: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./hosts/ranoz.sh:255: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/ranoz.sh:259: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/ranoz.sh:264: tor_curl_request --insecure -L -G --no-alpn \
-./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
+./hosts/ranoz.sh:259: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/ranoz.sh:261: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:265: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/ranoz.sh:285: tor_curl_request --insecure -L -G --no-alpn \
+./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
+./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/syspro.sh:188: tor_curl_request --insecure -L \
+./hosts/syspro.sh:193: tor_curl_request --insecure \
+./hosts/syspro.sh:199: tor_curl_request --insecure -L \
+./hosts/syspro.sh:205: tor_curl_request --insecure -L \
 ./hosts/tempfileme.sh:89: response=$(tor_curl_request --insecure -L -s "$remote_url")
 ./hosts/tempfileme.sh:170: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url}" "$download_url")
 ./hosts/tempfileme.sh:298: if [ "${UseTorCurlImpersonate}" == "true" ]; then
@@ -309,61 +394,57 @@ _________________________________________________________________________
 ./hosts/uploadflix.sh:288: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
 ./hosts/uploadhive.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
 ./hosts/uploadhive.sh:134: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url")
-./hosts/uploadhive.sh:175: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url")
-./hosts/uploadhive.sh:269: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
-./hosts/uploadhive.sh:271: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
+./hosts/uploadhive.sh:185: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url")
+./hosts/uploadhive.sh:279: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./hosts/uploadhive.sh:281: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
 ./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/")
 ./hosts/up_1fichier.sh:180: response=$(tor_curl_upload --insecure -L \
-./hosts/up_acid.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_anarchaserver.sh:102: response=$(tor_curl_upload --insecure -i \
+./hosts/up_anonfile.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_anonsharing.sh:102: response=$(tor_curl_upload --insecure -i \
+./hosts/up_ateasystems.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_axfc.sh:109: response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url")
 ./hosts/up_axfc.sh:136: response=$(tor_curl_upload --insecure -L -s -X POST \
 ./hosts/up_axfc.sh:184: response=$(tor_curl_upload --insecure -L -i -X POST \
 ./hosts/up_bedrive.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_bowfile.sh:108: response=$(tor_curl_request --insecure -L -i \
 ./hosts/up_dailyuploads.sh:109: response=$(tor_curl_upload --insecure -i \
+./hosts/up_dashfile.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_dataupload.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_dbree.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_depotkaz.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_dosya.sh:107: response=$(tor_curl_upload --insecure -L -i \
-./hosts/up_familleflender.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_fileditch.sh:102: response=$(tor_curl_upload --insecure -i \
+./hosts/up_fileblade.sh:104: response=$(tor_curl_upload --insecure -i \
+./hosts/up_fileditch.sh:107: response=$(tor_curl_upload --insecure -i -L \
 ./hosts/up_filehaus.sh:106: response=$(tor_curl_upload --insecure -i \
-./hosts/up_filesquid.sh:104: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_firestorage.sh:113: response=$(tor_curl_upload --insecure -i \
-./hosts/up_free4e.sh:94: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_gofile.sh:102: response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers")
 ./hosts/up_gofile.sh:121: response=$(tor_curl_upload --insecure -i \
-./hosts/up_harrault.sh:101: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_hexload.sh:109: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_innocent.sh:99: response=$(tor_curl_upload --insecure -D - -o /dev/null \
+./hosts/up_isupload.sh:104: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_kouploader.sh:108: response=$(tor_curl_request --insecure -L -s -b "${ko_cookie_jar}" -c "${ko_cookie_jar}" "$PostUrlHost")
 ./hosts/up_kouploader.sh:132: response=$(tor_curl_upload --insecure -L -i \
 ./hosts/up_kraken.sh:115: response=$(tor_curl_upload --insecure -i \
-./hosts/up_linxx.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_nantes.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_netlib.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_nippy.sh:125: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_nofile.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_offshorecat.sh:104: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_oshi.sh:110: response=$(tor_curl_upload --insecure \
-./hosts/up_pixeldrain.sh:113: response=$(tor_curl_upload --insecure -i \
+./hosts/up_pixeldrain.sh:112: response=$(tor_curl_upload --insecure -X PUT \
+./hosts/up_quax.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_ranoz.sh:102: response=$(tor_curl_upload --insecure -L -i -s \
-./hosts/up_ranoz.sh:131: response=$(tor_curl_upload --insecure -i -X PUT \
+./hosts/up_ranoz.sh:129: response=$(tor_curl_upload --insecure -i -X PUT \
 ./hosts/up_shareonline.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_skrepr.sh:94: response=$(tor_curl_upload --insecure -i \
-./hosts/up_soyjak.sh:102: response=$(tor_curl_upload --insecure -i \
+./hosts/up_syspro.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_tempfileme.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_tempsh.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_torup.sh:109: response=$(tor_curl_request --insecure -L -s -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \
 ./hosts/up_torup.sh:149: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_turboonion.sh:99: response=$(tor_curl_upload --insecure \
+./hosts/up_uploadbay.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_uploadee.sh:106: response=$(tor_curl_request --insecure -L -s "https://www.upload.ee/ubr_link_upload.php")
 ./hosts/up_uploadee.sh:176: response=$(tor_curl_upload --insecure -i -L \
 ./hosts/up_uploadev.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_uploadflix.sh:106: response=$(tor_curl_upload --insecure -i \
-./hosts/up_uploadhive.sh:102: response=$(tor_curl_upload --insecure -i \
+./hosts/up_uploadhive.sh:128: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_uploadraja.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_yolobit.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/youdbox.sh:95: response=$(tor_curl_request --insecure -L -i -s "${fixed_url}")
@@ -371,117 +452,119 @@ _________________________________________________________________________
 ./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
 ./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
-./mad.sh:107:UseTorCurlImpersonate=false
-./mad.sh:385:tor_curl_request() {
-./mad.sh:386: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:387: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:389: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:392:tor_curl_request_extended() {
-./mad.sh:393: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:394: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@"
-./mad.sh:396: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@"
-./mad.sh:399:tor_curl_upload() {
 ./mad.sh:400: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:402: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
-./mad.sh:404: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:408: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
-./mad.sh:410: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
-./mad.sh:1268:install_curl_impersonate() {
-./mad.sh:1270: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
-./mad.sh:1271: echo -e "- Currently uses curl v8.1.1."
-./mad.sh:1275: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
-./mad.sh:1276: echo -e "+ Currently uses curl v8.7.1"
-./mad.sh:1280: PS3='Please select which curl_impersonate to install: '
-./mad.sh:1288: install_curl_impersonate_lwthiker_orig
-./mad.sh:1292: install_curl_impersonate_lexiforest_fork
-./mad.sh:1302:install_curl_impersonate_lwthiker_orig() {
-./mad.sh:1306: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
-./mad.sh:1307: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
-./mad.sh:1310: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
-./mad.sh:1313: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
-./mad.sh:1315: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1318: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
-./mad.sh:1328: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
-./mad.sh:1330: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1333: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
-./mad.sh:1335: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
-./mad.sh:1383: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
-./mad.sh:1412: echo -e "| Extracting curl_impersonate..."
-./mad.sh:1414: rm -f "${ScriptDir}"/curl*
-./mad.sh:1415: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
-./mad.sh:1416: mv "$extract_location/curl_ff109" "${ScriptDir}/"
-./mad.sh:1417: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
-./mad.sh:1425:install_curl_impersonate_lexiforest_fork() {
-./mad.sh:1429: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
-./mad.sh:1430: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
-./mad.sh:1433: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
-./mad.sh:1436: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
-./mad.sh:1438: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1441: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
-./mad.sh:1451: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
-./mad.sh:1453: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1456: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
-./mad.sh:1458: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
-./mad.sh:1506: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
-./mad.sh:1535: echo -e "| Extracting curl_impersonate..."
-./mad.sh:1537: rm -f "${ScriptDir}"/curl*
-./mad.sh:1538: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
-./mad.sh:1539: mv "$extract_location/curl_chrome124" "${ScriptDir}/"
-./mad.sh:1540: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
-./mad.sh:1702: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
-./mad.sh:1710: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1711: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1720: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
-./mad.sh:1722: echo -e "$maud_curl"
-./mad.sh:1724: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1726: echo -e "$maud_torcurl"
-./mad.sh:1738: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1739: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1748: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
-./mad.sh:1750: echo -e "$maud_curl"
-./mad.sh:1752: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1754: echo -e "$maud_torcurl"
-./mad.sh:1760: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1761: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1770: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
-./mad.sh:1772: echo -e "$maud_curl"
-./mad.sh:1774: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1776: echo -e "$maud_torcurl"
-./mad.sh:2723: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:2724: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:2726: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
-./mad.sh:2898: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:2899: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:2901: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
-./mad.sh:3099: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
-./mad.sh:3106: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
-./mad.sh:3234: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
-./mad.sh:3278: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3280: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3453:if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:3454: curl_impersonate=()
-./mad.sh:3455: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
-./mad.sh:3456: bFoundCurlHeader=false
-./mad.sh:3460: curl_impersonate=($fil)
-./mad.sh:3461: bFoundCurlHeader=true
-./mad.sh:3465: if [ "$bFoundCurlHeader" == "false" ]; then
-./mad.sh:3466: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
-./mad.sh:3469: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
-./mad.sh:3472: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
-./mad.sh:3474: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
-./mad.sh:3478: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
-./mad.sh:3479: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
-./mad.sh:3482: echo -e "run $0 install_curl_impersonate\\n"
-./mad.sh:3484: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
-./mad.sh:3485: UseTorCurlImpersonate=false
-./mad.sh:3486: install_curl_impersonate
-./mad.sh:3570: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
-./mad.sh:3571: printf " %s install_curl_impersonate\\n" "$0"
-./mad.sh:3649:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
-./mad.sh:3650: install_curl_impersonate
-./mad.sh:3681:if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:3682: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:3684: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:100:UseTorCurlImpersonate=false
+./mad.sh:391:tor_curl_request() {
+./mad.sh:392: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:393: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:395: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:398:tor_curl_request_extended() {
 ./mad.sh:400: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:401: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:403: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:406:tor_curl_upload() {
+./mad.sh:407: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:409: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
+./mad.sh:411: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:415: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
+./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
+./mad.sh:1361:install_curl_impersonate() {
+./mad.sh:1363: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
+./mad.sh:1364: echo -e "- Currently uses curl v8.1.1."
+./mad.sh:1368: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
+./mad.sh:1369: echo -e "+ Currently uses curl v8.7.1"
+./mad.sh:1373: PS3='Please select which curl_impersonate to install: '
+./mad.sh:1381: install_curl_impersonate_lwthiker_orig
+./mad.sh:1385: install_curl_impersonate_lexiforest_fork
+./mad.sh:1395:install_curl_impersonate_lwthiker_orig() {
+./mad.sh:1399: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
+./mad.sh:1400: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
+./mad.sh:1403: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
+./mad.sh:1406: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
+./mad.sh:1408: debugHtml "github" "lbf_inst_curlimp$j" "$response"
+./mad.sh:1411: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
+./mad.sh:1421: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
+./mad.sh:1423: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1426: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
+./mad.sh:1428: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
+./mad.sh:1476: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
+./mad.sh:1505: echo -e "| Extracting curl_impersonate..."
+./mad.sh:1507: rm -f "${ScriptDir}"/curl*
+./mad.sh:1508: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
+./mad.sh:1509: mv "$extract_location/curl_ff109" "${ScriptDir}/"
+./mad.sh:1510: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
+./mad.sh:1518:install_curl_impersonate_lexiforest_fork() {
+./mad.sh:1522: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
+./mad.sh:1523: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
+./mad.sh:1526: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
+./mad.sh:1529: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
+./mad.sh:1531: debugHtml "github" "lbf_inst_curlimp$j" "$response"
+./mad.sh:1534: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
+./mad.sh:1544: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
+./mad.sh:1546: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1549: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
+./mad.sh:1551: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
+./mad.sh:1599: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
+./mad.sh:1628: echo -e "| Extracting curl_impersonate..."
+./mad.sh:1630: rm -f "${ScriptDir}"/curl*
+./mad.sh:1631: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
+./mad.sh:1632: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
+./mad.sh:1633: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
+./mad.sh:1795: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
+./mad.sh:1803: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1804: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1813: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
+./mad.sh:1815: echo -e "$maud_curl"
+./mad.sh:1817: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1819: echo -e "$maud_torcurl"
+./mad.sh:1831: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1832: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1841: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
+./mad.sh:1843: echo -e "$maud_curl"
+./mad.sh:1845: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1847: echo -e "$maud_torcurl"
+./mad.sh:1853: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1854: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1863: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
+./mad.sh:1865: echo -e "$maud_curl"
+./mad.sh:1867: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1869: echo -e "$maud_torcurl"
+./mad.sh:2816: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:2817: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:2819: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:2991: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:2992: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:2994: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:3192: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
+./mad.sh:3199: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
+./mad.sh:3327: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
+./mad.sh:3371: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3373: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3571: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3578: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3649:if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:3650: curl_impersonate=()
+./mad.sh:3651: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
+./mad.sh:3652: bFoundCurlHeader=false
+./mad.sh:3656: curl_impersonate=($fil)
+./mad.sh:3657: bFoundCurlHeader=true
+./mad.sh:3661: if [ "$bFoundCurlHeader" == "false" ]; then
+./mad.sh:3662: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
+./mad.sh:3665: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
+./mad.sh:3668: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
+./mad.sh:3670: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
+./mad.sh:3674: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
+./mad.sh:3675: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
+./mad.sh:3678: echo -e "run $0 install_curl_impersonate\\n"
+./mad.sh:3680: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
+./mad.sh:3681: UseTorCurlImpersonate=false
+./mad.sh:3682: install_curl_impersonate
+./mad.sh:3766: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
+./mad.sh:3767: printf " %s install_curl_impersonate\\n" "$0"
+./mad.sh:3845:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
+./mad.sh:3846: install_curl_impersonate
+./mad.sh:3877:if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:3878: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:3880: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
 ./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then
 ./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
 ./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
diff --git a/.audit/mad-audit-http.log b/.audit/mad-audit-http.log
index 8648778..764aec6 100755
--- a/.audit/mad-audit-http.log
+++ b/.audit/mad-audit-http.log
@@ -1,27 +1,34 @@
-DateTime: 24.11.18
+DateTime: 24.12.26
 Files:
 ./hosts/1fichier.sh
 ./hosts/9saves.sh
 ./hosts/acid.sh
 ./hosts/anarchaserver.sh
+./hosts/anonfile.sh
 ./hosts/anonsharing.sh
 ./hosts/archived/nekofile.sh
+./hosts/ateasystems.sh
 ./hosts/bedrive.sh
 ./hosts/biteblob.sh
 ./hosts/bowfile.sh
 ./hosts/click.sh
+./hosts/cyssoux.sh
 ./hosts/dailyuploads.sh
+./hosts/dashfile.sh
 ./hosts/dataupload.sh
 ./hosts/dbree.sh
 ./hosts/depotkaz.sh
+./hosts/dictvm.sh
 ./hosts/discreetshare.sh
 ./hosts/dosya.sh
 ./hosts/downloadgg.sh
+./hosts/eddowding.sh
 ./hosts/eternalhosting.sh
 ./hosts/examples/ExampleNewHost.sh
 ./hosts/examples/up_example.sh
 ./hosts/familleflender.sh
+./hosts/fileblade.sh
 ./hosts/fileditch.sh
 ./hosts/filedoge.sh
 ./hosts/filedot.sh
@@ -29,14 +36,19 @@ Files:
 ./hosts/filesquid.sh
 ./hosts/firestorage.sh
 ./hosts/free4e.sh
+./hosts/freesocial.sh
 ./hosts/gofile.sh
 ./hosts/harrault.sh
+./hosts/herbolistique.sh
 ./hosts/hexload.sh
 ./hosts/innocent.sh
+./hosts/isupload.sh
 ./hosts/kraken.sh
 ./hosts/lainsafe.sh
 ./hosts/lainsafe_onion.sh
 ./hosts/linxx.sh
+./hosts/mediafire.sh
+./hosts/moocloud.sh
 ./hosts/nantes.sh
 ./hosts/netlib.sh
 ./hosts/nippy.sh
@@ -44,15 +56,18 @@ Files:
 ./hosts/offshorecat.sh
 ./hosts/oshi.sh
 ./hosts/pixeldrain.sh
+./hosts/quax.sh
 ./hosts/ranoz.sh
 ./hosts/shareonline.sh
 ./hosts/skrepr.sh
 ./hosts/soyjak.sh
+./hosts/syspro.sh
 ./hosts/tempfileme.sh
 ./hosts/tempsh.sh
 ./hosts/torup.sh
 ./hosts/turboonion.sh
 ./hosts/up2share.sh
+./hosts/uploadbay.sh
 ./hosts/uploadee.sh
 ./hosts/uploadev.sh
 ./hosts/uploadflix.sh
@@ -60,28 +75,39 @@ Files:
 ./hosts/up_1fichier.sh
 ./hosts/up_acid.sh
 ./hosts/up_anarchaserver.sh
+./hosts/up_anonfile.sh
 ./hosts/up_anonsharing.sh
+./hosts/up_ateasystems.sh
 ./hosts/up_axfc.sh
 ./hosts/up_bedrive.sh
 ./hosts/up_bowfile.sh
+./hosts/up_cyssoux.sh
 ./hosts/up_dailyuploads.sh
+./hosts/up_dashfile.sh
 ./hosts/up_dataupload.sh
 ./hosts/up_dbree.sh
 ./hosts/up_depotkaz.sh
+./hosts/up_dictvm.sh
 ./hosts/up_dosya.sh
+./hosts/up_eddowding.sh
 ./hosts/up_familleflender.sh
+./hosts/up_fileblade.sh
 ./hosts/up_fileditch.sh
 ./hosts/up_filehaus.sh
 ./hosts/up_filesquid.sh
 ./hosts/up_firestorage.sh
 ./hosts/up_free4e.sh
+./hosts/up_freesocial.sh
 ./hosts/up_gofile.sh
 ./hosts/up_harrault.sh
+./hosts/up_herbolistique.sh
 ./hosts/up_hexload.sh
 ./hosts/up_innocent.sh
+./hosts/up_isupload.sh
 ./hosts/up_kouploader.sh
 ./hosts/up_kraken.sh
 ./hosts/up_linxx.sh
+./hosts/up_moocloud.sh
 ./hosts/up_nantes.sh
 ./hosts/up_netlib.sh
 ./hosts/up_nippy.sh
@@ -89,14 +115,17 @@ Files:
 ./hosts/up_offshorecat.sh
 ./hosts/up_oshi.sh
 ./hosts/up_pixeldrain.sh
+./hosts/up_quax.sh
 ./hosts/up_ranoz.sh
 ./hosts/up_shareonline.sh
 ./hosts/up_skrepr.sh
 ./hosts/up_soyjak.sh
+./hosts/up_syspro.sh
 ./hosts/up_tempfileme.sh
 ./hosts/up_tempsh.sh
 ./hosts/up_torup.sh
 ./hosts/up_turboonion.sh
+./hosts/up_uploadbay.sh
 ./hosts/up_uploadee.sh
 ./hosts/up_uploadev.sh
 ./hosts/up_uploadflix.sh
@@ -119,6 +148,9 @@ MAD Audit of http lines: (grep "http:" or "https:")
 _________________________________________________________________________
 ./hosts/1fichier.sh:166: if ! grep -Eqi "https://" <<< "${target_file_link}" > /dev/null ; then
 ./hosts/9saves.sh:141: --data "$form_data" "https://9saves.com/")
+./hosts/anonfile.sh:230: if grep -Eqi 'img src="https://anonfile.de/captchas/' <<< "$response" ; then
+./hosts/anonfile.sh:414: if grep -Eqi 'https://.*dashfile.net.*$)' <<< "$response")
 ./hosts/dataupload.sh:133: post_action="https://dataupload.net/"
 ./hosts/dataupload.sh:210: if ! grep -Eqi "location: https://dataupload.net/d/" <<< "$response"; then
 ./hosts/dosya.sh:168: if grep -Eqi 'https://dosyaupload.com' <<< "$dos_url" ; then
@@ -146,6 +180,9 @@ _________________________________________________________________________
 ./hosts/downloadgg.sh:278: -H "Origin: https://download.gg" \
 ./hosts/downloadgg.sh:297: -H "Origin: https://download.gg" \
 ./hosts/examples/up_example.sh:105: local ar_HUP[0]='https://oshi.at'
+./hosts/fileblade.sh:298: if ! grep -Eqi ')' <<< "$response")
-./hosts/gofile.sh:84: badUrlDownload "${remote_url}" "Expect format http://gofile.io/d/xxxxxxxxx"
+./hosts/gofile.sh:84: badUrlDownload "${remote_url}" "Expect format: http://*.gofile.io/d/xxxxx"
 ./hosts/gofile.sh:108: "https://api.gofile.io/accounts")
 ./hosts/gofile.sh:183: "https://api.gofile.io/contents/$file_id")
 ./hosts/gofile.sh:185: debugHtml "${remote_url##*/}" "gofile_contents$i" "url: https://api.gofile.io/contents/${file_id}?${form_data}"$'\n'"${response}"
-./hosts/gofile.sh:293: cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header")
+./hosts/gofile.sh:204: cnturls=$(grep -oin 'https://' <<< "$download_url" | wc -l)
+./hosts/gofile.sh:303: cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header")
 ./hosts/hexload.sh:102: response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data")
 ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/innocent.sh:48: download_url="${download_url/https:/http:}"
+./hosts/isupload.sh:133: post_action="${remote_url//https:/http:}"
+./hosts/isupload.sh:208: if ! grep -Eqi '.*$)' <<< "$response")
+./hosts/isupload.sh:222: download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
 ./hosts/kraken.sh:155: kraken_action="https://krakenfiles.com/download/${kraken_action##*/}"
 ./hosts/nippy.sh:160: download_url="https:"$(grep -oP '(?<=

http://koldr.jp/' <<< "${response}" ; then
 ./hosts/up_kouploader.sh:147: subSearch=$(awk '/MESSAGE<\/font>/,/http:\/\/koldr.jp\//' <<< "$response")
@@ -271,12 +335,14 @@ _________________________________________________________________________
 ./hosts/up_kraken.sh:107: local ar_HUP[8]='https://uploads9.krakenfiles.com/_uploader/gallery/upload'
 ./hosts/up_kraken.sh:108: local ar_HUP[9]='https://uploads10.krakenfiles.com/_uploader/gallery/upload'
 ./hosts/up_kraken.sh:126: downloadLink="https://krakenfiles.com/view/${hash}/file.html"
-./hosts/up_linxx.sh:99: PostUrlHost='https://linxx.net/upload/script.php'
-./hosts/up_linxx.sh:114: downloadLink="https://linxx.net/upload/f.php?h=${hash}&p=1"
-./hosts/up_nantes.sh:99: PostUrlHost='https://fichiers.nantes.cloud/script.php'
-./hosts/up_nantes.sh:114: downloadLink="https://fichiers.nantes.cloud/f.php?h=${hash}&p=1"
-./hosts/up_netlib.sh:99: PostUrlHost='https://mhep.netlib.re/jirafeau/script.php'
-./hosts/up_netlib.sh:114: downloadLink="https://mhep.netlib.re/jirafeau/f.php?h=${hash}&p=1"
+./hosts/up_linxx.sh:37: jira_PostUrlHost='https://linxx.net/upload/script.php'
+./hosts/up_linxx.sh:40: jira_downloadLinkPrefix='https://linxx.net/upload/f.php?h='
+./hosts/up_moocloud.sh:37: jira_PostUrlHost='https://file.tools.moocloud.ch/script.php'
+./hosts/up_moocloud.sh:40: jira_downloadLinkPrefix='https://file.tools.moocloud.ch/f.php?h='
+./hosts/up_nantes.sh:37: jira_PostUrlHost='https://fichiers.nantes.cloud/script.php'
+./hosts/up_nantes.sh:40: jira_downloadLinkPrefix='https://fichiers.nantes.cloud/f.php?h='
+./hosts/up_netlib.sh:37: jira_PostUrlHost='https://mhep.netlib.re/jirafeau/script.php'
+./hosts/up_netlib.sh:40: jira_downloadLinkPrefix='https://mhep.netlib.re/jirafeau/f.php?h='
 ./hosts/up_nippy.sh:103: PostUrlHost='https://ns05.zipcluster.com/upload.php'
 ./hosts/up_nippy.sh:105: PostUrlHost='https://ns01.zipcluster.com/upload.php'
 ./hosts/up_nippy.sh:107: PostUrlHost='https://ns04.zipcluster.com/upload.php'
@@ -294,15 +360,20 @@ _________________________________________________________________________
 ./hosts/up_offshorecat.sh:115: downloadLink="https://files.offshore.cat/${hash}"
 ./hosts/up_oshi.sh:104: PostUrlHost='https://oshi.at/'
 ./hosts/up_oshi.sh:106: PostUrlHost='http://5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd.onion/'
-./hosts/up_pixeldrain.sh:107: PostUrlHost='https://pixeldrain.com/api/file'
-./hosts/up_pixeldrain.sh:137: downloadLink="https://pixeldrain.com/u/${hash}"
+./hosts/up_pixeldrain.sh:107: PostUrlHost='https://pixeldrain.com/api/file/'
+./hosts/up_pixeldrain.sh:136: downloadLink="https://pixeldrain.com/u/${hash}"
+./hosts/up_quax.sh:99: PostUrlHost='https://qu.ax/upload.php'
 ./hosts/up_ranoz.sh:99: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url'
 ./hosts/up_ranoz.sh:111: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then
 ./hosts/up_shareonline.sh:99: PostUrlHost='https://ns07.zipcluster.com/upload.php'
-./hosts/up_skrepr.sh:91: PostUrlHost='https://transfer.skrepr.com/script.php'
-./hosts/up_skrepr.sh:106: downloadLink="https://transfer.skrepr.com/f.php?h=${hash}&p=1"
-./hosts/up_soyjak.sh:99: PostUrlHost='https://soyjak.download/script.php'
-./hosts/up_soyjak.sh:114: downloadLink="https://soyjak.download/f.php?h=${hash}&p=1"
+./hosts/up_skrepr.sh:37: jira_PostUrlHost='https://transfer.skrepr.com/script.php'
+./hosts/up_skrepr.sh:40: jira_downloadLinkPrefix='https://transfer.skrepr.com/f.php?h='
+./hosts/up_soyjak.sh:37: jira_PostUrlHost='https://soyjak.download/script.php'
+./hosts/up_soyjak.sh:40: jira_downloadLinkPrefix='https://soyjak.download/f.php?h='
+./hosts/up_syspro.sh:99: PostUrlHost='https://share.syspro.com.br/cgi-bin/upload.cgi?upload_id='
+./hosts/up_syspro.sh:112: if grep -Eqi "Location: http://share\.syspro\.com\.br/" <<< "${response}" ; then
+./hosts/up_syspro.sh:113: fname=$(grep -oPi -m 1 '(?<=http://share.syspro.com.br//\?&filename=).*?(?=&del_id.*$)' <<< "$response")
+./hosts/up_syspro.sh:116: downloadLink="http://share.syspro.com.br/$fname/$fnameorig"
 ./hosts/up_tempfileme.sh:99: PostUrlHost='https://tempfile.me/upload'
 ./hosts/up_tempsh.sh:99: PostUrlHost='https://temp.sh/upload'
 ./hosts/up_tempsh.sh:111: hash=$(grep -oPi '(?<=http://temp.sh/).*?(?=$)' <<< "$response")
@@ -314,6 +385,8 @@ _________________________________________________________________________
 ./hosts/up_torup.sh:162: if grep -Eqi 'input type="text" value="http://ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion/download/' <<< "${response}" ; then
 ./hosts/up_turboonion.sh:96: PostUrlHost='http://3qeyzgtujhguzjletcz34qxsiqoymlni6s6rhc37kpobyttzngwlzjid.onion/api/upload'
 ./hosts/up_turboonion.sh:118: downloadLink='http://3qeyzgtujhguzjletcz34qxsiqoymlni6s6rhc37kpobyttzngwlzjid.onion/dl/file/'"$hash"
+./hosts/up_uploadbay.sh:99: PostUrlHost='https://uploadbay.net/upload.php'
+./hosts/up_uploadbay.sh:110: if grep -Eqi "class='file one' href='https://uploadbay.net/uploads/" <<< "${response}" ; then
 ./hosts/up_uploadee.sh:106: response=$(tor_curl_request --insecure -L -s "https://www.upload.ee/ubr_link_upload.php")
 ./hosts/up_uploadee.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_up_getid_$i" "url: https://www.upload.ee/ubr_link_upload.php"$'\n'"${response}"
 ./hosts/up_uploadee.sh:173: PostUrlHost="https://www.upload.ee/cgi-bin/ubr_upload.pl?upload_id=$upee_uploadid"
@@ -322,78 +395,79 @@ _________________________________________________________________________
 ./hosts/up_uploadflix.sh:99: local ar_HUP[0]='https://fs50.uploadflix.cyou/cgi-bin/upload.cgi?upload_type=file&utype=anon'
 ./hosts/up_uploadflix.sh:125: downloadLink="https://uploadflix.cc/${hash}"
 ./hosts/up_uploadhive.sh:99: PostUrlHost='https://fs430.uploadhive.com/cgi-bin/upload.cgi'
-./hosts/up_uploadhive.sh:120: downloadLink="https://uploadhive.com/${hash}"
+./hosts/up_uploadhive.sh:149: downloadLink="https://uploadhive.com/${hash}"
 ./hosts/up_uploadraja.sh:99: PostUrlHost='https://awsaisiaposisition69.kalpstudio.xyz/cgi-bin/upload.cgi?upload_type=file&utype=anon'
 ./hosts/up_uploadraja.sh:119: downloadLink="https://uploadraja.com/$hash"
 ./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php'
-./mad.sh:620: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
-./mad.sh:622: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
-./mad.sh:625: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:627: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:648: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
-./mad.sh:650: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
-./mad.sh:653: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:655: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:676: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
-./mad.sh:678: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
-./mad.sh:681: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:683: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:705: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
-./mad.sh:707: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
-./mad.sh:710: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:712: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:733: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #http (if changed)
-./mad.sh:735: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #direct url https
-./mad.sh:738: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:740: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:766: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
-./mad.sh:768: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
-./mad.sh:793: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
-./mad.sh:795: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
-./mad.sh:798: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:800: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:816: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
-./mad.sh:818: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
-./mad.sh:821: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:823: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:842: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
-./mad.sh:844: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
-./mad.sh:847: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:849: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:869: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
-./mad.sh:871: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
-./mad.sh:874: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:876: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:894: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
-./mad.sh:896: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
-./mad.sh:899: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:901: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:920: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
-./mad.sh:922: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
-./mad.sh:925: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
-./mad.sh:927: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
-./mad.sh:1313: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
-./mad.sh:1330: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1436: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
-./mad.sh:1453: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1716: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
-./mad.sh:1744: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
-./mad.sh:1766: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
-./mad.sh:3082: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
-./mad.sh:3492:arg2="$2" # auto, filelist,
-./mad.sh:3589: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
-./mad.sh:3591: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
-./mad.sh:3593: echo -e ' - ie. 
direct=http://somehost.onion/abcD|filename.part1.rar' -./mad.sh:3812: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:3813: remote_url=${remote_url/http:/https:} -./mad.sh:3834: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:3835: remote_url=${remote_url/http:/https:} -./mad.sh:4201: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4202: remote_url=${remote_url/http:/https:} -./mad.sh:4260: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4261: remote_url=${remote_url/http:/https:} -./mad.sh:4286: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4287: remote_url=${remote_url/http:/https:} +./mad.sh:665: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed) +./mad.sh:667: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https +./mad.sh:670: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:672: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:693: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed) +./mad.sh:695: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https +./mad.sh:698: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:700: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:721: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed) +./mad.sh:723: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https +./mad.sh:726: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:728: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:750: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed) +./mad.sh:752: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https +./mad.sh:755: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:757: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:778: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #http (if changed) +./mad.sh:780: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #direct url https +./mad.sh:783: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:785: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:811: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed) +./mad.sh:813: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https +./mad.sh:833: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed) +./mad.sh:854: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed) +./mad.sh:856: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https +./mad.sh:859: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:861: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:877: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN 
found)>g" "${InputFile}" #http (if changed) +./mad.sh:879: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https +./mad.sh:882: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:884: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:903: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed) +./mad.sh:905: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https +./mad.sh:908: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:910: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:930: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed) +./mad.sh:932: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https +./mad.sh:935: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:937: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:955: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed) +./mad.sh:957: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https +./mad.sh:960: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:962: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:981: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed) +./mad.sh:983: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https +./mad.sh:986: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:988: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1406: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) +./mad.sh:1423: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1529: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1546: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1809: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1837: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1859: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:3175: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then +./mad.sh:3688:arg2="$2" # auto, filelist, +./mad.sh:3785: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar" +./mad.sh:3787: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z" +./mad.sh:3789: echo -e ' - ie. 
direct=http://somehost.onion/abcD|filename.part1.rar' +./mad.sh:4008: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4009: remote_url=${remote_url/http:/https:} +./mad.sh:4030: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4031: remote_url=${remote_url/http:/https:} +./mad.sh:4397: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4398: remote_url=${remote_url/http:/https:} +./mad.sh:4456: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4457: remote_url=${remote_url/http:/https:} +./mad.sh:4482: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4483: remote_url=${remote_url/http:/https:} ./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null) ./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null) diff --git a/.audit/mad-audit-tor_curl-details.log b/.audit/mad-audit-tor_curl-details.log index 2b63d5d..dfcbe0d 100755 --- a/.audit/mad-audit-tor_curl-details.log +++ b/.audit/mad-audit-tor_curl-details.log @@ -1,27 +1,34 @@ -DateTime: 24.11.18 +DateTime: 24.12.26 Files: ./hosts/1fichier.sh ./hosts/9saves.sh ./hosts/acid.sh ./hosts/anarchaserver.sh +./hosts/anonfile.sh ./hosts/anonsharing.sh ./hosts/archived/nekofile.sh +./hosts/ateasystems.sh ./hosts/bedrive.sh ./hosts/biteblob.sh ./hosts/bowfile.sh ./hosts/click.sh +./hosts/cyssoux.sh ./hosts/dailyuploads.sh +./hosts/dashfile.sh ./hosts/dataupload.sh ./hosts/dbree.sh ./hosts/depotkaz.sh +./hosts/dictvm.sh ./hosts/discreetshare.sh ./hosts/dosya.sh ./hosts/downloadgg.sh +./hosts/eddowding.sh ./hosts/eternalhosting.sh ./hosts/examples/ExampleNewHost.sh ./hosts/examples/up_example.sh ./hosts/familleflender.sh +./hosts/fileblade.sh ./hosts/fileditch.sh ./hosts/filedoge.sh ./hosts/filedot.sh @@ -29,14 +36,19 @@ Files: ./hosts/filesquid.sh ./hosts/firestorage.sh ./hosts/free4e.sh +./hosts/freesocial.sh ./hosts/gofile.sh ./hosts/harrault.sh +./hosts/herbolistique.sh ./hosts/hexload.sh ./hosts/innocent.sh +./hosts/isupload.sh ./hosts/kraken.sh ./hosts/lainsafe.sh ./hosts/lainsafe_onion.sh ./hosts/linxx.sh +./hosts/mediafire.sh +./hosts/moocloud.sh ./hosts/nantes.sh ./hosts/netlib.sh ./hosts/nippy.sh @@ -44,15 +56,18 @@ Files: ./hosts/offshorecat.sh ./hosts/oshi.sh ./hosts/pixeldrain.sh +./hosts/quax.sh ./hosts/ranoz.sh ./hosts/shareonline.sh ./hosts/skrepr.sh ./hosts/soyjak.sh +./hosts/syspro.sh ./hosts/tempfileme.sh ./hosts/tempsh.sh ./hosts/torup.sh ./hosts/turboonion.sh ./hosts/up2share.sh +./hosts/uploadbay.sh ./hosts/uploadee.sh ./hosts/uploadev.sh ./hosts/uploadflix.sh @@ -60,28 +75,39 @@ Files: ./hosts/up_1fichier.sh ./hosts/up_acid.sh ./hosts/up_anarchaserver.sh +./hosts/up_anonfile.sh ./hosts/up_anonsharing.sh +./hosts/up_ateasystems.sh ./hosts/up_axfc.sh ./hosts/up_bedrive.sh ./hosts/up_bowfile.sh +./hosts/up_cyssoux.sh ./hosts/up_dailyuploads.sh +./hosts/up_dashfile.sh ./hosts/up_dataupload.sh ./hosts/up_dbree.sh ./hosts/up_depotkaz.sh +./hosts/up_dictvm.sh ./hosts/up_dosya.sh +./hosts/up_eddowding.sh ./hosts/up_familleflender.sh +./hosts/up_fileblade.sh ./hosts/up_fileditch.sh ./hosts/up_filehaus.sh ./hosts/up_filesquid.sh ./hosts/up_firestorage.sh ./hosts/up_free4e.sh +./hosts/up_freesocial.sh ./hosts/up_gofile.sh ./hosts/up_harrault.sh +./hosts/up_herbolistique.sh ./hosts/up_hexload.sh ./hosts/up_innocent.sh +./hosts/up_isupload.sh ./hosts/up_kouploader.sh ./hosts/up_kraken.sh ./hosts/up_linxx.sh +./hosts/up_moocloud.sh ./hosts/up_nantes.sh ./hosts/up_netlib.sh ./hosts/up_nippy.sh @@ -89,14 
+115,17 @@ Files: ./hosts/up_offshorecat.sh ./hosts/up_oshi.sh ./hosts/up_pixeldrain.sh +./hosts/up_quax.sh ./hosts/up_ranoz.sh ./hosts/up_shareonline.sh ./hosts/up_skrepr.sh ./hosts/up_soyjak.sh +./hosts/up_syspro.sh ./hosts/up_tempfileme.sh ./hosts/up_tempsh.sh ./hosts/up_torup.sh ./hosts/up_turboonion.sh +./hosts/up_uploadbay.sh ./hosts/up_uploadee.sh ./hosts/up_uploadev.sh ./hosts/up_uploadflix.sh @@ -249,6 +278,103 @@ _________________________________________________________________________ ./hosts/9saves.sh:330: -H "Sec-Fetch-Dest: document" \ ./hosts/9saves.sh:331: -H "Sec-Fetch-Mode: navigate" \ -- +./hosts/anonfile.sh:96: response=$(tor_curl_request --insecure -L -s -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ +./hosts/anonfile.sh:97: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/anonfile.sh:98: "$fixed_url") +./hosts/anonfile.sh:99: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/anonfile.sh:100: debugHtml "${remote_url##*/}" "anon_fetch$i" "${response}" +./hosts/anonfile.sh:101: fi +./hosts/anonfile.sh:102: if [[ -z $response ]] ; then +./hosts/anonfile.sh:103: rm -f "${anon_cookie_jar}"; +./hosts/anonfile.sh:104: if [ $i == $maxfetchretries ] ; then +./hosts/anonfile.sh:105: printf "\\n" +./hosts/anonfile.sh:106: echo -e "${RED}| Failed to extract download link [1]${NC}" +-- +./hosts/anonfile.sh:186: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/anonfile.sh:187: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ +./hosts/anonfile.sh:188: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/anonfile.sh:189: --data "$form_data" "$fixed_url") +./hosts/anonfile.sh:190: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/anonfile.sh:191: debugHtml "${remote_url##*/}" "anon_post1_$i" "${response}" +./hosts/anonfile.sh:192: fi +./hosts/anonfile.sh:193: if [[ -z $response ]] ; then +./hosts/anonfile.sh:194: rm -f "${anon_cookie_jar}"; +./hosts/anonfile.sh:195: if [ $i == $maxfetchretries ] ; then +./hosts/anonfile.sh:196: printf "\\n" +-- +./hosts/anonfile.sh:240: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" +./hosts/anonfile.sh:241: captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130") +./hosts/anonfile.sh:242: if [ "${DebugPluginsEnabled}" == "true" ]; then +./hosts/anonfile.sh:243: printf "\\n" +./hosts/anonfile.sh:244: echo -e "$captcha_ocr_output" +./hosts/anonfile.sh:245: fi +./hosts/anonfile.sh:246: captcha_code=$(grep -oP -m 1 "(?<=\[CAPTCHA_CODE\:).*?(?=\])" <<< "$captcha_ocr_output") +./hosts/anonfile.sh:247: rm -f "$tmp_captcha_img" +./hosts/anonfile.sh:248: rm -f "$captcha_ocr_output" +./hosts/anonfile.sh:249: local caplength=${#captcha_code} +./hosts/anonfile.sh:250: if [ -z "$captcha_code" ] || ((caplength != 4)) ; then +-- +./hosts/anonfile.sh:340: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/anonfile.sh:341: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ +./hosts/anonfile.sh:342: --data "$form_data" "$fixed_url") +./hosts/anonfile.sh:343: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/anonfile.sh:344: debugHtml "${remote_url##*/}" "anon_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/anonfile.sh:345: fi +./hosts/anonfile.sh:346: if [[ -z $response ]] ; then +./hosts/anonfile.sh:347: if [ $i == $maxfetchretries ] ; then +./hosts/anonfile.sh:348: rm -f "${anon_cookie_jar}"; +./hosts/anonfile.sh:349: printf "\\n" 
+./hosts/anonfile.sh:350: echo -e "${RED}| Failed to extract download link [3].${NC}" +-- +./hosts/anonfile.sh:446: file_header=$(tor_curl_request -i -s --head \ +./hosts/anonfile.sh:447: --referer "${fixed_url}" \ +./hosts/anonfile.sh:448: "$download_url") +./hosts/anonfile.sh:449: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/anonfile.sh:450: debugHtml "${remote_url##*/}" "anon_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/anonfile.sh:451: fi +./hosts/anonfile.sh:452: if [[ -z $file_header ]] ; then +./hosts/anonfile.sh:453: if [ $j == $maxfetchretries ] ; then +./hosts/anonfile.sh:454: rm -f "${anon_cookie_jar}"; +./hosts/anonfile.sh:455: printf "\\n" +./hosts/anonfile.sh:456: echo -e "${RED}| Failed to extract file info${NC}" +-- +./hosts/anonfile.sh:551: tor_curl_request --insecure \ +./hosts/anonfile.sh:552: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/anonfile.sh:553: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ +./hosts/anonfile.sh:554: --referer "${fixed_url}" \ +./hosts/anonfile.sh:555: "$download_url" --continue-at - --output "$file_path" +./hosts/anonfile.sh:556: else +./hosts/anonfile.sh:557: tor_curl_request --insecure \ +./hosts/anonfile.sh:558: -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ +./hosts/anonfile.sh:559: --referer "${fixed_url}" \ +./hosts/anonfile.sh:560: "$download_url" --continue-at - --output "$file_path" +./hosts/anonfile.sh:561: fi +./hosts/anonfile.sh:562: else +./hosts/anonfile.sh:563: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/anonfile.sh:564: tor_curl_request --insecure \ +./hosts/anonfile.sh:565: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/anonfile.sh:566: -H "User-Agent: $RandomUA" \ +./hosts/anonfile.sh:567: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/anonfile.sh:568: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/anonfile.sh:569: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/anonfile.sh:570: -H "Connection: keep-alive" \ +./hosts/anonfile.sh:571: -H "Cookie: lng=eng" \ +./hosts/anonfile.sh:572: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/anonfile.sh:573: -H "Sec-Fetch-Dest: document" \ +./hosts/anonfile.sh:574: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/anonfile.sh:581: tor_curl_request --insecure \ +./hosts/anonfile.sh:582: -H "User-Agent: $RandomUA" \ +./hosts/anonfile.sh:583: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/anonfile.sh:584: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/anonfile.sh:585: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/anonfile.sh:586: -H "Connection: keep-alive" \ +./hosts/anonfile.sh:587: -H "Cookie: lng=eng" \ +./hosts/anonfile.sh:588: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/anonfile.sh:589: -H "Sec-Fetch-Dest: document" \ +./hosts/anonfile.sh:590: -H "Sec-Fetch-Mode: navigate" \ +./hosts/anonfile.sh:591: -H "Sec-Fetch-Site: same-origin" \ +-- ./hosts/anonsharing.sh:91: response=$(tor_curl_request --insecure -i -s \ ./hosts/anonsharing.sh:92: -b "${ansh_cookie_jar}" -c "${ansh_cookie_jar}" \ ./hosts/anonsharing.sh:93: -F "u=$fileid" \ @@ -295,6 +421,47 @@ _________________________________________________________________________ ./hosts/anonsharing.sh:284: containsHtml=true ./hosts/anonsharing.sh:285: fi -- +./hosts/ateasystems.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/ateasystems.sh:89: if [ "${DebugAllEnabled}" 
== "true" ] ; then +./hosts/ateasystems.sh:90: debugHtml "${remote_url##*/}" "atea_fetch$i" "${response}" +./hosts/ateasystems.sh:91: fi +./hosts/ateasystems.sh:92: if [[ -z $response ]] ; then +./hosts/ateasystems.sh:93: if [ $i == $maxfetchretries ] ; then +./hosts/ateasystems.sh:94: printf "\\n" +./hosts/ateasystems.sh:95: echo -e "${RED}| Failed to extract download link [1]${NC}" +./hosts/ateasystems.sh:96: warnAndRetryUnknownError=true +./hosts/ateasystems.sh:97: if [ "${finalAttempt}" == "true" ] ; then +./hosts/ateasystems.sh:98: failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" +-- +./hosts/ateasystems.sh:220: tor_curl_request --insecure \ +./hosts/ateasystems.sh:221: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/ateasystems.sh:222: --data "$form_data" "$post_action" \ +./hosts/ateasystems.sh:223: --output "$file_path" --output "$file_path" +./hosts/ateasystems.sh:224: else +./hosts/ateasystems.sh:225: tor_curl_request --insecure \ +./hosts/ateasystems.sh:226: --data "$form_data" "$post_action" \ +./hosts/ateasystems.sh:227: --output "$file_path" --output "$file_path" +./hosts/ateasystems.sh:228: fi +./hosts/ateasystems.sh:229: else +./hosts/ateasystems.sh:230: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/ateasystems.sh:231: tor_curl_request --insecure \ +./hosts/ateasystems.sh:232: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/ateasystems.sh:233: -H "User-Agent: $RandomUA" \ +./hosts/ateasystems.sh:234: --data "$form_data" "$post_action" \ +./hosts/ateasystems.sh:235: --output "$file_path" --output "$file_path" +./hosts/ateasystems.sh:236: else +./hosts/ateasystems.sh:237: tor_curl_request --insecure \ +./hosts/ateasystems.sh:238: -H "User-Agent: $RandomUA" \ +./hosts/ateasystems.sh:239: --data "$form_data" "$post_action" \ +./hosts/ateasystems.sh:240: --output "$file_path" --output "$file_path" +./hosts/ateasystems.sh:241: fi +./hosts/ateasystems.sh:242: fi +./hosts/ateasystems.sh:243: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/ateasystems.sh:244: containsHtml=false +./hosts/ateasystems.sh:245: else +./hosts/ateasystems.sh:246: containsHtml=true +./hosts/ateasystems.sh:247: fi +-- ./hosts/bedrive.sh:90: response=$(tor_curl_request --insecure -L -s \ ./hosts/bedrive.sh:91: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ ./hosts/bedrive.sh:92: "$remote_url") @@ -650,6 +817,89 @@ _________________________________________________________________________ ./hosts/dailyuploads.sh:537: -H "Sec-Fetch-Mode: navigate" \ ./hosts/dailyuploads.sh:538: -H "Sec-Fetch-Site: same-origin" \ -- +./hosts/dashfile.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ +./hosts/dashfile.sh:91: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/dashfile.sh:92: "$remote_url") +./hosts/dashfile.sh:93: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:94: debugHtml "${remote_url##*/}" "dash_fetch$i" "${response}" +./hosts/dashfile.sh:95: fi +./hosts/dashfile.sh:96: if [[ -z $response ]] ; then +./hosts/dashfile.sh:97: rm -f "${dash_cookie_jar}"; +./hosts/dashfile.sh:98: if [ $i == $maxfetchretries ] ; then +./hosts/dashfile.sh:99: printf "\\n" +./hosts/dashfile.sh:100: echo -e "${RED}| Failed to extract download link [1]${NC}" +-- +./hosts/dashfile.sh:169: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/dashfile.sh:170: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ 
+./hosts/dashfile.sh:171: --data "$form_data" "$remote_url") +./hosts/dashfile.sh:172: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:173: debugHtml "${remote_url##*/}" "dash_post1_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/dashfile.sh:174: fi +./hosts/dashfile.sh:175: if [[ -z $response ]] ; then +./hosts/dashfile.sh:176: if [ $i == $maxfetchretries ] ; then +./hosts/dashfile.sh:177: rm -f "${dash_cookie_jar}"; +./hosts/dashfile.sh:178: printf "\\n" +./hosts/dashfile.sh:179: echo -e "${RED}| Failed to extract download link [4]${NC}" +-- +./hosts/dashfile.sh:297: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/dashfile.sh:298: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ +./hosts/dashfile.sh:299: --data "$form_data" "$remote_url") +./hosts/dashfile.sh:300: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:301: debugHtml "${remote_url##*/}" "dash_post2_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/dashfile.sh:302: fi +./hosts/dashfile.sh:303: if [[ -z $response ]] ; then +./hosts/dashfile.sh:304: if [ $i == $maxfetchretries ] ; then +./hosts/dashfile.sh:305: rm -f "${dash_cookie_jar}"; +./hosts/dashfile.sh:306: printf "\\n" +./hosts/dashfile.sh:307: echo -e "${RED}| Failed to extract download link [7]${NC}" +-- +./hosts/dashfile.sh:386: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/dashfile.sh:387: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:388: debugHtml "${remote_url##*/}" "dash_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/dashfile.sh:389: fi +./hosts/dashfile.sh:390: if [[ -z $file_header ]] ; then +./hosts/dashfile.sh:391: if [ $j == $maxfetchretries ] ; then +./hosts/dashfile.sh:392: rm -f "${dash_cookie_jar}"; +./hosts/dashfile.sh:393: printf "\\n" +./hosts/dashfile.sh:394: echo -e "${RED}| Failed to extract file info${NC}" +./hosts/dashfile.sh:395: warnAndRetryUnknownError=true +./hosts/dashfile.sh:396: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/dashfile.sh:486: tor_curl_request --insecure \ +./hosts/dashfile.sh:487: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/dashfile.sh:488: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ +./hosts/dashfile.sh:489: "$download_url" --continue-at - --output "$file_path" +./hosts/dashfile.sh:490: else +./hosts/dashfile.sh:491: tor_curl_request --insecure \ +./hosts/dashfile.sh:492: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ +./hosts/dashfile.sh:493: "$download_url" --continue-at - --output "$file_path" +./hosts/dashfile.sh:494: fi +./hosts/dashfile.sh:495: else +./hosts/dashfile.sh:496: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/dashfile.sh:497: tor_curl_request --insecure \ +./hosts/dashfile.sh:498: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/dashfile.sh:499: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ +./hosts/dashfile.sh:500: -H "User-Agent: $RandomUA" \ +./hosts/dashfile.sh:501: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/dashfile.sh:502: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/dashfile.sh:503: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dashfile.sh:504: -H "Connection: keep-alive" \ +./hosts/dashfile.sh:505: -H "Cookie: lng=eng" \ +./hosts/dashfile.sh:506: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/dashfile.sh:507: -H 
"Sec-Fetch-Dest: document" \ +-- +./hosts/dashfile.sh:513: tor_curl_request --insecure \ +./hosts/dashfile.sh:514: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ +./hosts/dashfile.sh:515: -H "User-Agent: $RandomUA" \ +./hosts/dashfile.sh:516: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/dashfile.sh:517: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/dashfile.sh:518: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dashfile.sh:519: -H "Connection: keep-alive" \ +./hosts/dashfile.sh:520: -H "Cookie: lng=eng" \ +./hosts/dashfile.sh:521: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/dashfile.sh:522: -H "Sec-Fetch-Dest: document" \ +./hosts/dashfile.sh:523: -H "Sec-Fetch-Mode: navigate" \ +-- ./hosts/dataupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url") ./hosts/dataupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then ./hosts/dataupload.sh:92: debugHtml "${remote_url##*/}" "dup_dwnpage$i" "${response}" @@ -885,6 +1135,87 @@ _________________________________________________________________________ ./hosts/examples/up_example.sh:121: fi ./hosts/examples/up_example.sh:122: successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$responseHtml}" -- +./hosts/fileblade.sh:90: response=$(tor_curl_request --insecure -L -s -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" "$remote_url") +./hosts/fileblade.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:92: debugHtml "${remote_url##*/}" "fb_dwnpage$i" "${response}" +./hosts/fileblade.sh:93: fi +./hosts/fileblade.sh:94: if [[ -z $response ]] ; then +./hosts/fileblade.sh:95: rm -f "${fb_cookie_jar}"; +./hosts/fileblade.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/fileblade.sh:97: printf "\\n" +./hosts/fileblade.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/fileblade.sh:99: warnAndRetryUnknownError=true +./hosts/fileblade.sh:100: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/fileblade.sh:167: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/fileblade.sh:168: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ +./hosts/fileblade.sh:169: --data "$form_data" "$post_action") +./hosts/fileblade.sh:170: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:171: debugHtml "${remote_url##*/}" "fb_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/fileblade.sh:172: fi +./hosts/fileblade.sh:173: if [[ -z $response ]] ; then +./hosts/fileblade.sh:174: if [ $i == $maxfetchretries ] ; then +./hosts/fileblade.sh:175: rm -f "${fb_cookie_jar}"; +./hosts/fileblade.sh:176: printf "\\n" +./hosts/fileblade.sh:177: echo -e "${RED}| Failed to extract download link [3]${NC}" +-- +./hosts/fileblade.sh:254: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/fileblade.sh:255: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ +./hosts/fileblade.sh:256: --data "$form_data" "$post_action") +./hosts/fileblade.sh:257: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:258: debugHtml "${remote_url##*/}" "fb_post2" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/fileblade.sh:259: fi +./hosts/fileblade.sh:260: if [[ -z $response ]] ; then +./hosts/fileblade.sh:261: if [ $i == $maxfetchretries ] ; then +./hosts/fileblade.sh:262: rm -f "${fb_cookie_jar}"; +./hosts/fileblade.sh:263: printf "\\n" +./hosts/fileblade.sh:264: echo -e 
"${RED}| Failed to extract download link [4].${NC}" +-- +./hosts/fileblade.sh:323: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") +./hosts/fileblade.sh:324: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:325: debugHtml "${remote_url##*/}" "fb_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/fileblade.sh:326: fi +./hosts/fileblade.sh:327: if [[ -z $file_header ]] ; then +./hosts/fileblade.sh:328: if [ $j == $maxfetchretries ] ; then +./hosts/fileblade.sh:329: rm -f "${fb_cookie_jar}"; +./hosts/fileblade.sh:330: printf "\\n" +./hosts/fileblade.sh:331: echo -e "${RED}| Failed to extract file info [1]${NC}" +./hosts/fileblade.sh:332: warnAndRetryUnknownError=true +./hosts/fileblade.sh:333: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/fileblade.sh:440: tor_curl_request --insecure -L \ +./hosts/fileblade.sh:441: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/fileblade.sh:442: "$download_url" --continue-at - --output "$file_path" +./hosts/fileblade.sh:443: else +./hosts/fileblade.sh:444: tor_curl_request --insecure -L \ +./hosts/fileblade.sh:445: "$download_url" --continue-at - --output "$file_path" +./hosts/fileblade.sh:446: fi +./hosts/fileblade.sh:447: else +./hosts/fileblade.sh:448: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/fileblade.sh:449: tor_curl_request --insecure \ +./hosts/fileblade.sh:450: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/fileblade.sh:451: -H "User-Agent: $RandomUA" \ +./hosts/fileblade.sh:452: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/fileblade.sh:453: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/fileblade.sh:454: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/fileblade.sh:455: -H "Connection: keep-alive" \ +./hosts/fileblade.sh:456: -H "Cookie: lng=eng" \ +./hosts/fileblade.sh:457: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/fileblade.sh:458: -H "Sec-Fetch-Dest: document" \ +./hosts/fileblade.sh:459: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/fileblade.sh:464: tor_curl_request --insecure \ +./hosts/fileblade.sh:465: -H "User-Agent: $RandomUA" \ +./hosts/fileblade.sh:466: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/fileblade.sh:467: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/fileblade.sh:468: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/fileblade.sh:469: -H "Connection: keep-alive" \ +./hosts/fileblade.sh:470: -H "Cookie: lng=eng" \ +./hosts/fileblade.sh:471: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/fileblade.sh:472: -H "Sec-Fetch-Dest: document" \ +./hosts/fileblade.sh:473: -H "Sec-Fetch-Mode: navigate" \ +./hosts/fileblade.sh:474: -H "Sec-Fetch-Site: same-origin" \ +-- ./hosts/fileditch.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") ./hosts/fileditch.sh:86: if [ "${DebugAllEnabled}" == "true" ] ; then ./hosts/fileditch.sh:87: debugHtml "${remote_url##*/}" "fd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" @@ -985,31 +1316,31 @@ _________________________________________________________________________ ./hosts/filedot.sh:510: containsHtml=true ./hosts/filedot.sh:511: fi -- -./hosts/filehaus.sh:100: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") +./hosts/filehaus.sh:100: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer 
"${remote_url//\.org/\.cc}" "$download_url") ./hosts/filehaus.sh:101: if [ "${DebugAllEnabled}" == "true" ] ; then ./hosts/filehaus.sh:102: debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/filehaus.sh:103: fi ./hosts/filehaus.sh:104: if [ ! -z "$file_header" ] ; then ./hosts/filehaus.sh:105: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then -./hosts/filehaus.sh:106: if [ $j == $maxfetchretries ] ; then -./hosts/filehaus.sh:107: printf "\\n" -./hosts/filehaus.sh:108: echo -e "${RED}| The file has been removed (404).${NC}" -./hosts/filehaus.sh:109: removedDownload "${remote_url}" -./hosts/filehaus.sh:110: exitDownloadNotAvailable=true +./hosts/filehaus.sh:106: printf "\\n" +./hosts/filehaus.sh:107: echo -e "${RED}| The file has been removed (404).${NC}" +./hosts/filehaus.sh:108: removedDownload "${remote_url}" +./hosts/filehaus.sh:109: exitDownloadNotAvailable=true +./hosts/filehaus.sh:110: return 1 -- -./hosts/filehaus.sh:197: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" -./hosts/filehaus.sh:198: else -./hosts/filehaus.sh:199: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:194: else +./hosts/filehaus.sh:195: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:196: fi +./hosts/filehaus.sh:197: received_file_size=0 +./hosts/filehaus.sh:198: if [ -f "$file_path" ] ; then +./hosts/filehaus.sh:199: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/filehaus.sh:200: fi -./hosts/filehaus.sh:201: received_file_size=0 -./hosts/filehaus.sh:202: if [ -f "$file_path" ] ; then -./hosts/filehaus.sh:203: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./hosts/filehaus.sh:204: fi -./hosts/filehaus.sh:205: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./hosts/filehaus.sh:206: containsHtml=false -./hosts/filehaus.sh:207: else -./hosts/filehaus.sh:208: containsHtml=true -./hosts/filehaus.sh:209: fi +./hosts/filehaus.sh:201: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/filehaus.sh:202: containsHtml=false +./hosts/filehaus.sh:203: else +./hosts/filehaus.sh:204: containsHtml=true +./hosts/filehaus.sh:205: fi -- ./hosts/firestorage.sh:98: response=$(tor_curl_request --insecure -L -s "${fixed_url}") ./hosts/firestorage.sh:99: if [ "${DebugAllEnabled}" == "true" ] ; then @@ -1073,41 +1404,41 @@ _________________________________________________________________________ ./hosts/gofile.sh:179: -H "Sec-Fetch-Mode: navigate" \ ./hosts/gofile.sh:180: -H "Sec-Fetch-Site: none" \ -- -./hosts/gofile.sh:241: file_header=$(tor_curl_request --insecure -L --head -s \ -./hosts/gofile.sh:242: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -./hosts/gofile.sh:243: -H "User-Agent: $RandomUA" \ -./hosts/gofile.sh:244: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/gofile.sh:245: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/gofile.sh:246: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/gofile.sh:247: 
-H "Connection: keep-alive" \ -./hosts/gofile.sh:248: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/gofile.sh:249: -H "Sec-Fetch-Dest: document" \ -./hosts/gofile.sh:250: -H "Sec-Fetch-Mode: navigate" \ -./hosts/gofile.sh:251: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:250: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/gofile.sh:251: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:252: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:253: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:254: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:255: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:256: -H "Connection: keep-alive" \ +./hosts/gofile.sh:257: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:258: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:259: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:260: -H "Sec-Fetch-Site: none" \ -- -./hosts/gofile.sh:359: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/gofile.sh:360: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -./hosts/gofile.sh:361: -H "User-Agent: $RandomUA" \ -./hosts/gofile.sh:362: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/gofile.sh:363: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/gofile.sh:364: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/gofile.sh:365: -H "Connection: keep-alive" \ -./hosts/gofile.sh:366: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/gofile.sh:367: -H "Sec-Fetch-Dest: document" \ -./hosts/gofile.sh:368: -H "Sec-Fetch-Mode: navigate" \ -./hosts/gofile.sh:369: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:369: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/gofile.sh:370: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:371: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:372: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:373: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:374: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:375: -H "Connection: keep-alive" \ +./hosts/gofile.sh:376: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:377: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:378: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:379: -H "Sec-Fetch-Site: none" \ -- -./hosts/gofile.sh:373: tor_curl_request --insecure -G \ -./hosts/gofile.sh:374: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -./hosts/gofile.sh:375: -H "User-Agent: $RandomUA" \ -./hosts/gofile.sh:376: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/gofile.sh:377: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/gofile.sh:378: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/gofile.sh:379: -H "Connection: keep-alive" \ -./hosts/gofile.sh:380: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/gofile.sh:381: -H "Sec-Fetch-Dest: document" \ -./hosts/gofile.sh:382: -H "Sec-Fetch-Mode: navigate" \ -./hosts/gofile.sh:383: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:383: tor_curl_request --insecure -G \ +./hosts/gofile.sh:384: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:385: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:386: -H "Accept: 
text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:387: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:388: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:389: -H "Connection: keep-alive" \ +./hosts/gofile.sh:390: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:391: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:392: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:393: -H "Sec-Fetch-Site: none" \ -- ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") ./hosts/hexload.sh:109: if [ "${DebugAllEnabled}" == "true" ] ; then @@ -1161,10 +1492,10 @@ _________________________________________________________________________ ./hosts/hexload.sh:332: containsHtml=true ./hosts/hexload.sh:333: fi -- -./hosts/innocent.sh:97: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") +./hosts/innocent.sh:97: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url") ./hosts/innocent.sh:98: elif ((j % 2 == 0)); then ./hosts/innocent.sh:99: printf "| Retrieving Head (Get): attempt #$j" -./hosts/innocent.sh:100: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ +./hosts/innocent.sh:100: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \ ./hosts/innocent.sh:101: -H "Connection: keep-alive" \ ./hosts/innocent.sh:102: -w 'EffectiveUrl=%{url_effective}' \ ./hosts/innocent.sh:103: "$download_url") @@ -1195,10 +1526,10 @@ _________________________________________________________________________ ./hosts/innocent.sh:128: removedDownload "${remote_url}" ./hosts/innocent.sh:129: exitDownloadNotAvailable=true -- -./hosts/innocent.sh:211: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" +./hosts/innocent.sh:211: tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path" ./hosts/innocent.sh:212: else ./hosts/innocent.sh:213: echo -e "${BLUE}| No Resume Fetch${NC}" -./hosts/innocent.sh:214: tor_curl_request --insecure "$download_url" --output "$file_path" +./hosts/innocent.sh:214: tor_curl_request_extended --insecure "$download_url" --output "$file_path" ./hosts/innocent.sh:215: fi ./hosts/innocent.sh:216: received_file_size=0 ./hosts/innocent.sh:217: if [ -f "$file_path" ] ; then @@ -1210,6 +1541,90 @@ _________________________________________________________________________ ./hosts/innocent.sh:223: containsHtml=true ./hosts/innocent.sh:224: fi -- +./hosts/isupload.sh:90: response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url") +./hosts/isupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/isupload.sh:92: debugHtml "${remote_url##*/}" "isup_dwnpage$i" "${response}" +./hosts/isupload.sh:93: fi +./hosts/isupload.sh:94: if [[ -z $response ]] ; then +./hosts/isupload.sh:95: rm -f "${isup_cookie_jar}"; +./hosts/isupload.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/isupload.sh:97: printf "\\n" +./hosts/isupload.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/isupload.sh:99: warnAndRetryUnknownError=true +./hosts/isupload.sh:100: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/isupload.sh:164: response=$(tor_curl_request_extended --insecure -L -s -X POST \ +./hosts/isupload.sh:165: -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" \ +./hosts/isupload.sh:166: --data "$form_data" "$post_action") +./hosts/isupload.sh:167: if 
[ "${DebugAllEnabled}" == "true" ] ; then +./hosts/isupload.sh:168: debugHtml "${remote_url##*/}" "isup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/isupload.sh:169: fi +./hosts/isupload.sh:170: if [[ -z $response ]] ; then +./hosts/isupload.sh:171: if [ $i == $maxfetchretries ] ; then +./hosts/isupload.sh:172: rm -f "${isup_cookie_jar}"; +./hosts/isupload.sh:173: printf "\\n" +./hosts/isupload.sh:174: echo -e "${RED}| Failed to extract download link [3].${NC}" +-- +./hosts/isupload.sh:238: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") +./hosts/isupload.sh:239: elif ((j % 2 == 0)); then +./hosts/isupload.sh:240: printf "| Retrieving Head (Get): attempt #$j" +./hosts/isupload.sh:241: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \ +./hosts/isupload.sh:242: -H "Connection: keep-alive" \ +./hosts/isupload.sh:243: -w 'EffectiveUrl=%{url_effective}' \ +./hosts/isupload.sh:244: "$download_url") +./hosts/isupload.sh:245: elif ((j % 3 == 0)); then +./hosts/isupload.sh:246: printf "| Retrieving Head (hack): attempt #$j" +./hosts/isupload.sh:247: rm -f "${WorkDir}/.temp/directhead" +./hosts/isupload.sh:248: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./hosts/isupload.sh:249: tee "${WorkDir}/.temp/directhead" & +./hosts/isupload.sh:250: sleep 6 +./hosts/isupload.sh:251: [ -s "${WorkDir}/.temp/directhead" ] +./hosts/isupload.sh:252: kill $! 2>/dev/null +./hosts/isupload.sh:253: ) +./hosts/isupload.sh:254: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./hosts/isupload.sh:255: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +./hosts/isupload.sh:256: fi +./hosts/isupload.sh:257: rm -f "${WorkDir}/.temp/directhead" +./hosts/isupload.sh:258: else +-- +./hosts/isupload.sh:260: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url") +./hosts/isupload.sh:261: fi +./hosts/isupload.sh:262: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/isupload.sh:263: debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/isupload.sh:264: fi +./hosts/isupload.sh:265: if [ ! -z "$file_header" ] ; then +./hosts/isupload.sh:266: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/isupload.sh:267: printf "\\n" +./hosts/isupload.sh:268: echo -e "${RED}| Not Found (404). The file has been removed.${NC}" +./hosts/isupload.sh:269: removedDownload "${remote_url}" +./hosts/isupload.sh:270: exitDownloadNotAvailable=true +-- +./hosts/isupload.sh:352: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path" +./hosts/isupload.sh:353: rc=$? 
+./hosts/isupload.sh:354: if [ $rc -ne 0 ] ; then +./hosts/isupload.sh:355: printf "${RED}Download Failed (bad exit status).${NC}" +./hosts/isupload.sh:356: if [ -f ${file_path} ]; then +./hosts/isupload.sh:357: printf "${YELLOW} Partial removed...${NC}" +./hosts/isupload.sh:358: printf "\n\n" +./hosts/isupload.sh:359: rm -f "${file_path}" +./hosts/isupload.sh:360: else +./hosts/isupload.sh:361: printf "\n\n" +./hosts/isupload.sh:362: fi +-- +./hosts/isupload.sh:396: tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/isupload.sh:397: else +./hosts/isupload.sh:398: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path" +./hosts/isupload.sh:399: fi +./hosts/isupload.sh:400: received_file_size=0 +./hosts/isupload.sh:401: if [ -f "$file_path" ] ; then +./hosts/isupload.sh:402: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/isupload.sh:403: fi +./hosts/isupload.sh:404: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/isupload.sh:405: containsHtml=false +./hosts/isupload.sh:406: else +./hosts/isupload.sh:407: containsHtml=true +./hosts/isupload.sh:408: fi +-- ./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}") ./hosts/kraken.sh:105: if [ "${DebugAllEnabled}" == "true" ] ; then ./hosts/kraken.sh:106: debugHtml "${krak_id}" "kraken_token${num_attempt}_$i" "url: ${fixed_url}"$'\n'"krakenid: ${krak_id}"$'\n'"${PAGE}" @@ -1260,6 +1675,45 @@ _________________________________________________________________________ ./hosts/kraken.sh:297: containsHtml=true ./hosts/kraken.sh:298: fi -- +./hosts/mediafire.sh:94: response=$(tor_curl_request --insecure -L -s \ +./hosts/mediafire.sh:95: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/mediafire.sh:96: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/mediafire.sh:97: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/mediafire.sh:98: -H "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7" \ +./hosts/mediafire.sh:99: -H "Connection: keep-alive" \ +./hosts/mediafire.sh:100: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/mediafire.sh:101: -H "Sec-Fetch-Dest: document" \ +./hosts/mediafire.sh:102: -H "Sec-Fetch-Mode: navigate" \ +./hosts/mediafire.sh:103: -H "Sec-Fetch-Site: none" \ +./hosts/mediafire.sh:104: -H "Sec-Fetch-User: ?1" \ +-- +./hosts/mediafire.sh:157: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/mediafire.sh:158: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/mediafire.sh:159: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/mediafire.sh:160: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/mediafire.sh:161: -H "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7" \ +./hosts/mediafire.sh:162: -H "Connection: keep-alive" \ +./hosts/mediafire.sh:163: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/mediafire.sh:164: -H "Sec-Fetch-Dest: document" \ +./hosts/mediafire.sh:165: -H "Sec-Fetch-Mode: navigate" \ +./hosts/mediafire.sh:166: -H "Sec-Fetch-Site: none" \ +./hosts/mediafire.sh:167: -H "Sec-Fetch-User: ?1" \ +-- +./hosts/mediafire.sh:276: tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path" +./hosts/mediafire.sh:277: else +./hosts/mediafire.sh:278: echo -e "${BLUE}| No Resume Fetch${NC}" 
+./hosts/mediafire.sh:279: tor_curl_request_extended --insecure "$download_url" --output "$file_path" +./hosts/mediafire.sh:280: fi +./hosts/mediafire.sh:281: received_file_size=0 +./hosts/mediafire.sh:282: if [ -f "$file_path" ] ; then +./hosts/mediafire.sh:283: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/mediafire.sh:284: fi +./hosts/mediafire.sh:285: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/mediafire.sh:286: containsHtml=false +./hosts/mediafire.sh:287: else +./hosts/mediafire.sh:288: containsHtml=true +./hosts/mediafire.sh:289: fi +-- ./hosts/nippy.sh:119: response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url") ./hosts/nippy.sh:120: if [ "${DebugAllEnabled}" == "true" ] ; then ./hosts/nippy.sh:121: debugHtml "${remote_url##*/}" "nippy_dwnpage$i" "fixed_url: ${fixed_url}"$'\n'"${response}" @@ -1379,6 +1833,32 @@ _________________________________________________________________________ ./hosts/pixeldrain.sh:351: received_file_size=0 ./hosts/pixeldrain.sh:352: if [ -f "$file_path" ] ; then -- +./hosts/quax.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") +./hosts/quax.sh:86: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/quax.sh:87: debugHtml "${remote_url##*/}" "qx_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/quax.sh:88: fi +./hosts/quax.sh:89: if [ ! -z "$file_header" ] ; then +./hosts/quax.sh:90: if grep -Eqi '404 Not Found|HTTP.* 404' <<< "${file_header}" ; then +./hosts/quax.sh:91: echo -e "${RED}| The file has been removed (404).${NC}" +./hosts/quax.sh:92: removedDownload "${remote_url}" +./hosts/quax.sh:93: exitDownloadNotAvailable=true +./hosts/quax.sh:94: return 1 +./hosts/quax.sh:95: fi +-- +./hosts/quax.sh:176: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/quax.sh:177: else +./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" +./hosts/quax.sh:179: fi +./hosts/quax.sh:180: received_file_size=0 +./hosts/quax.sh:181: if [ -f "$file_path" ] ; then +./hosts/quax.sh:182: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/quax.sh:183: fi +./hosts/quax.sh:184: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/quax.sh:185: containsHtml=false +./hosts/quax.sh:186: else +./hosts/quax.sh:187: containsHtml=true +./hosts/quax.sh:188: fi +-- ./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") ./hosts/ranoz.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then ./hosts/ranoz.sh:92: debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}" @@ -1391,50 +1871,91 @@ _________________________________________________________________________ ./hosts/ranoz.sh:99: if [ "${finalAttempt}" == "true" ] ; then ./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" "" -- -./hosts/ranoz.sh:144: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url") -./hosts/ranoz.sh:145: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/ranoz.sh:146: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}" -./hosts/ranoz.sh:147: fi -./hosts/ranoz.sh:148: if [[ -z $file_header ]] ; then -./hosts/ranoz.sh:149: if [ $j == $maxfetchretries ] ; then -./hosts/ranoz.sh:150: rm -f "${rz_cookie_jar}"; 
-./hosts/ranoz.sh:151: printf "\\n" -./hosts/ranoz.sh:152: echo -e "${RED}| Failed to extract file info${NC}" -./hosts/ranoz.sh:153: warnAndRetryUnknownError=true -./hosts/ranoz.sh:154: if [ "${finalAttempt}" == "true" ] ; then +./hosts/ranoz.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url") +./hosts/ranoz.sh:151: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/ranoz.sh:152: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/ranoz.sh:153: fi +./hosts/ranoz.sh:154: if [[ -z $file_header ]] ; then +./hosts/ranoz.sh:155: if [ $j == $maxfetchretries ] ; then +./hosts/ranoz.sh:156: rm -f "${rz_cookie_jar}"; +./hosts/ranoz.sh:157: printf "\\n" +./hosts/ranoz.sh:158: echo -e "${RED}| Failed to extract file info${NC}" +./hosts/ranoz.sh:159: warnAndRetryUnknownError=true +./hosts/ranoz.sh:160: if [ "${finalAttempt}" == "true" ] ; then -- -./hosts/ranoz.sh:255: tor_curl_request --insecure -L -G --no-alpn \ -./hosts/ranoz.sh:256: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/ranoz.sh:257: "$download_url" --continue-at - --output "$file_path" -./hosts/ranoz.sh:258: else -./hosts/ranoz.sh:259: tor_curl_request --insecure -L -G --no-alpn \ -./hosts/ranoz.sh:260: "$download_url" --continue-at - --output "$file_path" -./hosts/ranoz.sh:261: fi -./hosts/ranoz.sh:262: else -./hosts/ranoz.sh:263: if [ "${RateMonitorEnabled}" == "true" ]; then -./hosts/ranoz.sh:264: tor_curl_request --insecure -L -G --no-alpn \ -./hosts/ranoz.sh:265: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/ranoz.sh:266: -H "User-Agent: $RandomUA" \ -./hosts/ranoz.sh:267: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/ranoz.sh:268: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/ranoz.sh:269: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/ranoz.sh:270: -H "Connection: keep-alive" \ -./hosts/ranoz.sh:271: -H "Cookie: lng=eng" \ -./hosts/ranoz.sh:272: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/ranoz.sh:273: -H "Sec-Fetch-Dest: document" \ -./hosts/ranoz.sh:274: -H "Sec-Fetch-Mode: navigate" \ +./hosts/ranoz.sh:261: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:262: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/ranoz.sh:263: "$download_url" --continue-at - --output "$file_path" +./hosts/ranoz.sh:264: else +./hosts/ranoz.sh:265: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:266: "$download_url" --continue-at - --output "$file_path" +./hosts/ranoz.sh:267: fi +./hosts/ranoz.sh:268: else +./hosts/ranoz.sh:269: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/ranoz.sh:270: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:271: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/ranoz.sh:272: -H "User-Agent: $RandomUA" \ +./hosts/ranoz.sh:273: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/ranoz.sh:274: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/ranoz.sh:275: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/ranoz.sh:276: -H "Connection: keep-alive" \ +./hosts/ranoz.sh:277: -H "Cookie: lng=eng" \ +./hosts/ranoz.sh:278: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/ranoz.sh:279: -H "Sec-Fetch-Dest: document" \ +./hosts/ranoz.sh:280: -H "Sec-Fetch-Mode: navigate" \ -- -./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \ 
-./hosts/ranoz.sh:280: -H "User-Agent: $RandomUA" \ -./hosts/ranoz.sh:281: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/ranoz.sh:282: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/ranoz.sh:283: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/ranoz.sh:284: -H "Connection: keep-alive" \ -./hosts/ranoz.sh:285: -H "Cookie: lng=eng" \ -./hosts/ranoz.sh:286: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/ranoz.sh:287: -H "Sec-Fetch-Dest: document" \ -./hosts/ranoz.sh:288: -H "Sec-Fetch-Mode: navigate" \ -./hosts/ranoz.sh:289: -H "Sec-Fetch-Site: same-origin" \ +./hosts/ranoz.sh:285: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:286: -H "User-Agent: $RandomUA" \ +./hosts/ranoz.sh:287: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/ranoz.sh:288: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/ranoz.sh:289: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/ranoz.sh:290: -H "Connection: keep-alive" \ +./hosts/ranoz.sh:291: -H "Cookie: lng=eng" \ +./hosts/ranoz.sh:292: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/ranoz.sh:293: -H "Sec-Fetch-Dest: document" \ +./hosts/ranoz.sh:294: -H "Sec-Fetch-Mode: navigate" \ +./hosts/ranoz.sh:295: -H "Sec-Fetch-Site: same-origin" \ +-- +./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/syspro.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/syspro.sh:90: debugHtml "${remote_url##*/}" "sysp_fetch$i" "${response}" +./hosts/syspro.sh:91: fi +./hosts/syspro.sh:92: if [[ -z $response ]] ; then +./hosts/syspro.sh:93: if [ $i == $maxfetchretries ] ; then +./hosts/syspro.sh:94: printf "\\n" +./hosts/syspro.sh:95: echo -e "${RED}| Failed to extract download link [1]${NC}" +./hosts/syspro.sh:96: warnAndRetryUnknownError=true +./hosts/syspro.sh:97: if [ "${finalAttempt}" == "true" ] ; then +./hosts/syspro.sh:98: failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" +-- +./hosts/syspro.sh:188: tor_curl_request --insecure -L \ +./hosts/syspro.sh:189: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/syspro.sh:190: --data "$form_data" "$post_action" \ +./hosts/syspro.sh:191: --output "$file_path" --output "$file_path" +./hosts/syspro.sh:192: else +./hosts/syspro.sh:193: tor_curl_request --insecure \ +./hosts/syspro.sh:194: --data "$form_data" "$post_action" \ +./hosts/syspro.sh:195: --output "$file_path" --output "$file_path" +./hosts/syspro.sh:196: fi +./hosts/syspro.sh:197: else +./hosts/syspro.sh:198: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/syspro.sh:199: tor_curl_request --insecure -L \ +./hosts/syspro.sh:200: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/syspro.sh:201: -H "User-Agent: $RandomUA" \ +./hosts/syspro.sh:202: --data "$form_data" "$post_action" \ +./hosts/syspro.sh:203: --output "$file_path" --output "$file_path" +./hosts/syspro.sh:204: else +./hosts/syspro.sh:205: tor_curl_request --insecure -L \ +./hosts/syspro.sh:206: -H "User-Agent: $RandomUA" \ +./hosts/syspro.sh:207: --data "$form_data" "$post_action" \ +./hosts/syspro.sh:208: --output "$file_path" --output "$file_path" +./hosts/syspro.sh:209: fi +./hosts/syspro.sh:210: fi +./hosts/syspro.sh:211: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/syspro.sh:212: containsHtml=false +./hosts/syspro.sh:213: else +./hosts/syspro.sh:214: containsHtml=true +./hosts/syspro.sh:215: fi -- 
./hosts/tempfileme.sh:89: response=$(tor_curl_request --insecure -L -s "$remote_url") ./hosts/tempfileme.sh:90: if [ "${DebugAllEnabled}" == "true" ] ; then @@ -1864,38 +2385,38 @@ _________________________________________________________________________ ./hosts/uploadhive.sh:136: debugHtml "${remote_url##*/}" "uhive_post" "${response}" ./hosts/uploadhive.sh:137: fi ./hosts/uploadhive.sh:138: if [[ -z $response ]] ; then -./hosts/uploadhive.sh:139: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/uploadhive.sh:139: echo -e "${RED}| Failed to extract download link [1]${NC}" ./hosts/uploadhive.sh:140: warnAndRetryUnknownError=true ./hosts/uploadhive.sh:141: if [ "${finalAttempt}" == "true" ] ; then -./hosts/uploadhive.sh:142: failedRetryDownload "${remote_url}" "" "" +./hosts/uploadhive.sh:142: failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" ./hosts/uploadhive.sh:143: fi ./hosts/uploadhive.sh:144: return 1 -- -./hosts/uploadhive.sh:175: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url") -./hosts/uploadhive.sh:176: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/uploadhive.sh:177: debugHtml "${remote_url##*/}" "uhive_head$j" "download_url: ${download_url}"$'\n'"${file_header}" -./hosts/uploadhive.sh:178: fi -./hosts/uploadhive.sh:179: if [[ -z $file_header ]] ; then -./hosts/uploadhive.sh:180: if [ $j == $maxfetchretries ] ; then -./hosts/uploadhive.sh:181: printf "\\n" -./hosts/uploadhive.sh:182: echo -e "${RED}| Failed to extract file info.${NC}" -./hosts/uploadhive.sh:183: warnAndRetryUnknownError=true -./hosts/uploadhive.sh:184: if [ "${finalAttempt}" == "true" ] ; then -./hosts/uploadhive.sh:185: failedRetryDownload "${remote_url}" "" "" +./hosts/uploadhive.sh:185: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url") +./hosts/uploadhive.sh:186: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadhive.sh:187: debugHtml "${remote_url##*/}" "uhive_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/uploadhive.sh:188: fi +./hosts/uploadhive.sh:189: if [[ -z $file_header ]] ; then +./hosts/uploadhive.sh:190: if [ $j == $maxfetchretries ] ; then +./hosts/uploadhive.sh:191: printf "\\n" +./hosts/uploadhive.sh:192: echo -e "${RED}| Failed to extract file info.${NC}" +./hosts/uploadhive.sh:193: warnAndRetryUnknownError=true +./hosts/uploadhive.sh:194: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadhive.sh:195: failedRetryDownload "${remote_url}" "" "" -- -./hosts/uploadhive.sh:269: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./hosts/uploadhive.sh:270: else -./hosts/uploadhive.sh:271: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" -./hosts/uploadhive.sh:272: fi -./hosts/uploadhive.sh:273: received_file_size=0 -./hosts/uploadhive.sh:274: if [ -f "$file_path" ] ; then -./hosts/uploadhive.sh:275: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./hosts/uploadhive.sh:276: fi -./hosts/uploadhive.sh:277: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./hosts/uploadhive.sh:278: containsHtml=false -./hosts/uploadhive.sh:279: else -./hosts/uploadhive.sh:280: containsHtml=true -./hosts/uploadhive.sh:281: fi +./hosts/uploadhive.sh:279: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval 
"$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:280: else +./hosts/uploadhive.sh:281: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:282: fi +./hosts/uploadhive.sh:283: received_file_size=0 +./hosts/uploadhive.sh:284: if [ -f "$file_path" ] ; then +./hosts/uploadhive.sh:285: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/uploadhive.sh:286: fi +./hosts/uploadhive.sh:287: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/uploadhive.sh:288: containsHtml=false +./hosts/uploadhive.sh:289: else +./hosts/uploadhive.sh:290: containsHtml=true +./hosts/uploadhive.sh:291: fi -- ./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/") ./hosts/up_1fichier.sh:108: if [ "${DebugAllEnabled}" == "true" ] ; then @@ -1921,29 +2442,17 @@ _________________________________________________________________________ ./hosts/up_1fichier.sh:189: "${PostUrlHost}") ./hosts/up_1fichier.sh:190: if [ "${DebugAllEnabled}" == "true" ] ; then -- -./hosts/up_acid.sh:102: response=$(tor_curl_upload --insecure -i \ -./hosts/up_acid.sh:103: -H "Content-Type: multipart/form-data" \ -./hosts/up_acid.sh:104: -F "time=month" \ -./hosts/up_acid.sh:105: -F "file=@${filepath}" \ -./hosts/up_acid.sh:106: "${PostUrlHost}") -./hosts/up_acid.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/up_acid.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" -./hosts/up_acid.sh:109: fi -./hosts/up_acid.sh:110: if grep -Eqi ' 200 ' <<< "${response}" ; then -./hosts/up_acid.sh:111: hash=$(echo "$response" | tail -2 | head -1) -./hosts/up_acid.sh:112: hash=${hash//[$'\t\r\n']} --- -./hosts/up_anarchaserver.sh:102: response=$(tor_curl_upload --insecure -i \ -./hosts/up_anarchaserver.sh:103: -H "Content-Type: multipart/form-data" \ -./hosts/up_anarchaserver.sh:104: -F "time=month" \ -./hosts/up_anarchaserver.sh:105: -F "file=@${filepath}" \ -./hosts/up_anarchaserver.sh:106: "${PostUrlHost}") -./hosts/up_anarchaserver.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/up_anarchaserver.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" -./hosts/up_anarchaserver.sh:109: fi -./hosts/up_anarchaserver.sh:110: if grep -Eqi ' 200 ' <<< "${response}" ; then -./hosts/up_anarchaserver.sh:111: hash=$(echo "$response" | tail -2 | head -1) -./hosts/up_anarchaserver.sh:112: hash=${hash//[$'\t\r\n']} +./hosts/up_anonfile.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_anonfile.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_anonfile.sh:104: -F "sess_id=" \ +./hosts/up_anonfile.sh:105: -F "utype=anon" \ +./hosts/up_anonfile.sh:106: -F "file_descr=" \ +./hosts/up_anonfile.sh:107: -F "file_public=1" \ +./hosts/up_anonfile.sh:108: -F "link_rcpt=" \ +./hosts/up_anonfile.sh:109: -F "link_pass=" \ +./hosts/up_anonfile.sh:110: -F "to_folder=" \ +./hosts/up_anonfile.sh:111: -F "upload=Start upload" \ +./hosts/up_anonfile.sh:112: -F "keepalive=1" \ -- ./hosts/up_anonsharing.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_anonsharing.sh:103: -H "Content-Type: multipart/form-data" \ @@ -1957,6 +2466,18 @@ _________________________________________________________________________ ./hosts/up_anonsharing.sh:111: filesize=$(GetFileSize "$filepath" "false") ./hosts/up_anonsharing.sh:112: 
downloadLink="https://anonsharing.com/fileid=${fileid}" -- +./hosts/up_ateasystems.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_ateasystems.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_ateasystems.sh:104: -F "tos=" \ +./hosts/up_ateasystems.sh:105: -F "file_0_descr=" \ +./hosts/up_ateasystems.sh:106: -F "link_pass=" \ +./hosts/up_ateasystems.sh:107: -F "file_0=@${filepath}" \ +./hosts/up_ateasystems.sh:108: "${PostUrlHost}") +./hosts/up_ateasystems.sh:109: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_ateasystems.sh:110: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_ateasystems.sh:111: fi +./hosts/up_ateasystems.sh:112: if grep -Eqi "Location: https://share\.ateasystems\.com/share/\?\&filename\=" <<< "${response}" ; then +-- ./hosts/up_axfc.sh:109: response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url") ./hosts/up_axfc.sh:110: if [ "${DebugAllEnabled}" == "true" ] ; then ./hosts/up_axfc.sh:111: debugHtml "${filepath##*/}" "axfc_fetch$i" "${response}" @@ -2029,6 +2550,18 @@ _________________________________________________________________________ ./hosts/up_dailyuploads.sh:118: -F "file_0=@$filepath" \ ./hosts/up_dailyuploads.sh:119: "${PostUrlHost}") -- +./hosts/up_dashfile.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_dashfile.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_dashfile.sh:104: -F "sess_id=" \ +./hosts/up_dashfile.sh:105: -F "utype=anon" \ +./hosts/up_dashfile.sh:106: -F "file_descr=" \ +./hosts/up_dashfile.sh:107: -F "file_public=1" \ +./hosts/up_dashfile.sh:108: -F "link_rcpt=" \ +./hosts/up_dashfile.sh:109: -F "link_pass=" \ +./hosts/up_dashfile.sh:110: -F "to_folder=" \ +./hosts/up_dashfile.sh:111: -F "upload=Start upload" \ +./hosts/up_dashfile.sh:112: -F "keepalive=1" \ +-- ./hosts/up_dataupload.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_dataupload.sh:103: -H "Content-Type: multipart/form-data" \ ./hosts/up_dataupload.sh:104: -F "sess_id=" \ @@ -2053,18 +2586,6 @@ _________________________________________________________________________ ./hosts/up_dbree.sh:111: url=$(grep -oPi '(?<=
Why we block tor' <<< "${response}" ; then +./hosts/up_fileditch.sh:115: if ((j >= 20)); then +./hosts/up_fileditch.sh:116: if [ "${finalAttempt}" == "true" ] ; then +./hosts/up_fileditch.sh:117: printf "\\n" -- ./hosts/up_filehaus.sh:106: response=$(tor_curl_upload --insecure -i \ ./hosts/up_filehaus.sh:107: -H "Content-Type: multipart/form-data" \ @@ -2113,18 +2634,6 @@ _________________________________________________________________________ ./hosts/up_filehaus.sh:115: url=$(grep -oPi '(?<=https://).*(?=\.filehaus\.su).*?(?=$)' <<< "$response") ./hosts/up_filehaus.sh:116: filesize=$(GetFileSize "$filepath" "false") -- -./hosts/up_filesquid.sh:104: response=$(tor_curl_upload --insecure -i \ -./hosts/up_filesquid.sh:105: -H "Content-Type: multipart/form-data" \ -./hosts/up_filesquid.sh:106: -F "time=month" \ -./hosts/up_filesquid.sh:107: -F "file=@${filepath}" \ -./hosts/up_filesquid.sh:108: "${PostUrlHost}") -./hosts/up_filesquid.sh:109: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/up_filesquid.sh:110: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" -./hosts/up_filesquid.sh:111: fi -./hosts/up_filesquid.sh:112: if grep -Eqi 'No password nor allowed IP' <<< "$response"; then -./hosts/up_filesquid.sh:113: if ((i >= maxretries)) ; then -./hosts/up_filesquid.sh:114: echo -e "${RED}| Upload failed. No password nor allowed IP.${NC}" --- ./hosts/up_firestorage.sh:113: response=$(tor_curl_upload --insecure -i \ ./hosts/up_firestorage.sh:114: -H "Content-Type: multipart/form-data" \ ./hosts/up_firestorage.sh:115: -F "jqueryupload=1" \ @@ -2137,18 +2646,6 @@ _________________________________________________________________________ ./hosts/up_firestorage.sh:122: dec_response=$(urldecode "$response") ./hosts/up_firestorage.sh:123: if grep -Eqi '= MaxDownloadRetries)) ; then -./mad.sh:1392: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" -./mad.sh:1393: exit 1 +./mad.sh:1476: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1477: received_file_size=0 +./mad.sh:1478: if [ -f "$file_path" ] ; then +./mad.sh:1479: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:1480: fi +./mad.sh:1481: if ((received_file_size == file_size_bytes)) ; then +./mad.sh:1482: break +./mad.sh:1483: elif ((received_file_size < file_size_bytes)) ; then +./mad.sh:1484: if ((j >= MaxDownloadRetries)) ; then +./mad.sh:1485: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" +./mad.sh:1486: exit 1 -- -./mad.sh:1436: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) -./mad.sh:1437: if [ "${DebugAllEnabled}" == "true" ] ; then -./mad.sh:1438: debugHtml "github" "lbf_inst_curlimp$j" "$response" -./mad.sh:1439: fi -./mad.sh:1440: if [ ! 
-z "$response" ]; then -./mad.sh:1441: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") -./mad.sh:1442: latestBinaryDate=$(grep -oPi -m 1 '(?<== MaxDownloadRetries)) ; then -./mad.sh:1515: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" -./mad.sh:1516: exit 1 +./mad.sh:1599: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1600: received_file_size=0 +./mad.sh:1601: if [ -f "$file_path" ] ; then +./mad.sh:1602: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:1603: fi +./mad.sh:1604: if ((received_file_size == file_size_bytes)) ; then +./mad.sh:1605: break +./mad.sh:1606: elif ((received_file_size < file_size_bytes)) ; then +./mad.sh:1607: if ((j >= MaxDownloadRetries)) ; then +./mad.sh:1608: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" +./mad.sh:1609: exit 1 -- -./mad.sh:1711: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1712: echo -e "Files:" -./mad.sh:1713: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1714: echo -e "" -./mad.sh:1715: echo -e "" -./mad.sh:1716: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1717: echo -e "_________________________________________________________________________" -./mad.sh:1718: echo -e "$maud_http" -./mad.sh:1719: echo -e "" -./mad.sh:1720: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1721: echo -e "_________________________________________________________________________" +./mad.sh:1804: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1805: echo -e "Files:" +./mad.sh:1806: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1807: echo -e "" +./mad.sh:1808: echo -e "" +./mad.sh:1809: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1810: echo -e "_________________________________________________________________________" +./mad.sh:1811: echo -e "$maud_http" +./mad.sh:1812: echo -e "" +./mad.sh:1813: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1814: echo -e "_________________________________________________________________________" -- -./mad.sh:1724: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1725: echo -e "_________________________________________________________________________" -./mad.sh:1726: echo -e "$maud_torcurl" -./mad.sh:1727: echo -e "" -./mad.sh:1728: echo -e "" -./mad.sh:1729: done -./mad.sh:1730: else -./mad.sh:1731: cd "$ScriptDir" -./mad.sh:1732: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) -./mad.sh:1733: cd "$WorkDir" -./mad.sh:1734: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +./mad.sh:1817: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1818: echo -e "_________________________________________________________________________" +./mad.sh:1819: echo -e "$maud_torcurl" +./mad.sh:1820: echo -e "" +./mad.sh:1821: echo -e "" +./mad.sh:1822: done +./mad.sh:1823: else +./mad.sh:1824: cd "$ScriptDir" +./mad.sh:1825: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +./mad.sh:1826: cd "$WorkDir" +./mad.sh:1827: readarray -d $'' arrFiles2 < <(find . 
-name "*.sh" -printf '%p\n' | sort -Vk1) -- -./mad.sh:1739: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1740: echo -e "Files:" -./mad.sh:1741: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1742: echo -e "" -./mad.sh:1743: echo -e "" -./mad.sh:1744: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1745: echo -e "_________________________________________________________________________" -./mad.sh:1746: echo -e "$maud_http" -./mad.sh:1747: echo -e "" -./mad.sh:1748: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" -./mad.sh:1749: echo -e "_________________________________________________________________________" +./mad.sh:1832: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1833: echo -e "Files:" +./mad.sh:1834: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1835: echo -e "" +./mad.sh:1836: echo -e "" +./mad.sh:1837: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1838: echo -e "_________________________________________________________________________" +./mad.sh:1839: echo -e "$maud_http" +./mad.sh:1840: echo -e "" +./mad.sh:1841: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" +./mad.sh:1842: echo -e "_________________________________________________________________________" -- -./mad.sh:1752: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1753: echo -e "_________________________________________________________________________" -./mad.sh:1754: echo -e "$maud_torcurl" -./mad.sh:1755: echo -e "" -./mad.sh:1756: done -./mad.sh:1757: for fil in "${arrFiles2[@]}"; -./mad.sh:1758: do -./mad.sh:1759: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):') -./mad.sh:1760: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1761: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1762: echo -e "Files:" -./mad.sh:1763: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1764: echo -e "" -./mad.sh:1765: echo -e "" -./mad.sh:1766: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1767: echo -e "_________________________________________________________________________" -./mad.sh:1768: echo -e "$maud_http" -./mad.sh:1769: echo -e "" -./mad.sh:1770: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1771: echo -e "_________________________________________________________________________" +./mad.sh:1845: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1846: echo -e "_________________________________________________________________________" +./mad.sh:1847: echo -e "$maud_torcurl" +./mad.sh:1848: echo -e "" +./mad.sh:1849: done +./mad.sh:1850: for fil in "${arrFiles2[@]}"; +./mad.sh:1851: do +./mad.sh:1852: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):') +./mad.sh:1853: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1854: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1855: echo -e "Files:" +./mad.sh:1856: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1857: echo -e "" +./mad.sh:1858: echo -e "" +./mad.sh:1859: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1860: echo -e "_________________________________________________________________________" +./mad.sh:1861: echo -e "$maud_http" +./mad.sh:1862: echo -e "" +./mad.sh:1863: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1864: echo -e "_________________________________________________________________________" -- -./mad.sh:1774: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1775: echo -e "_________________________________________________________________________" -./mad.sh:1776: echo -e "$maud_torcurl" -./mad.sh:1777: echo -e "" -./mad.sh:1778: done -./mad.sh:1779: fi -./mad.sh:1780:} -./mad.sh:1781:madStatus() { -./mad.sh:1782: local InputFile="$1" -./mad.sh:1783: if [ "$arg1" == "status" ] ; then -./mad.sh:1784: clear +./mad.sh:1867: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1868: echo -e "_________________________________________________________________________" +./mad.sh:1869: echo -e "$maud_torcurl" +./mad.sh:1870: echo -e "" +./mad.sh:1871: done +./mad.sh:1872: fi +./mad.sh:1873:} +./mad.sh:1874:madStatus() { +./mad.sh:1875: local InputFile="$1" +./mad.sh:1876: if [ "$arg1" == "status" ] ; then +./mad.sh:1877: clear -- -./mad.sh:3099: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ -./mad.sh:3100: -H "Connection: keep-alive" \ -./mad.sh:3101: -w 'EffectiveUrl=%{url_effective}' \ -./mad.sh:3102: "$download_url") -./mad.sh:3103: else -./mad.sh:3104: printf "| Retrieving Head: attempt #$j" -./mad.sh:3105: rm -f "${WorkDir}/.temp/directhead" -./mad.sh:3106: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | -./mad.sh:3107: tee "${WorkDir}/.temp/directhead" & -./mad.sh:3108: sleep 6 -./mad.sh:3109: [ -s "${WorkDir}/.temp/directhead" ] -./mad.sh:3110: kill $! 2>/dev/null -./mad.sh:3111: ) -./mad.sh:3112: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then -./mad.sh:3113: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" -./mad.sh:3114: fi -./mad.sh:3115: rm -f "${WorkDir}/.temp/directhead" -./mad.sh:3116: fi +./mad.sh:3192: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ +./mad.sh:3193: -H "Connection: keep-alive" \ +./mad.sh:3194: -w 'EffectiveUrl=%{url_effective}' \ +./mad.sh:3195: "$download_url") +./mad.sh:3196: else +./mad.sh:3197: printf "| Retrieving Head: attempt #$j" +./mad.sh:3198: rm -f "${WorkDir}/.temp/directhead" +./mad.sh:3199: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./mad.sh:3200: tee "${WorkDir}/.temp/directhead" & +./mad.sh:3201: sleep 6 +./mad.sh:3202: [ -s "${WorkDir}/.temp/directhead" ] +./mad.sh:3203: kill $! 2>/dev/null +./mad.sh:3204: ) +./mad.sh:3205: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./mad.sh:3206: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +./mad.sh:3207: fi +./mad.sh:3208: rm -f "${WorkDir}/.temp/directhead" +./mad.sh:3209: fi -- -./mad.sh:3234: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" -./mad.sh:3235: rc=$? 
-./mad.sh:3236: if [ $rc -ne 0 ] ; then -./mad.sh:3237: printf "${RED}Download Failed (bad exit status).${NC}" -./mad.sh:3238: if [ -f ${file_path} ]; then -./mad.sh:3239: printf "${YELLOW} Partial removed...${NC}" -./mad.sh:3240: printf "\n\n" -./mad.sh:3241: rm -f "${file_path}" -./mad.sh:3242: else -./mad.sh:3243: printf "\n\n" -./mad.sh:3244: fi +./mad.sh:3327: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" +./mad.sh:3328: rc=$? +./mad.sh:3329: if [ $rc -ne 0 ] ; then +./mad.sh:3330: printf "${RED}Download Failed (bad exit status).${NC}" +./mad.sh:3331: if [ -f ${file_path} ]; then +./mad.sh:3332: printf "${YELLOW} Partial removed...${NC}" +./mad.sh:3333: printf "\n\n" +./mad.sh:3334: rm -f "${file_path}" +./mad.sh:3335: else +./mad.sh:3336: printf "\n\n" +./mad.sh:3337: fi -- -./mad.sh:3278: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./mad.sh:3279: else -./mad.sh:3280: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./mad.sh:3281: fi -./mad.sh:3282: received_file_size=0 -./mad.sh:3283: if [ -f "$file_path" ] ; then -./mad.sh:3284: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./mad.sh:3285: fi -./mad.sh:3286: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./mad.sh:3287: containsHtml=false -./mad.sh:3288: else -./mad.sh:3289: containsHtml=true -./mad.sh:3290: fi +./mad.sh:3371: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./mad.sh:3372: else +./mad.sh:3373: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./mad.sh:3374: fi +./mad.sh:3375: received_file_size=0 +./mad.sh:3376: if [ -f "$file_path" ] ; then +./mad.sh:3377: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:3378: fi +./mad.sh:3379: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./mad.sh:3380: containsHtml=false +./mad.sh:3381: else +./mad.sh:3382: containsHtml=true +./mad.sh:3383: fi +-- +./mad.sh:3571: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3572: -H "Content-Type: multipart/form-data" \ +./mad.sh:3573: -F "key=" \ +./mad.sh:3574: -F "time=$jira_timeval" \ +./mad.sh:3575: -F "file=@${filepath}" \ +./mad.sh:3576: "${jira_PostUrlHost}") +./mad.sh:3577: else +./mad.sh:3578: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3579: -H "Content-Type: multipart/form-data" \ +./mad.sh:3580: -F "key=" \ +./mad.sh:3581: -F "time=$jira_timeval" \ +./mad.sh:3582: -F "files[]=@${arrFiles[@]}" \ +./mad.sh:3583: "${jira_PostUrlHost}") +./mad.sh:3584: fi +./mad.sh:3585: if [ "${DebugAllEnabled}" == "true" ] ; then +./mad.sh:3586: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}" +./mad.sh:3587: fi +./mad.sh:3588: if grep -Eqi ' 200 ' <<< "${response}" ; then diff --git a/documentation/!Changelog (Historical).txt b/documentation/!Changelog (Historical).txt index 145a984..73cddac 100755 --- a/documentation/!Changelog (Historical).txt +++ b/documentation/!Changelog (Historical).txt @@ -3,6 +3,66 @@ # # ---------- Initial release with MAD Uploader functionality ---------- +# 2024.11.23 - [filehaus] Use tor_curl_request_extended for head / get for filehaus urls +# 2024.11.23 - [mad] Make 
tor_curl_request_extended use a random timeout between 30-60 seconds
+# 2024.11.22 - [up_quax, quax] Add qu.ax as an upload and download host
+# 2024.11.21 - [filedot] Fix check for post filename
+# 2024.11.20 - [gofile] Handle parsing parent gofile url into multiple download urls
+# (still needs updating to handle child urls gofile.io/download/web//file)
+# 2024.11.19 - [mad] Add updateUrlDownload function to handle updating a url
+# (ie. parent gofile url with children urls)
+# 2024.11.18 - [up_fileditch / fileditch] Add fileditch.com as upload and download host
+# 2024.11.17 - [innocent] Fix "Fetching file info". Support resume downloads.
+# 2024.11.16 - [mad] Fix reload on uploads.txt modified (uploads: filemode)
+# 2024.11.16 - [up_*] Fix removal of upload ticket if filesize is not supported
+# 2024.11.15 - [familleflender] Add famille-flender.fr as download host
+# 2024.11.15 - [up_familleflender] Add famille-flender.fr as upload host
+# 2024.11.15 - [up_filehaus] Finish the uploader (the server is back online)
+# 2024.11.14 - [up_skrepr, skrepr] Add transfer.skrepr.com as upload and download host
+# 2024.11.13 - [up_pixeldrain] Add pixeldrain as an upload host
+# 2024.11.13 - [mad] Add pixeldrain apikey section to allow pd uploads
+# 2024.11.13 - [up_filesquid] Add "No password nor allowed IP" response handling
+# 2024.11.12 - [mad] Fix uploads.txt status marking (urls / messages) containing '&' chars
+# 2024.11.12 - [up_torup] Max upload filesize changed to 150MB
+# 2024.11.12 - [up_uploadee] Add upload.ee as an upload host
+# 2024.11.11 - [up_offcat] Add Offshore.cat as upload host
+# 2024.11.11 - [mad] Add OffShore.cat Upload ApiKeys section to allow using Offshore.cat as upload host
+# 2024.11.10 - [mad] Fix uploads.txt multi-terminal processing (use /uploads/temp_upload_handler.txt)
+# 2024.11.10 - [1fichier] Add new "has been automatically deleted after its free hosting period expired" response
+# 2024.11.10 - [up_torup] Add TorUp as an upload host
+# 2024.11.09 - [torup] Add TorUp as a download host (no resume)
+# (ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion)
+# 2024.11.08 - [nippy] Fix nippydrive.com. Update detection of temporarily unavailable response.
+# 2024.11.08 - [up2share] Fix download url (https)
+# 2024.11.08 - [up2share] Fix advertised filesize on a redirection
+# 2024.11.06 - [SkipUrlsInDownloadsCompletedTxt] Fix blank url check
+# 2024.11.06 - [ranoz] Add ranoz.gg as download host
+# 2024.11.05 - [up_ranoz] Add ranoz.gg as upload host
+# 2024.11.02 - [innocent] Disable rate monitor on download
+# 2024.11.02 - [mad, innocent] Add 18s timeout on HEAD (get hack) for no response from host
+# 2024.11.01 - [up_uploadhive] Add uploadhive as upload host
+# 2024.11.01 - [innocent] Switch between 3 alternate head type attempts
+# 2024.10.30 - [uploadev] Add additional file removed response handling
+# 2024.10.30 - [anonsharing] Add AnonSharing.com as download host (only processes urls with fileid)
+# 2024.10.29 - [kraken] Add recaptcha response handling
+# 2024.10.29 - [bowfile] Add "File has been removed by the site administrator" response
+# 2024.10.29 - [up_anonsharing] Add AnonSharing.com as upload host (unique url with fileid)
+# 2024.10.29 - [uploadev] Add UploadEv.org as download host
+# 2024.10.27 - [up_uploadev] Add UploadEv.org as upload host
+# 2024.10.25 - [dosya] Add check for too many failed responses (try again later)
+# 2024.10.24 - [bedrive / up_bedrive] Add bedrive.ru as download / upload host +# 2024.10.24 - [mad] Add pw: and ref: keyword values to ./data/downloads_completed.txt logging +# 2024.10.24 - [mad] Add extended connection-timeout request (tor_curl_request_extended) +# 2024.10.24 - [dosya] Use extended connection-timeout request for HEAD +# 2024.10.23 - [mad] Fix PostFailedUpload function call +# 2024.10.22 - [innocent / up_innocent] Add innocent.onion as download / upload host +# * Current download does not support resume +# 2024.10.22 - [mad] Few updates to direct download +# 2024.10.21 - [nippy] Update nippy to handle dbree (.me, .org) +# 2024.10.21 - [dbree] Add dbree.org +# 2024.10.17 - [dailyuploads] Add recaptcha detection and abort (js required) +# * Working on possible PJSCloud solution +# 2024.10.16 - [mad] Remove tor_curl_upload duplicate connect-timeout (thanks PeachX) # 2024.10.14 - [pjscloud] Create pjscloud.sh plugin -- use PhantomJSCloud to get a response from a url # * Change pixeldrain ViewPump to use pjscloud.sh plugin. (default enabled) # * Testing usage on other javascript required response hosts (ie. daily, hexload) diff --git a/documentation/README-upload_hosts.txt b/documentation/README-upload_hosts.txt index 99e98b0..abbda9e 100755 --- a/documentation/README-upload_hosts.txt +++ b/documentation/README-upload_hosts.txt @@ -5,9 +5,11 @@ Max Size . HostCode . Nickname . Notes # --------------------------------------------------------------------------------------- 300GB 1f 1fichier.com 15d expiry free accounts - 300GB fh filehaus.top (.su) ?? expiry +- 300GB fh filehaus.top (.su) ?? expiry + 40GB isup isupload.com ?? expiry 20GB rz ranoz.gg ?? expiry 20GB pd pixeldrain 120d expiry + 20GB atea ateasystems.com ?? expiry 10GB gofile gofile.io ?? expiry 10GB tmpme tempfile.me 3mo expiry (tend to ban 7z faster) 5GB uhive uploadhive @@ -18,44 +20,54 @@ Max Size . HostCode . Nickname . Notes - 4GB bd bedrive.ru ?? expiry - 4GB daily dailyuploads.net ?? expiry - 2GB hex hexload.com 30d inactive expiry - 2GB dosya dosyaupload.com 45d inactive expiry +- 2GB dosya dosyaupload.com 45d inactive expiry 2GB fs firestorage.jp 90d+ inactive expiry + 2GB sysp syspro.com.br ?? expiry * 2GB axfc axfc.net 90d+ inactive expiry - 1GB kraken krakenfiles.com 90d inactive expiry 1GB ansh anonsharing.com 6mo expiry + 512MB anon anonfile.de ?? expiry + 400MB dash dashfile.net ?? expiry 300MB trbo turbo.onion ~40d expiry 256MB qx qu.ax ?? expiry - 250MB upev uploadev.org 90d inactive expiry +- 250MB upev uploadev.org 90d inactive expiry * 240MB ko kouploader.jp 5mo expiry (240MB max) 150MB torp TorUp.onion 30d inactive expiry + 100MB fb fileblade.com ?? expiry + 100MB ubay uploadbay.net ?? expiry 100MB upee upload.ee 50d expiry 100MB bow bowfile.com 20d inactive expiry - 100MB yolo yolobit ?? expiry - 100MB nofile nofile.org ?? expiry - 100MB so share-online.vg ?? expiry +- 100MB yolo yolobit ?? expiry +- 100MB nofile nofile.org ?? expiry +- 100MB so share-online.vg ?? expiry 100MB inno innocent.onion ?? expiry # Short Retention ---------------------------------------------------------------------- Max Size . HostCode . Nickname . 
Notes
 # ---------------------------------------------------------------------------------------
- 10GB nant fichiers.nantes.cloud ~1mo expiry, jirafrau
- 10GB anarc anarchaserver.org ~1mo expiry, jirafrau
- 10GB nlib netlib.re ~1mo expiry, jirafrau
+ 10GB nant fichiers.nantes.cloud ~1mo expiry, jirafeau
+- 10GB anarc anarchaserver.org ~1mo expiry, jirafeau
+ 10GB nlib netlib.re ~1mo expiry, jirafeau
 * 10GB raja uploadraja.com 4d inactive expiry
- 5GB moo moocloud.sh ~1mo expiry, jirafrau
- 5GB squid filesquid.net ~1mo expiry, jirafrau
+- 10GB cyx cyssoux.fr ~1mo expiry, jirafeau
+ 5GB moo moocloud.ch ~1mo expiry, jirafeau
+- 5GB frso freesocial.co ~1mo expiry, jirafeau
+- 5GB squid filesquid.net ~1mo expiry, jirafeau
+ 5GB edd eddowding.com ~1mo expiry, jirafeau
 4GB tmpsh temp.sh 3d expiry
- 1GB kaz depot.kaz.bzh ~1mo expiry, jirafrau
- 512MB linx linxx.net ~1mo expiry, jirafrau
- 500MB soy soyjak.download ~1mo expiry, jirafrau
+ 2GB dict dictvm.org ~1mo expiry, jirafeau
+ 1GB kaz depot.kaz.bzh ~1mo expiry, jirafeau
+ 512MB herb herbolistique.com ~1mo expiry, jirafeau
+- 512MB linx linxx.net ~1mo expiry, jirafeau
+- 500MB soy soyjak.download ~1mo expiry, jirafeau
 195MB dup dataupload.net ?? expiry
- 100MB nippy nippy* ?? expiry, (file, share, box, drive, space)
- 100MB dbree dbree.me ?? expiry
- 100MB ffl famille-flender ~1mo expiry, jirafrau
- 100MB harr files.harrault.fr ~1mo expiry, jirafrau
- 100MB acid dl.acid.fr ~1mo expiry, no resume, jirafrau
- 100MB fr4e sendfree4e.fr ~1mo expiry, jirafrau
- ?? skpr skrepr.com ~1mo expiry, jirafrau
+- 100MB nippy nippy* ?? expiry, (file, share, box, drive, space)
+- 100MB dbree dbree.me ?? expiry
+ 100MB ffl famille-flender ~1mo expiry, jirafeau
+ 100MB harr files.harrault.fr ~1mo expiry, jirafeau
+- 100MB acid dl.acid.fr ~1mo expiry, no resume, jirafeau
+ 100MB fr4e sendfree4e.fr ~1mo expiry, jirafeau
+ ?? skpr skrepr.com ~1mo expiry, jirafeau

 Failing (-):
@@ -65,6 +77,21 @@
 kraken krakenfiles.com (MAD download failing -- JS required / Google Recaptcha)
 hex hexload.com (MAD download failing -- JS required / Google Recaptcha)
 bd bedrive.ru (MAD download failing -- JS required / Google Recaptcha)
 uflix uploadflix.cc (JS required)
+fh filehaus (going away)
+so share-online -- Captcha / js required
+dbree dbree.me -- Captcha / js required
+yolo yolobit -- Captcha / js required
+nofile nofile.org -- Captcha / js required
+anarc anarchaserver.org -- Error
+acid dl.acid.fr -- Error
+nippy nippy* -- Captcha / js required
+linx linxx.net -- Upload url no response / gone
+soy soyjak.download -- No response
+upev uploadev.org -- Uploads are not allowed for your account type
+dosya dosyaupload -- Uploads failing / downloads Captcha / js required
+cyx cyssoux.fr -- Error 6 -- server temp file location issue
+frso freesocial.co -- No response
+squid filesquid.net -- No response

 NOTES (*):
 ----------------
@@ -73,7 +100,7 @@
 ko kouploader.jp (MAD download not implemented)
 axfc axfc.net (MAD download not implemented)

-# Jirafraeu hosts:
+# Jirafeau hosts:
 (recommended upload 100MB splits, as many hosts only support that)
diff --git a/hosts/anonfile.sh b/hosts/anonfile.sh
new file mode 100644
index 0000000..dc46f3c
--- /dev/null
+++ b/hosts/anonfile.sh
@@ -0,0 +1,687 @@
+#! Name: anonfile.sh
+#! Author: kittykat
+#! Version: 2024.12.26
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#!
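+#! A quick illustration of the registry string documented below: with this
+#! host's values, the entry appended to ListHostAndDomainRegexes works out to
+#!   '/anon/anonfile/anon/anonfile.de:^(http|https)://(.*\.)?anonfile\.de/@'
+#! and, as a minimal sketch only (an assumption for illustration -- not the
+#! actual mad.sh parser), one such '@'-terminated entry can be split back into
+#! its fields with plain bash parameter expansion:
+#!   entry=${entry%@}        # drop the trailing '@' terminator
+#!   regex=${entry#*:}       # everything past the first ':' is the domain regex
+#!   IFS='/' read -r _ code nick prefix urls <<< "${entry%%:*}"
+#!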
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#! HostUrls: The domain(s) for this host (ie. 'anonfile.de')
+#! HostDomainRegex: The regex used to verify matching urls
+HostCode='anon'
+HostNick='anonfile'
+HostFuncPrefix='anon'
+HostUrls='anonfile.de'
+HostDomainRegex='^(http|https)://(.*\.)?anonfile\.de/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! @REQUIRED: Host Main Download function
+#! Must be named specifically as such:
+#! <HostFuncPrefix>_DownloadFile()
+anon_DownloadFile() {
+  if ! grep -Eq "ocr_captcha.sh" <<< "$LoadPlugins" ; then
+    echo -e "${RED}| The host anonfile requires OcrCaptcha.sh plugin + dependencies.${NC}"
+    failedRetryDownload "${remote_url}" "The host anonfile requires OcrCaptcha.sh plugin + dependencies."
+    return 0
+  fi
+  local remote_url=${1}
+  local file_url=${1}
+  local filecnt=${2}
+  warnAndRetryUnknownError=false
+  exitDownloadError=false
+  exitDownloadNotAvailable=false
+  fileAlreadyDone=false
+  download_inflight_path="${WorkDir}/.inflight/"
+  mkdir -p "$download_inflight_path"
+  completed_location="${WorkDir}/downloads/"
+  tor_identity="${RANDOM}"
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUrlRetries; z++)); do
+    if [ $z -eq $MaxUrlRetries ] ; then
+      finalAttempt="true"
+    fi
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if anon_FetchFileInfo $finalAttempt && anon_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
+      return 0
+    elif [ $z -lt $MaxUrlRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
+        fi
+      fi
+      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+}
+#!
+#! ------------- (2) Fetch File Info Function ----------------- #
+#!
+anon_FetchFileInfo() {
+  finalAttempt=$1
+  maxfetchretries=16
+  anon_cookie_jar=""
+  fixed_url=${remote_url}
+  echo -e "${GREEN}# Fetching post info…${NC}"
+  for ((i=1; i<=$maxfetchretries; i++)); do
+    mkdir -p "${WorkDir}/.temp"
+    anon_cookie_jar=$(mktemp "${WorkDir}/.temp/anon_cookies""${instance_no}"".XXXXXX")
+    printf " ."
+ tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ + -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ + "$fixed_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "anon_fetch$i" "${response}" + fi + if [[ -z $response ]] ; then + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [1]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'You have reached the download-limit' <<< "$response"; then + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Download limit reached for ip${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Download limit reached for ip" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|)' <<< "$response") + post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response") + post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$response") + post_fname=$(urlencode_literal_grouped_case "$post_fname") + else + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [2]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]] ; then + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [3]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + else + break + fi + done + echo -e "${GREEN}# Fetching Post2 / Captcha…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + anon_cookie_jar=$(mktemp "${WorkDir}/.temp/anon_cookies""${instance_no}"".XXXXXX") + printf " ." 
+ CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + form_data="op=${post_op}&usr_login=&id=${post_id}&fname=${post_fname}&referer=&method_free=Free+Download+%3E%3E" + response=$(tor_curl_request --insecure -L -s -X POST \ + -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ + -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ + --data "$form_data" "$fixed_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "anon_post1_$i" "${response}" + fi + if [[ -z $response ]] ; then + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [1]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'You have reached the download-limit' <<< "$response"; then + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Download limit reached for ip${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Download limit reached for ip" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|
)' <<< "$response") + post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response") + post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">)' <<< "$response") + post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") + post_referer=$(urlencode_literal_grouped_case_urlendingonly "$post_referer") + else + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [5]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [5]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if [[ -z "$captcha_code" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [6]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [6]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + else + break + fi + elif grep -Eqi 'class="g-recaptcha" data-sitekey="' <<< "$response" ; then + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Recaptcha detected (js required)${NC}" + exitDownloadError=true + failedRetryDownload "${remote_url}" "Recaptcha detected (js required)" "" + return 1 + else + continue + fi + else + rm -f "${anon_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| No download button found${NC}" + exitDownloadError=true + failedRetryDownload "${remote_url}" "No download button found" "" + return 1 + else + continue + fi + fi + done + echo -e "| Captcha countdown (10s)…" + sleep 10s + maxfetchretries=1 + echo -e "${GREEN}# Fetching download url…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + printf " _" + download_url="" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=Free+Download+%3E%3E&method_premium=&adblock_detected=&code=${captcha_code}" + response=$(tor_curl_request --insecure -L -s -X POST \ + -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ + --data "$form_data" "$fixed_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "anon_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" + fi + if [[ -z $response ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [3].${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'Whoo! No such file|No such file with this filename|File was deleted' <<< "$response"; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. 
It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Rate limited [2]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Rate limited [2]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'Just a moment...' <<< "$response"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [7].${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [7]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'Wrong captcha' <<< "$response"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link. (Wrong captcha)${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link. (Wrong captcha)" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi '.*$)' <<< "$response") + download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") + fi + if [[ -z "$download_url" ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [8]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [8]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + else + break + fi + done + rm -f "${anon_cookie_jar}"; + echo -e "${GREEN}# Fetching file info…${NC}" + maxfetchretries=3 + for ((j=1; j<=$maxfetchretries; j++)); do + printf " ." + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + GetRandomUA + file_header=$(tor_curl_request -i -s --head \ + --referer "${fixed_url}" \ + "$download_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "anon_head$j" "download_url: ${download_url}"$'\n'"${file_header}" + fi + if [[ -z $file_header ]] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + if [[ -z "$file_size_bytes" ]]; then + if [ $j == $maxfetchretries ] ; then + rm -f "${anon_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file size.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + break #Good to go here + done + touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + if [ ! "$filename_override" == "" ] ; then + filename="$filename_override" + else + filename="${download_url##*\/}" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + printf "\\n" + echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" + if [ -z $file_size_bytes ] ; then + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Filesize not found!" "" + fi + echo -e "${YELLOW}| Filesize not found… retry${NC}" + return 1 + else + file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" + fi + echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}" + file_path="${download_inflight_path}${filename}" + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then + return 1 + fi + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! 
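+#! The file-info step above reduces to one HEAD request plus a content-length parse.
+#! As a stand-alone sketch of that probe (illustrative only -- it reuses this repo's
+#! tor_curl_request helper and assumes $download_url is already resolved; the real
+#! flow above also rotates the tor identity and retries on failure):
+#!
+#!   probe_remote_size() {
+#!     local url="$1" hdr size
+#!     hdr=$(tor_curl_request --insecure -s --head "$url") || return 1
+#!     # Require a 200 before trusting any header values
+#!     grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< "$hdr" || return 1
+#!     size=$(grep -oPi '(?<=content-length: ).*' <<< "$hdr")
+#!     size=${size//[$'\t\r\n']}       # strip CR/LF from the raw header line
+#!     [[ -n "$size" ]] && printf '%s\n' "$size"
+#!   }
+#!
+#! The byte count it returns is what anon_GetFile compares against after download.
+#!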
+anon_GetFile() { + echo -e "${GREEN}# Downloading…" + echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n" + fileCnt=$1 + retryCnt=$2 + finalAttempt=$3 + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + for ((j=1; j<=$MaxDownloadRetries; j++)); do + pd_presize=0 + if [ -f "$file_path" ] ; then + pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + GetRandomUA + anon_host=$(grep -oPi '(?<=https://).*(?=/)' <<< "$fixed_url") + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure \ + --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ + --referer "${fixed_url}" \ + "$download_url" --continue-at - --output "$file_path" + else + tor_curl_request --insecure \ + -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ + --referer "${fixed_url}" \ + "$download_url" --continue-at - --output "$file_path" + fi + else + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure \ + --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ + --referer "${fixed_url}" \ + "$download_url" --continue-at - --output "$file_path" + else + tor_curl_request --insecure \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ + --referer "${fixed_url}" \ + "$download_url" --continue-at - --output "$file_path" + fi + fi + received_file_size=0 + if [ -f "$file_path" ] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + containsHtml=false + else + containsHtml=true + fi + downDelta=$(( received_file_size - pd_presize )) + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then + if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
+ rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then + if [ -f "$file_path" ] ; then + rm -rf "$file_path" + fi + echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then + echo -e "\n${RED}Download failed, file is incomplete.${NC}" + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + else + break + fi + done + rm -f "$flockDownload"; + rm -f "${anon_cookie_jar}"; + ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" + return 0 +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/ateasystems.sh b/hosts/ateasystems.sh new file mode 100644 index 0000000..6b4fa47 --- /dev/null +++ b/hosts/ateasystems.sh @@ -0,0 +1,265 @@ +#! Name: ateasystems.sh +#! Author: kittykat +#! Version: 2024.12.24 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='atea' +HostNick='atea' +HostFuncPrefix='atea' +HostUrls='share.ateasystems.com' +HostDomainRegex='^(http|https)://(.*\.)?share\.ateasystems\.com/share/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! 
------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! _DownloadFile() +atea_DownloadFile() { + local remote_url=${1} + local file_url=${1} + local filecnt=${2} + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + download_inflight_path="${WorkDir}/.inflight/" + mkdir -p "$download_inflight_path" + completed_location="${WorkDir}/downloads/" + tor_identity="${RANDOM}" + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [ $z -eq $MaxUrlRetries ] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if atea_FetchFileInfo $finalAttempt && atea_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then + return 0 + elif [ $z -lt $MaxUrlRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! +atea_FetchFileInfo() { + finalAttempt=$1 + maxfetchretries=6 + echo -e "${GREEN}# Fetching post info…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + printf " ." + tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s "$remote_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "atea_fetch$i" "${response}" + fi + if [[ -z $response ]] ; then + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [1]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|
.*$)' <<< "$response") + codeline=$(grep -oP -m 1 '(?<=)' <<< "$response") + pval1=$(grep -oP -m 1 '\K.*?(?=)' <<< "$codeline" ) + if ((pval1 <= 0)); then + pval1=$(grep -oP -m 1 '\K.*?(?=)' <<< "$codeline" ) + fi + pval2=$(grep -oP -m 1 '\K.*?(?=)' <<< "$codeline" ) + pval3=$(grep -oP -m 1 '\K.*?(?=)' <<< "$codeline" ) + pval4=$(grep -oP -m 1 '\K.*?(?=)' <<< "$codeline" ) + val1=$((pval1-0)); val2=$((pval2-0)); val3=$((pval3-0)); val4=$((pval4-0)) + captcha_code="${val1}${val2}${val3}${val4}" + if grep -Eqi '-' <<< "$captcha_code"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${atea_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Bad captcha code [2]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Bad captcha code [2]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + fi + if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then + echo -e "${GREEN}| Post link found.${NC}" + post_action=$(grep -oP '(?<=Form name="F1" method="POST" action=").*(?=" onSubmit.*$)' <<< "$response") + post_act=$(grep -oP '(?<=input type="hidden" name="act" value=").*(?=">.*$)' <<< "$response") + post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response") + post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">.*$)' <<< "$response") + post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">.*$)' <<< "$response") + post_fname=$(urlencode_literal_grouped_case "${post_fname}") + post_action="${post_action//[$'\t\r\n']}" + if [ "$filename_override" == "" ]; then + filename=$(urlencode_literal_grouped_case "${post_fname}") + fi + else + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [2]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" + fi + return 1 + else + continue + fi + fi + if [[ -z "$post_action" ]] || [[ -z "$post_act" ]] || [[ -z "$post_id" ]] || [[ -z "$post_sc" ]] || \ + [[ -z "$post_fname" ]] || [[ -z "$post_rand" ]] || [[ -z "$captcha_code" ]] ; then + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [3]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" + fi + return 1 + else + continue + fi + else + break + fi + done + form_data="act=${post_act}&id=${post_id}&fname=${post_fname}&rand=${post_rand}&sc=${post_sc}&code=${captcha_code}&btn=Download+File" + echo -e "| Captcha countdown (3s)…" + sleep 3s + touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + if [ ! "$filename_override" == "" ] ; then + filename="$filename_override" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + printf "\\n" + echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" + echo -e "${YELLOW}| File size:${NC}\tUnknown${NC}" + file_path="${download_inflight_path}${filename}" + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! 
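+#! A note on the captcha handling in step (2) above: the page renders a 4-digit code
+#! as positioned span elements, and the four grep calls pull one value per span. A
+#! condensed sketch of the idea (hedged: the exact span markup was stripped from this
+#! log; the dashfile host further below uses HTML entities and subtracts 48 to map
+#! &#48;..&#57; onto 0..9, while this host uses its values as-is, and real pages may
+#! also require ordering the spans by their padding-left offsets):
+#!
+#!   decode_span_captcha() {
+#!     local line="$1" out="" p
+#!     # Collect every entity-encoded digit, e.g. &#52; -> 52 -> "4"
+#!     while read -r p; do
+#!       out+="$((p-48))"
+#!     done < <(grep -oP '&#\K[0-9]+(?=;)' <<< "$line")
+#!     printf '%s\n' "$out"
+#!   }
+#!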
+atea_GetFile() {
+ echo -e "${GREEN}# Downloading…\t${BLUE}(No Resume)${NC}"
+ echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
+ fileCnt=$1
+ retryCnt=$2
+ finalAttempt=$3
+ flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+ GetRandomUA
+ if [ -f "$file_path" ]; then
+ rm -f "$file_path"
+ fi
+ CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+ trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+ if [ "${UseTorCurlImpersonate}" == "true" ]; then
+ if [ "${RateMonitorEnabled}" == "true" ]; then
+ tor_curl_request --insecure \
+ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+ --data "$form_data" "$post_action" \
+ --output "$file_path"
+ else
+ tor_curl_request --insecure \
+ --data "$form_data" "$post_action" \
+ --output "$file_path"
+ fi
+ else
+ if [ "${RateMonitorEnabled}" == "true" ]; then
+ tor_curl_request --insecure \
+ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+ -H "User-Agent: $RandomUA" \
+ --data "$form_data" "$post_action" \
+ --output "$file_path"
+ else
+ tor_curl_request --insecure \
+ -H "User-Agent: $RandomUA" \
+ --data "$form_data" "$post_action" \
+ --output "$file_path"
+ fi
+ fi
+ received_file_size=0
+ if [ -f "$file_path" ] ; then
+ received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+ fi
+ if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+ containsHtml=false
+ else
+ containsHtml=true
+ fi
+ if [ "$containsHtml" == "true" ]; then
+ if [ -f "$file_path" ] ; then
+ rm -rf "$file_path"
+ fi
+ echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+ rm -f $flockDownload;
+ if [ "${finalAttempt}" == "true" ] ; then
+ droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+ fi
+ return 1
+ fi
+ rm -f "$flockDownload";
+ ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
+ return 0
+}
+#!
+#! --------------- Host Extra Functions ------------------- #
+#!
diff --git a/hosts/cyssoux.sh b/hosts/cyssoux.sh
new file mode 100644
index 0000000..16e03f6
--- /dev/null
+++ b/hosts/cyssoux.sh
@@ -0,0 +1,31 @@
+#! Name: cyssoux.sh
+#! Author: kittykat
+#! Version: 2024.12.23
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#! HostDomainRegex: The regex used to verify matching urls
+HostCode='cyx'
+HostNick='cyssoux'
+HostFuncPrefix='direct'
+HostUrls='partage.cyssoux.fr'
+HostDomainRegex='^(http|https)://(.*\.)?partage\.cyssoux\.fr/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#!
This is a direct= download host, so all the functions are already in mad.sh +#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will +#! call the direct_DownloadFile() function already in mad.sh diff --git a/hosts/dashfile.sh b/hosts/dashfile.sh new file mode 100644 index 0000000..a4c549e --- /dev/null +++ b/hosts/dashfile.sh @@ -0,0 +1,618 @@ +#! Name: dashfile.sh +#! Author: kittykat +#! Version: 2024.12.25 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='dash' +HostNick='dashfile' +HostFuncPrefix='dash' +HostUrls='dashfile.net' +HostDomainRegex='^(http|https)://(.*\.)?dashfile\.net/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! _DownloadFile() +dash_DownloadFile() { + local remote_url=${1} + local file_url=${1} + local filecnt=${2} + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + download_inflight_path="${WorkDir}/.inflight/" + mkdir -p "$download_inflight_path" + completed_location="${WorkDir}/downloads/" + tor_identity="${RANDOM}" + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [ $z -eq $MaxUrlRetries ] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if dash_FetchFileInfo $finalAttempt && dash_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then + return 0 + elif [ $z -lt $MaxUrlRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! 
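+#! dash_DownloadFile above follows the same retry contract as every host in this repo;
+#! condensed to its skeleton (details such as flock cleanup and traps omitted):
+#!
+#!   for ((z=0; z<=$MaxUrlRetries; z++)); do
+#!     [ $z -eq $MaxUrlRetries ] && finalAttempt="true"
+#!     # Both phases must succeed; GetFile is only entered with valid post info
+#!     if dash_FetchFileInfo $finalAttempt && dash_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
+#!       return 0
+#!     fi
+#!     # Unrecoverable states (removed file, js captcha) break out early;
+#!     # anything else rotates the tor identity and retries after a 3s pause.
+#!   done
+#!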
+dash_FetchFileInfo() { + finalAttempt=$1 + maxfetchretries=6 + dash_cookie_jar="" + echo -e "${GREEN}# Fetching post info…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + dash_cookie_jar=$(mktemp "${WorkDir}/.temp/dash_cookies""${instance_no}"".XXXXXX") + printf " ." + tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dash_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ + -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ + "$remote_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "dash_fetch$i" "${response}" + fi + if [[ -z $response ]] ; then + rm -f "${dash_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [1]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|
' <<< "$response"; then + printf "\\n" + echo -e "${GREEN}| Post link found.${NC}" + post_op=$(grep -oP '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response") + post_ul=$(grep -oP '(?<=input type="hidden" name="usr_login" value=").*(?=">)' <<< "$response") + post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response") + post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$response") + post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") + if [ "$filename_override" == "" ]; then + filename="$post_fname" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + post_fname=$(urlencode_literal_grouped_case "${post_fname}") + else + rm -f "${dash_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [2].${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" + fi + return 1 + else + continue + fi + fi + if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]]; then + rm -f "${dash_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [3]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" + fi + return 1 + else + continue + fi + else + break + fi + done + local captcha_code="" + echo -e "${GREEN}# Fetching captcha…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + printf " …" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dash_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + form_data="op=${post_op}&usr_login=${post_ul}&id=${post_id}&fname=${post_fname}&referer=&method_free=Free+Download" + response=$(tor_curl_request --insecure -L -s -X POST \ + -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ + --data "$form_data" "$remote_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "dash_post1_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" + fi + if [[ -z $response ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [4]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [4]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'No such file with this filename|File was deleted|
)' <<< "$response") + pval1=$(grep -oP -m 1 '&#\K.*?(?=;)' <<< "$codeline" ) + if ((pval1 <= 0)); then + pval1=$(grep -oP -m 1 '&#\K.*?(?=;)' <<< "$codeline" ) + fi + pval2=$(grep -oP -m 1 '&#\K.*?(?=;)' <<< "$codeline" ) + pval3=$(grep -oP -m 1 '&#\K.*?(?=;)' <<< "$codeline" ) + pval4=$(grep -oP -m 1 '&#\K.*?(?=;)' <<< "$codeline" ) + val1=$((pval1-48)); val2=$((pval2-48)); val3=$((pval3-48)); val4=$((pval4-48)) + captcha_code="${val1}${val2}${val3}${val4}" + if grep -Eqi 'name="method_free" value="Free Download">' <<< "$response"; then + printf "\\n" + echo -e "${GREEN}| Captcha found.${NC}" + post_op=$(grep -oP '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response") + post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response") + post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">)' <<< "$response") + post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") + post_referer=$(urlencode_literal_grouped_case_urlendingonly "$post_referer") + else + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [5]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [5]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi '-' <<< "$captcha_code"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Bad captcha code [2]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Bad captcha code [2]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if [[ -z "$captcha_code" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [6]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [6]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + else + break + fi + done + echo -e "| Captcha countdown (60s)…" + printf " " + for ((i=1; i<=12; i++)); do + sleep 5s + if ((i % 2 == 0)); then + printf "$((i * 5))" + else + printf ".." 
+ fi + done + echo -e "" + echo -e "${GREEN}# Fetching download url…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + printf " _" + download_url="" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dash_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=Free+Download&method_premium=&adblock_detected=&code=${captcha_code}" + response=$(tor_curl_request --insecure -L -s -X POST \ + -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ + --data "$form_data" "$remote_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "dash_post2_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" + fi + if [[ -z $response ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [7]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [7]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'No such file with this filename|File was deleted' <<< "$response"; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Rate limited [2]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Rate limited [2]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'Just a moment...' <<< "$response"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [8]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [8]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'https://.*dashfile.net.*$)' <<< "$response") + download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") + fi + if [[ -z "$download_url" ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [9]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [9]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + else + break + fi + done + echo -e "${GREEN}# Fetching file info…${NC}" + for ((j=1; j<=$maxfetchretries; j++)); do + printf " ." 
+ CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dash_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + GetRandomUA + file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "dash_head$j" "download_url: ${download_url}"$'\n'"${file_header}" + fi + if [[ -z $file_header ]] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + if [[ -z "$file_size_bytes" ]]; then + if [ $j == $maxfetchretries ] ; then + rm -f "${dash_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file size.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + break #Good to go here + done + touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + if [ ! "$filename_override" == "" ] ; then + filename="$filename_override" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + printf "\\n" + echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" + if [ -z $file_size_bytes ] ; then + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Filesize not found!" "" + fi + echo -e "${YELLOW}| Filesize not found… retry${NC}" + return 1 + else + file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" + fi + echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}" + file_path="${download_inflight_path}${filename}" + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then + return 1 + fi + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! 
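+#! dash_GetFile below leans on CheckNoHtml (defined in mad.sh, not shown in this log)
+#! to catch tor exit nodes that hand back an HTML error page instead of file bytes.
+#! As a rough illustration of that kind of guard -- a hypothetical helper, the real
+#! CheckNoHtml may inspect the file differently:
+#!
+#!   looks_like_html() {
+#!     local f="$1"
+#!     [ -f "$f" ] || return 1
+#!     # An HTML marker in the first 512 bytes of a supposed binary is a bad sign
+#!     head -c 512 "$f" | grep -qi '<html\|<!doctype' && return 0
+#!     return 1
+#!   }
+#!
+#! When a partial is tainted, truncating back to pd_presize (rather than deleting)
+#! keeps --continue-at resume viable on the next pass.
+#!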
+dash_GetFile() { + echo -e "${GREEN}# Downloading…" + echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n" + fileCnt=$1 + retryCnt=$2 + finalAttempt=$3 + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + for ((j=1; j<=$MaxDownloadRetries; j++)); do + pd_presize=0 + if [ -f "$file_path" ] ; then + pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + GetRandomUA + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dash_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure \ + --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ + "$download_url" --continue-at - --output "$file_path" + else + tor_curl_request --insecure \ + -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ + "$download_url" --continue-at - --output "$file_path" + fi + else + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure \ + --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + "$download_url" --continue-at - --output "$file_path" + else + tor_curl_request --insecure \ + -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + "$download_url" --continue-at - --output "$file_path" + fi + fi + received_file_size=0 + if [ -f "$file_path" ] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + containsHtml=false + else + containsHtml=true + fi + downDelta=$(( received_file_size - pd_presize )) + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then + if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
+ rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then + if [ -f "$file_path" ] ; then + rm -rf "$file_path" + fi + echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then + echo -e "\n${RED}Download failed, file is incomplete.${NC}" + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + else + break + fi + done + rm -f "$flockDownload"; + rm -f "${dash_cookie_jar}"; + ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" + return 0 +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/dictvm.sh b/hosts/dictvm.sh new file mode 100644 index 0000000..40eb9aa --- /dev/null +++ b/hosts/dictvm.sh @@ -0,0 +1,41 @@ +#! Name: dictvm.sh +#! Author: kittykat +#! Version: 2024.12.12 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='dict' +HostNick='dictvm.org' +HostFuncPrefix='dict' +HostUrls='dictvm.org' +HostDomainRegex='^(http|https)://(.*\.)?upload\.dictvm\.org/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! 
+#! This host is served by the direct download handler already in mad.sh.
+#! The HostFuncPrefix here is 'dict', so the wrapper below only normalizes the url
+#! (appending &p=1 when missing) before delegating to direct_DownloadFile() in mad.sh
+dict_DownloadFile() {
+ local pUrl="$1"
+ local pFileCnt="$2"
+ local pFileUrl="$pUrl"
+ if ! grep -Eqi '&p=1$' <<< "$pUrl" ; then
+ pFileUrl="${pUrl}&p=1"
+ echo -e "[${BLUE}ModifiedUrl${NC}]: ${pFileUrl}"
+ fi
+ direct_DownloadFile "$pUrl" "$pFileCnt" "$pFileUrl"
+}
diff --git a/hosts/eddowding.sh b/hosts/eddowding.sh
new file mode 100644
index 0000000..73ced5e
--- /dev/null
+++ b/hosts/eddowding.sh
@@ -0,0 +1,41 @@
+#! Name: eddowding.sh
+#! Author: kittykat
+#! Version: 2024.12.12
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#! HostDomainRegex: The regex used to verify matching urls
+HostCode='edd'
+HostNick='eddowding.com'
+HostFuncPrefix='edd'
+HostUrls='files.eddowding.com'
+HostDomainRegex='^(http|https)://(.*\.)?files\.eddowding\.com/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! This host is served by the direct download handler already in mad.sh.
+#! The HostFuncPrefix here is 'edd', so the wrapper below only normalizes the url
+#! (appending &p=1 when missing) before delegating to direct_DownloadFile() in mad.sh
+edd_DownloadFile() {
+ local pUrl="$1"
+ local pFileCnt="$2"
+ local pFileUrl="$pUrl"
+ if ! grep -Eqi '&p=1$' <<< "$pUrl" ; then
+ pFileUrl="${pUrl}&p=1"
+ echo -e "[${BLUE}ModifiedUrl${NC}]: ${pFileUrl}"
+ fi
+ direct_DownloadFile "$pUrl" "$pFileCnt" "$pFileUrl"
+}
diff --git a/hosts/fileblade.sh b/hosts/fileblade.sh
new file mode 100644
index 0000000..4ddae54
--- /dev/null
+++ b/hosts/fileblade.sh
@@ -0,0 +1,567 @@
+#! Name: fileblade.sh
+#! Author: kittykat
+#! Version: 2024.12.20
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!
HostDomainRegex: The regex used to verify matching urls +HostCode='fb' +HostNick='fileblade' +HostFuncPrefix='fb' +HostUrls='fileblade.com' +HostDomainRegex='^(http|https)://(.*\.)?fileblade\.com' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! _DownloadFile() +fb_DownloadFile() { + local remote_url=${1} + local file_url=${1} + local filecnt=${2} + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + download_inflight_path="${WorkDir}/.inflight/" + mkdir -p "$download_inflight_path" + completed_location="${WorkDir}/downloads/" + tor_identity="${RANDOM}" + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [ $z -eq $MaxUrlRetries ] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if fb_FetchFileInfo $finalAttempt && fb_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then + return 0 + elif [ $z -lt $MaxUrlRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! +fb_FetchFileInfo() { + finalAttempt=$1 + maxfetchretries=5 + fb_cookie_jar="" + echo -e "${GREEN}# Fetching download1…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + fb_cookie_jar=$(mktemp "${WorkDir}/.temp/fb_cookies""${instance_no}"".XXXXXX") + printf " ." 
+ tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${fb_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" "$remote_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "fb_dwnpage$i" "${response}" + fi + if [[ -z $response ]] ; then + rm -f "${fb_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi "Sorry, you are banned" <<< "$response"; then + rm -f "${fb_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi "File was removed|There is no such file|File was deleted|File not found" <<< "$response"; then + rm -f "${fb_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'input type="hidden" name="op" value="' <<< "$response"; then + printf "\\n" + echo -e "${GREEN}| Post link found.${NC}" + post_action="${remote_url}" + post_op=$(grep -oPi -m 1 '(?<=input type="hidden" name="op" value=").*(?=">.*$)' <<< "$response") + post_usr_login=$(grep -oPi -m 1 '(?<=input type="hidden" name="usr_login" value=").*(?=">.*$)' <<< "$response") + post_id=$(grep -oPi -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response") + post_fname=$(grep -oPi -m 1 '(?<=input type="hidden" name="fname" value=").*(?=">.*$)' <<< "$response") + post_referer=$(grep -oPi -m 1 '(?<=input type="hidden" name="referer" value=").*(?=">.*$)' <<< "$response") + post_action=$(urlencode_literal_grouped_case_urlendingonly "${post_action}") + fi + if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]] ; then + rm -f "${fb_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [2].${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" + fi + return 1 + else + continue + fi + else + break + fi + done + echo -e "| Download countdown (10s)…" + sleep 10s + echo -e "${GREEN}# Fetching download2…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + printf " _" + download_url="" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $fb_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15 + form_data="op=$post_op&usr_login=$post_usr_login&id=$post_id&fname=$post_fname&referer=$post_referer&method_free=method_free" + response=$(tor_curl_request --insecure -L -s -X POST \ + -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ + --data "$form_data" "$post_action") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "fb_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" + fi + if [[ -z $response ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${fb_cookie_jar}"; + printf "\\n" + 
echo -e "${RED}| Failed to extract download link [3]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'No such file with this filename|File was deleted|File not found' <<< "$response"; then + rm -f "${fb_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'The file owner does not allow FREE users to download files which are over 100 MB' <<< "$response"; then + rm -f "${fb_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Pro download only. (Free users not allowed download > 100MB)${NC}" + exitDownloadError=true + failedRetryDownload "${remote_url}" "Pro download only. [Free users not allowed download over 100MB]" "" + return 1 + fi + if grep -Eqi 'Just a moment...' <<< "$response"; then + if [ $i == $maxfetchretries ] ; then + rm -f "${fb_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [3].${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + tor_identity="${RANDOM}" + continue + fi + fi + if grep -Eqi 'input type="hidden" name="op" value="' <<< "$response"; then + printf "\\n" + echo -e "${GREEN}| Post link found.${NC}" + post_action="${remote_url}" + post_op=$(grep -oPi -m 1 '(?<=input type="hidden" name="op" value=").*(?=">.*$)' <<< "$response") + post_id=$(grep -oPi -m 1 '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response") + post_rand=$(grep -oPi -m 1 '(?<=input type="hidden" name="rand" value=").*(?=">.*$)' <<< "$response") + post_referer=$(grep -oPi -m 1 '(?<=input type="hidden" name="referer" value=").*(?=">.*$)' <<< "$response") + post_action=$(urlencode_literal_grouped_case_urlendingonly "${post_action}") + post_referer=$(urlencode_literal_grouped_case "${post_referer}") + fi + if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then + rm -f "${fb_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [3].${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" + fi + return 1 + else + continue + fi + else + break + fi + done + echo -e "${GREEN}# Fetching download url…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + printf " _" + download_url="" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $fb_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15 + form_data="op=$post_op&rand=$post_rand&id=$post_id&referer=$post_referer&method_free=method_free&method_premium=&adblock_detected=0" + response=$(tor_curl_request --insecure -L -s -X POST \ + -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ + --data "$form_data" "$post_action") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "fb_post2" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" + fi + if [[ -z $response ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${fb_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract download link [4].${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + 
failedRetryDownload "${remote_url}" "Failed to extract download link [4]" ""
+          fi
+          return 1
+        else
+          tor_identity="${RANDOM}"
+          continue
+        fi
+      fi
+      if grep -Eqi 'No such file with this filename|File was deleted|File not found' <<< "$response"; then
+        rm -f "${fb_cookie_jar}";
+        printf "\\n"
+        echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
+        exitDownloadError=true
+        removedDownload "${remote_url}"
+        return 1
+      fi
+      if grep -Eqi 'Just a moment...' <<< "$response"; then
+        if [ $i == $maxfetchretries ] ; then
+          rm -f "${fb_cookie_jar}";
+          printf "\\n"
+          echo -e "${RED}| Failed to extract download link [5].${NC}"
+          warnAndRetryUnknownError=true
+          if [ "${finalAttempt}" == "true" ] ; then
+            failedRetryDownload "${remote_url}" "Failed to extract download link [5]" ""
+          fi
+          return 1
+        else
+          tor_identity="${RANDOM}"
+          continue
+        fi
+      fi
+      if ! grep -Eqi ' $flockDownload
+}
+#!
+#! ----------- (3) Fetch File / Download File Function --------------- #
+#!
+fb_GetFile() {
+  echo -e "${GREEN}# Downloading…${NC}"
+  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
+  fileCnt=$1
+  retryCnt=$2
+  finalAttempt=$3
+  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+  for ((j=1; j<=$MaxDownloadRetries; j++)); do
+    pd_presize=0
+    if [ -f "$file_path" ] ; then
+      pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+    fi
+    GetRandomUA
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${fb_cookie_jar}"; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if [ "${UseTorCurlImpersonate}" == "true" ]; then
+      if [ "${RateMonitorEnabled}" == "true" ]; then
+        tor_curl_request --insecure -L \
+          --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+          "$download_url" --continue-at - --output "$file_path"
+      else
+        tor_curl_request --insecure -L \
+          "$download_url" --continue-at - --output "$file_path"
+      fi
+    else
+      if [ "${RateMonitorEnabled}" == "true" ]; then
+        tor_curl_request --insecure \
+          --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+          -H "User-Agent: $RandomUA" \
+          -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
+          -H "Accept-Language: en-US,en;q=0.5" \
+          -H "Accept-Encoding: gzip, deflate, br" \
+          -H "Connection: keep-alive" \
+          -H "Cookie: lng=eng" \
+          -H "Upgrade-Insecure-Requests: 1" \
+          -H "Sec-Fetch-Dest: document" \
+          -H "Sec-Fetch-Mode: navigate" \
+          -H "Sec-Fetch-Site: same-origin" \
+          -H "Sec-Fetch-User: ?1" \
+          "$download_url" --continue-at - --output "$file_path"
+      else
+        tor_curl_request --insecure \
+          -H "User-Agent: $RandomUA" \
+          -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
+          -H "Accept-Language: en-US,en;q=0.5" \
+          -H "Accept-Encoding: gzip, deflate, br" \
+          -H "Connection: keep-alive" \
+          -H "Cookie: lng=eng" \
+          -H "Upgrade-Insecure-Requests: 1" \
+          -H "Sec-Fetch-Dest: document" \
+          -H "Sec-Fetch-Mode: navigate" \
+          -H "Sec-Fetch-Site: same-origin" \
+          -H "Sec-Fetch-User: ?1" \
+          "$download_url" --continue-at - --output "$file_path"
+      fi
+    fi
+    received_file_size=0
+    if [ -f "$file_path" ] ; then
+      received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+    fi
+    if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+      containsHtml=false
+    else
+      containsHtml=true
+    fi
+    downDelta=$(( received_file_size - pd_presize ))
+    if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
+      if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
+        if [ -f "${file_path}" ] ; then
+          if ((pd_presize > 0)); then
+            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
+            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
+            truncate -s $pd_presize "${file_path}"
+          else
+            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+            rm -f "${file_path}"
+          fi
+        fi
+        if ((j >= $MaxDownloadRetries)) ; then
+          rm -f "$flockDownload";
+          if [ "${finalAttempt}" == "true" ] ; then
+            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+          fi
+          return 1
+        else
+          continue
+        fi
+      elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
+        if [ -f "${file_path}" ] ; then
+          if ((pd_presize > 0)); then
+            echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
+            truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
+            truncate -s $pd_presize "${file_path}"
+          else
+            echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+            rm -f "${file_path}"
+          fi
+        fi
+        if ((j >= $MaxDownloadRetries)) ; then
+          rm -f "$flockDownload";
+          if [ "${finalAttempt}" == "true" ] ; then
+            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+          fi
+          return 1
+        else
+          continue
+        fi
+      elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
+        if [ -f "$file_path" ] ; then
+          rm -rf "$file_path"
+        fi
+        echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+        if ((j >= $MaxDownloadRetries)) ; then
+          rm -f "$flockDownload";
+          if [ "${finalAttempt}" == "true" ] ; then
+            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+          fi
+          return 1
+        else
+          continue
+        fi
+      fi
+      if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
+        echo -e "\n${RED}Download failed, file is incomplete.${NC}"
+        if ((j >= $MaxDownloadRetries)) ; then
+          rm -f "$flockDownload";
+          if [ "${finalAttempt}" == "true" ] ; then
+            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+          fi
+          return 1
+        else
+          continue
+        fi
+      fi
+    else
+      break
+    fi
+  done
+  rm -f "$flockDownload";
+  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
+  return 0
+}
+#!
+#! --------------- Host Extra Functions ------------------- #
+#!
diff --git a/hosts/freesocial.sh b/hosts/freesocial.sh
new file mode 100644
index 0000000..ed9770e
--- /dev/null
+++ b/hosts/freesocial.sh
@@ -0,0 +1,31 @@
+#! Name: freesocial.sh
+#! Author: kittykat
+#! Version: 2024.12.23
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#!   HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!   HostDomainRegex: The regex used to verify matching urls
+HostCode='frso'
+HostNick='freesocial'
+HostFuncPrefix='direct'
+HostUrls='files.freesocial.co'
+HostDomainRegex='^(http|https)://(.*\.)?files\.freesocial\.co/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! This is a direct= download host, so all the functions are already in mad.sh
+#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
+#! call the direct_DownloadFile() function already in mad.sh
diff --git a/hosts/gofile.sh b/hosts/gofile.sh
index f939618..e4c8de8 100644
--- a/hosts/gofile.sh
+++ b/hosts/gofile.sh
@@ -279,6 +279,7 @@ gofile_FetchFileInfo() {
     fi
     if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then
       if [ $j == $maxfetchretries ] ; then
+        rm -f "${gofile_cookie_jar}";
         printf "\\n"
         echo -e "${RED}| Failed to extract file info${NC}"
         warnAndRetryUnknownError=true
diff --git a/hosts/herbolistique.sh b/hosts/herbolistique.sh
new file mode 100644
index 0000000..6948629
--- /dev/null
+++ b/hosts/herbolistique.sh
@@ -0,0 +1,31 @@
+#! Name: herbolistique.sh
+#! Author: kittykat
+#! Version: 2024.12.23
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#!   HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!   HostDomainRegex: The regex used to verify matching urls
+HostCode='herb'
+HostNick='herbolistique'
+HostFuncPrefix='direct'
+HostUrls='transfert.herbolistique.com'
+HostDomainRegex='^(http|https)://(.*\.)?transfert\.herbolistique\.com/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! This is a direct= download host, so all the functions are already in mad.sh
+#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
+#! call the direct_DownloadFile() function already in mad.sh
diff --git a/hosts/innocent.sh b/hosts/innocent.sh
index 228d046..523bc34 100644
--- a/hosts/innocent.sh
+++ b/hosts/innocent.sh
@@ -1,6 +1,6 @@
 #! Name: innocent.sh
 #! Author: kittykat
-#! Version: 2024.11.17
+#! Version: 2024.11.29
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -94,10 +94,10 @@ inno_FetchFileInfo() {
     tor_identity="${RANDOM}"
     if ((j % 1 == 0)); then
       printf "| Retrieving Head: attempt #$j"
-      file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
+      file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
     elif ((j % 2 == 0)); then
       printf "| Retrieving Head (Get): attempt #$j"
-      file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
+      file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
         -H "Connection: keep-alive" \
         -w 'EffectiveUrl=%{url_effective}' \
         "$download_url")
@@ -208,10 +208,10 @@ inno_GetFile() {
     CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
     trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
     if [ ! -z $file_size_bytes ] ; then
-      tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
+      tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path"
     else
       echo -e "${BLUE}| No Resume Fetch${NC}"
-      tor_curl_request --insecure "$download_url" --output "$file_path"
+      tor_curl_request_extended --insecure "$download_url" --output "$file_path"
     fi
     received_file_size=0
     if [ -f "$file_path" ] ; then
diff --git a/hosts/isupload.sh b/hosts/isupload.sh
new file mode 100644
index 0000000..cfe7268
--- /dev/null
+++ b/hosts/isupload.sh
@@ -0,0 +1,489 @@
+#! Name: isupload.sh
+#! Author: kittykat
+#! Version: 2024.12.20
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#!   HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!   HostDomainRegex: The regex used to verify matching urls
+HostCode='isup'
+HostNick='isupload'
+HostFuncPrefix='isup'
+HostUrls='isupload.com'
+HostDomainRegex='^(http|https)://(.*\.)?isupload\.com'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! @REQUIRED: Host Main Download function
+#!   Must be named specifically as such:
+#!   <HostFuncPrefix>_DownloadFile()
+isup_DownloadFile() {
+  local remote_url=${1}
+  local file_url=${1}
+  local filecnt=${2}
+  warnAndRetryUnknownError=false
+  exitDownloadError=false
+  exitDownloadNotAvailable=false
+  fileAlreadyDone=false
+  download_inflight_path="${WorkDir}/.inflight/"
+  mkdir -p "$download_inflight_path"
+  completed_location="${WorkDir}/downloads/"
+  tor_identity="${RANDOM}"
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUrlRetries; z++)); do
+    if [ $z -eq $MaxUrlRetries ] ; then
+      finalAttempt="true"
+    fi
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if isup_FetchFileInfo $finalAttempt && isup_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
+      return 0
+    elif [ $z -lt $MaxUrlRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
+        fi
+      fi
+      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+}
+#!
+#! ------------- (2) Fetch File Info Function ----------------- #
+#!
+isup_FetchFileInfo() {
+  finalAttempt=$1
+  maxfetchretries=5
+  isup_cookie_jar=""
+  echo -e "${GREEN}# Fetching download link…${NC}"
+  for ((i=1; i<=$maxfetchretries; i++)); do
+    mkdir -p "${WorkDir}/.temp"
+    isup_cookie_jar=$(mktemp "${WorkDir}/.temp/isup_cookies""${instance_no}"".XXXXXX")
+    printf " ."
+    tor_identity="${RANDOM}"
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${isup_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url")
+    if [ "${DebugAllEnabled}" == "true" ] ; then
+      debugHtml "${remote_url##*/}" "isup_dwnpage$i" "${response}"
+    fi
+    if [[ -z $response ]] ; then
+      rm -f "${isup_cookie_jar}";
+      if [ $i == $maxfetchretries ] ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link.${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    if grep -Eqi "Sorry, you are banned" <<< "$response"; then
+      rm -f "${isup_cookie_jar}";
+      if [ $i == $maxfetchretries ] ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link.${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    if grep -Eqi "File was removed|There is no such file|File was deleted|File not found" <<< "$response"; then
+      rm -f "${isup_cookie_jar}";
+      printf "\\n"
+      echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
+      exitDownloadError=true
+      removedDownload "${remote_url}"
+      return 1
+    fi
+    if grep -Eqi 'input type="hidden" name="op" value="' <<< "$response"; then
+      printf "\\n"
+      echo -e "${GREEN}| Post link found.${NC}"
+      post_action="${remote_url//https:/http:}"
+      post_op=$(grep -oPi '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response")
+      post_id=$(grep -oPi '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response")
+      post_rand=$(grep -oPi '(?<=input type="hidden" name="rand" value=").*(?=">)' <<< "$response")
+      post_referer=$(grep -oPi '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response")
+      post_action=$(urlencode_literal_grouped_case_urlendingonly "${post_action}")
+    fi
+    if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then
+      rm -f "${isup_cookie_jar}";
+      if [ $i == $maxfetchretries ] ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link [2].${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to extract download link [2]" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    else
+      break
+    fi
+  done
+  echo -e "${GREEN}# Fetching download url…${NC}"
+  for ((i=1; i<=$maxfetchretries; i++)); do
+    printf " _"
+    download_url=""
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; rm -f $isup_cookie_jar; tput cnorm; exit" 0 1 2 3 6 15
+    form_data="op=$post_op&id=$post_id&rand=$post_rand&referer=$post_referer&method_free=&method_premium="
+    response=$(tor_curl_request_extended --insecure -L -s -X POST \
+      -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" \
+      --data "$form_data" "$post_action")
+    if [ "${DebugAllEnabled}" == "true" ] ; then
+      debugHtml "${remote_url##*/}" "isup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
+    fi
+    if [[ -z $response ]] ; then
+      if [ $i == $maxfetchretries ] ; then
+        rm -f "${isup_cookie_jar}";
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link [3].${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "" ""
+        fi
+        return 1
+      else
+        tor_identity="${RANDOM}"
+        continue
+      fi
+    fi
+    if grep -Eqi 'No such file with this filename|File was deleted|File not found' <<< "$response"; then
+      rm -f "${isup_cookie_jar}";
+      printf "\\n"
+      echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}"
+      exitDownloadError=true
+      removedDownload "${remote_url}"
+      return 1
+    fi
+    if grep -Eqi 'Just a moment...' <<< "$response"; then
+      if [ $i == $maxfetchretries ] ; then
+        rm -f "${isup_cookie_jar}";
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link [4].${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "" ""
+        fi
+        return 1
+      else
+        tor_identity="${RANDOM}"
+        continue
+      fi
+    fi
+    if ! grep -Eqi '.*$)' <<< "$response")
+      download_url="${download_url//[$'\t\r\n']}"
+      download_url='http://isupload.com/cgi-bin/dl.cgi/'$(urlencode_literal_grouped_case_urlendingonly "$download_url")
+      break
+    fi
+  done
+  rm -f "${isup_cookie_jar}";
+  echo -e "${GREEN}# Fetching file info…${NC}"
+  maxretries=3
+  for ((j=1; j<=$maxretries; j++)); do
+    mkdir -p "${WorkDir}/.temp"
+    if ((j > 1)); then
+      tput rc; tput el;
+    fi
+    tput sc
+    tor_identity="${RANDOM}"
+    if ((j % 1 == 0)); then
+      printf "| Retrieving Head: attempt #$j"
+      file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
+    elif ((j % 2 == 0)); then
+      printf "| Retrieving Head (Get): attempt #$j"
+      file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \
+        -H "Connection: keep-alive" \
+        -w 'EffectiveUrl=%{url_effective}' \
+        "$download_url")
+    elif ((j % 3 == 0)); then
+      printf "| Retrieving Head (hack): attempt #$j"
+      rm -f "${WorkDir}/.temp/directhead"
+      file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
+        tee "${WorkDir}/.temp/directhead" &
+        sleep 6
+        [ -s "${WorkDir}/.temp/directhead" ]
+        kill $! 2>/dev/null
+      )
+      if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
+        touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+      fi
+      rm -f "${WorkDir}/.temp/directhead"
+    else
+      printf "| Retrieving Head: attempt #$j"
+      file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url")
+    fi
+    if [ "${DebugAllEnabled}" == "true" ] ; then
+      debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
+    fi
+    if [ ! -z "$file_header" ] ; then
+      if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
+        printf "\\n"
+        echo -e "${RED}| Not Found (404). The file has been removed.${NC}"
+        removedDownload "${remote_url}"
+        exitDownloadNotAvailable=true
+        return 1
+      fi
+      if ! grep -Eqi 'HTTP/.*200|HTTP/.*302' <<< "${file_header}" ; then
+        hResponse=$(grep -oPi 'HTTP/.* \K.*$' <<< "${file_header}")
+        if ((j>=$maxretries)); then
+          printf "\\n"
+          echo -e "${RED}| Unexpected header response ($hResponse).${NC}"
+          return 1
+        else
+          printf "\\n"
+          echo -e "${YELLOW}| Unexpected header response ($hResponse). Retrying...${NC}"
+          continue
+        fi
+      fi
+      # on 200/302, fall through and pull the file name and size from the header
+      if [ "$filename_override" == "" ] ; then
+        filename=${download_url##*/}
+      fi
+      if grep -Eqi 'Content-Length:' <<< "${file_header}" ; then
+        file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header")
+        file_size_bytes=${file_size_bytes//[$'\t\r\n']}
+        break
+      fi
+    else
+      if ((j>=$maxretries)); then
+        printf "\\n"
+        echo -e "${RED}| No response. Try again later.${NC}"
+        failedRetryDownload "${remote_url}" "" ""
+        exitDownloadNotAvailable=true
+        return 1
+      else
+        continue
+      fi
+    fi
+  done
+  printf "\\n"
+  touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
+  if [ ! "$filename_override" == "" ] ; then
+    filename="$filename_override"
+  fi
+  filename=$(sanitize_file_or_folder_name "${filename}")
+  if [ -z "$filename" ]; then
+    printf "\\n"
+    echo -e "${RED}| Unexpected or no header response.${NC}"
+    return 1
+  fi
+  if [ -z $file_size_bytes ] ; then
+    file_size_readable="${RED}Unknown filesize…${NC}"
+  else
+    file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
+  fi
+  echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
+  file_path="${download_inflight_path}${filename}"
+  echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
+  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+  if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
+    return 1
+  fi
+  echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
+}
+#!
+#! ----------- (3) Fetch File / Download File Function --------------- #
+#!
+isup_GetFile() {
+  echo -e "${GREEN}# Downloading…\t${BLUE}(No Resume)${NC}"
+  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
+  fileCnt=$1
+  retryCnt=$2
+  finalAttempt=$3
+  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+  for ((j=1; j<=$MaxDownloadRetries; j++)); do
+    pd_presize=0
+    if [ -f "$file_path" ] ; then
+      pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+    fi
+    GetRandomUA
+    if [ -z $file_size_bytes ] ; then
+      echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)"
+      CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+      trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+      tor_curl_request_extended --insecure -L "$download_url" --output "$file_path"
+      rc=$?
+      if [ $rc -ne 0 ] ; then
+        printf "${RED}Download Failed (bad exit status).${NC}"
+        if [ -f "${file_path}" ]; then
+          printf "${YELLOW} Partial removed...${NC}"
+          printf "\n\n"
+          rm -f "${file_path}"
+        else
+          printf "\n\n"
+        fi
+        if ((j >= $MaxDownloadRetries)) ; then
+          rm -f "$flockDownload";
+          if [ "${finalAttempt}" == "true" ] ; then
+            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+          fi
+          return 1
+        else
+          continue
+        fi
+      fi
+      if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+        containsHtml=false
+      else
+        containsHtml=true
+      fi
+      if [ "$containsHtml" == "true" ]; then
+        echo -e "${YELLOW}Download Failed (contains html)${NC} partial removed..."
+        rm -f "${file_path}"
+        if ((j >= $MaxDownloadRetries)) ; then
+          rm -f "$flockDownload";
+          if [ "${finalAttempt}" == "true" ] ; then
+            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+          fi
+          return 1
+        else
+          continue
+        fi
+      fi
+      break
+    else
+      CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+      trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+      if [ "${RateMonitorEnabled}" == "true" ]; then
+        tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+      else
+        tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path"
+      fi
+      received_file_size=0
+      if [ -f "$file_path" ] ; then
+        received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+      fi
+      if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+        containsHtml=false
+      else
+        containsHtml=true
+      fi
+      downDelta=$(( received_file_size - pd_presize ))
+      if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
+        if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
+          if [ -f "${file_path}" ] ; then
+            if ((pd_presize > 0)); then
+              echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
+              truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
+              truncate -s $pd_presize "${file_path}"
+            else
+              echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+              rm -f "${file_path}"
+            fi
+          fi
+          if ((j >= $MaxDownloadRetries)) ; then
+            rm -f "$flockDownload";
+            if [ "${finalAttempt}" == "true" ] ; then
+              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+            fi
+            return 1
+          else
+            continue
+          fi
+        elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
+          if [ -f "${file_path}" ] ; then
+            if ((pd_presize > 0)); then
+              echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
+              truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
+              truncate -s $pd_presize "${file_path}"
+            else
+              echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+              rm -f "${file_path}"
+            fi
+          fi
+          if ((j >= $MaxDownloadRetries)) ; then
+            rm -f "$flockDownload";
+            if [ "${finalAttempt}" == "true" ] ; then
+              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+            fi
+            return 1
+          else
+            continue
+          fi
+        elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
+          if [ -f "$file_path" ] ; then
+            rm -rf "$file_path"
+          fi
+          echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+          if ((j >= $MaxDownloadRetries)) ; then
+            rm -f "$flockDownload";
+            if [ "${finalAttempt}" == "true" ] ; then
+              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+            fi
+            return 1
+          else
+            continue
+          fi
+        fi
+        if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
+          echo -e "\n${RED}Download failed, file is incomplete.${NC}"
+          if ((j >= $MaxDownloadRetries)) ; then
+            rm -f "$flockDownload";
+            if [ "${finalAttempt}" == "true" ] ; then
+              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+            fi
+            return 1
+          else
+            continue
+          fi
+        fi
+      else
+        break
+      fi
+    fi
+  done
+  rm -f "$flockDownload";
+  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
+  return 0
+}
+#!
+#! --------------- Host Extra Functions ------------------- #
+#!
diff --git a/hosts/mediafire.sh b/hosts/mediafire.sh
new file mode 100644
index 0000000..9f32515
--- /dev/null
+++ b/hosts/mediafire.sh
@@ -0,0 +1,319 @@
+#! Name: mediafire.sh
+#! Author: kittykat
+#! Version: 2024.12.15
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#!   HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!   HostDomainRegex: The regex used to verify matching urls
+HostCode='mfire'
+HostNick='mediafire'
+HostFuncPrefix='mfire'
+HostUrls='mediafire.com'
+HostDomainRegex='^(http|https)://(.*\.)?mediafire\.com/file/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! @REQUIRED: Host Main Download function
+#!   Must be named specifically as such:
+#!   <HostFuncPrefix>_DownloadFile()
+mfire_DownloadFile() {
+  local remote_url=${1}
+  local file_url=${1}
+  local filecnt=${2}
+  warnAndRetryUnknownError=false
+  exitDownloadError=false
+  exitDownloadNotAvailable=false
+  fileAlreadyDone=false
+  download_inflight_path="${WorkDir}/.inflight/"
+  mkdir -p "$download_inflight_path"
+  completed_location="${WorkDir}/downloads/"
+  if ! grep -Eqi '/file$' <<< "$remote_url" ; then
+    download_url="${remote_url}/file"
+  else
+    download_url="$remote_url"
+  fi
+  tor_identity="${RANDOM}"
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUrlRetries; z++)); do
+    if [ $z -eq $MaxUrlRetries ] ; then
+      finalAttempt="true"
+    fi
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if mfire_FetchFileInfo $finalAttempt && mfire_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
+      return 0
+    elif [ $z -lt $MaxUrlRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
+        fi
+      fi
+      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+}
+#!
+#! ------------- (2) Fetch File Info Function ----------------- #
+#!
+mfire_FetchFileInfo() {
+  echo -e "${GREEN}# Fetching download link…${NC}"
+  maxretries=3
+  for ((i=1; i<=$maxretries; i++)); do
+    mkdir -p "${WorkDir}/.temp"
+    mfire_cookie_jar=$(mktemp "${WorkDir}/.temp/mfire_cookies""${instance_no}"".XXXXXX")
+    printf " ."
+    GetRandomUA
+    tor_identity="${RANDOM}"
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${mfire_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    response=$(tor_curl_request --insecure -L -s \
+      -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
+      -H "Accept-Language: en-US,en;q=0.5" \
+      -H "Accept-Encoding: gzip, deflate, br" \
+      -H "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7" \
+      -H "Connection: keep-alive" \
+      -H "Upgrade-Insecure-Requests: 1" \
+      -H "Sec-Fetch-Dest: document" \
+      -H "Sec-Fetch-Mode: navigate" \
+      -H "Sec-Fetch-Site: none" \
+      -H "Sec-Fetch-User: ?1" \
+      "$download_url" | tr -d '\0')
+    response=$(echo "$response" | iconv -c -f UTF-8 -t ISO8859-1)
+    if [ "${DebugAllEnabled}" == "true" ] ; then
+      debugHtml "${remote_url##*/}" "mfire_fetch$i" "download_url: ${download_url}"$'\n'"${response}"
+    fi
+    if [[ -z $response ]] ; then
+      rm -f "${mfire_cookie_jar}";
+      if ((i == maxretries)) ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to get download link${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to get download link" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    if grep -Eqi 'aria-label="Download file"' <<< "$response"; then
+      printf "\\n"
+      echo -e "${GREEN}| Download link found${NC}"
+      subSearch=$(awk '/aria-label="Download file"/,//' <<< "$response")
+      download_url=$(grep -oP '(?<=href=").*(?=id=)' <<< "$subSearch")
+      download_url=${download_url%\"*}
+      break
+    else
+      rm -f "${mfire_cookie_jar}";
+      if ((i == maxretries)) ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to get download link${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to get download link" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+  done
+  printf "\\n"
+  echo -e "${GREEN}# Fetching file info…${NC}"
+  filename=""
+  file_size_bytes=""
+  if [ ! "$filename_override" == "" ] ; then
+    filename="$filename_override"
+  fi
+  for ((j=1; j<=$maxretries; j++)); do
+    printf " ."
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${mfire_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    file_header=$(tor_curl_request --insecure -L --head -s \
+      -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \
+      -H "Accept-Language: en-US,en;q=0.5" \
+      -H "Accept-Encoding: gzip, deflate, br" \
+      -H "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7" \
+      -H "Connection: keep-alive" \
+      -H "Upgrade-Insecure-Requests: 1" \
+      -H "Sec-Fetch-Dest: document" \
+      -H "Sec-Fetch-Mode: navigate" \
+      -H "Sec-Fetch-Site: none" \
+      -H "Sec-Fetch-User: ?1" \
+      -b "${mfire_cookie_jar}" -c "${mfire_cookie_jar}" \
+      "$download_url" | tr -d '\0')
+    if [ "${DebugAllEnabled}" == "true" ] ; then
+      debugHtml "${remote_url##*/}" "mfire_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}"
+    fi
+    if [[ -z $file_header ]] ; then
+      if ((j == maxretries)) ; then
+        rm -f "${mfire_cookie_jar}";
+        printf "\\n"
+        echo -e "${RED}| Failed to extract file info [1]${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to extract file info [1]" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then
+      if ((j == maxretries)) ; then
+        rm -f "${mfire_cookie_jar}";
+        printf "\\n"
+        echo -e "${RED}| Failed to extract file info [2]${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to extract file info [2]" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    if [ -z $filename ]; then
+      filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header")
+      if [ -z $filename ]; then
+        filename=$(grep -oPi '(?<=filename[*]=).*' <<< "$file_header")
+        filename=${filename//[$'\t\r\n']}
+      fi
+    fi
+    if [ -z $file_size_bytes ] ; then
+      file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
+      file_size_bytes=${file_size_bytes//[$'\t\r\n']}
+    fi
+    if [ -z $filename ] || [ -z $file_size_bytes ] ; then
+      if ((j == maxretries)) ; then
+        rm -f "${mfire_cookie_jar}";
+        printf "\\n"
+        echo -e "${RED}| Failed to extract file info [3]${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to extract file info [3]" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    break #Good to go here
+  done
+  rm -f "${mfire_cookie_jar}";
+  touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
+  if [ ! "$filename_override" == "" ] ; then
+    filename="$filename_override"
+  elif [ -z $filename ] ; then
+    filename=${download_url##*/}
+  fi
+  filename=$(sanitize_file_or_folder_name "${filename}")
+  if [ -z "$filename" ]; then
+    echo -e "${RED}| Unexpected or no header response.${NC}"
+    return 1
+  fi
+  if [ -z $file_size_bytes ] ; then
+    file_size_readable="${RED}Unknown filesize…${NC}"
+  else
+    file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
+  fi
+  printf "\\n"
+  echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
+  file_path="${download_inflight_path}${filename}"
+  echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
+  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+  if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then
+    return 1
+  fi
+  if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
+    return 1
+  fi
+  echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
+}
+#!
+#! ----------- (3) Fetch File / Download File Function --------------- #
+#!
+mfire_GetFile() {
+  echo -e "${GREEN}# Downloading…${NC}"
+  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
+  fileCnt=$1
+  retryCnt=$2
+  finalAttempt=$3
+  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+  for ((j=1; j<=$MaxDownloadRetries; j++)); do
+    pd_presize=0
+    if [ -f "$file_path" ] ; then
+      pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+    fi
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if [ ! -z $file_size_bytes ] ; then
+      tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path"
+    else
+      echo -e "${BLUE}| No Resume Fetch${NC}"
+      tor_curl_request_extended --insecure "$download_url" --output "$file_path"
+    fi
+    received_file_size=0
+    if [ -f "$file_path" ] ; then
+      received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+    fi
+    if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+      containsHtml=false
+    else
+      containsHtml=true
+    fi
+    if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
+      if [ -f "${file_path}" ] ; then
+        if ((pd_presize > 0)); then
+          echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
+          truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
+          truncate -s $pd_presize "${file_path}"
+        else
+          echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+          rm -f "${file_path}"
+        fi
+      fi
+      if ((j >= $MaxDownloadRetries)) ; then
+        rm -f "$flockDownload";
+        if [ "${finalAttempt}" == "true" ] ; then
+          droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    break
+  done
+  rm -f "$flockDownload";
+  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
+  return 0
+}
+#!
+#! --------------- Host Extra Functions ------------------- #
+#!
diff --git a/hosts/quax.sh b/hosts/quax.sh
index 5d07a98..c2351ea 100644
--- a/hosts/quax.sh
+++ b/hosts/quax.sh
@@ -1,6 +1,6 @@
 #! Name: quax.sh
 #! Author: kittykat
-#! Version: 2024.11.22
+#! Version: 2024.11.29
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -87,7 +87,7 @@ qx_FetchFileInfo() {
       debugHtml "${remote_url##*/}" "qx_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
     fi
     if [ ! -z "$file_header" ] ; then
-      if grep -Eqi '404 Not Found' <<< "${file_header}" ; then
+      if grep -Eqi '404 Not Found|HTTP.* 404' <<< "${file_header}" ; then
         echo -e "${RED}| The file has been removed (404).${NC}"
         removedDownload "${remote_url}"
         exitDownloadNotAvailable=true
diff --git a/hosts/ranoz.sh b/hosts/ranoz.sh
index ab49f5d..6a5f680 100644
--- a/hosts/ranoz.sh
+++ b/hosts/ranoz.sh
@@ -1,6 +1,6 @@
 #! Name: ranoz.sh
 #! Author: kittykat
-#! Version: 2024.11.06
+#! Version: 2024.12.09
 #! Desc: Add support for downloading and processing of urls for a new host
 #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
 #!
@@ -119,6 +119,12 @@ rz_FetchFileInfo() {
       echo -e "${GREEN}| Download url found${NC}"
       download_url=$(grep -oPi '(?<=\\"props\\":\{\}\},\\"href\\":\\").*?(?=\\"}.*$)' <<< "$response")
       download_url="${download_url//[$'\t\r\n\0']}"
+      url_prefix=${download_url%\/*}
+      url_postfix=${download_url##*\/}
+      url_fname=${url_postfix%\?*}
+      url_token=${url_postfix##*\?}
+      url_enc_fname=$(urlencode_literal_grouped_case ${url_fname})
+      download_url="${url_prefix}/${url_enc_fname}?${url_token}"
       break
     else
       if [ $i == $maxfetchretries ] ; then
diff --git a/hosts/syspro.sh b/hosts/syspro.sh
new file mode 100644
index 0000000..a3534bb
--- /dev/null
+++ b/hosts/syspro.sh
@@ -0,0 +1,233 @@
+#! Name: syspro.sh
+#! Author: kittykat
+#! Version: 2024.12.25
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#!   HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!   HostDomainRegex: The regex used to verify matching urls
+HostCode='sysp'
+HostNick='syspro'
+HostFuncPrefix='sysp'
+HostUrls='share.syspro.com.br'
+HostDomainRegex='^(http|https)://(.*\.)?share\.syspro\.com\.br/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! @REQUIRED: Host Main Download function
+#!   Must be named specifically as such:
+#!   <HostFuncPrefix>_DownloadFile()
+sysp_DownloadFile() {
+  local remote_url=${1}
+  local file_url=${1}
+  local filecnt=${2}
+  warnAndRetryUnknownError=false
+  exitDownloadError=false
+  exitDownloadNotAvailable=false
+  fileAlreadyDone=false
+  download_inflight_path="${WorkDir}/.inflight/"
+  mkdir -p "$download_inflight_path"
+  completed_location="${WorkDir}/downloads/"
+  tor_identity="${RANDOM}"
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUrlRetries; z++)); do
+    if [ $z -eq $MaxUrlRetries ] ; then
+      finalAttempt="true"
+    fi
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if sysp_FetchFileInfo $finalAttempt && sysp_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
+      return 0
+    elif [ $z -lt $MaxUrlRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
+        fi
+      fi
+      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+}
+#!
+#! ------------- (2) Fetch File Info Function ----------------- #
+#!
+sysp_FetchFileInfo() {
+  finalAttempt=$1
+  maxfetchretries=6
+  echo -e "${GREEN}# Fetching post info…${NC}"
+  for ((i=1; i<=$maxfetchretries; i++)); do
+    mkdir -p "${WorkDir}/.temp"
+    printf " ."
+    tor_identity="${RANDOM}"
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    response=$(tor_curl_request --insecure -L -s "$remote_url")
+    if [ "${DebugAllEnabled}" == "true" ] ; then
+      debugHtml "${remote_url##*/}" "sysp_fetch$i" "${response}"
+    fi
+    if [[ -z $response ]] ; then
+      if [ $i == $maxfetchretries ] ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link [1]${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to extract download link [1]" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    if grep -Eqi 'File Not Found|No such file with this filename|File was deleted|
.*$)' <<< "$response")
+      post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">.*$)' <<< "$response")
+      post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">.*$)' <<< "$response")
+      post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">.*$)' <<< "$response")
+      post_action="${post_action//[$'\t\r\n']}"
+      # keep the raw fname for the local file name; only the form field gets url-encoded
+      if [ "$filename_override" == "" ]; then
+        filename="${post_fname}"
+      fi
+      post_fname=$(urlencode_literal_grouped_case "${post_fname}")
+    else
+      if [ $i == $maxfetchretries ] ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link [2]${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to extract download link [2]" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    fi
+    if [[ -z "$post_action" ]] || [[ -z "$post_act" ]] || [[ -z "$post_id" ]] || \
+       [[ -z "$post_fname" ]] || [[ -z "$post_rand" ]] ; then
+      if [ $i == $maxfetchretries ] ; then
+        printf "\\n"
+        echo -e "${RED}| Failed to extract download link [3]${NC}"
+        warnAndRetryUnknownError=true
+        if [ "${finalAttempt}" == "true" ] ; then
+          failedRetryDownload "${remote_url}" "Failed to extract download link [3]" ""
+        fi
+        return 1
+      else
+        continue
+      fi
+    else
+      break
+    fi
+  done
+  form_data="act=${post_act}&id=${post_id}&fname=${post_fname}&rand=${post_rand}&btn=Download+File"
+  touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
+  if [ ! "$filename_override" == "" ] ; then
+    filename="$filename_override"
+  fi
+  filename=$(sanitize_file_or_folder_name "${filename}")
+  printf "\\n"
+  echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
+  echo -e "${YELLOW}| File size:${NC}\tUnknown${NC}"
+  file_path="${download_inflight_path}${filename}"
+  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+  if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
+    return 1
+  fi
+  echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
+}
+#!
+#! ----------- (3) Fetch File / Download File Function --------------- #
+#!
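+#! sysp_GetFile re-posts the hidden-form fields gathered above (the usual
+#! XFileSharing-style second step) and writes the response body straight to disk.
+#! A hypothetical example of the form body it sends -- every value below is
+#! illustrative only, not taken from a real share.syspro.com.br page:
+#!   act=download2&id=abc123&fname=My%20File.zip&rand=xyz789&btn=Download+File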
+sysp_GetFile() {
+  echo -e "${GREEN}# Downloading…\t${BLUE}(No Resume)${NC}"
+  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
+  fileCnt=$1
+  retryCnt=$2
+  finalAttempt=$3
+  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
+  GetRandomUA
+  if [ -f "$file_path" ]; then
+    rm -f "$file_path"
+  fi
+  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+  trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+  if [ "${UseTorCurlImpersonate}" == "true" ]; then
+    if [ "${RateMonitorEnabled}" == "true" ]; then
+      tor_curl_request --insecure -L \
+        --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+        --data "$form_data" "$post_action" \
+        --output "$file_path"
+    else
+      tor_curl_request --insecure -L \
+        --data "$form_data" "$post_action" \
+        --output "$file_path"
+    fi
+  else
+    if [ "${RateMonitorEnabled}" == "true" ]; then
+      tor_curl_request --insecure -L \
+        --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+        -H "User-Agent: $RandomUA" \
+        --data "$form_data" "$post_action" \
+        --output "$file_path"
+    else
+      tor_curl_request --insecure -L \
+        -H "User-Agent: $RandomUA" \
+        --data "$form_data" "$post_action" \
+        --output "$file_path"
+    fi
+  fi
+  # record what actually landed on disk (used by the failure report below)
+  received_file_size=0
+  if [ -f "$file_path" ] ; then
+    received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
+  fi
+  if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
+    containsHtml=false
+  else
+    containsHtml=true
+  fi
+  if [ "$containsHtml" == "true" ]; then
+    if [ -f "$file_path" ] ; then
+      rm -rf "$file_path"
+    fi
+    echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
+    rm -f "$flockDownload";
+    if [ "${finalAttempt}" == "true" ] ; then
+      droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
+    fi
+    return 1
+  fi
+  rm -f "$flockDownload";
+  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
+  return 0
+}
+#!
+#! --------------- Host Extra Functions ------------------- #
+#!
diff --git a/hosts/up_acid.sh b/hosts/up_acid.sh
index e7019b6..43ac522 100644
--- a/hosts/up_acid.sh
+++ b/hosts/up_acid.sh
@@ -24,7 +24,7 @@ HostFuncPrefix='acid'
 ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
 #!
 #!
-#! Configurables
+#! Jirafeau Host
 #! -------------
 #!
 #! ------------ (1) Host Main Upload Function --------------- #
@@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi
 #!   Must be named specifically as such:
 #!   <HostFuncPrefix>_UploadFile()
 acid_UploadFile() {
-  local _hostCode=${1}
-  local filepath=${2}
-  local filecnt=${3}
-  local pline=${4}
-  local filename="${filepath##*/}"
-  warnAndRetryUnknownError=false
-  exitUploadError=false
-  exitUploadNotAvailable=false
-  fileAlreadyDone=false
-  tor_identity="${RANDOM}"
-  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
-  MaxUploadSizeInBytes=104857600
-  fsize=$(GetFileSize "$filepath" "false")
-  if ((fsize > MaxUploadSizeInBytes)); then
-    rm -f "${UploadTicket}"
-    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
-    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
-    return 1
-  fi
-  finalAttempt="false"
-  for ((z=0; z<=$MaxUploadRetries; z++)); do
-    if [ $z -eq $MaxUploadRetries ] ; then
-      finalAttempt="true"
-    fi
-    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-    if acid_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
-      return 0
-    elif [ $z -lt $MaxUploadRetries ]; then
-      if [ "${fileAlreadyDone}" == "true" ] ; then
-        break
-      fi
-      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
-        if [ "${DebugAllEnabled}" == "true" ] ; then
-          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
-        fi
-      fi
-      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
-        if [ "${DebugAllEnabled}" == "true" ] ; then
-          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
-        fi
-        rm -f "${UploadTicket}"
-        break
-      fi
-      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
-      sleep 3
-    fi
-  done
-  rm -f "${UploadTicket}"
+  jira_MaxUploadSizeInBytes=104857600
+  jira_PostUrlHost='https://dl.acid.fr/script.php'
+  jira_filetype=1
+  jira_timeval="month"
+  jira_downloadLinkPrefix='https://dl.acid.fr/f.php?h='
+  jira_UploadFile "${1}" "${2}" "${3}" "${4}"
 }
-#!
-#! ----------- (2) Post File / Upload File Function --------------- #
-#!
-acid_PostFile() {
-  local filepath=$1
-  local _hostCode=$2
-  local filename=$3
-  local fileCnt=$4
-  local retryCnt=$5
-  local finalAttempt=$6
-  local pline=${7}
-  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
-  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
-  tor_identity="${RANDOM}"
-  PostUrlHost='https://dl.acid.fr/script.php'
-  arrFiles=("$filepath")
-  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-  response=$(tor_curl_upload --insecure -i \
-    -H "Content-Type: multipart/form-data" \
-    -F "time=month" \
-    -F "file=@${filepath}" \
-    "${PostUrlHost}")
-  if [ "${DebugAllEnabled}" == "true" ] ; then
-    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
-  fi
-  if grep -Eqi ' 200 ' <<< "${response}" ; then
-    hash=$(echo "$response" | tail -2 | head -1)
-    hash=${hash//[$'\t\r\n']}
-    filesize=$(GetFileSize "$filepath" "false")
-    downloadLink="https://dl.acid.fr/f.php?h=${hash}&p=1"
-    echo -e "${GREEN}| Upload Success${NC}"
-    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
-    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
-    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
-    return 0
-  else
-    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
-    if [ "${finalAttempt}" == "true" ] ; then
-      printf "\\n"
-      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
-      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
-      exitUploadError=true
-      return 1
-    else
-      return 1
-    fi
-  fi
-}
-#!
-#! --------------- Host Extra Functions ------------------- #
-#!
diff --git a/hosts/up_anarchaserver.sh b/hosts/up_anarchaserver.sh
index 8dfcdfd..dd9723c 100644
--- a/hosts/up_anarchaserver.sh
+++ b/hosts/up_anarchaserver.sh
@@ -24,7 +24,7 @@ HostFuncPrefix='anarc'
 ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
 #!
 #!
-#! Configurables
+#! Jirafeau Host
 #! -------------
 #!
 #! ------------ (1) Host Main Upload Function --------------- #
@@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi
 #!   Must be named specifically as such:
 #!   <HostFuncPrefix>_UploadFile()
 anarc_UploadFile() {
-  local _hostCode=${1}
-  local filepath=${2}
-  local filecnt=${3}
-  local pline=${4}
-  local filename="${filepath##*/}"
-  warnAndRetryUnknownError=false
-  exitUploadError=false
-  exitUploadNotAvailable=false
-  fileAlreadyDone=false
-  tor_identity="${RANDOM}"
-  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
-  MaxUploadSizeInBytes=10737418240
-  fsize=$(GetFileSize "$filepath" "false")
-  if ((fsize > MaxUploadSizeInBytes)); then
-    rm -f "${UploadTicket}"
-    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
-    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
-    return 1
-  fi
-  finalAttempt="false"
-  for ((z=0; z<=$MaxUploadRetries; z++)); do
-    if [ $z -eq $MaxUploadRetries ] ; then
-      finalAttempt="true"
-    fi
-    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-    if anarc_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
-      return 0
-    elif [ $z -lt $MaxUploadRetries ]; then
-      if [ "${fileAlreadyDone}" == "true" ] ; then
-        break
-      fi
-      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
-        if [ "${DebugAllEnabled}" == "true" ] ; then
-          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
-        fi
-      fi
-      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
-        if [ "${DebugAllEnabled}" == "true" ] ; then
-          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
-        fi
-        rm -f "${UploadTicket}"
-        break
-      fi
-      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
-      sleep 3
-    fi
-  done
-  rm -f "${UploadTicket}"
+  jira_MaxUploadSizeInBytes=10737418240
+  jira_PostUrlHost='https://transitional.anarchaserver.org/jirafeau/script.php'
+  jira_filetype=1
+  jira_timeval="month"
+  jira_downloadLinkPrefix='https://transitional.anarchaserver.org/jirafeau/f.php?h='
+  jira_UploadFile "${1}" "${2}" "${3}" "${4}"
 }
-#!
-#! ----------- (2) Post File / Upload File Function --------------- #
-#!
-anarc_PostFile() {
-  local filepath=$1
-  local _hostCode=$2
-  local filename=$3
-  local fileCnt=$4
-  local retryCnt=$5
-  local finalAttempt=$6
-  local pline=${7}
-  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
-  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
-  tor_identity="${RANDOM}"
-  PostUrlHost='https://transitional.anarchaserver.org/jirafeau/script.php'
-  arrFiles=("$filepath")
-  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
-  response=$(tor_curl_upload --insecure -i \
-    -H "Content-Type: multipart/form-data" \
-    -F "time=month" \
-    -F "file=@${filepath}" \
-    "${PostUrlHost}")
-  if [ "${DebugAllEnabled}" == "true" ] ; then
-    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
-  fi
-  if grep -Eqi ' 200 ' <<< "${response}" ; then
-    hash=$(echo "$response" | tail -2 | head -1)
-    hash=${hash//[$'\t\r\n']}
-    filesize=$(GetFileSize "$filepath" "false")
-    downloadLink="https://transitional.anarchaserver.org/jirafeau/f.php?h=${hash}&p=1"
-    echo -e "${GREEN}| Upload Success${NC}"
-    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
-    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
-    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
-    return 0
-  else
-    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
-    if [ "${finalAttempt}" == "true" ] ; then
-      printf "\\n"
-      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
-      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
-      exitUploadError=true
-      return 1
-    else
-      return 1
-    fi
-  fi
-}
-#!
-#! --------------- Host Extra Functions ------------------- #
-#!
diff --git a/hosts/up_anonfile.sh b/hosts/up_anonfile.sh
new file mode 100644
index 0000000..ec1d2f7
--- /dev/null
+++ b/hosts/up_anonfile.sh
@@ -0,0 +1,142 @@
+#! Name: up_anonfile.sh
+#! Author: kittykat
+#! Version: 2024.10.26
+#! Desc: Add support for uploading files to anonfile.de
+#! Info: https://anonfile.de/
+#! MaxSize: 512MB
+#! Expire: ??
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output
+#!   HostFuncPrefix: ie. 'fh' -- fh_UploadFile()
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+HostCode='anon'
+HostNick='anonfile'
+HostFuncPrefix='anon'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required ListUploadHosts into mad.sh
+ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
+#!
+#!
+#! Configurables
+#! -------------
+#!
+#! ------------ (1) Host Main Upload Function --------------- #
+#!
+#! @REQUIRED: Host Main Upload function
+#!   Must be named specifically as such:
+#!   <HostFuncPrefix>_UploadFile()
+anon_UploadFile() {
+  local _hostCode=${1}
+  local filepath=${2}
+  local filecnt=${3}
+  local pline=${4}
+  local filename="${filepath##*/}"
+  warnAndRetryUnknownError=false
+  exitUploadError=false
+  exitUploadNotAvailable=false
+  fileAlreadyDone=false
+  tor_identity="${RANDOM}"
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  MaxUploadSizeInBytes=536870912
+  fsize=$(GetFileSize "$filepath" "false")
+  if ((fsize > MaxUploadSizeInBytes)); then
+    rm -f "${UploadTicket}"
+    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    return 1
+  fi
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUploadRetries; z++)); do
+    if [ $z -eq $MaxUploadRetries ] ; then
+      finalAttempt="true"
+    fi
+    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if anon_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
+      return 0
+    elif [ $z -lt $MaxUploadRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
+        fi
+      fi
+      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${UploadTicket}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${UploadTicket}"
+}
+#!
+#! ----------- (2) Post File / Upload File Function --------------- #
+#!
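+#! anon_PostFile below multiparts the file to the XFileSharing upload CGI and
+#! greps the response for file_status / file_code. A hypothetical success body,
+#! shown only to illustrate what those greps match (the real payload may carry
+#! additional fields):
+#!   [{"file_code":"abc123xyz","file_status":"OK"}]
+#! which yields the link https://anonfile.de/abc123xyz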
+anon_PostFile() { + local filepath=$1 + local _hostCode=$2 + local filename=$3 + local fileCnt=$4 + local retryCnt=$5 + local finalAttempt=$6 + local pline=${7} + UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" + echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" + tor_identity="${RANDOM}" + PostUrlHost='https://anonfile.de/cgi-bin/upload.cgi?upload_type=file&utype=anon' + arrFiles=("$filepath") + trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_upload --insecure -i \ + -H "Content-Type: multipart/form-data" \ + -F "sess_id=" \ + -F "utype=anon" \ + -F "file_descr=" \ + -F "file_public=1" \ + -F "link_rcpt=" \ + -F "link_pass=" \ + -F "to_folder=" \ + -F "upload=Start upload" \ + -F "keepalive=1" \ + -F "file_0=@${filepath}" \ + "${PostUrlHost}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" + fi + if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then + hash=$(grep -oPi -m 1 '(?<="file_code":").*?(?=".*$)' <<< "$response") + filesize=$(GetFileSize "$filepath" "false") + downloadLink="https://anonfile.de/$hash" + echo -e "${GREEN}| Upload Success${NC}" + echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" + echo -e "| Link: ${YELLOW}${downloadLink}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + return 0 + else + err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") + if [ "${finalAttempt}" == "true" ] ; then + printf "\\n" + echo -e "${RED}| Upload failed. Status: ${err}${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" + exitUploadError=true + return 1 + else + return 1 + fi + fi +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/up_ateasystems.sh b/hosts/up_ateasystems.sh new file mode 100644 index 0000000..97f4e85 --- /dev/null +++ b/hosts/up_ateasystems.sh @@ -0,0 +1,137 @@ +#! Name: up_ateasystems.sh +#! Author: kittykat +#! Version: 2024.10.24 +#! Desc: Add support for uploading files to share.ateasystems.com +#! Info: https://share.ateasystems.com/share// +#! MaxSize: 20GB +#! Expire: ?? +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output +#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile() +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +HostCode='atea' +HostNick='atea' +HostFuncPrefix='atea' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' +#! +#! +#! Configurables +#! ------------- +#! +#! ------------ (1) Host Main Upload Function --------------- # +#! +#! @REQUIRED: Host Main Upload function +#! Must be named specifically as such: +#! 
<HostFuncPrefix>_UploadFile()
+atea_UploadFile() {
+  local _hostCode=${1}
+  local filepath=${2}
+  local filecnt=${3}
+  local pline=${4}
+  local filename="${filepath##*/}"
+  warnAndRetryUnknownError=false
+  exitUploadError=false
+  exitUploadNotAvailable=false
+  fileAlreadyDone=false
+  tor_identity="${RANDOM}"
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  MaxUploadSizeInBytes=21474836480
+  fsize=$(GetFileSize "$filepath" "false")
+  if ((fsize > MaxUploadSizeInBytes)); then
+    rm -f "${UploadTicket}"
+    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    return 1
+  fi
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUploadRetries; z++)); do
+    if [ $z -eq $MaxUploadRetries ] ; then
+      finalAttempt="true"
+    fi
+    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if atea_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
+      return 0
+    elif [ $z -lt $MaxUploadRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
+        fi
+      fi
+      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${UploadTicket}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${UploadTicket}"
+}
+#!
+#! ----------- (2) Post File / Upload File Function --------------- #
+#!
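Each non-Jirafeau host in this patch repeats the retry scaffold seen in atea_UploadFile above: z runs from 0 through MaxUploadRetries inclusive (so MaxUploadRetries=3 means four attempts in total), and finalAttempt flips to "true" only on the last pass, which is what lets the _PostFile functions report a terminal failure exactly once. A stripped-down sketch of just that control flow; post_once is a stand-in for a real _PostFile, not a function from this codebase:

post_once() { return 1; }            # stand-in: always fails, to show the flow
MaxUploadRetries=3
finalAttempt="false"
for ((z=0; z<=MaxUploadRetries; z++)); do
  if [ $z -eq $MaxUploadRetries ] ; then
    finalAttempt="true"              # 4th and final attempt when retries=3
  fi
  if post_once "$finalAttempt" ; then
    break                            # success: the host function returns 0
  elif [ $z -lt $MaxUploadRetries ] ; then
    sleep 3                          # recoverable failure: back off and retry
  fi
done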
+atea_PostFile() { + local filepath=$1 + local _hostCode=$2 + local filename=$3 + local fileCnt=$4 + local retryCnt=$5 + local finalAttempt=$6 + local pline=${7} + UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" + echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" + tor_identity="${RANDOM}" + PostUrlHost='https://share.ateasystems.com/share/cgi-bin/upload.cgi?upload_id=' + arrFiles=("$filepath") + trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_upload --insecure -i \ + -H "Content-Type: multipart/form-data" \ + -F "tos=" \ + -F "file_0_descr=" \ + -F "link_pass=" \ + -F "file_0=@${filepath}" \ + "${PostUrlHost}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" + fi + if grep -Eqi "Location: https://share\.ateasystems\.com/share/\?\&filename\=" <<< "${response}" ; then + fname=$(grep -oPi -m 1 '(?<=https://share.ateasystems.com/share/\?\&filename=).*?(?=&del_id.*$)' <<< "$response") + fnameorig=$(grep -oPi -m 1 '(?<=&filename_original=).*?(?=&status.*$)' <<< "$response") + filesize=$(GetFileSize "$filepath" "false") + downloadLink="https://share.ateasystems.com/share/$fname/$fnameorig" + echo -e "${GREEN}| Upload Success${NC}" + echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" + echo -e "| Link: ${YELLOW}${downloadLink}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + return 0 + else + err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") + if [ "${finalAttempt}" == "true" ] ; then + printf "\\n" + echo -e "${RED}| Upload failed. Status: ${err}${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" + exitUploadError=true + return 1 + else + return 1 + fi + fi +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/up_axfc.sh b/hosts/up_axfc.sh index 460c557..11a2489 100644 --- a/hosts/up_axfc.sh +++ b/hosts/up_axfc.sh @@ -1,6 +1,6 @@ #! Name: up_axfc.sh #! Author: kittykat -#! Version: 2024.10.23 +#! Version: 2024.12.26 #! Desc: Add support for uploading files to a new host #! Info: Files are accessible at https://www.axfc.net/ #! MaxSize: 2GB @@ -117,7 +117,7 @@ axfc_PostFile() { echo -e "${RED}| Failed to start an upload [1]${NC}" warnAndRetryUnknownError=true if [ "${finalAttempt}" == "true" ] ; then - failedRetryDownload "${remote_url}" "Failed to start an upload [1]" "" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to start an upload [1]" "" fi return 1 else @@ -152,7 +152,7 @@ axfc_PostFile() { echo -e "${RED}| Failed to extract token link [1].${NC}" warnAndRetryUnknownError=true if [ "${finalAttempt}" == "true" ] ; then - failedUpload "${filepath}" "${_hostCode}" "Failed to extract token link [1]" "" + failedUpload "$pline" "${filepath}" "${_hostCode}" "Failed to extract token link [1]" "" fi return 1 else @@ -169,7 +169,7 @@ axfc_PostFile() { echo -e "${RED}| Ticket url not found [1].${NC}" warnAndRetryUnknownError=true if [ "${finalAttempt}" == "true" ] ; then - failedRetryUpload "${filepath}" "${_hostCode}" "Ticket url not found [1]" "" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Ticket url not found [1]" "" fi return 1 else diff --git a/hosts/up_cyssoux.sh b/hosts/up_cyssoux.sh new file mode 100644 index 0000000..c1706c9 --- /dev/null +++ b/hosts/up_cyssoux.sh @@ -0,0 +1,42 @@ +#! Name: up_cyssoux.sh +#! 
Author: kittykat
+#! Version: 2024.12.23
+#! Desc: Add support for uploading files to partage.cyssoux.fr
+#! Info: Files are accessible at https://address/f.php?h=&p=1
+#! MaxSize: ??
+#! Expire: 1 Month
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output
+#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile()
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+HostCode='cyx'
+HostNick='cyssoux'
+HostFuncPrefix='cyx'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
+#!
+#!
+#! Jirafeau Host
+#! -------------
+#!
+#! ------------ (1) Host Main Upload Function --------------- #
+#!
+#! @REQUIRED: Host Main Upload function
+#! Must be named specifically as such:
+#! <HostFuncPrefix>_UploadFile()
+cyx_UploadFile() {
+  jira_MaxUploadSizeInBytes=10737418240
+  jira_PostUrlHost='https://partage.cyssoux.fr/script.php'
+  jira_filetype=1
+  jira_timeval="month"
+  jira_downloadLinkPrefix='https://partage.cyssoux.fr/f.php?h='
+  jira_UploadFile ${1} ${2} ${3} ${4}
+}
diff --git a/hosts/up_dashfile.sh b/hosts/up_dashfile.sh
new file mode 100644
index 0000000..85ba6f7
--- /dev/null
+++ b/hosts/up_dashfile.sh
@@ -0,0 +1,142 @@
+#! Name: up_dashfile.sh
+#! Author: kittykat
+#! Version: 2024.10.25
+#! Desc: Add support for uploading files to dashfile.net
+#! Info: https://dashfile.net/
+#! MaxSize: 400MB
+#! Expire: ??
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output
+#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile()
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+HostCode='dash'
+HostNick='dashfile'
+HostFuncPrefix='dash'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
+#!
+#!
+#! Configurables
+#! -------------
+#!
+#! ------------ (1) Host Main Upload Function --------------- #
+#!
+#! @REQUIRED: Host Main Upload function
+#! Must be named specifically as such:
+#! <HostFuncPrefix>_UploadFile()
+dash_UploadFile() {
+  local _hostCode=${1}
+  local filepath=${2}
+  local filecnt=${3}
+  local pline=${4}
+  local filename="${filepath##*/}"
+  warnAndRetryUnknownError=false
+  exitUploadError=false
+  exitUploadNotAvailable=false
+  fileAlreadyDone=false
+  tor_identity="${RANDOM}"
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  MaxUploadSizeInBytes=419430400
+  fsize=$(GetFileSize "$filepath" "false")
+  if ((fsize > MaxUploadSizeInBytes)); then
+    rm -f "${UploadTicket}"
+    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. 
The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    return 1
+  fi
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUploadRetries; z++)); do
+    if [ $z -eq $MaxUploadRetries ] ; then
+      finalAttempt="true"
+    fi
+    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if dash_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
+      return 0
+    elif [ $z -lt $MaxUploadRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
+        fi
+      fi
+      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${UploadTicket}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${UploadTicket}"
+}
+#!
+#! ----------- (2) Post File / Upload File Function --------------- #
+#!
+dash_PostFile() {
+  local filepath=$1
+  local _hostCode=$2
+  local filename=$3
+  local fileCnt=$4
+  local retryCnt=$5
+  local finalAttempt=$6
+  local pline=${7}
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
+  tor_identity="${RANDOM}"
+  PostUrlHost='https://s02.dashfile.net/cgi-bin/upload.cgi?upload_type=file&utype=anon'
+  arrFiles=("$filepath")
+  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+  response=$(tor_curl_upload --insecure -i \
+    -H "Content-Type: multipart/form-data" \
+    -F "sess_id=" \
+    -F "utype=anon" \
+    -F "file_descr=" \
+    -F "file_public=1" \
+    -F "link_rcpt=" \
+    -F "link_pass=" \
+    -F "to_folder=" \
+    -F "upload=Start upload" \
+    -F "keepalive=1" \
+    -F "file_0=@${filepath}" \
+    "${PostUrlHost}")
+  if [ "${DebugAllEnabled}" == "true" ] ; then
+    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
+  fi
+  if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then
+    hash=$(grep -oPi -m 1 '(?<="file_code":").*?(?=",".*$)' <<< "$response")
+    filesize=$(GetFileSize "$filepath" "false")
+    downloadLink="https://dashfile.net/$hash"
+    echo -e "${GREEN}| Upload Success${NC}"
+    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
+    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
+    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
+    return 0
+  else
+    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
+    if [ "${finalAttempt}" == "true" ] ; then
+      printf "\\n"
+      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
+      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
+      exitUploadError=true
+      return 1
+    else
+      return 1
+    fi
+  fi
+}
+#!
+#! --------------- Host Extra Functions ------------------- #
+#!
diff --git a/hosts/up_depotkaz.sh b/hosts/up_depotkaz.sh
index bef5c9d..c99b8bf 100644
--- a/hosts/up_depotkaz.sh
+++ b/hosts/up_depotkaz.sh
@@ -24,7 +24,7 @@ HostFuncPrefix='kaz'
 ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
 #!
 #!
-#! Configurables
+#! Jirafeau Host
 #! -------------
 #!
 #! 
------------ (1) Host Main Upload Function --------------- # @@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() kaz_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=1073741824 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if kaz_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=1073741824 + jira_PostUrlHost='https://depot.kaz.bzh/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://depot.kaz.bzh/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! 
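Every Jirafeau conversion in this patch (depotkaz above; familleflender, filesquid, free4e, harrault, and linxx further down) reduces a host file to five jira_* variables plus a call to the shared jira_UploadFile. That shared routine lives in mad.sh and is not shown in this diff; the sketch below reconstructs what its POST step presumably does from the per-host code being deleted, since the removed kaz_PostFile that follows did exactly this inline. Treat it as an inference, not the actual mad.sh implementation:

# Hypothetical core of the shared Jirafeau poster, inferred from the
# removed per-host functions; the real jira_UploadFile may differ.
jira_post_sketch() {
  local filepath=$1
  response=$(tor_curl_upload --insecure -i \
    -H "Content-Type: multipart/form-data" \
    -F "time=${jira_timeval}" \
    -F "file=@${filepath}" \
    "${jira_PostUrlHost}")
  grep -Eqi ' 200 ' <<< "${response}" || return 1
  # body carries the hash on the next-to-last line, a delete token last
  hash=$(echo "$response" | tail -2 | head -1)
  hash=${hash//[$'\t\r\n']}
  downloadLink="${jira_downloadLinkPrefix}${hash}&p=1"
}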
-kaz_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://depot.kaz.bzh/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://depot.kaz.bzh/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_dictvm.sh b/hosts/up_dictvm.sh new file mode 100644 index 0000000..33018c7 --- /dev/null +++ b/hosts/up_dictvm.sh @@ -0,0 +1,42 @@ +#! Name: up_dictvm.sh +#! Author: kittykat +#! Version: 2024.12.12 +#! Desc: Add support for uploading files to upload.dictvm.org +#! Info: Files are accessible at https://address/f.php?h=&p=1 +#! MaxSize: 2GB +#! Expire: 1 Month +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output +#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile() +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +HostCode='dict' +HostNick='dictvm.org' +HostFuncPrefix='dict' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' +#! +#! +#! Jirafeau Host +#! ------------- +#! +#! ------------ (1) Host Main Upload Function --------------- # +#! +#! @REQUIRED: Host Main Upload function +#! Must be named specifically as such: +#! _UploadFile() +dict_UploadFile() { + jira_MaxUploadSizeInBytes=2147483648 + jira_PostUrlHost='https://upload.dictvm.org/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://upload.dictvm.org/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} +} diff --git a/hosts/up_eddowding.sh b/hosts/up_eddowding.sh new file mode 100644 index 0000000..ff58b9d --- /dev/null +++ b/hosts/up_eddowding.sh @@ -0,0 +1,42 @@ +#! Name: up_eddowding.sh +#! 
Author: kittykat +#! Version: 2024.12.12 +#! Desc: Add support for uploading files to files.eddowding.com +#! Info: Files are accessible at https://address/f.php?h=&p=1 +#! MaxSize: 5GB +#! Expire: 1 Month +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output +#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile() +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +HostCode='edd' +HostNick='eddowding.com' +HostFuncPrefix='edd' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' +#! +#! +#! Jirafeau Host +#! ------------- +#! +#! ------------ (1) Host Main Upload Function --------------- # +#! +#! @REQUIRED: Host Main Upload function +#! Must be named specifically as such: +#! _UploadFile() +edd_UploadFile() { + jira_MaxUploadSizeInBytes=5368709120 + jira_PostUrlHost='https://files.eddowding.com/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://files.eddowding.com/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} +} diff --git a/hosts/up_familleflender.sh b/hosts/up_familleflender.sh index 2367208..b7caf21 100644 --- a/hosts/up_familleflender.sh +++ b/hosts/up_familleflender.sh @@ -24,7 +24,7 @@ HostFuncPrefix='ffl' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() ffl_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=1073741824 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. 
($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if ffl_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=1073741824 + jira_PostUrlHost='https://famille-flender.fr/jirafeau/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://famille-flender.fr/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! -ffl_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://famille-flender.fr/jirafeau/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://famille-flender.fr/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_fileblade.sh b/hosts/up_fileblade.sh new file mode 100644 index 0000000..8c2b4c9 --- /dev/null +++ b/hosts/up_fileblade.sh @@ -0,0 +1,144 @@ +#! Name: up_fileblade.sh +#! Author: kittykat +#! Version: 2024.12.20 +#! Desc: Add support for uploading files to a new host +#! Info: Files are accessible at https://fileblade.com/ +#! MaxSize: 40GB +#! Expire: ? +#! 
Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output
+#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile()
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+HostCode='fb'
+HostNick='fileblade'
+HostFuncPrefix='fb'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
+#!
+#!
+#! Configurables
+#! -------------
+#!
+#! ------------ (1) Host Main Upload Function --------------- #
+#!
+#! @REQUIRED: Host Main Upload function
+#! Must be named specifically as such:
+#! <HostFuncPrefix>_UploadFile()
+fb_UploadFile() {
+  local _hostCode=${1}
+  local filepath=${2}
+  local filecnt=${3}
+  local pline=${4}
+  local filename="${filepath##*/}"
+  warnAndRetryUnknownError=false
+  exitUploadError=false
+  exitUploadNotAvailable=false
+  fileAlreadyDone=false
+  tor_identity="${RANDOM}"
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  MaxUploadSizeInBytes=104857600
+  fsize=$(GetFileSize "$filepath" "false")
+  if ((fsize > MaxUploadSizeInBytes)); then
+    rm -f "${UploadTicket}"
+    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    return 1
+  fi
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUploadRetries; z++)); do
+    if [ $z -eq $MaxUploadRetries ] ; then
+      finalAttempt="true"
+    fi
+    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if fb_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
+      return 0
+    elif [ $z -lt $MaxUploadRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
+        fi
+      fi
+      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${UploadTicket}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${UploadTicket}"
+}
+#!
+#! ----------- (2) Post File / Upload File Function --------------- #
+#!
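fb_PostFile below adds a wrinkle the simpler hosts lack: the upload URL is drawn at random from an ar_HUP array, so extra upload nodes can be registered as new indexes. With a single entry the pick is a no-op, but the idiom generalizes; a short illustration (the de7 entry is hypothetical and shown only for shape, not a known fileblade mirror):

ar_HUP[0]="https://de6.fileblade.com/cgi-bin/upload.cgi?upload_type=file&utype=anon"
#ar_HUP[1]="https://de7.fileblade.com/cgi-bin/upload.cgi?upload_type=file&utype=anon"   # hypothetical second node
index=$((RANDOM % ${#ar_HUP[@]}))   # uniform pick over however many nodes exist
PostUrlHost="${ar_HUP[$index]}"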
+fb_PostFile() { + local filepath=$1 + local _hostCode=$2 + local filename=$3 + local fileCnt=$4 + local retryCnt=$5 + local finalAttempt=$6 + local pline=${7} + UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" + local ar_HUP[0]="https://de6.fileblade.com/cgi-bin/upload.cgi?upload_type=file&utype=anon" + local arrSize=${#ar_HUP[@]} + local index=$(($RANDOM % $arrSize)) + local RandomHostUploadUrl=${ar_HUP[$index]} + PostUrlHost="$RandomHostUploadUrl" + arrFiles=("$filepath") + trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_upload --insecure -i \ + -H "Content-Type: multipart/form-data" \ + -F "sess_id=" \ + -F "utype=anon" \ + -F "file_descr=" \ + -F "file_public=1" \ + -F "link_rcpt=" \ + -F "link_pass=" \ + -F "to_folder=" \ + -F "upload=Start upload" \ + -F "keepalive=1" \ + -F "file_0=@$filepath" \ + "${PostUrlHost}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" + fi + if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then + hash=$(grep -oPi '(?<="file_code":").*?(?=".*$)' <<< "$response") + filesize=$(GetFileSize "$filepath" "false") + downloadLink="https://fileblade.com/${hash}" + echo -e "${GREEN}| Upload Success${NC}" + echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" + echo -e "| Link: ${YELLOW}${downloadLink}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + return 0 + else + err=$(grep -oPi '(?<="file_status":").*?(?=")' <<< "$response") + if [ "${finalAttempt}" == "true" ] ; then + printf "\\n" + echo -e "${RED}| Upload failed. Status: ${err}${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" + exitUploadError=true + return 1 + else + return 1 + fi + fi +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/up_fileditch.sh b/hosts/up_fileditch.sh index 1e135b9..d2f2ce8 100644 --- a/hosts/up_fileditch.sh +++ b/hosts/up_fileditch.sh @@ -95,38 +95,63 @@ fd_PostFile() { local pline=${7} UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://up1.fileditch.com/upload.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "files[]=@${arrFiles[@]}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi '"success": true,' <<< "${response}" ; then - url=$(grep -oPi '(?<="url": ").*?(?=".*$)' <<< "$response") - filesize=$(GetFileSize "$filepath" "false") - downloadLink="${url//\\/}" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 + for ((j=1; j<=20; j++)); do + if ((j > 1)); then + tput rc; tput el; + fi + tput sc + tor_identity="${RANDOM}" + PostUrlHost='https://up1.fileditch.com/upload.php' + arrFiles=("$filepath") + trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_upload --insecure -i -L \ + -H "Content-Type: multipart/form-data" \ + -F "files[]=@${arrFiles[@]}" \ + "${PostUrlHost}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "${_hostCode}_upload_$j" "post_url: ${PostUrlHost}"$'\n'"${response}" + fi + if grep -Eqi 'Why we block tor' <<< "${response}" ; then + if ((j >= 20)); then + if [ "${finalAttempt}" == "true" ] ; then + printf "\\n" + echo -e "${RED}| Upload failed. Tor Blocked.${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Upload failed. Tor Blocked." + exitUploadError=true + return 1 + else + echo -e "" + echo -e "${RED}| Tor blocked. (max retries hit)${NC}" + return 1 + fi + fi + echo -e "" + echo -e "${YELLOW}| Tor blocked.. (retry)${NC}" + continue + fi + if grep -Eqi '"success": true,' <<< "${response}" ; then + url=$(grep -oPi '(?<="url": ").*?(?=".*$)' <<< "$response") + filesize=$(GetFileSize "$filepath" "false") + downloadLink="${url//\\/}" + echo -e "" + echo -e "${GREEN}| Upload Success${NC}" + echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" + echo -e "| Link: ${YELLOW}${downloadLink}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + return 0 else - return 1 - fi - fi + err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") + if [ "${finalAttempt}" == "true" ] ; then + echo -e "" + echo -e "${RED}| Upload failed. Status: ${err}${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" + exitUploadError=true + return 1 + else + return 1 + fi + fi + done } #! #! --------------- Host Extra Functions ------------------- # diff --git a/hosts/up_filesquid.sh b/hosts/up_filesquid.sh index 4d93a60..7b7709b 100644 --- a/hosts/up_filesquid.sh +++ b/hosts/up_filesquid.sh @@ -24,7 +24,7 @@ HostFuncPrefix='squid' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,119 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() squid_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=5368709120 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. 
($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if squid_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=5368709120 + jira_PostUrlHost='https://filesquid.net/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://filesquid.net/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! -squid_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - maxretries=4 - for ((i=1; i<=$maxretries; i++)); do - tor_identity="${RANDOM}" - PostUrlHost='https://filesquid.net/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi 'No password nor allowed IP' <<< "$response"; then - if ((i >= maxretries)) ; then - echo -e "${RED}| Upload failed. No password nor allowed IP.${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "No password nor allowed IP." - exitUploadError=true - return 1 - else - echo -n "${YELLOW}No password nor allowed IP${NC} (retry)..." - continue - fi - else - break - fi - done - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://filesquid.net/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! 
--------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_free4e.sh b/hosts/up_free4e.sh index 8dc840e..96cb5ec 100644 --- a/hosts/up_free4e.sh +++ b/hosts/up_free4e.sh @@ -24,7 +24,7 @@ HostFuncPrefix='fr4e' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,95 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() fr4e_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if fr4e_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=104857600 + jira_PostUrlHost='https://send.free4e.com/script.php' + jira_filetype=1 + jira_timeval="week" + jira_downloadLinkPrefix='https://send.free4e.com/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! 
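The parsing in both the XFS-style hosts above and the removed code below leans on one grep idiom: -oPi with a lookbehind/lookahead pair, which prints only the text between the two anchors. A self-contained demonstration (the JSON sample is fabricated for the example; real responses carry more fields):

sample='{"file_status":"OK","file_code":"abc123xyz"}'   # made-up response line
code=$(grep -oPi '(?<="file_code":").*?(?=".*$)' <<< "${sample}")
echo "${code}"   # prints: abc123xyz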
-fr4e_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://send.free4e.com/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=week" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://send.free4e.com/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_freesocial.sh b/hosts/up_freesocial.sh new file mode 100644 index 0000000..2fb865c --- /dev/null +++ b/hosts/up_freesocial.sh @@ -0,0 +1,42 @@ +#! Name: up_freesocial.sh +#! Author: kittykat +#! Version: 2024.12.23 +#! Desc: Add support for uploading files to files.freesocial.co +#! Info: Files are accessible at https://address/f.php?h=&p=1 +#! MaxSize: ?? +#! Expire: 1 Month +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output +#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile() +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +HostCode='frso' +HostNick='freesocial' +HostFuncPrefix='frso' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' +#! +#! +#! Jirafeau Host +#! ------------- +#! +#! ------------ (1) Host Main Upload Function --------------- # +#! +#! @REQUIRED: Host Main Upload function +#! Must be named specifically as such: +#! 
_UploadFile() +frso_UploadFile() { + jira_MaxUploadSizeInBytes=5368709120 + jira_PostUrlHost='https://files.freesocial.co/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://files.freesocial.co/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} +} diff --git a/hosts/up_harrault.sh b/hosts/up_harrault.sh index b65f780..d7b71fb 100644 --- a/hosts/up_harrault.sh +++ b/hosts/up_harrault.sh @@ -24,7 +24,7 @@ HostFuncPrefix='harr' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,102 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() harr_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if harr_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=104857600 + jira_PostUrlHost='https://files.harrault.fr/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://files.harrault.fr/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! 
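A detail worth knowing when maintaining these host files: the cleanup traps are written in double quotes, so ${UploadTicket} is expanded at the moment the trap is installed, not when it fires. That works here because the ticket path is fixed before each trap is set; a single-quoted variant would defer expansion until the signal arrives instead. Both forms side by side, as a sketch rather than a proposed change:

# Double quotes bake the current ${UploadTicket} into the trap string:
trap "rm -f ${UploadTicket}; echo ''; tput cnorm; exit" 0 1 2 3 6 15
# Single quotes re-read ${UploadTicket} when the trap actually fires:
trap 'rm -f "${UploadTicket}"; echo ""; tput cnorm; exit' 0 1 2 3 6 15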
-harr_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://files.harrault.fr/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://files.harrault.fr/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_herbolistique.sh b/hosts/up_herbolistique.sh new file mode 100644 index 0000000..00abf88 --- /dev/null +++ b/hosts/up_herbolistique.sh @@ -0,0 +1,42 @@ +#! Name: up_herbolistique.sh +#! Author: kittykat +#! Version: 2024.12.23 +#! Desc: Add support for uploading files to transfert.herbolistique.com +#! Info: Files are accessible at https://address/f.php?h=&p=1 +#! MaxSize: ?? +#! Expire: 1 Month +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output +#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile() +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +HostCode='herb' +HostNick='herbolistique' +HostFuncPrefix='herb' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' +#! +#! +#! Jirafeau Host +#! ------------- +#! +#! ------------ (1) Host Main Upload Function --------------- # +#! +#! @REQUIRED: Host Main Upload function +#! Must be named specifically as such: +#! 
<HostFuncPrefix>_UploadFile()
+herb_UploadFile() {
+  jira_MaxUploadSizeInBytes=536870912
+  jira_PostUrlHost='https://transfert.herbolistique.com/script.php'
+  jira_filetype=1
+  jira_timeval="month"
+  jira_downloadLinkPrefix='https://transfert.herbolistique.com/f.php?h='
+  jira_UploadFile ${1} ${2} ${3} ${4}
+}
diff --git a/hosts/up_isupload.sh b/hosts/up_isupload.sh
new file mode 100644
index 0000000..ea26aa7
--- /dev/null
+++ b/hosts/up_isupload.sh
@@ -0,0 +1,144 @@
+#! Name: up_isupload.sh
+#! Author: kittykat
+#! Version: 2024.12.20
+#! Desc: Add support for uploading files to a new host
+#! Info: Files are accessible at https://isupload.com/
+#! MaxSize: 40GB
+#! Expire: ?
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix@'
+#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#! HostNick: What is displayed throughout MAD output
+#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile()
+#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+HostCode='isup'
+HostNick='isupload'
+HostFuncPrefix='isup'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
+#!
+#!
+#! Configurables
+#! -------------
+#!
+#! ------------ (1) Host Main Upload Function --------------- #
+#!
+#! @REQUIRED: Host Main Upload function
+#! Must be named specifically as such:
+#! <HostFuncPrefix>_UploadFile()
+isup_UploadFile() {
+  local _hostCode=${1}
+  local filepath=${2}
+  local filecnt=${3}
+  local pline=${4}
+  local filename="${filepath##*/}"
+  warnAndRetryUnknownError=false
+  exitUploadError=false
+  exitUploadNotAvailable=false
+  fileAlreadyDone=false
+  tor_identity="${RANDOM}"
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  MaxUploadSizeInBytes=42949672960
+  fsize=$(GetFileSize "$filepath" "false")
+  if ((fsize > MaxUploadSizeInBytes)); then
+    rm -f "${UploadTicket}"
+    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    return 1
+  fi
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUploadRetries; z++)); do
+    if [ $z -eq $MaxUploadRetries ] ; then
+      finalAttempt="true"
+    fi
+    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if isup_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
+      return 0
+    elif [ $z -lt $MaxUploadRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
+        fi
+      fi
+      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${UploadTicket}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${UploadTicket}"
+}
+#!
+#! 
----------- (2) Post File / Upload File Function --------------- # +#! +isup_PostFile() { + local filepath=$1 + local _hostCode=$2 + local filename=$3 + local fileCnt=$4 + local retryCnt=$5 + local finalAttempt=$6 + local pline=${7} + UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" + local ar_HUP[0]="http://isupload.com/cgi-bin/upload.cgi?upload_type=file&utype=anon" + local arrSize=${#ar_HUP[@]} + local index=$(($RANDOM % $arrSize)) + local RandomHostUploadUrl=${ar_HUP[$index]} + PostUrlHost="$RandomHostUploadUrl" + arrFiles=("$filepath") + trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_upload --insecure -i \ + -H "Content-Type: multipart/form-data" \ + -F "sess_id=" \ + -F "utype=anon" \ + -F "file_descr=" \ + -F "file_public=1" \ + -F "link_rcpt=" \ + -F "link_pass=" \ + -F "to_folder=" \ + -F "upload=Start upload" \ + -F "keepalive=1" \ + -F "file_0=@$filepath" \ + "${PostUrlHost}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" + fi + if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then + hash=$(grep -oPi '(?<="file_code":").*?(?=".*$)' <<< "$response") + filesize=$(GetFileSize "$filepath" "false") + downloadLink="https://isupload.com/${hash}" + echo -e "${GREEN}| Upload Success${NC}" + echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" + echo -e "| Link: ${YELLOW}${downloadLink}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + return 0 + else + err=$(grep -oPi '(?<="file_status":").*?(?=")' <<< "$response") + if [ "${finalAttempt}" == "true" ] ; then + printf "\\n" + echo -e "${RED}| Upload failed. Status: ${err}${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" + exitUploadError=true + return 1 + else + return 1 + fi + fi +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/up_kouploader.sh b/hosts/up_kouploader.sh index 6439b90..d1f45d6 100644 --- a/hosts/up_kouploader.sh +++ b/hosts/up_kouploader.sh @@ -1,6 +1,6 @@ #! Name: up_kouploader.sh #! Author: kittykat -#! Version: 2024.10.23 +#! Version: 2024.12.26 #! Desc: Add support for uploading files to kouploader.jp #! Info: Files are accessible at https://kouploader.jp/ #! MaxSize: 240MB @@ -116,7 +116,7 @@ ko_PostFile() { echo -e "${RED}| Failed to start an upload [1]${NC}" warnAndRetryUnknownError=true if [ "${finalAttempt}" == "true" ] ; then - failedRetryDownload "${remote_url}" "Failed to start an upload [1]" "" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to start an upload [1]" "" fi return 1 else diff --git a/hosts/up_linxx.sh b/hosts/up_linxx.sh index 95d7ff6..993b17b 100644 --- a/hosts/up_linxx.sh +++ b/hosts/up_linxx.sh @@ -24,7 +24,7 @@ HostFuncPrefix='linx' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! 
_UploadFile() linx_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=536870912 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if linx_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=536870912 + jira_PostUrlHost='https://linxx.net/upload/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://linxx.net/upload/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! 
-linx_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://linxx.net/upload/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://linxx.net/upload/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_moocloud.sh b/hosts/up_moocloud.sh index 8c65f00..256d2aa 100644 --- a/hosts/up_moocloud.sh +++ b/hosts/up_moocloud.sh @@ -24,7 +24,7 @@ HostFuncPrefix='moo' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() moo_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=5368709120 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. 
($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if moo_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=5368709120 + jira_PostUrlHost='https://file.tools.moocloud.ch/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://file.tools.moocloud.ch/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! -moo_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://file.tools.moocloud.ch/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://file.tools.moocloud.ch/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_nantes.sh b/hosts/up_nantes.sh index d7d871c..e05a930 100644 --- a/hosts/up_nantes.sh +++ b/hosts/up_nantes.sh @@ -24,7 +24,7 @@ HostFuncPrefix='nant' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! 
------------ (1) Host Main Upload Function --------------- # @@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() nant_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=10737418240 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if nant_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=10737418240 + jira_PostUrlHost='https://fichiers.nantes.cloud/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://fichiers.nantes.cloud/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! 
-nant_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://fichiers.nantes.cloud/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://fichiers.nantes.cloud/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_netlib.sh b/hosts/up_netlib.sh index d42f399..6502ded 100644 --- a/hosts/up_netlib.sh +++ b/hosts/up_netlib.sh @@ -24,7 +24,7 @@ HostFuncPrefix='nlib' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() nlib_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=10737418240 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. 
($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if nlib_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=10737418240 + jira_PostUrlHost='https://mhep.netlib.re/jirafeau/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://mhep.netlib.re/jirafeau/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! -nlib_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://mhep.netlib.re/jirafeau/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://mhep.netlib.re/jirafeau/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_offshorecat.sh b/hosts/up_offshorecat.sh index 01f2239..ea4cb5c 100644 --- a/hosts/up_offshorecat.sh +++ b/hosts/up_offshorecat.sh @@ -1,6 +1,6 @@ #! Name: up_offshorecat.sh #! Author: kittykat -#! Version: 2024.11.12 +#! Version: 2024.12.23 #! Desc: Add support for uploading files to offshore.cat #! Info: Files are accessible at https://files.offshore.cat/ #! 
MaxSize: 4GB
@@ -103,7 +103,7 @@ offcat_PostFile() {
     apikey="$RandomOscKey"
     response=$(tor_curl_upload --insecure -i \
       -H "Content-Type: multipart/form-data" \
-      -H "apiKey: $apikey" \
+      -H "x-api-key: $apikey" \
       -F "files[]=@$filepath" \
       "${PostUrlHost}")
     if [ "${DebugAllEnabled}" == "true" ] ; then
diff --git a/hosts/up_pixeldrain.sh b/hosts/up_pixeldrain.sh
index 32dfe6e..947fe98 100644
--- a/hosts/up_pixeldrain.sh
+++ b/hosts/up_pixeldrain.sh
@@ -104,24 +104,22 @@ pd_PostFile() {
   echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
   for ((i=1; i<=10; i++)); do
     tor_identity="${RANDOM}"
-    PostUrlHost='https://pixeldrain.com/api/file'
+    PostUrlHost='https://pixeldrain.com/api/file/'
     arrFiles=("$filepath")
     trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
     GetRandomPdKey
     apikey="$RandomPdKey"
-    apikey_enc=$(echo -n ":$apikey" | base64)
-    response=$(tor_curl_upload --insecure -i \
-      -H "Authorization: Basic $apikey_enc" \
-      "${PostUrlHost}" \
-      -F "file=@$filepath")
+    response=$(tor_curl_upload --insecure -X PUT \
+      -u :"$apikey" "${PostUrlHost}" -T "${filepath}")
     if [ "${DebugAllEnabled}" == "true" ] ; then
       debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"apikey: ${apikey}"$'\n'"${response}"
     fi
     if grep -Eqi '"success":false,"value":"ip_banned"' <<< "$response"; then
       echo -e "${YELLOW}Banned IP${NC} (retry)..."
       continue
-    else
-      break
+    elif grep -Eqi '"success":false' <<< "$response" && ! grep -qi '"authentication_failed"' <<< "$response"; then
+      echo -e "${YELLOW}Unexpected response${NC} (retry)..."
+      continue
     fi
     if grep -Eqi '"success":false,"value":"authentication_failed"' <<< "$response"; then
       printf "\\n"
@@ -130,6 +128,7 @@ pd_PostFile() {
       exitUploadError=true
       return 1
     fi
+    break
   done
   if grep -Eqi '"success":true,"id":"' <<< "${response}" ; then
     hash=$(grep -oPi '(?<="success":true,"id":").*?(?=".*$)' <<< "$response")
diff --git a/hosts/up_skrepr.sh b/hosts/up_skrepr.sh
index c03b54e..086f784 100644
--- a/hosts/up_skrepr.sh
+++ b/hosts/up_skrepr.sh
@@ -24,7 +24,7 @@ HostFuncPrefix='skpr'
 ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
 #!
 #!
-#! Configurables
+#! Jirafeau Host
 #! -------------
 #!
 #! ------------ (1) Host Main Upload Function --------------- #
@@ -33,95 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi
 #! Must be named specifically as such:
 #! 
_UploadFile() skpr_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if skpr_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=104857600 + jira_PostUrlHost='https://transfer.skrepr.com/script.php' + jira_filetype=1 + jira_timeval="week" + jira_downloadLinkPrefix='https://transfer.skrepr.com/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! -skpr_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://transfer.skrepr.com/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=week" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://transfer.skrepr.com/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! 
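# The Jirafeau wrappers above (and below) now only set jira_* variables and
# delegate to jira_UploadFile(), which the 2024.12.22 entry in mad.sh's
# changelog (later in this log) says was moved into mad.sh; that function's
# body lies outside the hunks captured here. The sketch below is a rough
# reconstruction from the duplicated per-host code this diff removes -- retry,
# ticket, and trap handling are omitted, and the role of jira_filetype is not
# visible in the removed hunks -- so treat it as an assumption rather than
# mad.sh's actual implementation.
jira_UploadFile_sketch() {
  local _hostCode=$1 filepath=$2 filecnt=$3 pline=$4
  local response hash fsize
  fsize=$(GetFileSize "$filepath" "false")
  if ((fsize > jira_MaxUploadSizeInBytes)); then
    failedUpload "$pline" "$filepath" "$_hostCode" "File too large for $_hostCode"
    return 1
  fi
  # Same multipart POST the removed per-host PostFile functions performed,
  # parameterized by the jira_* variables each thin wrapper sets.
  response=$(tor_curl_upload --insecure -i \
    -H "Content-Type: multipart/form-data" \
    -F "time=${jira_timeval}" \
    -F "file=@${filepath}" \
    "${jira_PostUrlHost}")
  if grep -Eqi ' 200 ' <<< "${response}" ; then
    # The Jirafeau hash is the second-to-last response line, as in the removed code
    hash=$(echo "$response" | tail -2 | head -1)
    hash=${hash//[$'\t\r\n']}
    successUpload "$pline" "$filepath" "$_hostCode" "$fsize" \
      "${jira_downloadLinkPrefix}${hash}&p=1" "{$response}"   # '&p=1' mirrors the removed link builders
    return 0
  fi
  failedRetryUpload "$pline" "$filepath" "$_hostCode" "Failed to upload file" ""
  return 1
}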
diff --git a/hosts/up_soyjak.sh b/hosts/up_soyjak.sh index f25e468..176dc7c 100644 --- a/hosts/up_soyjak.sh +++ b/hosts/up_soyjak.sh @@ -24,7 +24,7 @@ HostFuncPrefix='soy' ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' #! #! -#! Configurables +#! Jirafeau Host #! ------------- #! #! ------------ (1) Host Main Upload Function --------------- # @@ -33,103 +33,10 @@ ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefi #! Must be named specifically as such: #! _UploadFile() soy_UploadFile() { - local _hostCode=${1} - local filepath=${2} - local filecnt=${3} - local pline=${4} - local filename="${filepath##*/}" - warnAndRetryUnknownError=false - exitUploadError=false - exitUploadNotAvailable=false - fileAlreadyDone=false - tor_identity="${RANDOM}" - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - MaxUploadSizeInBytes=524288000 - fsize=$(GetFileSize "$filepath" "false") - if ((fsize > MaxUploadSizeInBytes)); then - rm -f "${UploadTicket}" - echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" - return 1 - fi - finalAttempt="false" - for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then - finalAttempt="true" - fi - trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if soy_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then - return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then - break - fi - if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" - fi - fi - if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" - fi - rm -f "${UploadTicket}" - break - fi - echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" - sleep 3 - fi - done - rm -f "${UploadTicket}" + jira_MaxUploadSizeInBytes=524288000 + jira_PostUrlHost='https://soyjak.download/script.php' + jira_filetype=1 + jira_timeval="month" + jira_downloadLinkPrefix='https://soyjak.download/f.php?h=' + jira_UploadFile ${1} ${2} ${3} ${4} } -#! -#! ----------- (2) Post File / Upload File Function --------------- # -#! 
-soy_PostFile() { - local filepath=$1 - local _hostCode=$2 - local filename=$3 - local fileCnt=$4 - local retryCnt=$5 - local finalAttempt=$6 - local pline=${7} - UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" - echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" - tor_identity="${RANDOM}" - PostUrlHost='https://soyjak.download/script.php' - arrFiles=("$filepath") - trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - response=$(tor_curl_upload --insecure -i \ - -H "Content-Type: multipart/form-data" \ - -F "time=month" \ - -F "file=@${filepath}" \ - "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" - fi - if grep -Eqi ' 200 ' <<< "${response}" ; then - hash=$(echo "$response" | tail -2 | head -1) - hash=${hash//[$'\t\r\n']} - filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://soyjak.download/f.php?h=${hash}&p=1" - echo -e "${GREEN}| Upload Success${NC}" - echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" - echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" - return 0 - else - err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then - printf "\\n" - echo -e "${RED}| Upload failed. Status: ${err}${NC}" - failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" - exitUploadError=true - return 1 - else - return 1 - fi - fi -} -#! -#! --------------- Host Extra Functions ------------------- # -#! diff --git a/hosts/up_syspro.sh b/hosts/up_syspro.sh new file mode 100644 index 0000000..48ed7dc --- /dev/null +++ b/hosts/up_syspro.sh @@ -0,0 +1,137 @@ +#! Name: up_syspro.sh +#! Author: kittykat +#! Version: 2024.10.25 +#! Desc: Add support for uploading files to share.syspro.com.br +#! Info: https://share.syspro.com.br/share// +#! MaxSize: 2GB +#! Expire: ?? +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output +#! HostFuncPrefix: ie. 'fh' -- fh_UploadFile() +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +HostCode='sysp' +HostNick='syspro' +HostFuncPrefix='sysp' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@' +#! +#! +#! Configurables +#! ------------- +#! +#! ------------ (1) Host Main Upload Function --------------- # +#! +#! @REQUIRED: Host Main Upload function +#! Must be named specifically as such: +#! 
_UploadFile()
+sysp_UploadFile() {
+  local _hostCode=${1}
+  local filepath=${2}
+  local filecnt=${3}
+  local pline=${4}
+  local filename="${filepath##*/}"
+  warnAndRetryUnknownError=false
+  exitUploadError=false
+  exitUploadNotAvailable=false
+  fileAlreadyDone=false
+  tor_identity="${RANDOM}"
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  MaxUploadSizeInBytes=2147483648
+  fsize=$(GetFileSize "$filepath" "false")
+  if ((fsize > MaxUploadSizeInBytes)); then
+    rm -f "${UploadTicket}"
+    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    return 1
+  fi
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUploadRetries; z++)); do
+    if [ $z -eq $MaxUploadRetries ] ; then
+      finalAttempt="true"
+    fi
+    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if sysp_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
+      return 0
+    elif [ $z -lt $MaxUploadRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
+        fi
+      fi
+      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${UploadTicket}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${UploadTicket}"
+}
+#!
+#! ----------- (2) Post File / Upload File Function --------------- #
+#! 
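# A worked example of the Location parsing done in sysp_PostFile() below,
# which slices two query parameters out of the upload.cgi redirect. The
# redirect line here is illustrative only -- its exact shape is inferred from
# the grep lookbehinds, not from a captured server response:
loc='Location: http://share.syspro.com.br//?&filename=Ab3xYz&del_id=99&filename_original=backup.7z&status=OK'
fname=$(grep -oPi -m 1 '(?<=http://share.syspro.com.br//\?&filename=).*?(?=&del_id.*$)' <<< "$loc")
fnameorig=$(grep -oPi -m 1 '(?<=&filename_original=).*?(?=&status.*$)' <<< "$loc")
echo "http://share.syspro.com.br/$fname/$fnameorig"   # -> http://share.syspro.com.br/Ab3xYz/backup.7z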
+sysp_PostFile() {
+  local filepath=$1
+  local _hostCode=$2
+  local filename=$3
+  local fileCnt=$4
+  local retryCnt=$5
+  local finalAttempt=$6
+  local pline=${7}
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
+  tor_identity="${RANDOM}"
+  PostUrlHost='https://share.syspro.com.br/cgi-bin/upload.cgi?upload_id='
+  arrFiles=("$filepath")
+  trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+  response=$(tor_curl_upload --insecure -i \
+    -H "Content-Type: multipart/form-data" \
+    -F "tos=" \
+    -F "link_rcpt=" \
+    -F "link_pass=" \
+    -F "file_1=@${filepath}" \
+    "${PostUrlHost}")
+  if [ "${DebugAllEnabled}" == "true" ] ; then
+    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
+  fi
+  if grep -Eqi "Location: http://share\.syspro\.com\.br/" <<< "${response}" ; then
+    fname=$(grep -oPi -m 1 '(?<=http://share.syspro.com.br//\?&filename=).*?(?=&del_id.*$)' <<< "$response")
+    fnameorig=$(grep -oPi -m 1 '(?<=&filename_original=).*?(?=&status.*$)' <<< "$response")
+    filesize=$(GetFileSize "$filepath" "false")
+    downloadLink="http://share.syspro.com.br/$fname/$fnameorig"
+    echo -e "${GREEN}| Upload Success${NC}"
+    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
+    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
+    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
+    return 0
+  else
+    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
+    if [ "${finalAttempt}" == "true" ] ; then
+      printf "\\n"
+      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
+      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
+      exitUploadError=true
+      return 1
+    else
+      return 1
+    fi
+  fi
+}
+#!
+#! --------------- Host Extra Functions ------------------- #
+#! 
diff --git a/hosts/up_torup.sh b/hosts/up_torup.sh
index f3136b6..6f9a204 100644
--- a/hosts/up_torup.sh
+++ b/hosts/up_torup.sh
@@ -1,6 +1,6 @@
 #! Name: up_torup.sh
 #! Author: kittykat
-#! Version: 2024.11.09
+#! Version: 2024.12.26
 #! Desc: Add support for uploading files to ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion
 #! Info: Files are accessible at http://ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion/download/
 #! MaxSize: 150MB
@@ -118,7 +118,7 @@ torp_PostFile() {
       echo -e "${RED}| Failed to start an upload [1]${NC}"
       warnAndRetryUnknownError=true
       if [ "${finalAttempt}" == "true" ] ; then
-        failedRetryDownload "${remote_url}" "Failed to start an upload [1]" ""
+        failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to start an upload [1]" ""
       fi
       return 1
     else
@@ -134,7 +134,7 @@ torp_PostFile() {
       echo -e "${RED}| Failed to start an upload [2]${NC}"
       warnAndRetryUnknownError=true
       if [ "${finalAttempt}" == "true" ] ; then
-        failedRetryDownload "${remote_url}" "Failed to start an upload [2]" ""
+        failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to start an upload [2]" ""
       fi
       return 1
     else
diff --git a/hosts/up_uploadbay.sh b/hosts/up_uploadbay.sh
new file mode 100644
index 0000000..c5fec88
--- /dev/null
+++ b/hosts/up_uploadbay.sh
@@ -0,0 +1,134 @@
+#! Name: up_uploadbay.sh
+#! Author: kittykat
+#! Version: 2024.12.23
+#! Desc: Add support for uploading files to uploadbay.net
+#! Info: Files are accessible at https://uploadbay.net/uploads/
+#! MaxSize: 100MB
+#! Expire: ??
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! 
------------ REQUIRED SECTION ---------------
+#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output
+#!   HostFuncPrefix: ie. 'fh' -- fh_UploadFile()
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+HostCode='ubay'
+HostNick='uploadbay'
+HostFuncPrefix='ubay'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
+#!
+#!
+#! Configurables
+#! -------------
+#!
+#! ------------ (1) Host Main Upload Function --------------- #
+#!
+#! @REQUIRED: Host Main Upload function
+#! Must be named specifically as such:
+#! _UploadFile()
+ubay_UploadFile() {
+  local _hostCode=${1}
+  local filepath=${2}
+  local filecnt=${3}
+  local pline=${4}
+  local filename="${filepath##*/}"
+  warnAndRetryUnknownError=false
+  exitUploadError=false
+  exitUploadNotAvailable=false
+  fileAlreadyDone=false
+  tor_identity="${RANDOM}"
+  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
+  MaxUploadSizeInBytes=104857600
+  fsize=$(GetFileSize "$filepath" "false")
+  if ((fsize > MaxUploadSizeInBytes)); then
+    rm -f "${UploadTicket}"
+    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
+    return 1
+  fi
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUploadRetries; z++)); do
+    if [ $z -eq $MaxUploadRetries ] ; then
+      finalAttempt="true"
+    fi
+    trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if ubay_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
+      return 0
+    elif [ $z -lt $MaxUploadRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
+        fi
+      fi
+      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${UploadTicket}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${UploadTicket}"
+}
+#!
+#! ----------- (2) Post File / Upload File Function --------------- #
+#! 
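# A worked example of the success check in ubay_PostFile() below, which greps
# the returned page for the uploaded file's anchor tag. The HTML line here is
# illustrative -- inferred from the regex, not a captured uploadbay response;
# the pattern is the same one the host code builds with quote concatenation:
html="<a class='file one' href='https://uploadbay.net/uploads/abc123.bin'>abc123.bin</a>"
url=$(grep -oPi "(?<=href=').*?(?=')" <<< "$html")
echo "${url//\\/}"   # -> https://uploadbay.net/uploads/abc123.bin (backslashes stripped, as in the host code)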
+ubay_PostFile() { + local filepath=$1 + local _hostCode=$2 + local filename=$3 + local fileCnt=$4 + local retryCnt=$5 + local finalAttempt=$6 + local pline=${7} + UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" + echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" + tor_identity="${RANDOM}" + PostUrlHost='https://uploadbay.net/upload.php' + arrFiles=("$filepath") + trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_upload --insecure -i \ + -H "Content-Type: multipart/form-data" \ + -H "expiry=-1" \ + -F "fileToUpload=@${filepath}" \ + "${PostUrlHost}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" + fi + if grep -Eqi "class='file one' href='https://uploadbay.net/uploads/" <<< "${response}" ; then + url=$(grep -oPi '(?<=href='"'"').*?(?='"'"'.*$)' <<< "$response") + filesize=$(GetFileSize "$filepath" "false") + downloadLink="${url//\\/}" + echo -e "${GREEN}| Upload Success${NC}" + echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" + echo -e "| Link: ${YELLOW}${downloadLink}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + return 0 + else + err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") + if [ "${finalAttempt}" == "true" ] ; then + printf "\\n" + echo -e "${RED}| Upload failed. Status: ${err}${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" + exitUploadError=true + return 1 + else + return 1 + fi + fi +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/up_uploadhive.sh b/hosts/up_uploadhive.sh index e95b62d..f6a4afe 100644 --- a/hosts/up_uploadhive.sh +++ b/hosts/up_uploadhive.sh @@ -1,6 +1,6 @@ #! Name: up_uploadhive.sh #! Author: kittykat -#! Version: 2024.11.01 +#! Version: 2024.12.25 #! Desc: Add support for uploading files to uploadhive.com #! Info: Files are accessible at https://uploadhive.com/ #! 
MaxSize: 5GB
@@ -97,6 +97,32 @@ uhive_PostFile() {
   echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
   tor_identity="${RANDOM}"
   PostUrlHost='https://fs430.uploadhive.com/cgi-bin/upload.cgi'
+  if [[ "$UploadHiveRandomizeExt" == "true" ]]; then
+    randomext=$(mktemp -u XXX)
+    randomext=${randomext,,}
+    origext=${filepath##*.}
+    if [[ "$origext" == "7z" ]]; then
+      tmpfilepath="${filepath%.*}--7_.${randomext}"
+    elif [[ "$origext" == "zip" ]]; then
+      tmpfilepath="${filepath%.*}--z_.${randomext}"
+    elif grep -Eqi '\.part.*\.rar' <<< "${filepath##*/}" ; then
+      partnum="${filepath##*.part}"
+      partnum="${partnum%.rar*}"
+      echo -e "$partnum"
+      tmpfilepath="${filepath%.part*}_-p${partnum}--r_.${randomext}"
+    elif [[ "$origext" == "rar" ]]; then
+      tmpfilepath="${filepath%.*}--r_.${randomext}"
+    elif [[ "$origext" == "tar" ]]; then
+      tmpfilepath="${filepath%.*}--t_.${randomext}"
+    elif [[ "${filepath##*/}" == *".7z."* ]]; then
+      tmpfilepath="${filepath%.*}_-7--${origext}_.${randomext}"
+    else
+      tmpfilepath="${filepath%.*}--${origext}_.${randomext}"
+    fi
+    mv "$filepath" "$tmpfilepath"
+  else
+    tmpfilepath=$filepath
+  fi
   arrFiles=("$filepath")
   trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
   response=$(tor_curl_upload --insecure -i \
@@ -109,11 +135,14 @@ uhive_PostFile() {
     -F "file_descr=" \
     -F "file_public=1" \
     -F "upload=Start upload" \
-    -F "file_0=@$filepath" \
+    -F "file_0=@$tmpfilepath" \
     "${PostUrlHost}")
   if [ "${DebugAllEnabled}" == "true" ] ; then
     debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
   fi
+  if [[ "$UploadHiveRandomizeExt" == "true" ]]; then
+    mv "$tmpfilepath" "$filepath"
+  fi
   if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then
     hash=$(grep -oPi '(?<=file_code":").*?(?=".*$)' <<< "$response")
     filesize=$(GetFileSize "$filepath" "false")
diff --git a/hosts/uploadbay.sh b/hosts/uploadbay.sh
new file mode 100644
index 0000000..de1da57
--- /dev/null
+++ b/hosts/uploadbay.sh
@@ -0,0 +1,31 @@
+#! Name: uploadbay.sh
+#! Author: kittykat
+#! Version: 2024.12.23
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#!   Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#!   HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!   HostDomainRegex: The regex used to verify matching urls
+HostCode='ubay'
+HostNick='uploadbay'
+HostFuncPrefix='direct'
+HostUrls='uploadbay.net'
+HostDomainRegex='^(http|https)://(.*\.)?uploadbay\.net/uploads/'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! This is a direct= download host, so all the functions are already in mad.sh
+#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will
+#! 
call the direct_DownloadFile() function already in mad.sh diff --git a/hosts/uploadhive.sh b/hosts/uploadhive.sh index dc2fabd..f4b1dd9 100644 --- a/hosts/uploadhive.sh +++ b/hosts/uploadhive.sh @@ -1,6 +1,6 @@ #! Name: uploadhive.sh #! Author: kittykat -#! Version: 2024.09.13 +#! Version: 2024.12.25 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! @@ -363,6 +363,63 @@ uhive_GetFile() { fi done rm -f "$flockDownload"; + if grep -Eqi '^.*--7_\....$' <<< "$filename" ; then + echo -e "" + echo -e "${BLUE}| Found mad upload random extension file (renaming 7z)...${NC}" + origext="7z" + mv "$file_path" "${file_path%\--7_*}.$origext" + filename="${filename%\--7_*}.$origext" + file_path="${file_path%\--7_*}.$origext" + elif grep -Eqi '^.*--z_\....$' <<< "$filename" ; then + echo -e "" + echo -e "${BLUE}| Found mad upload random extension file (renaming zip)...${NC}" + origext="zip" + mv "$file_path" "${file_path%\--z_*}.$origext" + filename="${filename%\--z_*}.$origext" + file_path="${file_path%\--z_*}.$origext" + elif grep -Eqi '^.*_-p.*--r_\....$' <<< "$filename" ; then + echo -e "" + echo -e "${BLUE}| Found mad upload random extension file (renaming mp rar)...${NC}" + origext="rar" + partnum="${filename##*_-p}" + partnum="${partnum%--r_*}" + newfilepath="${file_path%_-p*}.part${partnum}.$origext" + mv "$file_path" "$newfilepath" + filename="${newfilepath##*/}" + file_path="${newfilepath}" + elif grep -Eqi '^.*--r_\....$' <<< "$filename" ; then + echo -e "" + echo -e "${BLUE}| Found mad upload random extension file (renaming rar)...${NC}" + origext="rar" + mv "$file_path" "${file_path%--r_*}.$origext" + filename="${filename%--r_*}.$origext" + file_path="${file_path%--r_*}.$origext" + elif grep -Eqi '^.*--t_\....$' <<< "$filename" ; then + echo -e "" + echo -e "${BLUE}| Found mad upload random extension file (renaming tar)...${NC}" + origext="tar" + mv "$file_path" "${file_path%--t_*}.$origext" + filename="${filename%--t_*}.$origext" + file_path="${file_path%--t_*}.$origext" + elif grep -Eqi '^.*_-7--..._\....$' <<< "$filename" ; then + echo -e "" + echo -e "${BLUE}| Found mad upload random extension file (renaming)...${NC}" + origext=${filename##*--} + origext=${origext%_*} + newfilepath="${file_path%--*}.$origext" + newfilepath="${newfilepath//_-7/.7z}" + mv "$file_path" "$newfilepath" + filename="${newfilepath##*/}" + file_path="${newfilepath}" + elif grep -Eqi '^.*--..._\....$' <<< "$filename" ; then + echo -e "" + echo -e "${BLUE}| Found mad upload random extension file (renaming)...${NC}" + origext=${filename##*--} + origext=${origext%_*} + mv "$file_path" "${file_path%--*}.$origext" + filename="${filename%--*}.$origext" + file_path="${file_path%--*}.$origext" + fi ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" return 0 } diff --git a/mad.sh b/mad.sh index 6bd7f9f..e77396d 100644 --- a/mad.sh +++ b/mad.sh @@ -31,73 +31,54 @@ # * klonkerz - feedback and suggestions, url only processing # * Everyone who provided feedback and helped test.. 
and those who wish to remain anonymous
-ScriptVersion=2024.11.28
+ScriptVersion=2024.12.26
 #=================================================
 # Recent Additions
+# 2024.12.26 - [up_kouploader / up_axfc / up_torup] Fixed failedRetryUpload (was using download logging)
+#              * Thanks Belky
+# 2024.12.26 - [anonfile / up_anonfile] Add anonfile.de as upload / download host
+# 2024.12.25 - [dashfile / up_dashfile] Add dashfile.net as upload / download host
+# 2024.12.25 - [isupload] Change to use tor_curl_request_extended (server response is often slow)
+#            - Accept 200 OK response to continue (do not require filename / filesize)
+# 2024.12.25 - [fileblade] Add response handling for free file download disallowed > 100MB.
+#              "The file owner does not allow FREE users to download files which are over 100 MB"
+# 2024.12.25 - [mad] Add "UploadHiveRandomizeExt" option to config with default=true
+# 2024.12.25 - [uploadhive / up_uploadhive] Update renaming random ext files to their original names
+#            - Handle multipart 7z & rar (abc.7z.###, abc.part#.rar)
+# 2024.12.25 - [syspro / up_syspro] Add share.syspro.com.br as upload / download host
+# 2024.12.24 - [mad] Add EnabledUploadHosts / EnabledDownloadHosts setting to fine-tune which hosts to use
+#              ** Options: recommended, online, all (Default=recommended)
+#            - recommended: loads all hosts verified working with MAD
+#            - online: loads all hosts available online / working (includes captcha / js restricted)
+#            - all: loads all hosts in hosts folder
+# 2024.12.24 - [up_ateasystems / ateasystems] Add share.ateasystems.com as upload / download host
+# 2024.12.23 - [up_uploadbay / uploadbay] Add uploadbay.net as upload / download host
+# 2024.12.23 - [up_herbolistique / herbolistique] Add transfert.herbolistique.com as upload / download host
+# 2024.12.23 - [uploadhive] Auto-rename random extension downloads
+# 2024.12.23 - [up_uploadhive] Change upload file extension to random 3 letters (uhive blocks .7z, .zip, .rar now)
+# 2024.12.23 - [up_offshorecat] Fixed upload. Updated apikey.
+# 2024.12.23 - [up_fileditch] Fixed upload. 
Added response handling for Tor Blocked node (retries) +# 2024.12.23 - [up_freesocial / freesocial] Add files.freesocial.co as upload / download host +# 2024.12.23 - [up_cyssoux / cyssoux] Add partage.cyssoux.fr as upload / download host +# 2024.12.22 - [mad] Add jira_Upload function -- used for all jirafeau hosts +# 2024.12.22 - [up_*AllJiraHosts*] Consolidated / moved all hosts upload functions to mad.sh +# - Minimized jira host code (~6000 lines of duplicates removed) +# - Jira hosts: acid, anarchaserver, depotkaz, dictvm, eddowding, familleflender, filesquid, +# free4e, harrault, linxx, moocloud, nantes, netlib, skrepr, soyjak +# 2024.12.20 - [fileblade / up_fileblade] Add fileblade.com as upload / download host +# 2024.12.20 - [isupload / up_isupload] Add isupload.com as upload / download host +# 2024.12.15 - [mediafire] Add mediafire download link processing +# 2024.12.12 - [dictvm / up_dictvm] Add dictvm.org as upload / download host +# 2024.12.12 - [eddowding / up_eddowding] Add eddowding.com as upload / download host +# 2024.12.12 - [up_pixeldrain] Modify upload to use PUT +# 2024.12.12 - [mad] Update pixeldrain api key +# 2024.12.09 - [ranoz] Fix filenames with spaces +# 2024.11.29 - [innocent] Update to use tor_curl_request_extended for head/get +# 2024.11.29 - [quax] Update 404 Not found response handling # 2024.11.27 - [up_ranoz] Modify download link to not use the upload url ticket link # 2024.11.26 - [filehaus] Handle "404 Not found" on first instance # 2024.11.25 - [up_moocloud / moocloud] Add moocloud.ch as an upload and download host # 2024.11.24 - [uploadhive] Handle "Error creating download link" response -- do not mark Removed -# 2024.11.23 - [filehaus] Use tor_curl_request_extended for head / get for filehaus urls -# 2024.11.23 - [mad] Make tor_curl_request_extended a random timeout between 30-60 seconds -# 2024.11.22 - [up_quax, quax] Add qu.ax as an upload and download host -# 2024.11.21 - [filedot] Fix check for post filename -# 2024.11.20 - [gofile] Handle parsing parent gofile url into multiple download urls -# (still needs updating to handle child urls gofile.io/download/web//file) -# 2024.11.19 - [mad] Add updateUrlDownload function to handle updating a url -# (ie. parent gofile url with children urls) -# 2024.11.18 - [up_fileditch / fileditch] Add fileditch.com as upload and download host -# 2024.11.17 - [innocent] Fix "Fetching file info". Support resume downloads. 
-# 2024.11.16 - [mad] Fix reload on uploads.txt modified (uploads: filemode) -# 2024.11.16 - [up_*] Fix removal of upload ticket if filesize is not supported -# 2024.11.15 - [familleflender] Add famille-flender.fr as download host -# 2024.11.15 - [up_familleflender] Add famille-flender.fr as upload host -# 2024.11.15 - [up_filehaus] Finish the uploader (the server is back online) -# 2024.11.14 - [up_skrepr, skrepr] Add transfer.skrepr.com as upload and download host -# 2024.11.13 - [up_pixeldrain] Add pixeldrain as an upload host -# 2024.11.13 - [mad] Add pixeldrain apikey section to allow pd uploads -# 2024.11.13 - [up_filesquid] Add "No password nor allowed IP" response handling -# 2024.11.12 - [mad] Fix uploads.txt status marking (urls / messages) containg '&' chars -# 2024.11.12 - [up_torup] Max upload filesize changed to 150MB -# 2024.11.12 - [up_uploadee] Add upload.ee as an upload host -# 2024.11.11 - [up_offcat] Add Offshore.cat as upload host -# 2024.11.11 - [mad] Add OffShore.cat Upload ApiKeys section to allow using Offshore.cat as upload host -# 2024.11.10 - [mad] Fix uploads.txt multi-terminal processing (use /uploads/temp_upload_handler.txt) -# 2024.11.10 - [1fichier] Add new "has been automatically deleted after its free hosting period expired" -# 2024.11.10 - [up_torup] Add TorUp as an upload host -# 2024.11.09 - [torup] Add TorUp as a download host (no resume) -# (ktgzpea2b76u7fgemiibp4a76onyybo4fw5gbsagtm6jrjzmgivppyyd.onion) -# 2024.11.08 - [nippy] Fix nippydrive.com. Update detection of temporarily unavailable response. -# 2024.11.08 - [up2share] Fix download url (https) -# 2024.11.08 - [up2share] Fix advertised filesize on a redirection -# 2024.11.06 - [SkipUrlsInDownloadsCompletedTxt] Fix blank url check -# 2024.11.06 - [ranoz] Add ranoz.gg as download host -# 2024.11.05 - [up_ranoz] Add ranoz.gg as upload host -# 2024.11.02 - [innocent] Disable rate monitor on download -# 2024.11.02 - [mad, innocent] Add 18s timeout on HEAD (get hack) for no response from host -# 2024.11.01 - [up_uploadhive] Add uploadhive as upload host -# 2024.11.01 - [innocent] Switch between 3 alternate head type attempts -# 2024.10.30 - [uploadev] Add additional file removed response handling -# 2024.10.30 - [anonsharing] Add AnonSharing.com as download host (only processes urls with fileid) -# 2024.10.29 - [kraken] Add recaptcha response handling -# 2024.10.29 - [bowfile] Add File has been removed by the site administrator response -# 2024.10.29 - [up_anonsharing] Add AnonSharing.com as upload host (unique url with fileid) -# 2024.10.29 - [uploadev] Add UploadEv.org as download host -# 2024.10.27 - [up_uploadev] Add UploadEv.org as upload host -# 2024.10.25 - [dosya] Add check for too many failed responses. try again later. 
-# 2024.10.24 - [bedrive / up_bedrive] Add bedrive.ru as download / upload host -# 2024.10.24 - [mad] Add pw: and ref: keyword values to ./data/downloads_completed.txt logging -# 2024.10.24 - [mad] Add extended connection-timeout request (tor_curl_request_extended) -# 2024.10.24 - [dosya] Use extended connection-timeout request for HEAD -# 2024.10.23 - [mad] Fix PostFailedUpload function call -# 2024.10.22 - [innocent / up_innocent] Add innocent.onion as download / upload host -# * Current download does not support resume -# 2024.10.22 - [mad] Few updates to direct download -# 2024.10.21 - [nippy] Update nippy to handle dbree (.me, .org) -# 2024.10.21 - [dbree] Add dbree.org -# 2024.10.17 - [dailyuploads] Add recaptcha detection and abort (js required) -# * Working on possible PJSCloud solution -# 2024.10.16 - [mad] Remove tor_curl_upload duplicate connect-timeout (thanks PeachX) # -- See ./documentation/!Changelog (Historical).txt for further changes -- # @@ -156,6 +137,15 @@ RateMonitorEnabled=true # @MyCurrent="pjscloud.sh,ocr_captcha.sh,SkipUrlsInDownloadsCompletedTxt.sh" LoadPlugins="" +# Enabled Hosts: (Upload / Download) [ "recommended", "online", "all" ] +# -- Last Checked / Updated: 2024.12.23 +# -- Available options -- +# * "recommended" -- Loads hosts that currently work with MAD +# * "online" -- Loads hosts that are available online (includes captcha / js restricted) +# * "all" -- Loads all hosts (blank/unknown is also "all") +# @Default="recommended" (only load hosts that are verified working with MAD) +EnabledUploadHosts="recommended" +EnabledDownloadHosts="recommended" #================================================= # UPLOAD SECTION @@ -167,7 +157,7 @@ MaxUploadRetries=4 # Selected upload hosts # @Default=1f,uhive,oshi -DefaultUploadHosts='1f,uhive,oshi' +DefaultUploadHosts='1f,oshi,gofile' # [RateMonitor - UploadSpeedMin]: Minimum required Upload Speed in bytes (used in coordination with UploadTimeoutInterval) # This helps ensure an upload doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true) @@ -178,8 +168,8 @@ UploadSpeedMin=10 # [RateMonitor - UploadTimeoutInterval]: Amount of time in seconds a transfer can remain below the UploadSpeedMin before it will timeout. # This helps ensure an upload doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true) # ie. curl: (28) Operation too slow. Less than 5000 bytes/sec transferred the last 60 seconds -# @Default=600 (10 min) -UploadTimeoutInterval=600 +# @Default=300 (5 min) +UploadTimeoutInterval=300 #================================================= @@ -273,6 +263,10 @@ CatnapDuration=1 # HOST SPECIFIC SECTION #------------------- +# [uploadhive]: Randomize extension (bypass 7z, zip, tar block) +# [{"file_code":"undef","file_status":"unallowed extension"}] +UploadHiveRandomizeExt=true + # [Oshi]: Control BaseUrl Override (none, oshiat, oshionion) # none: Will download from whatever url base is passed in # oshiat: Will convert all oshi urls to oshi.at (clearnet, faster) @@ -305,7 +299,7 @@ ar_fdUP[0]="user1|pass1" # - Setup free accounts: https://files.offshore.cat/register (use any username/pass - not verified) # - Get apikey: https://files.offshore.cat/dashboard/account (use login created above) # - The accounts are randomly selected for every download. 
@@ -305,7 +299,7 @@ ar_fdUP[0]="user1|pass1"
 # - Setup free accounts: https://files.offshore.cat/register (use any username/pass - not verified)
 # - Get apikey: https://files.offshore.cat/dashboard/account (use login created above)
 # - The accounts are randomly selected for every download.
-ar_oscKey[0]='CJZaU3yCQXZrozRmgXOLHjKqP1bbqbvEbJgOZig53WRgEHFHRTh5kIbEWbhEdyLq' # Shared general
+ar_oscKey[0]='4GDsorzK4e1yowrCiZaBnS992uKjiZVnXbByJr0kHmaAxarP26LkRV79MbKACXt0' # Shared general
 #ar_oscKey[1]='apikey' # Uncomment line to use a 2nd
 #ar_oscKey[2]='apikey' # Uncomment line to use a 3rd
 #ar_oscKey[3]='apikey' # Uncomment line to use a 4th
@@ -317,7 +311,7 @@ ar_oscKey[0]='CJZaU3yCQXZrozRmgXOLHjKqP1bbqbvEbJgOZig53WRgEHFHRTh5kIbEWbhEdyLq' # Shared general
 # - Setup free accounts:https://pixeldrain.com/register (use any username/pass - not verified)
 # - Get apikey: https://pixeldrain.com/user/api_keys (use login created above)
 # - The accounts are randomly selected for every download.
-ar_pdKey[0]='6a7c5c4e-aeb4-45ab-a11b-96799da02922' # Shared general
+ar_pdKey[0]='cad31e7f-676d-4d47-a41b-b32087bee0c2' # Shared general
 #ar_pdKey[1]='apikey' # Uncomment line to use a 2nd
 #ar_pdKey[2]='apikey' # Uncomment line to use a 3rd
 #ar_pdKey[3]='apikey' # Uncomment line to use a 4th
@@ -424,6 +418,44 @@ tor_curl_upload() {
     fi
   fi
 }
+SetEnabledUploadHosts() {
+  if [[ "$EnabledUploadHosts" == "recommended" ]] ; then
+    lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bowfile,up_depotkaz,up_familleflender,"
+    lstEnabledUploadHosts+="up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile,up_harrault,"
+    lstEnabledUploadHosts+="up_isupload,up_kouploader,up_moocloud,up_nantes,up_offshorecat,up_oshi,"
+    lstEnabledUploadHosts+="up_pixeldrain,up_quax,up_ranoz,up_skrepr,up_torup,up_turboonion,up_uploadee,"
+    lstEnabledUploadHosts+="up_uploadhive,up_uploadraja,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
+    lstEnabledUploadHosts+="up_dashfile,up_anonfile"
+  elif [[ "$EnabledUploadHosts" == "online" ]] ; then
+    lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bedrive,up_bowfile,up_depotkaz,"
+    lstEnabledUploadHosts+="up_familleflender,up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile,"
+    lstEnabledUploadHosts+="up_harrault,up_hexload,up_isupload,up_kouploader,up_kraken,up_moocloud,up_nantes,"
+    lstEnabledUploadHosts+="up_nippy,up_nofile,up_offshorecat,up_oshi,up_pixeldrain,up_quax,up_ranoz,"
+    lstEnabledUploadHosts+="up_shareonline,up_skrepr,up_torup,up_turboonion,up_uploadee,up_uploadhive,"
+    lstEnabledUploadHosts+="up_uploadraja,up_yolobit,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
+    lstEnabledUploadHosts+="up_dashfile,up_anonfile"
+  fi
+}
+SetEnabledDownloadHosts() {
+  if [[ "$EnabledDownloadHosts" == "recommended" ]] ; then
+    lstEnabledDownloadHosts="1fichier,acid,anarchaserver,anonsharing,biteblob,bowfile,cyssoux,dataupload,"
+    lstEnabledDownloadHosts+="depotkaz,dictvm,downloadgg,eddowding,eternalhosting,familleflender,fileblade,"
+    lstEnabledDownloadHosts+="fileditch,filedoge,filedot,filehaus,filesquid,firestorage,free4e,freesocial,"
+    lstEnabledDownloadHosts+="gofile,harrault,innocent,isupload,lainsafe,lainsafe_onion,linxx,mediafire,"
+    lstEnabledDownloadHosts+="moocloud,nantes,netlib,offshorecat,oshi,pixeldrain,quax,ranoz,skrepr,"
+    lstEnabledDownloadHosts+="tempfileme,tempsh,torup,turboonion,up2share,uploadee,uploadev,uploadhive,"
+    lstEnabledDownloadHosts+="youdbox,herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile"
+  elif [[ "$EnabledDownloadHosts" == "online" ]] ; then
+    lstEnabledDownloadHosts="1fichier,anonsharing,bedrive,biteblob,bowfile,click,cyssoux,"
+    lstEnabledDownloadHosts+="dailyuploads,dataupload,depotkaz,dictvm,dosya,downloadgg,eddowding,eternalhosting,"
+    lstEnabledDownloadHosts+="familleflender,fileblade,fileditch,filedoge,filedot,firestorage,"
+    lstEnabledDownloadHosts+="free4e,gofile,harrault,hexload,isupload,kraken,lainsafe,"
+    lstEnabledDownloadHosts+="lainsafe_onion,mediafire,moocloud,nantes,netlib,nippy,nofile,offshorecat,"
+    lstEnabledDownloadHosts+="oshi,pixeldrain,quax,ranoz,shareonline,skrepr,tempfileme,tempsh,torup,"
+    lstEnabledDownloadHosts+="turboonion,up2share,uploadee,uploadev,uploadhive,yolobit,youdbox,herbolistique,"
+    lstEnabledDownloadHosts+="uploadbay,ateasystems,syspro,dashfile,anonfile"
+  fi
+}
 GetRandomFiledotUser() {
   arrSize=${#ar_fdUP[@]}
   index=$(($RANDOM % $arrSize))
@@ -999,7 +1031,7 @@ successUpload() {
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
   mkdir -p "${WorkDir}/data"
   echo -e "$dateStamp [OK] file: ${filename}, host: ${HostCode}, dl: ${downloadLink}, ticket: ${cTicket}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/data/uploads_completed.txt"
-  if [ ! -z "$InputFile" ]; then
+  if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
     sed -i -e "s>^${pLine}.*>#& #OK# ${downloadLink//&/\\&}>g" "${InputFile}" #processed line
   fi
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
@@ -1024,7 +1056,7 @@ successUploadExists() {
   mkdir -p "${WorkDir}/uploads"
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
   echo -e "[EXISTS] ${filename}, ${HostCode}, ${downloadLink}" >> "${WorkDir}/uploads/results.txt"
-  if [ ! -z "$InputFile" ]; then
+  if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
     sed -i -e "s>^${pLine}.*>#& #OK# (Upload exists) ${message//&/\\&}>g" "${InputFile}" #processed line
   fi
   UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}"
@@ -1045,7 +1077,7 @@ failedUpload() {
   mkdir -p "${WorkDir}/uploads"
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
   echo -e "[FAIL] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt"
-  if [ ! -z "$InputFile" ]; then
+  if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
     sed -i -e "s>^${pLine}.*>#& #FAIL# ${message//&/\\&}>g" "${InputFile}" #processed line
   fi
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
@@ -1069,7 +1101,7 @@ failedRetryUpload() {
   mkdir -p "${WorkDir}/uploads"
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
   echo -e "[RETRY] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt"
-  if [ ! -z "$InputFile" ]; then
+  if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
     sed -i -e "s>^${pLine}.*>#& #RETRY# ${message//&/\\&}>g" "${InputFile}" #processed line
   fi
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
@@ -1094,7 +1126,7 @@ skipFailedUpload() {
   mkdir -p "${WorkDir}/uploads"
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
   echo -e "[SKIP/FAIL] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt"
-  if [ ! -z "$InputFile" ]; then
+  if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
     sed -i -e "s>^${pLine}.*>#& #FAIL# (Skip) ${message//&/\\&}>g" "${InputFile}" #processed line
   fi
   dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
-z "$pLine" ]; then sed -i -e "s>^${pLine}.*>#& #RETRY# (Bad Line)${message//&/\\&}>g" "${InputFile}" #processed line fi dateStamp=$(date '+%Y/%m/%d %H:%M:%S') @@ -1161,11 +1193,27 @@ GetRandomUA() { LoadMadDownloadHosts() { if [ -d "${ScriptDir}/hosts/" ]; then echo -e "${GREEN}Loading Download Hosts...${NC}" + SetEnabledDownloadHosts tHostFuncPrefixes="" cnthostsloaded=0 for fil in "${ScriptDir}"/hosts/*.sh ; do if [ -f "$fil" ]; then + if [[ "$EnabledDownloadHosts" == "recommended" ]] || [[ "$EnabledDownloadHosts" == "online" ]] ; then + readarray -d "," -t arrEnabledHosts <<< "${lstEnabledDownloadHosts}" + isfound=false + for hostfil in "${arrEnabledHosts[@]}"; + do + hostfil="${hostfil//[$'\t\r\n']}" + if [[ "${fil##*/}" == "${hostfil}.sh" ]] ; then + isfound=true + break + fi + done + if [[ "$isfound" == "false" ]]; then + continue + fi + fi if grep -Eq '^HostFuncPrefix='"'" "$fil" ; then tfilename="${fil##*/}" if [[ "$tfilename" == "up_"* ]] ; then @@ -1196,11 +1244,27 @@ LoadMadDownloadHosts() { LoadMadUploadHosts() { if [ -d "${ScriptDir}/hosts/" ]; then echo -e "${GREEN}Loading Upload Hosts...${NC}" + SetEnabledUploadHosts tHostFuncPrefixes="" cnthostsloaded=0 for fil in "${ScriptDir}"/hosts/up_*.sh ; do if [ -f "$fil" ]; then + if [[ "$EnabledUploadHosts" == "recommended" ]] || [[ "$EnabledUploadHosts" == "online" ]] ; then + readarray -d "," -t arrEnabledHosts <<< "${lstEnabledUploadHosts}" + isfound=false + for hostfil in "${arrEnabledHosts[@]}"; + do + hostfil="${hostfil//[$'\t\r\n']}" + if [[ "${fil##*/}" == "${hostfil}.sh" ]] ; then + isfound=true + break + fi + done + if [[ "$isfound" == "false" ]]; then + continue + fi + fi if grep -Eq '^HostFuncPrefix='"'" "$fil" ; then tfilename="${fil##*/}" _hostfuncprefix=$(grep -oP -m 1 '^HostFuncPrefix='"'"'\K.*?(?='"'"')' "$fil") @@ -1565,7 +1629,7 @@ install_curl_impersonate_lexiforest_fork() { tarOutput=$(tar -xvzf ${final_tarpath} -C $extract_location) rm -f "${ScriptDir}"/curl* mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/" - mv "$extract_location/curl_chrome124" "${ScriptDir}/" + mv "$extract_location/curl_chrome131" "${ScriptDir}/" echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." exit 0 else @@ -3441,6 +3505,109 @@ direct_DownloadFile() { done rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" } +jira_UploadFile() { + local _hostCode=${1} + local filepath=${2} + local filecnt=${3} + local pline=${4} + local filename="${filepath##*/}" + warnAndRetryUnknownError=false + exitUploadError=false + exitUploadNotAvailable=false + fileAlreadyDone=false + tor_identity="${RANDOM}" + UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" + fsize=$(GetFileSize "$filepath" "false") + if ((fsize > jira_MaxUploadSizeInBytes)); then + rm -f "${UploadTicket}" + echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $jira_MaxUploadSizeInBytes)" + failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. 
($fsize > $jira_MaxUploadSizeInBytes)" + return 1 + fi + finalAttempt="false" + for ((z=0; z<=$MaxUploadRetries; z++)); do + if [ $z -eq $MaxUploadRetries ] ; then + finalAttempt="true" + fi + trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if jira_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then + return 0 + elif [ $z -lt $MaxUploadRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" + fi + fi + if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${UploadTicket}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}" + sleep 3 + fi + done + rm -f "${UploadTicket}" +} +jira_PostFile() { + local filepath=$1 + local _hostCode=$2 + local filename=$3 + local fileCnt=$4 + local retryCnt=$5 + local finalAttempt=$6 + local pline=${7} + UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" + echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" + tor_identity="${RANDOM}" + arrFiles=("$filepath") + trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if ((jira_filetype == 1)) ; then + response=$(tor_curl_upload --insecure -i \ + -H "Content-Type: multipart/form-data" \ + -F "key=" \ + -F "time=$jira_timeval" \ + -F "file=@${filepath}" \ + "${jira_PostUrlHost}") + else + response=$(tor_curl_upload --insecure -i \ + -H "Content-Type: multipart/form-data" \ + -F "key=" \ + -F "time=$jira_timeval" \ + -F "files[]=@${arrFiles[@]}" \ + "${jira_PostUrlHost}") + fi + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}" + fi + if grep -Eqi ' 200 ' <<< "${response}" ; then + hash=$(echo "$response" | tail -2 | head -1) + hash=${hash//[$'\t\r\n']} + filesize=$(GetFileSize "$filepath" "false") + downloadLink="${jira_downloadLinkPrefix}${hash}&p=1" + echo -e "${GREEN}| Upload Success${NC}" + echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" + echo -e "| Link: ${YELLOW}${downloadLink}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + return 0 + else + err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") + if [ "${finalAttempt}" == "true" ] ; then + printf "\\n" + echo -e "${RED}| Upload failed. 
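jira_PostFile below is the shared poster for the Jirafeau-style hosts: one multipart
POST with key/time/file fields, answered by a plain-text body whose second-to-last line
carries the download hash. A standalone sketch of the same exchange with plain curl
(endpoint, retention value, paths, and link prefix are placeholders, not taken from any
real host script):

  jira_PostUrlHost="https://files.example.tld/script.php"    # placeholder endpoint
  jira_timeval="month"                                       # placeholder retention value
  response=$(curl --insecure -i \
    -H "Content-Type: multipart/form-data" \
    -F "key=" \
    -F "time=${jira_timeval}" \
    -F "file=@/tmp/test.bin" \
    "${jira_PostUrlHost}")
  hash=$(echo "$response" | tail -2 | head -1)               # hash rides on the 2nd-to-last body line
  echo "link: https://files.example.tld/f.php?h=${hash}&p=1" # actual prefix is per-host (jira_downloadLinkPrefix)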
Status: ${err}${NC}" + failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" + exitUploadError=true + return 1 + else + return 1 + fi + fi +} backupIFS=$IFS IFS=$(echo -en "\n\b") RED=$(tput setaf 1) diff --git a/optional/mad.config b/optional/mad.config index 6d3e779..33cc275 100755 --- a/optional/mad.config +++ b/optional/mad.config @@ -27,7 +27,7 @@ LoadPlugins="" MaxUploadRetries=4 DefaultUploadHosts='1f,uhive,oshi' UploadSpeedMin=10 -UploadTimeoutInterval=300 +UploadTimeoutInterval=600 #================================================= @@ -62,6 +62,7 @@ CatnapDuration=1 # HOST SPECIFIC SECTION #------------------- +UploadHiveRandomizeExt=true OshiBaseUrlOverride="oshiat" UsePixeldrainBypass=false EnableFiledotProcessing=false @@ -69,14 +70,14 @@ EnableFiledotProcessing=false # [FileDot - Filedot User / Pass list] ar_fdUP[0]="user1|pass1" #ar_fdUP[1]="user2|pass2" # Uncomment line to use a 2nd account -#ar_fdUP[2]="user3|pass3" # Uncomment line to use a 3rd account -#ar_fdUP[3]="user4|pass4" # Uncomment line to use a 4th account -#ar_fdUP[4]="user5|pass5" # Uncomment line to use a 5th account -#ar_fdUP[5]="user6|pass6" # Uncomment line to use a 6th account -#ar_fdUP[6]="user7|pass7" # Uncomment line to use a 7th account -#ar_fdUP[7]="user8|pass8" # Uncomment line to use a 8th account -#ar_fdUP[8]="user9|pass9" # Uncomment line to use a 9th account -#ar_fdUP[9]="user10|pass10" # Uncomment line to use a 10th account + +# [OffShore.cat Upload ApiKeys] +ar_oscKey[0]='4GDsorzK4e1yowrCiZaBnS992uKjiZVnXbByJr0kHmaAxarP26LkRV79MbKACXt0' # Shared general +#ar_oscKey[1]='apikey' # Uncomment line to use a 2nd + +# [pixeldrain.com Upload ApiKeys] +ar_pdKey[0]='cad31e7f-676d-4d47-a41b-b32087bee0c2' # Shared general +#ar_pdKey[1]='apikey' # Uncomment line to use a 2nd # [PhantomJS Keys]: pjscloud.sh plugin ar_pgsKey[0]='ak-shp9s-6zqr2-d30tt-9h64j-a0zkz' @@ -89,12 +90,6 @@ ar_pgsKey[6]='ak-x2ng1-cr476-k4bph-ae8ks-9eg45' # Uncomment line to use a 7th ke ar_pgsKey[7]='ak-s6k8z-wb6fz-dgb37-j268v-mgspe' # Uncomment line to use a 8th key ar_pgsKey[8]='ak-msdn7-vs5jr-4kknq-3qgw7-grj57' # Uncomment line to use a 9th key ar_pgsKey[9]='ak-77pgx-g1ge9-twmhy-em51a-p8p53' # Uncomment line to use a 10th key -#ar_pgsKey[10]='aa-bbbbb-ccccc-ddddd-eeeee-fffff' # Uncomment line to use a 11th key -#ar_pgsKey[11]='aa-bbbbb-ccccc-ddddd-eeeee-fffff' # Uncomment line to use a 12th key -#ar_pgsKey[12]='aa-bbbbb-ccccc-ddddd-eeeee-fffff' # Uncomment line to use a 13th key -#ar_pgsKey[13]='aa-bbbbb-ccccc-ddddd-eeeee-fffff' # Uncomment line to use a 14th key -#ar_pgsKey[14]='aa-bbbbb-ccccc-ddddd-eeeee-fffff' # Uncomment line to use a 15th key -#ar_pgsKey[15]='aa-bbbbb-ccccc-ddddd-eeeee-fffff' # Uncomment line to use a 16th key # Global pjscloud enabled hosts PJSCloud_pixeldrain=true # Enables pixeldrain ViewPump (pjscloud.sh plugin required) diff --git a/uploads.txt b/uploads.txt index 5122bbd..60ce534 100755 --- a/uploads.txt +++ b/uploads.txt @@ -11,12 +11,16 @@ # 10GB tmpme tempfile.me 300MB trbo turbo.onion 100MB inno innocent.onion # 1GB ansh anonsharing.com 1GB torp TorUp.onion 4GB offcat offshore.cat # 100MB upee upload.ee 5GB fd fileditch.com 256MB qx qu.ax -# Jirafraeu hosts (recommended upload 100MB splits as many host only support that) +# 40GB isup isupload.com 100MB fb fileblade.com 20GB atea ateasystems.com +# 100MB ubay uploadbay.net 2GB sysp syspro.com.br 400MB dash dashfile.net +# 512MB anon anonfile.de +# Jirafeau hosts (recommended upload 100MB splits as many host only 
support that) # 10GB anarc anarchaserver 1GB kaz depot.kaz.bzh 5GB squid filesquid # 10GB nant nantes.cloud 500MB soy soyjak.download 512MB linx linxx.net -# 10GB nlib netlib.re 100MB ffl famille-flender 5GB moo moocloud.sh +# 10GB nlib netlib.re 100MB ffl famille-flender 5GB moo moocloud.ch # ?? fr4e sendfree4e.fr 100MB harr harrault.fr 100MB acid dl.acid.fr -# ?? skpr skrepr.com +# ?? skpr skrepr.com 5GB edd eddowding.com 2GB dict dictvm.org +# 10GB cyx cyssoux.fr 5GB frso freesocial.co 512MB herb herbolistique.com # (Require js -- do not use) # 4GB daily dailyuploads 1GB kraken krakenfiles 2GB hex hexload # 4GB bd bedrive.ru 5GB uflix uploadflix
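Since the Jirafeau note in the table above recommends 100MB splits, one way to
pre-split a large archive before queueing it (GNU coreutils split; file names are
illustrative):

  split -b 100M -d archive.7z archive.7z.part_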