diff --git a/.audit/mad-audit-curl.log b/.audit/mad-audit-curl.log new file mode 100755 index 0000000..ca86b40 --- /dev/null +++ b/.audit/mad-audit-curl.log @@ -0,0 +1,457 @@ +DateTime: 24.11.07 + +Files: +./hosts/1fichier.sh +./hosts/9saves.sh +./hosts/acid.sh +./hosts/anarchaserver.sh +./hosts/anonsharing.sh +./hosts/archived/nekofile.sh +./hosts/bedrive.sh +./hosts/biteblob.sh +./hosts/bowfile.sh +./hosts/click.sh +./hosts/dailyuploads.sh +./hosts/dataupload.sh +./hosts/dbree.sh +./hosts/depotkaz.sh +./hosts/discreetshare.sh +./hosts/dosya.sh +./hosts/downloadgg.sh +./hosts/eternalhosting.sh +./hosts/examples/ExampleNewHost.sh +./hosts/examples/up_example.sh +./hosts/filedoge.sh +./hosts/filedot.sh +./hosts/filehaus.sh +./hosts/filesquid.sh +./hosts/firestorage.sh +./hosts/free4e.sh +./hosts/gofile.sh +./hosts/harrault.sh +./hosts/hexload.sh +./hosts/innocent.sh +./hosts/kraken.sh +./hosts/lainsafe.sh +./hosts/lainsafe_onion.sh +./hosts/linxx.sh +./hosts/nantes.sh +./hosts/netlib.sh +./hosts/nippy.sh +./hosts/nofile.sh +./hosts/offshorecat.sh +./hosts/oshi.sh +./hosts/pixeldrain.sh +./hosts/ranoz.sh +./hosts/shareonline.sh +./hosts/soyjak.sh +./hosts/tempfileme.sh +./hosts/tempsh.sh +./hosts/turboonion.sh +./hosts/up2share.sh +./hosts/uploadee.sh +./hosts/uploadev.sh +./hosts/uploadflix.sh +./hosts/uploadhive.sh +./hosts/up_1fichier.sh +./hosts/up_acid.sh +./hosts/up_anarchaserver.sh +./hosts/up_anonsharing.sh +./hosts/up_axfc.sh +./hosts/up_bedrive.sh +./hosts/up_bowfile.sh +./hosts/up_dailyuploads.sh +./hosts/up_dataupload.sh +./hosts/up_dbree.sh +./hosts/up_depotkaz.sh +./hosts/up_dosya.sh +./hosts/up_filehaus.sh +./hosts/up_filesquid.sh +./hosts/up_firestorage.sh +./hosts/up_free4e.sh +./hosts/up_gofile.sh +./hosts/up_harrault.sh +./hosts/up_hexload.sh +./hosts/up_innocent.sh +./hosts/up_kouploader.sh +./hosts/up_kraken.sh +./hosts/up_linxx.sh +./hosts/up_nantes.sh +./hosts/up_netlib.sh +./hosts/up_nippy.sh +./hosts/up_nofile.sh +./hosts/up_oshi.sh +./hosts/up_ranoz.sh +./hosts/up_shareonline.sh +./hosts/up_soyjak.sh +./hosts/up_tempfileme.sh +./hosts/up_tempsh.sh +./hosts/up_turboonion.sh +./hosts/up_uploadev.sh +./hosts/up_uploadflix.sh +./hosts/up_uploadhive.sh +./hosts/up_uploadraja.sh +./hosts/up_yolobit.sh +./hosts/yolobit.sh +./hosts/youdbox.sh +./mad.sh +./plugins/AutoResetAndRetryDownloads.sh +./plugins/CatnapCtrlC.sh +./plugins/SkipUrlsInDownloadsCompletedTxt.sh +./plugins/examples/ExampleAddNewFuncAndCallOnSuccessfulDownload.sh +./plugins/examples/ExamplesMainHooks.sh +./plugins/ocr_captcha.sh +./plugins/pjscloud.sh + + +MAD Audit of curl: (grep "curl") +_________________________________________________________________________ +./hosts/1fichier.sh:48: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s "${remote_url}") +./hosts/1fichier.sh:155: cdn_request=$(tor_curl_request --insecure -s -L -b "${fich_cookie_jar}" -c "${fich_cookie_jar}" -F "submit=Download" -F "pass=${fich_user_provided_password}" -F "adz=${fich_adz_parameter}" "${remote_url}") +./hosts/1fichier.sh:189: file_header=$(tor_curl_request --insecure -sSIL -e "${remote_url}" "${target_file_link}") +./hosts/1fichier.sh:256: tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}" +./hosts/1fichier.sh:349: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -c "${fich_cookie_jar}" -s "${remote_url}") +./hosts/9saves.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url") +./hosts/9saves.sh:139: 
response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/9saves.sh:188: file_header=$(tor_curl_request --insecure --head -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$download_url") +./hosts/9saves.sh:290: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/9saves.sh:292: tor_curl_request --insecure \ +./hosts/9saves.sh:297: tor_curl_request --insecure \ +./hosts/9saves.sh:304: tor_curl_request --insecure \ +./hosts/9saves.sh:321: tor_curl_request --insecure \ +./hosts/anonsharing.sh:91: response=$(tor_curl_request --insecure -i -s \ +./hosts/anonsharing.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s \ +./hosts/anonsharing.sh:158: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -s -i \ +./hosts/anonsharing.sh:273: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --output "$file_path" +./hosts/anonsharing.sh:275: tor_curl_request --insecure "$download_url" --output "$file_path" +./hosts/bedrive.sh:90: response=$(tor_curl_request --insecure -L -s \ +./hosts/bedrive.sh:149: file_header=$(tor_curl_request --insecure --head -L -i -s \ +./hosts/bedrive.sh:270: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/bedrive.sh:272: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/bedrive.sh:277: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/bedrive.sh:284: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/bedrive.sh:300: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/biteblob.sh:96: response=$(tor_curl_request --insecure -L -s "${fixed_url}") +./hosts/biteblob.sh:144: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") +./hosts/biteblob.sh:227: tor_curl_request --insecure --referer "$file_url" "$download_url" --output "$file_path" +./hosts/biteblob.sh:271: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/biteblob.sh:273: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/bowfile.sh:91: response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ +./hosts/bowfile.sh:143: response=$(tor_curl_request --insecure -s --head \ +./hosts/bowfile.sh:182: file_header=$(tor_curl_request --insecure -L -sS -i --head \ +./hosts/bowfile.sh:297: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/bowfile.sh:299: tor_curl_request --insecure -L \ +./hosts/bowfile.sh:305: tor_curl_request --insecure -L \ +./hosts/bowfile.sh:312: tor_curl_request --insecure -L \ +./hosts/bowfile.sh:329: tor_curl_request --insecure -L \ +./hosts/click.sh:143: response=$(tor_curl_request --insecure -L -s -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:226: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/click.sh:345: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/click.sh:434: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/click.sh:533: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/click.sh:535: tor_curl_request --insecure \ +./hosts/click.sh:542: tor_curl_request --insecure \ +./hosts/click.sh:550: tor_curl_request --insecure \ +./hosts/click.sh:568: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:97: response=$(tor_curl_request --insecure -L -s -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ 
+./hosts/dailyuploads.sh:139: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" +./hosts/dailyuploads.sh:286: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/dailyuploads.sh:392: file_header=$(tor_curl_request -i -s --head \ +./hosts/dailyuploads.sh:496: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dailyuploads.sh:498: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:504: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:511: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:528: tor_curl_request --insecure \ +./hosts/dataupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url") +./hosts/dataupload.sh:166: response=$(tor_curl_request --insecure -svo. -X POST \ +./hosts/dataupload.sh:234: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") +./hosts/dataupload.sh:349: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dataupload.sh:351: tor_curl_request --insecure \ +./hosts/dataupload.sh:357: tor_curl_request --insecure \ +./hosts/dataupload.sh:364: tor_curl_request --insecure \ +./hosts/dataupload.sh:381: tor_curl_request --insecure \ +./hosts/dosya.sh:108: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ +./hosts/dosya.sh:113: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ +./hosts/dosya.sh:172: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dosya.sh:173: file_header=$(tor_curl_request_extended --insecure --head -L -s \ +./hosts/dosya.sh:179: file_header=$(tor_curl_request_extended --insecure --head -L -s \ +./hosts/dosya.sh:402: tor_curl_request -L -G --insecure \ +./hosts/dosya.sh:417: tor_curl_request -L -G --insecure \ +./hosts/downloadgg.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" "$remote_url") +./hosts/downloadgg.sh:169: response=$(tor_curl_request --insecure -svo. 
-X POST \ +./hosts/downloadgg.sh:255: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/downloadgg.sh:257: tor_curl_request --insecure -X POST \ +./hosts/downloadgg.sh:265: tor_curl_request --insecure -X POST \ +./hosts/downloadgg.sh:275: tor_curl_request --insecure -X POST \ +./hosts/downloadgg.sh:294: tor_curl_request --insecure -X POST \ +./hosts/examples/ExampleNewHost.sh:102: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") +./hosts/examples/ExampleNewHost.sh:199: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/examples/ExampleNewHost.sh:201: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/examples/up_example.sh:112: response=$(tor_curl_upload --insecure \ +./hosts/filedot.sh:112: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ +./hosts/filedot.sh:153: resp_login=$(tor_curl_request --insecure -L -s \ +./hosts/filedot.sh:240: response=$(tor_curl_request --insecure -L -s \ +./hosts/filedot.sh:320: response=$(tor_curl_request --insecure -L -s \ +./hosts/filedot.sh:406: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/filedot.sh:499: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/filedot.sh:501: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:100: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") +./hosts/filehaus.sh:197: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:199: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/firestorage.sh:98: response=$(tor_curl_request --insecure -L -s "${fixed_url}") +./hosts/firestorage.sh:226: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/firestorage.sh:335: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/firestorage.sh:337: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/gofile.sh:97: response=$(tor_curl_request --insecure -s -X POST \ +./hosts/gofile.sh:170: response=$(tor_curl_request --insecure -G -L -s \ +./hosts/gofile.sh:241: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/gofile.sh:359: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/gofile.sh:373: tor_curl_request --insecure -G \ +./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:254: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" 
"$download_url") +./hosts/hexload.sh:321: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/hexload.sh:323: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/innocent.sh:98: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./hosts/innocent.sh:110: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \ +./hosts/innocent.sh:116: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") +./hosts/innocent.sh:207: tor_curl_request --insecure "$download_url" --output "$file_path" +./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}") +./hosts/kraken.sh:169: down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}") +./hosts/kraken.sh:186: file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url") +./hosts/kraken.sh:286: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" +./hosts/kraken.sh:288: tor_curl_request --insecure --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" +./hosts/nippy.sh:121: response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url") +./hosts/nippy.sh:190: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/nippy.sh:301: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/nippy.sh:304: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:101: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") +./hosts/oshi.sh:195: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:197: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/pixeldrain.sh:94: response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid") +./hosts/pixeldrain.sh:256: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") +./hosts/pixeldrain.sh:322: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/pixeldrain.sh:324: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:328: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:333: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:342: tor_curl_request --insecure \ +./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/ranoz.sh:144: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url") +./hosts/ranoz.sh:253: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/ranoz.sh:255: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:259: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:264: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \ 
+./hosts/tempfileme.sh:89: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/tempfileme.sh:163: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url}" "$download_url") +./hosts/tempfileme.sh:291: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/tempfileme.sh:293: tor_curl_request --insecure -L \ +./hosts/tempfileme.sh:298: tor_curl_request --insecure -L \ +./hosts/tempfileme.sh:304: tor_curl_request --insecure -L \ +./hosts/tempfileme.sh:319: tor_curl_request --insecure -L \ +./hosts/tempsh.sh:88: file_header=$(tor_curl_request --insecure -s -D - -o /dev/null -X POST \ +./hosts/tempsh.sh:225: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/tempsh.sh:227: tor_curl_request --insecure -X POST \ +./hosts/tempsh.sh:231: tor_curl_request --insecure -X POST \ +./hosts/tempsh.sh:236: tor_curl_request --insecure -X POST \ +./hosts/tempsh.sh:250: tor_curl_request --insecure -X POST \ +./hosts/up2share.sh:91: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:144: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:195: file_header=$(tor_curl_request --insecure -L -s --head \ +./hosts/up2share.sh:311: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/up2share.sh:313: tor_curl_request --insecure -L \ +./hosts/up2share.sh:320: tor_curl_request --insecure -L \ +./hosts/up2share.sh:328: tor_curl_request --insecure -L \ +./hosts/up2share.sh:346: tor_curl_request --insecure -L \ +./hosts/uploadee.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/uploadee.sh:143: file_header=$(tor_curl_request --insecure --head -L -s -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --referer "$remote_url" "$download_url") +./hosts/uploadee.sh:249: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/uploadee.sh:251: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadee.sh:257: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadee.sh:265: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadee.sh:282: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadev.sh:91: response=$(tor_curl_request --insecure -L -s -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ +./hosts/uploadev.sh:181: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/uploadev.sh:268: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") +./hosts/uploadev.sh:367: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/uploadev.sh:369: tor_curl_request --insecure -L \ +./hosts/uploadev.sh:374: tor_curl_request --insecure -L \ +./hosts/uploadev.sh:380: tor_curl_request --insecure -L \ +./hosts/uploadev.sh:396: tor_curl_request --insecure -L \ +./hosts/uploadflix.sh:97: response=$(tor_curl_request --insecure -L -s "${fixed_url}") +./hosts/uploadflix.sh:150: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "${fixed_url}") +./hosts/uploadflix.sh:192: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "${download_url}") +./hosts/uploadflix.sh:286: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/uploadflix.sh:288: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:88: response=$(tor_curl_request --insecure -L -s 
"$remote_url") +./hosts/uploadhive.sh:134: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url") +./hosts/uploadhive.sh:175: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url") +./hosts/uploadhive.sh:269: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:271: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/") +./hosts/up_1fichier.sh:180: response=$(tor_curl_upload --insecure -L \ +./hosts/up_acid.sh:94: response=$(tor_curl_upload --insecure -i \ +./hosts/up_anarchaserver.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_anonsharing.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_axfc.sh:109: response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url") +./hosts/up_axfc.sh:136: response=$(tor_curl_upload --insecure -L -s -X POST \ +./hosts/up_axfc.sh:184: response=$(tor_curl_upload --insecure -L -i -X POST \ +./hosts/up_bedrive.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_bowfile.sh:108: response=$(tor_curl_request --insecure -L -i \ +./hosts/up_dailyuploads.sh:109: response=$(tor_curl_upload --insecure -i \ +./hosts/up_dataupload.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_dbree.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_depotkaz.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_dosya.sh:107: response=$(tor_curl_upload --insecure -L -i \ +./hosts/up_filehaus.sh:106: response=$(tor_curl_upload --insecure -i \ +./hosts/up_filesquid.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_firestorage.sh:113: response=$(tor_curl_upload --insecure -i \ +./hosts/up_free4e.sh:94: response=$(tor_curl_upload --insecure -i \ +./hosts/up_gofile.sh:102: response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers") +./hosts/up_gofile.sh:121: response=$(tor_curl_upload --insecure -i \ +./hosts/up_harrault.sh:94: response=$(tor_curl_upload --insecure -i \ +./hosts/up_hexload.sh:109: response=$(tor_curl_upload --insecure -i \ +./hosts/up_innocent.sh:99: response=$(tor_curl_upload --insecure -D - -o /dev/null \ +./hosts/up_kouploader.sh:108: response=$(tor_curl_request --insecure -L -s -b "${ko_cookie_jar}" -c "${ko_cookie_jar}" "$PostUrlHost") +./hosts/up_kouploader.sh:132: response=$(tor_curl_upload --insecure -L -i \ +./hosts/up_kraken.sh:115: response=$(tor_curl_upload --insecure -i \ +./hosts/up_linxx.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_nantes.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_netlib.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_nippy.sh:125: response=$(tor_curl_upload --insecure -i \ +./hosts/up_nofile.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_oshi.sh:110: response=$(tor_curl_upload --insecure \ +./hosts/up_ranoz.sh:102: response=$(tor_curl_upload --insecure -L -i -s \ +./hosts/up_ranoz.sh:131: response=$(tor_curl_upload --insecure -i -X PUT \ +./hosts/up_shareonline.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_soyjak.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_tempfileme.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_tempsh.sh:102: 
response=$(tor_curl_upload --insecure -i \ +./hosts/up_turboonion.sh:99: response=$(tor_curl_upload --insecure \ +./hosts/up_uploadev.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_uploadflix.sh:106: response=$(tor_curl_upload --insecure -i \ +./hosts/up_uploadhive.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_uploadraja.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_yolobit.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/youdbox.sh:95: response=$(tor_curl_request --insecure -L -i -s "${fixed_url}") +./hosts/youdbox.sh:141: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "${fixed_url}") +./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./mad.sh:106:UseTorCurlImpersonate=false +./mad.sh:371:tor_curl_request() { +./mad.sh:372: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:373: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" +./mad.sh:375: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" +./mad.sh:378:tor_curl_request_extended() { +./mad.sh:379: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:380: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@" +./mad.sh:382: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout 60 --compressed --globoff "$@" +./mad.sh:385:tor_curl_upload() { +./mad.sh:386: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:388: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@" +./mad.sh:390: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" +./mad.sh:394: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" +./mad.sh:396: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 
'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" +./mad.sh:1283:install_curl_impersonate() { +./mad.sh:1285: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive." +./mad.sh:1286: echo -e "- Currently uses curl v8.1.1." +./mad.sh:1290: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." +./mad.sh:1291: echo -e "+ Currently uses curl v8.7.1" +./mad.sh:1295: PS3='Please select which curl_impersonate to install: ' +./mad.sh:1303: install_curl_impersonate_lwthiker_orig +./mad.sh:1307: install_curl_impersonate_lexiforest_fork +./mad.sh:1317:install_curl_impersonate_lwthiker_orig() { +./mad.sh:1321: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate." +./mad.sh:1322: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates" +./mad.sh:1325: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}" +./mad.sh:1328: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) +./mad.sh:1330: debugHtml "github" "lbf_inst_curlimp$j" "$response" +./mad.sh:1333: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") +./mad.sh:1343: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { +./mad.sh:1345: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1348: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") +./mad.sh:1350: debugHtml "github" "head_inst_curlimp$j" "${file_header}" +./mad.sh:1398: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1427: echo -e "| Extracting curl_impersonate..." +./mad.sh:1429: rm -f "${ScriptDir}"/curl* +./mad.sh:1430: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/" +./mad.sh:1431: mv "$extract_location/curl_ff109" "${ScriptDir}/" +./mad.sh:1432: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." +./mad.sh:1440:install_curl_impersonate_lexiforest_fork() { +./mad.sh:1444: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." +./mad.sh:1445: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs" +./mad.sh:1448: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}" +./mad.sh:1451: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1453: debugHtml "github" "lbf_inst_curlimp$j" "$response" +./mad.sh:1456: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") +./mad.sh:1466: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" 
&& { +./mad.sh:1468: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1471: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") +./mad.sh:1473: debugHtml "github" "head_inst_curlimp$j" "${file_header}" +./mad.sh:1521: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1550: echo -e "| Extracting curl_impersonate..." +./mad.sh:1552: rm -f "${ScriptDir}"/curl* +./mad.sh:1553: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/" +./mad.sh:1554: mv "$extract_location/curl_chrome124" "${ScriptDir}/" +./mad.sh:1555: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." +./mad.sh:1717: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :" +./mad.sh:1725: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1726: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1735: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1737: echo -e "$maud_curl" +./mad.sh:1739: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1741: echo -e "$maud_torcurl" +./mad.sh:1753: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1754: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1763: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" +./mad.sh:1765: echo -e "$maud_curl" +./mad.sh:1767: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1769: echo -e "$maud_torcurl" +./mad.sh:1775: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1776: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1785: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1787: echo -e "$maud_curl" +./mad.sh:1789: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1791: echo -e "$maud_torcurl" +./mad.sh:2738: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:2739: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:2741: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./mad.sh:2901: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:2902: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:2904: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./mad.sh:3102: file_header=$(tor_curl_request --insecure -m 18 -s -D - -o /dev/null \ +./mad.sh:3109: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./mad.sh:3237: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" +./mad.sh:3281: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./mad.sh:3283: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./mad.sh:3456:if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:3457: curl_impersonate=() +./mad.sh:3458: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1) +./mad.sh:3459: bFoundCurlHeader=false +./mad.sh:3463: curl_impersonate=($fil) +./mad.sh:3464: bFoundCurlHeader=true +./mad.sh:3468: if [ "$bFoundCurlHeader" == "false" ]; then +./mad.sh:3469: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}" +./mad.sh:3472: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}." +./mad.sh:3475: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\"" +./mad.sh:3477: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL." +./mad.sh:3481: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}." +./mad.sh:3482: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script." +./mad.sh:3485: echo -e "run $0 install_curl_impersonate\\n" +./mad.sh:3487: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" 
&& { +./mad.sh:3488: UseTorCurlImpersonate=false +./mad.sh:3489: install_curl_impersonate +./mad.sh:3573: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)" +./mad.sh:3574: printf " %s install_curl_impersonate\\n" "$0" +./mad.sh:3652:elif [[ "$arg1" == "install_curl_impersonate" ]]; then +./mad.sh:3653: install_curl_impersonate +./mad.sh:3684:if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:3685: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:3687: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \ +./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \ + diff --git a/.audit/mad-audit-http.log b/.audit/mad-audit-http.log new file mode 100755 index 0000000..de66ad6 --- /dev/null +++ b/.audit/mad-audit-http.log @@ -0,0 +1,366 @@ +DateTime: 24.11.07 + +Files: +./hosts/1fichier.sh +./hosts/9saves.sh +./hosts/acid.sh +./hosts/anarchaserver.sh +./hosts/anonsharing.sh +./hosts/archived/nekofile.sh +./hosts/bedrive.sh +./hosts/biteblob.sh +./hosts/bowfile.sh +./hosts/click.sh +./hosts/dailyuploads.sh +./hosts/dataupload.sh +./hosts/dbree.sh +./hosts/depotkaz.sh +./hosts/discreetshare.sh +./hosts/dosya.sh +./hosts/downloadgg.sh +./hosts/eternalhosting.sh +./hosts/examples/ExampleNewHost.sh +./hosts/examples/up_example.sh +./hosts/filedoge.sh +./hosts/filedot.sh +./hosts/filehaus.sh +./hosts/filesquid.sh +./hosts/firestorage.sh +./hosts/free4e.sh +./hosts/gofile.sh +./hosts/harrault.sh +./hosts/hexload.sh +./hosts/innocent.sh +./hosts/kraken.sh +./hosts/lainsafe.sh +./hosts/lainsafe_onion.sh +./hosts/linxx.sh +./hosts/nantes.sh +./hosts/netlib.sh +./hosts/nippy.sh +./hosts/nofile.sh +./hosts/offshorecat.sh +./hosts/oshi.sh +./hosts/pixeldrain.sh +./hosts/ranoz.sh +./hosts/shareonline.sh +./hosts/soyjak.sh +./hosts/tempfileme.sh +./hosts/tempsh.sh +./hosts/turboonion.sh +./hosts/up2share.sh +./hosts/uploadee.sh +./hosts/uploadev.sh +./hosts/uploadflix.sh +./hosts/uploadhive.sh +./hosts/up_1fichier.sh +./hosts/up_acid.sh +./hosts/up_anarchaserver.sh +./hosts/up_anonsharing.sh +./hosts/up_axfc.sh +./hosts/up_bedrive.sh +./hosts/up_bowfile.sh +./hosts/up_dailyuploads.sh +./hosts/up_dataupload.sh +./hosts/up_dbree.sh +./hosts/up_depotkaz.sh +./hosts/up_dosya.sh +./hosts/up_filehaus.sh +./hosts/up_filesquid.sh +./hosts/up_firestorage.sh +./hosts/up_free4e.sh +./hosts/up_gofile.sh +./hosts/up_harrault.sh +./hosts/up_hexload.sh +./hosts/up_innocent.sh +./hosts/up_kouploader.sh +./hosts/up_kraken.sh +./hosts/up_linxx.sh +./hosts/up_nantes.sh +./hosts/up_netlib.sh +./hosts/up_nippy.sh +./hosts/up_nofile.sh +./hosts/up_oshi.sh +./hosts/up_ranoz.sh +./hosts/up_shareonline.sh +./hosts/up_soyjak.sh +./hosts/up_tempfileme.sh +./hosts/up_tempsh.sh +./hosts/up_turboonion.sh +./hosts/up_uploadev.sh +./hosts/up_uploadflix.sh +./hosts/up_uploadhive.sh +./hosts/up_uploadraja.sh +./hosts/up_yolobit.sh +./hosts/yolobit.sh +./hosts/youdbox.sh +./mad.sh +./plugins/AutoResetAndRetryDownloads.sh +./plugins/CatnapCtrlC.sh +./plugins/SkipUrlsInDownloadsCompletedTxt.sh +./plugins/examples/ExampleAddNewFuncAndCallOnSuccessfulDownload.sh +./plugins/examples/ExamplesMainHooks.sh +./plugins/ocr_captcha.sh +./plugins/pjscloud.sh + + +MAD Audit of http lines: (grep 
"http:" or "https:") +_________________________________________________________________________ +./hosts/1fichier.sh:161: if ! grep -Eqi "https://" <<< "${target_file_link}" > /dev/null ; then +./hosts/9saves.sh:141: --data "$form_data" "https://9saves.com/") +./hosts/anonsharing.sh:95: "https://anonsharing.com/account/ajax/file_details") +./hosts/anonsharing.sh:121: if grep -Eqi 'openUrl\('"'"'https:\\/\\/anonsharing.com\\/' <<< "$response"; then +./hosts/anonsharing.sh:124: download_url='https://anonsharing.com/'$(grep -oPi '(?<=openUrl\('"'"'https:\\/\\/anonsharing.com\\/).*?(?='"'"'.*$)' <<< "$response" | head -1) +./hosts/anonsharing.sh:155: ansh_host="${download_url/https:\/\//}" +./hosts/biteblob.sh:123: if ! grep -Eqi 'https://biteblob.com/Download/' <<< $download_url ; then +./hosts/biteblob.sh:125: download_url="https://biteblob.com${download_loc}" +./hosts/bowfile.sh:119: if grep -Eqi 'let next = "https://bowfile.com/' <<< "$response" ; then +./hosts/bowfile.sh:120: download_url="https://bowfile.com/"$(grep -oPi '(?<=let next = "https://bowfile.com/).*?(?=")' <<< "$response") +./hosts/bowfile.sh:174: fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/token/download/dl)' <<< "$download_url") +./hosts/bowfile.sh:293: fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/token/download/dl)' <<< "$download_url") +./hosts/bowfile.sh:302: --referer "https://bowfile.com/" \ +./hosts/bowfile.sh:307: --referer "https://bowfile.com/" \ +./hosts/bowfile.sh:326: --referer "https://bowfile.com/" \ +./hosts/bowfile.sh:342: --referer "https://bowfile.com/" \ +./hosts/click.sh:530: click_host=$(grep -oPi '(?<=https://).*(?=/)' <<< "$fixed_url") +./hosts/dailyuploads.sh:125: if grep -Eqi 'img src="https://dailyuploads.net/captchas/' <<< "$response" ; then +./hosts/dailyuploads.sh:280: response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data") +./hosts/dailyuploads.sh:493: daily_host=$(grep -oPi '(?<=https://).*(?=/)' <<< "$fixed_url") +./hosts/dataupload.sh:133: post_action="https://dataupload.net/" +./hosts/dataupload.sh:210: if ! grep -Eqi "location: https://dataupload.net/d/" <<< "$response"; then +./hosts/dosya.sh:168: if grep -Eqi 'https://dosyaupload.com' <<< "$dos_url" ; then +./hosts/downloadgg.sh:139: if grep -Eqi '
)' <<< "$response") +./hosts/gofile.sh:84: badUrlDownload "${remote_url}" "Expect format http://gofile.io/d/xxxxxxxxx" +./hosts/gofile.sh:108: "https://api.gofile.io/accounts") +./hosts/gofile.sh:183: "https://api.gofile.io/contents/$file_id") +./hosts/gofile.sh:185: debugHtml "${remote_url##*/}" "gofile_contents$i" "url: https://api.gofile.io/contents/${file_id}?${form_data}"$'\n'"${response}" +./hosts/gofile.sh:293: cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header") +./hosts/hexload.sh:102: response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data") +./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/innocent.sh:48: download_url="${download_url/https:/http:}" +./hosts/kraken.sh:155: kraken_action="https://krakenfiles.com/download/${kraken_action##*/}" +./hosts/nippy.sh:162: download_url="https:"$(grep -oP '(?<=
click here)' <<< "$response") +./hosts/up2share.sh:197: --referer "https://up2sha.re/" "$download_url") +./hosts/up2share.sh:317: --referer "https://up2sha.re/" \ +./hosts/up2share.sh:323: --referer "https://up2sha.re/" \ +./hosts/up2share.sh:343: --referer "https://up2sha.re/" \ +./hosts/up2share.sh:360: --referer "https://up2sha.re/" \ +./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/") +./hosts/up_1fichier.sh:109: debugHtml "${filepath##*/}" "${_hostCode}_up_getid_$i" "url: https://1fichier.com/"$'\n'"${response}" +./hosts/up_1fichier.sh:176: fichHost="${fichaction/https:\/\///}" +./hosts/up_1fichier.sh:195: hash=$(grep -oPi '(?<=
http://koldr.jp/' <<< "${response}" ; then +./hosts/up_kouploader.sh:147: subSearch=$(awk '/MESSAGE<\/font>/,/http:\/\/koldr.jp\//' <<< "$response") +./hosts/up_kouploader.sh:150: downloadLink="http://up8.kouploader.jp/?m=dp&dlkey=1234&n=$hash" +./hosts/up_kraken.sh:99: local ar_HUP[0]='https://uploads1.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:100: local ar_HUP[1]='https://uploads2.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:101: local ar_HUP[2]='https://uploads3.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:102: local ar_HUP[3]='https://uploads4.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:103: local ar_HUP[4]='https://uploads5.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:104: local ar_HUP[5]='https://uploads6.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:105: local ar_HUP[6]='https://uploads7.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:106: local ar_HUP[7]='https://uploads8.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:107: local ar_HUP[8]='https://uploads9.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:108: local ar_HUP[9]='https://uploads10.krakenfiles.com/_uploader/gallery/upload' +./hosts/up_kraken.sh:126: downloadLink="https://krakenfiles.com/view/${hash}/file.html" +./hosts/up_linxx.sh:99: PostUrlHost='https://linxx.net/upload/script.php' +./hosts/up_linxx.sh:114: downloadLink="https://linxx.net/upload/f.php?h=${hash}&p=1" +./hosts/up_nantes.sh:99: PostUrlHost='https://fichiers.nantes.cloud/script.php' +./hosts/up_nantes.sh:114: downloadLink="https://fichiers.nantes.cloud/f.php?h=${hash}&p=1" +./hosts/up_netlib.sh:99: PostUrlHost='https://mhep.netlib.re/jirafeau/script.php' +./hosts/up_netlib.sh:114: downloadLink="https://mhep.netlib.re/jirafeau/f.php?h=${hash}&p=1" +./hosts/up_nippy.sh:103: PostUrlHost='https://ns05.zipcluster.com/upload.php' +./hosts/up_nippy.sh:105: PostUrlHost='https://ns01.zipcluster.com/upload.php' +./hosts/up_nippy.sh:107: PostUrlHost='https://ns04.zipcluster.com/upload.php' +./hosts/up_nippy.sh:109: PostUrlHost='https://ns03.zipcluster.com/upload.php' +./hosts/up_nippy.sh:111: PostUrlHost='https://ns02.zipcluster.com/upload.php' +./hosts/up_nippy.sh:113: local ar_HUP[0]='https://ns05.zipcluster.com/upload.php' # nippydrive +./hosts/up_nippy.sh:114: local ar_HUP[1]='https://ns01.zipcluster.com/upload.php' # nippyshare +./hosts/up_nippy.sh:115: local ar_HUP[2]='https://ns04.zipcluster.com/upload.php' # nippybox +./hosts/up_nippy.sh:116: local ar_HUP[3]='https://ns03.zipcluster.com/upload.php' # nippyspace +./hosts/up_nippy.sh:117: local ar_HUP[4]='https://ns02.zipcluster.com/upload.php' # nippyfile +./hosts/up_nofile.sh:99: PostUrlHost='https://ns06.zipcluster.com/upload.php' +./hosts/up_oshi.sh:104: PostUrlHost='https://oshi.at/' +./hosts/up_oshi.sh:106: PostUrlHost='http://5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd.onion/' +./hosts/up_ranoz.sh:99: PostUrlHost='https://ranoz.gg/api/v1/files/upload_url' +./hosts/up_ranoz.sh:111: if grep -Eqi '"upload_url":"https://' <<< "$response" ; then +./hosts/up_shareonline.sh:99: PostUrlHost='https://ns07.zipcluster.com/upload.php' +./hosts/up_soyjak.sh:99: PostUrlHost='https://soyjak.download/script.php' +./hosts/up_soyjak.sh:114: downloadLink="https://soyjak.download/f.php?h=${hash}&p=1" +./hosts/up_tempfileme.sh:99: PostUrlHost='https://tempfile.me/upload' +./hosts/up_tempsh.sh:99: PostUrlHost='https://temp.sh/upload' 
+./hosts/up_tempsh.sh:111: hash=$(grep -oPi '(?<=http://temp.sh/).*?(?=$)' <<< "$response") +./hosts/up_tempsh.sh:113: downloadLink="https://temp.sh/${hash}" +./hosts/up_turboonion.sh:96: PostUrlHost='http://3qeyzgtujhguzjletcz34qxsiqoymlni6s6rhc37kpobyttzngwlzjid.onion/api/upload' +./hosts/up_turboonion.sh:118: downloadLink='http://3qeyzgtujhguzjletcz34qxsiqoymlni6s6rhc37kpobyttzngwlzjid.onion/dl/file/'"$hash" +./hosts/up_uploadev.sh:99: PostUrlHost='https://fs8.uploadev.org/cgi-bin/upload.cgi?upload_type=file&utype=anon' +./hosts/up_uploadev.sh:119: downloadLink="https://uploadev.org/${hash}" +./hosts/up_uploadflix.sh:99: local ar_HUP[0]='https://fs50.uploadflix.cyou/cgi-bin/upload.cgi?upload_type=file&utype=anon' +./hosts/up_uploadflix.sh:125: downloadLink="https://uploadflix.cc/${hash}" +./hosts/up_uploadhive.sh:99: PostUrlHost='https://fs430.uploadhive.com/cgi-bin/upload.cgi' +./hosts/up_uploadhive.sh:120: downloadLink="https://uploadhive.com/${hash}" +./hosts/up_uploadraja.sh:99: PostUrlHost='https://awsaisiaposisition69.kalpstudio.xyz/cgi-bin/upload.cgi?upload_type=file&utype=anon' +./hosts/up_uploadraja.sh:119: downloadLink="https://uploadraja.com/$hash" +./hosts/up_yolobit.sh:99: PostUrlHost='https://ns08.zipcluster.com/upload.php' +./mad.sh:606: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed) +./mad.sh:608: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https +./mad.sh:611: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:613: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:634: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed) +./mad.sh:636: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https +./mad.sh:639: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:641: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:662: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed) +./mad.sh:664: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https +./mad.sh:667: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:669: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:691: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed) +./mad.sh:693: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https +./mad.sh:696: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:698: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:719: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #http (if changed) +./mad.sh:721: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#>g" "${InputFile}" #direct url https +./mad.sh:724: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:726: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:752: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed) +./mad.sh:754: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url 
https +./mad.sh:779: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed) +./mad.sh:781: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https +./mad.sh:784: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:786: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:802: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed) +./mad.sh:804: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https +./mad.sh:807: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:809: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:828: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed) +./mad.sh:830: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https +./mad.sh:833: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:835: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:855: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed) +./mad.sh:857: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https +./mad.sh:860: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:862: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:880: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed) +./mad.sh:882: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. 
Try again later)>g" "${InputFile}" #direct url https +./mad.sh:885: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:887: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:906: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed) +./mad.sh:908: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https +./mad.sh:911: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:913: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1328: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) +./mad.sh:1345: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1451: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1468: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1731: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1759: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1781: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:3085: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then +./mad.sh:3495:arg2="$2" # auto, filelist, +./mad.sh:3592: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar" +./mad.sh:3594: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z" +./mad.sh:3596: echo -e ' - ie. direct=http://somehost.onion/abcD|filename.part1.rar' +./mad.sh:3815: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:3816: remote_url=${remote_url/http:/https:} +./mad.sh:3837: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:3838: remote_url=${remote_url/http:/https:} +./mad.sh:4204: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4205: remote_url=${remote_url/http:/https:} +./mad.sh:4263: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4264: remote_url=${remote_url/http:/https:} +./mad.sh:4289: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4290: remote_url=${remote_url/http:/https:} +./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null) +./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 
2>/dev/null) + diff --git a/.audit/mad-audit-tor_curl-details.log b/.audit/mad-audit-tor_curl-details.log new file mode 100755 index 0000000..fef0833 --- /dev/null +++ b/.audit/mad-audit-tor_curl-details.log @@ -0,0 +1,2608 @@ +DateTime: 24.11.07 + +Files: +./hosts/1fichier.sh +./hosts/9saves.sh +./hosts/acid.sh +./hosts/anarchaserver.sh +./hosts/anonsharing.sh +./hosts/archived/nekofile.sh +./hosts/bedrive.sh +./hosts/biteblob.sh +./hosts/bowfile.sh +./hosts/click.sh +./hosts/dailyuploads.sh +./hosts/dataupload.sh +./hosts/dbree.sh +./hosts/depotkaz.sh +./hosts/discreetshare.sh +./hosts/dosya.sh +./hosts/downloadgg.sh +./hosts/eternalhosting.sh +./hosts/examples/ExampleNewHost.sh +./hosts/examples/up_example.sh +./hosts/filedoge.sh +./hosts/filedot.sh +./hosts/filehaus.sh +./hosts/filesquid.sh +./hosts/firestorage.sh +./hosts/free4e.sh +./hosts/gofile.sh +./hosts/harrault.sh +./hosts/hexload.sh +./hosts/innocent.sh +./hosts/kraken.sh +./hosts/lainsafe.sh +./hosts/lainsafe_onion.sh +./hosts/linxx.sh +./hosts/nantes.sh +./hosts/netlib.sh +./hosts/nippy.sh +./hosts/nofile.sh +./hosts/offshorecat.sh +./hosts/oshi.sh +./hosts/pixeldrain.sh +./hosts/ranoz.sh +./hosts/shareonline.sh +./hosts/soyjak.sh +./hosts/tempfileme.sh +./hosts/tempsh.sh +./hosts/turboonion.sh +./hosts/up2share.sh +./hosts/uploadee.sh +./hosts/uploadev.sh +./hosts/uploadflix.sh +./hosts/uploadhive.sh +./hosts/up_1fichier.sh +./hosts/up_acid.sh +./hosts/up_anarchaserver.sh +./hosts/up_anonsharing.sh +./hosts/up_axfc.sh +./hosts/up_bedrive.sh +./hosts/up_bowfile.sh +./hosts/up_dailyuploads.sh +./hosts/up_dataupload.sh +./hosts/up_dbree.sh +./hosts/up_depotkaz.sh +./hosts/up_dosya.sh +./hosts/up_filehaus.sh +./hosts/up_filesquid.sh +./hosts/up_firestorage.sh +./hosts/up_free4e.sh +./hosts/up_gofile.sh +./hosts/up_harrault.sh +./hosts/up_hexload.sh +./hosts/up_innocent.sh +./hosts/up_kouploader.sh +./hosts/up_kraken.sh +./hosts/up_linxx.sh +./hosts/up_nantes.sh +./hosts/up_netlib.sh +./hosts/up_nippy.sh +./hosts/up_nofile.sh +./hosts/up_oshi.sh +./hosts/up_ranoz.sh +./hosts/up_shareonline.sh +./hosts/up_soyjak.sh +./hosts/up_tempfileme.sh +./hosts/up_tempsh.sh +./hosts/up_turboonion.sh +./hosts/up_uploadev.sh +./hosts/up_uploadflix.sh +./hosts/up_uploadhive.sh +./hosts/up_uploadraja.sh +./hosts/up_yolobit.sh +./hosts/yolobit.sh +./hosts/youdbox.sh +./mad.sh +./plugins/AutoResetAndRetryDownloads.sh +./plugins/CatnapCtrlC.sh +./plugins/SkipUrlsInDownloadsCompletedTxt.sh +./plugins/examples/ExampleAddNewFuncAndCallOnSuccessfulDownload.sh +./plugins/examples/ExamplesMainHooks.sh +./plugins/ocr_captcha.sh +./plugins/pjscloud.sh + + +MAD Audit of tor_curl (+10 lines after): (grep "tor_curl") +_________________________________________________________________________ +./hosts/1fichier.sh:48: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s "${remote_url}") +./hosts/1fichier.sh:49: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/1fichier.sh:50: debugHtml "${remote_url##*/?}" "prechk$y" "${PAGE}" +./hosts/1fichier.sh:51: fi +./hosts/1fichier.sh:52: file_information=$(grep -oP '(?<=)[^<]*?(?=)' <<< "${PAGE}") +./hosts/1fichier.sh:53: size=$(echo "${file_information}" | tail -n 1) +./hosts/1fichier.sh:54: filename=$(echo "${file_information}" | head -n 1) +./hosts/1fichier.sh:55: if [ ! 
"$filename_override" == "" ] ; then +./hosts/1fichier.sh:56: filename="$filename_override" +./hosts/1fichier.sh:57: fi +./hosts/1fichier.sh:58: filename=$(sanitize_file_or_folder_name "${filename}") +-- +./hosts/1fichier.sh:155: cdn_request=$(tor_curl_request --insecure -s -L -b "${fich_cookie_jar}" -c "${fich_cookie_jar}" -F "submit=Download" -F "pass=${fich_user_provided_password}" -F "adz=${fich_adz_parameter}" "${remote_url}") +./hosts/1fichier.sh:156: target_file_link=$(echo "$cdn_request" | grep -A 2 '
' | grep -oP ' /dev/null ; then +./hosts/1fichier.sh:162: if $is_password_protected; then +./hosts/1fichier.sh:163: echo -e "${RED}ERROR: Incorrect password${NC}\nSince this download required a password, you might have copied it incorrectly?" +./hosts/1fichier.sh:164: passwordProtectedDownload "${remote_url}" +./hosts/1fichier.sh:165: exitDownloadError=true +-- +./hosts/1fichier.sh:189: file_header=$(tor_curl_request --insecure -sSIL -e "${remote_url}" "${target_file_link}") +./hosts/1fichier.sh:190: file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") +./hosts/1fichier.sh:191: file_size_bytes=${file_size_bytes//[$'\t\r\n']} +./hosts/1fichier.sh:192: download_inflight_path="${WorkDir}/.inflight/" +./hosts/1fichier.sh:193: completed_location="${WorkDir}/downloads/" +./hosts/1fichier.sh:194: file_path="${download_inflight_path}${filename}" +./hosts/1fichier.sh:195: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/1fichier.sh:196: debugHtml "${remote_url##*/?}" "fich_savehead${num_attempt}_${j}" "target_file_link: ${target_file_link}"$'\n'"${file_header}" +./hosts/1fichier.sh:197: fi +./hosts/1fichier.sh:198: if [[ -z "$file_header" ]] || [[ -z "$file_size_bytes" ]]; then +./hosts/1fichier.sh:199: continue +-- +./hosts/1fichier.sh:256: tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}" +./hosts/1fichier.sh:257: rm -f "$flockDownload"; +./hosts/1fichier.sh:258: received_file_size=0 +./hosts/1fichier.sh:259: if [ -f "$file_path" ] ; then +./hosts/1fichier.sh:260: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/1fichier.sh:261: fi +./hosts/1fichier.sh:262: if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then +./hosts/1fichier.sh:263: echo -e "${RED}ERROR: Size mismatch after downloading${NC}\nPerhaps you or 1fichier lost connection for a while?" +./hosts/1fichier.sh:264: if [ "${finalAttempt}" == "true" ] ; then +./hosts/1fichier.sh:265: droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" +./hosts/1fichier.sh:266: fi +-- +./hosts/1fichier.sh:349: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -c "${fich_cookie_jar}" -s "${remote_url}") +./hosts/1fichier.sh:350: if [[ -z ${PAGE} ]]; then +./hosts/1fichier.sh:351: rm -f "${fich_cookie_jar}" +./hosts/1fichier.sh:352: continue +./hosts/1fichier.sh:353: fi +./hosts/1fichier.sh:354: if grep -Eqi 'Warning !|Attention !' 
<<< "${PAGE}"; then +./hosts/1fichier.sh:355: rm -f "${fich_cookie_jar}" +./hosts/1fichier.sh:356: continue +./hosts/1fichier.sh:357: else +./hosts/1fichier.sh:358: fich_adz_parameter=$(grep -oPi 'name="adz" value="\K[^"]+' <<< "${PAGE}") +./hosts/1fichier.sh:359: if [[ $fich_adz_parameter ]]; then +-- +./hosts/9saves.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url") +./hosts/9saves.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/9saves.sh:92: debugHtml "${remote_url##*/}" "ns_dwnpage$i" "${response}" +./hosts/9saves.sh:93: fi +./hosts/9saves.sh:94: if [[ -z $response ]] ; then +./hosts/9saves.sh:95: if [ $i == $maxfetchretries ] ; then +./hosts/9saves.sh:96: rm -f "${ns_cookie_jar}"; +./hosts/9saves.sh:97: printf "\\n" +./hosts/9saves.sh:98: echo -e "${RED}| Failed to extract post link.${NC}" +./hosts/9saves.sh:99: warnAndRetryUnknownError=true +./hosts/9saves.sh:100: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/9saves.sh:139: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/9saves.sh:140: -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" \ +./hosts/9saves.sh:141: --data "$form_data" "https://9saves.com/") +./hosts/9saves.sh:142: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/9saves.sh:143: debugHtml "${remote_url##*/}" "ns_post" "form_data: ${form_data}"$'\n'"${response}" +./hosts/9saves.sh:144: fi +./hosts/9saves.sh:145: if [[ -z $response ]] ; then +./hosts/9saves.sh:146: rm -f "${ns_cookie_jar}"; +./hosts/9saves.sh:147: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/9saves.sh:148: warnAndRetryUnknownError=true +./hosts/9saves.sh:149: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/9saves.sh:188: file_header=$(tor_curl_request --insecure --head -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$download_url") +./hosts/9saves.sh:189: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/9saves.sh:190: debugHtml "${remote_url##*/}" "ns_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/9saves.sh:191: fi +./hosts/9saves.sh:192: if [[ -z $file_header ]] ; then +./hosts/9saves.sh:193: if [ $j == $maxfetchretries ] ; then +./hosts/9saves.sh:194: rm -f "${ns_cookie_jar}"; +./hosts/9saves.sh:195: printf "\\n" +./hosts/9saves.sh:196: echo -e "${RED}| Failed to extract file info.${NC}" +./hosts/9saves.sh:197: warnAndRetryUnknownError=true +./hosts/9saves.sh:198: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/9saves.sh:292: tor_curl_request --insecure \ +./hosts/9saves.sh:293: -b "${ns_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/9saves.sh:294: "$download_url" \ +./hosts/9saves.sh:295: --continue-at - --output "$file_path" +./hosts/9saves.sh:296: else +./hosts/9saves.sh:297: tor_curl_request --insecure \ +./hosts/9saves.sh:298: -b "${ns_cookie_jar}" \ +./hosts/9saves.sh:299: "$download_url" \ +./hosts/9saves.sh:300: --continue-at - --output "$file_path" +./hosts/9saves.sh:301: fi +./hosts/9saves.sh:302: else +./hosts/9saves.sh:303: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/9saves.sh:304: tor_curl_request --insecure \ +./hosts/9saves.sh:305: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/9saves.sh:306: -b "${ns_cookie_jar}" \ +./hosts/9saves.sh:307: -H "User-Agent: $RandomUA" \ +./hosts/9saves.sh:308: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/9saves.sh:309: -H "Accept-Language: 
en-US,en;q=0.5" \ +./hosts/9saves.sh:310: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/9saves.sh:311: -H "Connection: keep-alive" \ +./hosts/9saves.sh:312: -H "Cookie: lng=eng" \ +./hosts/9saves.sh:313: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/9saves.sh:314: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/9saves.sh:321: tor_curl_request --insecure \ +./hosts/9saves.sh:322: -b "${ns_cookie_jar}" \ +./hosts/9saves.sh:323: -H "User-Agent: $RandomUA" \ +./hosts/9saves.sh:324: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/9saves.sh:325: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/9saves.sh:326: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/9saves.sh:327: -H "Connection: keep-alive" \ +./hosts/9saves.sh:328: -H "Cookie: lng=eng" \ +./hosts/9saves.sh:329: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/9saves.sh:330: -H "Sec-Fetch-Dest: document" \ +./hosts/9saves.sh:331: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/anonsharing.sh:91: response=$(tor_curl_request --insecure -i -s \ +./hosts/anonsharing.sh:92: -b "${ansh_cookie_jar}" -c "${ansh_cookie_jar}" \ +./hosts/anonsharing.sh:93: -F "u=$fileid" \ +./hosts/anonsharing.sh:94: -F "p=true" \ +./hosts/anonsharing.sh:95: "https://anonsharing.com/account/ajax/file_details") +./hosts/anonsharing.sh:96: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/anonsharing.sh:97: debugHtml "${remote_url##*/}" "ansh_postfileinfo$i" "${response}" +./hosts/anonsharing.sh:98: fi +./hosts/anonsharing.sh:99: if [[ -z $response ]] ; then +./hosts/anonsharing.sh:100: rm -f "${ansh_cookie_jar}"; +./hosts/anonsharing.sh:101: if [ $i == $maxfetchretries ] ; then +-- +./hosts/anonsharing.sh:150: file_header=$(tor_curl_request --insecure --head -L -i -s \ +./hosts/anonsharing.sh:151: -b "${ansh_cookie_jar}" -c "${ansh_cookie_jar}" \ +./hosts/anonsharing.sh:152: "$download_url") +./hosts/anonsharing.sh:153: else +./hosts/anonsharing.sh:154: echo -e "${GREEN}# Fetching file info…${NC}" +./hosts/anonsharing.sh:155: ansh_host="${download_url/https:\/\//}" +./hosts/anonsharing.sh:156: ansh_host="${ansh_host%%\/*}" +./hosts/anonsharing.sh:157: rm -f "${WorkDir}/.temp/directhead" +./hosts/anonsharing.sh:158: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -s -i \ +./hosts/anonsharing.sh:159: -H "Host: $ansh_host" \ +./hosts/anonsharing.sh:160: "$download_url" | +./hosts/anonsharing.sh:161: tee "${WorkDir}/.temp/directhead" & +./hosts/anonsharing.sh:162: sleep 6 +./hosts/anonsharing.sh:163: [ -s "${WorkDir}/.temp/directhead" ] +./hosts/anonsharing.sh:164: kill $! 2>/dev/null +./hosts/anonsharing.sh:165: ) +./hosts/anonsharing.sh:166: if [ ! 
-f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./hosts/anonsharing.sh:167: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +./hosts/anonsharing.sh:168: fi +-- +./hosts/anonsharing.sh:273: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --output "$file_path" +./hosts/anonsharing.sh:274: else +./hosts/anonsharing.sh:275: tor_curl_request --insecure "$download_url" --output "$file_path" +./hosts/anonsharing.sh:276: fi +./hosts/anonsharing.sh:277: received_file_size=0 +./hosts/anonsharing.sh:278: if [ -f "$file_path" ] ; then +./hosts/anonsharing.sh:279: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/anonsharing.sh:280: fi +./hosts/anonsharing.sh:281: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/anonsharing.sh:282: containsHtml=false +./hosts/anonsharing.sh:283: else +./hosts/anonsharing.sh:284: containsHtml=true +./hosts/anonsharing.sh:285: fi +-- +./hosts/bedrive.sh:90: response=$(tor_curl_request --insecure -L -s \ +./hosts/bedrive.sh:91: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ +./hosts/bedrive.sh:92: "$remote_url") +./hosts/bedrive.sh:93: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bedrive.sh:94: debugHtml "${remote_url##*/}" "bd_fetch$i" "${response}" +./hosts/bedrive.sh:95: fi +./hosts/bedrive.sh:96: if [[ -z $response ]] ; then +./hosts/bedrive.sh:97: rm -f "${bd_cookie_jar}"; +./hosts/bedrive.sh:98: if [ $i == $maxfetchretries ] ; then +./hosts/bedrive.sh:99: printf "\\n" +./hosts/bedrive.sh:100: echo -e "${RED}| Failed to extract download link.${NC}" +-- +./hosts/bedrive.sh:149: file_header=$(tor_curl_request --insecure --head -L -i -s \ +./hosts/bedrive.sh:150: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ +./hosts/bedrive.sh:151: "$download_url") +./hosts/bedrive.sh:152: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bedrive.sh:153: debugHtml "${remote_url##*/}" "bd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/bedrive.sh:154: fi +./hosts/bedrive.sh:155: if [[ -z $file_header ]] ; then +./hosts/bedrive.sh:156: if [ $j == $maxfetchretries ] ; then +./hosts/bedrive.sh:157: rm -f "${bd_cookie_jar}"; +./hosts/bedrive.sh:158: printf "\\n" +./hosts/bedrive.sh:159: echo -e "${RED}| Failed to extract file info.${NC}" +-- +./hosts/bedrive.sh:272: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/bedrive.sh:273: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/bedrive.sh:274: --referer "$remote_url" "$download_url" \ +./hosts/bedrive.sh:275: --continue-at - --output "$file_path" +./hosts/bedrive.sh:276: else +./hosts/bedrive.sh:277: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/bedrive.sh:278: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ +./hosts/bedrive.sh:279: --referer "$remote_url" "$download_url" \ +./hosts/bedrive.sh:280: --continue-at - --output "$file_path" +./hosts/bedrive.sh:281: fi +./hosts/bedrive.sh:282: else +./hosts/bedrive.sh:283: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/bedrive.sh:284: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/bedrive.sh:285: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/bedrive.sh:286: -H "User-Agent: $RandomUA" \ +./hosts/bedrive.sh:287: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/bedrive.sh:288: -H 
"Accept-Language: en-US,en;q=0.5" \ +./hosts/bedrive.sh:289: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/bedrive.sh:290: -H "Connection: keep-alive" \ +./hosts/bedrive.sh:291: -H "Cookie: lng=eng" \ +./hosts/bedrive.sh:292: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/bedrive.sh:293: -H "Sec-Fetch-Dest: document" \ +./hosts/bedrive.sh:294: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/bedrive.sh:300: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/bedrive.sh:301: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ +./hosts/bedrive.sh:302: -H "User-Agent: $RandomUA" \ +./hosts/bedrive.sh:303: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/bedrive.sh:304: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/bedrive.sh:305: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/bedrive.sh:306: -H "Connection: keep-alive" \ +./hosts/bedrive.sh:307: -H "Cookie: lng=eng" \ +./hosts/bedrive.sh:308: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/bedrive.sh:309: -H "Sec-Fetch-Dest: document" \ +./hosts/bedrive.sh:310: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/biteblob.sh:96: response=$(tor_curl_request --insecure -L -s "${fixed_url}") +./hosts/biteblob.sh:97: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/biteblob.sh:98: debugHtml "${remote_url##*/}" "bite_dwnpage$j" "url: $fixed_url"$'\n'"${response}" +./hosts/biteblob.sh:99: fi +./hosts/biteblob.sh:100: if [[ -z $response ]] ; then +./hosts/biteblob.sh:101: if [ $j == $maxfetchretries ] ; then +./hosts/biteblob.sh:102: printf "\\n" +./hosts/biteblob.sh:103: echo -e "${RED}| Failed to extract download link${NC}" +./hosts/biteblob.sh:104: warnAndRetryUnknownError=true +./hosts/biteblob.sh:105: if [ "${finalAttempt}" == "true" ] ; then +./hosts/biteblob.sh:106: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/biteblob.sh:144: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") +./hosts/biteblob.sh:145: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/biteblob.sh:146: debugHtml "${remote_url##*/}" "bite_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/biteblob.sh:147: fi +./hosts/biteblob.sh:148: if [[ -z $file_header ]] ; then +./hosts/biteblob.sh:149: if [ $j == $maxfetchretries ] ; then +./hosts/biteblob.sh:150: printf "\\n" +./hosts/biteblob.sh:151: echo -e "${RED}| Failed to extract file info.${NC}" +./hosts/biteblob.sh:152: warnAndRetryUnknownError=true +./hosts/biteblob.sh:153: if [ "${finalAttempt}" == "true" ] ; then +./hosts/biteblob.sh:154: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/biteblob.sh:227: tor_curl_request --insecure --referer "$file_url" "$download_url" --output "$file_path" +./hosts/biteblob.sh:228: rc=$? 
+./hosts/biteblob.sh:229: if [ $rc -ne 0 ] ; then +./hosts/biteblob.sh:230: printf "${RED}Download Failed (bad exit status).${NC}" +./hosts/biteblob.sh:231: if [ -f ${file_path} ]; then +./hosts/biteblob.sh:232: printf "${YELLOW} Partial removed...${NC}" +./hosts/biteblob.sh:233: printf "\n\n" +./hosts/biteblob.sh:234: rm -f "${file_path}" +./hosts/biteblob.sh:235: else +./hosts/biteblob.sh:236: printf "\n\n" +./hosts/biteblob.sh:237: fi +-- +./hosts/biteblob.sh:271: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/biteblob.sh:272: else +./hosts/biteblob.sh:273: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/biteblob.sh:274: fi +./hosts/biteblob.sh:275: received_file_size=0 +./hosts/biteblob.sh:276: if [ -f "$file_path" ] ; then +./hosts/biteblob.sh:277: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/biteblob.sh:278: fi +./hosts/biteblob.sh:279: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/biteblob.sh:280: containsHtml=false +./hosts/biteblob.sh:281: else +./hosts/biteblob.sh:282: containsHtml=true +./hosts/biteblob.sh:283: fi +-- +./hosts/bowfile.sh:91: response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ +./hosts/bowfile.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/bowfile.sh:93: "$fixed_url") +./hosts/bowfile.sh:94: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bowfile.sh:95: debugHtml "${remote_url##*/}" "bow_fetch$i" "${response}" +./hosts/bowfile.sh:96: fi +./hosts/bowfile.sh:97: if [[ -z $response ]] ; then +./hosts/bowfile.sh:98: rm -f "${bow_cookie_jar}"; +./hosts/bowfile.sh:99: if [ $i == $maxfetchretries ] ; then +./hosts/bowfile.sh:100: printf "\\n" +./hosts/bowfile.sh:101: echo -e "${RED}| Failed to extract token link [1].${NC}" +-- +./hosts/bowfile.sh:143: response=$(tor_curl_request --insecure -s --head \ +./hosts/bowfile.sh:144: -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ +./hosts/bowfile.sh:145: -H "Host: bowfile.com" \ +./hosts/bowfile.sh:146: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/bowfile.sh:147: "$download_url") +./hosts/bowfile.sh:148: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bowfile.sh:149: debugHtml "${remote_url##*/}" "bow_downurl" "download_url: ${download_url}"$'\n'"${response}" +./hosts/bowfile.sh:150: fi +./hosts/bowfile.sh:151: if [[ -z $response ]] ; then +./hosts/bowfile.sh:152: rm -f "${bow_cookie_jar}"; +./hosts/bowfile.sh:153: printf "\\n" +-- +./hosts/bowfile.sh:182: file_header=$(tor_curl_request --insecure -L -sS -i --head \ +./hosts/bowfile.sh:183: -H "Host: $fshost" \ +./hosts/bowfile.sh:184: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/bowfile.sh:185: "$download_url") +./hosts/bowfile.sh:186: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bowfile.sh:187: debugHtml "${remote_url##*/}" "bow_head$j" "download_url: ${download_url}"$'\n'"download_token: ${dltoken}"$'\n'"${file_header}" +./hosts/bowfile.sh:188: fi +./hosts/bowfile.sh:189: if [[ -z $file_header ]] ; then +./hosts/bowfile.sh:190: if [ $j == $maxfetchretries ] ; then +./hosts/bowfile.sh:191: printf "\\n" +./hosts/bowfile.sh:192: echo -e "${RED}| Failed to extract file info${NC}" +-- +./hosts/bowfile.sh:299: tor_curl_request --insecure -L \ +./hosts/bowfile.sh:300: 
--speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/bowfile.sh:301: -H "Host: $fshost" \ +./hosts/bowfile.sh:302: --referer "https://bowfile.com/" \ +./hosts/bowfile.sh:303: "$download_url" --continue-at - --output "$file_path" +./hosts/bowfile.sh:304: else +./hosts/bowfile.sh:305: tor_curl_request --insecure -L \ +./hosts/bowfile.sh:306: -H "Host: $fshost" \ +./hosts/bowfile.sh:307: --referer "https://bowfile.com/" \ +./hosts/bowfile.sh:308: "$download_url" --continue-at - --output "$file_path" +./hosts/bowfile.sh:309: fi +./hosts/bowfile.sh:310: else +./hosts/bowfile.sh:311: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/bowfile.sh:312: tor_curl_request --insecure -L \ +./hosts/bowfile.sh:313: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/bowfile.sh:314: -H "Host: $fshost" \ +./hosts/bowfile.sh:315: -H "User-Agent: $RandomUA" \ +./hosts/bowfile.sh:316: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/bowfile.sh:317: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/bowfile.sh:318: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/bowfile.sh:319: -H "Connection: keep-alive" \ +./hosts/bowfile.sh:320: -H "Cookie: lng=eng" \ +./hosts/bowfile.sh:321: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/bowfile.sh:322: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/bowfile.sh:329: tor_curl_request --insecure -L \ +./hosts/bowfile.sh:330: -H "Host: $fshost" \ +./hosts/bowfile.sh:331: -H "User-Agent: $RandomUA" \ +./hosts/bowfile.sh:332: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/bowfile.sh:333: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/bowfile.sh:334: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/bowfile.sh:335: -H "Connection: keep-alive" \ +./hosts/bowfile.sh:336: -H "Cookie: lng=eng" \ +./hosts/bowfile.sh:337: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/bowfile.sh:338: -H "Sec-Fetch-Dest: document" \ +./hosts/bowfile.sh:339: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/click.sh:143: response=$(tor_curl_request --insecure -L -s -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:144: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/click.sh:145: "$fixed_url") +./hosts/click.sh:146: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:147: debugHtml "${remote_url##*/}" "click_fetch$i" "${response}" +./hosts/click.sh:148: fi +./hosts/click.sh:149: if [[ -z $response ]] ; then +./hosts/click.sh:150: rm -f "${click_cookie_jar}"; +./hosts/click.sh:151: if [ $i == $maxfetchretries ] ; then +./hosts/click.sh:152: printf "\\n" +./hosts/click.sh:153: echo -e "${RED}| Failed to extract download link [1].${NC}" +-- +./hosts/click.sh:226: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/click.sh:227: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:228: --data "$form_data" "$fixed_url") +./hosts/click.sh:229: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:230: debugHtml "${remote_url##*/}" "click_post1_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/click.sh:231: fi +./hosts/click.sh:232: if [[ -z $response ]] ; then +./hosts/click.sh:233: if [ $i == $maxfetchretries ] ; then +./hosts/click.sh:234: rm -f "${click_cookie_jar}"; +./hosts/click.sh:235: printf "\\n" +./hosts/click.sh:236: echo -e "${RED}| Failed to extract download link [2].${NC}" +-- 
+./hosts/click.sh:345: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/click.sh:346: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:347: --data "$form_data" "$fixed_url") +./hosts/click.sh:348: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:349: debugHtml "${remote_url##*/}" "click_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/click.sh:350: fi +./hosts/click.sh:351: if [[ -z $response ]] ; then +./hosts/click.sh:352: if [ $i == $maxfetchretries ] ; then +./hosts/click.sh:353: rm -f "${click_cookie_jar}"; +./hosts/click.sh:354: printf "\\n" +./hosts/click.sh:355: echo -e "${RED}| Failed to extract download link [3].${NC}" +-- +./hosts/click.sh:434: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/click.sh:435: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:436: debugHtml "${remote_url##*/}" "click_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/click.sh:437: fi +./hosts/click.sh:438: if [[ -z $file_header ]] ; then +./hosts/click.sh:439: if [ $j == $maxfetchretries ] ; then +./hosts/click.sh:440: rm -f "${click_cookie_jar}"; +./hosts/click.sh:441: printf "\\n" +./hosts/click.sh:442: echo -e "${RED}| Failed to extract file info${NC}" +./hosts/click.sh:443: warnAndRetryUnknownError=true +./hosts/click.sh:444: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/click.sh:535: tor_curl_request --insecure \ +./hosts/click.sh:536: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/click.sh:537: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:538: -H "Host: $click_host" \ +./hosts/click.sh:539: --referer "$post_referer" \ +./hosts/click.sh:540: "$download_url" --continue-at - --output "$file_path" +./hosts/click.sh:541: else +./hosts/click.sh:542: tor_curl_request --insecure \ +./hosts/click.sh:543: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:544: -H "Host: $click_host" \ +./hosts/click.sh:545: --referer "$post_referer" \ +./hosts/click.sh:546: "$download_url" --continue-at - --output "$file_path" +./hosts/click.sh:547: fi +./hosts/click.sh:548: else +./hosts/click.sh:549: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/click.sh:550: tor_curl_request --insecure \ +./hosts/click.sh:551: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/click.sh:552: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:553: -H "Host: $click_host" \ +./hosts/click.sh:554: -H "User-Agent: $RandomUA" \ +./hosts/click.sh:555: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/click.sh:556: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/click.sh:557: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/click.sh:558: -H "Connection: keep-alive" \ +./hosts/click.sh:559: -H "Cookie: lng=eng" \ +./hosts/click.sh:560: -H "Upgrade-Insecure-Requests: 1" \ +-- +./hosts/click.sh:568: tor_curl_request --insecure \ +./hosts/click.sh:569: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ +./hosts/click.sh:570: -H "Host: $click_host" \ +./hosts/click.sh:571: -H "User-Agent: $RandomUA" \ +./hosts/click.sh:572: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/click.sh:573: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/click.sh:574: -H "Accept-Encoding: gzip, deflate, br" \ 
+./hosts/click.sh:575: -H "Connection: keep-alive" \ +./hosts/click.sh:576: -H "Cookie: lng=eng" \ +./hosts/click.sh:577: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/click.sh:578: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/dailyuploads.sh:97: response=$(tor_curl_request --insecure -L -s -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ +./hosts/dailyuploads.sh:98: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/dailyuploads.sh:99: "$fixed_url") +./hosts/dailyuploads.sh:100: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dailyuploads.sh:101: debugHtml "${remote_url##*/}" "daily_fetch$i" "${response}" +./hosts/dailyuploads.sh:102: fi +./hosts/dailyuploads.sh:103: if [[ -z $response ]] ; then +./hosts/dailyuploads.sh:104: rm -f "${daily_cookie_jar}"; +./hosts/dailyuploads.sh:105: if [ $i == $maxfetchretries ] ; then +./hosts/dailyuploads.sh:106: printf "\\n" +./hosts/dailyuploads.sh:107: echo -e "${RED}| Failed to extract download link [1].${NC}" +-- +./hosts/dailyuploads.sh:139: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" +./hosts/dailyuploads.sh:140: captcha_ocr_output=$(CaptchaOcrImage "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130") +./hosts/dailyuploads.sh:141: if [ "${DebugPluginsEnabled}" == "true" ]; then +./hosts/dailyuploads.sh:142: printf "\\n" +./hosts/dailyuploads.sh:143: echo -e "$captcha_ocr_output" +./hosts/dailyuploads.sh:144: fi +./hosts/dailyuploads.sh:145: captcha_code=$(grep -oP -m 1 "(?<=\[CAPTCHA_CODE\:).*?(?=\])" <<< "$captcha_ocr_output") +./hosts/dailyuploads.sh:146: rm -f "$tmp_captcha_img" +./hosts/dailyuploads.sh:147: rm -f "$captcha_ocr_output" +./hosts/dailyuploads.sh:148: local caplength=${#captcha_code} +./hosts/dailyuploads.sh:149: if [ -z "$captcha_code" ] || ((caplength != 4)) ; then +-- +./hosts/dailyuploads.sh:286: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/dailyuploads.sh:287: -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ +./hosts/dailyuploads.sh:288: --data "$form_data" "$fixed_url") +./hosts/dailyuploads.sh:289: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dailyuploads.sh:290: debugHtml "${remote_url##*/}" "daily_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/dailyuploads.sh:291: fi +./hosts/dailyuploads.sh:292: fi +./hosts/dailyuploads.sh:293: if [[ -z $response ]] ; then +./hosts/dailyuploads.sh:294: if [ $i == $maxfetchretries ] ; then +./hosts/dailyuploads.sh:295: rm -f "${daily_cookie_jar}"; +./hosts/dailyuploads.sh:296: printf "\\n" +-- +./hosts/dailyuploads.sh:392: file_header=$(tor_curl_request -i -s --head \ +./hosts/dailyuploads.sh:393: -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ +./hosts/dailyuploads.sh:394: --referer "${fixed_url}" \ +./hosts/dailyuploads.sh:395: "$download_url") +./hosts/dailyuploads.sh:396: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dailyuploads.sh:397: debugHtml "${remote_url##*/}" "daily_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/dailyuploads.sh:398: fi +./hosts/dailyuploads.sh:399: if [[ -z $file_header ]] ; then +./hosts/dailyuploads.sh:400: if [ $j == $maxfetchretries ] ; then +./hosts/dailyuploads.sh:401: rm -f "${daily_cookie_jar}"; +./hosts/dailyuploads.sh:402: printf "\\n" +-- +./hosts/dailyuploads.sh:498: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:499: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/dailyuploads.sh:500: -b "${daily_cookie_jar}" -c 
"${daily_cookie_jar}" \ +./hosts/dailyuploads.sh:501: --referer "${fixed_url}" \ +./hosts/dailyuploads.sh:502: "$download_url" --continue-at - --output "$file_path" +./hosts/dailyuploads.sh:503: else +./hosts/dailyuploads.sh:504: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:505: -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ +./hosts/dailyuploads.sh:506: --referer "${fixed_url}" \ +./hosts/dailyuploads.sh:507: "$download_url" --continue-at - --output "$file_path" +./hosts/dailyuploads.sh:508: fi +./hosts/dailyuploads.sh:509: else +./hosts/dailyuploads.sh:510: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/dailyuploads.sh:511: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:512: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/dailyuploads.sh:513: -H "User-Agent: $RandomUA" \ +./hosts/dailyuploads.sh:514: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/dailyuploads.sh:515: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/dailyuploads.sh:516: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dailyuploads.sh:517: -H "Connection: keep-alive" \ +./hosts/dailyuploads.sh:518: -H "Cookie: lng=eng" \ +./hosts/dailyuploads.sh:519: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/dailyuploads.sh:520: -H "Sec-Fetch-Dest: document" \ +./hosts/dailyuploads.sh:521: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/dailyuploads.sh:528: tor_curl_request --insecure \ +./hosts/dailyuploads.sh:529: -H "User-Agent: $RandomUA" \ +./hosts/dailyuploads.sh:530: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/dailyuploads.sh:531: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/dailyuploads.sh:532: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dailyuploads.sh:533: -H "Connection: keep-alive" \ +./hosts/dailyuploads.sh:534: -H "Cookie: lng=eng" \ +./hosts/dailyuploads.sh:535: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/dailyuploads.sh:536: -H "Sec-Fetch-Dest: document" \ +./hosts/dailyuploads.sh:537: -H "Sec-Fetch-Mode: navigate" \ +./hosts/dailyuploads.sh:538: -H "Sec-Fetch-Site: same-origin" \ +-- +./hosts/dataupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url") +./hosts/dataupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dataupload.sh:92: debugHtml "${remote_url##*/}" "dup_dwnpage$i" "${response}" +./hosts/dataupload.sh:93: fi +./hosts/dataupload.sh:94: if [[ -z $response ]] ; then +./hosts/dataupload.sh:95: rm -f "${dup_cookie_jar}"; +./hosts/dataupload.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/dataupload.sh:97: printf "\\n" +./hosts/dataupload.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/dataupload.sh:99: warnAndRetryUnknownError=true +./hosts/dataupload.sh:100: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/dataupload.sh:166: response=$(tor_curl_request --insecure -svo. 
-X POST \ +./hosts/dataupload.sh:167: -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ +./hosts/dataupload.sh:168: --data-raw "$form_data" "$post_action" 2>&1) +./hosts/dataupload.sh:169: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dataupload.sh:170: debugHtml "${remote_url##*/}" "dup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/dataupload.sh:171: fi +./hosts/dataupload.sh:172: if [[ -z $response ]] ; then +./hosts/dataupload.sh:173: if [ $i == $maxfetchretries ] ; then +./hosts/dataupload.sh:174: rm -f "${dup_cookie_jar}"; +./hosts/dataupload.sh:175: printf "\\n" +./hosts/dataupload.sh:176: echo -e "${RED}| Failed to extract download link [3].${NC}" +-- +./hosts/dataupload.sh:234: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") +./hosts/dataupload.sh:235: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dataupload.sh:236: debugHtml "${remote_url##*/}" "dup_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/dataupload.sh:237: fi +./hosts/dataupload.sh:238: if [[ -z $file_header ]] ; then +./hosts/dataupload.sh:239: if [ $j == $maxfetchretries ] ; then +./hosts/dataupload.sh:240: rm -f "${dup_cookie_jar}"; +./hosts/dataupload.sh:241: printf "\\n" +./hosts/dataupload.sh:242: echo -e "${RED}| Failed to extract file info [1]${NC}" +./hosts/dataupload.sh:243: warnAndRetryUnknownError=true +./hosts/dataupload.sh:244: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/dataupload.sh:351: tor_curl_request --insecure \ +./hosts/dataupload.sh:352: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/dataupload.sh:353: -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ +./hosts/dataupload.sh:354: -H "Host: dataupload.net" \ +./hosts/dataupload.sh:355: "$download_url" --continue-at - --output "$file_path" +./hosts/dataupload.sh:356: else +./hosts/dataupload.sh:357: tor_curl_request --insecure \ +./hosts/dataupload.sh:358: -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ +./hosts/dataupload.sh:359: -H "Host: dataupload.net" \ +./hosts/dataupload.sh:360: "$download_url" --continue-at - --output "$file_path" +./hosts/dataupload.sh:361: fi +./hosts/dataupload.sh:362: else +./hosts/dataupload.sh:363: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/dataupload.sh:364: tor_curl_request --insecure \ +./hosts/dataupload.sh:365: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/dataupload.sh:366: -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ +./hosts/dataupload.sh:367: -H "Host: dataupload.net" \ +./hosts/dataupload.sh:368: -H "User-Agent: $RandomUA" \ +./hosts/dataupload.sh:369: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/dataupload.sh:370: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/dataupload.sh:371: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dataupload.sh:372: -H "Connection: keep-alive" \ +./hosts/dataupload.sh:373: -H "Cookie: lng=eng" \ +./hosts/dataupload.sh:374: -H "Upgrade-Insecure-Requests: 1" \ +-- +./hosts/dataupload.sh:381: tor_curl_request --insecure \ +./hosts/dataupload.sh:382: -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ +./hosts/dataupload.sh:383: -H "Host: dataupload.net" \ +./hosts/dataupload.sh:384: -H "User-Agent: $RandomUA" \ +./hosts/dataupload.sh:385: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/dataupload.sh:386: -H "Accept-Language: en-US,en;q=0.5" \ 
+./hosts/dataupload.sh:387: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dataupload.sh:388: -H "Connection: keep-alive" \ +./hosts/dataupload.sh:389: -H "Cookie: lng=eng" \ +./hosts/dataupload.sh:390: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/dataupload.sh:391: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ +./hosts/dosya.sh:110: -c "${dosya_cookie_jar}" \ +./hosts/dosya.sh:111: "${remote_url}") +./hosts/dosya.sh:112: else +./hosts/dosya.sh:113: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ +./hosts/dosya.sh:114: -H "User-Agent: $RandomUA" \ +./hosts/dosya.sh:115: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ +./hosts/dosya.sh:116: -H 'Accept-Language: en-US,en;q=0.5' \ +./hosts/dosya.sh:117: -H 'Accept-Encoding: gzip, deflate, br' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' \ +./hosts/dosya.sh:118: -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ +./hosts/dosya.sh:119: -c "${dosya_cookie_jar}" \ +./hosts/dosya.sh:120: "${remote_url}") +./hosts/dosya.sh:121: fi +./hosts/dosya.sh:122: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dosya.sh:123: debugHtml "${remote_url##*/}" "dos_fetch_$i" "remote_url: ${remote_url}"$'\n'"User-Agent: $RandomUA"$'\n'"${PAGE}" +-- +./hosts/dosya.sh:173: file_header=$(tor_curl_request_extended --insecure --head -L -s \ +./hosts/dosya.sh:174: -H "Cookie: filehosting=$cookie_filehosting" \ +./hosts/dosya.sh:175: -H "Host: www.dosyaupload.com" \ +./hosts/dosya.sh:176: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/dosya.sh:177: -b "${dosya_cookie_jar}" -c "${dosya_cookie_jar}" "$dos_url") +./hosts/dosya.sh:178: else +./hosts/dosya.sh:179: file_header=$(tor_curl_request_extended --insecure --head -L -s \ +./hosts/dosya.sh:180: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/dosya.sh:181: -H "User-Agent: $RandomUA" \ +./hosts/dosya.sh:182: -H "Cookie: filehosting=$cookie_filehosting" \ +./hosts/dosya.sh:183: -H "Host: www.dosyaupload.com" \ +./hosts/dosya.sh:184: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ +./hosts/dosya.sh:185: -H 'Accept-Language: en-US,en;q=0.5' \ +./hosts/dosya.sh:186: -H 'Accept-Encoding: gzip, deflate, br' \ +./hosts/dosya.sh:187: -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' \ +./hosts/dosya.sh:188: -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ +./hosts/dosya.sh:189: -b "${dosya_cookie_jar}" -c "${dosya_cookie_jar}" "$dos_url") +-- +./hosts/dosya.sh:402: tor_curl_request -L -G --insecure \ +./hosts/dosya.sh:403: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/jxl,image/webp,*/*;q=0.8" \ +./hosts/dosya.sh:404: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/dosya.sh:405: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dosya.sh:406: -H "Connection: keep-alive" \ +./hosts/dosya.sh:407: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/dosya.sh:408: -H "Sec-Fetch-Dest: document" \ +./hosts/dosya.sh:409: -H "Sec-Fetch-Mode: navigate" \ +./hosts/dosya.sh:410: -H "Sec-Fetch-Site: none" \ +./hosts/dosya.sh:411: -H "Sec-Fetch-User: ?1" \ +./hosts/dosya.sh:412: -H "DNT: 1" \ +-- +./hosts/dosya.sh:417: tor_curl_request -L -G --insecure \ +./hosts/dosya.sh:418: -H 
"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/jxl,image/webp,*/*;q=0.8" \ +./hosts/dosya.sh:419: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/dosya.sh:420: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/dosya.sh:421: -H "Connection: keep-alive" \ +./hosts/dosya.sh:422: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/dosya.sh:423: -H "Sec-Fetch-Dest: document" \ +./hosts/dosya.sh:424: -H "Sec-Fetch-Mode: navigate" \ +./hosts/dosya.sh:425: -H "Sec-Fetch-Site: none" \ +./hosts/dosya.sh:426: -H "Sec-Fetch-User: ?1" \ +./hosts/dosya.sh:427: -H "DNT: 1" \ +-- +./hosts/downloadgg.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" "$remote_url") +./hosts/downloadgg.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/downloadgg.sh:92: debugHtml "${remote_url##*/}" "dgg_dwnpage$i" "${response}" +./hosts/downloadgg.sh:93: fi +./hosts/downloadgg.sh:94: if [[ -z $response ]] ; then +./hosts/downloadgg.sh:95: rm -f "${dgg_cookie_jar}"; +./hosts/downloadgg.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/downloadgg.sh:97: printf "\\n" +./hosts/downloadgg.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/downloadgg.sh:99: warnAndRetryUnknownError=true +./hosts/downloadgg.sh:100: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/downloadgg.sh:169: response=$(tor_curl_request --insecure -svo. -X POST \ +./hosts/downloadgg.sh:170: -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" \ +./hosts/downloadgg.sh:171: --data-raw "$form_data" "$post_action" 2>&1) +./hosts/downloadgg.sh:172: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/downloadgg.sh:173: debugHtml "${remote_url##*/}" "dgg_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/downloadgg.sh:174: fi +./hosts/downloadgg.sh:175: if [[ -z $response ]] ; then +./hosts/downloadgg.sh:176: echo -e "${RED}| Failed to extract download link${NC}" +./hosts/downloadgg.sh:177: warnAndRetryUnknownError=true +./hosts/downloadgg.sh:178: if [ "${finalAttempt}" == "true" ] ; then +./hosts/downloadgg.sh:179: rm -f "${dgg_cookie_jar}"; +-- +./hosts/downloadgg.sh:257: tor_curl_request --insecure -X POST \ +./hosts/downloadgg.sh:258: -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/downloadgg.sh:259: -H "Host: download.gg" \ +./hosts/downloadgg.sh:260: -H "Origin: https://download.gg" \ +./hosts/downloadgg.sh:261: --referer "$remote_url" \ +./hosts/downloadgg.sh:262: --data "$form_data" "$post_action" \ +./hosts/downloadgg.sh:263: --output "$file_path" +./hosts/downloadgg.sh:264: else +./hosts/downloadgg.sh:265: tor_curl_request --insecure -X POST \ +./hosts/downloadgg.sh:266: -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" \ +./hosts/downloadgg.sh:267: -H "Host: download.gg" \ +./hosts/downloadgg.sh:268: -H "Origin: https://download.gg" \ +./hosts/downloadgg.sh:269: --referer "$remote_url" \ +./hosts/downloadgg.sh:270: --data "$form_data" "$post_action" \ +./hosts/downloadgg.sh:271: --output "$file_path" +./hosts/downloadgg.sh:272: fi +./hosts/downloadgg.sh:273: else +./hosts/downloadgg.sh:274: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/downloadgg.sh:275: tor_curl_request --insecure -X POST \ +./hosts/downloadgg.sh:276: -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/downloadgg.sh:277: -H "Host: download.gg" \ +./hosts/downloadgg.sh:278: -H 
"Origin: https://download.gg" \ +./hosts/downloadgg.sh:279: -H "User-Agent: $RandomUA" \ +./hosts/downloadgg.sh:280: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/downloadgg.sh:281: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/downloadgg.sh:282: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/downloadgg.sh:283: -H "Connection: keep-alive" \ +./hosts/downloadgg.sh:284: -H "Cookie: lng=eng" \ +./hosts/downloadgg.sh:285: -H "Upgrade-Insecure-Requests: 1" \ +-- +./hosts/downloadgg.sh:294: tor_curl_request --insecure -X POST \ +./hosts/downloadgg.sh:295: -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" \ +./hosts/downloadgg.sh:296: -H "Host: download.gg" \ +./hosts/downloadgg.sh:297: -H "Origin: https://download.gg" \ +./hosts/downloadgg.sh:298: -H "User-Agent: $RandomUA" \ +./hosts/downloadgg.sh:299: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/downloadgg.sh:300: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/downloadgg.sh:301: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/downloadgg.sh:302: -H "Connection: keep-alive" \ +./hosts/downloadgg.sh:303: -H "Cookie: lng=eng" \ +./hosts/downloadgg.sh:304: -H "Upgrade-Insecure-Requests: 1" \ +-- +./hosts/examples/ExampleNewHost.sh:102: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") +./hosts/examples/ExampleNewHost.sh:103: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/examples/ExampleNewHost.sh:104: debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/examples/ExampleNewHost.sh:105: fi +./hosts/examples/ExampleNewHost.sh:106: if [ ! -z "$file_header" ] ; then +./hosts/examples/ExampleNewHost.sh:107: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/examples/ExampleNewHost.sh:108: if [ $j == $maxfetchretries ] ; then +./hosts/examples/ExampleNewHost.sh:109: printf "\\n" +./hosts/examples/ExampleNewHost.sh:110: echo -e "${RED}| The file has been removed (404).${NC}" +./hosts/examples/ExampleNewHost.sh:111: removedDownload "${remote_url}" +./hosts/examples/ExampleNewHost.sh:112: exitDownloadNotAvailable=true +-- +./hosts/examples/ExampleNewHost.sh:199: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/examples/ExampleNewHost.sh:200: else +./hosts/examples/ExampleNewHost.sh:201: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/examples/ExampleNewHost.sh:202: fi +./hosts/examples/ExampleNewHost.sh:203: received_file_size=0 +./hosts/examples/ExampleNewHost.sh:204: if [ -f "$file_path" ] ; then +./hosts/examples/ExampleNewHost.sh:205: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/examples/ExampleNewHost.sh:206: fi +./hosts/examples/ExampleNewHost.sh:207: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/examples/ExampleNewHost.sh:208: containsHtml=false +./hosts/examples/ExampleNewHost.sh:209: else +./hosts/examples/ExampleNewHost.sh:210: containsHtml=true +./hosts/examples/ExampleNewHost.sh:211: fi +-- +./hosts/examples/up_example.sh:112: response=$(tor_curl_upload --insecure \ +./hosts/examples/up_example.sh:113: -F "files[]=@${arrFiles[@]}" \ +./hosts/examples/up_example.sh:114: -F "expire=129600" \ 
+./hosts/examples/up_example.sh:115: -F "autodestroy=0" \ +./hosts/examples/up_example.sh:116: -F "randomizefn=0" \ +./hosts/examples/up_example.sh:117: -F "shorturl=0" \ +./hosts/examples/up_example.sh:118: "${PostUrlHost}") +./hosts/examples/up_example.sh:119: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/examples/up_example.sh:120: debugHtml "${remote_url##*/}" "${_hostCode}_dwnpage$j" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/examples/up_example.sh:121: fi +./hosts/examples/up_example.sh:122: successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$responseHtml}" +-- +./hosts/filedot.sh:112: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ +./hosts/filedot.sh:113: -H "User-Agent: $RandomUA" \ +./hosts/filedot.sh:114: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ +./hosts/filedot.sh:115: -H 'Accept-Language: en-US,en;q=0.5' \ +./hosts/filedot.sh:116: -H 'Accept-Encoding: gzip, deflate, br' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' \ +./hosts/filedot.sh:117: -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ +./hosts/filedot.sh:118: -c "${fdot_cookie_jar}" \ +./hosts/filedot.sh:119: "https://filedot.to/login.html") +./hosts/filedot.sh:120: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/filedot.sh:121: debugHtml "${remote_url##*/}" "fdot_login_$a" "${PAGE}" +./hosts/filedot.sh:122: fi +-- +./hosts/filedot.sh:153: resp_login=$(tor_curl_request --insecure -L -s \ +./hosts/filedot.sh:154: -H "Host: filedot.to" \ +./hosts/filedot.sh:155: -H "User-Agent: $RandomUA" \ +./hosts/filedot.sh:156: -H "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/filedot.sh:157: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/filedot.sh:158: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/filedot.sh:159: -H "Content-Type: application/x-www-form-urlencoded" \ +./hosts/filedot.sh:160: -H "Origin: https://filedot.to" \ +./hosts/filedot.sh:161: -H "Connection: keep-alive" \ +./hosts/filedot.sh:162: -H "Referer: https://filedot.to/login.html" \ +./hosts/filedot.sh:163: -H "DNT: 1" \ +-- +./hosts/filedot.sh:240: response=$(tor_curl_request --insecure -L -s \ +./hosts/filedot.sh:241: -H "Host: filedot.to" \ +./hosts/filedot.sh:242: -H "User-Agent: $RandomUA" \ +./hosts/filedot.sh:243: -H "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/filedot.sh:244: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/filedot.sh:245: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/filedot.sh:246: -H "Content-Type: application/x-www-form-urlencoded" \ +./hosts/filedot.sh:247: -H "Origin: https://filedot.to" \ +./hosts/filedot.sh:248: -H "Connection: keep-alive" \ +./hosts/filedot.sh:249: -H "Referer: https://filedot.to/login.html" \ +./hosts/filedot.sh:250: -H "DNT: 1" \ +-- +./hosts/filedot.sh:320: response=$(tor_curl_request --insecure -L -s \ +./hosts/filedot.sh:321: -H "Host: filedot.to" \ +./hosts/filedot.sh:322: -H "User-Agent: $RandomUA" \ +./hosts/filedot.sh:323: -H "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/filedot.sh:324: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/filedot.sh:325: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/filedot.sh:326: -H "Content-Type: application/x-www-form-urlencoded" \ +./hosts/filedot.sh:327: -H "Origin: https://filedot.to" 
\ +./hosts/filedot.sh:328: -H "Connection: keep-alive" \ +./hosts/filedot.sh:329: -H "Referer: $remote_url" \ +./hosts/filedot.sh:330: -H "DNT: 1" \ +-- +./hosts/filedot.sh:406: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/filedot.sh:407: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/filedot.sh:408: debugHtml "${remote_url##*/}" "fdot_head_$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/filedot.sh:409: fi +./hosts/filedot.sh:410: if [[ -z $file_header ]] ; then +./hosts/filedot.sh:411: if [ $j == $maxfetchretries ] ; then +./hosts/filedot.sh:412: printf "\\n" +./hosts/filedot.sh:413: echo -e "${RED}| Failed to extract file info.${NC}" +./hosts/filedot.sh:414: warnAndRetryUnknownError=true +./hosts/filedot.sh:415: if [ "${finalAttempt}" == "true" ] ; then +./hosts/filedot.sh:416: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/filedot.sh:499: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/filedot.sh:500: else +./hosts/filedot.sh:501: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/filedot.sh:502: fi +./hosts/filedot.sh:503: received_file_size=0 +./hosts/filedot.sh:504: if [ -f "$file_path" ] ; then +./hosts/filedot.sh:505: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/filedot.sh:506: fi +./hosts/filedot.sh:507: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/filedot.sh:508: containsHtml=false +./hosts/filedot.sh:509: else +./hosts/filedot.sh:510: containsHtml=true +./hosts/filedot.sh:511: fi +-- +./hosts/filehaus.sh:100: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") +./hosts/filehaus.sh:101: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/filehaus.sh:102: debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/filehaus.sh:103: fi +./hosts/filehaus.sh:104: if [ ! 
-z "$file_header" ] ; then +./hosts/filehaus.sh:105: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/filehaus.sh:106: if [ $j == $maxfetchretries ] ; then +./hosts/filehaus.sh:107: printf "\\n" +./hosts/filehaus.sh:108: echo -e "${RED}| The file has been removed (404).${NC}" +./hosts/filehaus.sh:109: removedDownload "${remote_url}" +./hosts/filehaus.sh:110: exitDownloadNotAvailable=true +-- +./hosts/filehaus.sh:197: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:198: else +./hosts/filehaus.sh:199: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:200: fi +./hosts/filehaus.sh:201: received_file_size=0 +./hosts/filehaus.sh:202: if [ -f "$file_path" ] ; then +./hosts/filehaus.sh:203: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/filehaus.sh:204: fi +./hosts/filehaus.sh:205: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/filehaus.sh:206: containsHtml=false +./hosts/filehaus.sh:207: else +./hosts/filehaus.sh:208: containsHtml=true +./hosts/filehaus.sh:209: fi +-- +./hosts/firestorage.sh:98: response=$(tor_curl_request --insecure -L -s "${fixed_url}") +./hosts/firestorage.sh:99: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/firestorage.sh:100: debugHtml "${remote_url##*/}" "fs_${fetchnum}fetch_$j" "fixed_url: ${fixed_url}"$'\n'"${response}" +./hosts/firestorage.sh:101: fi +./hosts/firestorage.sh:102: if [[ -z $response ]] ; then +./hosts/firestorage.sh:103: if [ $j == $maxfetchretries ] ; then +./hosts/firestorage.sh:104: printf "\\n" +./hosts/firestorage.sh:105: echo -e "${RED}| Failed to extract link.${NC}" +./hosts/firestorage.sh:106: warnAndRetryUnknownError=true +./hosts/firestorage.sh:107: if [ "${finalAttempt}" == "true" ] ; then +./hosts/firestorage.sh:108: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/firestorage.sh:226: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") +./hosts/firestorage.sh:227: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/firestorage.sh:228: debugHtml "${remote_url##*/}" "fs_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/firestorage.sh:229: fi +./hosts/firestorage.sh:230: if [[ -z $file_header ]] ; then +./hosts/firestorage.sh:231: if [ $j == $maxfetchretries ] ; then +./hosts/firestorage.sh:232: printf "\\n" +./hosts/firestorage.sh:233: echo -e "${RED}| Failed to extract file info${NC}" +./hosts/firestorage.sh:234: warnAndRetryUnknownError=true +./hosts/firestorage.sh:235: if [ "${finalAttempt}" == "true" ] ; then +./hosts/firestorage.sh:236: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/firestorage.sh:335: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/firestorage.sh:336: else +./hosts/firestorage.sh:337: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/firestorage.sh:338: fi +./hosts/firestorage.sh:339: received_file_size=0 +./hosts/firestorage.sh:340: if [ -f "$file_path" ] ; then +./hosts/firestorage.sh:341: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/firestorage.sh:342: fi +./hosts/firestorage.sh:343: if CheckNoHtml "$remote_url" "$filename" 
"$file_path" ; then +./hosts/firestorage.sh:344: containsHtml=false +./hosts/firestorage.sh:345: else +./hosts/firestorage.sh:346: containsHtml=true +./hosts/firestorage.sh:347: fi +-- +./hosts/gofile.sh:97: response=$(tor_curl_request --insecure -s -X POST \ +./hosts/gofile.sh:98: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:99: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:100: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:101: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:102: -H "Connection: keep-alive" \ +./hosts/gofile.sh:103: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:104: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:105: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:106: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:107: -H "Sec-Fetch-User: ?1" \ +-- +./hosts/gofile.sh:170: response=$(tor_curl_request --insecure -G -L -s \ +./hosts/gofile.sh:171: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:172: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:173: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:174: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:175: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:176: -H "Connection: keep-alive" \ +./hosts/gofile.sh:177: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:178: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:179: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:180: -H "Sec-Fetch-Site: none" \ +-- +./hosts/gofile.sh:241: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/gofile.sh:242: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:243: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:244: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:245: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:246: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:247: -H "Connection: keep-alive" \ +./hosts/gofile.sh:248: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:249: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:250: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:251: -H "Sec-Fetch-Site: none" \ +-- +./hosts/gofile.sh:359: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/gofile.sh:360: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:361: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:362: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:363: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:364: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:365: -H "Connection: keep-alive" \ +./hosts/gofile.sh:366: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:367: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:368: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:369: -H "Sec-Fetch-Site: none" \ +-- +./hosts/gofile.sh:373: tor_curl_request --insecure -G \ +./hosts/gofile.sh:374: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:375: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:376: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:377: -H "Accept-Language: en-US,en;q=0.5" 
\ +./hosts/gofile.sh:378: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:379: -H "Connection: keep-alive" \ +./hosts/gofile.sh:380: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:381: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:382: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:383: -H "Sec-Fetch-Site: none" \ +-- +./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:109: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:110: debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" +./hosts/hexload.sh:111: fi +./hosts/hexload.sh:112: fi +./hosts/hexload.sh:113: else +./hosts/hexload.sh:114: if [ "$hexUseDownload" == "download2" ]; then +./hosts/hexload.sh:115: form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" +./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:117: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:118: debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" +./hosts/hexload.sh:119: fi +./hosts/hexload.sh:120: else +./hosts/hexload.sh:121: form_data="op=download2&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" +./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") +./hosts/hexload.sh:123: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:124: debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" +./hosts/hexload.sh:125: fi +./hosts/hexload.sh:126: fi +./hosts/hexload.sh:127: fi +./hosts/hexload.sh:128: filename=$(echo "$response" | grep -oPi '(?<="file_name":")[^"]+(?=")' | base64 --decode) +./hosts/hexload.sh:129: download_url=$(echo "$response" | grep -oPi '(?<="link":")[^"]+(?=")' | base64 --decode) +./hosts/hexload.sh:130: download_url=$(urlencode_spaces "$download_url") +./hosts/hexload.sh:131: if grep -Eqi "Sorry, you have been blocked" <<< "$response"; then +./hosts/hexload.sh:132: if [ $j == $hexmaxfetchfileretries ] ; then +-- +./hosts/hexload.sh:254: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$download_url") +./hosts/hexload.sh:255: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:256: debugHtml "${remote_url##*/}" "hex_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/hexload.sh:257: fi +./hosts/hexload.sh:258: if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then +./hosts/hexload.sh:259: if ((j < hexmaxfetchfileretries)); then +./hosts/hexload.sh:260: continue +./hosts/hexload.sh:261: else +./hosts/hexload.sh:262: printf "\\n" +./hosts/hexload.sh:263: echo -e "${RED}| Bad header response…${NC}" +./hosts/hexload.sh:264: exitDownloadError=true +-- +./hosts/hexload.sh:321: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/hexload.sh:322: else +./hosts/hexload.sh:323: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/hexload.sh:324: fi +./hosts/hexload.sh:325: received_file_size=0 +./hosts/hexload.sh:326: if [ -f "$file_path" ] ; then +./hosts/hexload.sh:327: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/hexload.sh:328: fi +./hosts/hexload.sh:329: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/hexload.sh:330: containsHtml=false +./hosts/hexload.sh:331: else +./hosts/hexload.sh:332: containsHtml=true +./hosts/hexload.sh:333: fi +-- +./hosts/innocent.sh:98: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./hosts/innocent.sh:99: tee "${WorkDir}/.temp/directhead" & +./hosts/innocent.sh:100: sleep 6 +./hosts/innocent.sh:101: [ -s "${WorkDir}/.temp/directhead" ] +./hosts/innocent.sh:102: kill $! 2>/dev/null +./hosts/innocent.sh:103: ) +./hosts/innocent.sh:104: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./hosts/innocent.sh:105: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +./hosts/innocent.sh:106: fi +./hosts/innocent.sh:107: rm -f "${WorkDir}/.temp/directhead" +./hosts/innocent.sh:108: elif ((j % 3 == 0)); then +-- +./hosts/innocent.sh:110: file_header=$(tor_curl_request --insecure -m 16 -s -D - -o /dev/null \ +./hosts/innocent.sh:111: -H "Connection: keep-alive" \ +./hosts/innocent.sh:112: -w 'EffectiveUrl=%{url_effective}' \ +./hosts/innocent.sh:113: "$download_url") +./hosts/innocent.sh:114: else +./hosts/innocent.sh:115: printf "| Retrieving Head: attempt #$j" +./hosts/innocent.sh:116: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") +./hosts/innocent.sh:117: fi +./hosts/innocent.sh:118: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/innocent.sh:119: debugHtml "${remote_url##*/}" "inno_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/innocent.sh:120: fi +./hosts/innocent.sh:121: if [ ! -z "$file_header" ] ; then +./hosts/innocent.sh:122: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/innocent.sh:123: printf "\\n" +./hosts/innocent.sh:124: echo -e "${RED}| Not Found (404). 
The file has been removed.${NC}" +./hosts/innocent.sh:125: removedDownload "${remote_url}" +./hosts/innocent.sh:126: exitDownloadNotAvailable=true +-- +./hosts/innocent.sh:207: tor_curl_request --insecure "$download_url" --output "$file_path" +./hosts/innocent.sh:208: received_file_size=0 +./hosts/innocent.sh:209: if [ -f "$file_path" ] ; then +./hosts/innocent.sh:210: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/innocent.sh:211: fi +./hosts/innocent.sh:212: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/innocent.sh:213: containsHtml=false +./hosts/innocent.sh:214: else +./hosts/innocent.sh:215: containsHtml=true +./hosts/innocent.sh:216: fi +./hosts/innocent.sh:217: if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then +-- +./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}") +./hosts/kraken.sh:105: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/kraken.sh:106: debugHtml "${krak_id}" "kraken_token${num_attempt}_$i" "url: ${fixed_url}"$'\n'"krakenid: ${krak_id}"$'\n'"${PAGE}" +./hosts/kraken.sh:107: fi +./hosts/kraken.sh:108: if grep -Eqi 'sendFormCaptcha()' <<< "${PAGE}"; then +./hosts/kraken.sh:109: rm -f "$kraken_cookie_jar"; +./hosts/kraken.sh:110: printf "\n" +./hosts/kraken.sh:111: echo -e "${RED}| Captcha required (Recaptcha)${NC}" +./hosts/kraken.sh:112: exitDownloadError=true +./hosts/kraken.sh:113: failedRetryDownload "${remote_url}" "Captcha required (Recaptcha)" "" +./hosts/kraken.sh:114: return 1 +-- +./hosts/kraken.sh:169: down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}") +./hosts/kraken.sh:170: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/kraken.sh:171: debugHtml "${krak_id}" "kraken_url${num_attempt}_1" "action: ${kraken_action}, token: ${kraken_token}"$'\n'"${down_request}" +./hosts/kraken.sh:172: fi +./hosts/kraken.sh:173: if ! grep -Eqi '"status":"ok"' <<< "${down_request}"; then +./hosts/kraken.sh:174: echo -e "${RED}| Failed to get download url${NC}" +./hosts/kraken.sh:175: rm -f "$kraken_cookie_jar"; +./hosts/kraken.sh:176: return 1 +./hosts/kraken.sh:177: else +./hosts/kraken.sh:178: kraken_url=$(grep -oP '"url":"\K[^"]+' <<< "${down_request}") +./hosts/kraken.sh:179: download_url=${kraken_url//\\/} +-- +./hosts/kraken.sh:186: file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url") +./hosts/kraken.sh:187: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/kraken.sh:188: debugHtml "${krak_id}" "kraken_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/kraken.sh:189: fi +./hosts/kraken.sh:190: rm -f "$kraken_cookie_jar"; +./hosts/kraken.sh:191: if [ ! 
-z "$file_header" ] ; then +./hosts/kraken.sh:192: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/kraken.sh:193: echo -e "${RED}| The file has been removed (404).${NC}" +./hosts/kraken.sh:194: removedDownload "${remote_url}" +./hosts/kraken.sh:195: exitDownloadNotAvailable=true +./hosts/kraken.sh:196: return 1 +-- +./hosts/kraken.sh:286: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" +./hosts/kraken.sh:287: else +./hosts/kraken.sh:288: tor_curl_request --insecure --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" +./hosts/kraken.sh:289: fi +./hosts/kraken.sh:290: received_file_size=0 +./hosts/kraken.sh:291: if [ -f "$file_path" ] ; then +./hosts/kraken.sh:292: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/kraken.sh:293: fi +./hosts/kraken.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/kraken.sh:295: containsHtml=false +./hosts/kraken.sh:296: else +./hosts/kraken.sh:297: containsHtml=true +./hosts/kraken.sh:298: fi +-- +./hosts/nippy.sh:121: response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url") +./hosts/nippy.sh:122: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/nippy.sh:123: debugHtml "${remote_url##*/}" "nippy_dwnpage$i" "${response}" +./hosts/nippy.sh:124: fi +./hosts/nippy.sh:125: if [[ -z $response ]] ; then +./hosts/nippy.sh:126: rm -f "${nippy_cookie_jar}"; +./hosts/nippy.sh:127: if [ $i == $maxfetchretries ] ; then +./hosts/nippy.sh:128: printf "\\n" +./hosts/nippy.sh:129: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/nippy.sh:130: warnAndRetryUnknownError=true +./hosts/nippy.sh:131: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/nippy.sh:190: file_header=$(tor_curl_request --insecure -L --head -s \ +./hosts/nippy.sh:191: -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" \ +./hosts/nippy.sh:192: "$download_url") +./hosts/nippy.sh:193: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/nippy.sh:194: debugHtml "${remote_url##*/}" "nippy_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" +./hosts/nippy.sh:195: fi +./hosts/nippy.sh:196: if [[ -z $file_header ]] ; then +./hosts/nippy.sh:197: if [ $j == $maxfetchretries ] ; then +./hosts/nippy.sh:198: rm -f "${nippy_cookie_jar}"; +./hosts/nippy.sh:199: printf "\\n" +./hosts/nippy.sh:200: echo -e "${RED}| Failed to extract file info${NC}" +-- +./hosts/nippy.sh:301: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/nippy.sh:302: "$download_url" --continue-at - --output "$file_path" +./hosts/nippy.sh:303: else +./hosts/nippy.sh:304: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./hosts/nippy.sh:305: fi +./hosts/nippy.sh:306: received_file_size=0 +./hosts/nippy.sh:307: if [ -f "$file_path" ] ; then +./hosts/nippy.sh:308: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/nippy.sh:309: fi +./hosts/nippy.sh:310: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/nippy.sh:311: containsHtml=false +./hosts/nippy.sh:312: else +./hosts/nippy.sh:313: containsHtml=true +./hosts/nippy.sh:314: fi +-- +./hosts/oshi.sh:101: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") +./hosts/oshi.sh:102: if [ 
"${DebugAllEnabled}" == "true" ] ; then +./hosts/oshi.sh:103: debugHtml "${remote_url##*/}" "oshi_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/oshi.sh:104: fi +./hosts/oshi.sh:105: if [ ! -z "$file_header" ] ; then +./hosts/oshi.sh:106: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/oshi.sh:107: echo -e "${RED}| O shi-, (404). The file has been removed.${NC}" +./hosts/oshi.sh:108: removedDownload "${remote_url}" +./hosts/oshi.sh:109: exitDownloadNotAvailable=true +./hosts/oshi.sh:110: return 1 +./hosts/oshi.sh:111: fi +-- +./hosts/oshi.sh:195: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:196: else +./hosts/oshi.sh:197: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:198: fi +./hosts/oshi.sh:199: received_file_size=0 +./hosts/oshi.sh:200: if [ -f "$file_path" ] ; then +./hosts/oshi.sh:201: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/oshi.sh:202: fi +./hosts/oshi.sh:203: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/oshi.sh:204: containsHtml=false +./hosts/oshi.sh:205: else +./hosts/oshi.sh:206: containsHtml=true +./hosts/oshi.sh:207: fi +-- +./hosts/pixeldrain.sh:94: response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid") +./hosts/pixeldrain.sh:95: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/pixeldrain.sh:96: debugHtml "${remote_url##*/}" "pd_fetch$i" "$response" +./hosts/pixeldrain.sh:97: fi +./hosts/pixeldrain.sh:98: if [ ! -z "$response" ] ; then +./hosts/pixeldrain.sh:99: if grep -q -Eq '"views":' <<< "$response"; then +./hosts/pixeldrain.sh:100: pdpreviews=$(grep -o -P '(?<="views":).+?(?=,")' <<< "$response") +./hosts/pixeldrain.sh:101: fi +./hosts/pixeldrain.sh:102: if grep -i -Eq "You have reached the maximum number of open download connections" <<< "$response"; then +./hosts/pixeldrain.sh:103: if [ $i -ge 5 ] ; then +./hosts/pixeldrain.sh:104: printf "\\n" +-- +./hosts/pixeldrain.sh:256: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") +./hosts/pixeldrain.sh:257: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/pixeldrain.sh:258: debugHtml "${remote_url##*/}" "pd_head$i" "url: ${pdheadurl}"$'\n'"${file_header}" +./hosts/pixeldrain.sh:259: fi +./hosts/pixeldrain.sh:260: if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then +./hosts/pixeldrain.sh:261: if ((i < 6)); then +./hosts/pixeldrain.sh:262: continue +./hosts/pixeldrain.sh:263: else +./hosts/pixeldrain.sh:264: echo -e "${YELLOW}| Filesize not found.${NC}" +./hosts/pixeldrain.sh:265: return 1 +./hosts/pixeldrain.sh:266: fi +-- +./hosts/pixeldrain.sh:324: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:325: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/pixeldrain.sh:326: --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/pixeldrain.sh:327: else +./hosts/pixeldrain.sh:328: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:329: --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/pixeldrain.sh:330: fi +./hosts/pixeldrain.sh:331: else +./hosts/pixeldrain.sh:332: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/pixeldrain.sh:333: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:334: -H "User-Agent: $RandomUA" \ +./hosts/pixeldrain.sh:335: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ +./hosts/pixeldrain.sh:336: -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' \ +./hosts/pixeldrain.sh:337: -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' \ +./hosts/pixeldrain.sh:338: -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ +./hosts/pixeldrain.sh:339: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/pixeldrain.sh:340: --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/pixeldrain.sh:341: else +./hosts/pixeldrain.sh:342: tor_curl_request --insecure \ +./hosts/pixeldrain.sh:343: -H "User-Agent: $RandomUA" \ +./hosts/pixeldrain.sh:344: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ +./hosts/pixeldrain.sh:345: -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' \ +./hosts/pixeldrain.sh:346: -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' \ +./hosts/pixeldrain.sh:347: -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ +./hosts/pixeldrain.sh:348: --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/pixeldrain.sh:349: fi +./hosts/pixeldrain.sh:350: fi +./hosts/pixeldrain.sh:351: received_file_size=0 +./hosts/pixeldrain.sh:352: if [ -f "$file_path" ] ; then +-- +./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/ranoz.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/ranoz.sh:92: debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}" +./hosts/ranoz.sh:93: fi +./hosts/ranoz.sh:94: if [[ -z $response ]] ; then +./hosts/ranoz.sh:95: if [ $i == $maxfetchretries ] ; then +./hosts/ranoz.sh:96: printf "\\n" +./hosts/ranoz.sh:97: echo -e "${RED}| Failed to extract download url [1]${NC}" +./hosts/ranoz.sh:98: warnAndRetryUnknownError=true +./hosts/ranoz.sh:99: if [ "${finalAttempt}" == "true" ] ; then +./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" "" +-- +./hosts/ranoz.sh:144: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url") +./hosts/ranoz.sh:145: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/ranoz.sh:146: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: 
${download_url}"$'\n'"${file_header}" +./hosts/ranoz.sh:147: fi +./hosts/ranoz.sh:148: if [[ -z $file_header ]] ; then +./hosts/ranoz.sh:149: if [ $j == $maxfetchretries ] ; then +./hosts/ranoz.sh:150: rm -f "${rz_cookie_jar}"; +./hosts/ranoz.sh:151: printf "\\n" +./hosts/ranoz.sh:152: echo -e "${RED}| Failed to extract file info${NC}" +./hosts/ranoz.sh:153: warnAndRetryUnknownError=true +./hosts/ranoz.sh:154: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/ranoz.sh:255: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:256: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/ranoz.sh:257: "$download_url" --continue-at - --output "$file_path" +./hosts/ranoz.sh:258: else +./hosts/ranoz.sh:259: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:260: "$download_url" --continue-at - --output "$file_path" +./hosts/ranoz.sh:261: fi +./hosts/ranoz.sh:262: else +./hosts/ranoz.sh:263: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/ranoz.sh:264: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:265: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/ranoz.sh:266: -H "User-Agent: $RandomUA" \ +./hosts/ranoz.sh:267: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/ranoz.sh:268: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/ranoz.sh:269: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/ranoz.sh:270: -H "Connection: keep-alive" \ +./hosts/ranoz.sh:271: -H "Cookie: lng=eng" \ +./hosts/ranoz.sh:272: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/ranoz.sh:273: -H "Sec-Fetch-Dest: document" \ +./hosts/ranoz.sh:274: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/ranoz.sh:279: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/ranoz.sh:280: -H "User-Agent: $RandomUA" \ +./hosts/ranoz.sh:281: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/ranoz.sh:282: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/ranoz.sh:283: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/ranoz.sh:284: -H "Connection: keep-alive" \ +./hosts/ranoz.sh:285: -H "Cookie: lng=eng" \ +./hosts/ranoz.sh:286: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/ranoz.sh:287: -H "Sec-Fetch-Dest: document" \ +./hosts/ranoz.sh:288: -H "Sec-Fetch-Mode: navigate" \ +./hosts/ranoz.sh:289: -H "Sec-Fetch-Site: same-origin" \ +-- +./hosts/tempfileme.sh:89: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/tempfileme.sh:90: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/tempfileme.sh:91: debugHtml "${remote_url##*/}" "tmpme_fetch$j" "${response}" +./hosts/tempfileme.sh:92: fi +./hosts/tempfileme.sh:93: if [[ -z $response ]] ; then +./hosts/tempfileme.sh:94: if [ $j == $maxfetchretries ] ; then +./hosts/tempfileme.sh:95: printf "\\n" +./hosts/tempfileme.sh:96: echo -e "${RED}| Failed to extract download link${NC}" +./hosts/tempfileme.sh:97: warnAndRetryUnknownError=true +./hosts/tempfileme.sh:98: if [ "${finalAttempt}" == "true" ] ; then +./hosts/tempfileme.sh:99: failedRetryDownload "${remote_url}" "Failed to extract download link" "" +-- +./hosts/tempfileme.sh:163: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url}" "$download_url") +./hosts/tempfileme.sh:164: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/tempfileme.sh:165: debugHtml "${remote_url##*/}" "tmpme_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/tempfileme.sh:166: fi 
+./hosts/tempfileme.sh:167: if [ ! -z "$file_header" ] ; then +./hosts/tempfileme.sh:168: if grep -Eqi 'HTTP.* 404|Not Found' <<< "${file_header}" ; then +./hosts/tempfileme.sh:169: if [ $j == $maxfetchretries ] ; then +./hosts/tempfileme.sh:170: printf "\\n" +./hosts/tempfileme.sh:171: echo -e "${RED}| The file has been removed (404).${NC}" +./hosts/tempfileme.sh:172: removedDownload "${remote_url}" +./hosts/tempfileme.sh:173: exitDownloadNotAvailable=true +-- +./hosts/tempfileme.sh:293: tor_curl_request --insecure -L \ +./hosts/tempfileme.sh:294: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/tempfileme.sh:295: --referer "${remote_url}" \ +./hosts/tempfileme.sh:296: "$download_url" --continue-at - --output "$file_path" +./hosts/tempfileme.sh:297: else +./hosts/tempfileme.sh:298: tor_curl_request --insecure -L \ +./hosts/tempfileme.sh:299: --referer "${remote_url}" \ +./hosts/tempfileme.sh:300: "$download_url" --continue-at - --output "$file_path" +./hosts/tempfileme.sh:301: fi +./hosts/tempfileme.sh:302: else +./hosts/tempfileme.sh:303: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/tempfileme.sh:304: tor_curl_request --insecure -L \ +./hosts/tempfileme.sh:305: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/tempfileme.sh:306: -H "User-Agent: $RandomUA" \ +./hosts/tempfileme.sh:307: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/tempfileme.sh:308: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/tempfileme.sh:309: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/tempfileme.sh:310: -H "Connection: keep-alive" \ +./hosts/tempfileme.sh:311: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/tempfileme.sh:312: -H "Sec-Fetch-Dest: document" \ +./hosts/tempfileme.sh:313: -H "Sec-Fetch-Mode: navigate" \ +./hosts/tempfileme.sh:314: -H "Sec-Fetch-Site: same-origin" \ +-- +./hosts/tempfileme.sh:319: tor_curl_request --insecure -L \ +./hosts/tempfileme.sh:320: -H "User-Agent: $RandomUA" \ +./hosts/tempfileme.sh:321: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/tempfileme.sh:322: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/tempfileme.sh:323: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/tempfileme.sh:324: -H "Connection: keep-alive" \ +./hosts/tempfileme.sh:325: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/tempfileme.sh:326: -H "Sec-Fetch-Dest: document" \ +./hosts/tempfileme.sh:327: -H "Sec-Fetch-Mode: navigate" \ +./hosts/tempfileme.sh:328: -H "Sec-Fetch-Site: same-origin" \ +./hosts/tempfileme.sh:329: -H "Sec-Fetch-User: ?1" \ +-- +./hosts/tempsh.sh:88: file_header=$(tor_curl_request --insecure -s -D - -o /dev/null -X POST \ +./hosts/tempsh.sh:89: -H "Connection: keep-alive" \ +./hosts/tempsh.sh:90: -w 'EffectiveUrl=%{url_effective}' \ +./hosts/tempsh.sh:91: "$download_url") +./hosts/tempsh.sh:92: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/tempsh.sh:93: debugHtml "${remote_url##*/}" "tmpsh_posthead" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/tempsh.sh:94: fi +./hosts/tempsh.sh:95: if [[ -z $file_header ]] ; then +./hosts/tempsh.sh:96: if [ $j == $maxfetchretries ] ; then +./hosts/tempsh.sh:97: printf "\\n" +./hosts/tempsh.sh:98: echo -e "${RED}| Failed to extract file info [1]${NC}" +-- +./hosts/tempsh.sh:227: tor_curl_request --insecure -X POST \ +./hosts/tempsh.sh:228: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ 
+./hosts/tempsh.sh:229: "$download_url" --continue-at - --output "$file_path" +./hosts/tempsh.sh:230: else +./hosts/tempsh.sh:231: tor_curl_request --insecure -X POST \ +./hosts/tempsh.sh:232: "$download_url" --continue-at - --output "$file_path" +./hosts/tempsh.sh:233: fi +./hosts/tempsh.sh:234: else +./hosts/tempsh.sh:235: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/tempsh.sh:236: tor_curl_request --insecure -X POST \ +./hosts/tempsh.sh:237: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/tempsh.sh:238: -H "User-Agent: $RandomUA" \ +./hosts/tempsh.sh:239: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/tempsh.sh:240: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/tempsh.sh:241: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/tempsh.sh:242: -H "Connection: keep-alive" \ +./hosts/tempsh.sh:243: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/tempsh.sh:244: -H "Sec-Fetch-Dest: document" \ +./hosts/tempsh.sh:245: -H "Sec-Fetch-Mode: navigate" \ +./hosts/tempsh.sh:246: -H "Sec-Fetch-Site: same-origin" \ +-- +./hosts/tempsh.sh:250: tor_curl_request --insecure -X POST \ +./hosts/tempsh.sh:251: -H "User-Agent: $RandomUA" \ +./hosts/tempsh.sh:252: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/tempsh.sh:253: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/tempsh.sh:254: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/tempsh.sh:255: -H "Connection: keep-alive" \ +./hosts/tempsh.sh:256: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/tempsh.sh:257: -H "Sec-Fetch-Dest: document" \ +./hosts/tempsh.sh:258: -H "Sec-Fetch-Mode: navigate" \ +./hosts/tempsh.sh:259: -H "Sec-Fetch-Site: same-origin" \ +./hosts/tempsh.sh:260: -H "Sec-Fetch-User: ?1" \ +-- +./hosts/up2share.sh:91: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/up2share.sh:93: "$fixed_url") +./hosts/up2share.sh:94: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up2share.sh:95: debugHtml "${remote_url##*/}" "up2share_fetch$i" "${response}" +./hosts/up2share.sh:96: fi +./hosts/up2share.sh:97: if [[ -z $response ]] ; then +./hosts/up2share.sh:98: rm -f "${up2share_cookie_jar}"; +./hosts/up2share.sh:99: if [ $i == $maxfetchretries ] ; then +./hosts/up2share.sh:100: printf "\\n" +./hosts/up2share.sh:101: echo -e "${RED}| Failed to extract token link [1].${NC}" +-- +./hosts/up2share.sh:144: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:145: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/up2share.sh:146: "$download_url") +./hosts/up2share.sh:147: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up2share.sh:148: debugHtml "${remote_url##*/}" "up2share_down$i" "${response}" +./hosts/up2share.sh:149: fi +./hosts/up2share.sh:150: if [[ -z $response ]] ; then +./hosts/up2share.sh:151: rm -f "${up2share_cookie_jar}"; +./hosts/up2share.sh:152: if [ $i == $maxfetchretries ] ; then +./hosts/up2share.sh:153: printf "\\n" +./hosts/up2share.sh:154: echo -e "${RED}| Failed to extract download link [1].${NC}" +-- +./hosts/up2share.sh:195: file_header=$(tor_curl_request --insecure -L -s --head \ +./hosts/up2share.sh:196: -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:197: --referer 
"https://up2sha.re/" "$download_url") +./hosts/up2share.sh:198: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up2share.sh:199: debugHtml "${remote_url##*/}" "up2share_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/up2share.sh:200: fi +./hosts/up2share.sh:201: if [[ -z $file_header ]] ; then +./hosts/up2share.sh:202: if [ $j == $maxfetchretries ] ; then +./hosts/up2share.sh:203: rm -f "${up2share_cookie_jar}"; +./hosts/up2share.sh:204: printf "\\n" +./hosts/up2share.sh:205: echo -e "${RED}| Failed to extract file info${NC}" +-- +./hosts/up2share.sh:313: tor_curl_request --insecure -L \ +./hosts/up2share.sh:314: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/up2share.sh:315: -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:316: -H "Host: up2sha.re" \ +./hosts/up2share.sh:317: --referer "https://up2sha.re/" \ +./hosts/up2share.sh:318: "$download_url" --continue-at - --output "$file_path" +./hosts/up2share.sh:319: else +./hosts/up2share.sh:320: tor_curl_request --insecure -L \ +./hosts/up2share.sh:321: -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:322: -H "Host: up2sha.re" \ +./hosts/up2share.sh:323: --referer "https://up2sha.re/" \ +./hosts/up2share.sh:324: "$download_url" --continue-at - --output "$file_path" +./hosts/up2share.sh:325: fi +./hosts/up2share.sh:326: else +./hosts/up2share.sh:327: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/up2share.sh:328: tor_curl_request --insecure -L \ +./hosts/up2share.sh:329: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/up2share.sh:330: -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:331: -H "Host: up2sha.re" \ +./hosts/up2share.sh:332: -H "User-Agent: $RandomUA" \ +./hosts/up2share.sh:333: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/up2share.sh:334: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/up2share.sh:335: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/up2share.sh:336: -H "Connection: keep-alive" \ +./hosts/up2share.sh:337: -H "Cookie: lng=eng" \ +./hosts/up2share.sh:338: -H "Upgrade-Insecure-Requests: 1" \ +-- +./hosts/up2share.sh:346: tor_curl_request --insecure -L \ +./hosts/up2share.sh:347: -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ +./hosts/up2share.sh:348: -H "Host: up2sha.re" \ +./hosts/up2share.sh:349: -H "User-Agent: $RandomUA" \ +./hosts/up2share.sh:350: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/up2share.sh:351: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/up2share.sh:352: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/up2share.sh:353: -H "Connection: keep-alive" \ +./hosts/up2share.sh:354: -H "Cookie: lng=eng" \ +./hosts/up2share.sh:355: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/up2share.sh:356: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/uploadee.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/uploadee.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadee.sh:92: debugHtml "${remote_url##*/}" "upee_dwnpage$i" "${response}" +./hosts/uploadee.sh:93: fi +./hosts/uploadee.sh:94: if [[ -z $response ]] ; then +./hosts/uploadee.sh:95: rm -f "${upee_cookie_jar}"; +./hosts/uploadee.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/uploadee.sh:97: printf "\\n" +./hosts/uploadee.sh:98: echo -e "${RED}| Failed to extract download 
link.${NC}" +./hosts/uploadee.sh:99: warnAndRetryUnknownError=true +./hosts/uploadee.sh:100: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/uploadee.sh:143: file_header=$(tor_curl_request --insecure --head -L -s -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --referer "$remote_url" "$download_url") +./hosts/uploadee.sh:144: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadee.sh:145: debugHtml "${remote_url##*/}" "upee_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/uploadee.sh:146: fi +./hosts/uploadee.sh:147: if [[ -z $file_header ]] ; then +./hosts/uploadee.sh:148: if [ $j == $maxfetchretries ] ; then +./hosts/uploadee.sh:149: rm -f "${upee_cookie_jar}"; +./hosts/uploadee.sh:150: printf "\\n" +./hosts/uploadee.sh:151: echo -e "${RED}| Failed to extract file info.${NC}" +./hosts/uploadee.sh:152: warnAndRetryUnknownError=true +./hosts/uploadee.sh:153: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/uploadee.sh:251: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadee.sh:252: -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/uploadee.sh:253: -H "Host: www.upload.ee" \ +./hosts/uploadee.sh:254: --referer "$remote_url" "$download_url" \ +./hosts/uploadee.sh:255: --continue-at - --output "$file_path" +./hosts/uploadee.sh:256: else +./hosts/uploadee.sh:257: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadee.sh:258: -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" \ +./hosts/uploadee.sh:259: -H "Host: www.upload.ee" \ +./hosts/uploadee.sh:260: --referer "$remote_url" "$download_url" \ +./hosts/uploadee.sh:261: --continue-at - --output "$file_path" +./hosts/uploadee.sh:262: fi +./hosts/uploadee.sh:263: else +./hosts/uploadee.sh:264: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/uploadee.sh:265: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadee.sh:266: -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/uploadee.sh:267: -H "Host: www.upload.ee" \ +./hosts/uploadee.sh:268: -H "User-Agent: $RandomUA" \ +./hosts/uploadee.sh:269: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/uploadee.sh:270: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/uploadee.sh:271: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/uploadee.sh:272: -H "Connection: keep-alive" \ +./hosts/uploadee.sh:273: -H "Cookie: lng=eng" \ +./hosts/uploadee.sh:274: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/uploadee.sh:275: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/uploadee.sh:282: tor_curl_request --insecure -L -G --no-alpn \ +./hosts/uploadee.sh:283: -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" \ +./hosts/uploadee.sh:284: -H "Host: www.upload.ee" \ +./hosts/uploadee.sh:285: -H "User-Agent: $RandomUA" \ +./hosts/uploadee.sh:286: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/uploadee.sh:287: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/uploadee.sh:288: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/uploadee.sh:289: -H "Connection: keep-alive" \ +./hosts/uploadee.sh:290: -H "Cookie: lng=eng" \ +./hosts/uploadee.sh:291: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/uploadee.sh:292: -H "Sec-Fetch-Dest: document" \ +-- +./hosts/uploadev.sh:91: response=$(tor_curl_request --insecure -L -s -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ 
+./hosts/uploadev.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/uploadev.sh:93: "$fixed_url") +./hosts/uploadev.sh:94: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadev.sh:95: debugHtml "${remote_url##*/}" "upev_fetch$i" "${response}" +./hosts/uploadev.sh:96: fi +./hosts/uploadev.sh:97: if [[ -z $response ]] ; then +./hosts/uploadev.sh:98: rm -f "${upev_cookie_jar}"; +./hosts/uploadev.sh:99: if [ $i == $maxfetchretries ] ; then +./hosts/uploadev.sh:100: printf "\\n" +./hosts/uploadev.sh:101: echo -e "${RED}| Failed to extract download link [1]${NC}" +-- +./hosts/uploadev.sh:181: response=$(tor_curl_request --insecure -L -s -X POST \ +./hosts/uploadev.sh:182: -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ +./hosts/uploadev.sh:183: --data "$form_data" "$fixed_url") +./hosts/uploadev.sh:184: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadev.sh:185: debugHtml "${remote_url##*/}" "upev_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" +./hosts/uploadev.sh:186: fi +./hosts/uploadev.sh:187: if [[ -z $response ]] ; then +./hosts/uploadev.sh:188: if [ $i == $maxfetchretries ] ; then +./hosts/uploadev.sh:189: rm -f "${upev_cookie_jar}"; +./hosts/uploadev.sh:190: printf "\\n" +./hosts/uploadev.sh:191: echo -e "${RED}| Failed to extract download link [4]${NC}" +-- +./hosts/uploadev.sh:268: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") +./hosts/uploadev.sh:269: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadev.sh:270: debugHtml "${remote_url##*/}" "upev_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/uploadev.sh:271: fi +./hosts/uploadev.sh:272: if [[ -z $file_header ]] ; then +./hosts/uploadev.sh:273: if [ $j == $maxfetchretries ] ; then +./hosts/uploadev.sh:274: rm -f "${upev_cookie_jar}"; +./hosts/uploadev.sh:275: printf "\\n" +./hosts/uploadev.sh:276: echo -e "${RED}| Failed to extract file info [1]${NC}" +./hosts/uploadev.sh:277: warnAndRetryUnknownError=true +./hosts/uploadev.sh:278: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/uploadev.sh:369: tor_curl_request --insecure -L \ +./hosts/uploadev.sh:370: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/uploadev.sh:371: -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ +./hosts/uploadev.sh:372: "$download_url" --continue-at - --output "$file_path" +./hosts/uploadev.sh:373: else +./hosts/uploadev.sh:374: tor_curl_request --insecure -L \ +./hosts/uploadev.sh:375: -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ +./hosts/uploadev.sh:376: "$download_url" --continue-at - --output "$file_path" +./hosts/uploadev.sh:377: fi +./hosts/uploadev.sh:378: else +./hosts/uploadev.sh:379: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/uploadev.sh:380: tor_curl_request --insecure -L \ +./hosts/uploadev.sh:381: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/uploadev.sh:382: -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ +./hosts/uploadev.sh:383: -H "User-Agent: $RandomUA" \ +./hosts/uploadev.sh:384: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/uploadev.sh:385: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/uploadev.sh:386: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/uploadev.sh:387: -H "Connection: keep-alive" \ +./hosts/uploadev.sh:388: -H "Cookie: lng=eng" \ +./hosts/uploadev.sh:389: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/uploadev.sh:390: -H 
"Sec-Fetch-Dest: document" \ +-- +./hosts/uploadev.sh:396: tor_curl_request --insecure -L \ +./hosts/uploadev.sh:397: -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ +./hosts/uploadev.sh:398: -H "User-Agent: $RandomUA" \ +./hosts/uploadev.sh:399: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/uploadev.sh:400: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/uploadev.sh:401: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/uploadev.sh:402: -H "Connection: keep-alive" \ +./hosts/uploadev.sh:403: -H "Cookie: lng=eng" \ +./hosts/uploadev.sh:404: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/uploadev.sh:405: -H "Sec-Fetch-Dest: document" \ +./hosts/uploadev.sh:406: -H "Sec-Fetch-Mode: navigate" \ +-- +./hosts/uploadflix.sh:97: response=$(tor_curl_request --insecure -L -s "${fixed_url}") +./hosts/uploadflix.sh:98: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadflix.sh:99: debugHtml "${remote_url##*/}" "uflix_dwnpage$j" "${response}" +./hosts/uploadflix.sh:100: fi +./hosts/uploadflix.sh:101: if [[ -z $response ]] ; then +./hosts/uploadflix.sh:102: if [ $j == $maxfetchretries ] ; then +./hosts/uploadflix.sh:103: printf "\\n" +./hosts/uploadflix.sh:104: echo -e "${RED}| Failed to extract post link.${NC}" +./hosts/uploadflix.sh:105: warnAndRetryUnknownError=true +./hosts/uploadflix.sh:106: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadflix.sh:107: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/uploadflix.sh:150: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "${fixed_url}") +./hosts/uploadflix.sh:151: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadflix.sh:152: debugHtml "${remote_url##*/}" "uflix_post" "form_data: ${form_data}"$'\n'"${response}" +./hosts/uploadflix.sh:153: fi +./hosts/uploadflix.sh:154: if [[ -z $response ]] ; then +./hosts/uploadflix.sh:155: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/uploadflix.sh:156: warnAndRetryUnknownError=true +./hosts/uploadflix.sh:157: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadflix.sh:158: failedRetryDownload "${remote_url}" "" "" +./hosts/uploadflix.sh:159: fi +./hosts/uploadflix.sh:160: return 1 +-- +./hosts/uploadflix.sh:192: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "${download_url}") +./hosts/uploadflix.sh:193: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadflix.sh:194: debugHtml "${remote_url##*/}" "uflix_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/uploadflix.sh:195: fi +./hosts/uploadflix.sh:196: if [[ -z $file_header ]] ; then +./hosts/uploadflix.sh:197: if [ $j == $maxfetchretries ] ; then +./hosts/uploadflix.sh:198: printf "\\n" +./hosts/uploadflix.sh:199: echo -e "${RED}| Failed to extract file info.${NC}" +./hosts/uploadflix.sh:200: warnAndRetryUnknownError=true +./hosts/uploadflix.sh:201: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadflix.sh:202: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/uploadflix.sh:286: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/uploadflix.sh:287: else +./hosts/uploadflix.sh:288: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/uploadflix.sh:289: fi +./hosts/uploadflix.sh:290: received_file_size=0 +./hosts/uploadflix.sh:291: if [ -f "$file_path" ] ; then +./hosts/uploadflix.sh:292: 
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/uploadflix.sh:293: fi +./hosts/uploadflix.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/uploadflix.sh:295: containsHtml=false +./hosts/uploadflix.sh:296: else +./hosts/uploadflix.sh:297: containsHtml=true +./hosts/uploadflix.sh:298: fi +-- +./hosts/uploadhive.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") +./hosts/uploadhive.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadhive.sh:90: debugHtml "${remote_url##*/}" "uhive_dwnpage$j" "${response}" +./hosts/uploadhive.sh:91: fi +./hosts/uploadhive.sh:92: if [[ -z $response ]] ; then +./hosts/uploadhive.sh:93: if [ $j == $maxfetchretries ] ; then +./hosts/uploadhive.sh:94: printf "\\n" +./hosts/uploadhive.sh:95: echo -e "${RED}| Failed to extract post link.${NC}" +./hosts/uploadhive.sh:96: warnAndRetryUnknownError=true +./hosts/uploadhive.sh:97: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadhive.sh:98: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/uploadhive.sh:134: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url") +./hosts/uploadhive.sh:135: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadhive.sh:136: debugHtml "${remote_url##*/}" "uhive_post" "${response}" +./hosts/uploadhive.sh:137: fi +./hosts/uploadhive.sh:138: if [[ -z $response ]] ; then +./hosts/uploadhive.sh:139: echo -e "${RED}| Failed to extract download link.${NC}" +./hosts/uploadhive.sh:140: warnAndRetryUnknownError=true +./hosts/uploadhive.sh:141: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadhive.sh:142: failedRetryDownload "${remote_url}" "" "" +./hosts/uploadhive.sh:143: fi +./hosts/uploadhive.sh:144: return 1 +-- +./hosts/uploadhive.sh:175: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url") +./hosts/uploadhive.sh:176: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadhive.sh:177: debugHtml "${remote_url##*/}" "uhive_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/uploadhive.sh:178: fi +./hosts/uploadhive.sh:179: if [[ -z $file_header ]] ; then +./hosts/uploadhive.sh:180: if [ $j == $maxfetchretries ] ; then +./hosts/uploadhive.sh:181: printf "\\n" +./hosts/uploadhive.sh:182: echo -e "${RED}| Failed to extract file info.${NC}" +./hosts/uploadhive.sh:183: warnAndRetryUnknownError=true +./hosts/uploadhive.sh:184: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadhive.sh:185: failedRetryDownload "${remote_url}" "" "" +-- +./hosts/uploadhive.sh:269: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:270: else +./hosts/uploadhive.sh:271: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:272: fi +./hosts/uploadhive.sh:273: received_file_size=0 +./hosts/uploadhive.sh:274: if [ -f "$file_path" ] ; then +./hosts/uploadhive.sh:275: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/uploadhive.sh:276: fi +./hosts/uploadhive.sh:277: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/uploadhive.sh:278: containsHtml=false +./hosts/uploadhive.sh:279: else +./hosts/uploadhive.sh:280: containsHtml=true +./hosts/uploadhive.sh:281: fi +-- +./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/") 
+./hosts/up_1fichier.sh:108: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_1fichier.sh:109: debugHtml "${filepath##*/}" "${_hostCode}_up_getid_$i" "url: https://1fichier.com/"$'\n'"${response}" +./hosts/up_1fichier.sh:110: fi +./hosts/up_1fichier.sh:111: if [[ -z $response ]] ; then +./hosts/up_1fichier.sh:112: if [ $i == $maxfetchretries ] ; then +./hosts/up_1fichier.sh:113: if [ "${finalAttempt}" == "true" ] ; then +./hosts/up_1fichier.sh:114: printf "\\n" +./hosts/up_1fichier.sh:115: echo -e "${RED}| Upload failed. (GetId [1])${NC}" +./hosts/up_1fichier.sh:116: failedUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "No Response (GetId [1])" +./hosts/up_1fichier.sh:117: exitUploadError=true +-- +./hosts/up_1fichier.sh:180: response=$(tor_curl_upload --insecure -L \ +./hosts/up_1fichier.sh:181: -F "file[]=@${arrFiles[@]}" \ +./hosts/up_1fichier.sh:182: -F "send_ssl=on" \ +./hosts/up_1fichier.sh:183: -F "domain=0" \ +./hosts/up_1fichier.sh:184: -F "mail=" \ +./hosts/up_1fichier.sh:185: -F "dpass=" \ +./hosts/up_1fichier.sh:186: -F "user=" \ +./hosts/up_1fichier.sh:187: -F "mails=" \ +./hosts/up_1fichier.sh:188: -F "message=" \ +./hosts/up_1fichier.sh:189: "${PostUrlHost}") +./hosts/up_1fichier.sh:190: if [ "${DebugAllEnabled}" == "true" ] ; then +-- +./hosts/up_acid.sh:94: response=$(tor_curl_upload --insecure -i \ +./hosts/up_acid.sh:95: -H "Content-Type: multipart/form-data" \ +./hosts/up_acid.sh:96: -F "time=month" \ +./hosts/up_acid.sh:97: -F "file=@${filepath}" \ +./hosts/up_acid.sh:98: "${PostUrlHost}") +./hosts/up_acid.sh:99: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_acid.sh:100: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_acid.sh:101: fi +./hosts/up_acid.sh:102: if grep -Eqi ' 200 ' <<< "${response}" ; then +./hosts/up_acid.sh:103: hash=$(echo "$response" | tail -2 | head -1) +./hosts/up_acid.sh:104: hash=${hash//[$'\t\r\n']} +-- +./hosts/up_anarchaserver.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_anarchaserver.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_anarchaserver.sh:104: -F "time=month" \ +./hosts/up_anarchaserver.sh:105: -F "file=@${filepath}" \ +./hosts/up_anarchaserver.sh:106: "${PostUrlHost}") +./hosts/up_anarchaserver.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_anarchaserver.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_anarchaserver.sh:109: fi +./hosts/up_anarchaserver.sh:110: if grep -Eqi ' 200 ' <<< "${response}" ; then +./hosts/up_anarchaserver.sh:111: hash=$(echo "$response" | tail -2 | head -1) +./hosts/up_anarchaserver.sh:112: hash=${hash//[$'\t\r\n']} +-- +./hosts/up_anonsharing.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_anonsharing.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_anonsharing.sh:104: -F "files[]=@${arrFiles[@]}" \ +./hosts/up_anonsharing.sh:105: "${PostUrlHost}") +./hosts/up_anonsharing.sh:106: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_anonsharing.sh:107: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_anonsharing.sh:108: fi +./hosts/up_anonsharing.sh:109: if grep -Eqi '"error":null,"url":"https:\\/\\/anonsharing.com\\/' <<< "${response}" ; then +./hosts/up_anonsharing.sh:110: fileid=$(grep -oPi '(?<=","file_id":").*?(?=".*$)' <<< "$response") +./hosts/up_anonsharing.sh:111: filesize=$(GetFileSize "$filepath" 
"false") +./hosts/up_anonsharing.sh:112: downloadLink="https://anonsharing.com/fileid=${fileid}" +-- +./hosts/up_axfc.sh:109: response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url") +./hosts/up_axfc.sh:110: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_axfc.sh:111: debugHtml "${remote_url##*/}" "axfc_fetch$i" "${response}" +./hosts/up_axfc.sh:112: fi +./hosts/up_axfc.sh:113: if [[ -z $response ]] ; then +./hosts/up_axfc.sh:114: rm -f "${axfc_cookie_jar}"; +./hosts/up_axfc.sh:115: if [ $i == $maxfetchretries ] ; then +./hosts/up_axfc.sh:116: printf "\\n" +./hosts/up_axfc.sh:117: echo -e "${RED}| Failed to start an upload [1]${NC}" +./hosts/up_axfc.sh:118: warnAndRetryUnknownError=true +./hosts/up_axfc.sh:119: if [ "${finalAttempt}" == "true" ] ; then +-- +./hosts/up_axfc.sh:136: response=$(tor_curl_upload --insecure -L -s -X POST \ +./hosts/up_axfc.sh:137: -H 'Referer: https://www.axfc.net/u/post_m.pl' \ +./hosts/up_axfc.sh:138: -H 'Content-Type: application/x-www-form-urlencoded' \ +./hosts/up_axfc.sh:139: -H 'Origin: https://www.axfc.net' \ +./hosts/up_axfc.sh:140: -H 'Connection: keep-alive' \ +./hosts/up_axfc.sh:141: -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" \ +./hosts/up_axfc.sh:142: --data-raw "method=upload&ext=ext&filename=1&comment=&address=&delpass=$randelkey&keyword=1234&count=&term=0&term_y=2024&term_mon=10&term_d=1&term_h=15&term_min=0&term_s=0&term_ps=&term_mp=3600" \ +./hosts/up_axfc.sh:143: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ +./hosts/up_axfc.sh:144: "$fixed_url") +./hosts/up_axfc.sh:145: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_axfc.sh:146: debugHtml "${remote_url##*/}" "axfc_ticket$i" "${response}" +-- +./hosts/up_axfc.sh:184: response=$(tor_curl_upload --insecure -L -i -X POST \ +./hosts/up_axfc.sh:185: -H "Content-Type: multipart/form-data" \ +./hosts/up_axfc.sh:186: -H 'Connection: keep-alive' \ +./hosts/up_axfc.sh:187: -F "filedata=@$filepath" \ +./hosts/up_axfc.sh:188: -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" \ +./hosts/up_axfc.sh:189: "$PostUrlHost") +./hosts/up_axfc.sh:190: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_axfc.sh:191: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_axfc.sh:192: fi +./hosts/up_axfc.sh:193: if grep -Eqi 'Axfc Uploader -投稿完了.*キーワード付きURL:.*a href="https://www.axfc.net.*(QueryString無しVer)' <<< "${response}" ; then +./hosts/up_axfc.sh:194: subSearch=$(awk '/Axfc Uploader -投稿完了/,/(QueryString無しVer)/' <<< "$response") +-- +./hosts/up_bedrive.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_bedrive.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_bedrive.sh:104: -F "time=month" \ +./hosts/up_bedrive.sh:105: -F "files[]=@${arrFiles[@]}" \ +./hosts/up_bedrive.sh:106: "${PostUrlHost}") +./hosts/up_bedrive.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_bedrive.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_bedrive.sh:109: fi +./hosts/up_bedrive.sh:110: if grep -Eqi '"error":null,"url":"https:\\/\\/bedrive.ru\\/' <<< "${response}" ; then +./hosts/up_bedrive.sh:111: hash=$(grep -oPi '(?<="url":"https:\\/\\/bedrive.ru\\/).*?(?=".*$)' <<< "$response") +./hosts/up_bedrive.sh:112: filesize=$(GetFileSize "$filepath" "false") +-- +./hosts/up_bowfile.sh:108: response=$(tor_curl_request --insecure -L -i \ +./hosts/up_bowfile.sh:109: -H "Content-Type: 
multipart/form-data" \ +./hosts/up_bowfile.sh:110: -F "files[]=@$filepath" \ +./hosts/up_bowfile.sh:111: "${PostUrlHost}") +./hosts/up_bowfile.sh:112: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_bowfile.sh:113: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_bowfile.sh:114: fi +./hosts/up_bowfile.sh:115: if grep -Eqi '"error":null,"url":"https:\\/\\/bowfile.com\\/' <<< "${response}" ; then +./hosts/up_bowfile.sh:116: url=$(grep -oPi '(?<="url":"https:\\/\\/bowfile.com\\/).*?(?=")' <<< "$response") +./hosts/up_bowfile.sh:117: hash=$(grep -oPi '(?<="short_url":").*?(?=")' <<< "$response") +./hosts/up_bowfile.sh:118: filesize=$(GetFileSize "$filepath" "false") +-- +./hosts/up_dailyuploads.sh:109: response=$(tor_curl_upload --insecure -i \ +./hosts/up_dailyuploads.sh:110: -H "Content-Type: multipart/form-data" \ +./hosts/up_dailyuploads.sh:111: -F "sess_id=" \ +./hosts/up_dailyuploads.sh:112: -F "utype=anon" \ +./hosts/up_dailyuploads.sh:113: -F "link_rcpt=" \ +./hosts/up_dailyuploads.sh:114: -F "link_pass=" \ +./hosts/up_dailyuploads.sh:115: -F "to_folder=" \ +./hosts/up_dailyuploads.sh:116: -F "file_descr=" \ +./hosts/up_dailyuploads.sh:117: -F "file_public=1" \ +./hosts/up_dailyuploads.sh:118: -F "file_0=@$filepath" \ +./hosts/up_dailyuploads.sh:119: "${PostUrlHost}") +-- +./hosts/up_dataupload.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_dataupload.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_dataupload.sh:104: -F "sess_id=" \ +./hosts/up_dataupload.sh:105: -F "file_descr=" \ +./hosts/up_dataupload.sh:106: -F "file_public=" \ +./hosts/up_dataupload.sh:107: -F "link_rcpt=" \ +./hosts/up_dataupload.sh:108: -F "link_pass=" \ +./hosts/up_dataupload.sh:109: -F "to_folder=" \ +./hosts/up_dataupload.sh:110: -F "keepalive=1" \ +./hosts/up_dataupload.sh:111: -F "file_0=@${filepath}" \ +./hosts/up_dataupload.sh:112: "${PostUrlHost}") +-- +./hosts/up_dbree.sh:102: response=$(tor_curl_upload --insecure -i \ +./hosts/up_dbree.sh:103: -H "Content-Type: multipart/form-data" \ +./hosts/up_dbree.sh:104: -F "file[]=@${arrFiles[@]}" \ +./hosts/up_dbree.sh:105: -F "upload=Upload" \ +./hosts/up_dbree.sh:106: "${PostUrlHost}") +./hosts/up_dbree.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_dbree.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_dbree.sh:109: fi +./hosts/up_dbree.sh:110: if grep -Eqi 'URL: = MaxDownloadRetries)) ; then +./mad.sh:1407: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" +./mad.sh:1408: exit 1 +-- +./mad.sh:1451: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1452: if [ "${DebugAllEnabled}" == "true" ] ; then +./mad.sh:1453: debugHtml "github" "lbf_inst_curlimp$j" "$response" +./mad.sh:1454: fi +./mad.sh:1455: if [ ! -z "$response" ]; then +./mad.sh:1456: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") +./mad.sh:1457: latestBinaryDate=$(grep -oPi -m 1 '(?<== MaxDownloadRetries)) ; then +./mad.sh:1530: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" +./mad.sh:1531: exit 1 +-- +./mad.sh:1726: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1727: echo -e "Files:" +./mad.sh:1728: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1729: echo -e "" +./mad.sh:1730: echo -e "" +./mad.sh:1731: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1732: echo -e "_________________________________________________________________________" +./mad.sh:1733: echo -e "$maud_http" +./mad.sh:1734: echo -e "" +./mad.sh:1735: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1736: echo -e "_________________________________________________________________________" +-- +./mad.sh:1739: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1740: echo -e "_________________________________________________________________________" +./mad.sh:1741: echo -e "$maud_torcurl" +./mad.sh:1742: echo -e "" +./mad.sh:1743: echo -e "" +./mad.sh:1744: done +./mad.sh:1745: else +./mad.sh:1746: cd "$ScriptDir" +./mad.sh:1747: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +./mad.sh:1748: cd "$WorkDir" +./mad.sh:1749: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +-- +./mad.sh:1754: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1755: echo -e "Files:" +./mad.sh:1756: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1757: echo -e "" +./mad.sh:1758: echo -e "" +./mad.sh:1759: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1760: echo -e "_________________________________________________________________________" +./mad.sh:1761: echo -e "$maud_http" +./mad.sh:1762: echo -e "" +./mad.sh:1763: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" +./mad.sh:1764: echo -e "_________________________________________________________________________" +-- +./mad.sh:1767: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1768: echo -e "_________________________________________________________________________" +./mad.sh:1769: echo -e "$maud_torcurl" +./mad.sh:1770: echo -e "" +./mad.sh:1771: done +./mad.sh:1772: for fil in "${arrFiles2[@]}"; +./mad.sh:1773: do +./mad.sh:1774: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):') +./mad.sh:1775: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1776: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1777: echo -e "Files:" +./mad.sh:1778: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1779: echo -e "" +./mad.sh:1780: echo -e "" +./mad.sh:1781: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1782: echo -e "_________________________________________________________________________" +./mad.sh:1783: echo -e "$maud_http" +./mad.sh:1784: echo -e "" +./mad.sh:1785: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1786: echo -e "_________________________________________________________________________" +-- +./mad.sh:1789: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1790: echo -e "_________________________________________________________________________" +./mad.sh:1791: echo -e "$maud_torcurl" +./mad.sh:1792: echo -e "" +./mad.sh:1793: done +./mad.sh:1794: fi +./mad.sh:1795:} +./mad.sh:1796:madStatus() { +./mad.sh:1797: local InputFile="$1" +./mad.sh:1798: if [ "$arg1" == "status" ] ; then +./mad.sh:1799: clear +-- +./mad.sh:3102: file_header=$(tor_curl_request --insecure -m 18 -s -D - -o /dev/null \ +./mad.sh:3103: -H "Connection: keep-alive" \ +./mad.sh:3104: -w 'EffectiveUrl=%{url_effective}' \ +./mad.sh:3105: "$download_url") +./mad.sh:3106: else +./mad.sh:3107: printf "| Retrieving Head: attempt #$j" +./mad.sh:3108: rm -f "${WorkDir}/.temp/directhead" +./mad.sh:3109: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./mad.sh:3110: tee "${WorkDir}/.temp/directhead" & +./mad.sh:3111: sleep 6 +./mad.sh:3112: [ -s "${WorkDir}/.temp/directhead" ] +./mad.sh:3113: kill $! 2>/dev/null +./mad.sh:3114: ) +./mad.sh:3115: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./mad.sh:3116: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +./mad.sh:3117: fi +./mad.sh:3118: rm -f "${WorkDir}/.temp/directhead" +./mad.sh:3119: fi +-- +./mad.sh:3237: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" +./mad.sh:3238: rc=$? 
+./mad.sh:3239: if [ $rc -ne 0 ] ; then +./mad.sh:3240: printf "${RED}Download Failed (bad exit status).${NC}" +./mad.sh:3241: if [ -f ${file_path} ]; then +./mad.sh:3242: printf "${YELLOW} Partial removed...${NC}" +./mad.sh:3243: printf "\n\n" +./mad.sh:3244: rm -f "${file_path}" +./mad.sh:3245: else +./mad.sh:3246: printf "\n\n" +./mad.sh:3247: fi +-- +./mad.sh:3281: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./mad.sh:3282: else +./mad.sh:3283: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./mad.sh:3284: fi +./mad.sh:3285: received_file_size=0 +./mad.sh:3286: if [ -f "$file_path" ] ; then +./mad.sh:3287: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:3288: fi +./mad.sh:3289: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./mad.sh:3290: containsHtml=false +./mad.sh:3291: else +./mad.sh:3292: containsHtml=true +./mad.sh:3293: fi + diff --git a/_audit.sh b/_audit.sh new file mode 100755 index 0000000..b5f3bd7 --- /dev/null +++ b/_audit.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +backupIFS=$IFS +IFS=$(echo -en "\n\b") +ScriptDir="$( cd "$( dirname "$(realpath "$0")" )" && pwd )" +mkdir -p "$ScriptDir/.audit" +madAuditHttpLog="$ScriptDir/.audit/mad-audit-http.log" +madAuditCurlLog="$ScriptDir/.audit/mad-audit-curl.log" +madAuditTorCurlLog="$ScriptDir/.audit/mad-audit-tor_curl-details.log" +echo "DateTime: `date +%y.%m.%d`" | tee "$madAuditHttpLog" "$madAuditCurlLog" "$madAuditTorCurlLog" > /dev/null +echo "" | tee -a "$madAuditHttpLog" "$madAuditCurlLog" "$madAuditTorCurlLog" > /dev/null + +# Get array of all .sh files +cd "$ScriptDir" +readarray -d $'' arrFiles < <(find . -name "*.sh" ! -name "_audit.sh" -printf '%p\n' | sort -Vk1) + +# Process each file +for fil in "${arrFiles[@]}"; +do + # Find all "http", "curl ", and "tor_curl" refs + maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -Ei '(http|https):') + maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -Ei 'curl') + maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 10 -Ei 'tor_curl') + + echo "Files:" | tee -a "$madAuditHttpLog" "$madAuditCurlLog" "$madAuditTorCurlLog" > /dev/null + echo "${fil}" | tee -a "$madAuditHttpLog" "$madAuditCurlLog" "$madAuditTorCurlLog" > /dev/null + echo "" | tee -a "$madAuditHttpLog" "$madAuditCurlLog" "$madAuditTorCurlLog" > /dev/null + + echo "MAD Audit of http lines: (grep \"http:\" or \"https:\")" >> "$madAuditHttpLog" + echo "_________________________________________________________________________" >> "$madAuditHttpLog" + echo "$maud_http" >> "$madAuditHttpLog" + echo "" >> "$madAuditHttpLog" + + echo "MAD Audit of curl: (grep \"curl\")" >> "$madAuditCurlLog" + echo "_________________________________________________________________________" >> "$madAuditCurlLog" + echo "$maud_curl" >> "$madAuditCurlLog" + echo "" >> "$madAuditCurlLog" + + echo "MAD Audit of tor_curl (+10 lines after): (grep \"tor_curl\")" >> "$madAuditTorCurlLog" + echo "_________________________________________________________________________" >> "$madAuditTorCurlLog" + echo "$maud_torcurl" >> "$madAuditTorCurlLog" + echo "" >> "$madAuditTorCurlLog" +done + +IFS=$backupIFS \ No newline at end of file diff --git a/documentation/!CMD arguments.txt b/documentation/!CMD arguments.txt new file mode 100755 index 0000000..bc61e44 --- /dev/null +++ b/documentation/!CMD arguments.txt @@ -0,0 +1,54 @@ +------ Basic Usage (Downloads) ----------------------- + +# Process urls and download files from urls.txt +./mad.sh urls.txt + +# Process specific host urls in urls.txt +./mad.sh urls.txt + (ie ./mad.sh kraken urls.txt, ./mad.sh hex urls.txt) + +# Launch 4 terminals to process specific host urls in urls.txt (fallback to allhosts) +./mad.sh multi auto 4 urls.txt + +# Launch terminals to process specific host urls in urls.txt (fallback to allhosts) +./mad.sh multi auto urls.txt + +# Show the status of urls in urls.txt +./mad.sh status urls.txt + +# Reset any #RETRY# lines in urls.txt +./mad.sh reset urls.txt + + +------ Basic Usage (Uploads) ------------------------- + +# Display MAD UI Uploader (process files in ./uploads/ folder to selected hosts) +./mad.sh upload + +# Use MAD file processing (batch like the downloader) +./mad.sh upload uploads.txt + +# Show the status of urls in urls.txt +./mad.sh upload status uploads.txt + +# Reset any #RETRY# lines in uploads.txt +./mad.sh upload reset uploads.txt + + +------ Informational Display ----------------------- + +# Display Host Modules and their internal description +./mad.sh hosts + +# Diplay Plugins and their internal description +./mad.sh plugins + + +------ Other Arguments ------------------------------ + +Install curl_impersonate: Downloads the latest binary for curl_impersonate from github repo +.mad.sh install_curl_impersonate + +MAD Clipboard Monitor: Monitor clipboard for supported urls and add them to file (requires xclip -- apt install xclip) +./mad.sh clipmon urls.txt + diff --git a/documentation/!Changelog (Historical).txt b/documentation/!Changelog (Historical).txt new file mode 100755 index 0000000..9b0253c --- /dev/null +++ b/documentation/!Changelog (Historical).txt @@ -0,0 +1,480 @@ +# Additions by kittykat +# Tail format (newest to oldest) + +# +# ---------- Initial release with MAD Uploader functionality ---------- +# 2024.09.30 - [up_firestorage] Add firestorage.jp as upload host +# 2024.09.29 - [free4e/up_free4e] Add free4e.com as download and upload host +# 2024.09.29 - [harrault/up_harrault] Add harrault.fr as download and upload host +# 2024.09.29 - [acid/up_acid] Add 
acid.fr as download and upload host +# 2024.09.29 - [mad] Fix duplicate rename with literal chars in url +# 2024.09.28 - [dataupload/up_dataupload] Add dataupload.net as download and upload host +# 2024.09.27 - [netlib/up_netlib] Add mhep.netlib.re as download and upload host +# 2024.09.27 - [filesquid/up_filesquid] Add filesquid.net as download and upload host +# 2024.09.27 - [soyjak/up_soyjak] Add soyjak.download as download and upload host +# 2024.09.27 - [linxx/up_linxx] Add linxx.net as download and upload host +# 2024.09.27 - [nantes/up_nantes] Add nantes.cloud as download and upload host +# 2024.09.27 - [depotkaz/up_depotkaz] Add depot.kaz.bzh as download and upload host +# 2024.09.27 - [anarchaserver/up_anarchaserver] Add transitional.anarchaserver.org as download and upload host +# 2024.09.26 - [AutoResetAndRetryDownloads] Add autoloop handling to Doze&Retry / Ticket Expiry +# 2024.09.26 - [bowfile] Add handling of 'File has been removed due to inactivity' +# 2024.09.26 - [dailyuploads] Fix parsing blank referer +# 2024.09.26 - [dosya] Improve cookie cleanup +# 2024.09.26 - [1fichier] Improve cookie cleanup for exit node process +# 2024.09.26 - [mad] Fix direct= onion addresses (revert back to http) +# 2024.09.26 - [mad] Add additional direct= filename cleaning +# 2024.09.26 - [SkipUrlsInDownloadsCompletedTxt] Fix detection of already completed "direct=" urls +# 2024.09.25 - [bowfile] Add bowfile as download host (finally) +# 2024.09.25 - [mad + hosts] Do not remove file lock on Skip if another term is actively downloading the file +# 2024.09.25 - [click] Add clickndownload.name and clicknupload.name domains +# 2024.09.25 - [mad] Add global $UrlsVars that can be accessed from any function / plugin (code beautfar) +# - Any #key=value line added to urls.txt is parsed into this variable and their current value +# is accessible as ${UrlsVars["$key"]} -- ie. ${UrlsVars[pw]} +# 2024.09.25 - [mad] Fix ScriptDir ref when mad.sh is ran as a soft link (code beautfar) +# 2024.09.25 - [mad] Fix passing return code from hooked functions (thanks beautfar) +# 2024.09.25 - [uflix] Add server general error response handling +# 2024.09.25 - [ocr_captcha] Fix temp filename issue from decluttering / renaming +# 2024.09.24 - Update help, and documentation +# 2024.09.24 - Decluttered MAD folder structure and naming: +# (* READ the document on migration in the ./documentation folder *) +# (* REVIEW FolderStructure Pictures in documentation as well *) +# 2024.09.24 - [*all plugins / all hosts*] Updates to use the new decluttered folder structure and names +# 2024.09.24 - [SkipUrlsInDownloadsCompletedTxt] Add #REMOVED# to the Skip Url check +# 2024.09.24 - [up_gofile] Attempt to retrieve best upload server prior to file send +# 2024.09.23 - [mad] Add MAD Upload Reset (to reset #RETRY# lines in uploads.txt) +# * ./mad.sh upload reset uploads.txt +# 2024.09.23 - [kraken] Add cleanup of extra chars added to token +# 2024.09.23 - [filedot] Url encode user / pass in request +# 2024.09.23 - [mad] Complete MAD Upload Status (for uploads.txt) +# * ./mad.sh upload status uploads.txt +# 2024.09.23 - [mad] Fix trimming #pw= lines with special characters (beautfar) +# 2024.09.22 - [mad] Add extended upload argument (filepath) to process uploads in uploads.txt +# * ./mad.sh upload uploads.txt +# * This will process any line not starting with '#' and containing a '|' +# # Required format: +# * filename|HostCode (defaults in the ./uploads folder) +# * filepath|HostCode (uses file path passed in) +# * ie. 
+# MyArchive01.7z|oshi +# MyArchive01.7z|1f +# MyArchive01.7z|bow +# ! This functionality is quite new and likely I will find something I need to fix. Please +# report anything you encounter. +# 2024.09.22 - [*all upload hosts*] Updates to handle uploads.txt file processing +# 2024.09.22 - [mad] Add one more hookable function: PostFailRetryUpload() +# This is unused currrently, but will be implemented in file processing in a future update +# 2024.09.22 - [mad] Modify plugin loading system: allow multiple plugins to hook the same "hookable" functions +# * Same hookable functions: +# OnLoad(), BeginProcessing(), PreProcessUrl(), PostSuccessfulDownload(), PostFailedDownload(), +# PostFailRetryDownload(), DoneProcessingAllUrls(), PostSuccessfulUpload(), PostFailedUpload() +# Summary of changes: +# * To hook a function, it must be named "HookName_" and be unique. Best practice is to use filename +# ie. OnLoad_MyFilename() +# * NOTE: To upgrade any current plugins you wrote to function this way, just add "_" +# to the hooked function name in your plugin. +# * (Review ExampleMainHooks for more details) +# 2024.09.22 - [*all plugins*] Modified function names to use the new v2 hook mechanism, allowing for multiple +# hooks of the same function. +# 2024.09.21 - [mad] Sanitize printf in success/fail/retry/etc messaging [code: beautfar] +# 2024.09.21 - [mad] Add '#ref=' keyword to store links (like folder=) to "$CurrentRef" [code: beautfar] +# 2024.09.21 - [dbree] - Add dbree.me download host (zipcluster) +# 2024.09.21 - [up_dbree] - Add dbree.me upload host (zipcluster) +# 2024.09.21 - [nofile] - Add nofile.org download host (zipcluster) +# 2024.09.21 - [up_nofile] - Add nofile.org upload host (zipcluster) +# 2024.09.21 - [shareonline] - Add shareonline download host (zipcluster) +# 2024.09.21 - [up_shareonline] - Add shareonline upload host (zipcluster) +# 2024.09.21 - [up_yolobit] - Add yolobit upload host (zipcluster) +# 2024.09.20 - [yolobit] Add new host domain -- download files from yolobit.com (zipcluster) +# 2024.09.20 - [mad] Changed default UploadSpeedMin to 100 for uploads with RateMonitor (still catch stale uploads) +# 2024.09.20 - [lainsafe_onion] - Add lainsafe.kallist4mcluuxbjnr5p2asdlmdhaos3pcrvhk3fbzmiiiftwg6zncid.onion +# 2024.09.20 - [SkipOkUrlsInResultsTxt, SkipUrlsInCompletedTxt] - Add line verification prior to check +# 2024.09.20 - [nippy] Handle 302 response on download from some servers +# 2024.09.19 - [ocr_captcha] Create new plugin to perform OCR on images (primarily for dailyuploads) +# new image captcha system -- (WIP, accuracy maybe 25-35%, but it is all local) +# * Add "LoadPlugins=ocr_captcha.sh" to use +# * Dependencies: tesseract-ocr & imagemagick +# * (sudo apt-get install tesseract-ocr, sudo apt-get install imagemagick) +# 2024.09.19 - [dailyuploads] Fix dailyuploads captcha process -- was changed to an image captcha. +# (image captcha requires ocr_captcha plugin. not perfect -- maybe 25%-35% accuracy) +# 2024.09.18 - [mad] Add '[', ']' to literalize_string func +# 2024.09.18 - [up_uploadflix] Updated the response parsing.. 
working now +# 2024.09.17 - [dosya] Fix potential issue getting dosya filename +# 2024.09.17 - [mad] Fix LoopThroughFileUntilComplete=false not processing initially +# 2024.09.16 - Lots of pre-release updates & cleanup +# 2024.09.16 - Add bowfile as upload host +# 2024.09.16 - Add 3 new upload hosts -- dailyuploads, filehaus (down atm), uploadflix (down atm) +# 2024.09.16 - Add nippy upload (zipcluster: random, nippydrive, nippyshare, nippybox, nippyspace, nippyfile) +# 2024.09.16 - Add 3 new upload hosts -- hexload, gofile, dosya upload host +# 2024.09.16 - Add debug message to plugins to help locate issues if plugin has any errors +# 2024.09.16 - Add detection of failed uploads to hosts so other terminals will not attempt to upload +# 2024.09.16 - Add flock upload ticket detection and notification +# 2024.09.16 - Create 3 initial working upload hosts (1F, oshi, kraken). Also an example upload host. +# 2024.09.16 - Categorized the Options in the script and config into sections (minimized the config) +# 2024.09.16 - Added and Uploads section with 2 options in script and config +# - MaxUploadRetries (default=3) max tries to upload a file to each host +# - DefaultUploadHosts (default=1f,kraken,oshi) +# * This allows fast selection / entry of hosts at the prompt by typing 'd' +# 2024.09.15 - Add 3 new arguments / functionality to mad +# 1. ./mad.sh hosts -- displays all host modules and some internal details: +# hostcode, nick, prefix, functions, and upload specific info, etc.) +# 2. ./mad.sh plugins -- displays all plugins and internal details: +# (hostcode, nick, prefix, functions, entrypoint) +# 3. ./mad.sh upload -- This begins the batch upload processing: +# * Batch uploads will pickup any supported filetypes in the ./uploads folder +# (.7z, .rar, .001 - .009) +# * Once an upload is successfully uploaded, the download link and info is displayed, +# and a ticket is created in the ./uploads folder with all the details as well. +# * On completion, or already uploaded, or fail/retry, or failure, all information is +# logged to the ./results_upload.txt file in shortform, and detailed information is +# written to the ./uploads/uploads_processed.txt file. +# * The ./uploads/uploads_processed.txt file is used to ensure files are not uploaded +# more than once to each selected host. To re-up, the file can be edited to remove lines, +# or simply deleted. It's main purpose is to function until all files are uploaded in +# that batch, and then the folder cleaned for the next round. +# --- @ Uploading has several safety measures in place: +# 1. Supported file extension checking +# 2. A 2-step batch begin process: (require user to type 'y' to proceed selecting hosts, +# and then also require the user to type in the hostcodes to upload to). +# 3. Prior to the prompts, all files to be uploaded are displayed on the screen with details +# 4. Prior to hostcode input, all availabe upload hostcodes and hostnicks are displayed. +# 5. All the other MAD features inherent in initialization +# ** That said, be certain you take your own safety measures uploading: +# - Remove metadata from images, password protect your archives, etc. 
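# Example (illustrative only) of the v2 hook naming described in the 2024.09.22 entries above. Only the hook
# names and the "HookName_Filename" suffix convention come from those entries; the plugin filename, messages,
# and the ${remote_url} variable used below are assumptions, not code taken from mad.sh or its bundled plugins.
#   # ./plugins/NotifyOnDone.sh (hypothetical)
#   OnLoad_NotifyOnDone() {
#     echo "NotifyOnDone plugin loaded"
#   }
#   PostSuccessfulDownload_NotifyOnDone() {
#     # per the 2024.09.12 entry below, mad.sh vars and functions are available inside hooks
#     echo "Finished downloading: ${remote_url}"
#   }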
+# 2024.09.15 - Updates to the SkipUrlsInCompletedTxt.sh plugin to be more robust +# 2024.09.15 - Build out upload hosts templates "./hosts/up_.sh" +# ('up_' prefix is reserved for upload host modules) +# 2024.09.15 - Add MAD Upload functionality +# 2024.09.15 - Add MAD Host Details (run ./mad.sh hostdetails) +# Displays host information queried from all available host modules (./hosts/) +# 2024.09.15 - Add MAD Plugin Details (run ./mad.sh plugindetails) +# Displays available plugins (./plugins/ and their hooked functions +# +# ---------- Initial release with MAD Hosts functionality ---------- +# 2024.09.14 - Few small plugin updates (only functionality change is in SkipUrlsInCompletedTxt: +# include matching line number in output +# 2024.09.14 - Port clipmon functionality to use dynamic hosts +# 2024.09.13 - Change running in UrlOnly mode (passing in a url to process), to allow a second argument +# for the filename override (ie. ./mad.sh http://oshi.at/abcd "my filename.7z") +# 2024.09.13 - Port arguments to process a specific host urls to use dynamic hosts +# 2024.09.13 - Port .mad.sh help to use dynamic host data +# 2024.09.13 - Lots of testing.. lots of changes.. (all is passing so far.. though expect updates) +# 2024.09.13 - Add VerboseLoading option (display all hosts / plugin loads, or only FAIL statuses) +# 2024.09.13 - Added verification to hosts.sh file loading (check format, ensure unique HostFuncPrefix) +# 2024.09.13 - Created an example host with some descriptive help +# 2024.09.13 - Moved hosts functions into individual loadable host files (additional hosts can be added +# (additional supported hosts can be added with the example template "./hosts/Examples/ExampleNewHost.host" +# 2024.09.13 - Created a host folder and LoadHosts() function to load *.host files into mad.sh +# 2024.09.13 - Initial port of all host data (HostCode, HostNick, HostDomainRegex) into a modular string +# 2024.09.13 - [Major Update]: Host processing and code (modularized, moved into loadable hosts) +# - Created ListHostAndDomainRegexes object to allow modularization: +# - Allow loading hosts (and creating additional hosts) similarly to plugins +# - Merge ~4000 lines of host url checks and processing to make script more maintainable +# 2024.09.13 - Add detection of duplicate hook usage (functions) during plugin load and disallow +# 2024.09.12 - Created a few working plugins and one example plugin with helpful information for builders +# ** Plugins have passed args available, as well as all mad.sh vars and functions available ** +# - AutoResetAndRetryDownloads: Runs mad.sh reset after processing all urls and then relaunches MAD +# - CatnapCtrlC: Keeps mad.sh running until Ctrl-C, waiting for urls.txt updates +# - ExamplesMainHooks: Examples of the 7 main hooks +# - SkipOkUrlsInResultsTxt: Skips urls that already exist in results.txt with an #OK# flag +# - SkipUrlsInCompletedTxt: Better version of SkipOkUrlsInResultsTxt (uses new completed.txt) +# - UnzipAfterCompleted: (WIP) Unzips archives marked #OK# in urls.txt immediately after they are successfully +# downloaded and marked #OK# [this is not working yet] +# 2024.09.11 - Added completed.txt logging with more detailed info. +# (helpful for plugins such as unzipping and skip already downloaded urls as it contains filepath / date) +# 2024.09.11 - Worked with beautfar to build out ability to skip downloads already successfully downloaded +# in the results.txt (via SkipOkUrlsInResultsTxt.sh plugin). 
+# 2024.09.11 - Designed plugins framework in code: plugins folder, loading plugins, 5 main hooks (see readme) +# * The plugin system was designed to allow intermediate coders to implement workflows to fit their needs +# +# 2024.09.10 - Updates to nippy host processing (multi-domain, retries on unavailable response) +# 2024.09.10 - Add additional nippy hosts (nippybox.com, nippyfile.com, nippyspace.com) +# 2024.09.09 - Add retries to hexload head (ran into issue were cdn was not resolvable--likely gone) +# 2024.09.08 - Sanitize all vars written to urls.txt (prevent failures leaving a flock) +# 2024.09.07 - Add additional uflix responses +# 2024.09.06 - Add wait time response to hex and handling +# 2024.09.06 - Sanitize logging for unknown (html) errors with hexload +# 2024.09.05 - Update MinimumAllowedFilesize check for all hosts (1KB default) +# 2024.09.03 - Add new host up2sha.re +# 2024.09.03 - Replace strings dependency for bad html detection (code by beautfar) +# 2024.09.02 - Add nippyshare.me +# 2024.09.02 - Add handling of "download is temporarily unavailable" response from nippy +# 2024.09.01 - Fix MadStatus line # +# 2024.08.30 - Speed up MadStatus check / report +# 2024.08.30 - Add WorkDirOverride option to allow the working directory to be somewhere other than ScriptDir +# 2024.08.30 - Complete overhaul of ScriptDir / WorkDir to allow specifying locations +# 2024.08.30 - Converted hundreds unary operations to be more robust +# 2024.08.30 - Merge redundant shared code for maintainability and to reduce script size (~3000 lines) +# 2024.08.30 - Moved random functions out from the script configurables +# ** If you are using mad.config, it will need to be updated (grab the new one and update or merge) +# 2024.08.29 - Add handling 522 response for kraken +# 2024.08.29 - Add fdot download-limit reached response detection and removing user for further sessions +# 2024.08.29 - Add additional status [FAIL] to allow for unavailable / no retry links +# 2024.08.29 - Add fdot response handling for premium users only files +# 2024.08.28 - Add the ability to pass in a URL to simply process it instead of urls.txt +# * ./mad.sh http://oshi.at/ZZZZ +# * ./mad.sh http://oshi.at/ZZZZ\|MyFileName.7z (override filename -- don't forget the cli escape '\|' ) +# 2024.08.28 - Stringify all the rm commands for best practice (flocks, etc.) 
+# 2024.08.27 - Update which for curl_impersonate to look in ScriptDir +# 2024.08.26 - Updates to dailyuploads.net response parsing +# 2024.08.25 - Add option to specify terms to auto start in "multi auto" +# ./mad.sh multi auto # urls.txt +# 2024.08.24 - Add new host -- dailyuploads.net +# 2024.08.23 - Fix specific host processing completion (switch back to processing allhosts) +# 2024.08.23 - Update LaunchTerminal / ReloadScript args processing +# 2024.08.23 - clipmon: If specified urls.txt file does not exist, create it +# 2024.08.22 - Update curl_impersonate forks (cleanup / testing) +# 2024.08.22 - Add handling for multi-link download.gg urls (2 or more download files available on page) +# 2024.08.22 - Limit filehaus "no response" retries--server is likely down--mark Retry later +# 2024.08.21 - Modify catnapping message to not keep scrolling while waiting for downloads to finish +# 2024.08.21 - Make direct= download header retrieval and response check more robust +# 2024.08.21 - Revert the multi # urls.txt argument order (it was that, or change the documentation) +# * ./mad.sh multi # urls.txt +# * ./mad.sh multi host # urls.txt +# 2024.08.20 - Add several more 1F family domains +# * alterupload.com, cjoint.net, desfichiers.com, dfichiers.com, megadl.fr, mesfichiers.org, +# piecejointe.net, pjointe.com, dl4free.com +# 2024.08.20 - Fix script reload with multiple args +# 2024.08.20 - Fix Launch Terminal with multi # args +# 2024.08.20 - Dosya working again.. (up to 60 second delay on cdn server sending file) +# 2024.08.20 - Fix input file quick url count after initial argument parsing +# 2024.08.20 - Fix host parsing of args -- multi # host +# 2024.08.19 - Clean gofile filename +# 2024.08.19 - Fix download.gg post url for files with meta characters (ie. spaces) +# 2024.08.18 - Fix first line bash +# 2024.08.18 - Fix possible gofile cdn parsing issue +# 2024.08.18 - Updates to click file not found responses +# 2024.08.18 - Add clicknupload.site / clickndownload.site domain +# 2024.08.18 - Clean download.gg filename +# 2024.08.18 - Add download.gg Removed and Error responses +# 2024.08.17 - Fix flocks for active downloads with AutoRenameDuplicateFilenames=true +# (Only allow one download pure unique url -- including dupes) +# 2024.08.16 - Add AutoRenameDuplicateFilenames option (default=false) +# For any download filename that is a duplicate, this will prepend the filename with a random string +# ie. MyFile.rar --> 20240801124552305_renamed_MyFile.rar +# ** NOTE: Enabling AutoRenameDuplicateFilenames will result in downloading every url regardless +# of whether it is a duplicate or not. +# Enabled: +# (+) No need to skip simultaneous downloads of same named files +# (+) Less concern for unique filename overrides or collisions +# (-) Cannot use the |fname.ext override to try multiple download urls in order. +# ie. +# http://hosturl1.com/abcd|myfile.rar +# http://hosturl2.com/abcd|myfile.rar +# http://hosturl3.com/abcd|myfile.rar +# -- instead, use comments and uncomment if necessary -- +# http://hosturl1.com/abcd|myfile.rar +# # alt http://hosturl2.com/abcd|myfile.rar +# # alt http://hosturl3.com/abcd|myfile.rar +# Disabled: (normal / previous functionality) +# (+) Can use the |fname.ext override to try multiple download urls in order. +# (+) More control over downloads and the expected end result +# (-) More concern for unique filename overrides or collisions +# (-) Have to wait for duplicate filenames to finish downloading before starting the next. 
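# Example (illustrative only) relating to the AutoRenameDuplicateFilenames entry above: the prepended string
# looks like a millisecond datetime stamp. A rough bash equivalent of the rename -- the exact format string is
# an assumption, not taken from mad.sh -- would be:
#   newname="$(date +%Y%m%d%H%M%S%3N)_renamed_MyFile.rar"   # e.g. 20240801124552305_renamed_MyFile.rar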
+# 2024.08.15 - Add tenvoi urls as download host (1F) +# 2024.08.14 - Add OshiBaseUrlOverride option to allow using the input url or forcing to oshi.at or oshi.onion +# 2024.08.12 - Fix file.flock check (needs to happen prior to downloads exist check) +# This fixes issues with two downloads with the same filename occurring where the second is marked failed/retry) +# 2024.08.11 - Fix gofile possible head filename parsing (new filename*=) +# 2024.08.10 - Add new host -- offshore.cat +# 2024.08.06 - Fix mad.config override for UseTorCurlImpersonate +# 2024.08.05 - Set curl_ff109 priority in which check +# 2024.08.03 - Let click resolve url domain for first 3 attempts, then fallback to .org +# 2024.08.02 - Add curl_impersonate menu choices lwthiker (orig), lexiforest (fork) +# 2024.07.30 - Add curl_impersonate lexiforest fork (more active, upgraded curl 8.7.1) +# 2024.07.28 - Fix possible 9saves fetch error +# 2024.07.26 - Fix upload.ee fileinfo request +# 2024.07.20 - Oshi file removed / no filename in header +# 2024.07.18 - Fix click dns resolution for alternate orgs that often fail +# 2024.07.16 - Fix for oshi and https cert error +# 2024.07.13 - Allow nippydrive downloads as well (ala nippyshare) +# 2024.07.12 - Fix click post for filenames with url metacharacters +# 2024.07.12 - Fix to not add blank lines at the end of processing list (from reloads) +# 2024.07.12 - Ensure url flock exists prior to download start +# 2024.07.06 - Sanitize clicknupload cdn url (fix for filenames with spaces and metacharacters) +# 2024.07.05 - Rework the reload function and terminal launcher +# 2024.07.05 - Fix bad partial detection logic / add "Too many connections" +# 2024.07.05 - Fix handling of an unexpected head query response for click and most other hosts +# 2024.07.04 - Add new host -- clicknupload / clickndownload +# 2024.07.03 - Remove nekofiles (host is gone) +# 2024.07.02 - Add new host -- gofile.io +# 2024.07.02 - Make reload script more dynamic +# 2024.07.01 - Add new host -- nippyshare.com +# 2024.06.30 - Add several direct hosts so urls can just be added +# -- Neko, lainsafe, FileDoge, Eternal, DiscreetShare +# 2024.06.28 - Add new host -- download.gg (works for good links, still needs file removed response detection) +# 2024.06.28 - Add new host -- firestorage (works for good links, still needs file removed response detection) +# 2024.06.27 - Fixes for ninesaves, biteblob, and other additions that were not tested long enough +# 2024.06.25 - Updates to filename handling with url-enconding +# 2024.06.25 - Updates to biteblob.com url handling and responses +# 2024.06.24 - Add new host -- biteblob.com +# 2024.06.24 - Add new host -- 9saves.com +# 2024.06.23 - Add clipboard monitoring option (rudimentary for now). Run in a separate terminal. +# * Dependencies: xclip (sudo apt install xclip) +# ie. 
./mad.sh clipmon urls.txt +# 2024.06.22 - Add addtl pixeldrain and uploadhive file removal responses +# 2024.06.18 - Add a check to fix url flock for direct downloads with no head (fix for last update) +# 2024.06.18 - Update downloads folder to use script dir instead of pwd +# 2024.06.17 - Update to download curl_impersonate (retrieve version/date) +# 2024.06.16 - Add addtl uploadflix removed response +# 2024.06.16 - Update uploadhive removed file detection and head 500 server response +# 2024.06.16 - Fix detection of already completed if in downloads and size is equal +# 2024.06.15 - Updates to direct to handle no head response (api.discreetshare.com, and others) +# 2024.06.14 - Modify bad partial detection +# 2024.06.14 - Add debug logging to bad html check to show bad lines found +# 2024.06.14 - Updates to file downloads (generic and specific -- pd, direct) +# 2024.06.14 - Only use agent-specific header overrides if not using curl_impersonate (they are already handled) +# 2024.06.12 - Handle incorrect head response from pixeldrain +# 2024.06.12 - Make pixeldrain bypass an option (default false) +# 2024.06.11 - Make direct downloads more robust (perform no-resume downloads where no content-length is sent. +# ie. filedoge.com (this fixes filedoge.com downloads using direct=http://api.filedoge.com/) +# 2024.06.08 - Add notification option to install curl_impersonate if option is set to true and it is not found +# 2024.06.08 - Add a option to download / extract curl_impersonate (using tor+curl) to the script +# 2024.06.08 - Fix youdbox removal detection when no response +# 2024.06.04 - Fix detect direct urls if no other url types exist in inputfile +# 2024.06.04 - Remove unecessary filename parsing when a filename override is used +# 2024.06.03 - Better handling of hexload download2 cdns +# 2024.06.01 - Add additional file removal response checks for youdbox +# 2024.05.30 - Attempt to fix incorrect kraken urls, make fileid more robust +# 2024.05.28 - Add filedot.top (filedot.to) +# 2024.05.26 - Add "file was deleted because of being against Upload.ee rules" catch +# 2024.05.26 - Re-incorporate new pixeldrain viewpump functionality (use PhantomJSCloud) +# 2024.05.26 - Add retry/fail if filesize parsing fails +# 2024.05.25 - Add check for "Too many connections from your IP" to partial repairing +# 2024.05.20 - Small fix for html detecting in partials and repairing (trunc) +# 2024.05.19 - Fix filehaus head response check +# 2024.05.19 - Make filesize parser more robust for all hosts +# 2024.05.18 - Updated random user-agents (remove mobile/linux and use the top 10 -- 2024/05) +# 2024.05.18 - Changed head query of dosya to better handle response (location with no filesize updates head query +# with new location. +# 2024.05.16 - Fix null error when running without curl_impersonate +# 2024.05.16 - Add optional loading of saved mad.config variables from mad.config file if it exists to allow +# upgrading without having to reconfigure all the settings. +# 2024.05.15 - Allow RateMonitor on kraken if not resuming (issues only occur if a partial exists and the cdn +# server connected to does not support byte resume correctly.. which tends to be about half the time). +# 2024.05.11 - Addition of "direct=" keyword in urls.txt to download from a direct link or cdn +# - If the direct url doesn't end in the filename, it is highly recommended to override it with |filename.ext +# - ie. 
direct=http://somehost.onion/abcD|filename.part1.rar +# 2024.05.11 - Disable RateMonitor for kraken (as not all servers support byte resume correctly) +# 2024.05.09 - Sanitize urls to handle potential non-acceptable chars +# 2024.05.09 - Fix possible dosya cdn issue +# 2024.05.08 - Fix to allow inputfile not being in the script directory +# 2024.05.08 - Fix detecting corrupt partial with html (and trunc logging) +# 2024.05.07 - Add OsType (used for launching terminals with "multi" argument +# 2024.05.06 - Fdot settings check, format updates, etc. +# 2024.05.05 - Add pixeldrain ip / rate limited failure (captcha locked). View pump broke. Bypass still in testing +# 2024.05.03 - Fix possible 1F filesize check failure +# 2024.05.02 - Add kraken detection of cloudflare server issues (521: Web server down) +# 2024.05.01 - Add detection and repair of html corruption in bad partial downloads +# 2024.04.28 - Update pixeldrain (viewpump broke, use bypass) +# 2024.04.26 - Host fixes (upload.ee, hex, 1F, uhive) +# 2024.04.20 - Add youdbox.site as host +# 2024.04 - Add filedot.to as host (integration with user/pass login) +# 2024.04 - Add AutoRepairBadPartials (deprecated backup/restore) +# 2024.04 - Add download file retries (quick retries) +# 2024.04 - Add auto-switching to .top/.su domains for filehaus on excessive retries +# 2024.04 - Additional url hardening +# 2024.04 - Add uploadflix.cc / uploadflix.org +# 2024.04 - Add uploadhive.com +# 2024.04 - Add upload.ee +# 2024.04 - Add random user agent for usage +# 2024.04 - Add dosyaupload.com +# Detection of html pollution in downloads +# Updates to pixeldrain bypass +# Catch kraken "Oops" server alert +# Update pixeldrain (viewpump broke, use bypass) +# Host fixes (upload.ee, hex, 1F, uhive) +# Add youdbox.site as host +# Add filedot.to as host (integration with user/pass login) +# Add AutoRepairBadPartials (deprecated backup/restore) +# Add download file retries (quick retries) +# Add auto-switching to .top/.su domains for filehaus on excessive retries +# Additional url hardening +# Add uploadflix.cc / uploadflix.org +# Add uploadhive.com +# Add upload.ee +# Add random user agent for usage +# Add dosyaupload.com +# (retry skipped collisions / allow multiple host for a file) +# Add "LoopThroughFileUntilComplete" option to continue processing urls.txt until it has no urls to process +# * When it comes back around, if it is completed, it will be marked #OK# My file!.rar (File exists) +# * First will lock and begin downloading, the second will skip it and move on, eventually coming back around to it. +# http://krakenfiles.com/view/abcd123456/file.html|My file!.rar +# http://oshi.at/eeaa/12345.rar|My file!.rar +# ie. +# Add download / inprogress file downloading to handle collisions and to allow multi-host options for a file. +# Add hexupload.net +# Make hosts unique, cleanup cookies and temp +# Add kraken downloading (kraken) +# ie. http:/oshi.at/abcd/1abc.7z|NewFilename.001 +# Add ability to specify download filename by adding "|filename.ext" to the end of the url in file. +# Add filehaus downloading (fh) +# - Removes the _flocks folder to clear any stale tickets/locks. +# - Reverts all "#RETRY#" commented lines back so it can be downloaded again. +# Add mad.sh reset urls.txt +# leaking into the file. It also allows for resuming from a bad node, where if it is off, the download must restart. 
+# *deprecated* Add PdAutoBackupRestorePartials option that will backup / restore partial pixeldrain downloads to prevent bad api data +# ./mad multi auto urls.txt +# (OS dependent, 1 terminal per host -- whonix tested) +# ./mad multi fh 2 urls.txt +# ./mad multi oshi 2 urls.txt +# ./mad multi pd 2 urls.txt +# ./mad multi hex 2 urls.txt +# ./mad multi 1f 2 urls.txt +# (OS dependent, X terminals for a specific host -- whonix tested) +# ./mad multi [2-8] urls.txt +# (OS dependent, X terminals for all hosts -- whonix tested) +# ./mad urls.txt +# ./mad urls.txt +# ./mad urls.txt +# ./mad urls.txt +# (OS agnostic, run in X or more separate terminals) +# ./mad fh urls.txt +# ./mad oshi urls.txt +# ./mad pd urls.txt +# ./mad hex urls.txt +# ./mad 1f urls.txt +# (OS agnostic, run in X separate terminals) +# Add mutli-terminal / single-host (1 per host) downloading +# Add oshi downloading (oshi) +# Add pixeldrain downloading (pd) +# Add 1F french bytes conversion and potential incorrect download filesize detection (1Flove) +# Add hexload downloading functionality and integrated logging, moving, etc. +# Add multi-host downloading (1F, Hexload) +# More verbose logging on Retry/Fail reason +# Additional cdn debugging +# Add auto-commenting in urls.txt on completed / failed downloads +# Add check for completed download in the MoveToFolder (log and continue) +# Cleanup and debug additions +# Add verbose results logging +# Add optional minimum download size check +# Add retry attempts to acquire filename, filesize, and header +# Add retry on initial status attempts +# Add resume downloads (auto-resume by default) +# Incorporate multi-process download to find an empty slot faster (thanks 1flove devs) +# Add 1F Url validation +# Add STOP! keyword to allow ending after a specified download (perhaps drive limitations or another reason) +# Cleanup +# *deprecated* Update connection headers 2023.11 +# folder="" --> Keeps downloads in initial downloads directory +# - ex. 
folder=Folder1 Name (desc) --> Creates a folder "Folder1 Name (desc)" and moves downloads there +# Add folder= option to allow moving downloads into specified folders upon completion (blank to reset to downloads) +# *deprecated* Add option to keep partial downloads (move to *.partial folder) -- may use up space for large downloads +# *deprecated* Add multiple text recode options (some require apt install recode, or apt install html2text) +# *deprecated* Fixes for latin charset (UseRecode=html2iso8859) +# Add option to clear screen on filelist reload +# Add ability to auto-reload urls.txt if modifications are detected +# Allow reloading/restarting script (updated urls.txt) after finished processing current url "reload" or "restart" file exists +# Allow clearing the screen if "clear" file exists +# Allow aborting/stopping processing remaining urls if "stop" file exists +# Add allow comment lines (#), blank lines, and garbage lines (non-http starting) +# Add skipping file has been deleted +# Add skipping file removed by owner or does not exist +# Add skipping of removed files from host +# Output download status into results.txt in script directory +# Display try # on which successfully retrieved a valid circuit +# Add fixing (autoconverting) http:// --> https:// +# Added debug option (save html response in _debug folder) +# Added more output verbosity +# Added configurable failed download retries +# Increased connection retries / configurable connection timeout +# Fixes to output and code diff --git a/documentation/!README-mad-v2024.09.24-migration-checklist.txt b/documentation/!README-mad-v2024.09.24-migration-checklist.txt new file mode 100755 index 0000000..8e59e14 --- /dev/null +++ b/documentation/!README-mad-v2024.09.24-migration-checklist.txt @@ -0,0 +1,51 @@ +1.) Read and understand the changes -- see ./documentation/FolderStructure-Default (AIO) or + ./documentation/FolderStructure-WorkDirOverride to visualize the changes. + +Summary: +-------- + * Renamed _debug, _temp, _flocks to .debug, .temp, .flocks (so you can hide them if desired) + * Renamed 'downloads' folder (transferring files) to .inflight + * Renamed 'completed' folder to downloads (makes more sense as there are uploads / downloads (& .inflight) + * Moved "plugins" and "hosts" folders to ScriptDir in code (so they must reside in the same folder as + mad.sh (and curl_impersonate, mad.config if you choose to use them) + * Added ./data folder in WorkDir (the downloads_completed, uploads_completed reside here). It is intended + to be a more long-term data storage (things that plugins will / do use. + * Moved results.txt to ./downloads for downloads and ./uploads for uploads. This file is the verbose + processing of lines (OK, SKIP, FAIL, RETRY, NOCDN, PASSWORD, RENAME, etc). It is helpful to see an outline + of all the terminal processing. + * A summary of all the uploaded download links is created in ./uploads/results-links.txt (only successes) + * Upload completed tickets are put in the ./uploads/_tickets folder to keep it less cluttered. + * If uploads are done manual mode (through the UI ./mad.sh upload), then a file named + ./uploads/temp_upload_handler.txt is created and used to ensure multiple terminals do not reprocess + completed / failed upload files. If using uploads.txt, this is not created and not necessary as it is + handled directly in the uploads.txt + +2.) End any mad.sh processes +3.) Delete your old hosts & plugins folders (save off any plugins you are working on or wish to save). 
If you have plugins that reference any of the old completed.txt, results.txt, or results_uploads.txt, they will need
+    to be updated to reference their new locations: $WorkDir/data/downloads_completed.txt,
+    $WorkDir/downloads/results.txt, $WorkDir/uploads/results.txt
+4.) Delete _debug, _temp, _flocks in your old folder
+5.) Rename 'downloads' folder to '.inflight' (or delete if empty)
+6.) Rename 'completed' folder to 'downloads'
+7.) Copy all the files in this bundle and move to your MAD locations (overwrite anything there)
+8.) Rename completed.txt to downloads_completed.txt and move to ./data
+9.) If you have LoadPlugins="SkipUrlsInCompletedTxt.sh" in your mad.sh or mad.config, the name has changed to
+    SkipUrlsInDownloadsCompletedTxt.sh, so update it in the LoadPlugins="" line.
+Why?
+ * Less clutter everywhere (especially for those who use the WorkDirOverride)
+   - Scripts are in ScriptDir (mad.sh, mad.config, curl_impersonate, plugins, hosts)
+   - Downloads (complete) and all their result data are in the downloads folder (except urls.txt, which can be anywhere)
+   - Downloads can work in batches: once done, extracted, and moved, the contents can be purged
+   - Downloads that are being transferred / resuming are in the .inflight folder.
+   - Once a batch of downloads is complete, this can be purged. There shouldn't be anything in it unless
+     something is processing, or failed while processing and is waiting for resume.
+   - Uploads (files) and all their results data / result-links.txt are in uploads (except uploads.txt)
+   - Uploads can work in batches: once done and the download links handled, this can be purged for the next batch
+   - Upload tickets are saved in ./uploads/_tickets
+   - All .folders can be hidden; they are also generally short-term (really for processing a batch)
+   - Once processing is complete (no uploads or downloads active), these can be purged
+   - The new data folder is for longer-term storage of data such as completed uploads / downloads for plugins
+
+* That's it for now..
\ No newline at end of file
diff --git a/documentation/!README.txt b/documentation/!README.txt
new file mode 100755
index 0000000..dbeaa03
--- /dev/null
+++ b/documentation/!README.txt
@@ -0,0 +1,157 @@
+:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:
+: Multi-host Auto Downloader [aka MAD] (by kittykat) :
+:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:
+
+Setup:
+------
+1. Extract the folder to a location.
+2. Ensure the script is executable
+   # Open terminal in script folder (or open terminal and cd into script folder).
+   # Run "chmod +x mad.sh" to give the script execute permission.
+3. Configure the settings you desire in the mad.sh script.
+* Optional config:
+   - Copy mad.config from the Documentation folder to the script directory and configure settings there instead. This
+     file will load and override any settings configured in the script -- this allows upgrading mad.sh without
+     having to reconfigure the settings across versions.
+* Optional curl_impersonate:
+   - See the Optional Dependencies section below.
+4. Add urls to urls.txt or any file you wish to use.
+5. Run "./mad.sh urls.txt" (see usage section below for additional commands or run "./mad.sh ?" for help)
+
+
+Optional Dependencies:
+---------------------
+Some hosts use CloudFlare to detect and block scripts (such as hexload).
+To get around it, this script needs to impersonate a browser.
+You'll need to download "curl-impersonate".
+ +It can be obtained on GitHub, search online for "curl-impersonate" + +To access the releases on GitHub without javascript, do this: +1. Visit the GitHub page of curl-impersonate and add "/releases/latest/" at end of URL. +2. You'll be redirected to the latest version, e.g: "/releases/tag/vx.x.x" +3. In the URL replace "tag" with "expanded_assets", e.g. "/releases/expanded_assets/v0.5.4" + +- Download archive "curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz". +- Extract files "curl-impersonate-ff" and "curl_ff109" next to this script or into your PATH." + + +Usage (urls.txt): +----------------- +- ENTER 1f, hexload, pixeldrain, kraken, dosya, filehaus, oshi, upload.ee, uploadhive, or uploadflix urls + in urls.txt (one url per line). +- ! No trailing spaces BUT requires at least one empty newline at the end of file + +! Accepts/ignores comment lines and garbage lines (lines beginning with '#' or non http) + +Keywords (urls.txt): +-------------------- +folder= + - Changes save to folder (Lines beginning with keyword "folder=") + ie. folder=This is my folder! (vids) [2024] +|filename.ext + - Overrides filename to save download as (add the suffix "|Filename.ext" to a url) + ie. http://oshi.at/abcd|My new filename.rar +direct= + - Will download directly from url (special processing for Lainsafe, FileDoge, NekoFile, DiscreetShare, and others. + ie. direct=https://oshi.at/abcd/ABCD +#pw= + - Will update the $CurrentZipPassword variable in mad.sh (can be accessed in plugins) +STOP! + - Stops processing at this line +RELOAD! + - Forces a reload of the script and urls.txt file with the same commandline it started with + + +Example: +----------- +folder=New folder 01 +# pw: **1234567890$$ +# ref: http//reference.source.url/here.html +https://1fichier.com/?123456789abcdefghijk +http://hexload.com/123456789abc + +folder=New folder 02 +# pw: 4444555551-1 +http://5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd.onion/ZaZa/12az.rar +http://oshi.at/AAzz/11ZZ.rar|File - Set 001 (2001).7z +http://oshi.at/AAyy/11YY.rar|File - Set 002 (2001).7z +http://pixeldrain.com/u/ZZaa0011 + +folder=Direct link fun +# pw= 2022234092 +direct=http://pomf2.lain.la/f/abcd123456789.7z +direct=http://pomf2.lain.la/f/ABCD998877000.rar|This is it [2022].rar + + +------ Informational Display ------------------------- + +[Status] of urls in urls.txt +./mad.sh status urls.txt + +[Reset] failed / retry urls in urls.txt +./mad.sh reset urls.txt + +[Host] Modules and their internal description +./mad.sh hosts + +[Plugins] and their internal description +./mad.sh plugins + + +------ Basic Usage (Uploads) ------------------------- + +[Upload] launch MAD Uploader (process files in ./uploads/ folder to selected hosts) +./mad.sh upload + + +------ Basic Usage (Downloads) ----------------------- + +[Run] +./mad.sh urls.txt + +## Multi Runs: (mutli-terminals / all-hosts / specific-host) ## +--------------------------------------------------------------- +[Normal Mode] Process urls.txt in order with multiple terminals downloading + (OS agnostic, run in X or more separate terminals) + ./mad urls.txt + ./mad urls.txt + (OS dependent, X terminals for all hosts -- whonix tested) + ./mad multi [2-8] urls.txt + +[Specific Host] Process only X host in terminal + (OS agnostic, run in X separate terminals) + ./mad 1f urls.txt + ./mad hex urls.txt + ./mad pd urls.txt + ./mad kraken urls.txt + ./mad dosya urls.txt + ./mad fh urls.txt + ./mad oshi urls.txt + ./mad upee urls.txt + ./mad uphive urls.txt + ./mad upflix urls.txt + +[**Multi Specific 
Host] Create X terminals for a specific host and process downloads in order + (**OS dependent, X terminals for a specific host -- whonix tested) + ./mad multi 1f 2 urls.txt + ./mad multi hex 2 urls.txt + ./mad multi pd 2 urls.txt + ./mad multi kraken 2 urls.txt + ./mad multi dosya 2 urls.txt + ./mad multi fh 2 urls.txt + ./mad multi oshi 2 urls.txt + ./mad multi upee 2 urls.txt + ./mad multi uphive 2 urls.txt + ./mad multi upflix 2 urls.txt + +[**Multi Auto] Create 1 terminal for each host and process downloads in order + (**OS dependent, 1 terminal per host -- whonix tested) + ./mad multi auto urls.txt + +[**Multi Auto] Create 4 terminals (1 terminal for each host) and process downloads in order + (**OS dependent, 1 terminal per host -- whonix tested) + ./mad multi auto 4 urls.txt + + + \ No newline at end of file diff --git a/documentation/FolderStructure-Default (AIO)/ScriptDir+WorkDir.jpg b/documentation/FolderStructure-Default (AIO)/ScriptDir+WorkDir.jpg new file mode 100755 index 0000000..7186626 Binary files /dev/null and b/documentation/FolderStructure-Default (AIO)/ScriptDir+WorkDir.jpg differ diff --git a/documentation/FolderStructure-WorkDirOverride/ScriptDir.jpg b/documentation/FolderStructure-WorkDirOverride/ScriptDir.jpg new file mode 100755 index 0000000..d709f9c Binary files /dev/null and b/documentation/FolderStructure-WorkDirOverride/ScriptDir.jpg differ diff --git a/documentation/FolderStructure-WorkDirOverride/WorkingDir.jpg b/documentation/FolderStructure-WorkDirOverride/WorkingDir.jpg new file mode 100755 index 0000000..687075b Binary files /dev/null and b/documentation/FolderStructure-WorkDirOverride/WorkingDir.jpg differ diff --git a/documentation/README-upload_hosts.txt b/documentation/README-upload_hosts.txt new file mode 100755 index 0000000..4d58e6c --- /dev/null +++ b/documentation/README-upload_hosts.txt @@ -0,0 +1,66 @@ +# Upload Hosts / HostCodes (by Retention, Max Size) +# ------------------------------------------------- + +# Long Retention ----------------------------------------------------------------------- +Max Size . HostCode . Nickname . Notes +# --------------------------------------------------------------------------------------- + 300GB 1f 1fichier.com 15d expiry free accounts + 300GB fh filehaus.top (.su) ?? expiry + 20GB rz ranoz.gg ?? expiry + 10GB gofile gofile.io ?? expiry + 10GB tmpme tempfile.me 3mo expiry (tend to ban 7z faster) + 5GB uhive uploadhive + 5GB uflix uploadflix.cc 7d inactive expiry + 5GB oshi oshi.at (.onion) 1000 file hits +- 4GB bd bedrive.ru ?? expiry +- 4GB daily dailyuploads.net ?? expiry +- 2GB hex hexload.com 30d inactive expiry + 2GB dosya dosyaupload.com 45d inactive expiry + 2GB fs firestorage.jp 90d+ inactive expiry +* 2GB axfc axfc.net 90d+ inactive expiry +- 1GB kraken krakenfiles.com 90d inactive expiry + 1GB ansh anonsharing.com 6mo expiry + 300MB trbo turbo.onion ~40d expiry + 250MB upev uploadev.org 90d inactive expiry +* 240MB ko kouploader.jp 5mo expiry (240MB max) + 100MB bow bowfile.com 20d inactive expiry + 100MB yolo yolobit ?? expiry + 100MB nofile nofile.org ?? expiry + 100MB so share-online.vg ?? expiry + 100MB inno innocent.onion ?? expiry + +# Short Retention ---------------------------------------------------------------------- +Max Size . HostCode . Nickname . 
Notes +# --------------------------------------------------------------------------------------- + 10GB nant fichiers.nantes.cloud ~1mo or less expiry, jirafrau + 10GB anarc anarchaserver.org ~1mo or less expiry, jirafrau + 10GB nlib netlib.re ~1mo or less expiry, jirafrau +* 10GB raja uploadraja.com 4d inactive expiry + 5GB squid filesquid.net ~1mo or less expiry, jirafrau + 4GB tmpsh temp.sh 3d expiry + 1GB kaz depot.kaz.bzh ~1mo or less expiry, jirafrau + 512MB linx linxx.net ~1mo or less expiry, jirafrau + 500MB soy soyjak.download ~1mo or less expiry, jirafrau + 195MB dup dataupload.net ?? expiry + 100MB nippy nippy* ?? expiry, (file, share, box, drive, space) + 100MB dbree dbree.me ?? expiry + ?? harr files.harrault.fr ~1mo or less expiry, jirafrau + ?? acid dl.acid.fr ~1mo or less expiry, no resume, jirafrau + ?? fr4e sendfree4e.fr ~1wk or less expiry, jirafrau + + +Failing (-): +---------------- +daily dailyuploads.net (MAD download failing -- JS required / Google Recaptcha) +kraken kraken.com (MAD download failing -- JS required / Google Recaptcha) +hex hexload.com (MAD download failing -- JS required / Google Recaptcha) +bd bedrive.ru (MAD download failing -- JS required / Google Recaptcha) + +NOTES (*): +---------------- +raja uploadraja.com (MAD download not implemented) +ko kouploader.jp (MAD download not implemented) +axfc axfc.net (MAD download not implemented) + + + diff --git a/hosts/1fichier.sh b/hosts/1fichier.sh new file mode 100755 index 0000000..330cd99 --- /dev/null +++ b/hosts/1fichier.sh @@ -0,0 +1,377 @@ +#! Name: 1fichier.sh +#! Author: kittykat +#! Version: 2024.10.06 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='1f' +HostNick='1fichier' +HostFuncPrefix='fich' +HostUrls='1fichier.com, tenvoi.com, alterupload.com, cjoint.net, desfichiers.com, dfichiers.com, megadl.fr, mesfichiers.org, piecejointe.net, pjointe.com, dl4free.com' +HostDomainRegex='^(http|https)://(.*\.)?(1fichier\.com|tenvoi\.com|alterupload\.com|cjoint\.net|desfichiers\.com|dfichiers\.com|megadl\.fr|mesfichiers\.org|piecejointe\.net|pjointe\.com|dl4free\.com)/\?[a-z0-9]{20}$' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! 
_DownloadFile() +fich_DownloadFile() { + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + local remote_url=${1} + local filecnt=${2} + fich_cookie_jar="" + fich_adz_parameter="" + target_file_link="" + fich_user_provided_password="" + is_password_protected=false + for ((y=1; y<=15; y++)); do + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $lockfile; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + tor_identity="${RANDOM}" + PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s "${remote_url}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/?}" "prechk$y" "${PAGE}" + fi + file_information=$(grep -oP '(?<=)[^<]*?(?=)' <<< "${PAGE}") + size=$(echo "${file_information}" | tail -n 1) + filename=$(echo "${file_information}" | head -n 1) + if [ ! "$filename_override" == "" ] ; then + filename="$filename_override" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + download_inflight_path="${WorkDir}/.inflight/" + completed_location="${WorkDir}/downloads/" + if grep -Eqi "The requested file has been deleted for inactivity|Le fichier demandé a été supprimé automatiquement pour inactivité" <<< "${PAGE}"; then + echo -e "\n${RED}The file in URL (${remote_url}) was removed due to inactivity${NC}" + removedDownload "${remote_url}" + return 1 + elif + grep -Eqi "The requested file does not exist|It could be deleted by its owner" <<< "${PAGE}"; then + echo -e "\n${RED}The file in URL (${remote_url}) does not exist. Is the URL correct?${NC}" + removedDownload "${remote_url}" + return 1 + elif + grep -Eqi "The requested file has been deleted|Le fichier demandé a été supprimé" <<< "${PAGE}"; then + echo -e "\n${RED}The file in URL (${remote_url}) has been deleted by owner${NC}" + removedDownload "${remote_url}" + return 1 + elif + grep -Eqi "Le fichier demandé a été supprimé suite à un rapport d'abus|The requested file has been deleted following an abuse request" <<< "${PAGE}"; then + echo -e "\n${RED}The file in URL (${remote_url}) was reported and removed by 1fichier${NC}" + removedDownload "${remote_url}" + return 1 + elif + grep -Eqi "Le fichier demandé a été supprimé suite à une notification|The requested file has been deleted following a notification" <<< "${PAGE}"; then + echo -e "\n${RED}The file in URL (${remote_url}) was reported and removed by 1fichier${NC}" + removedDownload "${remote_url}" + return 1 + fi + if [[ -z "$filename" || -z "$size" || ${size//[!0-9]/} =~ '^[0-9]+([.][0-9]+?$' ]]; then + if [ $y -eq $MaxUrlRetries ] ; then + echo -e "\n${RED}ERROR: Filename or size not found${NC}" + echo -e "url: ${remote_url}" + echo -e "filename: $filename" + echo -e "size: $size" + filenameOrSizeNotExistDownload "${remote_url}" "${filename}" "${size}" + return 1 + fi + printf " ." 
+ sleep 1 + continue # Try again if not MaxUrlRetries + fi + if grep -q "id=\"pass\"" <<< "${PAGE}"; then + echo -e "${YELLOW}This download requires a password${NC}" + passwordProtectedDownload "${remote_url}" + return 1 + fi + if [ $y -gt 1 ] ; then + printf "\\n" + fi + echo -e "${GREEN}${filename} (${size}) is available.${NC}" + break + done + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [ $z -eq $MaxUrlRetries ] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if fich_FindEmptySlot && fich_FetchFileInfo "" $((z+1)) $finalAttempt && fich_GetFile "${filecnt}" $((z+1)) $finalAttempt $filename ; then + return 0 + elif [ $z -lt $MaxUrlRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/?}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/?}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! +fich_FetchFileInfo() { + local fileCnt=$1 + local num_attempt=$2 + local finalAttempt=$3 + echo -e "\nTrying to get CDN URL" + fich_adz_parameter=$(cat "${WorkDir}/.temp/fich_adz_parameter") + fich_cookie_jar=$(cat "${WorkDir}/.temp/fich_cookie_jar") + tor_identity=$(cat "${WorkDir}/.temp/fich_tor_identity") + rm -f "${WorkDir}/.temp/fich_cookie_jar"; rm -f "${WorkDir}/.temp/fich_adz_parameter"; rm -f "${WorkDir}/.temp/fich_tor_identity"; + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${fich_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + cdn_request=$(tor_curl_request --insecure -s -L -b "${fich_cookie_jar}" -c "${fich_cookie_jar}" -F "submit=Download" -F "pass=${fich_user_provided_password}" -F "adz=${fich_adz_parameter}" "${remote_url}") + target_file_link=$(echo "$cdn_request" | grep -A 2 '
' | grep -oP ' /dev/null ; then + if $is_password_protected; then + echo -e "${RED}ERROR: Incorrect password${NC}\nSince this download required a password, you might have copied it incorrectly?" + passwordProtectedDownload "${remote_url}" + exitDownloadError=true + return 2 + else + echo -e "${RED}ERROR: Could not find CDN URL${NC}" + if [ "${finalAttempt}" == "true" ] ; then + noCdnDownload "${remote_url}" + fi + return 1 + fi + fi + echo -e "${GREEN}CDN URL has been found!${NC}" +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! +fich_GetFile() { + local filecnt=$1 + local num_attempt=$2 + local finalAttempt=$3 + local filename=$4 + echo -e "\n${RED}❤${NC} Saving ${GREEN}${remote_url##*/?}${NC} to ${GREEN}${filename}${NC}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + for ((j=1; j<=4; j++)); do + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + file_header=$(tor_curl_request --insecure -sSIL -e "${remote_url}" "${target_file_link}") + file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + download_inflight_path="${WorkDir}/.inflight/" + completed_location="${WorkDir}/downloads/" + file_path="${download_inflight_path}${filename}" + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/?}" "fich_savehead${num_attempt}_${j}" "target_file_link: ${target_file_link}"$'\n'"${file_header}" + fi + if [[ -z "$file_header" ]] || [[ -z "$file_size_bytes" ]]; then + continue + else + break + fi + done + if grep -Eqi "200 OK" <<< "${file_header}" > /dev/null ; then + echo "We good" > /dev/null + elif grep -Eqi "410 Gone" <<< "${file_header}" > /dev/null ; then + echo -e "${RED}ERROR: Failed to retrieve file header (410 Gone).${NC}\nThis could be due to 1fichier experiencing a temporary issue." + if [ "${finalAttempt}" == "true" ] ; then + failedDownload "${remote_url}" "${filename}" "410 Gone" + fi + return 1 + elif grep -Eqi "403 Forbidden" <<< "${file_header}" > /dev/null ; then + echo -e "${RED}ERROR: Failed to retrieve file header (403 Forbidden).${NC}\nThis could be due to 1fichier experiencing a temporary issue." + if [ "${finalAttempt}" == "true" ] ; then + failedDownload "${remote_url}" "${filename}" "403 Forbidden" + fi + return 1 + else + echo -e "${RED}ERROR: Failed to retrieve file header (Unknown Head Response).${NC}\nThis could be due to 1fichier experiencing a temporary issue." + if [ "${finalAttempt}" == "true" ] ; then + failedDownload "${remote_url}" "${filename}" "Unknown Head Response" + fi + return 1 + fi + size_value=$(echo $size | cut -f1 -d' ') + size_unit=$(echo $size | cut -f2 -d' ') + case $size_unit in + KB|Ko) size_bytes=$(echo "$size_value * 1000" | bc);; + MB|Mo) size_bytes=$(echo "$size_value * 1000 * 1000" | bc);; + GB|Go) size_bytes=$(echo "$size_value * 1000 * 1000 * 1000" | bc);; + TB|To) size_bytes=$(echo "$size_value * 1000 * 1000 * 1000 * 1000" | bc);; + *) size_bytes=$size_value;; + esac + percent_threshold=0.01 # 1% error threshold, should be fine + percent_numerator=$(echo "$file_size_bytes - $size_bytes" | bc) + percent_diff=$(echo "${percent_numerator/-/} / (($file_size_bytes + $size_bytes) / 2)" | bc -l) + if (( $(echo "$percent_diff > $percent_threshold" |bc -l) )); then + echo -e "${RED}ERROR: The difference between the advertised and retrieved file size is too big${NC}\nThis is most likely due to someone else taking the slot or some other error along the way." 
+ echo -e "AdvertisedSize: $size ($size_bytes), Size: $file_size_bytes, Diff: $percent_diff" + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${file_size_bytes}" "${size}" + fi + return 1 + fi + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckFileSize "${remote_url}" ${file_size_bytes} ; then + return 1 + fi + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload + touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}" + rm -f "$flockDownload"; + received_file_size=0 + if [ -f "$file_path" ] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then + echo -e "${RED}ERROR: Size mismatch after downloading${NC}\nPerhaps you or 1fichier lost connection for a while?" + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + fi + ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" +} +#! +#! --------------- Host Extra Functions ------------------- # +#! +fich_FindEmptySlot() { + mkdir -p "${WorkDir}/.temp" + local lockfile="${WorkDir}/.temp/lockfile.lock" + timer_start=$(date +%s) + while [[ -e $lockfile ]]; do + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $lockfile; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + timer_duration=$(($(date +%s) - timer_start)) + echo -e "Lock file in place, time elapsed: $timer_duration seconds" + sleep 1 + printf "\033[F" + tput el + done + touch $lockfile + echo -e "\nSearching for a usable exit node" + mkdir -p "${WorkDir}/.temp" + rm -f "${WorkDir}/.temp/fich_adz_parameter" + sleep 0.2 + instances="10" + timer_start=$(date +%s) + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap 'rm -f "${WorkDir}/.flocks/${CLEANSTRING}"; rm -f $lockfile; tput el; echo -e "${RED}Aborted by user!${NC}"; for i in $(seq 1 $instances); do tput el; echo ""; done; rm -f "${WorkDir}/.temp/fich_cookie_jar"; rm -f "${WorkDir}/.temp/fich_adz_parameter"; rm -f "${WorkDir}/.temp/tor_identity"; echo -e "${RED}Aborted by user!${NC}"; tput cnorm; exit' 0 1 2 3 6 15 + for instance_no in $(seq 1 $instances); do + fich_FetchTorExitNode "${instance_no}" ${instances} & + sleep 0.1 + done + wait + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $lockfile; rm -f "${WorkDir}/.temp/fich_cookie_jar"; rm -f "${WorkDir}/.temp/fich_adz_parameter"; rm -f "${WorkDir}/.temp/tor_identity"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if [[ $(stat -c '%s' "${WorkDir}/.temp/fich_adz_parameter") -lt 1 ]]; then + echo -e "${RED}One of the instances ran out of attempts, try again later${NC}" + return 1 + fi + timer_end=$(date +%s) + timer_duration=$((timer_end - timer_start)) + tput el + echo -e "${GREEN}Search took ${timer_duration} seconds${NC}" + rm -f $lockfile +} +fich_FetchTorExitNode() { + local max_attempts=${CircuitRetries} + local iter=0 instance_no="$1" time_out=5 instances="$2" fich_cookie_jar="" 
tor_identity="" fich_adz_parameter="" PAGE="" + mkdir -p "${WorkDir}/.temp" + while :; do + if [[ "${iter}" -ge "${max_attempts}" ]]; then + rm -f "$fich_cookie_jar" + if [[ -s "${WorkDir}/.temp/fich_adz_parameter" ]]; then + rm -f "$fich_cookie_jar" + exit 1 + fi + echo "" > "${WorkDir}/.temp/fich_adz_parameter" + for i in $(seq 1 "$instances"); do echo ""; done + echo -e "\n${RED}Instance ${instance_no} ran out of attempts${NC}" + exit 1 + fi + ((iter++)) + if [[ -s "${WorkDir}/.temp/fich_adz_parameter" ]]; then + tput el + printf "Closing instance %s\r" "${instance_no}" + rm -f "${fich_cookie_jar}" # new + exit 1 + fi + for ((i=1; i<=instance_no; i++)) ; do + printf "\\n" + done + printf "Instance %s \t| Attempt %s \r" "${instance_no}" "${iter}/${max_attempts}" + for ((i=1; i<=instance_no; i++)) ; do + printf "\033[F" + done + if [[ ! $fich_adz_parameter ]]; then + rm -f "$fich_cookie_jar" + fich_cookie_jar=$(mktemp "${WorkDir}/.temp/fich_cookies""${instance_no}"".XXXXXX") + fi + tor_identity="${RANDOM}" + trap "rm -f ${fich_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -c "${fich_cookie_jar}" -s "${remote_url}") + if [[ -z ${PAGE} ]]; then + rm -f "${fich_cookie_jar}" + continue + fi + if grep -Eqi 'Warning !|Attention !' <<< "${PAGE}"; then + rm -f "${fich_cookie_jar}" + continue + else + fich_adz_parameter=$(grep -oPi 'name="adz" value="\K[^"]+' <<< "${PAGE}") + if [[ $fich_adz_parameter ]]; then + if [[ -s "${WorkDir}/.temp/fich_adz_parameter" ]]; then + rm -f "$fich_cookie_jar" + exit 1 + fi + echo "$fich_adz_parameter" > "${WorkDir}/.temp/fich_adz_parameter" + echo "$fich_cookie_jar" > "${WorkDir}/.temp/fich_cookie_jar" + echo "$tor_identity" > "${WorkDir}/.temp/fich_tor_identity" + rm -f "$fich_cookie_jar" + for i in $(seq 1 "$instances"); do echo ""; done + echo "" + tput el + echo -e "${GREEN}Slot found by instance ${instance_no}${NC}" + break + fi + fi + done + tput cnorm +} diff --git a/hosts/9saves.sh b/hosts/9saves.sh new file mode 100755 index 0000000..a152195 --- /dev/null +++ b/hosts/9saves.sh @@ -0,0 +1,427 @@ +#! Name: 9saves.sh +#! Author: kittykat +#! Version: 2024.09.13 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='ns' +HostNick='9saves' +HostFuncPrefix='ns' +HostUrls='9saves.com' +HostDomainRegex='^(http|https)://(.*\.)?9saves\.(com|org)' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! 
_DownloadFile() +ns_DownloadFile() { + local remote_url=${1} + local file_url=${1} + local filecnt=${2} + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + download_inflight_path="${WorkDir}/.inflight/" + mkdir -p "$download_inflight_path" + completed_location="${WorkDir}/downloads/" + tor_identity="${RANDOM}" + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [ $z -eq $MaxUrlRetries ] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if ns_FetchFileInfo $finalAttempt && ns_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then + return 0 + elif [ $z -lt $MaxUrlRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! +ns_FetchFileInfo() { + finalAttempt=$1 + maxfetchretries=6 + ns_cookie_jar="" + echo -e "${GREEN}# Fetching download link…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + ns_cookie_jar=$(mktemp "${WorkDir}/.temp/ns_cookies""${instance_no}"".XXXXXX") + printf " ." + tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ns_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "ns_dwnpage$i" "${response}" + fi + if [[ -z $response ]] ; then + if [ $i == $maxfetchretries ] ; then + rm -f "${ns_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract post link.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'No such file|404 NOT FOUND' <<< "$response"; then + rm -f "${ns_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. 
It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then + printf "\\n" + echo -e "${GREEN}| Post link found.${NC}" + post_op=$(grep -oP '(?<=input type="hidden" name="op" value=").*(?=">)' <<< "$response") + post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response") + post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">)' <<< "$response") + post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") + break + fi + if ((i == maxfetchretries)) ; then + rm -f "${ns_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract post link (unknown).${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + fi + done + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ns_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=&method_premium=" + response=$(tor_curl_request --insecure -L -s -X POST \ + -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" \ + --data "$form_data" "https://9saves.com/") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "ns_post" "form_data: ${form_data}"$'\n'"${response}" + fi + if [[ -z $response ]] ; then + rm -f "${ns_cookie_jar}"; + echo -e "${RED}| Failed to extract download link.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + fi + if grep -Eqi "File Not Found" <<< "$response"; then + rm -f "${ns_cookie_jar}"; + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'This direct link will be available for' <<< "$response"; then + echo -e "${GREEN}| Download link found.${NC}" + download_url=$(grep -oP '(?<=class="btn" href=").*(?=.*9saves\.com).*(?=">Download)' <<< "$response") + filename=$(grep -oP '(?<=Filename: ).*(?=
)' <<< "$response") + else + rm -f "${ns_cookie_jar}"; + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if [[ -z $download_url ]] ; then + rm -f "${ns_cookie_jar}"; + echo -e "${RED}| Failed to extract download link.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + fi + download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") + maxfetchretries=6 + echo -e "${GREEN}# Fetching file info…${NC}" + for ((j=1; j<=$maxfetchretries; j++)); do + printf " ." + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ns_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + file_header=$(tor_curl_request --insecure --head -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$download_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "ns_head$j" "download_url: ${download_url}"$'\n'"${file_header}" + fi + if [[ -z $file_header ]] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${ns_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi '404 Not Found' <<< "$file_header"; then + rm -f "${ns_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + fi + if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${ns_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info (no 200 response).${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'location:' <<< "$file_header"; then + download_url=$(grep -oPi '(?<=location: ).*' <<< "$file_header") + download_url=${download_url//[$'\t\r\n']} + fi + file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + if [ -z $file_size_bytes ] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${ns_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Filesize not found…${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Filesize not found!" "" + fi + return 1 + else + continue + fi + else + file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" + fi + break #Good to go here + done + touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + if [ ! 
"$filename_override" == "" ] ; then + filename="$filename_override" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + printf "\\n" + echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" + echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}" + file_path="${download_inflight_path}${filename}" + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then + return 1 + fi + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! +ns_GetFile() { + echo -e "${GREEN}# Downloading…" + echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n" + fileCnt=$1 + retryCnt=$2 + finalAttempt=$3 + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + for ((j=1; j<=$MaxDownloadRetries; j++)); do + pd_presize=0 + if [ -f "$file_path" ] ; then + pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + GetRandomUA + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ns_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure \ + -b "${ns_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + "$download_url" \ + --continue-at - --output "$file_path" + else + tor_curl_request --insecure \ + -b "${ns_cookie_jar}" \ + "$download_url" \ + --continue-at - --output "$file_path" + fi + else + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure \ + --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + -b "${ns_cookie_jar}" \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + "$download_url" \ + --continue-at - --output "$file_path" + else + tor_curl_request --insecure \ + -b "${ns_cookie_jar}" \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + "$download_url" \ + --continue-at - --output "$file_path" + fi + fi + received_file_size=0 + if [ -f "$file_path" ] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + containsHtml=false + else + containsHtml=true + fi + downDelta=$(( received_file_size - pd_presize )) + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then + if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 10240 )) ; then + if [ -f 
"${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then + if [ -f "$file_path" ] ; then + rm -rf "$file_path" + fi + echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then + echo -e "\n${RED}Download failed, file is incomplete.${NC}" + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + else + break + fi + done + rm -f "$flockDownload"; + rm -f "${ns_cookie_jar}"; + ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" + return 0 +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/acid.sh b/hosts/acid.sh new file mode 100755 index 0000000..a23941e --- /dev/null +++ b/hosts/acid.sh @@ -0,0 +1,31 @@ +#! Name: acid.sh +#! Author: kittykat +#! Version: 2024.09.29 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! 
HostDomainRegex: The regex used to verify matching urls +HostCode='acid' +HostNick='acid.fr' +HostFuncPrefix='direct' +HostUrls='dl.acid.fr' +HostDomainRegex='^(http|https)://(.*\.)?dl\.acid\.fr/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! This is a direct= download host, so all the functions are already in mad.sh +#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will +#! call the direct_DownloadFile() function already in mad.sh diff --git a/hosts/anarchaserver.sh b/hosts/anarchaserver.sh new file mode 100755 index 0000000..7451284 --- /dev/null +++ b/hosts/anarchaserver.sh @@ -0,0 +1,31 @@ +#! Name: anarchaserver.sh +#! Author: kittykat +#! Version: 2024.09.27 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='anarc' +HostNick='anarchaserver' +HostFuncPrefix='direct' +HostUrls='transitional.anarchaserver.org' +HostDomainRegex='^(http|https)://(.*\.)?transitional\.anarchaserver\.org/jirafeau/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! This is a direct= download host, so all the functions are already in mad.sh +#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will +#! call the direct_DownloadFile() function already in mad.sh diff --git a/hosts/anonsharing.sh b/hosts/anonsharing.sh new file mode 100755 index 0000000..6f3fadc --- /dev/null +++ b/hosts/anonsharing.sh @@ -0,0 +1,315 @@ +#! Name: anonsharing.sh +#! Author: kittykat +#! Version: 2024.10.30 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! 
HostDomainRegex: The regex used to verify matching urls +HostCode='ansh' +HostNick='anonsharing' +HostFuncPrefix='ansh' +HostUrls='anonsharing.com' +HostDomainRegex='^(http|https)://(.*\.)?anonsharing\.com/fileid\=' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! _DownloadFile() +ansh_DownloadFile() { + local remote_url=${1} + local file_url=${1} + local filecnt=${2} + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + download_inflight_path="${WorkDir}/.inflight/" + mkdir -p "$download_inflight_path" + completed_location="${WorkDir}/downloads/" + tor_identity="${RANDOM}" + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [ $z -eq $MaxUrlRetries ] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if ansh_FetchFileInfo $finalAttempt && ansh_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then + return 0 + elif [ $z -lt $MaxUrlRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! +ansh_FetchFileInfo() { + finalAttempt=$1 + maxfetchretries=5 + ansh_cookie_jar="" + local fileid="${remote_url##*fileid\=}" + echo -e "${GREEN}# Fetching download link…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + ansh_cookie_jar=$(mktemp "${WorkDir}/.temp/ansh_cookies""${instance_no}"".XXXXXX") + printf " ." 
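+    # rotate to a fresh Tor identity for this attempt before posting the file id to the file_details endpoint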
+ tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ansh_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -i -s \ + -b "${ansh_cookie_jar}" -c "${ansh_cookie_jar}" \ + -F "u=$fileid" \ + -F "p=true" \ + "https://anonsharing.com/account/ajax/file_details") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "ansh_postfileinfo$i" "${response}" + fi + if [[ -z $response ]] ; then + rm -f "${ansh_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [1]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi "There is no such file|File was deleted because" <<< "$response"; then + rm -f "${ansh_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi 'openUrl\('"'"'https:\\/\\/anonsharing.com\\/' <<< "$response"; then + printf "\\n" + echo -e "${GREEN}| Link found.${NC}" + download_url='https://anonsharing.com/'$(grep -oPi '(?<=openUrl\('"'"'https:\\/\\/anonsharing.com\\/).*?(?='"'"'.*$)' <<< "$response" | head -1) + download_url="${download_url//\\\//\/}" + download_url="${download_url//[$'\t\r\n\0']}" + break + else + rm -f "${ansh_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link [2]${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" + fi + return 1 + else + continue + fi + fi + break #Good to go here + done + bLocationFound=false + for ((j=1; j<=$maxfetchretries; j++)); do + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ansh_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if [ "$bLocationFound" == "false" ]; then + echo -e "${GREEN}# Fetching cdn and file info…${NC}" + file_header=$(tor_curl_request --insecure --head -L -i -s \ + -b "${ansh_cookie_jar}" -c "${ansh_cookie_jar}" \ + "$download_url") + else + echo -e "${GREEN}# Fetching file info…${NC}" + ansh_host="${download_url/https:\/\//}" + ansh_host="${ansh_host%%\/*}" + rm -f "${WorkDir}/.temp/directhead" + file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -s -i \ + -H "Host: $ansh_host" \ + "$download_url" | + tee "${WorkDir}/.temp/directhead" & + sleep 6 + [ -s "${WorkDir}/.temp/directhead" ] + kill $! 2>/dev/null + ) + if [ ! 
-f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + fi + rm -f "${WorkDir}/.temp/directhead" + fi + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "ansh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" + fi + if [[ -z $file_header ]] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${ansh_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi '404 Not Found' <<< "$file_header"; then + rm -f "${ansh_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + fi + if grep -Eqi '302|location: ' <<< $file_header ; then + bLocationFound=true + download_url=$(grep -oPi -m 1 '(?<=location: ).*?(?=$)' <<< "$file_header") + download_url=${download_url//[$'\t\r\n']} + else + rm -f "${ansh_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to get download url (no location)${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to get download url (no location)" "" + fi + return 1 + fi + filename=$(grep -oP '(?<=filename=").*?(?=".*$)' <<< "$file_header") + if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${ansh_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file name${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract file name" "" + fi + return 1 + else + continue + fi + fi + break #Good to go here + done + rm -f "${ansh_cookie_jar}"; + touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + if [ ! "$filename_override" == "" ] ; then + filename="$filename_override" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + printf "\\n" + echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" + if [ -z $file_size_bytes ] ; then + file_size_readable="${RED}Unknown filesize…${NC}" + else + file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" + fi + echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}" + file_path="${download_inflight_path}${filename}" + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then + return 1 + fi + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! 
+ansh_GetFile() { + echo -e "${GREEN}# Downloading…" + echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n" + fileCnt=$1 + retryCnt=$2 + finalAttempt=$3 + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + for ((j=1; j<=$MaxDownloadRetries; j++)); do + pd_presize=0 + if [ -f "$file_path" ] ; then + pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + GetRandomUA + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ansh_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)" + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --output "$file_path" + else + tor_curl_request --insecure "$download_url" --output "$file_path" + fi + received_file_size=0 + if [ -f "$file_path" ] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + containsHtml=false + else + containsHtml=true + fi + if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + break + done + rm -f "$flockDownload"; + ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" + return 0 +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/archived/nekofile.sh b/hosts/archived/nekofile.sh new file mode 100755 index 0000000..5432654 --- /dev/null +++ b/hosts/archived/nekofile.sh @@ -0,0 +1,32 @@ +#! Name: nekofile.sh +#! Author: kittykat +#! Version: 2024.09.13 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! NOTES: nekofile has been shutdown by the hoster due to reported violations !!! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='neko' +HostNick='nekofile' +HostFuncPrefix='direct' +HostUrls='nekofile.eu.org' +HostDomainRegex='^(http|https)://(.*\.)?nekofile.eu.org/' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! 
This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! This is a direct= download host, so all the functions are already in mad.sh +#! Since the HostFuncPrefix is defined above as "direct", nothing further needs to be done as it will +#! call the direct_DownloadFile() function already in mad.sh \ No newline at end of file diff --git a/hosts/bedrive.sh b/hosts/bedrive.sh new file mode 100755 index 0000000..f38418c --- /dev/null +++ b/hosts/bedrive.sh @@ -0,0 +1,406 @@ +#! Name: bedrive.sh +#! Author: kittykat +#! Version: 2024.10.24 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data +#! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' +#! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) +#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) +#! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) +#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) +#! HostDomainRegex: The regex used to verify matching urls +HostCode='bd' +HostNick='bedrive' +HostFuncPrefix='bd' +HostUrls='bedrive.ru' +HostDomainRegex='^(http|https)://(.*\.)?bedrive\.ru' +#! +#! !! DO NOT UPDATE OR REMOVE !! +#! This merges the Required HostAndDomainRegexes into mad.sh +ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' +#! +#! +#! ------------ (1) Host Main Download Function --------------- # +#! +#! @REQUIRED: Host Main Download function +#! Must be named specifically as such: +#! 
_DownloadFile() +bd_DownloadFile() { + local remote_url=${1} + local file_url=${1} + local filecnt=${2} + warnAndRetryUnknownError=false + exitDownloadError=false + exitDownloadNotAvailable=false + fileAlreadyDone=false + download_inflight_path="${WorkDir}/.inflight/" + mkdir -p "$download_inflight_path" + completed_location="${WorkDir}/downloads/" + tor_identity="${RANDOM}" + finalAttempt="false" + for ((z=0; z<=$MaxUrlRetries; z++)); do + if [ $z -eq $MaxUrlRetries ] ; then + finalAttempt="true" + fi + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if bd_FetchFileInfo $finalAttempt && bd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then + return 0 + elif [ $z -lt $MaxUrlRetries ]; then + if [ "${fileAlreadyDone}" == "true" ] ; then + break + fi + if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" + fi + fi + if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" + fi + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + break + fi + echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}" + sleep 3 + fi + done + rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +} +#! +#! ------------- (2) Fetch File Info Function ----------------- # +#! +bd_FetchFileInfo() { + finalAttempt=$1 + maxfetchretries=5 + bd_cookie_jar="" + echo -e "${GREEN}# Fetching ticket link…${NC}" + for ((i=1; i<=$maxfetchretries; i++)); do + mkdir -p "${WorkDir}/.temp" + bd_cookie_jar=$(mktemp "${WorkDir}/.temp/bd_cookies""${instance_no}"".XXXXXX") + printf " ." + tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${bd_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s \ + -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ + "$remote_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "bd_fetch$i" "${response}" + fi + if [[ -z $response ]] ; then + rm -f "${bd_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi "There is no such file|File was deleted because" <<< "$response"; then + rm -f "${bd_cookie_jar}"; + printf "\\n" + echo -e "${RED}| The file was not found. 
It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi ",e='.*'" <<< "$response"; then + printf "\\n" + echo -e "${GREEN}| Link found.${NC}" + bd_encodedurl=$(grep -oPi '(?<=,e='"'"').*?(?='"'"',.*$)' <<< "$response") + bd_encodedurl="${bd_encodedurl//[$'\t\r\n\0']}" + download_url=$(echo "$bd_encodedurl" | base64 --decode) + download_url="${download_url//[$'\t\r\n\0']}" + break + else + rm -f "${bd_cookie_jar}"; + if [ $i == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + break #Good to go here + done + echo -e "${GREEN}# Waiting 20 sec…${NC}" + sleep 20s + echo -e "${GREEN}# Fetching file info…${NC}" + for ((j=1; j<=$maxfetchretries; j++)); do + printf " ." + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${bd_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + file_header=$(tor_curl_request --insecure --head -L -i -s \ + -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ + "$download_url") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "bd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" + fi + if [[ -z $file_header ]] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${bd_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi '404 Not Found' <<< "$file_header"; then + rm -f "${bd_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info.${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + fi + if grep -Eqi '302|location: ' <<< $file_header ; then + download_url=$(grep -oPi -m 1 '(?<=location: ).*?(?=$)' <<< "$file_header") + download_url=${download_url//[$'\t\r\n']} + else + rm -f "${bd_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to get download url (no location)${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to get download url (no location)" "" + fi + return 1 + fi + if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${bd_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info" "" + fi + return 1 + else + continue + fi + fi + filename=$(grep -oP '(?<=filename=").*?(?=".*$)' <<< "$file_header") + if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then + if [ $j == $maxfetchretries ] ; then + rm -f "${bd_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file name${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Failed to extract file name" "" + fi + return 1 + else + continue + fi + fi + break #Good to go here + done + touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + if [ ! 
"$filename_override" == "" ] ; then + filename="$filename_override" + fi + filename=$(sanitize_file_or_folder_name "${filename}") + printf "\\n" + echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" + file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + if [ -z $file_size_bytes ] ; then + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "Filesize not found!" "" + fi + echo -e "${YELLOW}| Filesize not found… retry${NC}" + return 1 + else + file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" + fi + echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}" + file_path="${download_inflight_path}${filename}" + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then + return 1 + fi + if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then + return 1 + fi + echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload +} +#! +#! ----------- (3) Fetch File / Download File Function --------------- # +#! +bd_GetFile() { + echo -e "${GREEN}# Downloading…" + echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n" + fileCnt=$1 + retryCnt=$2 + finalAttempt=$3 + flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" + for ((j=1; j<=$MaxDownloadRetries; j++)); do + pd_presize=0 + if [ -f "$file_path" ] ; then + pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + GetRandomUA + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${bd_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure -L -G --no-alpn \ + -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + --referer "$remote_url" "$download_url" \ + --continue-at - --output "$file_path" + else + tor_curl_request --insecure -L -G --no-alpn \ + -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ + --referer "$remote_url" "$download_url" \ + --continue-at - --output "$file_path" + fi + else + if [ "${RateMonitorEnabled}" == "true" ]; then + tor_curl_request --insecure -L -G --no-alpn \ + -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + --referer "$remote_url" "$download_url" \ + --continue-at - --output "$file_path" + else + tor_curl_request --insecure -L -G --no-alpn \ + -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ + -H "User-Agent: $RandomUA" \ + -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ + -H "Accept-Language: en-US,en;q=0.5" \ + -H "Accept-Encoding: gzip, deflate, br" \ + -H "Connection: keep-alive" \ + -H "Cookie: lng=eng" \ + -H "Upgrade-Insecure-Requests: 1" \ + -H "Sec-Fetch-Dest: document" \ + -H "Sec-Fetch-Mode: 
navigate" \ + -H "Sec-Fetch-Site: same-origin" \ + -H "Sec-Fetch-User: ?1" \ + --referer "$remote_url" "$download_url" \ + --continue-at - --output "$file_path" + fi + fi + received_file_size=0 + if [ -f "$file_path" ] ; then + received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') + fi + if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then + containsHtml=false + else + containsHtml=true + fi + downDelta=$(( received_file_size - pd_presize )) + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then + if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then + if [ -f "${file_path}" ] ; then + if ((pd_presize > 0)); then + echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." + truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" + truncate -s $pd_presize "${file_path}" + else + echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + rm -f "${file_path}" + fi + fi + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then + if [ -f "$file_path" ] ; then + rm -rf "$file_path" + fi + echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then + echo -e "\n${RED}Download failed, file is incomplete.${NC}" + if ((j >= $MaxDownloadRetries)) ; then + rm -f "$flockDownload"; + if [ "${finalAttempt}" == "true" ] ; then + droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" + fi + return 1 + else + continue + fi + fi + else + break + fi + done + rm -f "$flockDownload"; + rm -f "${bd_cookie_jar}"; + ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path" + return 0 +} +#! +#! --------------- Host Extra Functions ------------------- # +#! diff --git a/hosts/biteblob.sh b/hosts/biteblob.sh new file mode 100755 index 0000000..fbfb170 --- /dev/null +++ b/hosts/biteblob.sh @@ -0,0 +1,364 @@ +#! Name: biteblob.sh +#! Author: kittykat +#! Version: 2024.09.13 +#! Desc: Add support for downloading and processing of urls for a new host +#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder +#! +#! +#! ------------ REQUIRED SECTION --------------- +#! 
diff --git a/hosts/biteblob.sh b/hosts/biteblob.sh
new file mode 100755
index 0000000..fbfb170
--- /dev/null
+++ b/hosts/biteblob.sh
@@ -0,0 +1,364 @@
+#! Name: biteblob.sh
+#! Author: kittykat
+#! Version: 2024.09.13
+#! Desc: Add support for downloading and processing of urls for a new host
+#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
+#!
+#!
+#! ------------ REQUIRED SECTION ---------------
+#!
+#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
+#! Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
+#!   HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts)
+#!   HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
+#!   HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
+#!     * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
+#!   HostUrls: The host's domain name(s) merged into the registration string (ie. 'biteblob.com')
+#!   HostDomainRegex: The regex used to verify matching urls
+HostCode='bite'
+HostNick='biteblob'
+HostFuncPrefix='bite'
+HostUrls='biteblob.com'
+HostDomainRegex='^(http|https)://(.*\.)?biteblob\.(com|org)'
+#!
+#! !! DO NOT UPDATE OR REMOVE !!
+#! This merges the Required HostAndDomainRegexes into mad.sh
+ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
+#!
+#!
+#! ------------ (1) Host Main Download Function --------------- #
+#!
+#! @REQUIRED: Host Main Download function
+#! Must be named specifically as such:
+#! _DownloadFile()
+bite_DownloadFile() {
+  local remote_url=${1}
+  local file_url=${1}
+  local filecnt=${2}
+  warnAndRetryUnknownError=false
+  exitDownloadError=false
+  exitDownloadNotAvailable=false
+  fileAlreadyDone=false
+  download_inflight_path="${WorkDir}/.inflight/"
+  mkdir -p "$download_inflight_path"
+  completed_location="${WorkDir}/downloads/"
+  tor_identity="${RANDOM}"
+  finalAttempt="false"
+  for ((z=0; z<=$MaxUrlRetries; z++)); do
+    if [ $z -eq $MaxUrlRetries ] ; then
+      finalAttempt="true"
+    fi
+    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
+    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
+    if bite_FetchFileInfo $finalAttempt && bite_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
+      return 0
+    elif [ $z -lt $MaxUrlRetries ]; then
+      if [ "${fileAlreadyDone}" == "true" ] ; then
+        break
+      fi
+      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
+        fi
+      fi
+      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
+        if [ "${DebugAllEnabled}" == "true" ] ; then
+          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
+        fi
+        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+        break
+      fi
+      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
+      sleep 3
+    fi
+  done
+  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+}
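For reference: the wrapper above reports hard failures only once because finalAttempt flips to "true" solely on the last pass of the URL retry loop. A stripped-down version of the same pattern; do_fetch and report_failure are placeholder names, not MAD helpers:

  MaxUrlRetries=3
  finalAttempt="false"
  for ((z=0; z<=MaxUrlRetries; z++)); do
    if [ "$z" -eq "$MaxUrlRetries" ]; then
      finalAttempt="true"
    fi
    if do_fetch; then
      break                          # success, stop retrying
    fi
    if [ "$finalAttempt" == "true" ]; then
      report_failure                 # only the final failure is reported
    else
      sleep 3                        # brief pause before the next attempt
    fi
  done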
+ tor_identity="${RANDOM}" + CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} + trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 + response=$(tor_curl_request --insecure -L -s "${fixed_url}") + if [ "${DebugAllEnabled}" == "true" ] ; then + debugHtml "${remote_url##*/}" "bite_dwnpage$j" "url: $fixed_url"$'\n'"${response}" + fi + if [[ -z $response ]] ; then + if [ $j == $maxfetchretries ] ; then + printf "\\n" + echo -e "${RED}| Failed to extract download link${NC}" + warnAndRetryUnknownError=true + if [ "${finalAttempt}" == "true" ] ; then + failedRetryDownload "${remote_url}" "" "" + fi + return 1 + else + continue + fi + fi + if grep -Eqi 'Not Found|Invalid Request|Link Unauthorized|No download available|file was removed|file has been deleted' <<< "$response"; then + printf "\\n" + echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" + exitDownloadError=true + removedDownload "${remote_url}" + return 1 + fi + if grep -Eqi '