diff --git a/.audit/mad-audit-curl.log b/.audit/mad-audit-curl.log
index 173fb81..71d76b6 100755
--- a/.audit/mad-audit-curl.log
+++ b/.audit/mad-audit-curl.log
@@ -1,4 +1,4 @@
-DateTime: 25.02.13
+DateTime: 25.02.19
 
 Files:
 ./hosts/1fichier.sh
@@ -128,6 +128,7 @@ Files:
 ./hosts/up_quax.sh
 ./hosts/up_ramsgaard.sh
 ./hosts/up_ranoz.sh
+./hosts/up_sendnow.sh
 ./hosts/up_shareonline.sh
 ./hosts/up_skrepr.sh
 ./hosts/up_soyjak.sh
@@ -167,7 +168,7 @@ _________________________________________________________________________
 ./hosts/9saves.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url")
 ./hosts/9saves.sh:139: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/9saves.sh:188: file_header=$(tor_curl_request --insecure --head -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$download_url")
-./hosts/9saves.sh:290: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/9saves.sh:290: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/9saves.sh:292: tor_curl_request --insecure \
 ./hosts/9saves.sh:297: tor_curl_request --insecure \
 ./hosts/9saves.sh:304: tor_curl_request --insecure \
@@ -176,8 +177,8 @@ _________________________________________________________________________
 ./hosts/anonfile.sh:186: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/anonfile.sh:240: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
 ./hosts/anonfile.sh:340: response=$(tor_curl_request --insecure -L -s -X POST \
-./hosts/anonfile.sh:451: file_header=$(tor_curl_request -i -s --head \
-./hosts/anonfile.sh:557: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/anonfile.sh:453: file_header=$(tor_curl_request -i -s --head \
+./hosts/anonfile.sh:557: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/anonfile.sh:559: tor_curl_request --insecure \
 ./hosts/anonfile.sh:565: tor_curl_request --insecure \
 ./hosts/anonfile.sh:572: tor_curl_request --insecure \
@@ -188,14 +189,14 @@ _________________________________________________________________________
 ./hosts/anonsharing.sh:273: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --output "$file_path"
 ./hosts/anonsharing.sh:275: tor_curl_request --insecure "$download_url" --output "$file_path"
 ./hosts/ateasystems.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
-./hosts/ateasystems.sh:218: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/ateasystems.sh:218: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/ateasystems.sh:220: tor_curl_request --insecure \
 ./hosts/ateasystems.sh:225: tor_curl_request --insecure \
 ./hosts/ateasystems.sh:231: tor_curl_request --insecure \
 ./hosts/ateasystems.sh:237: tor_curl_request --insecure \
 ./hosts/bedrive.sh:90: response=$(tor_curl_request --insecure -L -s \
 ./hosts/bedrive.sh:149: file_header=$(tor_curl_request --insecure --head -L -i -s \
-./hosts/bedrive.sh:270: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/bedrive.sh:270: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/bedrive.sh:272: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/bedrive.sh:277: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/bedrive.sh:284: tor_curl_request --insecure -L -G --no-alpn \
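Note on the recurring change in the hunks above and below: every single-bracket test around ${UseTorCurlImpersonate} moves to the bash keyword [[ ]]. The patched lines already quote their operands, so this is mainly defence in depth; a minimal illustration (not from the patch) of the failure class [[ ]] rules out:

    #!/bin/bash
    flag=""                                   # stand-in for an unset/empty config toggle
    # [ ] is an ordinary command: the unquoted empty variable word-splits away,
    # leaving "[ == true ]" and a runtime "unary operator expected" error.
    if [ $flag == "true" ]; then echo "impersonate"; fi
    # [[ ]] is shell syntax: operands are not word-split or glob-expanded,
    # so the same test simply evaluates to false.
    if [[ $flag == "true" ]]; then echo "impersonate"; fi
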
@@ -208,7 +209,7 @@ _________________________________________________________________________
 ./hosts/bowfile.sh:91: response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \
 ./hosts/bowfile.sh:143: response=$(tor_curl_request --insecure -s --head \
 ./hosts/bowfile.sh:182: file_header=$(tor_curl_request --insecure -L -sS -i --head \
-./hosts/bowfile.sh:297: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/bowfile.sh:297: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/bowfile.sh:299: tor_curl_request --insecure -L \
 ./hosts/bowfile.sh:305: tor_curl_request --insecure -L \
 ./hosts/bowfile.sh:312: tor_curl_request --insecure -L \
@@ -217,7 +218,7 @@ _________________________________________________________________________
 ./hosts/click.sh:226: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/click.sh:345: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/click.sh:434: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
-./hosts/click.sh:533: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/click.sh:533: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/click.sh:535: tor_curl_request --insecure \
 ./hosts/click.sh:542: tor_curl_request --insecure \
 ./hosts/click.sh:550: tor_curl_request --insecure \
@@ -226,7 +227,7 @@ _________________________________________________________________________
 ./hosts/dailyuploads.sh:139: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img"
 ./hosts/dailyuploads.sh:286: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/dailyuploads.sh:392: file_header=$(tor_curl_request -i -s --head \
-./hosts/dailyuploads.sh:496: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/dailyuploads.sh:496: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/dailyuploads.sh:498: tor_curl_request --insecure \
 ./hosts/dailyuploads.sh:504: tor_curl_request --insecure \
 ./hosts/dailyuploads.sh:511: tor_curl_request --insecure \
@@ -235,7 +236,7 @@ _________________________________________________________________________
 ./hosts/dashfile.sh:177: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/dashfile.sh:308: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/dashfile.sh:397: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
-./hosts/dashfile.sh:495: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/dashfile.sh:495: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/dashfile.sh:497: tor_curl_request --insecure \
 ./hosts/dashfile.sh:502: tor_curl_request --insecure \
 ./hosts/dashfile.sh:508: tor_curl_request --insecure \
@@ -243,7 +244,7 @@ _________________________________________________________________________
 ./hosts/dataupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url")
 ./hosts/dataupload.sh:166: response=$(tor_curl_request --insecure -svo. -X POST \
 ./hosts/dataupload.sh:234: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
-./hosts/dataupload.sh:349: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/dataupload.sh:349: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/dataupload.sh:351: tor_curl_request --insecure \
 ./hosts/dataupload.sh:357: tor_curl_request --insecure \
 ./hosts/dataupload.sh:364: tor_curl_request --insecure \
@@ -251,22 +252,22 @@ _________________________________________________________________________
 ./hosts/desiupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url")
 ./hosts/desiupload.sh:202: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/desiupload.sh:306: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
-./hosts/desiupload.sh:404: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/desiupload.sh:404: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/desiupload.sh:406: tor_curl_request --insecure \
 ./hosts/desiupload.sh:411: tor_curl_request --insecure \
 ./hosts/desiupload.sh:417: tor_curl_request --insecure \
 ./hosts/desiupload.sh:433: tor_curl_request --insecure \
-./hosts/dosya.sh:108: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/dosya.sh:108: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
 ./hosts/dosya.sh:113: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
-./hosts/dosya.sh:172: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/dosya.sh:172: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/dosya.sh:173: file_header=$(tor_curl_request_extended --insecure --head -L -s \
 ./hosts/dosya.sh:179: file_header=$(tor_curl_request_extended --insecure --head -L -s \
 ./hosts/dosya.sh:402: tor_curl_request -L -G --insecure \
 ./hosts/dosya.sh:417: tor_curl_request -L -G --insecure \
 ./hosts/downloadgg.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" "$remote_url")
 ./hosts/downloadgg.sh:169: response=$(tor_curl_request --insecure -svo. -X POST \
-./hosts/downloadgg.sh:255: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/downloadgg.sh:255: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/downloadgg.sh:257: tor_curl_request --insecure -X POST \
 ./hosts/downloadgg.sh:265: tor_curl_request --insecure -X POST \
 ./hosts/downloadgg.sh:275: tor_curl_request --insecure -X POST \
@@ -279,7 +280,7 @@ _________________________________________________________________________
 ./hosts/fileblade.sh:165: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/fileblade.sh:281: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/fileblade.sh:335: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
-./hosts/fileblade.sh:450: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/fileblade.sh:450: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/fileblade.sh:452: tor_curl_request --insecure -L \
 ./hosts/fileblade.sh:456: tor_curl_request --insecure -L \
 ./hosts/fileblade.sh:461: tor_curl_request --insecure \
@@ -294,9 +295,9 @@ _________________________________________________________________________
 ./hosts/filedot.sh:406: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
 ./hosts/filedot.sh:499: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/filedot.sh:501: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
-./hosts/filehaus.sh:100: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url")
-./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/filehaus.sh:195: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/filehaus.sh:101: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url")
+./hosts/filehaus.sh:191: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path"
 ./hosts/firestorage.sh:98: response=$(tor_curl_request --insecure -L -s "${fixed_url}")
 ./hosts/firestorage.sh:226: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
 ./hosts/firestorage.sh:335: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
@@ -304,8 +305,8 @@ _________________________________________________________________________
 ./hosts/gofile.sh:97: response=$(tor_curl_request --insecure -s -X POST \
 ./hosts/gofile.sh:170: response=$(tor_curl_request --insecure -G -L -s \
 ./hosts/gofile.sh:258: file_header=$(tor_curl_request --insecure -L --head -s \
-./hosts/gofile.sh:377: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
-./hosts/gofile.sh:391: tor_curl_request --insecure -G \
+./hosts/gofile.sh:393: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
+./hosts/gofile.sh:407: tor_curl_request --insecure -G \
 ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
@@ -340,12 +341,12 @@ _________________________________________________________________________
 ./hosts/nippy.sh:188: file_header=$(tor_curl_request --insecure -L --head -s \
 ./hosts/nippy.sh:299: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
 ./hosts/nippy.sh:302: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
-./hosts/oshi.sh:101: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url")
-./hosts/oshi.sh:195: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./hosts/oshi.sh:197: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/oshi.sh:108: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url")
+./hosts/oshi.sh:202: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./hosts/oshi.sh:204: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
 ./hosts/pixeldrain.sh:94: response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid")
 ./hosts/pixeldrain.sh:256: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl")
-./hosts/pixeldrain.sh:322: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/pixeldrain.sh:322: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/pixeldrain.sh:324: tor_curl_request --insecure \
 ./hosts/pixeldrain.sh:328: tor_curl_request --insecure \
 ./hosts/pixeldrain.sh:333: tor_curl_request --insecure \
@@ -355,40 +356,40 @@ _________________________________________________________________________
 ./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path"
 ./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
 ./hosts/ranoz.sh:160: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
-./hosts/ranoz.sh:270: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/ranoz.sh:270: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/ranoz.sh:272: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/ranoz.sh:276: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url")
 ./hosts/sendnow.sh:160: response=$(tor_curl_request --insecure -L -svo. -X POST \
-./hosts/sendnow.sh:203: file_header=$(tor_curl_request_extended --insecure --head -Lis \
-./hosts/sendnow.sh:324: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./hosts/sendnow.sh:326: tor_curl_request_extended --insecure -L --no-alpn \
-./hosts/sendnow.sh:344: tor_curl_request --insecure -L --no-alpn \
-./hosts/sendnow.sh:363: tor_curl_request --insecure -L --no-alpn \
-./hosts/sendnow.sh:382: tor_curl_request --insecure -L --no-alpn \
+./hosts/sendnow.sh:204: file_header=$(tor_curl_request_extended --insecure --head -Lis \
+./hosts/sendnow.sh:325: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./hosts/sendnow.sh:327: tor_curl_request_extended --insecure -L --no-alpn \
+./hosts/sendnow.sh:345: tor_curl_request --insecure -L --no-alpn \
+./hosts/sendnow.sh:364: tor_curl_request --insecure -L --no-alpn \
+./hosts/sendnow.sh:383: tor_curl_request --insecure -L --no-alpn \
 ./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
-./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/syspro.sh:186: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/syspro.sh:188: tor_curl_request --insecure -L \
 ./hosts/syspro.sh:193: tor_curl_request --insecure \
 ./hosts/syspro.sh:199: tor_curl_request --insecure -L \
 ./hosts/syspro.sh:205: tor_curl_request --insecure -L \
 ./hosts/tempfileme.sh:89: response=$(tor_curl_request --insecure -L -s "$remote_url")
 ./hosts/tempfileme.sh:170: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url}" "$download_url")
-./hosts/tempfileme.sh:298: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/tempfileme.sh:298: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/tempfileme.sh:300: tor_curl_request --insecure -L \
 ./hosts/tempfileme.sh:305: tor_curl_request --insecure -L \
 ./hosts/tempfileme.sh:311: tor_curl_request --insecure -L \
 ./hosts/tempfileme.sh:326: tor_curl_request --insecure -L \
 ./hosts/tempsh.sh:88: file_header=$(tor_curl_request --insecure -s -D - -o /dev/null -X POST \
-./hosts/tempsh.sh:225: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/tempsh.sh:225: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/tempsh.sh:227: tor_curl_request --insecure -X POST \
 ./hosts/tempsh.sh:231: tor_curl_request --insecure -X POST \
 ./hosts/tempsh.sh:236: tor_curl_request --insecure -X POST \
 ./hosts/tempsh.sh:250: tor_curl_request --insecure -X POST \
 ./hosts/torup.sh:92: response=$(tor_curl_request --insecure -L -s \
-./hosts/torup.sh:188: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/torup.sh:188: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/torup.sh:190: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/torup.sh:196: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/torup.sh:203: tor_curl_request --insecure -L -G --no-alpn \
@@ -396,14 +397,14 @@ _________________________________________________________________________
 ./hosts/up2share.sh:91: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
 ./hosts/up2share.sh:144: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
 ./hosts/up2share.sh:195: file_header=$(tor_curl_request --insecure -L -s --head \
-./hosts/up2share.sh:312: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/up2share.sh:312: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/up2share.sh:314: tor_curl_request --insecure -L \
 ./hosts/up2share.sh:321: tor_curl_request --insecure -L \
 ./hosts/up2share.sh:329: tor_curl_request --insecure -L \
 ./hosts/up2share.sh:347: tor_curl_request --insecure -L \
 ./hosts/uploadee.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url")
 ./hosts/uploadee.sh:143: file_header=$(tor_curl_request --insecure --head -L -s -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --referer "$remote_url" "$download_url")
-./hosts/uploadee.sh:249: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/uploadee.sh:249: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/uploadee.sh:251: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/uploadee.sh:257: tor_curl_request --insecure -L -G --no-alpn \
 ./hosts/uploadee.sh:265: tor_curl_request --insecure -L -G --no-alpn \
@@ -411,7 +412,7 @@ _________________________________________________________________________
 ./hosts/uploadev.sh:91: response=$(tor_curl_request --insecure -L -s -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \
 ./hosts/uploadev.sh:181: response=$(tor_curl_request --insecure -L -s -X POST \
 ./hosts/uploadev.sh:268: file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
-./hosts/uploadev.sh:367: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./hosts/uploadev.sh:367: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./hosts/uploadev.sh:369: tor_curl_request --insecure -L \
 ./hosts/uploadev.sh:374: tor_curl_request --insecure -L \
 ./hosts/uploadev.sh:380: tor_curl_request --insecure -L \
@@ -422,10 +423,9 @@ _________________________________________________________________________
 ./hosts/uploadflix.sh:286: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/uploadflix.sh:288: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
 ./hosts/uploadhive.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url")
-./hosts/uploadhive.sh:134: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url")
-./hosts/uploadhive.sh:185: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url")
-./hosts/uploadhive.sh:279: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
-./hosts/uploadhive.sh:281: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
+./hosts/uploadhive.sh:135: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url")
+./hosts/uploadhive.sh:247: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./hosts/uploadhive.sh:249: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
 ./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/")
 ./hosts/up_1fichier.sh:180: response=$(tor_curl_upload --insecure -L \
 ./hosts/up_anonfile.sh:102: response=$(tor_curl_upload --insecure -i \
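Note: every entry in this log runs through the tor_curl_request / tor_curl_upload wrappers whose definitions appear in the mad.sh hunk further down. A reduced sketch of the proxy line (host, port, and timeout values here are illustrative, not the script's configured ones): the random SOCKS5 username asks Tor for a fresh circuit per identity, because Tor isolates streams by SOCKS credentials (IsolateSOCKSAuth defaults to on), and the socks5h:// scheme keeps DNS resolution inside Tor.

    tor_identity="${RANDOM}"
    curl --proxy "socks5h://${tor_identity}@127.0.0.1:9050" -4 \
         --connect-timeout 15 --compressed --globoff "$@"
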
@@ -462,8 +462,10 @@ _________________________________________________________________________
 ./hosts/up_oshi.sh:110: response=$(tor_curl_upload --insecure \
 ./hosts/up_pixeldrain.sh:112: response=$(tor_curl_upload --insecure -X PUT \
 ./hosts/up_quax.sh:102: response=$(tor_curl_upload --insecure -i \
-./hosts/up_ranoz.sh:130: response=$(tor_curl_upload --insecure -L -i -s \
-./hosts/up_ranoz.sh:160: response=$(tor_curl_upload --insecure -i -X PUT \
+./hosts/up_ranoz.sh:128: response=$(tor_curl_upload --insecure -L -i -s \
+./hosts/up_ranoz.sh:155: response=$(tor_curl_upload --insecure -i -X PUT \
+./hosts/up_sendnow.sh:101: response=$(tor_curl_request --insecure -L -s 'https://send.now/upload')
+./hosts/up_sendnow.sh:138: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_shareonline.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_syspro.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_tempfileme.sh:102: response=$(tor_curl_upload --insecure -i \
@@ -476,7 +478,7 @@ _________________________________________________________________________
 ./hosts/up_uploadee.sh:176: response=$(tor_curl_upload --insecure -i -L \
 ./hosts/up_uploadev.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_uploadflix.sh:106: response=$(tor_curl_upload --insecure -i \
-./hosts/up_uploadhive.sh:129: response=$(tor_curl_upload --insecure -i \
+./hosts/up_uploadhive.sh:130: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_uploadraja.sh:102: response=$(tor_curl_upload --insecure -i \
 ./hosts/up_uwabaki.sh:102: response=$(tor_curl_upload --insecure -i -L \
 ./hosts/up_yolobit.sh:102: response=$(tor_curl_upload --insecure -i \
@@ -485,120 +487,120 @@ _________________________________________________________________________
 ./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url")
 ./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
 ./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path"
-./mad.sh:4:UseTorCurlImpersonate=false
-./mad.sh:87:tor_curl_request() {
-./mad.sh:88: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:89: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:91: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
-./mad.sh:94:tor_curl_request_extended() {
-./mad.sh:96: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:97: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
-./mad.sh:99: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
-./mad.sh:102:tor_curl_upload() {
-./mad.sh:103: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:105: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
-./mad.sh:107: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
-./mad.sh:111: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
-./mad.sh:113: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
-./mad.sh:1114:install_curl_impersonate() {
-./mad.sh:1116: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
-./mad.sh:1117: echo -e "- Currently uses curl v8.1.1."
-./mad.sh:1121: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
-./mad.sh:1122: echo -e "+ Currently uses curl v8.7.1"
-./mad.sh:1126: PS3='Please select which curl_impersonate to install: '
-./mad.sh:1134: install_curl_impersonate_lwthiker_orig
-./mad.sh:1138: install_curl_impersonate_lexiforest_fork
-./mad.sh:1148:install_curl_impersonate_lwthiker_orig() {
-./mad.sh:1152: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
-./mad.sh:1153: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
-./mad.sh:1156: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
-./mad.sh:1159: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
-./mad.sh:1161: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1164: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
-./mad.sh:1174: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
-./mad.sh:1176: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1179: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
-./mad.sh:1181: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
-./mad.sh:1229: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
-./mad.sh:1258: echo -e "| Extracting curl_impersonate..."
-./mad.sh:1260: rm -f "${ScriptDir}"/curl*
-./mad.sh:1261: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
-./mad.sh:1262: mv "$extract_location/curl_ff109" "${ScriptDir}/"
-./mad.sh:1263: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
-./mad.sh:1271:install_curl_impersonate_lexiforest_fork() {
-./mad.sh:1275: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
-./mad.sh:1276: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
-./mad.sh:1279: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
-./mad.sh:1282: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
-./mad.sh:1284: debugHtml "github" "lbf_inst_curlimp$j" "$response"
-./mad.sh:1287: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
-./mad.sh:1297: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
-./mad.sh:1299: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
-./mad.sh:1302: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
-./mad.sh:1304: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
-./mad.sh:1352: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
-./mad.sh:1381: echo -e "| Extracting curl_impersonate..."
-./mad.sh:1383: rm -f "${ScriptDir}"/curl*
-./mad.sh:1384: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
-./mad.sh:1385: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
-./mad.sh:1386: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
-./mad.sh:1548: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
-./mad.sh:1556: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1557: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1566: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
-./mad.sh:1568: echo -e "$maud_curl"
-./mad.sh:1570: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1572: echo -e "$maud_torcurl"
-./mad.sh:1584: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1585: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1594: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
-./mad.sh:1596: echo -e "$maud_curl"
-./mad.sh:1598: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1600: echo -e "$maud_torcurl"
-./mad.sh:1606: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
-./mad.sh:1607: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
-./mad.sh:1616: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
-./mad.sh:1618: echo -e "$maud_curl"
-./mad.sh:1620: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
-./mad.sh:1622: echo -e "$maud_torcurl"
-./mad.sh:2569: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:2570: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:2572: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
-./mad.sh:2744: if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:2745: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:2747: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
-./mad.sh:2945: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
-./mad.sh:2952: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
-./mad.sh:3089: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
-./mad.sh:3142: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3144: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
-./mad.sh:3342: response=$(tor_curl_upload --insecure -i \
-./mad.sh:3349: response=$(tor_curl_upload --insecure -i \
-./mad.sh:3420:if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:3421: curl_impersonate=()
-./mad.sh:3422: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
-./mad.sh:3423: bFoundCurlHeader=false
-./mad.sh:3427: curl_impersonate=($fil)
-./mad.sh:3428: bFoundCurlHeader=true
-./mad.sh:3432: if [ "$bFoundCurlHeader" == "false" ]; then
-./mad.sh:3433: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
-./mad.sh:3436: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
-./mad.sh:3439: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
-./mad.sh:3441: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
-./mad.sh:3445: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
-./mad.sh:3446: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
-./mad.sh:3449: echo -e "run $0 install_curl_impersonate\\n"
-./mad.sh:3451: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
-./mad.sh:3452: UseTorCurlImpersonate=false
-./mad.sh:3453: install_curl_impersonate
-./mad.sh:3537: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
-./mad.sh:3538: printf " %s install_curl_impersonate\\n" "$0"
-./mad.sh:3616:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
-./mad.sh:3617: install_curl_impersonate
-./mad.sh:3648:if [ "${UseTorCurlImpersonate}" == "true" ]; then
-./mad.sh:3649: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
-./mad.sh:3651: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
-./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then
+./mad.sh:97:UseTorCurlImpersonate=false
+./mad.sh:393:tor_curl_request() {
+./mad.sh:394: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:395: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:397: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
+./mad.sh:400:tor_curl_request_extended() {
+./mad.sh:402: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:403: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:405: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
+./mad.sh:408:tor_curl_upload() {
+./mad.sh:409: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:411: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
+./mad.sh:413: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@"
+./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
+./mad.sh:419: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
+./mad.sh:1420:install_curl_impersonate() {
+./mad.sh:1422: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
+./mad.sh:1423: echo -e "- Currently uses curl v8.1.1."
+./mad.sh:1427: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
+./mad.sh:1428: echo -e "+ Currently uses curl v8.7.1"
+./mad.sh:1432: PS3='Please select which curl_impersonate to install: '
+./mad.sh:1440: install_curl_impersonate_lwthiker_orig
+./mad.sh:1444: install_curl_impersonate_lexiforest_fork
+./mad.sh:1454:install_curl_impersonate_lwthiker_orig() {
+./mad.sh:1458: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
+./mad.sh:1459: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
+./mad.sh:1462: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
+./mad.sh:1465: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
+./mad.sh:1467: debugHtml "github" "lbf_inst_curlimp$j" "$response"
+./mad.sh:1470: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
+./mad.sh:1480: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
+./mad.sh:1482: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1485: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
+./mad.sh:1487: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
+./mad.sh:1535: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
+./mad.sh:1564: echo -e "| Extracting curl_impersonate..."
+./mad.sh:1566: rm -f "${ScriptDir}"/curl*
+./mad.sh:1567: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
+./mad.sh:1568: mv "$extract_location/curl_ff109" "${ScriptDir}/"
+./mad.sh:1569: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
+./mad.sh:1577:install_curl_impersonate_lexiforest_fork() {
+./mad.sh:1581: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
+./mad.sh:1582: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
+./mad.sh:1585: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
+./mad.sh:1588: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
+./mad.sh:1590: debugHtml "github" "lbf_inst_curlimp$j" "$response"
+./mad.sh:1593: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
+./mad.sh:1603: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
+./mad.sh:1605: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
+./mad.sh:1608: file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
+./mad.sh:1610: debugHtml "github" "head_inst_curlimp$j" "${file_header}"
+./mad.sh:1658: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
+./mad.sh:1687: echo -e "| Extracting curl_impersonate..."
+./mad.sh:1689: rm -f "${ScriptDir}"/curl*
+./mad.sh:1690: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
+./mad.sh:1691: mv "$extract_location/curl_chrome131" "${ScriptDir}/"
+./mad.sh:1692: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
+./mad.sh:1854: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
+./mad.sh:1862: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1863: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1872: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
+./mad.sh:1874: echo -e "$maud_curl"
+./mad.sh:1876: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1878: echo -e "$maud_torcurl"
+./mad.sh:1890: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1891: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1900: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
+./mad.sh:1902: echo -e "$maud_curl"
+./mad.sh:1904: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1906: echo -e "$maud_torcurl"
+./mad.sh:1912: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl')
+./mad.sh:1913: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl')
+./mad.sh:1922: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
+./mad.sh:1924: echo -e "$maud_curl"
+./mad.sh:1926: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
+./mad.sh:1928: echo -e "$maud_torcurl"
+./mad.sh:2875: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:2876: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:2878: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:3050: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:3051: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:3053: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./mad.sh:3251: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
+./mad.sh:3258: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
+./mad.sh:3395: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
+./mad.sh:3448: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3450: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
+./mad.sh:3648: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3655: response=$(tor_curl_upload --insecure -i \
+./mad.sh:3726:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:3727: curl_impersonate=()
+./mad.sh:3728: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
+./mad.sh:3729: bFoundCurlHeader=false
+./mad.sh:3733: curl_impersonate=($fil)
+./mad.sh:3734: bFoundCurlHeader=true
+./mad.sh:3738: if [[ "$bFoundCurlHeader" == "false" ]]; then
+./mad.sh:3739: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
+./mad.sh:3742: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
+./mad.sh:3745: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
+./mad.sh:3747: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL."
+./mad.sh:3751: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
+./mad.sh:3752: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
+./mad.sh:3755: echo -e "run $0 install_curl_impersonate\\n"
+./mad.sh:3757: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
+./mad.sh:3758: UseTorCurlImpersonate=false
+./mad.sh:3759: install_curl_impersonate
+./mad.sh:3843: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
+./mad.sh:3844: printf " %s install_curl_impersonate\\n" "$0"
+./mad.sh:3922:elif [[ "$arg1" == "install_curl_impersonate" ]]; then
+./mad.sh:3923: install_curl_impersonate
+./mad.sh:3954:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+./mad.sh:3955: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
+./mad.sh:3957: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
+./plugins/pjscloud.sh:44: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
 ./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
 ./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \
diff --git a/.audit/mad-audit-http.log b/.audit/mad-audit-http.log
index 9dce414..f4e3178 100755
--- a/.audit/mad-audit-http.log
+++ b/.audit/mad-audit-http.log
@@ -1,4 +1,4 @@
-DateTime: 25.02.13
+DateTime: 25.02.19
 
 Files:
 ./hosts/1fichier.sh
@@ -128,6 +128,7 @@ Files:
 ./hosts/up_quax.sh
 ./hosts/up_ramsgaard.sh
 ./hosts/up_ranoz.sh
+./hosts/up_sendnow.sh
 ./hosts/up_shareonline.sh
 ./hosts/up_skrepr.sh
 ./hosts/up_soyjak.sh
@@ -212,7 +213,7 @@ _________________________________________________________________________
 ./hosts/gofile.sh:183: "https://api.gofile.io/contents/$file_id")
 ./hosts/gofile.sh:185: debugHtml "${remote_url##*/}" "gofile_contents$i" "url: https://api.gofile.io/contents/${file_id}?${form_data}"$'\n'"${response}"
 ./hosts/gofile.sh:212: cnturls=$(grep -oin 'https://' <<< "$download_url" | wc -l)
-./hosts/gofile.sh:311: cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header")
+./hosts/gofile.sh:327: cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header")
 ./hosts/hexload.sh:102: response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data")
 ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
 ./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download")
@@ -225,18 +226,19 @@ _________________________________________________________________________
 ./hosts/kraken.sh:155: kraken_action="https://krakenfiles.com/download/${kraken_action##*/}"
 ./hosts/nippy.sh:160: download_url="https:"$(grep -oP '(?<=

[...]

 ' <<< "$response"; then
- if [ $i == $maxfetchretries ] ; then
+ if [[ $i == $maxfetchretries ]] ; then
 rm -f "${fb_cookie_jar}";
 printf "\\n"
 echo -e "${RED}| Failed to extract download link (Unknown warning encountered) [3c]${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Unknown warning encountered in download2 [3c]" ""
 fi
 return 1
@@ -256,11 +256,11 @@ fb_FetchFileInfo() {
 fi
 if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then
 rm -f "${fb_cookie_jar}";
- if [ $i == $maxfetchretries ] ; then
+ if [[ $i == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract download link [3].${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract download link [3]" ""
 fi
 return 1
@@ -281,16 +281,16 @@ fb_FetchFileInfo() {
 response=$(tor_curl_request --insecure -L -s -X POST \
 -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \
 --data "$form_data" "$post_action")
- if [ "${DebugAllEnabled}" == "true" ] ; then
+ if [[ "${DebugAllEnabled}" == "true" ]] ; then
 debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}"
 fi
 if [[ -z $response ]] ; then
- if [ $i == $maxfetchretries ] ; then
+ if [[ $i == $maxfetchretries ]] ; then
 rm -f "${fb_cookie_jar}";
 printf "\\n"
 echo -e "${RED}| Failed to extract download link [4].${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract download link [4]" ""
 fi
 return 1
@@ -311,7 +311,7 @@ fb_FetchFileInfo() {
 printf "\\n"
 echo -e "${RED}| Failed to extract download link [6]${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 rm -f "${fb_cookie_jar}";
 failedRetryDownload "${remote_url}" "Failed to extract download link [6]" ""
 fi
@@ -333,16 +333,16 @@ fb_FetchFileInfo() {
 trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${fb_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
 GetRandomUA
 file_header=$(tor_curl_request --insecure -L --head -s "$download_url")
- if [ "${DebugAllEnabled}" == "true" ] ; then
+ if [[ "${DebugAllEnabled}" == "true" ]] ; then
 debugHtml "${remote_url##*/}" "fb_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
 fi
 if [[ -z $file_header ]] ; then
- if [ $j == $maxfetchretries ] ; then
+ if [[ $j == $maxfetchretries ]] ; then
 rm -f "${fb_cookie_jar}";
 printf "\\n"
 echo -e "${RED}| Failed to extract file info [1]${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract file info [1]" ""
 fi
 return 1
@@ -352,12 +352,12 @@ fb_FetchFileInfo() {
 fi
 fi
 if ! grep -Eqi 'HTTP.*200' <<< $file_header ; then
- if [ $j == $maxfetchretries ] ; then
+ if [[ $j == $maxfetchretries ]] ; then
 rm -f "${fb_cookie_jar}";
 printf "\\n"
 echo -e "${RED}| Failed to extract file info [2]${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract file info [2]" ""
 fi
 return 1
@@ -366,15 +366,15 @@ fb_FetchFileInfo() {
 continue
 fi
 fi
- if [ "$filename_override" == "" ] ; then
+ if [[ "$filename_override" == "" ]] ; then
 filename="${download_url##*/}"
 filename=${filename//%0d/}
 if [[ -z "$filename" ]]; then
- if [ $j == $maxfetchretries ] ; then
+ if [[ $j == $maxfetchretries ]] ; then
 printf "\\n"
 echo -e "${RED}| Failed to extract file name${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract file name" ""
 fi
 return 1
@@ -386,12 +386,12 @@ fb_FetchFileInfo() {
 file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header")
 file_size_bytes=${file_size_bytes//[$'\t\r\n']}
 if [[ -z "$file_size_bytes" ]]; then
- if [ $j == $maxfetchretries ] ; then
+ if [[ $j == $maxfetchretries ]] ; then
 rm -f "${fb_cookie_jar}";
 printf "\\n"
 echo -e "${RED}| Failed to extract file size.${NC}"
 warnAndRetryUnknownError=true
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Failed to extract file size." ""
 fi
 return 1
@@ -402,15 +402,15 @@ fb_FetchFileInfo() {
 fi
 fi
 break #Good to go here
 done
- touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
- if [ ! "$filename_override" == "" ] ; then
+ touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+ if [[ ! "$filename_override" == "" ]] ; then
 filename="$filename_override"
 fi
 filename=$(sanitize_file_or_folder_name "${filename}")
 printf "\\n"
 echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
- if [ -z $file_size_bytes ] ; then
- if [ "${finalAttempt}" == "true" ] ; then
+ if [[ -z $file_size_bytes ]] ; then
+ if [[ "${finalAttempt}" == "true" ]] ; then
 failedRetryDownload "${remote_url}" "Filesize not found!" ""
 fi
 echo -e "${YELLOW}| Filesize not found… retry${NC}"
@@ -441,14 +441,14 @@ fb_GetFile() {
 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
 for ((j=1; j<=$MaxDownloadRetries; j++)); do
 pd_presize=0
- if [ -f "$file_path" ] ; then
+ if [[ -f "$file_path" ]] ; then
 pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
 fi
 GetRandomUA
 CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
 trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${fb_cookie_jar}"; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
- if [ "${UseTorCurlImpersonate}" == "true" ]; then
- if [ "${RateMonitorEnabled}" == "true" ]; then
+ if [[ "${UseTorCurlImpersonate}" == "true" ]]; then
+ if [[ "${RateMonitorEnabled}" == "true" ]]; then
 tor_curl_request --insecure -L \
 --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
 "$download_url" --continue-at - --output "$file_path"
@@ -457,7 +457,7 @@ fb_GetFile() {
 "$download_url" --continue-at - --output "$file_path"
 fi
 else
- if [ "${RateMonitorEnabled}" == "true" ]; then
+ if [[ "${RateMonitorEnabled}" == "true" ]]; then
 tor_curl_request --insecure \
 --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
 -H "User-Agent: $RandomUA" \
@@ -489,7 +489,7 @@ fb_GetFile() {
 fi
 fi
 received_file_size=0
- if [ -f "$file_path" ] ; then
+ if [[ -f "$file_path" ]] ; then
 received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
 fi
 if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
@@ -498,9 +498,9 @@ fb_GetFile() {
 containsHtml=true
 fi
 downDelta=$(( received_file_size - pd_presize ))
- if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
- if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
- if [ -f "${file_path}" ] ; then
+ if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then
+ if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then
+ if [[ -f "${file_path}" ]] ; then
 if ((pd_presize > 0)); then
 echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -532,21 +532,21 @@ fb_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -558,7 +558,7 @@ fb_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/fileditch.sh b/hosts/fileditch.sh index 433fc1d..309da9d 100644 --- a/hosts/fileditch.sh +++ b/hosts/fileditch.sh @@ -54,24 +54,24 @@ fd_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fd_FetchFileInfo $finalAttempt && fd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -94,10 +94,10 @@ fd_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$remote_url") tor_identity="${RANDOM}" file_header=$(tor_curl_request --insecure --head -L -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then echo -e "${RED}| The file has been removed (404).${NC}" removedDownload "${remote_url}" @@ -107,12 +107,12 @@ fd_FetchFileInfo() { if ! 
grep -Eqi 'HTTP/.* 200' <<< $file_header ; then echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -124,29 +124,29 @@ fd_FetchFileInfo() { fi if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi else echo -e "${RED}| No response. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -178,18 +178,18 @@ fd_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -198,9 +198,9 @@ fd_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
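
The download step above pairs `--continue-at -` (resume at the output file's current size) with `--speed-limit`/`--speed-time`, so a stalled circuit aborts instead of hanging forever. Stripped of the Tor wrapper, the curl core looks like this (URL, path, and thresholds illustrative):

# Abort if throughput stays below DownloadSpeedMin bytes/sec for
# DownloadTimeoutInterval seconds; --continue-at - appends to the partial.
DownloadSpeedMin=1024
DownloadTimeoutInterval=60
curl --insecure \
     --speed-limit "$DownloadSpeedMin" --speed-time "$DownloadTimeoutInterval" \
     --continue-at - --output "file.bin" \
     "https://example.com/file.bin"
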
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -212,15 +212,15 @@ fd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -232,21 +232,21 @@ fd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -258,7 +258,7 @@ fd_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/filedot.sh b/hosts/filedot.sh index 79a071d..335df2c 100644 --- a/hosts/filedot.sh +++ b/hosts/filedot.sh @@ -43,24 +43,24 @@ fdot_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fdot_FetchFileInfo $finalAttempt && fdot_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -117,7 +117,7 @@ fdot_FetchFileInfo() { -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ -c 
"${fdot_cookie_jar}" \ "https://filedot.to/login.html") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_login_$a" "${PAGE}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${PAGE}" ; then @@ -168,7 +168,7 @@ fdot_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${fdot_cookie_jar}" -c "${fdot_cookie_jar}" \ -d "$form_data" "https://filedot.to/") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_loginP_$b" "form_data: ${form_data}"$'\n'"${resp_login}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${resp_login}" ; then @@ -255,7 +255,7 @@ fdot_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${fdot_cookie_jar}" -c "${fdot_cookie_jar}" \ -d "$form_data" "https://filedot.to/") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_fdownP_$c" "form_data: ${form_data}"$'\n'"${response}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${response}" ; then @@ -335,7 +335,7 @@ fdot_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${fdot_cookie_jar}" -c "${fdot_cookie_jar}" \ -d "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_downlnkP_$d" "form_data: ${form_data}"$'\n'"${response}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${response}" ; then @@ -404,15 +404,15 @@ fdot_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_head_$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -421,11 +421,11 @@ fdot_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -436,11 +436,11 @@ fdot_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -450,15 +450,15 @@ fdot_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -489,19 +489,19 @@ fdot_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -510,9 +510,9 @@ fdot_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -524,15 +524,15 @@ fdot_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -544,21 +544,21 @@ fdot_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -570,7 +570,7 @@ fdot_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/filehaus.sh b/hosts/filehaus.sh index e93539a..80a2788 100644 --- a/hosts/filehaus.sh +++ b/hosts/filehaus.sh @@ -43,24 +43,24 @@ fh_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fh_FetchFileInfo $finalAttempt && fh_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -99,10 +99,10 @@ fh_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then printf "\\n" echo -e "${RED}| The file has been removed (404).${NC}" @@ -111,10 +111,10 @@ fh_FetchFileInfo() { return 1 fi if ! 
grep -Eqi '200|content-length' <<< "${file_header}" ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -137,15 +137,15 @@ fh_FetchFileInfo() { printf "\\n" break done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -181,19 +181,19 @@ fh_GetFile() { splitnum=1 fi pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" else tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -202,9 +202,9 @@ fh_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -216,15 +216,15 @@ fh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -236,21 +236,21 @@ fh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -262,7 +262,7 @@ fh_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/firestorage.sh b/hosts/firestorage.sh index 374d572..c22a386 100644 --- a/hosts/firestorage.sh +++ b/hosts/firestorage.sh @@ -43,24 +43,24 @@ fs_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fs_FetchFileInfo $finalAttempt && fs_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -89,22 +89,22 @@ fs_FetchFileInfo() { for ((j=1; j<=$maxfetchretries; j++)); do mkdir -p "${WorkDir}/.temp" printf " ." 
- if [ "$newIdent" == "true" ] ; then + if [[ "$newIdent" == "true" ]] ; then tor_identity="${RANDOM}" newIdent=false fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fs_${fetchnum}fetch_$j" "fixed_url: ${fixed_url}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -135,11 +135,11 @@ fs_FetchFileInfo() { j=$((j-1)) continue fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link (unknown)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" "" fi return 1 @@ -159,11 +159,11 @@ fs_FetchFileInfo() { j=$((j-1)) continue fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link (unknown)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" "" fi return 1 @@ -183,11 +183,11 @@ fs_FetchFileInfo() { j=$((j-1)) continue fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link (unknown)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" "" fi return 1 @@ -206,11 +206,11 @@ fs_FetchFileInfo() { download_url=$(grep -oP -m 1 '(?<= 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -360,15 +360,15 @@ fs_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -380,21 +380,21 @@ fs_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -406,7 +406,7 @@ fs_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/gofile.sh b/hosts/gofile.sh old mode 100755 new mode 100644 index 0326ccd..41df85a --- a/hosts/gofile.sh +++ b/hosts/gofile.sh @@ -1,6 +1,6 @@ #! Name: gofile.sh #! Author: kittykat -#! Version: 2025.01.03 +#! Version: 2025.02.17 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! @@ -43,24 +43,24 @@ gofile_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if gofile_FetchFileInfo $finalAttempt && gofile_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -77,7 +77,7 @@ gofile_DownloadFile() { #! gofile_FetchFileInfo() { finalAttempt=$1 - maxfetchretries=5 + maxfetchretries=4 gofile_cookie_jar="" if ! 
grep -Eqi 'gofile.io/d/' <<< "$remote_url"; then echo -e "${RED}| Bad gofile url (format: gofile.io/d/xxxxx)${NC}" @@ -106,16 +106,16 @@ gofile_FetchFileInfo() { -H "Sec-Fetch-Site: none" \ -H "Sec-Fetch-User: ?1" \ "https://api.gofile.io/accounts") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "gofile_token$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get token.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -125,18 +125,18 @@ gofile_FetchFileInfo() { fi if grep -Eqi '"token":"' <<< "$response"; then token=$(grep -oP '(?<="token":").*(?="})' <<< "$response") - if [ ! -z $token ]; then + if [[ ! -z "$token" ]]; then printf "\\n" echo -e "${GREEN}| Token acquired.${NC}" echo -e ".gofile.io\tTRUE\t/\tFALSE\t0\taccountToken\t$token" > ${gofile_cookie_jar} break else rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get token.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -146,11 +146,11 @@ gofile_FetchFileInfo() { fi else rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get token.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -181,16 +181,16 @@ gofile_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ --data "$form_data" \ "https://api.gofile.io/contents/$file_id") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "gofile_contents$i" "url: https://api.gofile.io/contents/${file_id}?${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get download url.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -223,11 +223,11 @@ gofile_FetchFileInfo() { fi else rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get download url.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -248,7 +248,7 @@ gofile_FetchFileInfo() { filename="" file_size_bytes="" cdn_url="" - if [ ! "$filename_override" == "" ] ; then + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi for ((j=1; j<=$maxfetchretries; j++)); do @@ -257,7 +257,6 @@ gofile_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${gofile_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s \ -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ - -H "User-Agent: $RandomUA" \ -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -H "Accept-Language: en-US,en;q=0.5" \ -H "Accept-Encoding: gzip, deflate, br" \ @@ -268,30 +267,47 @@ gofile_FetchFileInfo() { -H "Sec-Fetch-Site: none" \ -H "Sec-Fetch-User: ?1" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "gofile_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${gofile_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then - failedRetryDownload "${remote_url}" "" "" + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 else continue fi fi + if grep -Eqi 'HTTP/2 429|HTTP/1.1 429|HTTP.*429.*$' <<< $file_header ; then + if [[ $j == $maxfetchretries ]] ; then + rm -f "${gofile_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info [429]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info" "" + fi + return 1 + else + printf " zZz" + sleepRandomSecs 1 5 + tor_identity="${RANDOM}" + continue + fi + fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${gofile_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -299,29 +315,29 @@ gofile_FetchFileInfo() { continue fi fi - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename[*]=).*' <<< "$file_header") filename=${filename//[$'\t\r\n']} fi filename=${filename//UTF-8\'\'/} fi - if [ -z $cdn_url ] ; then + if [[ -z "$cdn_url" ]] ; then cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header") cdn_url=${cdn_url//[$'\t\r\n']} fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} fi - if [ -z $filename ] || [ -z $file_size_bytes ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ -z $filename ]] || [[ -z "$file_size_bytes" ]] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${gofile_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -331,12 +347,12 @@ gofile_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -367,13 +383,13 @@ gofile_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${gofile_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -H "User-Agent: $RandomUA" \ @@ -403,7 +419,7 @@ gofile_GetFile() { "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -412,9 +428,9 @@ gofile_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -426,15 +442,15 @@ gofile_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -446,21 +462,21 @@ gofile_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -472,7 +488,7 @@ gofile_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/hexload.sh b/hosts/hexload.sh index c9f043d..c32e8fa 100644 --- a/hosts/hexload.sh +++ b/hosts/hexload.sh @@ -46,24 +46,24 @@ hex_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if hex_FetchFileInfo $finalAttempt && check_file_extension && hex_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -85,42 +85,42 @@ hex_FetchFileInfo() { echo -e "${GREEN}# Fetching download url… ${NC}[.] timeout, [-] blocked ip, [*] wait time${NC}" local bDonePrint=true for ((j=1; j<=$hexmaxfetchfileretries; j++)); do - if [ "${bDonePrint}" == "false" ]; then + if [[ "${bDonePrint}" == "false" ]]; then printf " ." 
fi bDonePrint=false CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "$bNoNewTorIdent" == "true" ]; then + if [[ "$bNoNewTorIdent" == "true" ]]; then bNoNewTorIdent=false else tor_identity="${RANDOM}" fi - if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_hexload" == "true" ]; then - if [ "$hexUseDownload" == "download2" ]; then + if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [[ "$PJSCloud_hexload" == "true" ]]; then + if [[ "$hexUseDownload" == "download2" ]]; then form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi else form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi fi else - if [ "$hexUseDownload" == "download2" ]; then + if [[ "$hexUseDownload" == "download2" ]]; then form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi else form_data="op=download2&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi fi @@ -129,11 +129,11 @@ hex_FetchFileInfo() { download_url=$(echo "$response" | grep -oPi '(?<="link":")[^"]+(?=")' | base64 --decode) download_url=$(urlencode_spaces "$download_url") if grep -Eqi "Sorry, you have been blocked" <<< "$response"; then - if [ $j == $hexmaxfetchfileretries ] ; then + if ((j == hexmaxfetchfileretries)) ; then printf "\\n" echo -e "${YELLOW}| Too many failed attempts${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -153,7 +153,7 @@ hex_FetchFileInfo() { return 1 fi wait_time=$(grep -oP '(?<=Wait ).*?(?= seconds.*$)' <<< "$response") - if [ "$hexUseDownload" == "download1" ] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then + if [[ "$hexUseDownload" == "download1" ]] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then printf "\\n" echo -e "${YELLOW}| File uses download2.. 
switching.${NC}" hexUseDownload="download2" @@ -163,17 +163,17 @@ hex_FetchFileInfo() { continue fi if grep -Eqi '>You have to wait' <<< "$response"; then - if [ $j == $hexmaxfetchfileretries ] ; then + if ((j == hexmaxfetchfileretries)) ; then req_msg=$(grep -oP '(?<=>You have to wait ).*?(?= till next download.*$)' <<< "$response") req_msg=$(sanitize_html_tags "${req_msg}") printf "\\n" echo -e "${YELLOW}| Failed: ${req_msg}.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "${req_msg}" "" fi return 1 - elif [ "$hexUseDownload" == "download1" ] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then + elif [[ "$hexUseDownload" == "download1" ]] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then printf "\\n" echo -e "${YELLOW}| File uses download2.. switching.${NC}" hexUseDownload="download2" @@ -202,7 +202,7 @@ hex_FetchFileInfo() { failedRetryDownload "${remote_url}" "$req_msg" "" return 1 fi - if [ "$hexUseDownload" == "download1" ] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then + if [[ "$hexUseDownload" == "download1" ]] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then printf "\\n" echo -e "${YELLOW}| File uses download2.. switching.${NC}" hexUseDownload="download2" @@ -211,11 +211,11 @@ hex_FetchFileInfo() { continue fi if [[ -z "$download_url" ]]; then - if [ $j == $hexmaxfetchfileretries ] ; then + if ((j == hexmaxfetchfileretries)) ; then printf "\\n" echo -e "${RED}| Failed to extract CDN URL${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract CDN URL" "" fi return 1 @@ -223,12 +223,12 @@ hex_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then - if [ $j == $hexmaxfetchfileretries ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then + if ((j == hexmaxfetchfileretries)) ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "ailed to extract file name" "" fi return 1 @@ -238,8 +238,8 @@ hex_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -252,7 +252,7 @@ hex_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_identity="${RANDOM}" file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then @@ -268,12 +268,12 @@ hex_FetchFileInfo() { fi file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then if ((j < hexmaxfetchfileretries)); then printf "\\n" echo -e "${YELLOW}| Filesize not found… retry${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found" "" fi return 1 @@ -310,20 +310,20 @@ hex_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -332,9 +332,9 @@ hex_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -346,15 +346,15 @@ hex_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
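
The retry-cap tests in hexload.sh above moved from `[ $j == $hexmaxfetchfileretries ]` to `(( j == hexmaxfetchfileretries ))`: `==` inside `[ ]` compares strings, while arithmetic context compares numbers and tolerates unset counters. A sketch with hypothetical values:

j=5 max=05
[ "$j" == "$max" ] && echo "string-equal"    # prints nothing: "5" != "05"
(( j == max ))     && echo "numeric-equal"   # prints: leading zero reads as octal, still 5
unset j
(( j == 0 ))       && echo "unset is zero"   # unset evaluates to 0, no error
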
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -366,21 +366,21 @@ hex_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -392,7 +392,7 @@ hex_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/innocent.sh b/hosts/innocent.sh index 523bc34..6156065 100644 --- a/hosts/innocent.sh +++ b/hosts/innocent.sh @@ -49,24 +49,24 @@ inno_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if inno_FetchFileInfo $finalAttempt && inno_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -110,7 +110,7 @@ inno_FetchFileInfo() { [ -s "${WorkDir}/.temp/directhead" ] kill $! 2>/dev/null ) - if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" fi rm -f "${WorkDir}/.temp/directhead" @@ -118,10 +118,10 @@ inno_FetchFileInfo() { printf "| Retrieving Head: attempt #$j" file_header=$(tor_curl_request --insecure --head -L -s "$download_url") fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "inno_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then printf "\\n" echo -e "${RED}| Not Found (404). 
The file has been removed.${NC}" @@ -141,7 +141,7 @@ inno_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=${download_url##*/} fi if grep -Eqi 'Content-Length:' <<< "${file_header}" ; then @@ -162,18 +162,18 @@ inno_FetchFileInfo() { fi done printf "\\n" - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then echo -e "${RED}| Unexpected or no header response.${NC}" return 1 fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -202,19 +202,19 @@ inno_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ ! -z $file_size_bytes ] ; then + if [[ ! -z "$file_size_bytes" ]] ; then tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path" else echo -e "${BLUE}| No Resume Fetch${NC}" tor_curl_request_extended --insecure "$download_url" --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -222,8 +222,8 @@ inno_GetFile() { else containsHtml=true fi - if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -235,7 +235,7 @@ inno_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/isupload.sh b/hosts/isupload.sh index 6aa9011..146f4e7 100644 --- a/hosts/isupload.sh +++ b/hosts/isupload.sh @@ -43,24 +43,24 @@ isup_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if isup_FetchFileInfo $finalAttempt && isup_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ isup_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${isup_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "isup_dwnpage$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${isup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -107,11 +107,11 @@ isup_FetchFileInfo() { fi if grep -Eqi "Sorry, you are banned" <<< "$response"; then rm -f "${isup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -139,11 +139,11 @@ isup_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then rm -f "${isup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 
@@ -164,16 +164,16 @@ isup_FetchFileInfo() { response=$(tor_curl_request_extended --insecure -L -s -X POST \ -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" \ --data "$form_data" "$post_action") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "isup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${isup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -191,12 +191,12 @@ isup_FetchFileInfo() { return 1 fi if grep -Eqi 'Just a moment...' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${isup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -212,7 +212,7 @@ isup_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract download link [4]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${isup_cookie_jar}"; failedRetryDownload "${remote_url}" "Failed to extract download link [4]" "" fi @@ -254,7 +254,7 @@ isup_FetchFileInfo() { [ -s "${WorkDir}/.temp/directhead" ] kill $! 2>/dev/null ) - if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" fi rm -f "${WorkDir}/.temp/directhead" @@ -262,10 +262,10 @@ isup_FetchFileInfo() { printf "| Retrieving Head: attempt #$j" file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url") fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then printf "\\n" echo -e "${RED}| Not Found (404). The file has been removed.${NC}" @@ -287,7 +287,7 @@ isup_FetchFileInfo() { else break fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=${download_url##*/} fi if grep -Eqi 'Content-Length:' <<< "${file_header}" ; then @@ -308,17 +308,17 @@ isup_FetchFileInfo() { fi done printf "\\n" - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ !
"$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then printf "\\n" echo -e "${RED}| Unexpected or no header response.${NC}" return 1 fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -344,19 +344,19 @@ isup_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_curl_request_extended --insecure -L "$download_url" --output "$file_path" rc=$? - if [ $rc -ne 0 ] ; then + if ((rc != 0 )) ; then printf "${RED}Download Failed (bad exit status).${NC}" - if [ -f ${file_path} ]; then + if [[ -f ${file_path} ]]; then printf "${YELLOW} Partial removed...${NC}" printf "\n\n" rm -f "${file_path}" @@ -365,7 +365,7 @@ isup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -378,12 +378,12 @@ isup_GetFile() { else containsHtml=true fi - if [ "$containsHtml" == "true" ]; then + if [[ "$containsHtml" == "true" ]]; then echo -e "${YELLOW}Download Failed (contains html)${NC} partial removed..." 
rm -f "${file_path}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -395,13 +395,13 @@ isup_GetFile() { else CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -410,9 +410,9 @@ isup_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -424,15 +424,15 @@ isup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -444,21 +444,21 @@ isup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -470,7 +470,7 @@ isup_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/kraken.sh b/hosts/kraken.sh index 12ea1c3..4e56cf9 100644 --- a/hosts/kraken.sh +++ b/hosts/kraken.sh @@ -43,24 +43,24 @@ kraken_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if kraken_FetchFileInfo "" $((z+1)) $finalAttempt && kraken_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -102,7 +102,7 @@ kraken_FetchFileInfo() { tor_identity="${RANDOM}" trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${kraken_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${krak_id}" "kraken_token${num_attempt}_$i" "url: ${fixed_url}"$'\n'"krakenid: ${krak_id}"$'\n'"${PAGE}" fi if grep -Eqi 'sendFormCaptcha()' <<< "${PAGE}"; then @@ -167,7 +167,7 @@ kraken_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${kraken_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${krak_id}" "kraken_url${num_attempt}_1" "action: ${kraken_action}, token: ${kraken_token}"$'\n'"${down_request}" fi if !
grep -Eqi '"status":"ok"' <<< "${down_request}"; then @@ -184,11 +184,11 @@ kraken_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${kraken_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${krak_id}" "kraken_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi rm -f "$kraken_cookie_jar"; - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then echo -e "${RED}| The file has been removed (404).${NC}" removedDownload "${remote_url}" @@ -203,12 +203,12 @@ kraken_FetchFileInfo() { fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then echo -e "${RED}| Bad http response.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -216,7 +216,7 @@ kraken_FetchFileInfo() { filename=${filename//[$'\t\r\n']} else echo -e "${RED}| Failed to extract file name.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -224,27 +224,27 @@ kraken_FetchFileInfo() { fi if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi else echo -e "${RED}| No response. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -276,19 +276,19 @@ kraken_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ] && [ ! -f "$file_path" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]] && [[ ! 
-f "$file_path" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -297,9 +297,9 @@ kraken_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -311,15 +311,15 @@ kraken_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -331,21 +331,21 @@ kraken_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -357,7 +357,7 @@ kraken_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/mediafire.sh b/hosts/mediafire.sh index 9f32515..ed86788 100644 --- a/hosts/mediafire.sh +++ b/hosts/mediafire.sh @@ -48,24 +48,24 @@ mfire_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if mfire_FetchFileInfo $finalAttempt && mfire_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -104,7 +104,7 @@ mfire_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ "$download_url" | tr -d '\0') response=$(echo "$response" | iconv -c -f UTF-8 -t ISO8859-1) - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "mfire_fetch$i" "download_url: ${download_url}"$'\n'"${response}" fi if [[ -z $response ]] ; then @@ -113,7 +113,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download link" "" fi return 1 @@ -134,7 +134,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download link" "" fi return 1 @@ -147,7 +147,7 @@ mfire_FetchFileInfo() { echo -e "${GREEN}# Fetching file info…${NC}" filename="" file_size_bytes="" - if [ ! "$filename_override" == "" ] ; then + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi for ((j=1; j<=$maxretries; j++)); do @@ -167,7 +167,7 @@ mfire_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${mfire_cookie_jar}" -c "${mfire_cookie_jar}" \ "$download_url" | tr -d '\0') - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "mfire_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then @@ -176,7 +176,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract file info [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [1]" "" fi return 1 @@ -190,7 +190,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract file info [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [2]" "" fi return 1 @@ -198,24 +198,24 @@ mfire_FetchFileInfo() { continue fi fi - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename[*]=).*' <<< "$file_header") filename=${filename//[$'\t\r\n']} fi fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} fi - if [ -z $filename ] || [ -z $file_size_bytes ] ; then + if [[ -z $filename ]] || [[ -z "$file_size_bytes" ]] ; then if ((j == maxfetchretries)) ; then rm -f "${mfire_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [3]" "" fi return 1 @@ -226,18 +226,18 @@ mfire_FetchFileInfo() { break #Good to go here done rm -f "${mfire_cookie_jar}"; - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then echo -e "${RED}| Unexpected or no header response.${NC}" return 1 fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -267,19 +267,19 @@ mfire_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ ! -z $file_size_bytes ] ; then + if [[ ! 
-z "$file_size_bytes" ]] ; then tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path" else echo -e "${BLUE}| No Resume Fetch${NC}" tor_curl_request_extended --insecure "$download_url" --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -287,8 +287,8 @@ mfire_GetFile() { else containsHtml=true fi - if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -300,7 +300,7 @@ mfire_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/nippy.sh b/hosts/nippy.sh index 8b25bea..f40f3a7 100644 --- a/hosts/nippy.sh +++ b/hosts/nippy.sh @@ -43,24 +43,24 @@ nippy_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if nippy_FetchFileInfo $finalAttempt && nippy_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -117,16 +117,16 @@ nippy_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${nippy_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "nippy_dwnpage$i" "fixed_url: ${fixed_url}"$'\n'"${response}" fi if [[ -z $response ]] ; then rm -f "${nippy_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -165,7 +165,7 @@ nippy_FetchFileInfo() { printf "\\n" echo -e 
"${RED}| Failed to extract download url (unknown).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -178,7 +178,7 @@ nippy_FetchFileInfo() { filename="" file_size_bytes="" cdn_url="" - if [ ! "$filename_override" == "" ] ; then + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi for ((j=1; j<=$maxfetchretries; j++)); do @@ -188,16 +188,16 @@ nippy_FetchFileInfo() { file_header=$(tor_curl_request --insecure -L --head -s \ -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "nippy_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${nippy_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -213,11 +213,11 @@ nippy_FetchFileInfo() { return 1 fi if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -225,24 +225,24 @@ nippy_FetchFileInfo() { continue fi fi - if [ -z $cdn_url ] ; then + if [[ -z "$cdn_url" ]] ; then cdn_url="https:"$(grep -oP '(?<=location: ).*$' <<< "$file_header") cdn_url=${cdn_url//[$'\t\r\n']} fi - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} fi - if [ -z $filename ] || [ -z $cdn_url ] || [ -z $file_size_bytes ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ -z $filename ]] || [[ -z "$cdn_url" ]] || [[ -z "$file_size_bytes" ]] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${nippy_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -253,12 +253,12 @@ nippy_FetchFileInfo() { break #Good to go here done download_url="$cdn_url" - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -289,20 +289,20 @@ nippy_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${nippy_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -311,9 +311,9 @@ nippy_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -325,15 +325,15 @@ nippy_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -345,21 +345,21 @@ nippy_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -371,7 +371,7 @@ nippy_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/oshi.sh b/hosts/oshi.sh index 9a838e0..0b06430 100644 --- a/hosts/oshi.sh +++ b/hosts/oshi.sh @@ -1,6 +1,6 @@ #! Name: oshi.sh #! Author: kittykat -#! Version: 2024.09.13 +#! Version: 2025.02.17 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! @@ -54,24 +54,24 @@ oshi_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if oshi_FetchFileInfo $finalAttempt && oshi_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -91,18 +91,25 @@ oshi_FetchFileInfo() { finalAttempt=$1 CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${OshiBaseUrlOverride}" == "oshiat" ]; then + if [[ "${OshiBaseUrlOverride}" == "oshiat" ]]; then download_url=${remote_url//5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion/oshi\.at} - elif [ "${OshiBaseUrlOverride}" == "oshionion" ]; then + elif [[ "${OshiBaseUrlOverride}" == "oshionion" ]]; then download_url=${remote_url//oshi\.at/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion} fi + if ! 
grep -Eqi '/nossl/' <<< "$download_url"; then + download_url=${download_url//oshi\.at/oshi\.at\/nossl} + download_url=${download_url//5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion\/nossl} + fi + if grep -Eqi '^https' <<< "$download_url"; then + download_url=${download_url//https:/http:} + fi download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") tor_identity="${RANDOM}" file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "oshi_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then echo -e "${RED}| O shi-, (404). The file has been removed.${NC}" removedDownload "${remote_url}" @@ -112,12 +119,12 @@ oshi_FetchFileInfo() { if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -132,29 +139,29 @@ oshi_FetchFileInfo() { fi if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi else echo -e "${RED}| No response. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!"
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -186,18 +193,18 @@ oshi_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -206,9 +213,9 @@ oshi_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -220,15 +227,15 @@ oshi_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -240,21 +247,21 @@ oshi_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -266,7 +273,7 @@ oshi_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/pixeldrain.sh b/hosts/pixeldrain.sh index 48d2e98..178dd73 100644 --- a/hosts/pixeldrain.sh +++ b/hosts/pixeldrain.sh @@ -43,24 +43,24 @@ pd_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if pd_FetchFileInfo $finalAttempt && pd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -92,18 +92,18 @@ pd_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "pd_fetch$i" "$response" fi - if [ ! -z "$response" ] ; then + if [[ ! -z "$response" ]] ; then if grep -Eq '"views":' <<< "$response"; then pdpreviews=$(grep -o -P '(?<="views":).+?(?=,")' <<< "$response") fi if grep -i -Eq "You have reached the maximum number of open download connections" <<< "$response"; then - if [ $i -ge 5 ] ; then + if ((i >= 5)) ; then printf "\\n" echo -e "${YELLOW}| Bad node. Reached the maximum number of open download connections…${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -135,7 +135,7 @@ pd_FetchFileInfo() { if ((i > 1)) ; then printf "\\n" fi - if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_pixeldrain" == "true" ]; then + if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [[ "$PJSCloud_pixeldrain" == "true" ]]; then if ! grep -Eq "pjscloud.sh" <<< "$LoadPlugins" ; then echo -e "${RED}| Pixeldrain viewpump requires pjscloud.sh plugin.${NC}" failedRetryDownload "${remote_url}" "Captcha Rate Limited (needs view pumping). Requires pjscloud.sh plugin."
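[audit note] The view-pump path below sleeps a randomized 45-120 s so the view counter can settle. sleepRandomSecs is defined outside this log; presumably something along these lines (hypothetical sketch):
    sleepRandomSecs() {   # usage: sleepRandomSecs MIN MAX
        sleep $(( $1 + RANDOM % ($2 - $1 + 1) ))
    }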
@@ -157,7 +157,7 @@ pd_FetchFileInfo() { tor_identity="${RANDOM}" trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $jsonRequest; echo ""; tput cnorm; exit" 0 1 2 3 6 15 resp_pump=$(pjscloud_tor_request "https://pixeldrain.com/u/$fileid") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "pd_pump$k" "preViews: $pdpreviews"$'\n'"postViews: $pdpostviews"$'\n'"$resp_pump" fi if grep -Eq 'Error: Forbidden' <<< "$resp_pump"; then @@ -182,7 +182,7 @@ pd_FetchFileInfo() { echo -e "| Final views: $pdpostviews (+1)" echo -e "| Waiting a few seconds to allow pd views to update…" sleepRandomSecs 45 120 - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" return 1 fi @@ -207,22 +207,22 @@ pd_FetchFileInfo() { printf "\\n" fi echo -e "${YELLOW}| Unknown availability: $pd_message${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" return 1 fi fi fi - if [ $i -gt 1 ] ; then + if ((i > 1)) ; then printf "\\n" fi echo -e "| Current views: $pdpreviews" break else - if [ $i -ge 5 ] ; then + if ((i >= 5)) ; then printf "\\n" echo -e "${YELLOW}| No response…${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -233,14 +233,14 @@ pd_FetchFileInfo() { fi done filename=$(grep -oP '(?<="name":")[^"]+(?=")' <<< "$response") - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then echo -e "${RED}| Failed to extract file name.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -248,13 +248,13 @@ pd_FetchFileInfo() { echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" for ((i=1; i<=6; i++)); do pdheadurl="https://pixeldrain.com/api/file/${fileid}" - if [ "${UsePixeldrainBypass}" == "true" ]; then + if [[ "${UsePixeldrainBypass}" == "true" ]]; then pdheadurl="https://pd.cybar.xyz/$fileid" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "pd_head$i" "url: ${pdheadurl}"$'\n'"${file_header}" fi if !
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then @@ -265,7 +265,7 @@ pd_FetchFileInfo() { return 1 fi fi - if [ "${UsePixeldrainBypass}" == "true" ]; then + if [[ "${UsePixeldrainBypass}" == "true" ]]; then download_url=$(grep -oP '(?<=location: ).*$' <<< "$file_header") download_url="${download_url//[$'\t\r\n']}" else @@ -274,8 +274,8 @@ pd_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -294,7 +294,7 @@ pd_FetchFileInfo() { if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then return 1 fi - if [ "${UsePixeldrainBypass}" == "true" ]; then + if [[ "${UsePixeldrainBypass}" == "true" ]]; then echo -e "| ${BLUE}PixelDrain bypass:${NC} Knight beds queen ${RED}]${NC}°${PINK}----${RED}[${NC} ♞♝ ${NC}|▀▄▀▄▀▄▀▄▀▄▀▄▀▄▀▄|${NC}" fi echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload @@ -311,7 +311,7 @@ pd_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi echo -e "Download Url: $download_url" @@ -319,8 +319,8 @@ pd_GetFile() { tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ --referer "$file_url" "$download_url" --continue-at - --output "$file_path" @@ -329,7 +329,7 @@ pd_GetFile() { --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ -H "User-Agent: $RandomUA" \ -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ @@ -349,7 +349,7 @@ pd_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -358,9 +358,9 @@ pd_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to 
previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -372,15 +372,15 @@ pd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -392,21 +392,21 @@ pd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -418,7 +418,7 @@ pd_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/quax.sh b/hosts/quax.sh index c2351ea..cfa183b 100644 --- a/hosts/quax.sh +++ b/hosts/quax.sh @@ -43,24 +43,24 @@ qx_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if qx_FetchFileInfo $finalAttempt && qx_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -83,10 +83,10 @@ qx_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$remote_url") tor_identity="${RANDOM}" 
     file_header=$(tor_curl_request --insecure --head -L -s "$download_url")
-    if [ "${DebugAllEnabled}" == "true" ] ; then
+    if [[ "${DebugAllEnabled}" == "true" ]] ; then
         debugHtml "${remote_url##*/}" "qx_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
     fi
-    if [ ! -z "$file_header" ] ; then
+    if [[ ! -z "$file_header" ]] ; then
         if grep -Eqi '404 Not Found|HTTP.* 404' <<< "${file_header}" ; then
             echo -e "${RED}| The file has been removed (404).${NC}"
             removedDownload "${remote_url}"
@@ -96,12 +96,12 @@ qx_FetchFileInfo() {
         if ! grep -Eqi 'HTTP/.* 200' <<< $file_header ; then
             echo -e "${RED}| Failed to extract file info${NC}"
             warnAndRetryUnknownError=true
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 failedRetryDownload "${remote_url}" "" ""
             fi
             return 1
         fi
-        if [ "$filename_override" == "" ] ; then
+        if [[ "$filename_override" == "" ]] ; then
             if grep -Eqi 'filename=' <<< "${file_header}" ; then
                 filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}")
                 filename=${filename##filename}
@@ -113,29 +113,29 @@ qx_FetchFileInfo() {
         fi
         if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then
             echo -e "${RED}| Failed to extract file size.${NC}"
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 failedRetryDownload "${remote_url}" "" ""
             fi
             return 1
         fi
     else
         echo -e "${RED}| No response. Try again later.${NC}"
-        if [ "${finalAttempt}" == "true" ] ; then
+        if [[ "${finalAttempt}" == "true" ]] ; then
             failedRetryDownload "${remote_url}" "" ""
         fi
         return 1
     fi
-    touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
-    if [ ! "$filename_override" == "" ] ; then
+    touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+    if [[ ! "$filename_override" == "" ]] ; then
         filename="$filename_override"
-    elif [ -z $filename ] ; then
+    elif [[ -z $filename ]] ; then
         filename=${download_url##*/}
     fi
     filename=$(sanitize_file_or_folder_name "${filename}")
     file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
     file_size_bytes=${file_size_bytes//[$'\t\r\n']}
-    if [ -z $file_size_bytes ] ; then
-        if [ "${finalAttempt}" == "true" ] ; then
+    if [[ -z $file_size_bytes ]] ; then
+        if [[ "${finalAttempt}" == "true" ]] ; then
             failedRetryDownload "${remote_url}" "Filesize not found!" ""
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -167,18 +167,18 @@ qx_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -187,9 +187,9 @@ qx_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -201,15 +201,15 @@ qx_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -221,21 +221,21 @@ qx_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
             if ((j >= $MaxDownloadRetries)) ; then
                 rm -f "$flockDownload";
-                if [ "${finalAttempt}" == "true" ] ; then
+                if [[ "${finalAttempt}" == "true" ]] ; then
                     droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
                 fi
                 return 1
@@ -247,7 +247,7 @@ qx_GetFile() {
         echo -e "\n${RED}Download failed, file is incomplete.${NC}"
         if ((j >= $MaxDownloadRetries)) ; then
             rm -f "$flockDownload";
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
             fi
             return 1
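_________________________________________________________________________
Audit note: quax.sh (and ranoz.sh below) also quote the touch target for
the flock file. The reason is the same word-splitting hazard: an unquoted
path containing whitespace becomes two arguments and the lock lands in
the wrong place. A sketch with a hypothetical WorkDir, not a path the
scripts actually use:

    #!/usr/bin/env bash
    WorkDir="/tmp/mad audit"            # hypothetical: contains a space
    mkdir -p "${WorkDir}/.flocks"
    key="remoteurl123"
    touch ${WorkDir}/.flocks/${key}     # two args: creates /tmp/mad, then fails on "audit/.flocks/remoteurl123"
    touch "${WorkDir}/.flocks/${key}"   # one arg: creates the intended flock file
_________________________________________________________________________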
diff --git a/hosts/ranoz.sh b/hosts/ranoz.sh
index f7210cf..7aa4d91 100644
--- a/hosts/ranoz.sh
+++ b/hosts/ranoz.sh
@@ -43,24 +43,24 @@ rz_DownloadFile() {
     tor_identity="${RANDOM}"
     finalAttempt="false"
     for ((z=0; z<=$MaxUrlRetries; z++)); do
-        if [ $z -eq $MaxUrlRetries ] ; then
+        if [[ $z -eq $MaxUrlRetries ]] ; then
             finalAttempt="true"
         fi
         CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
         trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
         if rz_FetchFileInfo $finalAttempt && rz_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
             return 0
-        elif [ $z -lt $MaxUrlRetries ]; then
-            if [ "${fileAlreadyDone}" == "true" ] ; then
+        elif [[ $z -lt $MaxUrlRetries ]]; then
+            if [[ "${fileAlreadyDone}" == "true" ]] ; then
                 break
             fi
             if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
-                if [ "${DebugAllEnabled}" == "true" ] ; then
+                if [[ "${DebugAllEnabled}" == "true" ]] ; then
                     debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
                 fi
             fi
             if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
-                if [ "${DebugAllEnabled}" == "true" ] ; then
+                if [[ "${DebugAllEnabled}" == "true" ]] ; then
                     debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
                 fi
                 rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
@@ -88,15 +88,15 @@ rz_FetchFileInfo() {
     CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
     trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
     response=$(tor_curl_request --insecure -L -s "$remote_url")
-    if [ "${DebugAllEnabled}" == "true" ] ; then
+    if [[ "${DebugAllEnabled}" == "true" ]] ; then
         debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}"
     fi
     if [[ -z $response ]] ; then
-        if [ $i == $maxfetchretries ] ; then
+        if [[ $i == $maxfetchretries ]] ; then
             printf "\\n"
             echo -e "${RED}| Failed to extract download url [1]${NC}"
             warnAndRetryUnknownError=true
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 failedRetryDownload "${remote_url}" "Failed to extract download url [1]" ""
             fi
             return 1
@@ -118,7 +118,7 @@ rz_FetchFileInfo() {
         removedDownload "${remote_url}" "The file appears to be gone [NEXT_NOT_FOUND]"
         return 1
     fi
-    if [ "$filename_override" == "" ] ; then
+    if [[ "$filename_override" == "" ]] ; then
         filename=$(grep -oP '(?<=\[\\"\$\\",\\"title\\",\\"2\\",\{\\"children\\":\\").*?(?=\\"\}\],.*$)' <<< "$response")
     fi
     if grep -Eqi '\\"props\\":\{\}\},\\"href\\":\\"' <<< "$response"; then
@@ -137,11 +137,11 @@ rz_FetchFileInfo() {
         fi
         break
     else
-        if [ $i == $maxfetchretries ] ; then
+        if [[ $i == $maxfetchretries ]] ; then
             printf "\\n"
             echo -e "${RED}| Failed to extract download url [2]${NC}"
             warnAndRetryUnknownError=true
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 failedRetryDownload "${remote_url}" "Failed to extract download url [2]" ""
             fi
             return 1
@@ -158,16 +158,16 @@ rz_FetchFileInfo() {
     CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
     trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${rz_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
     file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url")
-    if [ "${DebugAllEnabled}" == "true" ] ; then
+    if [[ "${DebugAllEnabled}" == "true" ]] ; then
         debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
     fi
     if [[ -z $file_header ]] ; then
-        if [ $j == $maxfetchretries ] ; then
+        if [[ $j == $maxfetchretries ]] ; then
             rm -f "${rz_cookie_jar}";
             printf "\\n"
             echo -e "${RED}| Failed to extract file info${NC}"
             warnAndRetryUnknownError=true
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 failedRetryDownload "${remote_url}" "Failed to extract file info" ""
             fi
             return 1
@@ -184,12 +184,12 @@ rz_FetchFileInfo() {
         return 1
     fi
     if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then
-        if [ $j == $maxfetchretries ] ; then
+        if [[ $j == $maxfetchretries ]] ; then
             rm -f "${rz_cookie_jar}";
             printf "\\n"
             echo -e "${RED}| Bad server response${NC}"
             warnAndRetryUnknownError=true
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 failedRetryDownload "${remote_url}" "Bad server response" ""
             fi
             return 1
@@ -197,17 +197,17 @@ rz_FetchFileInfo() {
             continue
         fi
     fi
-    if [ -z "$filename" ]; then
+    if [[ -z "$filename" ]]; then
         filename=$(grep -oP '(?<=filename\*\=).*?(?=$)' <<< "$file_header")
         filename="${filename//[$'\t\r\n\0']}"
         filename="${filename//UTF-8\'\'}"
     fi
-    if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then
-        if [ $j == $maxfetchretries ] ; then
+    if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then
+        if [[ $j == $maxfetchretries ]] ; then
             printf "\\n"
             echo -e "${RED}| Failed to extract file name${NC}"
             warnAndRetryUnknownError=true
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 failedRetryDownload "${remote_url}" "Failed to extract file name" ""
             fi
             return 1
@@ -217,8 +217,8 @@ rz_FetchFileInfo() {
         fi
         break #Good to go here
     done
-    touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}
-    if [ ! "$filename_override" == "" ] ; then
+    touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
+    if [[ ! "$filename_override" == "" ]] ; then
         filename="$filename_override"
     fi
     filename=$(sanitize_file_or_folder_name "${filename}")
@@ -226,8 +226,8 @@ rz_FetchFileInfo() {
     echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
     file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
     file_size_bytes=${file_size_bytes//[$'\t\r\n']}
-    if [ -z $file_size_bytes ] ; then
-        if [ "${finalAttempt}" == "true" ] ; then
+    if [[ -z $file_size_bytes ]] ; then
+        if [[ "${finalAttempt}" == "true" ]] ; then
             failedRetryDownload "${remote_url}" "Filesize not found!" ""
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -256,19 +256,19 @@ rz_GetFile() { retryCnt=$2 finalAttempt=$3 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" - if [ -f "$file_path" ]; then + if [[ -f "$file_path" ]]; then rm -f "$file_path" fi for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${rz_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ "$download_url" --continue-at - --output "$file_path" @@ -277,7 +277,7 @@ rz_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -309,7 +309,7 @@ rz_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -318,9 +318,9 @@ rz_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -332,15 +332,15 @@ rz_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
                     truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
@@ -352,21 +352,21 @@ rz_GetFile() {
             fi
             if ((j >= $MaxDownloadRetries)) ; then
                 rm -f "$flockDownload";
-                if [ "${finalAttempt}" == "true" ] ; then
+                if [[ "${finalAttempt}" == "true" ]] ; then
                     droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
                 fi
                 return 1
             else
                 continue
             fi
-        elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
-            if [ -f "$file_path" ] ; then
+        elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then
+            if [[ -f "$file_path" ]] ; then
                 rm -rf "$file_path"
             fi
             echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
             if ((j >= $MaxDownloadRetries)) ; then
                 rm -f "$flockDownload";
-                if [ "${finalAttempt}" == "true" ] ; then
+                if [[ "${finalAttempt}" == "true" ]] ; then
                     droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
                 fi
                 return 1
@@ -378,7 +378,7 @@ rz_GetFile() {
         echo -e "\n${RED}Download failed, file is incomplete.${NC}"
         if ((j >= $MaxDownloadRetries)) ; then
             rm -f "$flockDownload";
-            if [ "${finalAttempt}" == "true" ] ; then
+            if [[ "${finalAttempt}" == "true" ]] ; then
                 droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
             fi
             return 1
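_________________________________________________________________________
Audit note: the retry hunks above all share one repair pattern. Each
attempt records the partial's size before resuming with --continue-at -;
if the attempt only appends a tiny tail (under 1 KiB) or CheckNoHtml
flags markup, the tail is treated as an error page from a bad exit node
and rolled back instead of kept. A condensed sketch of that logic, using
truncate(1) in place of the scripts' truncateDownload helper:

    #!/usr/bin/env bash
    file_path="$1" file_size_bytes="$2"      # expected size comes from the earlier HEAD request
    pd_presize=0
    [[ -f "$file_path" ]] && pd_presize=$(stat --format="%s" "$file_path")
    # ... resume the transfer: tor_curl_request "$download_url" --continue-at - --output "$file_path"
    received_file_size=0
    [[ -f "$file_path" ]] && received_file_size=$(stat --format="%s" "$file_path")
    downDelta=$(( received_file_size - pd_presize ))
    if [[ "$received_file_size" -ne "$file_size_bytes" ]] && (( downDelta > 0 && downDelta < 1024 )); then
        # a sub-1 KiB append is more likely an HTML error page than payload:
        # cut the file back to its pre-attempt size and let the retry loop continue
        truncate -s "$pd_presize" "$file_path"
    fi
_________________________________________________________________________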
grep -Eqi "Your IP has been banned|you are banned" <<< "$response"; then rm -f "${snow_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -138,11 +138,11 @@ snow_FetchFileInfo() { file_size_readable=$(grep -oPi '(?<=