diff --git a/.audit/mad-audit-curl.log b/.audit/mad-audit-curl.log index 173fb81..71d76b6 100755 --- a/.audit/mad-audit-curl.log +++ b/.audit/mad-audit-curl.log @@ -1,4 +1,4 @@ -DateTime: 25.02.13 +DateTime: 25.02.19 Files: ./hosts/1fichier.sh @@ -128,6 +128,7 @@ Files: ./hosts/up_quax.sh ./hosts/up_ramsgaard.sh ./hosts/up_ranoz.sh +./hosts/up_sendnow.sh ./hosts/up_shareonline.sh ./hosts/up_skrepr.sh ./hosts/up_soyjak.sh @@ -167,7 +168,7 @@ _________________________________________________________________________ ./hosts/9saves.sh:90: response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url") ./hosts/9saves.sh:139: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/9saves.sh:188: file_header=$(tor_curl_request --insecure --head -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$download_url") -./hosts/9saves.sh:290: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/9saves.sh:290: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/9saves.sh:292: tor_curl_request --insecure \ ./hosts/9saves.sh:297: tor_curl_request --insecure \ ./hosts/9saves.sh:304: tor_curl_request --insecure \ @@ -176,8 +177,8 @@ _________________________________________________________________________ ./hosts/anonfile.sh:186: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/anonfile.sh:240: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" ./hosts/anonfile.sh:340: response=$(tor_curl_request --insecure -L -s -X POST \ -./hosts/anonfile.sh:451: file_header=$(tor_curl_request -i -s --head \ -./hosts/anonfile.sh:557: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/anonfile.sh:453: file_header=$(tor_curl_request -i -s --head \ +./hosts/anonfile.sh:557: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/anonfile.sh:559: tor_curl_request --insecure \ ./hosts/anonfile.sh:565: tor_curl_request --insecure \ ./hosts/anonfile.sh:572: tor_curl_request --insecure \ @@ -188,14 +189,14 @@ _________________________________________________________________________ ./hosts/anonsharing.sh:273: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --output "$file_path" ./hosts/anonsharing.sh:275: tor_curl_request --insecure "$download_url" --output "$file_path" ./hosts/ateasystems.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/ateasystems.sh:218: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/ateasystems.sh:218: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/ateasystems.sh:220: tor_curl_request --insecure \ ./hosts/ateasystems.sh:225: tor_curl_request --insecure \ ./hosts/ateasystems.sh:231: tor_curl_request --insecure \ ./hosts/ateasystems.sh:237: tor_curl_request --insecure \ ./hosts/bedrive.sh:90: response=$(tor_curl_request --insecure -L -s \ ./hosts/bedrive.sh:149: file_header=$(tor_curl_request --insecure --head -L -i -s \ -./hosts/bedrive.sh:270: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/bedrive.sh:270: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/bedrive.sh:272: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/bedrive.sh:277: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/bedrive.sh:284: tor_curl_request --insecure -L -G --no-alpn \ @@ -208,7 +209,7 @@ _________________________________________________________________________ ./hosts/bowfile.sh:91: response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ 
./hosts/bowfile.sh:143: response=$(tor_curl_request --insecure -s --head \ ./hosts/bowfile.sh:182: file_header=$(tor_curl_request --insecure -L -sS -i --head \ -./hosts/bowfile.sh:297: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/bowfile.sh:297: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/bowfile.sh:299: tor_curl_request --insecure -L \ ./hosts/bowfile.sh:305: tor_curl_request --insecure -L \ ./hosts/bowfile.sh:312: tor_curl_request --insecure -L \ @@ -217,7 +218,7 @@ _________________________________________________________________________ ./hosts/click.sh:226: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/click.sh:345: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/click.sh:434: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") -./hosts/click.sh:533: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/click.sh:533: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/click.sh:535: tor_curl_request --insecure \ ./hosts/click.sh:542: tor_curl_request --insecure \ ./hosts/click.sh:550: tor_curl_request --insecure \ @@ -226,7 +227,7 @@ _________________________________________________________________________ ./hosts/dailyuploads.sh:139: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" ./hosts/dailyuploads.sh:286: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/dailyuploads.sh:392: file_header=$(tor_curl_request -i -s --head \ -./hosts/dailyuploads.sh:496: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dailyuploads.sh:496: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/dailyuploads.sh:498: tor_curl_request --insecure \ ./hosts/dailyuploads.sh:504: tor_curl_request --insecure \ ./hosts/dailyuploads.sh:511: tor_curl_request --insecure \ @@ -235,7 +236,7 @@ _________________________________________________________________________ ./hosts/dashfile.sh:177: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/dashfile.sh:308: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/dashfile.sh:397: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") -./hosts/dashfile.sh:495: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dashfile.sh:495: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/dashfile.sh:497: tor_curl_request --insecure \ ./hosts/dashfile.sh:502: tor_curl_request --insecure \ ./hosts/dashfile.sh:508: tor_curl_request --insecure \ @@ -243,7 +244,7 @@ _________________________________________________________________________ ./hosts/dataupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url") ./hosts/dataupload.sh:166: response=$(tor_curl_request --insecure -svo. 
-X POST \ ./hosts/dataupload.sh:234: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") -./hosts/dataupload.sh:349: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dataupload.sh:349: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/dataupload.sh:351: tor_curl_request --insecure \ ./hosts/dataupload.sh:357: tor_curl_request --insecure \ ./hosts/dataupload.sh:364: tor_curl_request --insecure \ @@ -251,22 +252,22 @@ _________________________________________________________________________ ./hosts/desiupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url") ./hosts/desiupload.sh:202: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/desiupload.sh:306: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") -./hosts/desiupload.sh:404: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/desiupload.sh:404: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/desiupload.sh:406: tor_curl_request --insecure \ ./hosts/desiupload.sh:411: tor_curl_request --insecure \ ./hosts/desiupload.sh:417: tor_curl_request --insecure \ ./hosts/desiupload.sh:433: tor_curl_request --insecure \ -./hosts/dosya.sh:108: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dosya.sh:108: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/dosya.sh:109: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ ./hosts/dosya.sh:113: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ -./hosts/dosya.sh:172: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/dosya.sh:172: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/dosya.sh:173: file_header=$(tor_curl_request_extended --insecure --head -L -s \ ./hosts/dosya.sh:179: file_header=$(tor_curl_request_extended --insecure --head -L -s \ ./hosts/dosya.sh:402: tor_curl_request -L -G --insecure \ ./hosts/dosya.sh:417: tor_curl_request -L -G --insecure \ ./hosts/downloadgg.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" "$remote_url") ./hosts/downloadgg.sh:169: response=$(tor_curl_request --insecure -svo. 
-X POST \ -./hosts/downloadgg.sh:255: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/downloadgg.sh:255: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/downloadgg.sh:257: tor_curl_request --insecure -X POST \ ./hosts/downloadgg.sh:265: tor_curl_request --insecure -X POST \ ./hosts/downloadgg.sh:275: tor_curl_request --insecure -X POST \ @@ -279,7 +280,7 @@ _________________________________________________________________________ ./hosts/fileblade.sh:165: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/fileblade.sh:281: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/fileblade.sh:335: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") -./hosts/fileblade.sh:450: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/fileblade.sh:450: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/fileblade.sh:452: tor_curl_request --insecure -L \ ./hosts/fileblade.sh:456: tor_curl_request --insecure -L \ ./hosts/fileblade.sh:461: tor_curl_request --insecure \ @@ -294,9 +295,9 @@ _________________________________________________________________________ ./hosts/filedot.sh:406: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") ./hosts/filedot.sh:499: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" ./hosts/filedot.sh:501: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" -./hosts/filehaus.sh:100: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") -./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" -./hosts/filehaus.sh:195: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:101: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") +./hosts/filehaus.sh:191: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" ./hosts/firestorage.sh:98: response=$(tor_curl_request --insecure -L -s "${fixed_url}") ./hosts/firestorage.sh:226: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") ./hosts/firestorage.sh:335: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" @@ -304,8 +305,8 @@ _________________________________________________________________________ ./hosts/gofile.sh:97: response=$(tor_curl_request --insecure -s -X POST \ ./hosts/gofile.sh:170: response=$(tor_curl_request --insecure -G -L -s \ ./hosts/gofile.sh:258: file_header=$(tor_curl_request --insecure -L --head -s \ -./hosts/gofile.sh:377: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/gofile.sh:391: tor_curl_request --insecure -G \ +./hosts/gofile.sh:393: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin 
--speed-time $DownloadTimeoutInterval \ +./hosts/gofile.sh:407: tor_curl_request --insecure -G \ ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") ./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") ./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") @@ -340,12 +341,12 @@ _________________________________________________________________________ ./hosts/nippy.sh:188: file_header=$(tor_curl_request --insecure -L --head -s \ ./hosts/nippy.sh:299: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/nippy.sh:302: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" -./hosts/oshi.sh:101: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") -./hosts/oshi.sh:195: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./hosts/oshi.sh:197: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:108: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") +./hosts/oshi.sh:202: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:204: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" ./hosts/pixeldrain.sh:94: response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid") ./hosts/pixeldrain.sh:256: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") -./hosts/pixeldrain.sh:322: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/pixeldrain.sh:322: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/pixeldrain.sh:324: tor_curl_request --insecure \ ./hosts/pixeldrain.sh:328: tor_curl_request --insecure \ ./hosts/pixeldrain.sh:333: tor_curl_request --insecure \ @@ -355,40 +356,40 @@ _________________________________________________________________________ ./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" ./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") ./hosts/ranoz.sh:160: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url") -./hosts/ranoz.sh:270: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/ranoz.sh:270: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/ranoz.sh:272: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/ranoz.sh:276: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/ranoz.sh:296: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url") ./hosts/sendnow.sh:160: response=$(tor_curl_request --insecure -L -svo. 
-X POST \ -./hosts/sendnow.sh:203: file_header=$(tor_curl_request_extended --insecure --head -Lis \ -./hosts/sendnow.sh:324: if [ "${UseTorCurlImpersonate}" == "true" ]; then -./hosts/sendnow.sh:326: tor_curl_request_extended --insecure -L --no-alpn \ -./hosts/sendnow.sh:344: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:363: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:382: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:204: file_header=$(tor_curl_request_extended --insecure --head -Lis \ +./hosts/sendnow.sh:325: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./hosts/sendnow.sh:327: tor_curl_request_extended --insecure -L --no-alpn \ +./hosts/sendnow.sh:345: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:364: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:383: tor_curl_request --insecure -L --no-alpn \ ./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/syspro.sh:186: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/syspro.sh:186: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/syspro.sh:188: tor_curl_request --insecure -L \ ./hosts/syspro.sh:193: tor_curl_request --insecure \ ./hosts/syspro.sh:199: tor_curl_request --insecure -L \ ./hosts/syspro.sh:205: tor_curl_request --insecure -L \ ./hosts/tempfileme.sh:89: response=$(tor_curl_request --insecure -L -s "$remote_url") ./hosts/tempfileme.sh:170: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url}" "$download_url") -./hosts/tempfileme.sh:298: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/tempfileme.sh:298: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/tempfileme.sh:300: tor_curl_request --insecure -L \ ./hosts/tempfileme.sh:305: tor_curl_request --insecure -L \ ./hosts/tempfileme.sh:311: tor_curl_request --insecure -L \ ./hosts/tempfileme.sh:326: tor_curl_request --insecure -L \ ./hosts/tempsh.sh:88: file_header=$(tor_curl_request --insecure -s -D - -o /dev/null -X POST \ -./hosts/tempsh.sh:225: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/tempsh.sh:225: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/tempsh.sh:227: tor_curl_request --insecure -X POST \ ./hosts/tempsh.sh:231: tor_curl_request --insecure -X POST \ ./hosts/tempsh.sh:236: tor_curl_request --insecure -X POST \ ./hosts/tempsh.sh:250: tor_curl_request --insecure -X POST \ ./hosts/torup.sh:92: response=$(tor_curl_request --insecure -L -s \ -./hosts/torup.sh:188: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/torup.sh:188: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/torup.sh:190: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/torup.sh:196: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/torup.sh:203: tor_curl_request --insecure -L -G --no-alpn \ @@ -396,14 +397,14 @@ _________________________________________________________________________ ./hosts/up2share.sh:91: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ ./hosts/up2share.sh:144: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ ./hosts/up2share.sh:195: file_header=$(tor_curl_request --insecure -L -s --head \ -./hosts/up2share.sh:312: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/up2share.sh:312: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/up2share.sh:314: tor_curl_request --insecure -L \ ./hosts/up2share.sh:321: 
tor_curl_request --insecure -L \ ./hosts/up2share.sh:329: tor_curl_request --insecure -L \ ./hosts/up2share.sh:347: tor_curl_request --insecure -L \ ./hosts/uploadee.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") ./hosts/uploadee.sh:143: file_header=$(tor_curl_request --insecure --head -L -s -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --referer "$remote_url" "$download_url") -./hosts/uploadee.sh:249: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/uploadee.sh:249: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/uploadee.sh:251: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/uploadee.sh:257: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/uploadee.sh:265: tor_curl_request --insecure -L -G --no-alpn \ @@ -411,7 +412,7 @@ _________________________________________________________________________ ./hosts/uploadev.sh:91: response=$(tor_curl_request --insecure -L -s -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ ./hosts/uploadev.sh:181: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/uploadev.sh:268: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") -./hosts/uploadev.sh:367: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./hosts/uploadev.sh:367: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./hosts/uploadev.sh:369: tor_curl_request --insecure -L \ ./hosts/uploadev.sh:374: tor_curl_request --insecure -L \ ./hosts/uploadev.sh:380: tor_curl_request --insecure -L \ @@ -422,10 +423,9 @@ _________________________________________________________________________ ./hosts/uploadflix.sh:286: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" ./hosts/uploadflix.sh:288: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" ./hosts/uploadhive.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/uploadhive.sh:134: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url") -./hosts/uploadhive.sh:185: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url") -./hosts/uploadhive.sh:279: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./hosts/uploadhive.sh:281: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:135: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url") +./hosts/uploadhive.sh:247: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:249: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" ./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/") ./hosts/up_1fichier.sh:180: response=$(tor_curl_upload --insecure -L \ ./hosts/up_anonfile.sh:102: response=$(tor_curl_upload --insecure -i \ @@ -462,8 +462,10 @@ _________________________________________________________________________ ./hosts/up_oshi.sh:110: response=$(tor_curl_upload --insecure \ ./hosts/up_pixeldrain.sh:112: response=$(tor_curl_upload --insecure -X PUT \ ./hosts/up_quax.sh:102: response=$(tor_curl_upload --insecure -i \ -./hosts/up_ranoz.sh:130: response=$(tor_curl_upload --insecure -L -i -s \ 
-./hosts/up_ranoz.sh:160: response=$(tor_curl_upload --insecure -i -X PUT \ +./hosts/up_ranoz.sh:128: response=$(tor_curl_upload --insecure -L -i -s \ +./hosts/up_ranoz.sh:155: response=$(tor_curl_upload --insecure -i -X PUT \ +./hosts/up_sendnow.sh:101: response=$(tor_curl_request --insecure -L -s 'https://send.now/upload') +./hosts/up_sendnow.sh:138: response=$(tor_curl_upload --insecure -i \ ./hosts/up_shareonline.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_syspro.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_tempfileme.sh:102: response=$(tor_curl_upload --insecure -i \ @@ -476,7 +478,7 @@ _________________________________________________________________________ ./hosts/up_uploadee.sh:176: response=$(tor_curl_upload --insecure -i -L \ ./hosts/up_uploadev.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_uploadflix.sh:106: response=$(tor_curl_upload --insecure -i \ -./hosts/up_uploadhive.sh:129: response=$(tor_curl_upload --insecure -i \ +./hosts/up_uploadhive.sh:130: response=$(tor_curl_upload --insecure -i \ ./hosts/up_uploadraja.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_uwabaki.sh:102: response=$(tor_curl_upload --insecure -i -L \ ./hosts/up_yolobit.sh:102: response=$(tor_curl_upload --insecure -i \ @@ -485,120 +487,120 @@ _________________________________________________________________________ ./hosts/youdbox.sh:183: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") ./hosts/youdbox.sh:276: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" ./hosts/youdbox.sh:278: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" -./mad.sh:4:UseTorCurlImpersonate=false -./mad.sh:87:tor_curl_request() { -./mad.sh:88: if [ "${UseTorCurlImpersonate}" == "true" ]; then -./mad.sh:89: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" -./mad.sh:91: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" -./mad.sh:94:tor_curl_request_extended() { -./mad.sh:96: if [ "${UseTorCurlImpersonate}" == "true" ]; then -./mad.sh:97: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" -./mad.sh:99: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" -./mad.sh:102:tor_curl_upload() { -./mad.sh:103: if [ "${UseTorCurlImpersonate}" == "true" ]; then -./mad.sh:105: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@" -./mad.sh:107: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@" -./mad.sh:111: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: 
en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" -./mad.sh:113: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" -./mad.sh:1114:install_curl_impersonate() { -./mad.sh:1116: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive." -./mad.sh:1117: echo -e "- Currently uses curl v8.1.1." -./mad.sh:1121: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." -./mad.sh:1122: echo -e "+ Currently uses curl v8.7.1" -./mad.sh:1126: PS3='Please select which curl_impersonate to install: ' -./mad.sh:1134: install_curl_impersonate_lwthiker_orig -./mad.sh:1138: install_curl_impersonate_lexiforest_fork -./mad.sh:1148:install_curl_impersonate_lwthiker_orig() { -./mad.sh:1152: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate." -./mad.sh:1153: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates" -./mad.sh:1156: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}" -./mad.sh:1159: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) -./mad.sh:1161: debugHtml "github" "lbf_inst_curlimp$j" "$response" -./mad.sh:1164: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") -./mad.sh:1174: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { -./mad.sh:1176: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1179: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") -./mad.sh:1181: debugHtml "github" "head_inst_curlimp$j" "${file_header}" -./mad.sh:1229: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" -./mad.sh:1258: echo -e "| Extracting curl_impersonate..." -./mad.sh:1260: rm -f "${ScriptDir}"/curl* -./mad.sh:1261: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/" -./mad.sh:1262: mv "$extract_location/curl_ff109" "${ScriptDir}/" -./mad.sh:1263: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." -./mad.sh:1271:install_curl_impersonate_lexiforest_fork() { -./mad.sh:1275: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." 
-./mad.sh:1276: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs" -./mad.sh:1279: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}" -./mad.sh:1282: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) -./mad.sh:1284: debugHtml "github" "lbf_inst_curlimp$j" "$response" -./mad.sh:1287: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") -./mad.sh:1297: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { -./mad.sh:1299: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1302: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") -./mad.sh:1304: debugHtml "github" "head_inst_curlimp$j" "${file_header}" -./mad.sh:1352: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" -./mad.sh:1381: echo -e "| Extracting curl_impersonate..." -./mad.sh:1383: rm -f "${ScriptDir}"/curl* -./mad.sh:1384: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/" -./mad.sh:1385: mv "$extract_location/curl_chrome131" "${ScriptDir}/" -./mad.sh:1386: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." -./mad.sh:1548: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :" -./mad.sh:1556: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1557: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1566: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1568: echo -e "$maud_curl" -./mad.sh:1570: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1572: echo -e "$maud_torcurl" -./mad.sh:1584: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1585: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1594: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" -./mad.sh:1596: echo -e "$maud_curl" -./mad.sh:1598: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1600: echo -e "$maud_torcurl" -./mad.sh:1606: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1607: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1616: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1618: echo -e "$maud_curl" -./mad.sh:1620: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1622: echo -e "$maud_torcurl" -./mad.sh:2569: if [ "${UseTorCurlImpersonate}" == "true" ]; then -./mad.sh:2570: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" -./mad.sh:2572: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" -./mad.sh:2744: if [ "${UseTorCurlImpersonate}" == "true" ]; then -./mad.sh:2745: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" -./mad.sh:2747: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" -./mad.sh:2945: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ -./mad.sh:2952: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | -./mad.sh:3089: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" -./mad.sh:3142: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./mad.sh:3144: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./mad.sh:3342: response=$(tor_curl_upload --insecure -i \ -./mad.sh:3349: response=$(tor_curl_upload --insecure -i \ -./mad.sh:3420:if [ "${UseTorCurlImpersonate}" == "true" ]; then -./mad.sh:3421: curl_impersonate=() -./mad.sh:3422: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1) -./mad.sh:3423: bFoundCurlHeader=false -./mad.sh:3427: curl_impersonate=($fil) -./mad.sh:3428: bFoundCurlHeader=true -./mad.sh:3432: if [ "$bFoundCurlHeader" == "false" ]; then -./mad.sh:3433: echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}" -./mad.sh:3436: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}." -./mad.sh:3439: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\"" -./mad.sh:3441: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL." -./mad.sh:3445: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}." -./mad.sh:3446: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script." -./mad.sh:3449: echo -e "run $0 install_curl_impersonate\\n" -./mad.sh:3451: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" 
&& { -./mad.sh:3452: UseTorCurlImpersonate=false -./mad.sh:3453: install_curl_impersonate -./mad.sh:3537: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)" -./mad.sh:3538: printf " %s install_curl_impersonate\\n" "$0" -./mad.sh:3616:elif [[ "$arg1" == "install_curl_impersonate" ]]; then -./mad.sh:3617: install_curl_impersonate -./mad.sh:3648:if [ "${UseTorCurlImpersonate}" == "true" ]; then -./mad.sh:3649: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" -./mad.sh:3651: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" -./plugins/pjscloud.sh:44: if [ "${UseTorCurlImpersonate}" == "true" ]; then +./mad.sh:97:UseTorCurlImpersonate=false +./mad.sh:393:tor_curl_request() { +./mad.sh:394: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:395: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" +./mad.sh:397: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" +./mad.sh:400:tor_curl_request_extended() { +./mad.sh:402: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:403: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" +./mad.sh:405: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" +./mad.sh:408:tor_curl_upload() { +./mad.sh:409: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:411: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@" +./mad.sh:413: "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@" +./mad.sh:417: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" +./mad.sh:419: curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" +./mad.sh:1420:install_curl_impersonate() { +./mad.sh:1422: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive." 
+./mad.sh:1423: echo -e "- Currently uses curl v8.1.1." +./mad.sh:1427: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." +./mad.sh:1428: echo -e "+ Currently uses curl v8.7.1" +./mad.sh:1432: PS3='Please select which curl_impersonate to install: ' +./mad.sh:1440: install_curl_impersonate_lwthiker_orig +./mad.sh:1444: install_curl_impersonate_lexiforest_fork +./mad.sh:1454:install_curl_impersonate_lwthiker_orig() { +./mad.sh:1458: echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate." +./mad.sh:1459: echo -e "+ Currently uses curl v8.1.1, and has low activity for updates" +./mad.sh:1462: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}" +./mad.sh:1465: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) +./mad.sh:1467: debugHtml "github" "lbf_inst_curlimp$j" "$response" +./mad.sh:1470: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") +./mad.sh:1480: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { +./mad.sh:1482: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1485: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") +./mad.sh:1487: debugHtml "github" "head_inst_curlimp$j" "${file_header}" +./mad.sh:1535: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1564: echo -e "| Extracting curl_impersonate..." +./mad.sh:1566: rm -f "${ScriptDir}"/curl* +./mad.sh:1567: mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/" +./mad.sh:1568: mv "$extract_location/curl_ff109" "${ScriptDir}/" +./mad.sh:1569: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." +./mad.sh:1577:install_curl_impersonate_lexiforest_fork() { +./mad.sh:1581: echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate." +./mad.sh:1582: echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs" +./mad.sh:1585: echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}" +./mad.sh:1588: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1590: debugHtml "github" "lbf_inst_curlimp$j" "$response" +./mad.sh:1593: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") +./mad.sh:1603: yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && { +./mad.sh:1605: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1608: file_header=$(tor_curl_request --insecure --head -Ls "$download_url") +./mad.sh:1610: debugHtml "github" "head_inst_curlimp$j" "${file_header}" +./mad.sh:1658: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1687: echo -e "| Extracting curl_impersonate..." 
+./mad.sh:1689: rm -f "${ScriptDir}"/curl* +./mad.sh:1690: mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/" +./mad.sh:1691: mv "$extract_location/curl_chrome131" "${ScriptDir}/" +./mad.sh:1692: echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..." +./mad.sh:1854: echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :" +./mad.sh:1862: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1863: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1872: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1874: echo -e "$maud_curl" +./mad.sh:1876: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1878: echo -e "$maud_torcurl" +./mad.sh:1890: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1891: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1900: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" +./mad.sh:1902: echo -e "$maud_curl" +./mad.sh:1904: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1906: echo -e "$maud_torcurl" +./mad.sh:1912: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1913: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1922: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1924: echo -e "$maud_curl" +./mad.sh:1926: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1928: echo -e "$maud_torcurl" +./mad.sh:2875: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:2876: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:2878: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./mad.sh:3050: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:3051: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:3053: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./mad.sh:3251: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ +./mad.sh:3258: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./mad.sh:3395: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" +./mad.sh:3448: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./mad.sh:3450: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./mad.sh:3648: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3655: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3726:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:3727: curl_impersonate=() +./mad.sh:3728: readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1) +./mad.sh:3729: bFoundCurlHeader=false +./mad.sh:3733: curl_impersonate=($fil) +./mad.sh:3734: bFoundCurlHeader=true +./mad.sh:3738: if [[ "$bFoundCurlHeader" == "false" ]]; then +./mad.sh:3739: echo -e "${RED}[ERROR] Missing 
dependency \"curl-impersonate\"!${NC}" +./mad.sh:3742: echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}." +./mad.sh:3745: echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\"" +./mad.sh:3747: echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at end of URL." +./mad.sh:3751: echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}." +./mad.sh:3752: echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script." +./mad.sh:3755: echo -e "run $0 install_curl_impersonate\\n" +./mad.sh:3757: yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && { +./mad.sh:3758: UseTorCurlImpersonate=false +./mad.sh:3759: install_curl_impersonate +./mad.sh:3843: echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)" +./mad.sh:3844: printf " %s install_curl_impersonate\\n" "$0" +./mad.sh:3922:elif [[ "$arg1" == "install_curl_impersonate" ]]; then +./mad.sh:3923: install_curl_impersonate +./mad.sh:3954:if [[ "${UseTorCurlImpersonate}" == "true" ]]; then +./mad.sh:3955: printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" +./mad.sh:3957: printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" +./plugins/pjscloud.sh:44: if [[ "${UseTorCurlImpersonate}" == "true" ]]; then ./plugins/pjscloud.sh:45: response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \ ./plugins/pjscloud.sh:53: response=$(curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \ diff --git a/.audit/mad-audit-http.log b/.audit/mad-audit-http.log index 9dce414..f4e3178 100755 --- a/.audit/mad-audit-http.log +++ b/.audit/mad-audit-http.log @@ -1,4 +1,4 @@ -DateTime: 25.02.13 +DateTime: 25.02.19 Files: ./hosts/1fichier.sh @@ -128,6 +128,7 @@ Files: ./hosts/up_quax.sh ./hosts/up_ramsgaard.sh ./hosts/up_ranoz.sh +./hosts/up_sendnow.sh ./hosts/up_shareonline.sh ./hosts/up_skrepr.sh ./hosts/up_soyjak.sh @@ -212,7 +213,7 @@ _________________________________________________________________________ ./hosts/gofile.sh:183: "https://api.gofile.io/contents/$file_id") ./hosts/gofile.sh:185: debugHtml "${remote_url##*/}" "gofile_contents$i" "url: https://api.gofile.io/contents/${file_id}?${form_data}"$'\n'"${response}" ./hosts/gofile.sh:212: cnturls=$(grep -oin 'https://' <<< "$download_url" | wc -l) -./hosts/gofile.sh:311: cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header") +./hosts/gofile.sh:327: cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header") ./hosts/hexload.sh:102: response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data") ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") ./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") @@ -225,18 +226,19 @@ _________________________________________________________________________ ./hosts/kraken.sh:155: kraken_action="https://krakenfiles.com/download/${kraken_action##*/}" ./hosts/nippy.sh:160: download_url="https:"$(grep -oP '(?<=
^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed) -./mad.sh:412: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https -./mad.sh:415: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:417: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:438: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed) -./mad.sh:440: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https -./mad.sh:443: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:445: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:466: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed) -./mad.sh:468: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https -./mad.sh:471: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:473: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:495: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed) -./mad.sh:497: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https -./mad.sh:500: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:502: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:526: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed) -./mad.sh:528: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https -./mad.sh:531: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:533: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:559: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed) -./mad.sh:561: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https -./mad.sh:581: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed) -./mad.sh:602: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed) -./mad.sh:604: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https -./mad.sh:607: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:609: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:625: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed) -./mad.sh:627: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https -./mad.sh:630: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:632: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:651: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed) -./mad.sh:653: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https -./mad.sh:656: sed -i -e 
"s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:658: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:678: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed) -./mad.sh:680: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https -./mad.sh:683: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:685: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:703: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed) -./mad.sh:705: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https -./mad.sh:708: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:710: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:729: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed) -./mad.sh:731: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https -./mad.sh:734: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) -./mad.sh:736: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https -./mad.sh:1159: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) -./mad.sh:1176: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1282: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) -./mad.sh:1299: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' -./mad.sh:1562: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1590: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1612: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:2928: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then -./mad.sh:3459:arg2="$2" # auto, filelist, -./mad.sh:3556: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar" -./mad.sh:3558: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z" -./mad.sh:3560: echo -e ' - ie. 
direct=http://somehost.onion/abcD|filename.part1.rar' -./mad.sh:3779: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:3780: remote_url=${remote_url/http:/https:} -./mad.sh:3801: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:3802: remote_url=${remote_url/http:/https:} -./mad.sh:4168: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4169: remote_url=${remote_url/http:/https:} -./mad.sh:4227: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4228: remote_url=${remote_url/http:/https:} -./mad.sh:4254: if [[ ${remote_url} =~ ^http: ]] ; then -./mad.sh:4255: remote_url=${remote_url/http:/https:} +./mad.sh:716: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed) +./mad.sh:718: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https +./mad.sh:721: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:723: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:744: sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed) +./mad.sh:746: sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https +./mad.sh:749: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:751: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:772: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed) +./mad.sh:774: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https +./mad.sh:777: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:779: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:801: sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed) +./mad.sh:803: sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https +./mad.sh:806: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:808: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:832: sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed) +./mad.sh:834: sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https +./mad.sh:837: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:839: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:865: sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed) +./mad.sh:867: sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https +./mad.sh:887: sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed) +./mad.sh:908: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed) +./mad.sh:910: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https +./mad.sh:913: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:915: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:931: sed -i -e 
"s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed) +./mad.sh:933: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https +./mad.sh:936: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:938: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:957: sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed) +./mad.sh:959: sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https +./mad.sh:962: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:964: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:984: sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed) +./mad.sh:986: sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https +./mad.sh:989: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:991: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1009: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed) +./mad.sh:1011: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https +./mad.sh:1014: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:1016: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1035: sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed) +./mad.sh:1037: sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https +./mad.sh:1040: sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed) +./mad.sh:1042: sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https +./mad.sh:1465: response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) +./mad.sh:1482: download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1588: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) +./mad.sh:1605: download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz' +./mad.sh:1868: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1896: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1918: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:3234: if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then +./mad.sh:3765:arg2="$2" # auto, filelist, +./mad.sh:3862: echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar" +./mad.sh:3864: echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z" +./mad.sh:3866: echo -e ' - ie. 
direct=http://somehost.onion/abcD|filename.part1.rar' +./mad.sh:4085: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4086: remote_url=${remote_url/http:/https:} +./mad.sh:4107: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4108: remote_url=${remote_url/http:/https:} +./mad.sh:4474: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4475: remote_url=${remote_url/http:/https:} +./mad.sh:4533: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4534: remote_url=${remote_url/http:/https:} +./mad.sh:4560: if [[ ${remote_url} =~ ^http: ]] ; then +./mad.sh:4561: remote_url=${remote_url/http:/https:} ./plugins/pjscloud.sh:51: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null) ./plugins/pjscloud.sh:59: "https://PhantomJScloud.com/api/browser/v2/$RandomPjsKey/" & sleep 8s; kill -HUP $! 2>/dev/null) diff --git a/.audit/mad-audit-tor_curl-details.log b/.audit/mad-audit-tor_curl-details.log index d98d7e8..145f4ba 100755 --- a/.audit/mad-audit-tor_curl-details.log +++ b/.audit/mad-audit-tor_curl-details.log @@ -1,4 +1,4 @@ -DateTime: 25.02.13 +DateTime: 25.02.19 Files: ./hosts/1fichier.sh @@ -128,6 +128,7 @@ Files: ./hosts/up_quax.sh ./hosts/up_ramsgaard.sh ./hosts/up_ranoz.sh +./hosts/up_sendnow.sh ./hosts/up_shareonline.sh ./hosts/up_skrepr.sh ./hosts/up_soyjak.sh @@ -160,20 +161,20 @@ Files: MAD Audit of tor_curl (+10 lines after): (grep "tor_curl") _________________________________________________________________________ ./hosts/1fichier.sh:48: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s "${remote_url}") -./hosts/1fichier.sh:49: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/1fichier.sh:49: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/1fichier.sh:50: debugHtml "${remote_url##*/?}" "prechk$y" "${PAGE}" ./hosts/1fichier.sh:51: fi ./hosts/1fichier.sh:52: file_information=$(grep -oP '(?<=)[^<]*?(?=)' <<< "${PAGE}") ./hosts/1fichier.sh:53: size=$(echo "${file_information}" | tail -n 1) ./hosts/1fichier.sh:54: filename=$(echo "${file_information}" | head -n 1) -./hosts/1fichier.sh:55: if [ ! "$filename_override" == "" ] ; then +./hosts/1fichier.sh:55: if [[ ! "$filename_override" == "" ]] ; then ./hosts/1fichier.sh:56: filename="$filename_override" ./hosts/1fichier.sh:57: fi ./hosts/1fichier.sh:58: filename=$(sanitize_file_or_folder_name "${filename}") -- ./hosts/1fichier.sh:160: cdn_request=$(tor_curl_request --insecure -s -L -b "${fich_cookie_jar}" -c "${fich_cookie_jar}" -F "submit=Download" -F "pass=${fich_user_provided_password}" -F "adz=${fich_adz_parameter}" "${remote_url}") ./hosts/1fichier.sh:161: target_file_link=$(echo "$cdn_request" | grep -A 2 '
' | grep -oP '/dev/null ./hosts/anonsharing.sh:165: ) -./hosts/anonsharing.sh:166: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./hosts/anonsharing.sh:166: if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then ./hosts/anonsharing.sh:167: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ./hosts/anonsharing.sh:168: fi -- @@ -425,7 +426,7 @@ _________________________________________________________________________ ./hosts/anonsharing.sh:275: tor_curl_request --insecure "$download_url" --output "$file_path" ./hosts/anonsharing.sh:276: fi ./hosts/anonsharing.sh:277: received_file_size=0 -./hosts/anonsharing.sh:278: if [ -f "$file_path" ] ; then +./hosts/anonsharing.sh:278: if [[ -f "$file_path" ]] ; then ./hosts/anonsharing.sh:279: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/anonsharing.sh:280: fi ./hosts/anonsharing.sh:281: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -435,15 +436,15 @@ _________________________________________________________________________ ./hosts/anonsharing.sh:285: fi -- ./hosts/ateasystems.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/ateasystems.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/ateasystems.sh:89: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/ateasystems.sh:90: debugHtml "${remote_url##*/}" "atea_fetch$i" "${response}" ./hosts/ateasystems.sh:91: fi ./hosts/ateasystems.sh:92: if [[ -z $response ]] ; then -./hosts/ateasystems.sh:93: if [ $i == $maxfetchretries ] ; then +./hosts/ateasystems.sh:93: if [[ $i == $maxfetchretries ]] ; then ./hosts/ateasystems.sh:94: printf "\\n" ./hosts/ateasystems.sh:95: echo -e "${RED}| Failed to extract download link [1]${NC}" ./hosts/ateasystems.sh:96: warnAndRetryUnknownError=true -./hosts/ateasystems.sh:97: if [ "${finalAttempt}" == "true" ] ; then +./hosts/ateasystems.sh:97: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/ateasystems.sh:98: failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" -- ./hosts/ateasystems.sh:220: tor_curl_request --insecure \ @@ -456,7 +457,7 @@ _________________________________________________________________________ ./hosts/ateasystems.sh:227: --output "$file_path" --output "$file_path" ./hosts/ateasystems.sh:228: fi ./hosts/ateasystems.sh:229: else -./hosts/ateasystems.sh:230: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/ateasystems.sh:230: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/ateasystems.sh:231: tor_curl_request --insecure \ ./hosts/ateasystems.sh:232: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/ateasystems.sh:233: -H "User-Agent: $RandomUA" \ @@ -478,23 +479,23 @@ _________________________________________________________________________ ./hosts/bedrive.sh:90: response=$(tor_curl_request --insecure -L -s \ ./hosts/bedrive.sh:91: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ ./hosts/bedrive.sh:92: "$remote_url") -./hosts/bedrive.sh:93: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bedrive.sh:93: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/bedrive.sh:94: debugHtml "${remote_url##*/}" "bd_fetch$i" "${response}" ./hosts/bedrive.sh:95: fi ./hosts/bedrive.sh:96: if [[ -z $response ]] ; then ./hosts/bedrive.sh:97: rm -f "${bd_cookie_jar}"; -./hosts/bedrive.sh:98: if [ $i == $maxfetchretries ] ; then +./hosts/bedrive.sh:98: if [[ $i == $maxfetchretries ]] ; then ./hosts/bedrive.sh:99: printf "\\n" ./hosts/bedrive.sh:100: echo -e 
"${RED}| Failed to extract download link.${NC}" -- ./hosts/bedrive.sh:149: file_header=$(tor_curl_request --insecure --head -L -i -s \ ./hosts/bedrive.sh:150: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ ./hosts/bedrive.sh:151: "$download_url") -./hosts/bedrive.sh:152: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bedrive.sh:152: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/bedrive.sh:153: debugHtml "${remote_url##*/}" "bd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/bedrive.sh:154: fi ./hosts/bedrive.sh:155: if [[ -z $file_header ]] ; then -./hosts/bedrive.sh:156: if [ $j == $maxfetchretries ] ; then +./hosts/bedrive.sh:156: if [[ $j == $maxfetchretries ]] ; then ./hosts/bedrive.sh:157: rm -f "${bd_cookie_jar}"; ./hosts/bedrive.sh:158: printf "\\n" ./hosts/bedrive.sh:159: echo -e "${RED}| Failed to extract file info.${NC}" @@ -510,7 +511,7 @@ _________________________________________________________________________ ./hosts/bedrive.sh:280: --continue-at - --output "$file_path" ./hosts/bedrive.sh:281: fi ./hosts/bedrive.sh:282: else -./hosts/bedrive.sh:283: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/bedrive.sh:283: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/bedrive.sh:284: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/bedrive.sh:285: -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/bedrive.sh:286: -H "User-Agent: $RandomUA" \ @@ -536,34 +537,34 @@ _________________________________________________________________________ ./hosts/bedrive.sh:310: -H "Sec-Fetch-Mode: navigate" \ -- ./hosts/biteblob.sh:96: response=$(tor_curl_request --insecure -L -s "${fixed_url}") -./hosts/biteblob.sh:97: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/biteblob.sh:97: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/biteblob.sh:98: debugHtml "${remote_url##*/}" "bite_dwnpage$j" "url: $fixed_url"$'\n'"${response}" ./hosts/biteblob.sh:99: fi ./hosts/biteblob.sh:100: if [[ -z $response ]] ; then -./hosts/biteblob.sh:101: if [ $j == $maxfetchretries ] ; then +./hosts/biteblob.sh:101: if [[ $j == $maxfetchretries ]] ; then ./hosts/biteblob.sh:102: printf "\\n" ./hosts/biteblob.sh:103: echo -e "${RED}| Failed to extract download link${NC}" ./hosts/biteblob.sh:104: warnAndRetryUnknownError=true -./hosts/biteblob.sh:105: if [ "${finalAttempt}" == "true" ] ; then +./hosts/biteblob.sh:105: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/biteblob.sh:106: failedRetryDownload "${remote_url}" "" "" -- ./hosts/biteblob.sh:144: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") -./hosts/biteblob.sh:145: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/biteblob.sh:145: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/biteblob.sh:146: debugHtml "${remote_url##*/}" "bite_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/biteblob.sh:147: fi ./hosts/biteblob.sh:148: if [[ -z $file_header ]] ; then -./hosts/biteblob.sh:149: if [ $j == $maxfetchretries ] ; then +./hosts/biteblob.sh:149: if [[ $j == $maxfetchretries ]] ; then ./hosts/biteblob.sh:150: printf "\\n" ./hosts/biteblob.sh:151: echo -e "${RED}| Failed to extract file info.${NC}" ./hosts/biteblob.sh:152: warnAndRetryUnknownError=true -./hosts/biteblob.sh:153: if [ "${finalAttempt}" == "true" ] ; then +./hosts/biteblob.sh:153: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/biteblob.sh:154: failedRetryDownload "${remote_url}" "" "" -- 
./hosts/biteblob.sh:227: tor_curl_request --insecure --referer "$file_url" "$download_url" --output "$file_path" ./hosts/biteblob.sh:228: rc=$? -./hosts/biteblob.sh:229: if [ $rc -ne 0 ] ; then +./hosts/biteblob.sh:229: if ((rc != 0 )) ; then ./hosts/biteblob.sh:230: printf "${RED}Download Failed (bad exit status).${NC}" -./hosts/biteblob.sh:231: if [ -f ${file_path} ]; then +./hosts/biteblob.sh:231: if [[ -f ${file_path} ]]; then ./hosts/biteblob.sh:232: printf "${YELLOW} Partial removed...${NC}" ./hosts/biteblob.sh:233: printf "\n\n" ./hosts/biteblob.sh:234: rm -f "${file_path}" @@ -576,7 +577,7 @@ _________________________________________________________________________ ./hosts/biteblob.sh:273: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" ./hosts/biteblob.sh:274: fi ./hosts/biteblob.sh:275: received_file_size=0 -./hosts/biteblob.sh:276: if [ -f "$file_path" ] ; then +./hosts/biteblob.sh:276: if [[ -f "$file_path" ]] ; then ./hosts/biteblob.sh:277: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/biteblob.sh:278: fi ./hosts/biteblob.sh:279: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -588,12 +589,12 @@ _________________________________________________________________________ ./hosts/bowfile.sh:91: response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ ./hosts/bowfile.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ ./hosts/bowfile.sh:93: "$fixed_url") -./hosts/bowfile.sh:94: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bowfile.sh:94: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/bowfile.sh:95: debugHtml "${remote_url##*/}" "bow_fetch$i" "${response}" ./hosts/bowfile.sh:96: fi ./hosts/bowfile.sh:97: if [[ -z $response ]] ; then ./hosts/bowfile.sh:98: rm -f "${bow_cookie_jar}"; -./hosts/bowfile.sh:99: if [ $i == $maxfetchretries ] ; then +./hosts/bowfile.sh:99: if [[ $i == $maxfetchretries ]] ; then ./hosts/bowfile.sh:100: printf "\\n" ./hosts/bowfile.sh:101: echo -e "${RED}| Failed to extract token link [1].${NC}" -- @@ -602,7 +603,7 @@ _________________________________________________________________________ ./hosts/bowfile.sh:145: -H "Host: bowfile.com" \ ./hosts/bowfile.sh:146: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ ./hosts/bowfile.sh:147: "$download_url") -./hosts/bowfile.sh:148: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bowfile.sh:148: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/bowfile.sh:149: debugHtml "${remote_url##*/}" "bow_downurl" "download_url: ${download_url}"$'\n'"${response}" ./hosts/bowfile.sh:150: fi ./hosts/bowfile.sh:151: if [[ -z $response ]] ; then @@ -613,11 +614,11 @@ _________________________________________________________________________ ./hosts/bowfile.sh:183: -H "Host: $fshost" \ ./hosts/bowfile.sh:184: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ ./hosts/bowfile.sh:185: "$download_url") -./hosts/bowfile.sh:186: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/bowfile.sh:186: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/bowfile.sh:187: debugHtml "${remote_url##*/}" "bow_head$j" "download_url: ${download_url}"$'\n'"download_token: ${dltoken}"$'\n'"${file_header}" ./hosts/bowfile.sh:188: fi ./hosts/bowfile.sh:189: if [[ -z $file_header ]] ; then -./hosts/bowfile.sh:190: if [ $j == $maxfetchretries ] ; then +./hosts/bowfile.sh:190: if [[ $j == $maxfetchretries ]] ; then 
./hosts/bowfile.sh:191: printf "\\n" ./hosts/bowfile.sh:192: echo -e "${RED}| Failed to extract file info${NC}" -- @@ -633,7 +634,7 @@ _________________________________________________________________________ ./hosts/bowfile.sh:308: "$download_url" --continue-at - --output "$file_path" ./hosts/bowfile.sh:309: fi ./hosts/bowfile.sh:310: else -./hosts/bowfile.sh:311: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/bowfile.sh:311: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/bowfile.sh:312: tor_curl_request --insecure -L \ ./hosts/bowfile.sh:313: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/bowfile.sh:314: -H "Host: $fshost" \ @@ -661,23 +662,23 @@ _________________________________________________________________________ ./hosts/click.sh:143: response=$(tor_curl_request --insecure -L -s -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ ./hosts/click.sh:144: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ ./hosts/click.sh:145: "$fixed_url") -./hosts/click.sh:146: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:146: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/click.sh:147: debugHtml "${remote_url##*/}" "click_fetch$i" "${response}" ./hosts/click.sh:148: fi ./hosts/click.sh:149: if [[ -z $response ]] ; then ./hosts/click.sh:150: rm -f "${click_cookie_jar}"; -./hosts/click.sh:151: if [ $i == $maxfetchretries ] ; then +./hosts/click.sh:151: if [[ $i == $maxfetchretries ]] ; then ./hosts/click.sh:152: printf "\\n" ./hosts/click.sh:153: echo -e "${RED}| Failed to extract download link [1].${NC}" -- ./hosts/click.sh:226: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/click.sh:227: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ ./hosts/click.sh:228: --data "$form_data" "$fixed_url") -./hosts/click.sh:229: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:229: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/click.sh:230: debugHtml "${remote_url##*/}" "click_post1_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/click.sh:231: fi ./hosts/click.sh:232: if [[ -z $response ]] ; then -./hosts/click.sh:233: if [ $i == $maxfetchretries ] ; then +./hosts/click.sh:233: if [[ $i == $maxfetchretries ]] ; then ./hosts/click.sh:234: rm -f "${click_cookie_jar}"; ./hosts/click.sh:235: printf "\\n" ./hosts/click.sh:236: echo -e "${RED}| Failed to extract download link [2].${NC}" @@ -685,26 +686,26 @@ _________________________________________________________________________ ./hosts/click.sh:345: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/click.sh:346: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ ./hosts/click.sh:347: --data "$form_data" "$fixed_url") -./hosts/click.sh:348: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:348: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/click.sh:349: debugHtml "${remote_url##*/}" "click_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/click.sh:350: fi ./hosts/click.sh:351: if [[ -z $response ]] ; then -./hosts/click.sh:352: if [ $i == $maxfetchretries ] ; then +./hosts/click.sh:352: if [[ $i == $maxfetchretries ]] ; then ./hosts/click.sh:353: rm -f "${click_cookie_jar}"; ./hosts/click.sh:354: printf "\\n" ./hosts/click.sh:355: echo -e "${RED}| Failed to extract download link [3].${NC}" -- ./hosts/click.sh:434: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") 
-./hosts/click.sh:435: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/click.sh:435: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/click.sh:436: debugHtml "${remote_url##*/}" "click_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/click.sh:437: fi ./hosts/click.sh:438: if [[ -z $file_header ]] ; then -./hosts/click.sh:439: if [ $j == $maxfetchretries ] ; then +./hosts/click.sh:439: if [[ $j == $maxfetchretries ]] ; then ./hosts/click.sh:440: rm -f "${click_cookie_jar}"; ./hosts/click.sh:441: printf "\\n" ./hosts/click.sh:442: echo -e "${RED}| Failed to extract file info${NC}" ./hosts/click.sh:443: warnAndRetryUnknownError=true -./hosts/click.sh:444: if [ "${finalAttempt}" == "true" ] ; then +./hosts/click.sh:444: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/click.sh:535: tor_curl_request --insecure \ ./hosts/click.sh:536: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -720,7 +721,7 @@ _________________________________________________________________________ ./hosts/click.sh:546: "$download_url" --continue-at - --output "$file_path" ./hosts/click.sh:547: fi ./hosts/click.sh:548: else -./hosts/click.sh:549: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/click.sh:549: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/click.sh:550: tor_curl_request --insecure \ ./hosts/click.sh:551: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/click.sh:552: -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ @@ -748,18 +749,18 @@ _________________________________________________________________________ ./hosts/dailyuploads.sh:97: response=$(tor_curl_request --insecure -L -s -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ ./hosts/dailyuploads.sh:98: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ ./hosts/dailyuploads.sh:99: "$fixed_url") -./hosts/dailyuploads.sh:100: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dailyuploads.sh:100: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dailyuploads.sh:101: debugHtml "${remote_url##*/}" "daily_fetch$i" "${response}" ./hosts/dailyuploads.sh:102: fi ./hosts/dailyuploads.sh:103: if [[ -z $response ]] ; then ./hosts/dailyuploads.sh:104: rm -f "${daily_cookie_jar}"; -./hosts/dailyuploads.sh:105: if [ $i == $maxfetchretries ] ; then +./hosts/dailyuploads.sh:105: if [[ $i == $maxfetchretries ]] ; then ./hosts/dailyuploads.sh:106: printf "\\n" ./hosts/dailyuploads.sh:107: echo -e "${RED}| Failed to extract download link [1].${NC}" -- ./hosts/dailyuploads.sh:139: tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" ./hosts/dailyuploads.sh:140: captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130") -./hosts/dailyuploads.sh:141: if [ "${DebugPluginsEnabled}" == "true" ]; then +./hosts/dailyuploads.sh:141: if [[ "${DebugPluginsEnabled}" == "true" ]]; then ./hosts/dailyuploads.sh:142: printf "\\n" ./hosts/dailyuploads.sh:143: echo -e "$captcha_ocr_output" ./hosts/dailyuploads.sh:144: fi @@ -767,17 +768,17 @@ _________________________________________________________________________ ./hosts/dailyuploads.sh:146: rm -f "$tmp_captcha_img" ./hosts/dailyuploads.sh:147: rm -f "$captcha_ocr_output" ./hosts/dailyuploads.sh:148: local caplength=${#captcha_code} -./hosts/dailyuploads.sh:149: if [ -z "$captcha_code" ] || ((caplength != 4)) ; then +./hosts/dailyuploads.sh:149: if [[ -z "$captcha_code" ]] || ((caplength != 4)) ; then -- 
./hosts/dailyuploads.sh:286: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/dailyuploads.sh:287: -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ ./hosts/dailyuploads.sh:288: --data "$form_data" "$fixed_url") -./hosts/dailyuploads.sh:289: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dailyuploads.sh:289: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dailyuploads.sh:290: debugHtml "${remote_url##*/}" "daily_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/dailyuploads.sh:291: fi ./hosts/dailyuploads.sh:292: fi ./hosts/dailyuploads.sh:293: if [[ -z $response ]] ; then -./hosts/dailyuploads.sh:294: if [ $i == $maxfetchretries ] ; then +./hosts/dailyuploads.sh:294: if [[ $i == $maxfetchretries ]] ; then ./hosts/dailyuploads.sh:295: rm -f "${daily_cookie_jar}"; ./hosts/dailyuploads.sh:296: printf "\\n" -- @@ -785,11 +786,11 @@ _________________________________________________________________________ ./hosts/dailyuploads.sh:393: -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ ./hosts/dailyuploads.sh:394: --referer "${fixed_url}" \ ./hosts/dailyuploads.sh:395: "$download_url") -./hosts/dailyuploads.sh:396: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dailyuploads.sh:396: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dailyuploads.sh:397: debugHtml "${remote_url##*/}" "daily_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/dailyuploads.sh:398: fi ./hosts/dailyuploads.sh:399: if [[ -z $file_header ]] ; then -./hosts/dailyuploads.sh:400: if [ $j == $maxfetchretries ] ; then +./hosts/dailyuploads.sh:400: if [[ $j == $maxfetchretries ]] ; then ./hosts/dailyuploads.sh:401: rm -f "${daily_cookie_jar}"; ./hosts/dailyuploads.sh:402: printf "\\n" -- @@ -805,7 +806,7 @@ _________________________________________________________________________ ./hosts/dailyuploads.sh:507: "$download_url" --continue-at - --output "$file_path" ./hosts/dailyuploads.sh:508: fi ./hosts/dailyuploads.sh:509: else -./hosts/dailyuploads.sh:510: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/dailyuploads.sh:510: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/dailyuploads.sh:511: tor_curl_request --insecure \ ./hosts/dailyuploads.sh:512: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/dailyuploads.sh:513: -H "User-Agent: $RandomUA" \ @@ -833,23 +834,23 @@ _________________________________________________________________________ ./hosts/dashfile.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ ./hosts/dashfile.sh:91: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ ./hosts/dashfile.sh:92: "$remote_url") -./hosts/dashfile.sh:93: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:93: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dashfile.sh:94: debugHtml "${remote_url##*/}" "dash_fetch$i" "${response}" ./hosts/dashfile.sh:95: fi ./hosts/dashfile.sh:96: if [[ -z $response ]] ; then ./hosts/dashfile.sh:97: rm -f "${dash_cookie_jar}"; -./hosts/dashfile.sh:98: if [ $i == $maxfetchretries ] ; then +./hosts/dashfile.sh:98: if [[ $i == $maxfetchretries ]] ; then ./hosts/dashfile.sh:99: printf "\\n" ./hosts/dashfile.sh:100: echo -e "${RED}| Failed to extract download link [1]${NC}" -- ./hosts/dashfile.sh:177: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/dashfile.sh:178: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ ./hosts/dashfile.sh:179: --data "$form_data" 
"$remote_url") -./hosts/dashfile.sh:180: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:180: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dashfile.sh:181: debugHtml "${remote_url##*/}" "dash_post1_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/dashfile.sh:182: fi ./hosts/dashfile.sh:183: if [[ -z $response ]] ; then -./hosts/dashfile.sh:184: if [ $i == $maxfetchretries ] ; then +./hosts/dashfile.sh:184: if [[ $i == $maxfetchretries ]] ; then ./hosts/dashfile.sh:185: rm -f "${dash_cookie_jar}"; ./hosts/dashfile.sh:186: printf "\\n" ./hosts/dashfile.sh:187: echo -e "${RED}| Failed to extract download link [4]${NC}" @@ -857,26 +858,26 @@ _________________________________________________________________________ ./hosts/dashfile.sh:308: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/dashfile.sh:309: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ ./hosts/dashfile.sh:310: --data "$form_data" "$remote_url") -./hosts/dashfile.sh:311: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:311: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dashfile.sh:312: debugHtml "${remote_url##*/}" "dash_post2_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/dashfile.sh:313: fi ./hosts/dashfile.sh:314: if [[ -z $response ]] ; then -./hosts/dashfile.sh:315: if [ $i == $maxfetchretries ] ; then +./hosts/dashfile.sh:315: if [[ $i == $maxfetchretries ]] ; then ./hosts/dashfile.sh:316: rm -f "${dash_cookie_jar}"; ./hosts/dashfile.sh:317: printf "\\n" ./hosts/dashfile.sh:318: echo -e "${RED}| Failed to extract download link [7]${NC}" -- ./hosts/dashfile.sh:397: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") -./hosts/dashfile.sh:398: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dashfile.sh:398: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dashfile.sh:399: debugHtml "${remote_url##*/}" "dash_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/dashfile.sh:400: fi ./hosts/dashfile.sh:401: if [[ -z $file_header ]] ; then -./hosts/dashfile.sh:402: if [ $j == $maxfetchretries ] ; then +./hosts/dashfile.sh:402: if [[ $j == $maxfetchretries ]] ; then ./hosts/dashfile.sh:403: rm -f "${dash_cookie_jar}"; ./hosts/dashfile.sh:404: printf "\\n" ./hosts/dashfile.sh:405: echo -e "${RED}| Failed to extract file info${NC}" ./hosts/dashfile.sh:406: warnAndRetryUnknownError=true -./hosts/dashfile.sh:407: if [ "${finalAttempt}" == "true" ] ; then +./hosts/dashfile.sh:407: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/dashfile.sh:497: tor_curl_request --insecure \ ./hosts/dashfile.sh:498: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -888,7 +889,7 @@ _________________________________________________________________________ ./hosts/dashfile.sh:504: "$download_url" --continue-at - --output "$file_path" ./hosts/dashfile.sh:505: fi ./hosts/dashfile.sh:506: else -./hosts/dashfile.sh:507: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/dashfile.sh:507: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/dashfile.sh:508: tor_curl_request --insecure \ ./hosts/dashfile.sh:509: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/dashfile.sh:510: -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ @@ -914,40 +915,40 @@ _________________________________________________________________________ ./hosts/dashfile.sh:534: -H "Sec-Fetch-Mode: navigate" \ -- 
./hosts/dataupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url") -./hosts/dataupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dataupload.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dataupload.sh:92: debugHtml "${remote_url##*/}" "dup_dwnpage$i" "${response}" ./hosts/dataupload.sh:93: fi ./hosts/dataupload.sh:94: if [[ -z $response ]] ; then ./hosts/dataupload.sh:95: rm -f "${dup_cookie_jar}"; -./hosts/dataupload.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/dataupload.sh:96: if [[ $i == $maxfetchretries ]] ; then ./hosts/dataupload.sh:97: printf "\\n" ./hosts/dataupload.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/dataupload.sh:99: warnAndRetryUnknownError=true -./hosts/dataupload.sh:100: if [ "${finalAttempt}" == "true" ] ; then +./hosts/dataupload.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/dataupload.sh:166: response=$(tor_curl_request --insecure -svo. -X POST \ ./hosts/dataupload.sh:167: -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ ./hosts/dataupload.sh:168: --data-raw "$form_data" "$post_action" 2>&1) -./hosts/dataupload.sh:169: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dataupload.sh:169: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dataupload.sh:170: debugHtml "${remote_url##*/}" "dup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/dataupload.sh:171: fi ./hosts/dataupload.sh:172: if [[ -z $response ]] ; then -./hosts/dataupload.sh:173: if [ $i == $maxfetchretries ] ; then +./hosts/dataupload.sh:173: if [[ $i == $maxfetchretries ]] ; then ./hosts/dataupload.sh:174: rm -f "${dup_cookie_jar}"; ./hosts/dataupload.sh:175: printf "\\n" ./hosts/dataupload.sh:176: echo -e "${RED}| Failed to extract download link [3].${NC}" -- ./hosts/dataupload.sh:234: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") -./hosts/dataupload.sh:235: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dataupload.sh:235: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dataupload.sh:236: debugHtml "${remote_url##*/}" "dup_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/dataupload.sh:237: fi ./hosts/dataupload.sh:238: if [[ -z $file_header ]] ; then -./hosts/dataupload.sh:239: if [ $j == $maxfetchretries ] ; then +./hosts/dataupload.sh:239: if [[ $j == $maxfetchretries ]] ; then ./hosts/dataupload.sh:240: rm -f "${dup_cookie_jar}"; ./hosts/dataupload.sh:241: printf "\\n" ./hosts/dataupload.sh:242: echo -e "${RED}| Failed to extract file info [1]${NC}" ./hosts/dataupload.sh:243: warnAndRetryUnknownError=true -./hosts/dataupload.sh:244: if [ "${finalAttempt}" == "true" ] ; then +./hosts/dataupload.sh:244: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/dataupload.sh:351: tor_curl_request --insecure \ ./hosts/dataupload.sh:352: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -961,7 +962,7 @@ _________________________________________________________________________ ./hosts/dataupload.sh:360: "$download_url" --continue-at - --output "$file_path" ./hosts/dataupload.sh:361: fi ./hosts/dataupload.sh:362: else -./hosts/dataupload.sh:363: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/dataupload.sh:363: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/dataupload.sh:364: tor_curl_request --insecure \ ./hosts/dataupload.sh:365: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ 
./hosts/dataupload.sh:366: -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ @@ -987,40 +988,40 @@ _________________________________________________________________________ ./hosts/dataupload.sh:391: -H "Sec-Fetch-Dest: document" \ -- ./hosts/desiupload.sh:90: response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url") -./hosts/desiupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/desiupload.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/desiupload.sh:92: debugHtml "${remote_url##*/}" "desi_fetch$i" "${response}" ./hosts/desiupload.sh:93: fi ./hosts/desiupload.sh:94: if [[ -z $response ]] ; then ./hosts/desiupload.sh:95: rm -f "${desi_cookie_jar}"; -./hosts/desiupload.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/desiupload.sh:96: if [[ $i == $maxfetchretries ]] ; then ./hosts/desiupload.sh:97: printf "\\n" ./hosts/desiupload.sh:98: echo -e "${RED}| Failed to extract download link [1]${NC}" ./hosts/desiupload.sh:99: warnAndRetryUnknownError=true -./hosts/desiupload.sh:100: if [ "${finalAttempt}" == "true" ] ; then +./hosts/desiupload.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/desiupload.sh:202: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/desiupload.sh:203: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \ ./hosts/desiupload.sh:204: --data "$form_data" "$remote_url") -./hosts/desiupload.sh:205: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/desiupload.sh:205: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/desiupload.sh:206: debugHtml "${remote_url##*/}" "desi_post_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/desiupload.sh:207: fi ./hosts/desiupload.sh:208: if [[ -z $response ]] ; then -./hosts/desiupload.sh:209: if [ $i == $maxfetchretries ] ; then +./hosts/desiupload.sh:209: if [[ $i == $maxfetchretries ]] ; then ./hosts/desiupload.sh:210: rm -f "${desi_cookie_jar}"; ./hosts/desiupload.sh:211: printf "\\n" ./hosts/desiupload.sh:212: echo -e "${RED}| Failed to extract download link [7]${NC}" -- ./hosts/desiupload.sh:306: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") -./hosts/desiupload.sh:307: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/desiupload.sh:307: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/desiupload.sh:308: debugHtml "${remote_url##*/}" "desi_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/desiupload.sh:309: fi ./hosts/desiupload.sh:310: if [[ -z $file_header ]] ; then -./hosts/desiupload.sh:311: if [ $j == $maxfetchretries ] ; then +./hosts/desiupload.sh:311: if [[ $j == $maxfetchretries ]] ; then ./hosts/desiupload.sh:312: rm -f "${desi_cookie_jar}"; ./hosts/desiupload.sh:313: printf "\\n" ./hosts/desiupload.sh:314: echo -e "${RED}| Failed to extract file info${NC}" ./hosts/desiupload.sh:315: warnAndRetryUnknownError=true -./hosts/desiupload.sh:316: if [ "${finalAttempt}" == "true" ] ; then +./hosts/desiupload.sh:316: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/desiupload.sh:406: tor_curl_request --insecure \ ./hosts/desiupload.sh:407: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -1032,7 +1033,7 @@ _________________________________________________________________________ ./hosts/desiupload.sh:413: "$download_url" --continue-at - --output "$file_path" ./hosts/desiupload.sh:414: fi ./hosts/desiupload.sh:415: else -./hosts/desiupload.sh:416: if [ "${RateMonitorEnabled}" == 
"true" ]; then +./hosts/desiupload.sh:416: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/desiupload.sh:417: tor_curl_request --insecure \ ./hosts/desiupload.sh:418: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/desiupload.sh:419: -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \ @@ -1070,7 +1071,7 @@ _________________________________________________________________________ ./hosts/dosya.sh:119: -c "${dosya_cookie_jar}" \ ./hosts/dosya.sh:120: "${remote_url}") ./hosts/dosya.sh:121: fi -./hosts/dosya.sh:122: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/dosya.sh:122: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/dosya.sh:123: debugHtml "${remote_url##*/}" "dos_fetch_$i" "remote_url: ${remote_url}"$'\n'"User-Agent: $RandomUA"$'\n'"${PAGE}" -- ./hosts/dosya.sh:173: file_header=$(tor_curl_request_extended --insecure --head -L -s \ @@ -1116,27 +1117,27 @@ _________________________________________________________________________ ./hosts/dosya.sh:427: -H "DNT: 1" \ -- ./hosts/downloadgg.sh:90: response=$(tor_curl_request --insecure -L -s -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" "$remote_url") -./hosts/downloadgg.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/downloadgg.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/downloadgg.sh:92: debugHtml "${remote_url##*/}" "dgg_dwnpage$i" "${response}" ./hosts/downloadgg.sh:93: fi ./hosts/downloadgg.sh:94: if [[ -z $response ]] ; then ./hosts/downloadgg.sh:95: rm -f "${dgg_cookie_jar}"; -./hosts/downloadgg.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/downloadgg.sh:96: if [[ $i == $maxfetchretries ]] ; then ./hosts/downloadgg.sh:97: printf "\\n" ./hosts/downloadgg.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/downloadgg.sh:99: warnAndRetryUnknownError=true -./hosts/downloadgg.sh:100: if [ "${finalAttempt}" == "true" ] ; then +./hosts/downloadgg.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/downloadgg.sh:169: response=$(tor_curl_request --insecure -svo. 
-X POST \ ./hosts/downloadgg.sh:170: -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" \ ./hosts/downloadgg.sh:171: --data-raw "$form_data" "$post_action" 2>&1) -./hosts/downloadgg.sh:172: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/downloadgg.sh:172: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/downloadgg.sh:173: debugHtml "${remote_url##*/}" "dgg_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/downloadgg.sh:174: fi ./hosts/downloadgg.sh:175: if [[ -z $response ]] ; then ./hosts/downloadgg.sh:176: echo -e "${RED}| Failed to extract download link${NC}" ./hosts/downloadgg.sh:177: warnAndRetryUnknownError=true -./hosts/downloadgg.sh:178: if [ "${finalAttempt}" == "true" ] ; then +./hosts/downloadgg.sh:178: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/downloadgg.sh:179: rm -f "${dgg_cookie_jar}"; -- ./hosts/downloadgg.sh:257: tor_curl_request --insecure -X POST \ @@ -1156,7 +1157,7 @@ _________________________________________________________________________ ./hosts/downloadgg.sh:271: --output "$file_path" ./hosts/downloadgg.sh:272: fi ./hosts/downloadgg.sh:273: else -./hosts/downloadgg.sh:274: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/downloadgg.sh:274: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/downloadgg.sh:275: tor_curl_request --insecure -X POST \ ./hosts/downloadgg.sh:276: -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/downloadgg.sh:277: -H "Host: download.gg" \ @@ -1182,12 +1183,12 @@ _________________________________________________________________________ ./hosts/downloadgg.sh:304: -H "Upgrade-Insecure-Requests: 1" \ -- ./hosts/examples/ExampleNewHost.sh:102: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") -./hosts/examples/ExampleNewHost.sh:103: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/examples/ExampleNewHost.sh:103: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/examples/ExampleNewHost.sh:104: debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/examples/ExampleNewHost.sh:105: fi -./hosts/examples/ExampleNewHost.sh:106: if [ ! -z "$file_header" ] ; then +./hosts/examples/ExampleNewHost.sh:106: if [[ ! 
-z "$file_header" ]] ; then ./hosts/examples/ExampleNewHost.sh:107: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then -./hosts/examples/ExampleNewHost.sh:108: if [ $j == $maxfetchretries ] ; then +./hosts/examples/ExampleNewHost.sh:108: if [[ $j == $maxfetchretries ]] ; then ./hosts/examples/ExampleNewHost.sh:109: printf "\\n" ./hosts/examples/ExampleNewHost.sh:110: echo -e "${RED}| The file has been removed (404).${NC}" ./hosts/examples/ExampleNewHost.sh:111: removedDownload "${remote_url}" @@ -1198,7 +1199,7 @@ _________________________________________________________________________ ./hosts/examples/ExampleNewHost.sh:201: tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" ./hosts/examples/ExampleNewHost.sh:202: fi ./hosts/examples/ExampleNewHost.sh:203: received_file_size=0 -./hosts/examples/ExampleNewHost.sh:204: if [ -f "$file_path" ] ; then +./hosts/examples/ExampleNewHost.sh:204: if [[ -f "$file_path" ]] ; then ./hosts/examples/ExampleNewHost.sh:205: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/examples/ExampleNewHost.sh:206: fi ./hosts/examples/ExampleNewHost.sh:207: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1214,31 +1215,31 @@ _________________________________________________________________________ ./hosts/examples/up_example.sh:116: -F "randomizefn=0" \ ./hosts/examples/up_example.sh:117: -F "shorturl=0" \ ./hosts/examples/up_example.sh:118: "${PostUrlHost}") -./hosts/examples/up_example.sh:119: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/examples/up_example.sh:119: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/examples/up_example.sh:120: debugHtml "${remote_url##*/}" "${_hostCode}_dwnpage$j" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/examples/up_example.sh:121: fi ./hosts/examples/up_example.sh:122: successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$responseHtml}" -- ./hosts/fileblade.sh:90: response=$(tor_curl_request --insecure -L -s -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" "$remote_url") -./hosts/fileblade.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/fileblade.sh:92: debugHtml "${remote_url##*/}" "fb_dwnpage$i" "${response}" ./hosts/fileblade.sh:93: fi ./hosts/fileblade.sh:94: if [[ -z $response ]] ; then ./hosts/fileblade.sh:95: rm -f "${fb_cookie_jar}"; -./hosts/fileblade.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/fileblade.sh:96: if [[ $i == $maxfetchretries ]] ; then ./hosts/fileblade.sh:97: printf "\\n" ./hosts/fileblade.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/fileblade.sh:99: warnAndRetryUnknownError=true -./hosts/fileblade.sh:100: if [ "${finalAttempt}" == "true" ] ; then +./hosts/fileblade.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/fileblade.sh:165: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/fileblade.sh:166: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ ./hosts/fileblade.sh:167: --data "$form_data" "$post_action") -./hosts/fileblade.sh:168: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:168: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/fileblade.sh:169: debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/fileblade.sh:170: fi ./hosts/fileblade.sh:171: if [[ -z $response ]] ; then -./hosts/fileblade.sh:172: 
if [ $i == $maxfetchretries ] ; then +./hosts/fileblade.sh:172: if [[ $i == $maxfetchretries ]] ; then ./hosts/fileblade.sh:173: rm -f "${fb_cookie_jar}"; ./hosts/fileblade.sh:174: printf "\\n" ./hosts/fileblade.sh:175: echo -e "${RED}| Failed to extract download link [3]${NC}" @@ -1246,26 +1247,26 @@ _________________________________________________________________________ ./hosts/fileblade.sh:281: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/fileblade.sh:282: -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ ./hosts/fileblade.sh:283: --data "$form_data" "$post_action") -./hosts/fileblade.sh:284: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:284: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/fileblade.sh:285: debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/fileblade.sh:286: fi ./hosts/fileblade.sh:287: if [[ -z $response ]] ; then -./hosts/fileblade.sh:288: if [ $i == $maxfetchretries ] ; then +./hosts/fileblade.sh:288: if [[ $i == $maxfetchretries ]] ; then ./hosts/fileblade.sh:289: rm -f "${fb_cookie_jar}"; ./hosts/fileblade.sh:290: printf "\\n" ./hosts/fileblade.sh:291: echo -e "${RED}| Failed to extract download link [4].${NC}" -- ./hosts/fileblade.sh:335: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") -./hosts/fileblade.sh:336: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileblade.sh:336: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/fileblade.sh:337: debugHtml "${remote_url##*/}" "fb_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/fileblade.sh:338: fi ./hosts/fileblade.sh:339: if [[ -z $file_header ]] ; then -./hosts/fileblade.sh:340: if [ $j == $maxfetchretries ] ; then +./hosts/fileblade.sh:340: if [[ $j == $maxfetchretries ]] ; then ./hosts/fileblade.sh:341: rm -f "${fb_cookie_jar}"; ./hosts/fileblade.sh:342: printf "\\n" ./hosts/fileblade.sh:343: echo -e "${RED}| Failed to extract file info [1]${NC}" ./hosts/fileblade.sh:344: warnAndRetryUnknownError=true -./hosts/fileblade.sh:345: if [ "${finalAttempt}" == "true" ] ; then +./hosts/fileblade.sh:345: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/fileblade.sh:452: tor_curl_request --insecure -L \ ./hosts/fileblade.sh:453: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -1275,7 +1276,7 @@ _________________________________________________________________________ ./hosts/fileblade.sh:457: "$download_url" --continue-at - --output "$file_path" ./hosts/fileblade.sh:458: fi ./hosts/fileblade.sh:459: else -./hosts/fileblade.sh:460: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/fileblade.sh:460: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/fileblade.sh:461: tor_curl_request --insecure \ ./hosts/fileblade.sh:462: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/fileblade.sh:463: -H "User-Agent: $RandomUA" \ @@ -1301,10 +1302,10 @@ _________________________________________________________________________ ./hosts/fileblade.sh:486: -H "Sec-Fetch-Site: same-origin" \ -- ./hosts/fileditch.sh:96: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") -./hosts/fileditch.sh:97: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/fileditch.sh:97: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/fileditch.sh:98: debugHtml "${remote_url##*/}" "fd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/fileditch.sh:99: fi 
-./hosts/fileditch.sh:100: if [ ! -z "$file_header" ] ; then +./hosts/fileditch.sh:100: if [[ ! -z "$file_header" ]] ; then ./hosts/fileditch.sh:101: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then ./hosts/fileditch.sh:102: echo -e "${RED}| The file has been removed (404).${NC}" ./hosts/fileditch.sh:103: removedDownload "${remote_url}" @@ -1317,7 +1318,7 @@ _________________________________________________________________________ ./hosts/fileditch.sh:189: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" ./hosts/fileditch.sh:190: fi ./hosts/fileditch.sh:191: received_file_size=0 -./hosts/fileditch.sh:192: if [ -f "$file_path" ] ; then +./hosts/fileditch.sh:192: if [[ -f "$file_path" ]] ; then ./hosts/fileditch.sh:193: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/fileditch.sh:194: fi ./hosts/fileditch.sh:195: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1334,7 +1335,7 @@ _________________________________________________________________________ ./hosts/filedot.sh:117: -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ ./hosts/filedot.sh:118: -c "${fdot_cookie_jar}" \ ./hosts/filedot.sh:119: "https://filedot.to/login.html") -./hosts/filedot.sh:120: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/filedot.sh:120: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/filedot.sh:121: debugHtml "${remote_url##*/}" "fdot_login_$a" "${PAGE}" ./hosts/filedot.sh:122: fi -- @@ -1375,15 +1376,15 @@ _________________________________________________________________________ ./hosts/filedot.sh:330: -H "DNT: 1" \ -- ./hosts/filedot.sh:406: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") -./hosts/filedot.sh:407: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/filedot.sh:407: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/filedot.sh:408: debugHtml "${remote_url##*/}" "fdot_head_$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/filedot.sh:409: fi ./hosts/filedot.sh:410: if [[ -z $file_header ]] ; then -./hosts/filedot.sh:411: if [ $j == $maxfetchretries ] ; then +./hosts/filedot.sh:411: if [[ $j == $maxfetchretries ]] ; then ./hosts/filedot.sh:412: printf "\\n" ./hosts/filedot.sh:413: echo -e "${RED}| Failed to extract file info.${NC}" ./hosts/filedot.sh:414: warnAndRetryUnknownError=true -./hosts/filedot.sh:415: if [ "${finalAttempt}" == "true" ] ; then +./hosts/filedot.sh:415: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/filedot.sh:416: failedRetryDownload "${remote_url}" "" "" -- ./hosts/filedot.sh:499: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" @@ -1391,7 +1392,7 @@ _________________________________________________________________________ ./hosts/filedot.sh:501: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" ./hosts/filedot.sh:502: fi ./hosts/filedot.sh:503: received_file_size=0 -./hosts/filedot.sh:504: if [ -f "$file_path" ] ; then +./hosts/filedot.sh:504: if [[ -f "$file_path" ]] ; then ./hosts/filedot.sh:505: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/filedot.sh:506: fi ./hosts/filedot.sh:507: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1400,54 +1401,54 @@ _________________________________________________________________________ 
./hosts/filedot.sh:510: containsHtml=true ./hosts/filedot.sh:511: fi -- -./hosts/filehaus.sh:100: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") -./hosts/filehaus.sh:101: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/filehaus.sh:102: debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" -./hosts/filehaus.sh:103: fi -./hosts/filehaus.sh:104: if [ ! -z "$file_header" ] ; then -./hosts/filehaus.sh:105: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then -./hosts/filehaus.sh:106: printf "\\n" -./hosts/filehaus.sh:107: echo -e "${RED}| The file has been removed (404).${NC}" -./hosts/filehaus.sh:108: removedDownload "${remote_url}" -./hosts/filehaus.sh:109: exitDownloadNotAvailable=true -./hosts/filehaus.sh:110: return 1 +./hosts/filehaus.sh:101: file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") +./hosts/filehaus.sh:102: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./hosts/filehaus.sh:103: debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/filehaus.sh:104: fi +./hosts/filehaus.sh:105: if [[ ! -z "$file_header" ]] ; then +./hosts/filehaus.sh:106: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/filehaus.sh:107: printf "\\n" +./hosts/filehaus.sh:108: echo -e "${RED}| The file has been removed (404).${NC}" +./hosts/filehaus.sh:109: removedDownload "${remote_url}" +./hosts/filehaus.sh:110: exitDownloadNotAvailable=true +./hosts/filehaus.sh:111: return 1 -- -./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" -./hosts/filehaus.sh:194: else -./hosts/filehaus.sh:195: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" -./hosts/filehaus.sh:196: fi -./hosts/filehaus.sh:197: received_file_size=0 -./hosts/filehaus.sh:198: if [ -f "$file_path" ] ; then -./hosts/filehaus.sh:199: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./hosts/filehaus.sh:200: fi -./hosts/filehaus.sh:201: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./hosts/filehaus.sh:202: containsHtml=false -./hosts/filehaus.sh:203: else -./hosts/filehaus.sh:204: containsHtml=true -./hosts/filehaus.sh:205: fi +./hosts/filehaus.sh:191: tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:192: else +./hosts/filehaus.sh:193: tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" +./hosts/filehaus.sh:194: fi +./hosts/filehaus.sh:195: received_file_size=0 +./hosts/filehaus.sh:196: if [[ -f "$file_path" ]] ; then +./hosts/filehaus.sh:197: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/filehaus.sh:198: fi +./hosts/filehaus.sh:199: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/filehaus.sh:200: containsHtml=false +./hosts/filehaus.sh:201: else +./hosts/filehaus.sh:202: containsHtml=true +./hosts/filehaus.sh:203: fi -- ./hosts/firestorage.sh:98: response=$(tor_curl_request --insecure -L -s "${fixed_url}") -./hosts/firestorage.sh:99: if [ "${DebugAllEnabled}" == 
"true" ] ; then +./hosts/firestorage.sh:99: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/firestorage.sh:100: debugHtml "${remote_url##*/}" "fs_${fetchnum}fetch_$j" "fixed_url: ${fixed_url}"$'\n'"${response}" ./hosts/firestorage.sh:101: fi ./hosts/firestorage.sh:102: if [[ -z $response ]] ; then -./hosts/firestorage.sh:103: if [ $j == $maxfetchretries ] ; then +./hosts/firestorage.sh:103: if [[ $j == $maxfetchretries ]] ; then ./hosts/firestorage.sh:104: printf "\\n" ./hosts/firestorage.sh:105: echo -e "${RED}| Failed to extract link.${NC}" ./hosts/firestorage.sh:106: warnAndRetryUnknownError=true -./hosts/firestorage.sh:107: if [ "${finalAttempt}" == "true" ] ; then +./hosts/firestorage.sh:107: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/firestorage.sh:108: failedRetryDownload "${remote_url}" "" "" -- ./hosts/firestorage.sh:226: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") -./hosts/firestorage.sh:227: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/firestorage.sh:227: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/firestorage.sh:228: debugHtml "${remote_url##*/}" "fs_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/firestorage.sh:229: fi ./hosts/firestorage.sh:230: if [[ -z $file_header ]] ; then -./hosts/firestorage.sh:231: if [ $j == $maxfetchretries ] ; then +./hosts/firestorage.sh:231: if [[ $j == $maxfetchretries ]] ; then ./hosts/firestorage.sh:232: printf "\\n" ./hosts/firestorage.sh:233: echo -e "${RED}| Failed to extract file info${NC}" ./hosts/firestorage.sh:234: warnAndRetryUnknownError=true -./hosts/firestorage.sh:235: if [ "${finalAttempt}" == "true" ] ; then +./hosts/firestorage.sh:235: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/firestorage.sh:236: failedRetryDownload "${remote_url}" "" "" -- ./hosts/firestorage.sh:335: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" @@ -1455,7 +1456,7 @@ _________________________________________________________________________ ./hosts/firestorage.sh:337: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" ./hosts/firestorage.sh:338: fi ./hosts/firestorage.sh:339: received_file_size=0 -./hosts/firestorage.sh:340: if [ -f "$file_path" ] ; then +./hosts/firestorage.sh:340: if [[ -f "$file_path" ]] ; then ./hosts/firestorage.sh:341: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/firestorage.sh:342: fi ./hosts/firestorage.sh:343: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1490,56 +1491,56 @@ _________________________________________________________________________ -- ./hosts/gofile.sh:258: file_header=$(tor_curl_request --insecure -L --head -s \ ./hosts/gofile.sh:259: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -./hosts/gofile.sh:260: -H "User-Agent: $RandomUA" \ -./hosts/gofile.sh:261: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/gofile.sh:262: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/gofile.sh:263: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/gofile.sh:264: -H "Connection: keep-alive" \ -./hosts/gofile.sh:265: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/gofile.sh:266: -H "Sec-Fetch-Dest: document" \ -./hosts/gofile.sh:267: -H "Sec-Fetch-Mode: navigate" \ -./hosts/gofile.sh:268: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:260: -H "Accept: 
text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:261: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:262: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:263: -H "Connection: keep-alive" \ +./hosts/gofile.sh:264: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:265: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:266: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:267: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:268: -H "Sec-Fetch-User: ?1" \ -- -./hosts/gofile.sh:377: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/gofile.sh:378: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -./hosts/gofile.sh:379: -H "User-Agent: $RandomUA" \ -./hosts/gofile.sh:380: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/gofile.sh:381: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/gofile.sh:382: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/gofile.sh:383: -H "Connection: keep-alive" \ -./hosts/gofile.sh:384: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/gofile.sh:385: -H "Sec-Fetch-Dest: document" \ -./hosts/gofile.sh:386: -H "Sec-Fetch-Mode: navigate" \ -./hosts/gofile.sh:387: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:393: tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/gofile.sh:394: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:395: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:396: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:397: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:398: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:399: -H "Connection: keep-alive" \ +./hosts/gofile.sh:400: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:401: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:402: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:403: -H "Sec-Fetch-Site: none" \ -- -./hosts/gofile.sh:391: tor_curl_request --insecure -G \ -./hosts/gofile.sh:392: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -./hosts/gofile.sh:393: -H "User-Agent: $RandomUA" \ -./hosts/gofile.sh:394: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -./hosts/gofile.sh:395: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/gofile.sh:396: -H "Accept-Encoding: gzip, deflate, br" \ -./hosts/gofile.sh:397: -H "Connection: keep-alive" \ -./hosts/gofile.sh:398: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/gofile.sh:399: -H "Sec-Fetch-Dest: document" \ -./hosts/gofile.sh:400: -H "Sec-Fetch-Mode: navigate" \ -./hosts/gofile.sh:401: -H "Sec-Fetch-Site: none" \ +./hosts/gofile.sh:407: tor_curl_request --insecure -G \ +./hosts/gofile.sh:408: -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ +./hosts/gofile.sh:409: -H "User-Agent: $RandomUA" \ +./hosts/gofile.sh:410: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ +./hosts/gofile.sh:411: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/gofile.sh:412: -H "Accept-Encoding: gzip, deflate, br" \ +./hosts/gofile.sh:413: -H "Connection: keep-alive" \ +./hosts/gofile.sh:414: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/gofile.sh:415: -H "Sec-Fetch-Dest: document" \ +./hosts/gofile.sh:416: -H "Sec-Fetch-Mode: navigate" \ +./hosts/gofile.sh:417: -H 
"Sec-Fetch-Site: none" \ -- ./hosts/hexload.sh:108: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") -./hosts/hexload.sh:109: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:109: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/hexload.sh:110: debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" ./hosts/hexload.sh:111: fi ./hosts/hexload.sh:112: fi ./hosts/hexload.sh:113: else -./hosts/hexload.sh:114: if [ "$hexUseDownload" == "download2" ]; then +./hosts/hexload.sh:114: if [[ "$hexUseDownload" == "download2" ]]; then ./hosts/hexload.sh:115: form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" ./hosts/hexload.sh:116: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") -./hosts/hexload.sh:117: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:117: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/hexload.sh:118: debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" ./hosts/hexload.sh:119: fi ./hosts/hexload.sh:120: else ./hosts/hexload.sh:121: form_data="op=download2&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" ./hosts/hexload.sh:122: response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") -./hosts/hexload.sh:123: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:123: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/hexload.sh:124: debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" ./hosts/hexload.sh:125: fi ./hosts/hexload.sh:126: fi @@ -1548,10 +1549,10 @@ _________________________________________________________________________ ./hosts/hexload.sh:129: download_url=$(echo "$response" | grep -oPi '(?<="link":")[^"]+(?=")' | base64 --decode) ./hosts/hexload.sh:130: download_url=$(urlencode_spaces "$download_url") ./hosts/hexload.sh:131: if grep -Eqi "Sorry, you have been blocked" <<< "$response"; then -./hosts/hexload.sh:132: if [ $j == $hexmaxfetchfileretries ] ; then +./hosts/hexload.sh:132: if ((j == hexmaxfetchfileretries)) ; then -- ./hosts/hexload.sh:254: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$download_url") -./hosts/hexload.sh:255: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/hexload.sh:255: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/hexload.sh:256: debugHtml "${remote_url##*/}" "hex_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/hexload.sh:257: fi ./hosts/hexload.sh:258: if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then @@ -1567,7 +1568,7 @@ _________________________________________________________________________ ./hosts/hexload.sh:323: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" ./hosts/hexload.sh:324: fi ./hosts/hexload.sh:325: received_file_size=0 -./hosts/hexload.sh:326: if [ -f "$file_path" ] ; then +./hosts/hexload.sh:326: if [[ -f "$file_path" ]] ; then ./hosts/hexload.sh:327: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/hexload.sh:328: fi ./hosts/hexload.sh:329: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1592,7 +1593,7 @@ _________________________________________________________________________ ./hosts/innocent.sh:110: [ -s "${WorkDir}/.temp/directhead" ] ./hosts/innocent.sh:111: kill $! 2>/dev/null ./hosts/innocent.sh:112: ) -./hosts/innocent.sh:113: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./hosts/innocent.sh:113: if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then ./hosts/innocent.sh:114: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ./hosts/innocent.sh:115: fi ./hosts/innocent.sh:116: rm -f "${WorkDir}/.temp/directhead" @@ -1600,10 +1601,10 @@ _________________________________________________________________________ -- ./hosts/innocent.sh:119: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") ./hosts/innocent.sh:120: fi -./hosts/innocent.sh:121: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/innocent.sh:121: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/innocent.sh:122: debugHtml "${remote_url##*/}" "inno_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/innocent.sh:123: fi -./hosts/innocent.sh:124: if [ ! -z "$file_header" ] ; then +./hosts/innocent.sh:124: if [[ ! -z "$file_header" ]] ; then ./hosts/innocent.sh:125: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then ./hosts/innocent.sh:126: printf "\\n" ./hosts/innocent.sh:127: echo -e "${RED}| Not Found (404). 
The file has been removed.${NC}" @@ -1616,7 +1617,7 @@ _________________________________________________________________________ ./hosts/innocent.sh:214: tor_curl_request_extended --insecure "$download_url" --output "$file_path" ./hosts/innocent.sh:215: fi ./hosts/innocent.sh:216: received_file_size=0 -./hosts/innocent.sh:217: if [ -f "$file_path" ] ; then +./hosts/innocent.sh:217: if [[ -f "$file_path" ]] ; then ./hosts/innocent.sh:218: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/innocent.sh:219: fi ./hosts/innocent.sh:220: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1626,25 +1627,25 @@ _________________________________________________________________________ ./hosts/innocent.sh:224: fi -- ./hosts/isupload.sh:90: response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url") -./hosts/isupload.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/isupload.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/isupload.sh:92: debugHtml "${remote_url##*/}" "isup_dwnpage$i" "${response}" ./hosts/isupload.sh:93: fi ./hosts/isupload.sh:94: if [[ -z $response ]] ; then ./hosts/isupload.sh:95: rm -f "${isup_cookie_jar}"; -./hosts/isupload.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/isupload.sh:96: if [[ $i == $maxfetchretries ]] ; then ./hosts/isupload.sh:97: printf "\\n" ./hosts/isupload.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/isupload.sh:99: warnAndRetryUnknownError=true -./hosts/isupload.sh:100: if [ "${finalAttempt}" == "true" ] ; then +./hosts/isupload.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/isupload.sh:164: response=$(tor_curl_request_extended --insecure -L -s -X POST \ ./hosts/isupload.sh:165: -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" \ ./hosts/isupload.sh:166: --data "$form_data" "$post_action") -./hosts/isupload.sh:167: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/isupload.sh:167: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/isupload.sh:168: debugHtml "${remote_url##*/}" "isup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/isupload.sh:169: fi ./hosts/isupload.sh:170: if [[ -z $response ]] ; then -./hosts/isupload.sh:171: if [ $i == $maxfetchretries ] ; then +./hosts/isupload.sh:171: if [[ $i == $maxfetchretries ]] ; then ./hosts/isupload.sh:172: rm -f "${isup_cookie_jar}"; ./hosts/isupload.sh:173: printf "\\n" ./hosts/isupload.sh:174: echo -e "${RED}| Failed to extract download link [3].${NC}" @@ -1665,7 +1666,7 @@ _________________________________________________________________________ ./hosts/isupload.sh:254: [ -s "${WorkDir}/.temp/directhead" ] ./hosts/isupload.sh:255: kill $! 2>/dev/null ./hosts/isupload.sh:256: ) -./hosts/isupload.sh:257: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then +./hosts/isupload.sh:257: if [[ ! 
-f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then ./hosts/isupload.sh:258: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ./hosts/isupload.sh:259: fi ./hosts/isupload.sh:260: rm -f "${WorkDir}/.temp/directhead" @@ -1673,10 +1674,10 @@ _________________________________________________________________________ -- ./hosts/isupload.sh:263: file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url") ./hosts/isupload.sh:264: fi -./hosts/isupload.sh:265: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/isupload.sh:265: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/isupload.sh:266: debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/isupload.sh:267: fi -./hosts/isupload.sh:268: if [ ! -z "$file_header" ] ; then +./hosts/isupload.sh:268: if [[ ! -z "$file_header" ]] ; then ./hosts/isupload.sh:269: if grep -Eqi '404 Not Found|' <<< "${file_header}" ; then ./hosts/isupload.sh:270: printf "\\n" ./hosts/isupload.sh:271: echo -e "${RED}| Not Found (404). The file has been removed.${NC}" @@ -1685,9 +1686,9 @@ _________________________________________________________________________ -- ./hosts/isupload.sh:355: tor_curl_request_extended --insecure -L "$download_url" --output "$file_path" ./hosts/isupload.sh:356: rc=$? -./hosts/isupload.sh:357: if [ $rc -ne 0 ] ; then +./hosts/isupload.sh:357: if ((rc != 0 )) ; then ./hosts/isupload.sh:358: printf "${RED}Download Failed (bad exit status).${NC}" -./hosts/isupload.sh:359: if [ -f ${file_path} ]; then +./hosts/isupload.sh:359: if [[ -f ${file_path} ]]; then ./hosts/isupload.sh:360: printf "${YELLOW} Partial removed...${NC}" ./hosts/isupload.sh:361: printf "\n\n" ./hosts/isupload.sh:362: rm -f "${file_path}" @@ -1700,7 +1701,7 @@ _________________________________________________________________________ ./hosts/isupload.sh:401: tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path" ./hosts/isupload.sh:402: fi ./hosts/isupload.sh:403: received_file_size=0 -./hosts/isupload.sh:404: if [ -f "$file_path" ] ; then +./hosts/isupload.sh:404: if [[ -f "$file_path" ]] ; then ./hosts/isupload.sh:405: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/isupload.sh:406: fi ./hosts/isupload.sh:407: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1710,7 +1711,7 @@ _________________________________________________________________________ ./hosts/isupload.sh:411: fi -- ./hosts/kraken.sh:104: PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}") -./hosts/kraken.sh:105: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/kraken.sh:105: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/kraken.sh:106: debugHtml "${krak_id}" "kraken_token${num_attempt}_$i" "url: ${fixed_url}"$'\n'"krakenid: ${krak_id}"$'\n'"${PAGE}" ./hosts/kraken.sh:107: fi ./hosts/kraken.sh:108: if grep -Eqi 'sendFormCaptcha()' <<< "${PAGE}"; then @@ -1722,7 +1723,7 @@ _________________________________________________________________________ ./hosts/kraken.sh:114: return 1 -- ./hosts/kraken.sh:169: down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}") -./hosts/kraken.sh:170: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/kraken.sh:170: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/kraken.sh:171: debugHtml "${krak_id}" "kraken_url${num_attempt}_1" 
"action: ${kraken_action}, token: ${kraken_token}"$'\n'"${down_request}" ./hosts/kraken.sh:172: fi ./hosts/kraken.sh:173: if ! grep -Eqi '"status":"ok"' <<< "${down_request}"; then @@ -1734,11 +1735,11 @@ _________________________________________________________________________ ./hosts/kraken.sh:179: download_url=${kraken_url//\\/} -- ./hosts/kraken.sh:186: file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url") -./hosts/kraken.sh:187: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/kraken.sh:187: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/kraken.sh:188: debugHtml "${krak_id}" "kraken_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/kraken.sh:189: fi ./hosts/kraken.sh:190: rm -f "$kraken_cookie_jar"; -./hosts/kraken.sh:191: if [ ! -z "$file_header" ] ; then +./hosts/kraken.sh:191: if [[ ! -z "$file_header" ]] ; then ./hosts/kraken.sh:192: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then ./hosts/kraken.sh:193: echo -e "${RED}| The file has been removed (404).${NC}" ./hosts/kraken.sh:194: removedDownload "${remote_url}" @@ -1750,7 +1751,7 @@ _________________________________________________________________________ ./hosts/kraken.sh:288: tor_curl_request --insecure --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" ./hosts/kraken.sh:289: fi ./hosts/kraken.sh:290: received_file_size=0 -./hosts/kraken.sh:291: if [ -f "$file_path" ] ; then +./hosts/kraken.sh:291: if [[ -f "$file_path" ]] ; then ./hosts/kraken.sh:292: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/kraken.sh:293: fi ./hosts/kraken.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1789,7 +1790,7 @@ _________________________________________________________________________ ./hosts/mediafire.sh:279: tor_curl_request_extended --insecure "$download_url" --output "$file_path" ./hosts/mediafire.sh:280: fi ./hosts/mediafire.sh:281: received_file_size=0 -./hosts/mediafire.sh:282: if [ -f "$file_path" ] ; then +./hosts/mediafire.sh:282: if [[ -f "$file_path" ]] ; then ./hosts/mediafire.sh:283: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/mediafire.sh:284: fi ./hosts/mediafire.sh:285: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1799,25 +1800,25 @@ _________________________________________________________________________ ./hosts/mediafire.sh:289: fi -- ./hosts/nippy.sh:119: response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url") -./hosts/nippy.sh:120: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/nippy.sh:120: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/nippy.sh:121: debugHtml "${remote_url##*/}" "nippy_dwnpage$i" "fixed_url: ${fixed_url}"$'\n'"${response}" ./hosts/nippy.sh:122: fi ./hosts/nippy.sh:123: if [[ -z $response ]] ; then ./hosts/nippy.sh:124: rm -f "${nippy_cookie_jar}"; -./hosts/nippy.sh:125: if [ $i == $maxfetchretries ] ; then +./hosts/nippy.sh:125: if [[ $i == $maxfetchretries ]] ; then ./hosts/nippy.sh:126: printf "\\n" ./hosts/nippy.sh:127: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/nippy.sh:128: warnAndRetryUnknownError=true -./hosts/nippy.sh:129: if [ "${finalAttempt}" == "true" ] ; then +./hosts/nippy.sh:129: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/nippy.sh:188: file_header=$(tor_curl_request --insecure -L --head -s \ 
./hosts/nippy.sh:189: -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" \ ./hosts/nippy.sh:190: "$download_url") -./hosts/nippy.sh:191: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/nippy.sh:191: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/nippy.sh:192: debugHtml "${remote_url##*/}" "nippy_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" ./hosts/nippy.sh:193: fi ./hosts/nippy.sh:194: if [[ -z $file_header ]] ; then -./hosts/nippy.sh:195: if [ $j == $maxfetchretries ] ; then +./hosts/nippy.sh:195: if [[ $j == $maxfetchretries ]] ; then ./hosts/nippy.sh:196: rm -f "${nippy_cookie_jar}"; ./hosts/nippy.sh:197: printf "\\n" ./hosts/nippy.sh:198: echo -e "${RED}| Failed to extract file info${NC}" @@ -1828,7 +1829,7 @@ _________________________________________________________________________ ./hosts/nippy.sh:302: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" ./hosts/nippy.sh:303: fi ./hosts/nippy.sh:304: received_file_size=0 -./hosts/nippy.sh:305: if [ -f "$file_path" ] ; then +./hosts/nippy.sh:305: if [[ -f "$file_path" ]] ; then ./hosts/nippy.sh:306: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/nippy.sh:307: fi ./hosts/nippy.sh:308: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1837,46 +1838,46 @@ _________________________________________________________________________ ./hosts/nippy.sh:311: containsHtml=true ./hosts/nippy.sh:312: fi -- -./hosts/oshi.sh:101: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") -./hosts/oshi.sh:102: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/oshi.sh:103: debugHtml "${remote_url##*/}" "oshi_head$j" "download_url: ${download_url}"$'\n'"${file_header}" -./hosts/oshi.sh:104: fi -./hosts/oshi.sh:105: if [ ! -z "$file_header" ] ; then -./hosts/oshi.sh:106: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then -./hosts/oshi.sh:107: echo -e "${RED}| O shi-, (404). The file has been removed.${NC}" -./hosts/oshi.sh:108: removedDownload "${remote_url}" -./hosts/oshi.sh:109: exitDownloadNotAvailable=true -./hosts/oshi.sh:110: return 1 -./hosts/oshi.sh:111: fi +./hosts/oshi.sh:108: file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") +./hosts/oshi.sh:109: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./hosts/oshi.sh:110: debugHtml "${remote_url##*/}" "oshi_head$j" "download_url: ${download_url}"$'\n'"${file_header}" +./hosts/oshi.sh:111: fi +./hosts/oshi.sh:112: if [[ ! -z "$file_header" ]] ; then +./hosts/oshi.sh:113: if grep -Eqi '404 Not Found' <<< "${file_header}" ; then +./hosts/oshi.sh:114: echo -e "${RED}| O shi-, (404). 
The file has been removed.${NC}" +./hosts/oshi.sh:115: removedDownload "${remote_url}" +./hosts/oshi.sh:116: exitDownloadNotAvailable=true +./hosts/oshi.sh:117: return 1 +./hosts/oshi.sh:118: fi -- -./hosts/oshi.sh:195: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./hosts/oshi.sh:196: else -./hosts/oshi.sh:197: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./hosts/oshi.sh:198: fi -./hosts/oshi.sh:199: received_file_size=0 -./hosts/oshi.sh:200: if [ -f "$file_path" ] ; then -./hosts/oshi.sh:201: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./hosts/oshi.sh:202: fi -./hosts/oshi.sh:203: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./hosts/oshi.sh:204: containsHtml=false -./hosts/oshi.sh:205: else -./hosts/oshi.sh:206: containsHtml=true -./hosts/oshi.sh:207: fi +./hosts/oshi.sh:202: tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:203: else +./hosts/oshi.sh:204: tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./hosts/oshi.sh:205: fi +./hosts/oshi.sh:206: received_file_size=0 +./hosts/oshi.sh:207: if [[ -f "$file_path" ]] ; then +./hosts/oshi.sh:208: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/oshi.sh:209: fi +./hosts/oshi.sh:210: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/oshi.sh:211: containsHtml=false +./hosts/oshi.sh:212: else +./hosts/oshi.sh:213: containsHtml=true +./hosts/oshi.sh:214: fi -- ./hosts/pixeldrain.sh:94: response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid") -./hosts/pixeldrain.sh:95: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/pixeldrain.sh:95: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/pixeldrain.sh:96: debugHtml "${remote_url##*/}" "pd_fetch$i" "$response" ./hosts/pixeldrain.sh:97: fi -./hosts/pixeldrain.sh:98: if [ ! -z "$response" ] ; then +./hosts/pixeldrain.sh:98: if [[ ! -z "$response" ]] ; then ./hosts/pixeldrain.sh:99: if grep -q -Eq '"views":' <<< "$response"; then ./hosts/pixeldrain.sh:100: pdpreviews=$(grep -o -P '(?<="views":).+?(?=,")' <<< "$response") ./hosts/pixeldrain.sh:101: fi ./hosts/pixeldrain.sh:102: if grep -i -Eq "You have reached the maximum number of open download connections" <<< "$response"; then -./hosts/pixeldrain.sh:103: if [ $i -ge 5 ] ; then +./hosts/pixeldrain.sh:103: if ((i >= 5)) ; then ./hosts/pixeldrain.sh:104: printf "\\n" -- ./hosts/pixeldrain.sh:256: file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") -./hosts/pixeldrain.sh:257: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/pixeldrain.sh:257: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/pixeldrain.sh:258: debugHtml "${remote_url##*/}" "pd_head$i" "url: ${pdheadurl}"$'\n'"${file_header}" ./hosts/pixeldrain.sh:259: fi ./hosts/pixeldrain.sh:260: if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then @@ -1895,7 +1896,7 @@ _________________________________________________________________________ ./hosts/pixeldrain.sh:329: --referer "$file_url" "$download_url" --continue-at - --output "$file_path" ./hosts/pixeldrain.sh:330: fi ./hosts/pixeldrain.sh:331: else -./hosts/pixeldrain.sh:332: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/pixeldrain.sh:332: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/pixeldrain.sh:333: tor_curl_request --insecure \ ./hosts/pixeldrain.sh:334: -H "User-Agent: $RandomUA" \ ./hosts/pixeldrain.sh:335: -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ @@ -1915,13 +1916,13 @@ _________________________________________________________________________ ./hosts/pixeldrain.sh:349: fi ./hosts/pixeldrain.sh:350: fi ./hosts/pixeldrain.sh:351: received_file_size=0 -./hosts/pixeldrain.sh:352: if [ -f "$file_path" ] ; then +./hosts/pixeldrain.sh:352: if [[ -f "$file_path" ]] ; then -- ./hosts/quax.sh:85: file_header=$(tor_curl_request --insecure --head -L -s "$download_url") -./hosts/quax.sh:86: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/quax.sh:86: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/quax.sh:87: debugHtml "${remote_url##*/}" "qx_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/quax.sh:88: fi -./hosts/quax.sh:89: if [ ! -z "$file_header" ] ; then +./hosts/quax.sh:89: if [[ ! -z "$file_header" ]] ; then ./hosts/quax.sh:90: if grep -Eqi '404 Not Found|HTTP.* 404' <<< "${file_header}" ; then ./hosts/quax.sh:91: echo -e "${RED}| The file has been removed (404).${NC}" ./hosts/quax.sh:92: removedDownload "${remote_url}" @@ -1934,7 +1935,7 @@ _________________________________________________________________________ ./hosts/quax.sh:178: tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" ./hosts/quax.sh:179: fi ./hosts/quax.sh:180: received_file_size=0 -./hosts/quax.sh:181: if [ -f "$file_path" ] ; then +./hosts/quax.sh:181: if [[ -f "$file_path" ]] ; then ./hosts/quax.sh:182: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') ./hosts/quax.sh:183: fi ./hosts/quax.sh:184: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -1944,28 +1945,28 @@ _________________________________________________________________________ ./hosts/quax.sh:188: fi -- ./hosts/ranoz.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/ranoz.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/ranoz.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/ranoz.sh:92: debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}" ./hosts/ranoz.sh:93: fi ./hosts/ranoz.sh:94: if [[ -z $response ]] ; then -./hosts/ranoz.sh:95: if [ $i == $maxfetchretries ] ; then +./hosts/ranoz.sh:95: if [[ $i == $maxfetchretries ]] ; then ./hosts/ranoz.sh:96: printf "\\n" ./hosts/ranoz.sh:97: echo -e "${RED}| Failed to extract download url [1]${NC}" ./hosts/ranoz.sh:98: warnAndRetryUnknownError=true -./hosts/ranoz.sh:99: if [ "${finalAttempt}" == "true" ] ; then +./hosts/ranoz.sh:99: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/ranoz.sh:100: failedRetryDownload "${remote_url}" "Failed to extract download url [1]" "" -- ./hosts/ranoz.sh:160: file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url") -./hosts/ranoz.sh:161: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/ranoz.sh:161: if [[ "${DebugAllEnabled}" == "true" ]] ; 
then ./hosts/ranoz.sh:162: debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/ranoz.sh:163: fi ./hosts/ranoz.sh:164: if [[ -z $file_header ]] ; then -./hosts/ranoz.sh:165: if [ $j == $maxfetchretries ] ; then +./hosts/ranoz.sh:165: if [[ $j == $maxfetchretries ]] ; then ./hosts/ranoz.sh:166: rm -f "${rz_cookie_jar}"; ./hosts/ranoz.sh:167: printf "\\n" ./hosts/ranoz.sh:168: echo -e "${RED}| Failed to extract file info${NC}" ./hosts/ranoz.sh:169: warnAndRetryUnknownError=true -./hosts/ranoz.sh:170: if [ "${finalAttempt}" == "true" ] ; then +./hosts/ranoz.sh:170: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/ranoz.sh:272: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/ranoz.sh:273: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -1975,7 +1976,7 @@ _________________________________________________________________________ ./hosts/ranoz.sh:277: "$download_url" --continue-at - --output "$file_path" ./hosts/ranoz.sh:278: fi ./hosts/ranoz.sh:279: else -./hosts/ranoz.sh:280: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/ranoz.sh:280: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/ranoz.sh:281: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/ranoz.sh:282: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/ranoz.sh:283: -H "User-Agent: $RandomUA" \ @@ -2001,99 +2002,99 @@ _________________________________________________________________________ ./hosts/ranoz.sh:306: -H "Sec-Fetch-Site: same-origin" \ -- ./hosts/sendnow.sh:90: response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url") -./hosts/sendnow.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/sendnow.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/sendnow.sh:92: debugHtml "${remote_url##*/}" "snow_dwnpage$i" "${response}" ./hosts/sendnow.sh:93: fi ./hosts/sendnow.sh:94: if [[ -z $response ]] ; then ./hosts/sendnow.sh:95: rm -f "${snow_cookie_jar}"; -./hosts/sendnow.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/sendnow.sh:96: if [[ $i == $maxfetchretries ]] ; then ./hosts/sendnow.sh:97: printf "\\n" ./hosts/sendnow.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/sendnow.sh:99: warnAndRetryUnknownError=true -./hosts/sendnow.sh:100: if [ "${finalAttempt}" == "true" ] ; then +./hosts/sendnow.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/sendnow.sh:160: response=$(tor_curl_request --insecure -L -svo. 
-X POST \ ./hosts/sendnow.sh:161: -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \ ./hosts/sendnow.sh:162: --data-raw "$form_data" "$remote_url" 2>&1) -./hosts/sendnow.sh:163: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/sendnow.sh:163: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/sendnow.sh:164: debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/sendnow.sh:165: fi ./hosts/sendnow.sh:166: if [[ -z $response ]] ; then ./hosts/sendnow.sh:167: echo -e "${RED}| Failed to extract download link [2]${NC}" ./hosts/sendnow.sh:168: warnAndRetryUnknownError=true -./hosts/sendnow.sh:169: if [ "${finalAttempt}" == "true" ] ; then +./hosts/sendnow.sh:169: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/sendnow.sh:170: rm -f "${snow_cookie_jar}"; -- -./hosts/sendnow.sh:203: file_header=$(tor_curl_request_extended --insecure --head -Lis \ -./hosts/sendnow.sh:204: -H "Host: $fshost" \ -./hosts/sendnow.sh:205: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ -./hosts/sendnow.sh:206: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/sendnow.sh:207: -H "Accept-Encoding: gzip, deflate, br, zstd" \ -./hosts/sendnow.sh:208: -H "Referer: https://send.now/" \ -./hosts/sendnow.sh:209: -H "Sec-GPC: 1" \ -./hosts/sendnow.sh:210: -H "Connection: keep-alive" \ -./hosts/sendnow.sh:211: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/sendnow.sh:212: -H "Sec-Fetch-Dest: document" \ -./hosts/sendnow.sh:213: -H "Sec-Fetch-Mode: navigate" \ +./hosts/sendnow.sh:204: file_header=$(tor_curl_request_extended --insecure --head -Lis \ +./hosts/sendnow.sh:205: -H "Host: $fshost" \ +./hosts/sendnow.sh:206: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ +./hosts/sendnow.sh:207: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/sendnow.sh:208: -H "Accept-Encoding: gzip, deflate, br, zstd" \ +./hosts/sendnow.sh:209: -H "Referer: https://send.now/" \ +./hosts/sendnow.sh:210: -H "Sec-GPC: 1" \ +./hosts/sendnow.sh:211: -H "Connection: keep-alive" \ +./hosts/sendnow.sh:212: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/sendnow.sh:213: -H "Sec-Fetch-Dest: document" \ +./hosts/sendnow.sh:214: -H "Sec-Fetch-Mode: navigate" \ -- -./hosts/sendnow.sh:326: tor_curl_request_extended --insecure -L --no-alpn \ -./hosts/sendnow.sh:327: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/sendnow.sh:328: -H "Host: $fshost" \ -./hosts/sendnow.sh:329: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ -./hosts/sendnow.sh:330: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/sendnow.sh:331: -H "Accept-Encoding: gzip, deflate, br, zstd" \ -./hosts/sendnow.sh:332: -H "Referer: https://send.now/" \ -./hosts/sendnow.sh:333: -H "Sec-GPC: 1" \ -./hosts/sendnow.sh:334: -H "Connection: keep-alive" \ -./hosts/sendnow.sh:335: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/sendnow.sh:336: -H "Sec-Fetch-Dest: document" \ +./hosts/sendnow.sh:327: tor_curl_request_extended --insecure -L --no-alpn \ +./hosts/sendnow.sh:328: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/sendnow.sh:329: -H "Host: $fshost" \ +./hosts/sendnow.sh:330: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ +./hosts/sendnow.sh:331: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/sendnow.sh:332: -H "Accept-Encoding: gzip, deflate, br, zstd" \ +./hosts/sendnow.sh:333: -H "Referer: https://send.now/" \ +./hosts/sendnow.sh:334: -H 
"Sec-GPC: 1" \ +./hosts/sendnow.sh:335: -H "Connection: keep-alive" \ +./hosts/sendnow.sh:336: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/sendnow.sh:337: -H "Sec-Fetch-Dest: document" \ -- -./hosts/sendnow.sh:344: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:345: -H "Host: $fshost" \ -./hosts/sendnow.sh:346: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ -./hosts/sendnow.sh:347: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/sendnow.sh:348: -H "Accept-Encoding: gzip, deflate, br, zstd" \ -./hosts/sendnow.sh:349: -H "Referer: https://send.now/" \ -./hosts/sendnow.sh:350: -H "Sec-GPC: 1" \ -./hosts/sendnow.sh:351: -H "Connection: keep-alive" \ -./hosts/sendnow.sh:352: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/sendnow.sh:353: -H "Sec-Fetch-Dest: document" \ -./hosts/sendnow.sh:354: -H "Sec-Fetch-Mode: navigate" \ +./hosts/sendnow.sh:345: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:346: -H "Host: $fshost" \ +./hosts/sendnow.sh:347: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ +./hosts/sendnow.sh:348: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/sendnow.sh:349: -H "Accept-Encoding: gzip, deflate, br, zstd" \ +./hosts/sendnow.sh:350: -H "Referer: https://send.now/" \ +./hosts/sendnow.sh:351: -H "Sec-GPC: 1" \ +./hosts/sendnow.sh:352: -H "Connection: keep-alive" \ +./hosts/sendnow.sh:353: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/sendnow.sh:354: -H "Sec-Fetch-Dest: document" \ +./hosts/sendnow.sh:355: -H "Sec-Fetch-Mode: navigate" \ -- -./hosts/sendnow.sh:363: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:364: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -./hosts/sendnow.sh:365: -H "User-Agent: $RandomUA" \ -./hosts/sendnow.sh:366: -H "Host: $fshost" \ -./hosts/sendnow.sh:367: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ -./hosts/sendnow.sh:368: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/sendnow.sh:369: -H "Accept-Encoding: gzip, deflate, br, zstd" \ -./hosts/sendnow.sh:370: -H "Referer: https://send.now/" \ -./hosts/sendnow.sh:371: -H "Sec-GPC: 1" \ -./hosts/sendnow.sh:372: -H "Connection: keep-alive" \ -./hosts/sendnow.sh:373: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/sendnow.sh:364: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:365: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ +./hosts/sendnow.sh:366: -H "User-Agent: $RandomUA" \ +./hosts/sendnow.sh:367: -H "Host: $fshost" \ +./hosts/sendnow.sh:368: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ +./hosts/sendnow.sh:369: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/sendnow.sh:370: -H "Accept-Encoding: gzip, deflate, br, zstd" \ +./hosts/sendnow.sh:371: -H "Referer: https://send.now/" \ +./hosts/sendnow.sh:372: -H "Sec-GPC: 1" \ +./hosts/sendnow.sh:373: -H "Connection: keep-alive" \ +./hosts/sendnow.sh:374: -H "Upgrade-Insecure-Requests: 1" \ -- -./hosts/sendnow.sh:382: tor_curl_request --insecure -L --no-alpn \ -./hosts/sendnow.sh:383: -H "User-Agent: $RandomUA" \ -./hosts/sendnow.sh:384: -H "Host: $fshost" \ -./hosts/sendnow.sh:385: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ -./hosts/sendnow.sh:386: -H "Accept-Language: en-US,en;q=0.5" \ -./hosts/sendnow.sh:387: -H "Accept-Encoding: gzip, deflate, br, zstd" \ -./hosts/sendnow.sh:388: -H "Referer: https://send.now/" \ -./hosts/sendnow.sh:389: -H "Sec-GPC: 1" \ -./hosts/sendnow.sh:390: 
-H "Connection: keep-alive" \ -./hosts/sendnow.sh:391: -H "Upgrade-Insecure-Requests: 1" \ -./hosts/sendnow.sh:392: -H "Sec-Fetch-Dest: document" \ +./hosts/sendnow.sh:383: tor_curl_request --insecure -L --no-alpn \ +./hosts/sendnow.sh:384: -H "User-Agent: $RandomUA" \ +./hosts/sendnow.sh:385: -H "Host: $fshost" \ +./hosts/sendnow.sh:386: -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" \ +./hosts/sendnow.sh:387: -H "Accept-Language: en-US,en;q=0.5" \ +./hosts/sendnow.sh:388: -H "Accept-Encoding: gzip, deflate, br, zstd" \ +./hosts/sendnow.sh:389: -H "Referer: https://send.now/" \ +./hosts/sendnow.sh:390: -H "Sec-GPC: 1" \ +./hosts/sendnow.sh:391: -H "Connection: keep-alive" \ +./hosts/sendnow.sh:392: -H "Upgrade-Insecure-Requests: 1" \ +./hosts/sendnow.sh:393: -H "Sec-Fetch-Dest: document" \ -- ./hosts/syspro.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/syspro.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/syspro.sh:89: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/syspro.sh:90: debugHtml "${remote_url##*/}" "sysp_fetch$i" "${response}" ./hosts/syspro.sh:91: fi ./hosts/syspro.sh:92: if [[ -z $response ]] ; then -./hosts/syspro.sh:93: if [ $i == $maxfetchretries ] ; then +./hosts/syspro.sh:93: if [[ $i == $maxfetchretries ]] ; then ./hosts/syspro.sh:94: printf "\\n" ./hosts/syspro.sh:95: echo -e "${RED}| Failed to extract download link [1]${NC}" ./hosts/syspro.sh:96: warnAndRetryUnknownError=true -./hosts/syspro.sh:97: if [ "${finalAttempt}" == "true" ] ; then +./hosts/syspro.sh:97: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/syspro.sh:98: failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" -- ./hosts/syspro.sh:188: tor_curl_request --insecure -L \ @@ -2106,7 +2107,7 @@ _________________________________________________________________________ ./hosts/syspro.sh:195: --output "$file_path" --output "$file_path" ./hosts/syspro.sh:196: fi ./hosts/syspro.sh:197: else -./hosts/syspro.sh:198: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/syspro.sh:198: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/syspro.sh:199: tor_curl_request --insecure -L \ ./hosts/syspro.sh:200: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/syspro.sh:201: -H "User-Agent: $RandomUA" \ @@ -2126,24 +2127,24 @@ _________________________________________________________________________ ./hosts/syspro.sh:215: fi -- ./hosts/tempfileme.sh:89: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/tempfileme.sh:90: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/tempfileme.sh:90: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/tempfileme.sh:91: debugHtml "${remote_url##*/}" "tmpme_fetch$j" "${response}" ./hosts/tempfileme.sh:92: fi ./hosts/tempfileme.sh:93: if [[ -z $response ]] ; then -./hosts/tempfileme.sh:94: if [ $j == $maxfetchretries ] ; then +./hosts/tempfileme.sh:94: if [[ $j == $maxfetchretries ]] ; then ./hosts/tempfileme.sh:95: printf "\\n" ./hosts/tempfileme.sh:96: echo -e "${RED}| Failed to extract download link${NC}" ./hosts/tempfileme.sh:97: warnAndRetryUnknownError=true -./hosts/tempfileme.sh:98: if [ "${finalAttempt}" == "true" ] ; then +./hosts/tempfileme.sh:98: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/tempfileme.sh:99: failedRetryDownload "${remote_url}" "Failed to extract download link" "" -- ./hosts/tempfileme.sh:170: file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url}" 
"$download_url") -./hosts/tempfileme.sh:171: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/tempfileme.sh:171: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/tempfileme.sh:172: debugHtml "${remote_url##*/}" "tmpme_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/tempfileme.sh:173: fi -./hosts/tempfileme.sh:174: if [ ! -z "$file_header" ] ; then +./hosts/tempfileme.sh:174: if [[ ! -z "$file_header" ]] ; then ./hosts/tempfileme.sh:175: if grep -Eqi 'HTTP.* 404|Not Found' <<< "${file_header}" ; then -./hosts/tempfileme.sh:176: if [ $j == $maxfetchretries ] ; then +./hosts/tempfileme.sh:176: if [[ $j == $maxfetchretries ]] ; then ./hosts/tempfileme.sh:177: printf "\\n" ./hosts/tempfileme.sh:178: echo -e "${RED}| The file has been removed (404).${NC}" ./hosts/tempfileme.sh:179: removedDownload "${remote_url}" @@ -2159,7 +2160,7 @@ _________________________________________________________________________ ./hosts/tempfileme.sh:307: "$download_url" --continue-at - --output "$file_path" ./hosts/tempfileme.sh:308: fi ./hosts/tempfileme.sh:309: else -./hosts/tempfileme.sh:310: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/tempfileme.sh:310: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/tempfileme.sh:311: tor_curl_request --insecure -L \ ./hosts/tempfileme.sh:312: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/tempfileme.sh:313: -H "User-Agent: $RandomUA" \ @@ -2188,11 +2189,11 @@ _________________________________________________________________________ ./hosts/tempsh.sh:89: -H "Connection: keep-alive" \ ./hosts/tempsh.sh:90: -w 'EffectiveUrl=%{url_effective}' \ ./hosts/tempsh.sh:91: "$download_url") -./hosts/tempsh.sh:92: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/tempsh.sh:92: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/tempsh.sh:93: debugHtml "${remote_url##*/}" "tmpsh_posthead" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/tempsh.sh:94: fi ./hosts/tempsh.sh:95: if [[ -z $file_header ]] ; then -./hosts/tempsh.sh:96: if [ $j == $maxfetchretries ] ; then +./hosts/tempsh.sh:96: if [[ $j == $maxfetchretries ]] ; then ./hosts/tempsh.sh:97: printf "\\n" ./hosts/tempsh.sh:98: echo -e "${RED}| Failed to extract file info [1]${NC}" -- @@ -2204,7 +2205,7 @@ _________________________________________________________________________ ./hosts/tempsh.sh:232: "$download_url" --continue-at - --output "$file_path" ./hosts/tempsh.sh:233: fi ./hosts/tempsh.sh:234: else -./hosts/tempsh.sh:235: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/tempsh.sh:235: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/tempsh.sh:236: tor_curl_request --insecure -X POST \ ./hosts/tempsh.sh:237: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/tempsh.sh:238: -H "User-Agent: $RandomUA" \ @@ -2232,11 +2233,11 @@ _________________________________________________________________________ ./hosts/torup.sh:92: response=$(tor_curl_request --insecure -L -s \ ./hosts/torup.sh:93: -c "${fdot_cookie_jar}" \ ./hosts/torup.sh:94: "$fixed_url") -./hosts/torup.sh:95: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/torup.sh:95: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/torup.sh:96: debugHtml "${remote_url##*/}" "torp_fetch$i" "${response}" ./hosts/torup.sh:97: fi ./hosts/torup.sh:98: if [[ -z $response ]] ; then -./hosts/torup.sh:99: if [ $i == $maxfetchretries ] ; then +./hosts/torup.sh:99: if [[ $i == $maxfetchretries ]] ; then ./hosts/torup.sh:100: printf "\\n" 
./hosts/torup.sh:101: echo -e "${RED}| Failed to extract download url [1]${NC}"
./hosts/torup.sh:102: warnAndRetryUnknownError=true
@@ -2253,7 +2254,7 @@ _________________________________________________________________________
./hosts/torup.sh:199: "$download_url" --output "$file_path"
./hosts/torup.sh:200: fi
./hosts/torup.sh:201: else
-./hosts/torup.sh:202: if [ "${RateMonitorEnabled}" == "true" ]; then
+./hosts/torup.sh:202: if [[ "${RateMonitorEnabled}" == "true" ]]; then
./hosts/torup.sh:203: tor_curl_request --insecure -L -G --no-alpn \
./hosts/torup.sh:204: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
./hosts/torup.sh:205: -H "User-Agent: $RandomUA" \
@@ -2281,35 +2282,35 @@ _________________________________________________________________________
./hosts/up2share.sh:91: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
./hosts/up2share.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \
./hosts/up2share.sh:93: "$fixed_url")
-./hosts/up2share.sh:94: if [ "${DebugAllEnabled}" == "true" ] ; then
+./hosts/up2share.sh:94: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up2share.sh:95: debugHtml "${remote_url##*/}" "up2share_fetch$i" "${response}"
./hosts/up2share.sh:96: fi
./hosts/up2share.sh:97: if [[ -z $response ]] ; then
./hosts/up2share.sh:98: rm -f "${up2share_cookie_jar}";
-./hosts/up2share.sh:99: if [ $i == $maxfetchretries ] ; then
+./hosts/up2share.sh:99: if [[ $i == $maxfetchretries ]] ; then
./hosts/up2share.sh:100: printf "\\n"
./hosts/up2share.sh:101: echo -e "${RED}| Failed to extract token link [1].${NC}"
--
./hosts/up2share.sh:144: response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
./hosts/up2share.sh:145: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \
./hosts/up2share.sh:146: "$download_url")
-./hosts/up2share.sh:147: if [ "${DebugAllEnabled}" == "true" ] ; then
+./hosts/up2share.sh:147: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up2share.sh:148: debugHtml "${remote_url##*/}" "up2share_down$i" "${response}"
./hosts/up2share.sh:149: fi
./hosts/up2share.sh:150: if [[ -z $response ]] ; then
./hosts/up2share.sh:151: rm -f "${up2share_cookie_jar}";
-./hosts/up2share.sh:152: if [ $i == $maxfetchretries ] ; then
+./hosts/up2share.sh:152: if [[ $i == $maxfetchretries ]] ; then
./hosts/up2share.sh:153: printf "\\n"
./hosts/up2share.sh:154: echo -e "${RED}| Failed to extract download link [1].${NC}"
--
./hosts/up2share.sh:195: file_header=$(tor_curl_request --insecure -L -s --head \
./hosts/up2share.sh:196: -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \
./hosts/up2share.sh:197: --referer "https://up2sha.re/" "$download_url")
-./hosts/up2share.sh:198: if [ "${DebugAllEnabled}" == "true" ] ; then
+./hosts/up2share.sh:198: if [[ "${DebugAllEnabled}" == "true" ]] ; then
./hosts/up2share.sh:199: debugHtml "${remote_url##*/}" "up2share_head$j" "download_url: ${download_url}"$'\n'"${file_header}"
./hosts/up2share.sh:200: fi
./hosts/up2share.sh:201: if [[ -z $file_header ]] ; then
-./hosts/up2share.sh:202: if [ $j == $maxfetchretries ] ; then
+./hosts/up2share.sh:202: if [[ $j == $maxfetchretries ]] ; then
./hosts/up2share.sh:203: rm -f "${up2share_cookie_jar}";
./hosts/up2share.sh:204: printf "\\n"
./hosts/up2share.sh:205: echo -e "${RED}| Failed to extract file info${NC}"
@@ -2328,7 +2329,7 @@ _________________________________________________________________________
./hosts/up2share.sh:325: "$download_url" --continue-at - --output "$file_path" ./hosts/up2share.sh:326: fi ./hosts/up2share.sh:327: else -./hosts/up2share.sh:328: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/up2share.sh:328: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/up2share.sh:329: tor_curl_request --insecure -L \ ./hosts/up2share.sh:330: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/up2share.sh:331: -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ @@ -2354,28 +2355,28 @@ _________________________________________________________________________ ./hosts/up2share.sh:357: -H "Sec-Fetch-Dest: document" \ -- ./hosts/uploadee.sh:90: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/uploadee.sh:91: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadee.sh:91: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadee.sh:92: debugHtml "${remote_url##*/}" "upee_dwnpage$i" "${response}" ./hosts/uploadee.sh:93: fi ./hosts/uploadee.sh:94: if [[ -z $response ]] ; then ./hosts/uploadee.sh:95: rm -f "${upee_cookie_jar}"; -./hosts/uploadee.sh:96: if [ $i == $maxfetchretries ] ; then +./hosts/uploadee.sh:96: if [[ $i == $maxfetchretries ]] ; then ./hosts/uploadee.sh:97: printf "\\n" ./hosts/uploadee.sh:98: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/uploadee.sh:99: warnAndRetryUnknownError=true -./hosts/uploadee.sh:100: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadee.sh:100: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/uploadee.sh:143: file_header=$(tor_curl_request --insecure --head -L -s -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --referer "$remote_url" "$download_url") -./hosts/uploadee.sh:144: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadee.sh:144: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadee.sh:145: debugHtml "${remote_url##*/}" "upee_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/uploadee.sh:146: fi ./hosts/uploadee.sh:147: if [[ -z $file_header ]] ; then -./hosts/uploadee.sh:148: if [ $j == $maxfetchretries ] ; then +./hosts/uploadee.sh:148: if [[ $j == $maxfetchretries ]] ; then ./hosts/uploadee.sh:149: rm -f "${upee_cookie_jar}"; ./hosts/uploadee.sh:150: printf "\\n" ./hosts/uploadee.sh:151: echo -e "${RED}| Failed to extract file info.${NC}" ./hosts/uploadee.sh:152: warnAndRetryUnknownError=true -./hosts/uploadee.sh:153: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadee.sh:153: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/uploadee.sh:251: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/uploadee.sh:252: -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -2390,7 +2391,7 @@ _________________________________________________________________________ ./hosts/uploadee.sh:261: --continue-at - --output "$file_path" ./hosts/uploadee.sh:262: fi ./hosts/uploadee.sh:263: else -./hosts/uploadee.sh:264: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/uploadee.sh:264: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/uploadee.sh:265: tor_curl_request --insecure -L -G --no-alpn \ ./hosts/uploadee.sh:266: -b "${upee_cookie_jar}" -c "${upee_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/uploadee.sh:267: -H "Host: www.upload.ee" \ @@ -2418,38 +2419,38 @@ _________________________________________________________________________ ./hosts/uploadev.sh:91: 
response=$(tor_curl_request --insecure -L -s -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ ./hosts/uploadev.sh:92: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ ./hosts/uploadev.sh:93: "$fixed_url") -./hosts/uploadev.sh:94: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadev.sh:94: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadev.sh:95: debugHtml "${remote_url##*/}" "upev_fetch$i" "${response}" ./hosts/uploadev.sh:96: fi ./hosts/uploadev.sh:97: if [[ -z $response ]] ; then ./hosts/uploadev.sh:98: rm -f "${upev_cookie_jar}"; -./hosts/uploadev.sh:99: if [ $i == $maxfetchretries ] ; then +./hosts/uploadev.sh:99: if [[ $i == $maxfetchretries ]] ; then ./hosts/uploadev.sh:100: printf "\\n" ./hosts/uploadev.sh:101: echo -e "${RED}| Failed to extract download link [1]${NC}" -- ./hosts/uploadev.sh:181: response=$(tor_curl_request --insecure -L -s -X POST \ ./hosts/uploadev.sh:182: -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ ./hosts/uploadev.sh:183: --data "$form_data" "$fixed_url") -./hosts/uploadev.sh:184: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadev.sh:184: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadev.sh:185: debugHtml "${remote_url##*/}" "upev_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" ./hosts/uploadev.sh:186: fi ./hosts/uploadev.sh:187: if [[ -z $response ]] ; then -./hosts/uploadev.sh:188: if [ $i == $maxfetchretries ] ; then +./hosts/uploadev.sh:188: if [[ $i == $maxfetchretries ]] ; then ./hosts/uploadev.sh:189: rm -f "${upev_cookie_jar}"; ./hosts/uploadev.sh:190: printf "\\n" ./hosts/uploadev.sh:191: echo -e "${RED}| Failed to extract download link [4]${NC}" -- ./hosts/uploadev.sh:268: file_header=$(tor_curl_request --insecure -L --head -s "$download_url") -./hosts/uploadev.sh:269: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadev.sh:269: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadev.sh:270: debugHtml "${remote_url##*/}" "upev_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/uploadev.sh:271: fi ./hosts/uploadev.sh:272: if [[ -z $file_header ]] ; then -./hosts/uploadev.sh:273: if [ $j == $maxfetchretries ] ; then +./hosts/uploadev.sh:273: if [[ $j == $maxfetchretries ]] ; then ./hosts/uploadev.sh:274: rm -f "${upev_cookie_jar}"; ./hosts/uploadev.sh:275: printf "\\n" ./hosts/uploadev.sh:276: echo -e "${RED}| Failed to extract file info [1]${NC}" ./hosts/uploadev.sh:277: warnAndRetryUnknownError=true -./hosts/uploadev.sh:278: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadev.sh:278: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/uploadev.sh:369: tor_curl_request --insecure -L \ ./hosts/uploadev.sh:370: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ @@ -2461,7 +2462,7 @@ _________________________________________________________________________ ./hosts/uploadev.sh:376: "$download_url" --continue-at - --output "$file_path" ./hosts/uploadev.sh:377: fi ./hosts/uploadev.sh:378: else -./hosts/uploadev.sh:379: if [ "${RateMonitorEnabled}" == "true" ]; then +./hosts/uploadev.sh:379: if [[ "${RateMonitorEnabled}" == "true" ]]; then ./hosts/uploadev.sh:380: tor_curl_request --insecure -L \ ./hosts/uploadev.sh:381: --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ ./hosts/uploadev.sh:382: -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ @@ -2487,39 +2488,39 @@ _________________________________________________________________________ ./hosts/uploadev.sh:406: -H 
"Sec-Fetch-Mode: navigate" \ -- ./hosts/uploadflix.sh:97: response=$(tor_curl_request --insecure -L -s "${fixed_url}") -./hosts/uploadflix.sh:98: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadflix.sh:98: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadflix.sh:99: debugHtml "${remote_url##*/}" "uflix_dwnpage$j" "${response}" ./hosts/uploadflix.sh:100: fi ./hosts/uploadflix.sh:101: if [[ -z $response ]] ; then -./hosts/uploadflix.sh:102: if [ $j == $maxfetchretries ] ; then +./hosts/uploadflix.sh:102: if [[ $j == $maxfetchretries ]] ; then ./hosts/uploadflix.sh:103: printf "\\n" ./hosts/uploadflix.sh:104: echo -e "${RED}| Failed to extract post link.${NC}" ./hosts/uploadflix.sh:105: warnAndRetryUnknownError=true -./hosts/uploadflix.sh:106: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadflix.sh:106: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/uploadflix.sh:107: failedRetryDownload "${remote_url}" "" "" -- ./hosts/uploadflix.sh:150: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "${fixed_url}") -./hosts/uploadflix.sh:151: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadflix.sh:151: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadflix.sh:152: debugHtml "${remote_url##*/}" "uflix_post" "form_data: ${form_data}"$'\n'"${response}" ./hosts/uploadflix.sh:153: fi ./hosts/uploadflix.sh:154: if [[ -z $response ]] ; then ./hosts/uploadflix.sh:155: echo -e "${RED}| Failed to extract download link.${NC}" ./hosts/uploadflix.sh:156: warnAndRetryUnknownError=true -./hosts/uploadflix.sh:157: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadflix.sh:157: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/uploadflix.sh:158: failedRetryDownload "${remote_url}" "" "" ./hosts/uploadflix.sh:159: fi ./hosts/uploadflix.sh:160: return 1 -- ./hosts/uploadflix.sh:192: file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "${download_url}") -./hosts/uploadflix.sh:193: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadflix.sh:193: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadflix.sh:194: debugHtml "${remote_url##*/}" "uflix_head$j" "download_url: ${download_url}"$'\n'"${file_header}" ./hosts/uploadflix.sh:195: fi ./hosts/uploadflix.sh:196: if [[ -z $file_header ]] ; then -./hosts/uploadflix.sh:197: if [ $j == $maxfetchretries ] ; then +./hosts/uploadflix.sh:197: if [[ $j == $maxfetchretries ]] ; then ./hosts/uploadflix.sh:198: printf "\\n" ./hosts/uploadflix.sh:199: echo -e "${RED}| Failed to extract file info.${NC}" ./hosts/uploadflix.sh:200: warnAndRetryUnknownError=true -./hosts/uploadflix.sh:201: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadflix.sh:201: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/uploadflix.sh:202: failedRetryDownload "${remote_url}" "" "" -- ./hosts/uploadflix.sh:286: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" @@ -2527,7 +2528,7 @@ _________________________________________________________________________ ./hosts/uploadflix.sh:288: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" ./hosts/uploadflix.sh:289: fi ./hosts/uploadflix.sh:290: received_file_size=0 -./hosts/uploadflix.sh:291: if [ -f "$file_path" ] ; then +./hosts/uploadflix.sh:291: if [[ -f "$file_path" ]] ; then ./hosts/uploadflix.sh:292: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') 
./hosts/uploadflix.sh:293: fi ./hosts/uploadflix.sh:294: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -2537,62 +2538,50 @@ _________________________________________________________________________ ./hosts/uploadflix.sh:298: fi -- ./hosts/uploadhive.sh:88: response=$(tor_curl_request --insecure -L -s "$remote_url") -./hosts/uploadhive.sh:89: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/uploadhive.sh:89: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/uploadhive.sh:90: debugHtml "${remote_url##*/}" "uhive_dwnpage$j" "${response}" ./hosts/uploadhive.sh:91: fi ./hosts/uploadhive.sh:92: if [[ -z $response ]] ; then -./hosts/uploadhive.sh:93: if [ $j == $maxfetchretries ] ; then +./hosts/uploadhive.sh:93: if [[ $j == $maxfetchretries ]] ; then ./hosts/uploadhive.sh:94: printf "\\n" ./hosts/uploadhive.sh:95: echo -e "${RED}| Failed to extract post link.${NC}" ./hosts/uploadhive.sh:96: warnAndRetryUnknownError=true -./hosts/uploadhive.sh:97: if [ "${finalAttempt}" == "true" ] ; then +./hosts/uploadhive.sh:97: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/uploadhive.sh:98: failedRetryDownload "${remote_url}" "" "" -- -./hosts/uploadhive.sh:134: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url") -./hosts/uploadhive.sh:135: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/uploadhive.sh:136: debugHtml "${remote_url##*/}" "uhive_post" "${response}" -./hosts/uploadhive.sh:137: fi -./hosts/uploadhive.sh:138: if [[ -z $response ]] ; then -./hosts/uploadhive.sh:139: echo -e "${RED}| Failed to extract download link [1]${NC}" -./hosts/uploadhive.sh:140: warnAndRetryUnknownError=true -./hosts/uploadhive.sh:141: if [ "${finalAttempt}" == "true" ] ; then -./hosts/uploadhive.sh:142: failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" -./hosts/uploadhive.sh:143: fi -./hosts/uploadhive.sh:144: return 1 +./hosts/uploadhive.sh:135: response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url") +./hosts/uploadhive.sh:136: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./hosts/uploadhive.sh:137: debugHtml "${remote_url##*/}" "uhive_post" "${response}" +./hosts/uploadhive.sh:138: fi +./hosts/uploadhive.sh:139: if [[ -z $response ]] ; then +./hosts/uploadhive.sh:140: echo -e "${RED}| Failed to extract download link [1]${NC}" +./hosts/uploadhive.sh:141: warnAndRetryUnknownError=true +./hosts/uploadhive.sh:142: if [[ "${finalAttempt}" == "true" ]] ; then +./hosts/uploadhive.sh:143: failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" +./hosts/uploadhive.sh:144: fi +./hosts/uploadhive.sh:145: return 1 -- -./hosts/uploadhive.sh:185: file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url") -./hosts/uploadhive.sh:186: if [ "${DebugAllEnabled}" == "true" ] ; then -./hosts/uploadhive.sh:187: debugHtml "${remote_url##*/}" "uhive_head$j" "download_url: ${download_url}"$'\n'"${file_header}" -./hosts/uploadhive.sh:188: fi -./hosts/uploadhive.sh:189: if [[ -z $file_header ]] ; then -./hosts/uploadhive.sh:190: if [ $j == $maxfetchretries ] ; then -./hosts/uploadhive.sh:191: printf "\\n" -./hosts/uploadhive.sh:192: echo -e "${RED}| Failed to extract file info.${NC}" -./hosts/uploadhive.sh:193: warnAndRetryUnknownError=true -./hosts/uploadhive.sh:194: if [ "${finalAttempt}" == "true" ] ; then -./hosts/uploadhive.sh:195: failedRetryDownload "${remote_url}" "" "" --- -./hosts/uploadhive.sh:279: tor_curl_request --insecure -L -G --speed-limit 
$DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./hosts/uploadhive.sh:280: else -./hosts/uploadhive.sh:281: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" -./hosts/uploadhive.sh:282: fi -./hosts/uploadhive.sh:283: received_file_size=0 -./hosts/uploadhive.sh:284: if [ -f "$file_path" ] ; then -./hosts/uploadhive.sh:285: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./hosts/uploadhive.sh:286: fi -./hosts/uploadhive.sh:287: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./hosts/uploadhive.sh:288: containsHtml=false -./hosts/uploadhive.sh:289: else -./hosts/uploadhive.sh:290: containsHtml=true -./hosts/uploadhive.sh:291: fi +./hosts/uploadhive.sh:247: tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:248: else +./hosts/uploadhive.sh:249: tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" +./hosts/uploadhive.sh:250: fi +./hosts/uploadhive.sh:251: received_file_size=0 +./hosts/uploadhive.sh:252: if [[ -f "$file_path" ]] ; then +./hosts/uploadhive.sh:253: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./hosts/uploadhive.sh:254: fi +./hosts/uploadhive.sh:255: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./hosts/uploadhive.sh:256: containsHtml=false +./hosts/uploadhive.sh:257: else +./hosts/uploadhive.sh:258: containsHtml=true +./hosts/uploadhive.sh:259: fi -- ./hosts/up_1fichier.sh:107: response=$(tor_curl_request --insecure -L -s "https://1fichier.com/") -./hosts/up_1fichier.sh:108: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_1fichier.sh:108: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_1fichier.sh:109: debugHtml "${filepath##*/}" "${_hostCode}_up_getid_$i" "url: https://1fichier.com/"$'\n'"${response}" ./hosts/up_1fichier.sh:110: fi ./hosts/up_1fichier.sh:111: if [[ -z $response ]] ; then -./hosts/up_1fichier.sh:112: if [ $i == $maxfetchretries ] ; then -./hosts/up_1fichier.sh:113: if [ "${finalAttempt}" == "true" ] ; then +./hosts/up_1fichier.sh:112: if [[ $i == $maxfetchretries ]] ; then +./hosts/up_1fichier.sh:113: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/up_1fichier.sh:114: printf "\\n" ./hosts/up_1fichier.sh:115: echo -e "${RED}| Upload failed. 
(GetId [1])${NC}" ./hosts/up_1fichier.sh:116: failedUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "No Response (GetId [1])" @@ -2608,7 +2597,7 @@ _________________________________________________________________________ ./hosts/up_1fichier.sh:187: -F "mails=" \ ./hosts/up_1fichier.sh:188: -F "message=" \ ./hosts/up_1fichier.sh:189: "${PostUrlHost}") -./hosts/up_1fichier.sh:190: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_1fichier.sh:190: if [[ "${DebugAllEnabled}" == "true" ]] ; then -- ./hosts/up_anonfile.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_anonfile.sh:103: -H "Content-Type: multipart/form-data" \ @@ -2626,7 +2615,7 @@ _________________________________________________________________________ ./hosts/up_anonsharing.sh:103: -H "Content-Type: multipart/form-data" \ ./hosts/up_anonsharing.sh:104: -F "files[]=@${arrFiles[@]}" \ ./hosts/up_anonsharing.sh:105: "${PostUrlHost}") -./hosts/up_anonsharing.sh:106: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_anonsharing.sh:106: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_anonsharing.sh:107: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_anonsharing.sh:108: fi ./hosts/up_anonsharing.sh:109: if grep -Eqi '"error":null,"url":"https:\\/\\/anonsharing.com\\/' <<< "${response}" ; then @@ -2641,22 +2630,22 @@ _________________________________________________________________________ ./hosts/up_ateasystems.sh:106: -F "link_pass=" \ ./hosts/up_ateasystems.sh:107: -F "file_0=@${filepath}" \ ./hosts/up_ateasystems.sh:108: "${PostUrlHost}") -./hosts/up_ateasystems.sh:109: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_ateasystems.sh:109: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_ateasystems.sh:110: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_ateasystems.sh:111: fi ./hosts/up_ateasystems.sh:112: if grep -Eqi "Location: https://share\.ateasystems\.com/share/\?\&filename\=" <<< "${response}" ; then -- ./hosts/up_axfc.sh:109: response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url") -./hosts/up_axfc.sh:110: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_axfc.sh:110: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_axfc.sh:111: debugHtml "${filepath##*/}" "axfc_fetch$i" "${response}" ./hosts/up_axfc.sh:112: fi ./hosts/up_axfc.sh:113: if [[ -z $response ]] ; then ./hosts/up_axfc.sh:114: rm -f "${axfc_cookie_jar}"; -./hosts/up_axfc.sh:115: if [ $i == $maxfetchretries ] ; then +./hosts/up_axfc.sh:115: if [[ $i == $maxfetchretries ]] ; then ./hosts/up_axfc.sh:116: printf "\\n" ./hosts/up_axfc.sh:117: echo -e "${RED}| Failed to start an upload [1]${NC}" ./hosts/up_axfc.sh:118: warnAndRetryUnknownError=true -./hosts/up_axfc.sh:119: if [ "${finalAttempt}" == "true" ] ; then +./hosts/up_axfc.sh:119: if [[ "${finalAttempt}" == "true" ]] ; then -- ./hosts/up_axfc.sh:136: response=$(tor_curl_upload --insecure -L -s -X POST \ ./hosts/up_axfc.sh:137: -H 'Referer: https://www.axfc.net/u/post_m.pl' \ @@ -2667,7 +2656,7 @@ _________________________________________________________________________ ./hosts/up_axfc.sh:142: --data-raw "method=upload&ext=ext&filename=1&comment=&address=&delpass=$randelkey&keyword=1234&count=&term=0&term_y=2024&term_mon=10&term_d=1&term_h=15&term_min=0&term_s=0&term_ps=&term_mp=3600" \ ./hosts/up_axfc.sh:143: -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: 
%{url_effective}\n" \ ./hosts/up_axfc.sh:144: "$fixed_url") -./hosts/up_axfc.sh:145: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_axfc.sh:145: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_axfc.sh:146: debugHtml "${filepath##*/}" "axfc_ticket$i" "${response}" -- ./hosts/up_axfc.sh:184: response=$(tor_curl_upload --insecure -L -i -X POST \ @@ -2676,18 +2665,18 @@ _________________________________________________________________________ ./hosts/up_axfc.sh:187: -F "filedata=@$filepath" \ ./hosts/up_axfc.sh:188: -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" \ ./hosts/up_axfc.sh:189: "$PostUrlHost") -./hosts/up_axfc.sh:190: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_axfc.sh:190: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_axfc.sh:191: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_axfc.sh:192: fi ./hosts/up_axfc.sh:193: response_ascii=$(mconvert_utf8_to_ascii "$response") -./hosts/up_axfc.sh:194: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_axfc.sh:194: if [[ "${DebugAllEnabled}" == "true" ]] ; then -- ./hosts/up_bedrive.sh:102: response=$(tor_curl_upload --insecure -i \ ./hosts/up_bedrive.sh:103: -H "Content-Type: multipart/form-data" \ ./hosts/up_bedrive.sh:104: -F "time=month" \ ./hosts/up_bedrive.sh:105: -F "files[]=@${arrFiles[@]}" \ ./hosts/up_bedrive.sh:106: "${PostUrlHost}") -./hosts/up_bedrive.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_bedrive.sh:107: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_bedrive.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_bedrive.sh:109: fi ./hosts/up_bedrive.sh:110: if grep -Eqi '"error":null,"url":"https:\\/\\/bedrive.ru\\/' <<< "${response}" ; then @@ -2698,7 +2687,7 @@ _________________________________________________________________________ ./hosts/up_bowfile.sh:109: -H "Content-Type: multipart/form-data" \ ./hosts/up_bowfile.sh:110: -F "files[]=@$filepath" \ ./hosts/up_bowfile.sh:111: "${PostUrlHost}") -./hosts/up_bowfile.sh:112: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_bowfile.sh:112: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_bowfile.sh:113: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_bowfile.sh:114: fi ./hosts/up_bowfile.sh:115: if grep -Eqi '"error":null,"url":"https:\\/\\/bowfile.com\\/' <<< "${response}" ; then @@ -2747,7 +2736,7 @@ _________________________________________________________________________ ./hosts/up_dbree.sh:104: -F "file[]=@${arrFiles[@]}" \ ./hosts/up_dbree.sh:105: -F "upload=Upload" \ ./hosts/up_dbree.sh:106: "${PostUrlHost}") -./hosts/up_dbree.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_dbree.sh:107: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_dbree.sh:108: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_dbree.sh:109: fi ./hosts/up_dbree.sh:110: if grep -Eqi 'URL: Why we block tor' <<< "${response}" ; then ./hosts/up_fileditch.sh:115: if ((j >= 20)); then -./hosts/up_fileditch.sh:116: if [ "${finalAttempt}" == "true" ] ; then +./hosts/up_fileditch.sh:116: if [[ "${finalAttempt}" == "true" ]] ; then ./hosts/up_fileditch.sh:117: printf "\\n" -- ./hosts/up_filehaus.sh:106: response=$(tor_curl_upload --insecure -i \ @@ -2795,7 +2784,7 @@ _________________________________________________________________________ 
./hosts/up_filehaus.sh:108: -F "file=@$filepath" \ ./hosts/up_filehaus.sh:109: -F "submit=Upload" \ ./hosts/up_filehaus.sh:110: "${PostUrlHost}") -./hosts/up_filehaus.sh:111: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_filehaus.sh:111: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_filehaus.sh:112: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_filehaus.sh:113: fi ./hosts/up_filehaus.sh:114: if grep -Eqi 'HTTP/.* 200|https://cdn' <<< "${response}" ; then @@ -2832,17 +2821,17 @@ _________________________________________________________________________ ./hosts/up_firestorage.sh:116: -F "exp=0" \ ./hosts/up_firestorage.sh:117: -F "Filename=@$filepath" \ ./hosts/up_firestorage.sh:118: "${PostUrlHost}") -./hosts/up_firestorage.sh:119: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_firestorage.sh:119: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_firestorage.sh:120: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_firestorage.sh:121: fi ./hosts/up_firestorage.sh:122: dec_response=$(urldecode "$response") ./hosts/up_firestorage.sh:123: if grep -Eqi ' /dev/null -./hosts/up_ranoz.sh:169: fi -./hosts/up_ranoz.sh:170: if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then +./hosts/up_ranoz.sh:155: response=$(tor_curl_upload --insecure -i -X PUT \ +./hosts/up_ranoz.sh:156: "${PostUrlHost}" \ +./hosts/up_ranoz.sh:157: --upload-file "$filepath" \ +./hosts/up_ranoz.sh:158: -H "Content-Length: $fsize") +./hosts/up_ranoz.sh:159: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./hosts/up_ranoz.sh:160: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" +./hosts/up_ranoz.sh:161: fi +./hosts/up_ranoz.sh:162: if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then +./hosts/up_ranoz.sh:163: filesize=$(GetFileSize "$filepath" "false") +./hosts/up_ranoz.sh:164: echo -e "${GREEN}| Upload Success${NC}" +./hosts/up_ranoz.sh:165: echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" +-- +./hosts/up_sendnow.sh:101: response=$(tor_curl_request --insecure -L -s 'https://send.now/upload') +./hosts/up_sendnow.sh:102: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./hosts/up_sendnow.sh:103: debugHtml "${filepath##*/}" "${_hostCode}_fetch" "${response}" +./hosts/up_sendnow.sh:104: fi +./hosts/up_sendnow.sh:105: if grep -Eqi "Your IP has been banned|you are banned" <<< "$response"; then +./hosts/up_sendnow.sh:106: if [[ "${finalAttempt}" == "true" ]] ; then +./hosts/up_sendnow.sh:107: printf "\\n" +./hosts/up_sendnow.sh:108: echo -e "${RED}| Failed to upload file: Ip blocked or banned${NC}" +./hosts/up_sendnow.sh:109: failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Ip blocked or banned" +./hosts/up_sendnow.sh:110: exitUploadError=true +./hosts/up_sendnow.sh:111: return 1 +-- +./hosts/up_sendnow.sh:138: response=$(tor_curl_upload --insecure -i \ +./hosts/up_sendnow.sh:139: -H "Content-Type: multipart/form-data" \ +./hosts/up_sendnow.sh:140: -F "sess_id=" \ +./hosts/up_sendnow.sh:141: -F "utype=anon" \ +./hosts/up_sendnow.sh:142: -F "file_descr=" \ +./hosts/up_sendnow.sh:143: -F "file_public=1" \ +./hosts/up_sendnow.sh:144: -F "link_rcpt=" \ +./hosts/up_sendnow.sh:145: -F "link_pass=" \ +./hosts/up_sendnow.sh:146: -F "to_folder=" \ +./hosts/up_sendnow.sh:147: -F "upload=Start upload" \ +./hosts/up_sendnow.sh:148: -F "keepalive=1" \ -- ./hosts/up_shareonline.sh:102: response=$(tor_curl_upload 
--insecure -i \ ./hosts/up_shareonline.sh:103: -H "Content-Type: multipart/form-data" \ ./hosts/up_shareonline.sh:104: -F "file[]=@${arrFiles[@]}" \ ./hosts/up_shareonline.sh:105: -F "upload=Upload" \ ./hosts/up_shareonline.sh:106: "${PostUrlHost}") -./hosts/up_shareonline.sh:107: if [ "${DebugAllEnabled}" == "true" ] ; then +./hosts/up_shareonline.sh:107: if [[ "${DebugAllEnabled}" == "true" ]] ; then ./hosts/up_shareonline.sh:108: debugHtml "${filepath##*/}" "${_hostCode}(${index})_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" ./hosts/up_shareonline.sh:109: fi ./hosts/up_shareonline.sh:110: if grep -Eqi 'URL: = MaxDownloadRetries)) ; then -./mad.sh:1238: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" -./mad.sh:1239: exit 1 +./mad.sh:1535: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1536: received_file_size=0 +./mad.sh:1537: if [[ -f "$file_path" ]] ; then +./mad.sh:1538: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:1539: fi +./mad.sh:1540: if ((received_file_size == file_size_bytes)) ; then +./mad.sh:1541: break +./mad.sh:1542: elif ((received_file_size < file_size_bytes)) ; then +./mad.sh:1543: if ((j >= MaxDownloadRetries)) ; then +./mad.sh:1544: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" +./mad.sh:1545: exit 1 -- -./mad.sh:1282: response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest) -./mad.sh:1283: if [ "${DebugAllEnabled}" == "true" ] ; then -./mad.sh:1284: debugHtml "github" "lbf_inst_curlimp$j" "$response" -./mad.sh:1285: fi -./mad.sh:1286: if [ ! -z "$response" ]; then -./mad.sh:1287: latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") -./mad.sh:1288: latestBinaryDate=$(grep -oPi -m 1 '(?<== MaxDownloadRetries)) ; then -./mad.sh:1361: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" -./mad.sh:1362: exit 1 +./mad.sh:1658: tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" +./mad.sh:1659: received_file_size=0 +./mad.sh:1660: if [[ -f "$file_path" ]] ; then +./mad.sh:1661: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:1662: fi +./mad.sh:1663: if ((received_file_size == file_size_bytes)) ; then +./mad.sh:1664: break +./mad.sh:1665: elif ((received_file_size < file_size_bytes)) ; then +./mad.sh:1666: if ((j >= MaxDownloadRetries)) ; then +./mad.sh:1667: echo -e "${RED}| FAILED: Size mismatch after downloading${NC}" +./mad.sh:1668: exit 1 -- -./mad.sh:1557: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1558: echo -e "Files:" -./mad.sh:1559: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1560: echo -e "" -./mad.sh:1561: echo -e "" -./mad.sh:1562: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1563: echo -e "_________________________________________________________________________" -./mad.sh:1564: echo -e "$maud_http" -./mad.sh:1565: echo -e "" -./mad.sh:1566: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1567: echo -e "_________________________________________________________________________" +./mad.sh:1863: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1864: echo -e "Files:" +./mad.sh:1865: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1866: echo -e "" +./mad.sh:1867: echo -e "" +./mad.sh:1868: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1869: echo -e "_________________________________________________________________________" +./mad.sh:1870: echo -e "$maud_http" +./mad.sh:1871: echo -e "" +./mad.sh:1872: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1873: echo -e "_________________________________________________________________________" -- -./mad.sh:1570: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1571: echo -e "_________________________________________________________________________" -./mad.sh:1572: echo -e "$maud_torcurl" -./mad.sh:1573: echo -e "" -./mad.sh:1574: echo -e "" -./mad.sh:1575: done -./mad.sh:1576: else -./mad.sh:1577: cd "$ScriptDir" -./mad.sh:1578: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) -./mad.sh:1579: cd "$WorkDir" -./mad.sh:1580: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +./mad.sh:1876: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1877: echo -e "_________________________________________________________________________" +./mad.sh:1878: echo -e "$maud_torcurl" +./mad.sh:1879: echo -e "" +./mad.sh:1880: echo -e "" +./mad.sh:1881: done +./mad.sh:1882: else +./mad.sh:1883: cd "$ScriptDir" +./mad.sh:1884: readarray -d $'' arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) +./mad.sh:1885: cd "$WorkDir" +./mad.sh:1886: readarray -d $'' arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1) -- -./mad.sh:1585: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1586: echo -e "Files:" -./mad.sh:1587: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1588: echo -e "" -./mad.sh:1589: echo -e "" -./mad.sh:1590: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1591: echo -e "_________________________________________________________________________" -./mad.sh:1592: echo -e "$maud_http" -./mad.sh:1593: echo -e "" -./mad.sh:1594: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" -./mad.sh:1595: echo -e "_________________________________________________________________________" +./mad.sh:1891: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1892: echo -e "Files:" +./mad.sh:1893: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1894: echo -e "" +./mad.sh:1895: echo -e "" +./mad.sh:1896: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1897: echo -e "_________________________________________________________________________" +./mad.sh:1898: echo -e "$maud_http" +./mad.sh:1899: echo -e "" +./mad.sh:1900: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})" +./mad.sh:1901: echo -e "_________________________________________________________________________" -- -./mad.sh:1598: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1599: echo -e "_________________________________________________________________________" -./mad.sh:1600: echo -e "$maud_torcurl" -./mad.sh:1601: echo -e "" -./mad.sh:1602: done -./mad.sh:1603: for fil in "${arrFiles2[@]}"; -./mad.sh:1604: do -./mad.sh:1605: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):') -./mad.sh:1606: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') -./mad.sh:1607: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep -A 12 --color='always' -Ei 'tor_curl') -./mad.sh:1608: echo -e "Files:" -./mad.sh:1609: echo -e "${BLUE}${fil}${NC}" -./mad.sh:1610: echo -e "" -./mad.sh:1611: echo -e "" -./mad.sh:1612: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" -./mad.sh:1613: echo -e "_________________________________________________________________________" -./mad.sh:1614: echo -e "$maud_http" -./mad.sh:1615: echo -e "" -./mad.sh:1616: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" -./mad.sh:1617: echo -e "_________________________________________________________________________" +./mad.sh:1904: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1905: echo -e "_________________________________________________________________________" +./mad.sh:1906: echo -e "$maud_torcurl" +./mad.sh:1907: echo -e "" +./mad.sh:1908: done +./mad.sh:1909: for fil in "${arrFiles2[@]}"; +./mad.sh:1910: do +./mad.sh:1911: maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei '(http|https):') +./mad.sh:1912: maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' $fil | grep --color='always' -Ei 'curl') +./mad.sh:1913: maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' 
$fil | grep -A 12 --color='always' -Ei 'tor_curl') +./mad.sh:1914: echo -e "Files:" +./mad.sh:1915: echo -e "${BLUE}${fil}${NC}" +./mad.sh:1916: echo -e "" +./mad.sh:1917: echo -e "" +./mad.sh:1918: echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})" +./mad.sh:1919: echo -e "_________________________________________________________________________" +./mad.sh:1920: echo -e "$maud_http" +./mad.sh:1921: echo -e "" +./mad.sh:1922: echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})" +./mad.sh:1923: echo -e "_________________________________________________________________________" -- -./mad.sh:1620: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" -./mad.sh:1621: echo -e "_________________________________________________________________________" -./mad.sh:1622: echo -e "$maud_torcurl" -./mad.sh:1623: echo -e "" -./mad.sh:1624: done -./mad.sh:1625: fi -./mad.sh:1626:} -./mad.sh:1627:madStatus() { -./mad.sh:1628: local InputFile="$1" -./mad.sh:1629: if [ "$arg1" == "status" ] ; then -./mad.sh:1630: clear +./mad.sh:1926: echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})" +./mad.sh:1927: echo -e "_________________________________________________________________________" +./mad.sh:1928: echo -e "$maud_torcurl" +./mad.sh:1929: echo -e "" +./mad.sh:1930: done +./mad.sh:1931: fi +./mad.sh:1932:} +./mad.sh:1933:madStatus() { +./mad.sh:1934: local InputFile="$1" +./mad.sh:1935: if [[ "$arg1" == "status" ]] ; then +./mad.sh:1936: clear -- -./mad.sh:2945: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ -./mad.sh:2946: -H "Connection: keep-alive" \ -./mad.sh:2947: -w 'EffectiveUrl=%{url_effective}' \ -./mad.sh:2948: "$download_url") -./mad.sh:2949: else -./mad.sh:2950: printf "| Retrieving Head: attempt #$j" -./mad.sh:2951: rm -f "${WorkDir}/.temp/directhead" -./mad.sh:2952: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | -./mad.sh:2953: tee "${WorkDir}/.temp/directhead" & -./mad.sh:2954: sleep 6 -./mad.sh:2955: [ -s "${WorkDir}/.temp/directhead" ] -./mad.sh:2956: kill $! 2>/dev/null -./mad.sh:2957: ) -./mad.sh:2958: if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then -./mad.sh:2959: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" -./mad.sh:2960: fi -./mad.sh:2961: rm -f "${WorkDir}/.temp/directhead" -./mad.sh:2962: fi +./mad.sh:3251: file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \ +./mad.sh:3252: -H "Connection: keep-alive" \ +./mad.sh:3253: -w 'EffectiveUrl=%{url_effective}' \ +./mad.sh:3254: "$download_url") +./mad.sh:3255: else +./mad.sh:3256: printf "| Retrieving Head: attempt #$j" +./mad.sh:3257: rm -f "${WorkDir}/.temp/directhead" +./mad.sh:3258: file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" | +./mad.sh:3259: tee "${WorkDir}/.temp/directhead" & +./mad.sh:3260: sleep 6 +./mad.sh:3261: [ -s "${WorkDir}/.temp/directhead" ] +./mad.sh:3262: kill $! 2>/dev/null +./mad.sh:3263: ) +./mad.sh:3264: if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then +./mad.sh:3265: touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" +./mad.sh:3266: fi +./mad.sh:3267: rm -f "${WorkDir}/.temp/directhead" +./mad.sh:3268: fi -- -./mad.sh:3089: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" -./mad.sh:3090: rc=$? 
-./mad.sh:3091: if [ $rc -ne 0 ] ; then -./mad.sh:3092: printf "${RED}Download Failed (bad exit status).${NC}" -./mad.sh:3093: if [ -f ${file_path} ]; then -./mad.sh:3094: printf "${YELLOW} Partial removed...${NC}" -./mad.sh:3095: printf "\n\n" -./mad.sh:3096: rm -f "${file_path}" -./mad.sh:3097: else -./mad.sh:3098: printf "\n\n" -./mad.sh:3099: fi +./mad.sh:3395: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" +./mad.sh:3396: rc=$? +./mad.sh:3397: if ((rc != 0 )) ; then +./mad.sh:3398: printf "${RED}Download Failed (bad exit status).${NC}" +./mad.sh:3399: if [[ -f ${file_path} ]]; then +./mad.sh:3400: printf "${YELLOW} Partial removed...${NC}" +./mad.sh:3401: printf "\n\n" +./mad.sh:3402: rm -f "${file_path}" +./mad.sh:3403: else +./mad.sh:3404: printf "\n\n" +./mad.sh:3405: fi -- -./mad.sh:3142: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" -./mad.sh:3143: else -./mad.sh:3144: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" -./mad.sh:3145: fi -./mad.sh:3146: received_file_size=0 -./mad.sh:3147: if [ -f "$file_path" ] ; then -./mad.sh:3148: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') -./mad.sh:3149: fi -./mad.sh:3150: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then -./mad.sh:3151: containsHtml=false -./mad.sh:3152: else -./mad.sh:3153: containsHtml=true -./mad.sh:3154: fi +./mad.sh:3448: tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" +./mad.sh:3449: else +./mad.sh:3450: tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" +./mad.sh:3451: fi +./mad.sh:3452: received_file_size=0 +./mad.sh:3453: if [[ -f "$file_path" ]] ; then +./mad.sh:3454: received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') +./mad.sh:3455: fi +./mad.sh:3456: if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then +./mad.sh:3457: containsHtml=false +./mad.sh:3458: else +./mad.sh:3459: containsHtml=true +./mad.sh:3460: fi -- -./mad.sh:3342: response=$(tor_curl_upload --insecure -i \ -./mad.sh:3343: -H "Content-Type: multipart/form-data" \ -./mad.sh:3344: -F "key=" \ -./mad.sh:3345: -F "time=$jira_timeval" \ -./mad.sh:3346: -F "file=@${filepath}" \ -./mad.sh:3347: "${jira_PostUrlHost}") -./mad.sh:3348: else -./mad.sh:3349: response=$(tor_curl_upload --insecure -i \ -./mad.sh:3350: -H "Content-Type: multipart/form-data" \ -./mad.sh:3351: -F "key=" \ -./mad.sh:3352: -F "time=$jira_timeval" \ -./mad.sh:3353: -F "files[]=@${arrFiles[@]}" \ -./mad.sh:3354: "${jira_PostUrlHost}") -./mad.sh:3355: fi -./mad.sh:3356: if [ "${DebugAllEnabled}" == "true" ] ; then -./mad.sh:3357: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}" -./mad.sh:3358: fi -./mad.sh:3359: if grep -Eqi ' 200 ' <<< "${response}" ; then +./mad.sh:3648: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3649: -H "Content-Type: multipart/form-data" \ +./mad.sh:3650: -F "key=" \ +./mad.sh:3651: -F "time=$jira_timeval" \ +./mad.sh:3652: -F "file=@${filepath}" \ +./mad.sh:3653: "${jira_PostUrlHost}") +./mad.sh:3654: else +./mad.sh:3655: response=$(tor_curl_upload --insecure -i \ +./mad.sh:3656: -H "Content-Type: multipart/form-data" \ +./mad.sh:3657: -F "key=" \ +./mad.sh:3658: -F 
"time=$jira_timeval" \ +./mad.sh:3659: -F "files[]=@${arrFiles[@]}" \ +./mad.sh:3660: "${jira_PostUrlHost}") +./mad.sh:3661: fi +./mad.sh:3662: if [[ "${DebugAllEnabled}" == "true" ]] ; then +./mad.sh:3663: debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}" +./mad.sh:3664: fi +./mad.sh:3665: if grep -Eqi ' 200 ' <<< "${response}" ; then diff --git a/hosts/1fichier.sh b/hosts/1fichier.sh index f21412f..93af901 100644 --- a/hosts/1fichier.sh +++ b/hosts/1fichier.sh @@ -46,13 +46,13 @@ fich_DownloadFile() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $lockfile; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_identity="${RANDOM}" PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s "${remote_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/?}" "prechk$y" "${PAGE}" fi file_information=$(grep -oP '(?<=)[^<]*?(?=)' <<< "${PAGE}") size=$(echo "${file_information}" | tail -n 1) filename=$(echo "${file_information}" | head -n 1) - if [ ! "$filename_override" == "" ] ; then + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -89,7 +89,7 @@ fich_DownloadFile() { return 1 fi if [[ -z "$filename" || -z "$size" || ${size//[!0-9]/} =~ '^[0-9]+([.][0-9]+?$' ]]; then - if [ $y -eq $MaxUrlRetries ] ; then + if ((y == MaxUrlRetries )) ; then echo -e "\n${RED}ERROR: Filename or size not found${NC}" echo -e "url: ${remote_url}" echo -e "filename: $filename" @@ -106,7 +106,7 @@ fich_DownloadFile() { passwordProtectedDownload "${remote_url}" return 1 fi - if [ $y -gt 1 ] ; then + if ((y > 1 )) ; then printf "\\n" fi echo -e "${GREEN}${filename} (${size}) is available.${NC}" @@ -114,24 +114,24 @@ fich_DownloadFile() { done finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fich_FindEmptySlot && fich_FetchFileInfo "" $((z+1)) $finalAttempt && fich_GetFile "${filecnt}" $((z+1)) $finalAttempt $filename ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/?}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/?}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -159,7 +159,7 @@ fich_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${fich_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 cdn_request=$(tor_curl_request --insecure -s -L -b "${fich_cookie_jar}" -c "${fich_cookie_jar}" -F "submit=Download" -F "pass=${fich_user_provided_password}" -F "adz=${fich_adz_parameter}" "${remote_url}") target_file_link=$(echo "$cdn_request" | grep -A 2 '
' | grep -oP ' /dev/null elif grep -Eqi "410 Gone" <<< "${file_header}" > /dev/null ; then echo -e "${RED}ERROR: Failed to retrieve file header (410 Gone).${NC}\nThis could be due to 1fichier experiencing a temporary issue." - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedDownload "${remote_url}" "${filename}" "410 Gone" fi return 1 elif grep -Eqi "403 Forbidden" <<< "${file_header}" > /dev/null ; then echo -e "${RED}ERROR: Failed to retrieve file header (403 Forbidden).${NC}\nThis could be due to 1fichier experiencing a temporary issue." - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedDownload "${remote_url}" "${filename}" "403 Forbidden" fi return 1 else echo -e "${RED}ERROR: Failed to retrieve file header (Unknown Head Response).${NC}\nThis could be due to 1fichier experiencing a temporary issue." - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedDownload "${remote_url}" "${filename}" "Unknown Head Response" fi return 1 @@ -242,7 +242,7 @@ fich_GetFile() { if (( $(echo "$percent_diff > $percent_threshold" |bc -l) )); then echo -e "${RED}ERROR: The difference between the advertised and retrieved file size is too big${NC}\nThis is most likely due to someone else taking the slot or some other error along the way." echo -e "AdvertisedSize: $size ($size_bytes), Size: $file_size_bytes, Diff: $percent_diff" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${file_size_bytes}" "${size}" fi return 1 @@ -255,18 +255,18 @@ fich_GetFile() { return 1 fi echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_curl_request --insecure -e "${remote_url}" "${target_file_link}" -C - -o "${file_path}" rm -f "$flockDownload"; received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then echo -e "${RED}ERROR: Size mismatch after downloading${NC}\nPerhaps you or 1fichier lost connection for a while?" 
- if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/9saves.sh b/hosts/9saves.sh index a152195..096de88 100644 --- a/hosts/9saves.sh +++ b/hosts/9saves.sh @@ -43,24 +43,24 @@ ns_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if ns_FetchFileInfo $finalAttempt && ns_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ ns_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ns_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "ns_dwnpage$i" "${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${ns_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract post link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -127,7 +127,7 @@ ns_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract post link (unknown).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -139,14 +139,14 @@ ns_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" \ --data "$form_data" "https://9saves.com/") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "ns_post" "form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then rm -f "${ns_cookie_jar}"; echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -173,7 +173,7 @@ ns_FetchFileInfo() { rm -f "${ns_cookie_jar}"; echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then 
failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -186,16 +186,16 @@ ns_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ns_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure --head -L -s -b "${ns_cookie_jar}" -c "${ns_cookie_jar}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "ns_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${ns_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -208,18 +208,18 @@ ns_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${ns_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info (no 200 response).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -233,13 +233,13 @@ ns_FetchFileInfo() { fi file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${ns_cookie_jar}"; printf "\\n" echo -e "${RED}| Filesize not found…${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi return 1 @@ -251,8 +251,8 @@ ns_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -281,14 +281,14 @@ ns_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ns_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ -b "${ns_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ "$download_url" \ @@ -300,7 +300,7 @@ ns_GetFile() { --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${ns_cookie_jar}" \ @@ -336,7 +336,7 @@ ns_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -345,9 +345,9 @@ ns_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 10240 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 10240 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -359,15 +359,15 @@ ns_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -379,21 +379,21 @@ ns_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -405,7 +405,7 @@ ns_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/anonfile.sh b/hosts/anonfile.sh index f06cebc..ef2d49e 100644 --- a/hosts/anonfile.sh +++ b/hosts/anonfile.sh @@ -48,24 +48,24 @@ anon_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if anon_FetchFileInfo $finalAttempt && anon_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -96,16 +96,16 @@ anon_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "anon_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -115,11 +115,11 @@ anon_FetchFileInfo() { fi if grep -Eqi 'You have reached the 
download-limit' <<< "$response"; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Download limit reached for ip${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Download limit reached for ip" "" fi return 1 @@ -144,11 +144,11 @@ anon_FetchFileInfo() { post_fname=$(urlencode_literal_grouped_case "$post_fname") else rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -159,11 +159,11 @@ anon_FetchFileInfo() { fi if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]] ; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -187,16 +187,16 @@ anon_FetchFileInfo() { -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ --data "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "anon_post1_$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -206,11 +206,11 @@ anon_FetchFileInfo() { fi if grep -Eqi 'You have reached the download-limit' <<< "$response"; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Download limit reached for ip${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Download limit reached for ip" "" fi return 1 @@ -239,7 +239,7 @@ anon_FetchFileInfo() { tmp_captcha_img="$WorkDir/.temp/${remote_url//[^a-zA-Z0-9]/}.jpg" tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130") - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then printf "\\n" echo -e "$captcha_ocr_output" fi @@ -247,9 +247,9 @@ anon_FetchFileInfo() { rm -f "$tmp_captcha_img" rm -f "$captcha_ocr_output" local caplength=${#captcha_code} - if [ -z "$captcha_code" ] || ((caplength != 4)) ; then + if [[ -z "$captcha_code" ]] || ((caplength != 4)) ; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Bad or unknown 
captcha$NC (${GREY}$captcha_code${NC}).${NC}" exitDownloadError=true @@ -273,11 +273,11 @@ anon_FetchFileInfo() { post_referer=$(urlencode_literal_grouped_case_urlendingonly "$post_referer") else rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [5]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [5]" "" fi return 1 @@ -288,11 +288,11 @@ anon_FetchFileInfo() { fi if [[ -z "$captcha_code" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [6]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [6]" "" fi return 1 @@ -305,7 +305,7 @@ anon_FetchFileInfo() { fi elif grep -Eqi 'class="g-recaptcha" data-sitekey="' <<< "$response" ; then rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Recaptcha detected (js required)${NC}" exitDownloadError=true @@ -316,7 +316,7 @@ anon_FetchFileInfo() { fi else rm -f "${anon_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| No download button found${NC}" exitDownloadError=true @@ -340,16 +340,16 @@ anon_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ --data "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "anon_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -367,12 +367,12 @@ anon_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Rate limited [2]" "" fi return 1 @@ -382,12 +382,12 @@ anon_FetchFileInfo() { fi fi if grep -Eqi 'Just a moment...' 
<<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [7].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [7]" "" fi return 1 @@ -397,12 +397,12 @@ anon_FetchFileInfo() { fi fi if grep -Eqi 'Wrong captcha' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link. (Wrong captcha)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link. (Wrong captcha)" "" fi return 1 @@ -415,20 +415,22 @@ anon_FetchFileInfo() { printf "\\n" echo -e "${GREEN}| Download url found [1]${NC}" download_url=$(grep -oP '(?<=.*$)' <<< "$response") + filename="${download_url##*\/}" download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") elif grep -Eqi '.*$)' <<< "$response") + filename="${download_url##*\/}" download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") fi if [[ -z "$download_url" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [8]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [8]" "" fi return 1 @@ -451,16 +453,16 @@ anon_FetchFileInfo() { file_header=$(tor_curl_request -i -s --head \ --referer "${fixed_url}" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "anon_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -470,12 +472,12 @@ anon_FetchFileInfo() { fi fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -487,12 +489,12 @@ anon_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${anon_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -503,17 +505,15 @@ anon_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - else - filename="${download_url##*\/}" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -542,20 +542,20 @@ anon_GetFile() { retryCnt=$2 finalAttempt=$3 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" - if [ -f "$file_path" ]; then + if [[ -f "$file_path" ]]; then rm -f "$file_path" fi for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA anon_host=$(grep -oPi '(?<=https://).*(?=/)' <<< "$fixed_url") CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${anon_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${anon_cookie_jar}" -c "${anon_cookie_jar}" \ @@ -568,7 +568,7 @@ anon_GetFile() { "$download_url" --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -604,7 +604,7 @@ anon_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -613,9 +613,9 @@ anon_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ 
"${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -627,15 +627,15 @@ anon_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -647,21 +647,21 @@ anon_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -673,7 +673,7 @@ anon_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/anonsharing.sh b/hosts/anonsharing.sh index 6f3fadc..1f2bc8f 100644 --- a/hosts/anonsharing.sh +++ b/hosts/anonsharing.sh @@ -43,24 +43,24 @@ ansh_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if ansh_FetchFileInfo $finalAttempt && ansh_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -93,16 +93,16 @@ ansh_FetchFileInfo() { -F "u=$fileid" \ -F "p=true" \ "https://anonsharing.com/account/ajax/file_details") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "ansh_postfileinfo$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${ansh_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -127,11 +127,11 @@ ansh_FetchFileInfo() { break else rm -f "${ansh_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -145,7 +145,7 @@ ansh_FetchFileInfo() { for ((j=1; j<=$maxfetchretries; j++)); do CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ansh_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "$bLocationFound" == "false" ]; then + if [[ "$bLocationFound" == "false" ]]; then echo -e "${GREEN}# Fetching cdn and file info…${NC}" file_header=$(tor_curl_request --insecure --head -L -i -s \ -b 
"${ansh_cookie_jar}" -c "${ansh_cookie_jar}" \ @@ -163,21 +163,21 @@ ansh_FetchFileInfo() { [ -s "${WorkDir}/.temp/directhead" ] kill $! 2>/dev/null ) - if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" fi rm -f "${WorkDir}/.temp/directhead" fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "ansh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${ansh_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -190,7 +190,7 @@ ansh_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -204,19 +204,19 @@ ansh_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download url (no location)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download url (no location)" "" fi return 1 fi filename=$(grep -oP '(?<=filename=").*?(?=".*$)' <<< "$file_header") - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${ansh_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -227,14 +227,14 @@ ansh_FetchFileInfo() { break #Good to go here done rm -f "${ansh_cookie_jar}"; - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -262,20 +262,20 @@ ansh_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${ansh_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)" - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --output "$file_path" else tor_curl_request --insecure "$download_url" --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -283,8 +283,8 @@ ansh_GetFile() { else containsHtml=true fi - if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -296,7 +296,7 @@ ansh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/ateasystems.sh b/hosts/ateasystems.sh index 018a04a..8e6e92c 100644 --- a/hosts/ateasystems.sh +++ b/hosts/ateasystems.sh @@ -43,24 +43,24 @@ atea_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if atea_FetchFileInfo $finalAttempt && atea_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -86,15 +86,15 @@ atea_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "atea_fetch$i" "${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -124,12 +124,12 @@ atea_FetchFileInfo() { val1=$((pval1-0)); val2=$((pval2-0)); val3=$((pval3-0)); val4=$((pval4-0)) captcha_code="${val1}${val2}${val3}${val4}" if grep -Eqi '-' <<< "$captcha_code"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${atea_cookie_jar}"; printf "\\n" echo -e "${RED}| Bad captcha code [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Bad captcha code [2]" "" fi return 1 @@ -148,15 +148,15 @@ atea_FetchFileInfo() { post_rand=$(grep -oP '(?<=input type="hidden" name="rand" value=").*(?=">.*$)' <<< "$response") post_fname=$(urlencode_literal_grouped_case "${post_fname}") post_action="${post_action//[$'\t\r\n']}" - if [ "$filename_override" == "" ]; then + if [[ "$filename_override" == "" ]]; then filename=$(urlencode_literal_grouped_case "${post_fname}") fi else - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; 
then printf "\\n" echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -166,11 +166,11 @@ atea_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_act" ]] || [[ -z "$post_id" ]] || [[ -z "$post_sc" ]] || \ [[ -z "$post_fname" ]] || [[ -z "$post_rand" ]] || [[ -z "$captcha_code" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -184,8 +184,8 @@ atea_FetchFileInfo() { form_data="act=${post_act}&id=${post_id}&fname=${post_fname}&rand=${post_rand}&sc=${post_sc}&code=${captcha_code}&btn=Download+File" echo -e "| Captcha countdown (3s)…" sleep 3s - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -210,13 +210,13 @@ atea_GetFile() { finalAttempt=$3 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" GetRandomUA - if [ -f "$file_path" ]; then + if [[ -f "$file_path" ]]; then rm -f "file_path" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ --data "$form_data" "$post_action" \ @@ -227,7 +227,7 @@ atea_GetFile() { --output "$file_path" --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -245,13 +245,13 @@ atea_GetFile() { else containsHtml=true fi - if [ "$containsHtml" == "true" ]; then - if [ -f "$file_path" ] ; then + if [[ "$containsHtml" == "true" ]]; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
rm -f $flockDownload; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/bedrive.sh b/hosts/bedrive.sh index f38418c..56b807a 100644 --- a/hosts/bedrive.sh +++ b/hosts/bedrive.sh @@ -43,24 +43,24 @@ bd_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if bd_FetchFileInfo $finalAttempt && bd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -90,16 +90,16 @@ bd_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s \ -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bd_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${bd_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -125,11 +125,11 @@ bd_FetchFileInfo() { break else rm -f "${bd_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -149,16 +149,16 @@ bd_FetchFileInfo() { file_header=$(tor_curl_request --insecure --head -L -i -s \ -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${bd_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -171,7 +171,7 @@ bd_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then 
+ if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -184,18 +184,18 @@ bd_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download url (no location)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download url (no location)" "" fi return 1 fi if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${bd_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -204,13 +204,13 @@ bd_FetchFileInfo() { fi fi filename=$(grep -oP '(?<=filename=").*?(?=".*$)' <<< "$file_header") - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${bd_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -220,8 +220,8 @@ bd_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -229,8 +229,8 @@ bd_FetchFileInfo() { echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -261,14 +261,14 @@ bd_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${bd_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ --referer "$remote_url" "$download_url" \ @@ -280,7 +280,7 @@ bd_GetFile() { --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ -b "${bd_cookie_jar}" -c "${bd_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -315,7 +315,7 @@ bd_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -324,9 +324,9 @@ bd_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -338,15 +338,15 @@ bd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -358,21 +358,21 @@ bd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -384,7 +384,7 @@ bd_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/biteblob.sh b/hosts/biteblob.sh index fbfb170..335da60 100644 --- a/hosts/biteblob.sh +++ b/hosts/biteblob.sh @@ -43,24 +43,24 @@ bite_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if bite_FetchFileInfo $finalAttempt && bite_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -94,15 +94,15 @@ bite_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bite_dwnpage$j" "url: $fixed_url"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -126,11 +126,11 @@ bite_FetchFileInfo() { fi break fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] 
; then printf "\\n" echo -e "${RED}| Failed to extract download info (unknown).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -142,15 +142,15 @@ bite_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure --head -L -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bite_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -159,11 +159,11 @@ bite_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -171,7 +171,7 @@ bite_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -185,12 +185,12 @@ bite_FetchFileInfo() { printf "\\n" break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -216,19 +216,19 @@ bite_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)" tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_curl_request --insecure --referer "$file_url" "$download_url" --output "$file_path" rc=$? 
- if [ $rc -ne 0 ] ; then + if ((rc != 0 )) ; then printf "${RED}Download Failed (bad exit status).${NC}" - if [ -f ${file_path} ]; then + if [[ -f ${file_path} ]]; then printf "${YELLOW} Partial removed...${NC}" printf "\n\n" rm -f "${file_path}" @@ -237,7 +237,7 @@ bite_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -250,12 +250,12 @@ bite_GetFile() { else containsHtml=true fi - if [ "$containsHtml" == "true" ]; then + if [[ "$containsHtml" == "true" ]]; then echo -e "${YELLOW}Download Failed (contains html)${NC} partial removed..." rm -f "${file_path}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -267,13 +267,13 @@ bite_GetFile() { else CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -282,9 +282,9 @@ bite_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -296,15 +296,15 @@ bite_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -316,21 +316,21 @@ bite_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -342,7 +342,7 @@ bite_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/bowfile.sh b/hosts/bowfile.sh index 70add7d..1daba6e 100644 --- a/hosts/bowfile.sh +++ b/hosts/bowfile.sh @@ -43,24 +43,24 @@ bow_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if bow_FetchFileInfo $finalAttempt && bow_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -91,16 +91,16 @@ bow_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${bow_cookie_jar}" -c "${bow_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bow_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${bow_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract token link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract token link [1]" "" fi return 1 @@ -123,7 +123,7 @@ bow_FetchFileInfo() { break else rm -f "${bow_cookie_jar}"; - if [ $i == $maxfetchretries 
] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Ticket url not found [1].${NC}" exitDownloadError=true @@ -145,7 +145,7 @@ bow_FetchFileInfo() { -H "Host: bowfile.com" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bow_downurl" "download_url: ${download_url}"$'\n'"${response}" fi if [[ -z $response ]] ; then @@ -153,7 +153,7 @@ bow_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download url (no location)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download url" "" fi return 1 @@ -166,7 +166,7 @@ bow_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download url (no location)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download url (no location)" "" fi return 1 @@ -183,15 +183,15 @@ bow_FetchFileInfo() { -H "Host: $fshost" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bow_head$j" "download_url: ${download_url}"$'\n'"download_token: ${dltoken}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -201,11 +201,11 @@ bow_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info (no 200 response)" "" fi return 1 @@ -214,14 +214,14 @@ bow_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=$(grep -oPi '(?<=filename=").*?(?=")' <<< "$file_header") if [[ -z "$filename" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract filename." "" fi return 1 @@ -233,11 +233,11 @@ bow_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract filesize.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract filesize." 
"" fi return 1 @@ -248,15 +248,15 @@ bow_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -287,15 +287,15 @@ bow_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi fshost=$(grep -oPi -m 1 '(?<=https://).*?(?=/token/download/dl)' <<< "$download_url") GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${bow_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "Host: $fshost" \ @@ -308,7 +308,7 @@ bow_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "Host: $fshost" \ @@ -344,7 +344,7 @@ bow_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -353,9 +353,9 @@ bow_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -367,15 +367,15 @@ bow_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -387,21 +387,21 @@ bow_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -413,7 +413,7 @@ bow_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/click.sh b/hosts/click.sh index c0c011a..f37b717 100644 --- a/hosts/click.sh +++ b/hosts/click.sh @@ -54,24 +54,24 @@ click_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if click_FetchFileInfo $finalAttempt && click_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -143,16 +143,16 @@ click_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ -w "\n\nHTTP_CODE: 
%{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "click_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${click_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -180,18 +180,18 @@ click_FetchFileInfo() { post_id=$(grep -oP '(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response") post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$response") post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") - if [ "$filename_override" == "" ]; then + if [[ "$filename_override" == "" ]]; then filename="$post_fname" fi filename=$(sanitize_file_or_folder_name "${filename}") post_fname=$(urlencode_literal_grouped_case "${post_fname}") else rm -f "${click_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -201,11 +201,11 @@ click_FetchFileInfo() { fi if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]]; then rm -f "${click_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -226,16 +226,16 @@ click_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ --data "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "click_post1_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -253,12 +253,12 @@ click_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited. 
[2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -286,12 +286,12 @@ click_FetchFileInfo() { post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") post_referer=$(urlencode_literal_grouped_case_urlendingonly "$post_referer") else - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -301,12 +301,12 @@ click_FetchFileInfo() { fi fi if grep -Eqi '-' <<< "$captcha_code"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Bad captcha code [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -316,12 +316,12 @@ click_FetchFileInfo() { fi fi if [[ -z "$captcha_code" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -345,16 +345,16 @@ click_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ --data "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "click_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -372,12 +372,12 @@ click_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited. [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -387,12 +387,12 @@ click_FetchFileInfo() { fi fi if grep -Eqi 'Just a moment...' 
<<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -408,12 +408,12 @@ click_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") fi if [[ -z "$download_url" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -432,16 +432,16 @@ click_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${click_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 GetRandomUA file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "click_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -451,12 +451,12 @@ click_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -468,12 +468,12 @@ click_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${click_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -484,15 +484,15 @@ click_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -523,15 +523,15 @@ click_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA click_host=$(grep -oPi '(?<=https://).*(?=/)' <<< "$fixed_url") CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${click_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ @@ -546,7 +546,7 @@ click_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${click_cookie_jar}" -c "${click_cookie_jar}" \ @@ -584,7 +584,7 @@ click_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -593,9 +593,9 @@ click_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -607,15 +607,15 @@ click_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -627,21 +627,21 @@ click_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -653,7 +653,7 @@ click_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/dailyuploads.sh b/hosts/dailyuploads.sh index bc59832..6ddac96 100644 --- a/hosts/dailyuploads.sh +++ b/hosts/dailyuploads.sh @@ -49,24 +49,24 @@ daily_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if daily_FetchFileInfo $finalAttempt && daily_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -97,16 +97,16 @@ daily_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "daily_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -138,7 +138,7 @@ daily_FetchFileInfo() { 
tmp_captcha_img="$WorkDir/.temp/${remote_url//[^a-zA-Z0-9]/}.jpg" tor_curl_request --insecure -s "$captcha_img_url" --output "$tmp_captcha_img" captcha_ocr_output=$(CaptchaOcrImageTesseract "$tmp_captcha_img" "NUMBERONLY" "ContrastStretch_5x90,Brightness_130") - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then printf "\\n" echo -e "$captcha_ocr_output" fi @@ -146,9 +146,9 @@ daily_FetchFileInfo() { rm -f "$tmp_captcha_img" rm -f "$captcha_ocr_output" local caplength=${#captcha_code} - if [ -z "$captcha_code" ] || ((caplength != 4)) ; then + if [[ -z "$captcha_code" ]] || ((caplength != 4)) ; then rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Bad or unknown captcha$NC (${GREY}$captcha_code${NC}).${NC}" exitDownloadError=true @@ -171,11 +171,11 @@ daily_FetchFileInfo() { post_referer="" else rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -186,11 +186,11 @@ daily_FetchFileInfo() { fi if [[ -z "$captcha_code" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -202,7 +202,7 @@ daily_FetchFileInfo() { break fi elif grep -Eqi 'class="g-recaptcha" data-sitekey="' <<< "$response" ; then - if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_daily" == "true" ]; then + if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [[ "$PJSCloud_daily" == "true" ]]; then daily_recaptcha_detected=true if grep -Eqi 'input type="hidden" name="id" value="' <<< "$response"; then printf "\\n" @@ -213,11 +213,11 @@ daily_FetchFileInfo() { post_referer="" else rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -228,11 +228,11 @@ daily_FetchFileInfo() { fi if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -245,7 +245,7 @@ daily_FetchFileInfo() { fi else rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Recaptcha detected (js required)${NC}" exitDownloadError=true @@ -257,7 +257,7 @@ daily_FetchFileInfo() { fi else rm -f "${daily_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; 
then printf "\\n" echo -e "${RED}| No download button found${NC}" exitDownloadError=true @@ -275,10 +275,10 @@ daily_FetchFileInfo() { download_url="" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${daily_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "$daily_recaptcha_detected" == "true" ]; then + if [[ "$daily_recaptcha_detected" == "true" ]]; then form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=&method_premium=&adblock_detected=" response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "daily_post2$i" "form_data: ${form_data}"$'\n'"${response}" fi else @@ -286,17 +286,17 @@ daily_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ --data "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "daily_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -314,12 +314,12 @@ daily_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited. [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -329,12 +329,12 @@ daily_FetchFileInfo() { fi fi if grep -Eqi 'Just a moment...' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -344,12 +344,12 @@ daily_FetchFileInfo() { fi fi if grep -Eqi '
Wrong captcha
' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link. (Wrong captcha)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link. (Wrong captcha)" "" fi return 1 @@ -365,12 +365,12 @@ daily_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") fi if [[ -z "$download_url" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -393,16 +393,16 @@ daily_FetchFileInfo() { -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ --referer "${fixed_url}" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "daily_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -412,12 +412,12 @@ daily_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -429,12 +429,12 @@ daily_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${daily_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -445,8 +445,8 @@ daily_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" else filename="${download_url##*\/}" @@ -454,8 +454,8 @@ daily_FetchFileInfo() { filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -486,15 +486,15 @@ daily_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA daily_host=$(grep -oPi '(?<=https://).*(?=/)' <<< "$fixed_url") CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${daily_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${daily_cookie_jar}" -c "${daily_cookie_jar}" \ @@ -507,7 +507,7 @@ daily_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -543,7 +543,7 @@ daily_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -552,9 +552,9 @@ daily_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -566,15 +566,15 @@ daily_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -586,21 +586,21 @@ daily_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -612,7 +612,7 @@ daily_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/dashfile.sh b/hosts/dashfile.sh old mode 100755 new mode 100644 index f37093e..ecd4a49 --- a/hosts/dashfile.sh +++ b/hosts/dashfile.sh @@ -43,24 +43,24 @@ dash_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if dash_FetchFileInfo $finalAttempt && dash_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -90,16 +90,16 @@ dash_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dash_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${dash_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -131,18 +131,18 @@ dash_FetchFileInfo() { post_id=$(grep -oP 
'(?<=input type="hidden" name="id" value=").*(?=">)' <<< "$response") post_fname=$(grep -oP '(?<=input type="hidden" name="fname" value=").*(?=">)' <<< "$response") post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") - if [ "$filename_override" == "" ]; then + if [[ "$filename_override" == "" ]]; then filename="$post_fname" fi filename=$(sanitize_file_or_folder_name "${filename}") post_fname=$(urlencode_literal_grouped_case "${post_fname}") else rm -f "${dash_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -152,11 +152,11 @@ dash_FetchFileInfo() { fi if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]]; then rm -f "${dash_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -177,16 +177,16 @@ dash_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ --data "$form_data" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dash_post1_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [4]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [4]" "" fi return 1 @@ -212,12 +212,12 @@ dash_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited. [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Rate limited. 
[2]" "" fi return 1 @@ -236,12 +236,12 @@ dash_FetchFileInfo() { pval4=$(grep -oP -m 1 '&#\K.*?(?=;)' <<< "$codeline" ) val1=$((pval1-48)); val2=$((pval2-48)); val3=$((pval3-48)); val4=$((pval4-48)) captcha_code="${val1}${val2}${val3}${val4}" - if [ -z "$captcha_code" ] || grep -Eqi '-' <<< "$captcha_code"; then + if [[ -z "$captcha_code" ]] || grep -Eqi '-' <<< "$captcha_code"; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Bad captcha code [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Bad captcha code [2]" "" fi return 1 @@ -257,11 +257,11 @@ dash_FetchFileInfo() { post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") post_referer=$(urlencode_literal_grouped_case_urlendingonly "$post_referer") else - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; echo -e "${RED}| Failed to extract download link [5]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [5]" "" fi return 1 @@ -271,11 +271,11 @@ dash_FetchFileInfo() { fi fi if [[ -z "$captcha_code" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; echo -e "${RED}| Failed to extract download link [6]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [6]" "" fi return 1 @@ -308,16 +308,16 @@ dash_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ --data "$form_data" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dash_post2_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [7]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [7]" "" fi return 1 @@ -335,12 +335,12 @@ dash_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Rate limited [2]" "" fi return 1 @@ -350,12 +350,12 @@ dash_FetchFileInfo() { fi fi if grep -Eqi 'Just a moment...' 
<<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [8]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [8]" "" fi return 1 @@ -371,12 +371,12 @@ dash_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") fi if [[ -z "$download_url" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [9]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [9]" "" fi return 1 @@ -395,16 +395,16 @@ dash_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dash_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 GetRandomUA file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dash_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -414,12 +414,12 @@ dash_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -431,12 +431,12 @@ dash_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${dash_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -447,15 +447,15 @@ dash_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -486,14 +486,14 @@ dash_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dash_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ @@ -504,7 +504,7 @@ dash_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${dash_cookie_jar}" -c "${dash_cookie_jar}" \ @@ -538,7 +538,7 @@ dash_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -547,9 +547,9 @@ dash_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -561,15 +561,15 @@ dash_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -581,21 +581,21 @@ dash_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -607,7 +607,7 @@ dash_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/dataupload.sh b/hosts/dataupload.sh index 6a3d70b..27e8544 100644 --- a/hosts/dataupload.sh +++ b/hosts/dataupload.sh @@ -43,24 +43,24 @@ dup_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if dup_FetchFileInfo $finalAttempt && dup_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ dup_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${dup_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dup_dwnpage$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${dup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -107,11 +107,11 @@ dup_FetchFileInfo() { 
fi if grep -Eqi "Sorry, you are banned" <<< "$response"; then rm -f "${dup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -139,11 +139,11 @@ dup_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then rm -f "${dup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -166,16 +166,16 @@ dup_FetchFileInfo() { response=$(tor_curl_request --insecure -svo. -X POST \ -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ --data-raw "$form_data" "$post_action" 2>&1) - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -193,12 +193,12 @@ dup_FetchFileInfo() { return 1 fi if grep -Eqi 'Just a moment...' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${dup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -211,7 +211,7 @@ dup_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract download link [4]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${dup_cookie_jar}"; failedRetryDownload "${remote_url}" "Failed to extract download link [4]" "" fi @@ -232,16 +232,16 @@ dup_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dup_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 GetRandomUA file_header=$(tor_curl_request --insecure -L --head -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dup_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${dup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [1]" "" fi return 1 @@ -251,12 +251,12 @@ dup_FetchFileInfo() { fi fi if ! 
grep -Eqi 'HTTP.*200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${dup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [2]" "" fi return 1 @@ -265,15 +265,15 @@ dup_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename="${download_url##*/}" filename=${filename//%0d/} if [[ -z "$filename" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -285,12 +285,12 @@ dup_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${dup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file size." "" fi return 1 @@ -301,15 +301,15 @@ dup_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -340,14 +340,14 @@ dup_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${dup_cookie_jar}"; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ @@ -360,7 +360,7 @@ dup_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${dup_cookie_jar}" -c "${dup_cookie_jar}" \ @@ -396,7 +396,7 @@ dup_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -405,9 +405,9 @@ dup_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -419,15 +419,15 @@ dup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -439,21 +439,21 @@ dup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -465,7 +465,7 @@ dup_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/desiupload.sh b/hosts/desiupload.sh index 4545f68..21c36fb 100644 --- a/hosts/desiupload.sh +++ b/hosts/desiupload.sh @@ -43,24 +43,24 @@ desi_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if desi_FetchFileInfo $finalAttempt && desi_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ desi_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "desi_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${desi_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -128,12 +128,12 @@ 
desi_FetchFileInfo() { val1=$((pval1-48)); val2=$((pval2-48)); val3=$((pval3-48)); val4=$((pval4-48)) captcha_code="${val1}${val2}${val3}${val4}" if grep -Eqi '-' <<< "$captcha_code"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Bad captcha code [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Bad captcha code [2]" "" fi return 1 @@ -152,11 +152,11 @@ desi_FetchFileInfo() { post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">.*$)' <<< "$response") else rm -f "${desi_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -166,11 +166,11 @@ desi_FetchFileInfo() { fi if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] || [[ -z "$captcha_code" ]] ; then rm -f "${desi_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -202,16 +202,16 @@ desi_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \ --data "$form_data" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "desi_post_$i" "url: ${remote_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [7]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [7]" "" fi return 1 @@ -225,7 +225,7 @@ desi_FetchFileInfo() { printf "\\n" echo -e "${RED}| Wrong IP address [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Wrong IP address [1]" "" fi return 1 @@ -239,12 +239,12 @@ desi_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Rate limited [2]" "" fi return 1 @@ -254,12 +254,12 @@ desi_FetchFileInfo() { fi fi if grep -Eqi 'Just a moment...' 
<<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [8]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [8]" "" fi return 1 @@ -279,12 +279,12 @@ desi_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") fi if [[ -z "$download_url" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [9]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [9]" "" fi return 1 @@ -304,16 +304,16 @@ desi_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${desi_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 GetRandomUA file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "desi_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -323,12 +323,12 @@ desi_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -340,12 +340,12 @@ desi_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${desi_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -356,15 +356,15 @@ desi_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -395,14 +395,14 @@ desi_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${desi_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \ @@ -413,7 +413,7 @@ desi_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${desi_cookie_jar}" -c "${desi_cookie_jar}" \ @@ -447,7 +447,7 @@ desi_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -456,9 +456,9 @@ desi_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -470,15 +470,15 @@ desi_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -490,21 +490,21 @@ desi_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -516,7 +516,7 @@ desi_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/dosya.sh b/hosts/dosya.sh index 4ba7e36..66d4103 100644 --- a/hosts/dosya.sh +++ b/hosts/dosya.sh @@ -44,24 +44,24 @@ dosya_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if dosya_FetchFileInfo $finalAttempt && dosya_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -105,7 +105,7 @@ dosya_FetchFileInfo() { tor_identity="${RANDOM}" GetRandomUA trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dosya_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \ -c "${dosya_cookie_jar}" \ "${remote_url}") @@ -119,7 +119,7 @@ dosya_FetchFileInfo() { -c "${dosya_cookie_jar}" \ "${remote_url}") fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dos_fetch_$i" "remote_url: ${remote_url}"$'\n'"User-Agent: $RandomUA"$'\n'"${PAGE}" fi if grep -Eqi '|Hata indir|Dosya silindi|Oops - Sayfa Bulunamadı!' 
<<< "${PAGE}" ; then @@ -169,7 +169,7 @@ dosya_FetchFileInfo() { dos_url=${dos_url/dosyaupload.com/www.dosyaupload.com} fi trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dosya_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then file_header=$(tor_curl_request_extended --insecure --head -L -s \ -H "Cookie: filehosting=$cookie_filehosting" \ -H "Host: www.dosyaupload.com" \ @@ -188,10 +188,10 @@ dosya_FetchFileInfo() { -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ -b "${dosya_cookie_jar}" -c "${dosya_cookie_jar}" "$dos_url") fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dos_head_""$i"_"$j" "dos_url: ${dos_url}"$'\n'"User-Agent: $RandomUA"$'\n\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then bIsLocation=false bIsContentLength=false if grep -Eqi 'download_token=' <<< $dos_url ; then @@ -204,7 +204,7 @@ dosya_FetchFileInfo() { printf "\\n" fi echo -e "${RED}| The cdn url returned no response or disconnected client. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "The cdn url returned no response or disconnected client. Try again later." "" fi return 1 @@ -258,7 +258,7 @@ dosya_FetchFileInfo() { printf "\\n" fi echo -e "${RED}| Failed to find pt url${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -278,7 +278,7 @@ dosya_FetchFileInfo() { printf "\\n" fi echo -e "${RED}| Failed to extract file size${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -293,7 +293,7 @@ dosya_FetchFileInfo() { continue fi else - if [ ! -z $dosya_token_url ]; then + if [[ ! -z "$dosya_token_url" ]]; then rm -f ${dosya_cookie_jar}; if ((j > 1)) ; then printf "\\n" @@ -307,7 +307,7 @@ dosya_FetchFileInfo() { printf "\\n" fi echo -e "${RED}|Dosya url not found${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -329,7 +329,7 @@ dosya_FetchFileInfo() { printf "\\n" fi echo -e "${RED}| No response. Try again later." - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -344,20 +344,20 @@ dosya_FetchFileInfo() { fi fi done #head - if [ ! -z $dosya_token_url ]; then + if [[ ! -z "$dosya_token_url" ]]; then rm -f "${dosya_cookie_jar}"; break fi done #fetch - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" else filename=$dosya_fname fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -387,18 +387,18 @@ dosya_GetFile() { retryCnt=$2 finalAttempt=$3 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dos_down_UA" "User-Agent: ${RandomUA}" fi for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request -L -G --insecure \ -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/jxl,image/webp,*/*;q=0.8" \ -H "Accept-Language: en-US,en;q=0.5" \ @@ -430,7 +430,7 @@ dosya_GetFile() { "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -439,9 +439,9 @@ dosya_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -454,15 +454,15 @@ dosya_GetFile() { if ((j >= $MaxDownloadRetries)) ; then rm -f "${dosya_cookie_jar}"; rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -475,22 +475,22 @@ dosya_GetFile() { if ((j >= $MaxDownloadRetries)) ; then rm -f "${dosya_cookie_jar}"; rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "${dosya_cookie_jar}"; rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -503,7 +503,7 @@ dosya_GetFile() { if ((j >= $MaxDownloadRetries)) ; then rm -f "${dosya_cookie_jar}"; rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/downloadgg.sh b/hosts/downloadgg.sh index 29ce388..f078982 100644 --- a/hosts/downloadgg.sh +++ b/hosts/downloadgg.sh @@ -43,24 +43,24 @@ dgg_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if dgg_FetchFileInfo $finalAttempt && dgg_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ dgg_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${dgg_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dgg_dwnpage$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${dgg_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -107,11 +107,11 @@ dgg_FetchFileInfo() { fi if grep -Eqi "Sorry, you are banned" <<< "$response"; then rm -f "${dgg_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -148,11 +148,11 @@ dgg_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_file" ]] || [[ -z "$post_hash" ]] || [[ -z "$post_key" ]]; then rm -f "${dgg_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract 
download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -169,13 +169,13 @@ dgg_FetchFileInfo() { response=$(tor_curl_request --insecure -svo. -X POST \ -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" \ --data-raw "$form_data" "$post_action" 2>&1) - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "dgg_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then echo -e "${RED}| Failed to extract download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${dgg_cookie_jar}"; failedRetryDownload "${remote_url}" "" "" fi @@ -201,21 +201,21 @@ dgg_FetchFileInfo() { if [[ -z $filename ]] || [[ -z $file_size_bytes ]] ; then echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${dgg_cookie_jar}"; failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -246,14 +246,14 @@ dgg_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${dgg_cookie_jar}"; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -X POST \ -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "Host: download.gg" \ @@ -271,7 +271,7 @@ dgg_GetFile() { --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -X POST \ -b "${dgg_cookie_jar}" -c "${dgg_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "Host: download.gg" \ @@ -312,7 +312,7 @@ dgg_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -321,9 +321,9 @@ dgg_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -335,15 +335,15 @@ dgg_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -355,21 +355,21 @@ dgg_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -381,7 +381,7 @@ dgg_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/eternalhosting.sh b/hosts/eternalhosting.sh old mode 100755 new mode 100644 diff --git a/hosts/examples/ExampleNewHost.sh b/hosts/examples/ExampleNewHost.sh old mode 100755 new mode 100644 index 75404fc..15facd9 --- a/hosts/examples/ExampleNewHost.sh +++ b/hosts/examples/ExampleNewHost.sh @@ -1,6 +1,6 @@ #! Name: ExampleNewHost.sh #! Author: kittykat -#! Version: 2024.09.13 +#! Version: 2025.02.18 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! 
@@ -45,24 +45,24 @@ fh_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fh_FetchFileInfo $finalAttempt && fh_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -100,12 +100,12 @@ fh_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| The file has been removed (404).${NC}" removedDownload "${remote_url}" @@ -116,10 +116,10 @@ fh_FetchFileInfo() { fi fi if ! grep -Eqi '200|content-length' <<< "${file_header}" ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -142,8 +142,8 @@ fh_FetchFileInfo() { printf "\\n" break done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" else filename=${download_url##*/} # Requires unique filename.ext but good for multipart files @@ -151,8 +151,8 @@ fh_FetchFileInfo() { filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -189,19 +189,19 @@ fh_GetFile() { fi download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -210,9 +210,9 @@ fh_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -224,15 +224,15 @@ fh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -244,21 +244,21 @@ fh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -270,7 +270,7 @@ fh_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/examples/up_example.sh b/hosts/examples/up_example.sh old mode 100755 new mode 100644 index 84b0ea0..7419aab --- a/hosts/examples/up_example.sh +++ b/hosts/examples/up_example.sh @@ -1,6 +1,6 @@ #! Name: up_example.sh #! Author: kittykat -#! Version: 2024.10.23 +#! Version: 2025.02.18 #! Desc: Add support for uploading files to a new host #! Info: Files are accessible at https://oshi.at/ link #! MaxSize: 5GB @@ -54,30 +54,30 @@ oshi_UploadFile() { MaxUploadSizeInBytes=5368709120 fsize=$(GetFileSize "$filepath" "false") if ((fsize > MaxUploadSizeInBytes)); then - m -f "${UploadTicket}" + rm -f "${UploadTicket}" echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" failedUpload "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)" return 1 fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if oshi_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -116,7 +116,7 @@ oshi_PostFile() { -F "randomizefn=0" \ -F "shorturl=0" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "${_hostCode}_dwnpage$j" "post_url: ${PostUrlHost}"$'\n'"${response}" fi successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$responseHtml}" diff --git a/hosts/fileblade.sh b/hosts/fileblade.sh index 1ead143..525a517 100644 --- a/hosts/fileblade.sh +++ b/hosts/fileblade.sh @@ -43,24 +43,24 @@ fb_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fb_FetchFileInfo $finalAttempt && fb_GetFile "${filecnt}" $((z+1)) 
$finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ fb_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${fb_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fb_dwnpage$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${fb_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -107,11 +107,11 @@ fb_FetchFileInfo() { fi if grep -Eqi "Sorry, you are banned|Sorry, you have been blocked" <<< "$response"; then rm -f "${fb_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [blocked ip]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [blocked ip]" "" fi return 1 @@ -139,11 +139,11 @@ fb_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_fname" ]] ; then rm -f "${fb_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -165,16 +165,16 @@ fb_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ --data "$form_data" "$post_action") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fb_post(1)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -199,12 +199,12 @@ 
fb_FetchFileInfo() { return 1 fi if grep -Eqi "Sorry, you are banned|Sorry, you have been blocked" <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [blocked ip]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [blocked ip]" "" fi return 1 @@ -214,12 +214,12 @@ fb_FetchFileInfo() { fi fi if grep -Eqi 'Your subsequent download will be started in' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Subsequent download wait.. [3b]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Subsequent download wait.. [3b]" "" fi return 1 @@ -229,12 +229,12 @@ fb_FetchFileInfo() { fi fi if grep -Eqi '

' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link (Unknown warning encountered) [3c]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Unknown warning encountered in download2 [3c]" "" fi return 1 @@ -256,11 +256,11 @@ fb_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then rm -f "${fb_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -281,16 +281,16 @@ fb_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${fb_cookie_jar}" -c "${fb_cookie_jar}" \ --data "$form_data" "$post_action") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fb_post(2)" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [4].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [4]" "" fi return 1 @@ -311,7 +311,7 @@ fb_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract download link [6]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${fb_cookie_jar}"; failedRetryDownload "${remote_url}" "Failed to extract download link [6]" "" fi @@ -333,16 +333,16 @@ fb_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${fb_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 GetRandomUA file_header=$(tor_curl_request --insecure -L --head -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fb_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [1]" "" fi return 1 @@ -352,12 +352,12 @@ fb_FetchFileInfo() { fi fi if ! 
grep -Eqi 'HTTP.*200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [2]" "" fi return 1 @@ -366,15 +366,15 @@ fb_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename="${download_url##*/}" filename=${filename//%0d/} if [[ -z "$filename" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -386,12 +386,12 @@ fb_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${fb_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file size." "" fi return 1 @@ -402,15 +402,15 @@ fb_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -441,14 +441,14 @@ fb_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${fb_cookie_jar}"; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ "$download_url" --continue-at - --output "$file_path" @@ -457,7 +457,7 @@ fb_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -489,7 +489,7 @@ fb_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -498,9 +498,9 @@ fb_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -512,15 +512,15 @@ fb_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -532,21 +532,21 @@ fb_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -558,7 +558,7 @@ fb_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/fileditch.sh b/hosts/fileditch.sh index 433fc1d..309da9d 100644 --- a/hosts/fileditch.sh +++ b/hosts/fileditch.sh @@ -54,24 +54,24 @@ fd_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fd_FetchFileInfo $finalAttempt && fd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -94,10 +94,10 @@ fd_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$remote_url") tor_identity="${RANDOM}" file_header=$(tor_curl_request --insecure --head -L -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then echo -e "${RED}| The file has been removed (404).${NC}" removedDownload "${remote_url}" @@ -107,12 +107,12 @@ fd_FetchFileInfo() { if ! 
grep -Eqi 'HTTP/.* 200' <<< $file_header ; then echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -124,29 +124,29 @@ fd_FetchFileInfo() { fi if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi else echo -e "${RED}| No response. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -178,18 +178,18 @@ fd_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -198,9 +198,9 @@ fd_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -212,15 +212,15 @@ fd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -232,21 +232,21 @@ fd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -258,7 +258,7 @@ fd_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/filedot.sh b/hosts/filedot.sh index 79a071d..335df2c 100644 --- a/hosts/filedot.sh +++ b/hosts/filedot.sh @@ -43,24 +43,24 @@ fdot_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fdot_FetchFileInfo $finalAttempt && fdot_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -117,7 +117,7 @@ fdot_FetchFileInfo() { -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \ -c 
"${fdot_cookie_jar}" \ "https://filedot.to/login.html") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_login_$a" "${PAGE}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${PAGE}" ; then @@ -168,7 +168,7 @@ fdot_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${fdot_cookie_jar}" -c "${fdot_cookie_jar}" \ -d "$form_data" "https://filedot.to/") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_loginP_$b" "form_data: ${form_data}"$'\n'"${resp_login}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${resp_login}" ; then @@ -255,7 +255,7 @@ fdot_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${fdot_cookie_jar}" -c "${fdot_cookie_jar}" \ -d "$form_data" "https://filedot.to/") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_fdownP_$c" "form_data: ${form_data}"$'\n'"${response}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${response}" ; then @@ -335,7 +335,7 @@ fdot_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${fdot_cookie_jar}" -c "${fdot_cookie_jar}" \ -d "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_downlnkP_$d" "form_data: ${form_data}"$'\n'"${response}" fi if grep -Eqi 'Sorry, you have been blocked' <<< "${response}" ; then @@ -404,15 +404,15 @@ fdot_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fdot_head_$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -421,11 +421,11 @@ fdot_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -436,11 +436,11 @@ fdot_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -450,15 +450,15 @@ fdot_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -489,19 +489,19 @@ fdot_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -510,9 +510,9 @@ fdot_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -524,15 +524,15 @@ fdot_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -544,21 +544,21 @@ fdot_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -570,7 +570,7 @@ fdot_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/filehaus.sh b/hosts/filehaus.sh index e93539a..80a2788 100644 --- a/hosts/filehaus.sh +++ b/hosts/filehaus.sh @@ -43,24 +43,24 @@ fh_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fh_FetchFileInfo $finalAttempt && fh_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -99,10 +99,10 @@ fh_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request_extended --insecure -L --head -s --referer "${remote_url//\.org/\.cc}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fh_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then printf "\\n" echo -e "${RED}| The file has been removed (404).${NC}" @@ -111,10 +111,10 @@ fh_FetchFileInfo() { return 1 fi if ! 
grep -Eqi '200|content-length' <<< "${file_header}" ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -137,15 +137,15 @@ fh_FetchFileInfo() { printf "\\n" break done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -181,19 +181,19 @@ fh_GetFile() { splitnum=1 fi pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request_extended --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$download_url" "$download_url" --continue-at - --output "$file_path" else tor_curl_request_extended --insecure --referer "$download_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -202,9 +202,9 @@ fh_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -216,15 +216,15 @@ fh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -236,21 +236,21 @@ fh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -262,7 +262,7 @@ fh_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/firestorage.sh b/hosts/firestorage.sh index 374d572..c22a386 100644 --- a/hosts/firestorage.sh +++ b/hosts/firestorage.sh @@ -43,24 +43,24 @@ fs_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fs_FetchFileInfo $finalAttempt && fs_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -89,22 +89,22 @@ fs_FetchFileInfo() { for ((j=1; j<=$maxfetchretries; j++)); do mkdir -p "${WorkDir}/.temp" printf " ." 
- if [ "$newIdent" == "true" ] ; then + if [[ "$newIdent" == "true" ]] ; then tor_identity="${RANDOM}" newIdent=false fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "fs_${fetchnum}fetch_$j" "fixed_url: ${fixed_url}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -135,11 +135,11 @@ fs_FetchFileInfo() { j=$((j-1)) continue fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link (unknown)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" "" fi return 1 @@ -159,11 +159,11 @@ fs_FetchFileInfo() { j=$((j-1)) continue fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link (unknown)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" "" fi return 1 @@ -183,11 +183,11 @@ fs_FetchFileInfo() { j=$((j-1)) continue fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract link (unknown)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract link (unknown)" "" fi return 1 @@ -206,11 +206,11 @@ fs_FetchFileInfo() { download_url=$(grep -oP -m 1 '(?<= 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -360,15 +360,15 @@ fs_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -380,21 +380,21 @@ fs_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -406,7 +406,7 @@ fs_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/gofile.sh b/hosts/gofile.sh old mode 100755 new mode 100644 index 0326ccd..41df85a --- a/hosts/gofile.sh +++ b/hosts/gofile.sh @@ -1,6 +1,6 @@ #! Name: gofile.sh #! Author: kittykat -#! Version: 2025.01.03 +#! Version: 2025.02.17 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! @@ -43,24 +43,24 @@ gofile_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if gofile_FetchFileInfo $finalAttempt && gofile_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -77,7 +77,7 @@ gofile_DownloadFile() { #! gofile_FetchFileInfo() { finalAttempt=$1 - maxfetchretries=5 + maxfetchretries=4 gofile_cookie_jar="" if ! 
grep -Eqi 'gofile.io/d/' <<< "$remote_url"; then echo -e "${RED}| Bad gofile url (format: gofile.io/d/xxxxx)${NC}" @@ -106,16 +106,16 @@ gofile_FetchFileInfo() { -H "Sec-Fetch-Site: none" \ -H "Sec-Fetch-User: ?1" \ "https://api.gofile.io/accounts") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "gofile_token$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get token.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -125,18 +125,18 @@ gofile_FetchFileInfo() { fi if grep -Eqi '"token":"' <<< "$response"; then token=$(grep -oP '(?<="token":").*(?="})' <<< "$response") - if [ ! -z $token ]; then + if [[ ! -z "$token" ]]; then printf "\\n" echo -e "${GREEN}| Token acquired.${NC}" echo -e ".gofile.io\tTRUE\t/\tFALSE\t0\taccountToken\t$token" > ${gofile_cookie_jar} break else rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get token.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -146,11 +146,11 @@ gofile_FetchFileInfo() { fi else rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get token.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -181,16 +181,16 @@ gofile_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ --data "$form_data" \ "https://api.gofile.io/contents/$file_id") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "gofile_contents$i" "url: https://api.gofile.io/contents/${file_id}?${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get download url.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -223,11 +223,11 @@ gofile_FetchFileInfo() { fi else rm -f "${gofile_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get download url.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -248,7 +248,7 @@ gofile_FetchFileInfo() { filename="" file_size_bytes="" cdn_url="" - if [ ! "$filename_override" == "" ] ; then + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi for ((j=1; j<=$maxfetchretries; j++)); do @@ -257,7 +257,6 @@ gofile_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${gofile_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s \ -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ - -H "User-Agent: $RandomUA" \ -H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8" \ -H "Accept-Language: en-US,en;q=0.5" \ -H "Accept-Encoding: gzip, deflate, br" \ @@ -268,30 +267,47 @@ gofile_FetchFileInfo() { -H "Sec-Fetch-Site: none" \ -H "Sec-Fetch-User: ?1" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "gofile_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${gofile_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then - failedRetryDownload "${remote_url}" "" "" + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 else continue fi fi + if grep -Eqi 'HTTP/2 429|HTTP/1.1 429|HTTP.*429.*$' <<< $file_header ; then + if [[ $j == $maxfetchretries ]] ; then + rm -f "${gofile_cookie_jar}"; + printf "\\n" + echo -e "${RED}| Failed to extract file info [429]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info" "" + fi + return 1 + else + printf " zZz" + sleepRandomSecs 1 5 + tor_identity="${RANDOM}" + continue + fi + fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${gofile_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -299,29 +315,29 @@ gofile_FetchFileInfo() { continue fi fi - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename[*]=).*' <<< "$file_header") filename=${filename//[$'\t\r\n']} fi filename=${filename//UTF-8\'\'/} fi - if [ -z $cdn_url ] ; then + if [[ -z "$cdn_url" ]] ; then cdn_url="https:"$(grep -oPi '(?<=location: ).*' <<< "$file_header") cdn_url=${cdn_url//[$'\t\r\n']} fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} fi - if [ -z $filename ] || [ -z $file_size_bytes ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ -z $filename ]] || [[ -z "$file_size_bytes" ]] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${gofile_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -331,12 +347,12 @@ gofile_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -367,13 +383,13 @@ gofile_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${gofile_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${gofile_cookie_jar}" -c "${gofile_cookie_jar}" \ -H "User-Agent: $RandomUA" \ @@ -403,7 +419,7 @@ gofile_GetFile() { "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -412,9 +428,9 @@ gofile_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -426,15 +442,15 @@ gofile_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -446,21 +462,21 @@ gofile_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -472,7 +488,7 @@ gofile_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/hexload.sh b/hosts/hexload.sh index c9f043d..c32e8fa 100644 --- a/hosts/hexload.sh +++ b/hosts/hexload.sh @@ -46,24 +46,24 @@ hex_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if hex_FetchFileInfo $finalAttempt && check_file_extension && hex_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -85,42 +85,42 @@ hex_FetchFileInfo() { echo -e "${GREEN}# Fetching download url… ${NC}[.] timeout, [-] blocked ip, [*] wait time${NC}" local bDonePrint=true for ((j=1; j<=$hexmaxfetchfileretries; j++)); do - if [ "${bDonePrint}" == "false" ]; then + if [[ "${bDonePrint}" == "false" ]]; then printf " ." 
fi bDonePrint=false CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "$bNoNewTorIdent" == "true" ]; then + if [[ "$bNoNewTorIdent" == "true" ]]; then bNoNewTorIdent=false else tor_identity="${RANDOM}" fi - if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_hexload" == "true" ]; then - if [ "$hexUseDownload" == "download2" ]; then + if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [[ "$PJSCloud_hexload" == "true" ]]; then + if [[ "$hexUseDownload" == "download2" ]]; then form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(pjscloud_tor_request "https://hexload.com/download" "$form_data") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi else form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi fi else - if [ "$hexUseDownload" == "download2" ]; then + if [[ "$hexUseDownload" == "download2" ]]; then form_data="op=download1&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi else form_data="op=download2&id=${file_id}&rand=&usr_login=&fname=&ajax=1&method_free=1&dataType=json" response=$(tor_curl_request --insecure -s --data "$form_data" "https://hexload.com/download") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_dwnpage$j" "form_data: ${form_data}"$'\n'"${response}" fi fi @@ -129,11 +129,11 @@ hex_FetchFileInfo() { download_url=$(echo "$response" | grep -oPi '(?<="link":")[^"]+(?=")' | base64 --decode) download_url=$(urlencode_spaces "$download_url") if grep -Eqi "Sorry, you have been blocked" <<< "$response"; then - if [ $j == $hexmaxfetchfileretries ] ; then + if ((j == hexmaxfetchfileretries)) ; then printf "\\n" echo -e "${YELLOW}| Too many failed attempts${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -153,7 +153,7 @@ hex_FetchFileInfo() { return 1 fi wait_time=$(grep -oP '(?<=Wait ).*?(?= seconds.*$)' <<< "$response") - if [ "$hexUseDownload" == "download1" ] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then + if [[ "$hexUseDownload" == "download1" ]] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then printf "\\n" echo -e "${YELLOW}| File uses download2.. 
switching.${NC}" hexUseDownload="download2" @@ -163,17 +163,17 @@ hex_FetchFileInfo() { continue fi if grep -Eqi '>You have to wait' <<< "$response"; then - if [ $j == $hexmaxfetchfileretries ] ; then + if ((j == hexmaxfetchfileretries)) ; then req_msg=$(grep -oP '(?<=>You have to wait ).*?(?= till next download.*$)' <<< "$response") req_msg=$(sanitize_html_tags "${req_msg}") printf "\\n" echo -e "${YELLOW}| Failed: ${req_msg}.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "${req_msg}" "" fi return 1 - elif [ "$hexUseDownload" == "download1" ] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then + elif [[ "$hexUseDownload" == "download1" ]] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then printf "\\n" echo -e "${YELLOW}| File uses download2.. switching.${NC}" hexUseDownload="download2" @@ -202,7 +202,7 @@ hex_FetchFileInfo() { failedRetryDownload "${remote_url}" "$req_msg" "" return 1 fi - if [ "$hexUseDownload" == "download1" ] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then + if [[ "$hexUseDownload" == "download1" ]] && grep -Eqi 'name="op" value="download2">' <<< "$response"; then printf "\\n" echo -e "${YELLOW}| File uses download2.. switching.${NC}" hexUseDownload="download2" @@ -211,11 +211,11 @@ hex_FetchFileInfo() { continue fi if [[ -z "$download_url" ]]; then - if [ $j == $hexmaxfetchfileretries ] ; then + if ((j == hexmaxfetchfileretries)) ; then printf "\\n" echo -e "${RED}| Failed to extract CDN URL${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract CDN URL" "" fi return 1 @@ -223,12 +223,12 @@ hex_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then - if [ $j == $hexmaxfetchfileretries ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then + if ((j == hexmaxfetchfileretries)) ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "ailed to extract file name" "" fi return 1 @@ -238,8 +238,8 @@ hex_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -252,7 +252,7 @@ hex_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_identity="${RANDOM}" file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "hex_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then @@ -268,12 +268,12 @@ hex_FetchFileInfo() { fi file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then if ((j < hexmaxfetchfileretries)); then printf "\\n" echo -e "${YELLOW}| Filesize not found… retry${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found" "" fi return 1 @@ -310,20 +310,20 @@ hex_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -332,9 +332,9 @@ hex_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -346,15 +346,15 @@ hex_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -366,21 +366,21 @@ hex_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -392,7 +392,7 @@ hex_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/innocent.sh b/hosts/innocent.sh index 523bc34..6156065 100644 --- a/hosts/innocent.sh +++ b/hosts/innocent.sh @@ -49,24 +49,24 @@ inno_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if inno_FetchFileInfo $finalAttempt && inno_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -110,7 +110,7 @@ inno_FetchFileInfo() { [ -s "${WorkDir}/.temp/directhead" ] kill $! 2>/dev/null ) - if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" fi rm -f "${WorkDir}/.temp/directhead" @@ -118,10 +118,10 @@ inno_FetchFileInfo() { printf "| Retrieving Head: attempt #$j" file_header=$(tor_curl_request --insecure --head -L -s "$download_url") fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "inno_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then printf "\\n" echo -e "${RED}| Not Found (404). 
The file has been removed.${NC}" @@ -141,7 +141,7 @@ inno_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=${download_url##*/} fi if grep -Eqi 'Content-Length:' <<< "${file_header}" ; then @@ -162,18 +162,18 @@ inno_FetchFileInfo() { fi done printf "\\n" - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then echo -e "${RED}| Unexpected or no header response.${NC}" return 1 fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -202,19 +202,19 @@ inno_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ ! -z $file_size_bytes ] ; then + if [[ ! -z "$file_size_bytes" ]] ; then tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path" else echo -e "${BLUE}| No Resume Fetch${NC}" tor_curl_request_extended --insecure "$download_url" --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -222,8 +222,8 @@ inno_GetFile() { else containsHtml=true fi - if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -235,7 +235,7 @@ inno_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/isupload.sh b/hosts/isupload.sh index 6aa9011..146f4e7 100644 --- a/hosts/isupload.sh +++ b/hosts/isupload.sh @@ -43,24 +43,24 @@ isup_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if isup_FetchFileInfo $finalAttempt && isup_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ isup_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${isup_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request_extended --insecure -L -s -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "isup_dwnpage$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${isup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -107,11 +107,11 @@ isup_FetchFileInfo() { fi if grep -Eqi "Sorry, you are banned" <<< "$response"; then rm -f "${isup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -139,11 +139,11 @@ isup_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then rm -f "${isup_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 
@@ -164,16 +164,16 @@ isup_FetchFileInfo() { response=$(tor_curl_request_extended --insecure -L -s -X POST \ -b "${isup_cookie_jar}" -c "${isup_cookie_jar}" \ --data "$form_data" "$post_action") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "isup_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${isup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -191,12 +191,12 @@ isup_FetchFileInfo() { return 1 fi if grep -Eqi 'Just a moment...' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${isup_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -212,7 +212,7 @@ isup_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract download link [4]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${isup_cookie_jar}"; failedRetryDownload "${remote_url}" "Failed to extract download link [4]" "" fi @@ -254,7 +254,7 @@ isup_FetchFileInfo() { [ -s "${WorkDir}/.temp/directhead" ] kill $! 2>/dev/null ) - if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" fi rm -f "${WorkDir}/.temp/directhead" @@ -262,10 +262,10 @@ isup_FetchFileInfo() { printf "| Retrieving Head: attempt #$j" file_header=$(tor_curl_request_extended --insecure --head -L -s "$download_url") fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "isup_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found|' <<< "${file_header}" ; then printf "\\n" echo -e "${RED}| Not Found (404). The file has been removed.${NC}" @@ -287,7 +287,7 @@ isup_FetchFileInfo() { else break fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=${download_url##*/} fi if grep -Eqi 'Content-Length:' <<< "${file_header}" ; then @@ -308,17 +308,17 @@ isup_FetchFileInfo() { fi done printf "\\n" - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then printf "\\n" echo -e "${RED}| Unexpected or no header response.${NC}" return 1 fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -344,19 +344,19 @@ isup_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_curl_request_extended --insecure -L "$download_url" --output "$file_path" rc=$? - if [ $rc -ne 0 ] ; then + if ((rc != 0 )) ; then printf "${RED}Download Failed (bad exit status).${NC}" - if [ -f ${file_path} ]; then + if [[ -f ${file_path} ]]; then printf "${YELLOW} Partial removed...${NC}" printf "\n\n" rm -f "${file_path}" @@ -365,7 +365,7 @@ isup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -378,12 +378,12 @@ isup_GetFile() { else containsHtml=true fi - if [ "$containsHtml" == "true" ]; then + if [[ "$containsHtml" == "true" ]]; then echo -e "${YELLOW}Download Failed (contains html)${NC} partial removed..." 
rm -f "${file_path}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -395,13 +395,13 @@ isup_GetFile() { else CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request_extended --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request_extended --insecure -L "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -410,9 +410,9 @@ isup_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -424,15 +424,15 @@ isup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -444,21 +444,21 @@ isup_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -470,7 +470,7 @@ isup_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/kraken.sh b/hosts/kraken.sh index 12ea1c3..4e56cf9 100644 --- a/hosts/kraken.sh +++ b/hosts/kraken.sh @@ -43,24 +43,24 @@ kraken_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if kraken_FetchFileInfo "" $((i+1)) $finalAttempt && kraken_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -102,7 +102,7 @@ kraken_FetchFileInfo() { tor_identity="${RANDOM}" trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${kraken_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -s -L -c "${kraken_cookie_jar}" "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${krak_id}" "kraken_token${num_attempt}_$i" "url: ${fixed_url}"$'\n'"krakenid: ${krak_id}"$'\n'"${PAGE}" fi if grep -Eqi 'sendFormCaptcha()' <<< "${PAGE}"; then @@ -167,7 +167,7 @@ kraken_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${kraken_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 down_request=$(tor_curl_request --insecure -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" -F "token=${kraken_token}" "${kraken_action}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${krak_id}" "kraken_url${num_attempt}_1" "action: ${kraken_action}, token: ${kraken_token}"$'\n'"${down_request}" fi if ! 
grep -Eqi '"status":"ok"' <<< "${down_request}"; then @@ -184,11 +184,11 @@ kraken_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${kraken_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure --head -L -s -b "${kraken_cookie_jar}" -c "${kraken_cookie_jar}" --referer "$kraken_action" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${krak_id}" "kraken_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi rm -f "$kraken_cookie_jar"; - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then echo -e "${RED}| The file has been removed (404).${NC}" removedDownload "${remote_url}" @@ -203,12 +203,12 @@ kraken_FetchFileInfo() { fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then echo -e "${RED}| Bad http response.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -216,7 +216,7 @@ kraken_FetchFileInfo() { filename=${filename//[$'\t\r\n']} else echo -e "${RED}| Failed to extract file name.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -224,27 +224,27 @@ kraken_FetchFileInfo() { fi if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi else echo -e "${RED}| No response. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -276,19 +276,19 @@ kraken_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ] && [ ! -f "$file_path" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]] && [[ ! 
-f "$file_path" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$kraken_action" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -297,9 +297,9 @@ kraken_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -311,15 +311,15 @@ kraken_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -331,21 +331,21 @@ kraken_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -357,7 +357,7 @@ kraken_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/mediafire.sh b/hosts/mediafire.sh index 9f32515..ed86788 100644 --- a/hosts/mediafire.sh +++ b/hosts/mediafire.sh @@ -48,24 +48,24 @@ mfire_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if mfire_FetchFileInfo $finalAttempt && mfire_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -104,7 +104,7 @@ mfire_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ "$download_url" | tr -d '\0') response=$(echo "$response" | iconv -c -f UTF-8 -t ISO8859-1) - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "mfire_fetch$i" "download_url: ${download_url}"$'\n'"${response}" fi if [[ -z $response ]] ; then @@ -113,7 +113,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download link" "" fi return 1 @@ -134,7 +134,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to get download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get download link" "" fi return 1 @@ -147,7 +147,7 @@ mfire_FetchFileInfo() { echo -e "${GREEN}# Fetching file info…${NC}" filename="" file_size_bytes="" - if [ ! "$filename_override" == "" ] ; then + if [[ ! 
"$filename_override" == "" ]] ; then filename="$filename_override" fi for ((j=1; j<=$maxretries; j++)); do @@ -167,7 +167,7 @@ mfire_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -b "${mfire_cookie_jar}" -c "${mfire_cookie_jar}" \ "$download_url" | tr -d '\0') - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "mfire_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then @@ -176,7 +176,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract file info [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [1]" "" fi return 1 @@ -190,7 +190,7 @@ mfire_FetchFileInfo() { printf "\\n" echo -e "${RED}| Failed to extract file info [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [2]" "" fi return 1 @@ -198,24 +198,24 @@ mfire_FetchFileInfo() { continue fi fi - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename[*]=).*' <<< "$file_header") filename=${filename//[$'\t\r\n']} fi fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} fi - if [ -z $filename ] || [ -z $file_size_bytes ] ; then + if [[ -z $filename ]] || [[ -z "$file_size_bytes" ]] ; then if ((j == maxfetchretries)) ; then rm -f "${mfire_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [3]" "" fi return 1 @@ -226,18 +226,18 @@ mfire_FetchFileInfo() { break #Good to go here done rm -f "${mfire_cookie_jar}"; - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then echo -e "${RED}| Unexpected or no header response.${NC}" return 1 fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -267,19 +267,19 @@ mfire_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ ! -z $file_size_bytes ] ; then + if [[ ! 
-z "$file_size_bytes" ]] ; then tor_curl_request_extended --insecure "$download_url" --continue-at - --output "$file_path" else echo -e "${BLUE}| No Resume Fetch${NC}" tor_curl_request_extended --insecure "$download_url" --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -287,8 +287,8 @@ mfire_GetFile() { else containsHtml=true fi - if [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -300,7 +300,7 @@ mfire_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/nippy.sh b/hosts/nippy.sh index 8b25bea..f40f3a7 100644 --- a/hosts/nippy.sh +++ b/hosts/nippy.sh @@ -43,24 +43,24 @@ nippy_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if nippy_FetchFileInfo $finalAttempt && nippy_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -117,16 +117,16 @@ nippy_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${nippy_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "nippy_dwnpage$i" "fixed_url: ${fixed_url}"$'\n'"${response}" fi if [[ -z $response ]] ; then rm -f "${nippy_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -165,7 +165,7 @@ nippy_FetchFileInfo() { printf "\\n" echo -e 
"${RED}| Failed to extract download url (unknown).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -178,7 +178,7 @@ nippy_FetchFileInfo() { filename="" file_size_bytes="" cdn_url="" - if [ ! "$filename_override" == "" ] ; then + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi for ((j=1; j<=$maxfetchretries; j++)); do @@ -188,16 +188,16 @@ nippy_FetchFileInfo() { file_header=$(tor_curl_request --insecure -L --head -s \ -b "${nippy_cookie_jar}" -c "${nippy_cookie_jar}" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "nippy_head$j" "FileInfoUrl: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${nippy_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -213,11 +213,11 @@ nippy_FetchFileInfo() { return 1 fi if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -225,24 +225,24 @@ nippy_FetchFileInfo() { continue fi fi - if [ -z $cdn_url ] ; then + if [[ -z "$cdn_url" ]] ; then cdn_url="https:"$(grep -oP '(?<=location: ).*$' <<< "$file_header") cdn_url=${cdn_url//[$'\t\r\n']} fi - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} fi - if [ -z $filename ] || [ -z $cdn_url ] || [ -z $file_size_bytes ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ -z $filename ]] || [[ -z "$cdn_url" ]] || [[ -z "$file_size_bytes" ]] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${nippy_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -253,12 +253,12 @@ nippy_FetchFileInfo() { break #Good to go here done download_url="$cdn_url" - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -289,20 +289,20 @@ nippy_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${nippy_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -311,9 +311,9 @@ nippy_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -325,15 +325,15 @@ nippy_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -345,21 +345,21 @@ nippy_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -371,7 +371,7 @@ nippy_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/oshi.sh b/hosts/oshi.sh index 9a838e0..0b06430 100644 --- a/hosts/oshi.sh +++ b/hosts/oshi.sh @@ -1,6 +1,6 @@ #! Name: oshi.sh #! Author: kittykat -#! Version: 2024.09.13 +#! Version: 2025.02.17 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! @@ -54,24 +54,24 @@ oshi_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if oshi_FetchFileInfo $finalAttempt && oshi_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -91,18 +91,25 @@ oshi_FetchFileInfo() { finalAttempt=$1 CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${OshiBaseUrlOverride}" == "oshiat" ]; then + if [[ "${OshiBaseUrlOverride}" == "oshiat" ]]; then download_url=${remote_url//5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion/oshi\.at} - elif [ "${OshiBaseUrlOverride}" == "oshionion" ]; then + elif [[ "${OshiBaseUrlOverride}" == "oshionion" ]]; then download_url=${remote_url//oshi\.at/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion} fi + if ! 
grep -Eqi '/nossl/' <<< "$download_url"; then + download_url=${download_url//oshi\.at/oshi\.at\/nossl} + download_url=${download_url//5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd\.onion/5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqdi\.onion\/nossl} + fi + if grep -Eqi '^https' <<< "$download_url"; then + download_url=${download_url//https:/http:} + fi download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") tor_identity="${RANDOM}" file_header=$(tor_curl_request --insecure --head -L -s --referer "$remote_url" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "oshi_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found' <<< "${file_header}" ; then echo -e "${RED}| O shi-, (404). The file has been removed.${NC}" removedDownload "${remote_url}" @@ -112,12 +119,12 @@ oshi_FetchFileInfo() { if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -132,29 +139,29 @@ oshi_FetchFileInfo() { fi if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi else echo -e "${RED}| No response. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -186,18 +193,18 @@ oshi_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval --referer "$file_url" "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -206,9 +213,9 @@ oshi_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -220,15 +227,15 @@ oshi_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -240,21 +247,21 @@ oshi_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -266,7 +273,7 @@ oshi_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/pixeldrain.sh b/hosts/pixeldrain.sh index 48d2e98..178dd73 100644 --- a/hosts/pixeldrain.sh +++ b/hosts/pixeldrain.sh @@ -43,24 +43,24 @@ pd_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if pd_FetchFileInfo $finalAttempt && pd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -92,18 +92,18 @@ pd_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "https://pixeldrain.com/u/$fileid") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "pd_fetch$i" "$response" fi - if [ ! -z "$response" ] ; then + if [[ ! -z "$response" ]] ; then if grep -q -Eq '"views":' <<< "$response"; then pdpreviews=$(grep -o -P '(?<="views":).+?(?=,")' <<< "$response") fi if grep -i -Eq "You have reached the maximum number of open download connections" <<< "$response"; then - if [ $i -ge 5 ] ; then + if ((i >= 5)) ; then printf "\\n" echo -e "${YELLOW}| Bad node. Reached the maximum number of open download connections…${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -135,7 +135,7 @@ pd_FetchFileInfo() { if ((i > 1)) ; then printf "\\n" fi - if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_pixeldrain" == "true" ]; then + if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [[ "$PJSCloud_pixeldrain" == "true" ]]; then if ! grep -Eq "pjscloud.sh" <<< "$LoadPlugins" ; then echo -e "${RED}| Pixeldrain viewpump requires pjscloud.sh plugin.${NC}" failedRetryDownload "${remote_url}" "Captcha Rate Limited (needs view pumping). Requires pjscloud.sh plugin." 
@@ -157,7 +157,7 @@ pd_FetchFileInfo() { tor_identity="${RANDOM}" trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $jsonRequest; echo ""; tput cnorm; exit" 0 1 2 3 6 15 resp_pump=$(pjscloud_tor_request "https://pixeldrain.com/u/$fileid") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "pd_pump$k" "preViews: $pdpreviews"$'\n'"postViews: $pdpostviews"$'\n'"$resp_pump" fi if grep -q -Eq 'Error: Forbidden' <<< "$resp_pump"; then @@ -182,7 +182,7 @@ pd_FetchFileInfo() { echo -e "| Final views: $pdpostviews (+1)" echo -e "| Waiting a few seconds to allow pd views to update…" sleepRandomSecs 45 120 - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" return 1 fi @@ -207,22 +207,22 @@ pd_FetchFileInfo() { printf "\\n" fi echo -e "${YELLOW}| Unknown availability: $pd_message${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" return 1 fi fi fi - if [ $i -gt 1 ] ; then + if ((i > 1)) ; then printf "\\n" fi echo -e "| Current views: $pdpreviews" break else - if [ $i -ge 5 ] ; then + if ((i >= 5)) ; then printf "\\n" echo -e "${YELLOW}| No response…${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -233,14 +233,14 @@ pd_FetchFileInfo() { fi done filename=$(grep -oP '(?<="name":")[^"]+(?=")' <<< "$response") - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then echo -e "${RED}| Failed to extract file name.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -248,13 +248,13 @@ pd_FetchFileInfo() { echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" for ((i=1; i<=6; i++)); do pdheadurl="https://pixeldrain.com/api/file/${fileid}" - if [ "${UsePixeldrainBypass}" == "true" ]; then + if [[ "${UsePixeldrainBypass}" == "true" ]]; then pdheadurl="https://pd.cybar.xyz/$fileid" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure --head -L -s --referer "$file_url" "$pdheadurl") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "pd_head$i" "url: ${pdheadurl}"$'\n'"${file_header}" fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then @@ -265,7 +265,7 @@ pd_FetchFileInfo() { return 1 fi fi - if [ "${UsePixeldrainBypass}" == "true" ]; then + if [[ "${UsePixeldrainBypass}" == "true" ]]; then download_url=$(grep -oP '(?<=location: ).*$' <<< "$file_header") download_url="${download_url//[$'\t\r\n']}" else @@ -274,8 +274,8 @@ pd_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -294,7 +294,7 @@ pd_FetchFileInfo() { if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then return 1 fi - if [ "${UsePixeldrainBypass}" == "true" ]; then + if [[ "${UsePixeldrainBypass}" == "true" ]]; then echo -e "| ${BLUE}PixelDrain bypass:${NC} Knight beds queen ${RED}]${NC}°${PINK}----${RED}[${NC} ♞♝ ${NC}|▀▄▀▄▀▄▀▄▀▄▀▄▀▄▀▄|${NC}" fi echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload @@ -311,7 +311,7 @@ pd_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi echo -e "Download Url: $download_url" @@ -319,8 +319,8 @@ pd_GetFile() { tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ --referer "$file_url" "$download_url" --continue-at - --output "$file_path" @@ -329,7 +329,7 @@ pd_GetFile() { --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure \ -H "User-Agent: $RandomUA" \ -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \ @@ -349,7 +349,7 @@ pd_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -358,9 +358,9 @@ pd_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to 
previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -372,15 +372,15 @@ pd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -392,21 +392,21 @@ pd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -418,7 +418,7 @@ pd_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/quax.sh b/hosts/quax.sh index c2351ea..cfa183b 100644 --- a/hosts/quax.sh +++ b/hosts/quax.sh @@ -43,24 +43,24 @@ qx_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if qx_FetchFileInfo $finalAttempt && qx_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -83,10 +83,10 @@ qx_FetchFileInfo() { download_url=$(urlencode_literal_grouped_case_urlendingonly "$remote_url") tor_identity="${RANDOM}" 
file_header=$(tor_curl_request --insecure --head -L -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "qx_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi '404 Not Found|HTTP.* 404' <<< "${file_header}" ; then echo -e "${RED}| The file has been removed (404).${NC}" removedDownload "${remote_url}" @@ -96,12 +96,12 @@ qx_FetchFileInfo() { if ! grep -Eqi 'HTTP/.* 200' <<< $file_header ; then echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oP 'filename=\K.*$' <<< "${file_header}") filename=${filename##filename} @@ -113,29 +113,29 @@ qx_FetchFileInfo() { fi if ! grep -Eqi 'Content-Length' <<< "${file_header}" ; then echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi else echo -e "${RED}| No response. Try again later.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" - elif [ -z $filename ] ; then + elif [[ -z $filename ]] ; then filename=${download_url##*/} fi filename=$(sanitize_file_or_folder_name "${filename}") file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -167,18 +167,18 @@ qx_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -187,9 +187,9 @@ qx_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -201,15 +201,15 @@ qx_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -221,21 +221,21 @@ qx_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -247,7 +247,7 @@ qx_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/ranoz.sh b/hosts/ranoz.sh index f7210cf..7aa4d91 100644 --- a/hosts/ranoz.sh +++ b/hosts/ranoz.sh @@ -43,24 +43,24 @@ rz_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if rz_FetchFileInfo $finalAttempt && rz_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,15 +88,15 @@ rz_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "rz_fetch$i" "${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download url [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download url [1]" "" fi return 1 @@ -118,7 +118,7 @@ rz_FetchFileInfo() { removedDownload "${remote_url}" "The file appears to be gone [NEXT_NOT_FOUND]" return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=$(grep -oP '(?<=\[\\"\$\\",\\"title\\",\\"2\\",\{\\"children\\":\\").*?(?=\\"\}\],.*$)' <<< "$response") fi if grep -Eqi '\\"props\\":\{\}\},\\"href\\":\\"' <<< "$response"; then @@ -137,11 +137,11 @@ rz_FetchFileInfo() { fi break else - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download url [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download url [2]" "" fi return 1 @@ -158,16 +158,16 @@ 
rz_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${rz_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure --head -L -i -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "rz_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${rz_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -184,12 +184,12 @@ rz_FetchFileInfo() { return 1 fi if ! grep -Eqi 'HTTP.* 200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${rz_cookie_jar}"; printf "\\n" echo -e "${RED}| Bad server response${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Bad server response" "" fi return 1 @@ -197,17 +197,17 @@ rz_FetchFileInfo() { continue fi fi - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then filename=$(grep -oP '(?<=filename\*\=).*?(?=$)' <<< "$file_header") filename="${filename//[$'\t\r\n\0']}" filename="${filename//UTF-8\'\'}" fi - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -217,8 +217,8 @@ rz_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -226,8 +226,8 @@ rz_FetchFileInfo() { echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -256,19 +256,19 @@ rz_GetFile() { retryCnt=$2 finalAttempt=$3 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" - if [ -f "$file_path" ]; then + if [[ -f "$file_path" ]]; then rm -f "$file_path" fi for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${rz_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ "$download_url" --continue-at - --output "$file_path" @@ -277,7 +277,7 @@ rz_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -309,7 +309,7 @@ rz_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -318,9 +318,9 @@ rz_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -332,15 +332,15 @@ rz_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -352,21 +352,21 @@ rz_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -378,7 +378,7 @@ rz_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/sendnow.sh b/hosts/sendnow.sh index e332003..414f0fe 100644 --- a/hosts/sendnow.sh +++ b/hosts/sendnow.sh @@ -43,24 +43,24 @@ snow_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if snow_FetchFileInfo $finalAttempt && snow_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -88,16 +88,16 @@ snow_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f "${snow_cookie_jar}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "snow_dwnpage$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${snow_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -107,11 +107,11 @@ snow_FetchFileInfo() { fi if 
grep -Eqi "Your IP has been banned|you are banned" <<< "$response"; then rm -f "${snow_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -138,11 +138,11 @@ snow_FetchFileInfo() { file_size_readable=$(grep -oPi '(?<= Download \[).*?(?=\].*$)' <<< "$response") if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] ; then rm -f "${snow_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -160,13 +160,13 @@ snow_FetchFileInfo() { response=$(tor_curl_request --insecure -L -svo. -X POST \ -b "${snow_cookie_jar}" -c "${snow_cookie_jar}" \ --data-raw "$form_data" "$remote_url" 2>&1) - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "snow_post" "post_action: ${post_action}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${snow_cookie_jar}"; failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi @@ -188,7 +188,7 @@ snow_FetchFileInfo() { else echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then rm -f "${snow_cookie_jar}"; failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi @@ -216,16 +216,16 @@ snow_FetchFileInfo() { -H "Sec-Fetch-User: ?1" \ -H "Priority: u=0, i" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "snow_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${snow_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info" "" fi return 1 @@ -242,12 +242,12 @@ snow_FetchFileInfo() { return 1 fi if ! 
grep -Eqi 'HTTP.* 200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${snow_cookie_jar}"; printf "\\n" echo -e "${RED}| Bad server response${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Bad server response" "" fi return 1 @@ -255,17 +255,17 @@ snow_FetchFileInfo() { continue fi fi - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then filename=$(grep -oP '(?<=filename\*\=).*?(?=$)' <<< "$file_header") filename="${filename//[$'\t\r\n\0']}" filename="${filename//UTF-8\'\'}" fi - if [ "$filename_override" == "" ] && [ -z "$filename" ] ; then - if [ $j == $maxfetchretries ] ; then + if [[ "$filename_override" == "" ]] && [[ -z "$filename" ]] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -275,8 +275,8 @@ snow_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -284,8 +284,8 @@ snow_FetchFileInfo() { echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found…${NC}" @@ -316,14 +316,14 @@ snow_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${snow_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request_extended --insecure -L --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "Host: $fshost" \ @@ -360,7 +360,7 @@ snow_GetFile() { --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -400,7 +400,7 @@ snow_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -409,9 +409,9 @@ snow_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -423,15 +423,15 @@ snow_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -443,21 +443,21 @@ snow_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -469,7 +469,7 @@ snow_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/syspro.sh b/hosts/syspro.sh index a3534bb..ea852fe 100644 --- a/hosts/syspro.sh +++ b/hosts/syspro.sh @@ -43,24 +43,24 @@ sysp_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if sysp_FetchFileInfo $finalAttempt && sysp_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -86,15 +86,15 @@ sysp_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "sysp_fetch$i" "${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -118,15 +118,15 @@ sysp_FetchFileInfo() { post_rand=$(grep -oP '(?<=input type="hidden" name="rand" 
value=").*(?=">.*$)' <<< "$response") post_fname=$(urlencode_literal_grouped_case "${post_fname}") post_action="${post_action//[$'\t\r\n']}" - if [ "$filename_override" == "" ]; then + if [[ "$filename_override" == "" ]]; then filename=$(urlencode_literal_grouped_case "${post_fname}") fi else - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -136,11 +136,11 @@ sysp_FetchFileInfo() { fi if [[ -z "$post_action" ]] || [[ -z "$post_act" ]] || [[ -z "$post_id" ]] || \ [[ -z "$post_fname" ]] || [[ -z "$post_rand" ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -152,8 +152,8 @@ sysp_FetchFileInfo() { fi done form_data="act=${post_act}&id=${post_id}&fname=${post_fname}&rand=${post_rand}&btn=Download+File" - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") @@ -178,13 +178,13 @@ sysp_GetFile() { finalAttempt=$3 flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" GetRandomUA - if [ -f "$file_path" ]; then + if [[ -f "$file_path" ]]; then rm -f "file_path" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ --data "$form_data" "$post_action" \ @@ -195,7 +195,7 @@ sysp_GetFile() { --output "$file_path" --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -213,13 +213,13 @@ sysp_GetFile() { else containsHtml=true fi - if [ "$containsHtml" == "true" ]; then - if [ -f "$file_path" ] ; then + if [[ "$containsHtml" == "true" ]]; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
rm -f $flockDownload; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/tempfileme.sh b/hosts/tempfileme.sh index 3dbc912..7606dd0 100644 --- a/hosts/tempfileme.sh +++ b/hosts/tempfileme.sh @@ -43,24 +43,24 @@ tmpme_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if tmpme_FetchFileInfo $finalAttempt && tmpme_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -87,15 +87,15 @@ tmpme_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "tmpme_fetch$j" "${response}" fi if [[ -z $response ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link" "" fi return 1 @@ -104,11 +104,11 @@ tmpme_FetchFileInfo() { fi fi if grep -Eqi "Sorry, you are banned" <<< "$response"; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link" "" fi return 1 @@ -131,7 +131,7 @@ tmpme_FetchFileInfo() { return 1 fi if grep -Eqi '503 Service Unavailable|No server is available to handle this request' <<< "$response"; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| 503 Service Unavailable (try again later)${NC}" exitDownloadError=true @@ -145,12 +145,12 @@ tmpme_FetchFileInfo() { download_url=$(grep -oPi '(?<=id="download-button" href=").*?(?=" class="button".*$)' <<< "$response") download_url="${download_url/http:/https:}" fi - if [ -z "$download_url" ]; then - if [ $j == $maxfetchretries ] ; then + if [[ -z "$download_url" ]]; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract token 
url.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract token url." "" fi return 1 @@ -168,12 +168,12 @@ tmpme_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s --referer "${remote_url}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "tmpme_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi 'HTTP.* 404|Not Found' <<< "${file_header}" ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| The file has been removed (404).${NC}" removedDownload "${remote_url}" @@ -187,18 +187,18 @@ tmpme_FetchFileInfo() { download_url=$(grep -oPi '(?<=location: ).*(?=$)' <<< "$file_header") fi if grep -Eqi 'HTTP.* 200|content-length' <<< "${file_header}" ; then - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=$(grep -oPi '(?<=filename=).*(?=$)' <<< "$file_header") filename=${filename//[$'\t\r\n']} - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename="${download_url##*/}" fi if [[ -z "$filename" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -210,10 +210,10 @@ tmpme_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} else - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -234,11 +234,11 @@ tmpme_FetchFileInfo() { fi fi if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file size." "" fi return 1 @@ -250,15 +250,15 @@ tmpme_FetchFileInfo() { printf "\\n" break done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -289,14 +289,14 @@ tmpme_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ --referer "${remote_url}" \ @@ -307,7 +307,7 @@ tmpme_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -339,7 +339,7 @@ tmpme_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -348,9 +348,9 @@ tmpme_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -362,15 +362,15 @@ tmpme_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -382,21 +382,21 @@ tmpme_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -408,7 +408,7 @@ tmpme_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/tempsh.sh b/hosts/tempsh.sh index f7b949f..51bce81 100644 --- a/hosts/tempsh.sh +++ b/hosts/tempsh.sh @@ -43,24 +43,24 @@ tmpsh_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if tmpsh_FetchFileInfo $finalAttempt && tmpsh_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -89,15 +89,15 @@ tmpsh_FetchFileInfo() { -H "Connection: keep-alive" \ -w 'EffectiveUrl=%{url_effective}' \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "tmpsh_posthead" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [1]" "" fi return 1 @@ -123,17 +123,17 @@ tmpsh_FetchFileInfo() { if grep -Eqi 'HTTP.* 200' <<< "$file_header"; then printf "\\n" echo -e "${GREEN}| File info found.${NC}" - if [ "$filename_override" == 
"" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=$(grep -oPi '(?<=filename=").*(?=")' <<< "$file_header") - if [ -z $filename ]; then + if [[ -z $filename ]]; then filename="${download_url##*/}" fi if [[ -z "$filename" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file name" "" fi return 1 @@ -146,11 +146,11 @@ tmpsh_FetchFileInfo() { file_size_bytes=${file_size_bytes//[$'\t\r\n']} else err=$(grep -oPi '(?<=HTTP/.* ).*?(?=$)' <<< "$file_header") - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to get file info. (Code: $err)${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to get file info. (Code: $err)" "" fi return 1 @@ -160,11 +160,11 @@ tmpsh_FetchFileInfo() { fi fi if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file size." "" fi return 1 @@ -176,15 +176,15 @@ tmpsh_FetchFileInfo() { printf "\\n" break done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -215,15 +215,15 @@ tmpsh_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -X POST \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ "$download_url" --continue-at - --output "$file_path" @@ -232,7 +232,7 @@ tmpsh_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -X POST \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -262,7 +262,7 @@ tmpsh_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -271,9 +271,9 @@ tmpsh_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -285,15 +285,15 @@ tmpsh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -305,21 +305,21 @@ tmpsh_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -331,7 +331,7 @@ tmpsh_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/torup.sh b/hosts/torup.sh index 1ced86b..133c11a 100644 --- a/hosts/torup.sh +++ b/hosts/torup.sh @@ -43,24 +43,24 @@ torp_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if torp_FetchFileInfo $finalAttempt && torp_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -92,15 +92,15 @@ torp_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s \ -c "${fdot_cookie_jar}" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "torp_fetch$i" "${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download url [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download url [1]" "" fi return 1 @@ -115,7 +115,7 @@ torp_FetchFileInfo() { removedDownload "${remote_url}" return 1 fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=$(grep -oP '(?<=h3 class\="h5 text-white 
mb-3">).*?(?=

.*$)' <<< "$response") fi file_size_bytes=$(grep -oP '(?<=\(Raw: ).*?(?=\).*$)' <<< "$response") @@ -126,11 +126,11 @@ torp_FetchFileInfo() { download_url="${fixed_url}/file" break else - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download url [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download url [2]" "" fi return 1 @@ -140,15 +140,15 @@ torp_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -179,14 +179,14 @@ torp_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${torp_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${torp_cookie_jar}" -c "${torp_cookie_jar}" \ @@ -199,7 +199,7 @@ torp_GetFile() { "$download_url" --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --no-alpn \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -H "User-Agent: $RandomUA" \ @@ -233,7 +233,7 @@ torp_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -242,9 +242,9 @@ torp_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -256,15 +256,15 @@ torp_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -276,21 +276,21 @@ torp_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -302,7 +302,7 @@ torp_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/up2share.sh b/hosts/up2share.sh index 6342a68..a8cb364 100644 --- a/hosts/up2share.sh +++ b/hosts/up2share.sh @@ -43,24 +43,24 @@ up2share_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if up2share_FetchFileInfo $finalAttempt && up2share_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -91,16 +91,16 @@ up2share_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c 
"${up2share_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "up2share_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${up2share_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract token link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -122,7 +122,7 @@ up2share_FetchFileInfo() { break else rm -f "${up2share_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Download button not found [1].${NC}" exitDownloadError=true @@ -144,16 +144,16 @@ up2share_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "up2share_down$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${up2share_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -175,7 +175,7 @@ up2share_FetchFileInfo() { break else rm -f "${up2share_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Direct link not found [2].${NC}" exitDownloadError=true @@ -195,16 +195,16 @@ up2share_FetchFileInfo() { file_header=$(tor_curl_request --insecure -L -s --head \ -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ --referer "https://up2sha.re/" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "up2share_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${up2share_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -214,12 +214,12 @@ up2share_FetchFileInfo() { fi fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${up2share_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -228,14 +228,14 @@ up2share_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] ; then + if [[ "$filename_override" == "" ]] ; then filename=$(grep -oPi '(?<=filename=").*?(?=")' <<< "$file_header") if [[ -z "$filename" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file name${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -248,12 +248,12 @@ up2share_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$subSearch") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${up2share_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -264,15 +264,15 @@ up2share_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -303,14 +303,14 @@ up2share_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${up2share_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ @@ -325,7 +325,7 @@ up2share_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${up2share_cookie_jar}" -c "${up2share_cookie_jar}" \ @@ -363,7 +363,7 @@ up2share_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -372,9 +372,9 @@ up2share_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -386,15 +386,15 @@ up2share_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -406,21 +406,21 @@ up2share_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -432,7 +432,7 @@ up2share_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/up_1fichier.sh b/hosts/up_1fichier.sh index da5e4c4..e2cf6aa 100644 --- a/hosts/up_1fichier.sh +++ b/hosts/up_1fichier.sh @@ -54,23 +54,23 @@ fich_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fich_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -105,12 +105,12 @@ fich_PostFile() { tor_identity="${RANDOM}" trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "https://1fichier.com/") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_up_getid_$i" "url: https://1fichier.com/"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ $i == $maxfetchretries ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
(GetId [1])${NC}" failedUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "No Response (GetId [1])" @@ -131,11 +131,11 @@ fich_PostFile() { echo -e "${GREEN}| Found 1F id...${NC}" fichaction=$(grep -oPi '(?<=action=").*?(?=")' <<< "$response") fichId=$(grep -oPi '(?<=1fichier.com/upload.cgi\?id=).*?(?=")' <<< "$response") - if [ ! -z "$fichaction" ] && [ ! -z "$fichId" ]; then + if [[ ! -z "$fichaction" ]] && [[ ! -z "$fichId" ]]; then break else - if [ $i == $maxfetchretries ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ $i == $maxfetchretries ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. (GetId [2])${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "(GetId [2])" @@ -152,8 +152,8 @@ fich_PostFile() { fi fi else - if [ $i == $maxfetchretries ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ $i == $maxfetchretries ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. (GetId [3])${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "(GetId [3])" @@ -187,7 +187,7 @@ fich_PostFile() { -F "mails=" \ -F "message=" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi ').*?(?=)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. Error: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Error: $err" diff --git a/hosts/up_anonfile.sh b/hosts/up_anonfile.sh index db3a346..5b2b401 100644 --- a/hosts/up_anonfile.sh +++ b/hosts/up_anonfile.sh @@ -54,23 +54,23 @@ anon_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if anon_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -112,7 +112,7 @@ anon_PostFile() { -F "keepalive=1" \ -F "file_0=@${filepath}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then @@ -126,7 +126,7 @@ anon_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ 
"${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_anonsharing.sh b/hosts/up_anonsharing.sh index 261f6c2..cc489cc 100644 --- a/hosts/up_anonsharing.sh +++ b/hosts/up_anonsharing.sh @@ -54,23 +54,23 @@ ansh_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if ansh_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -103,7 +103,7 @@ ansh_PostFile() { -H "Content-Type: multipart/form-data" \ -F "files[]=@${arrFiles[@]}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"error":null,"url":"https:\\/\\/anonsharing.com\\/' <<< "${response}" ; then @@ -117,7 +117,7 @@ ansh_PostFile() { return 0 else err=$(grep -oPi '(?<="error":).*?(?=,.*$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_ateasystems.sh b/hosts/up_ateasystems.sh index 97f4e85..e185d82 100644 --- a/hosts/up_ateasystems.sh +++ b/hosts/up_ateasystems.sh @@ -54,23 +54,23 @@ atea_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if atea_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -106,7 +106,7 @@ atea_PostFile() { -F "link_pass=" \ -F "file_0=@${filepath}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi "Location: https://share\.ateasystems\.com/share/\?\&filename\=" <<< "${response}" ; then @@ -121,7 +121,7 @@ atea_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_axfc.sh b/hosts/up_axfc.sh index 6752884..05f8306 100644 --- a/hosts/up_axfc.sh +++ b/hosts/up_axfc.sh @@ -54,24 +54,24 @@ axfc_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if axfc_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then rm -f "${UploadTicket}" break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -107,16 +107,16 @@ axfc_PostFile() { tor_identity="${RANDOM}" trap "rm -f ${UploadTicket}; rm -f ${axfc_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "axfc_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${axfc_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to start an upload [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to start an upload [1]" "" fi return 1 @@ -142,16 +142,16 @@ axfc_PostFile() { --data-raw "method=upload&ext=ext&filename=1&comment=&address=&delpass=$randelkey&keyword=1234&count=&term=0&term_y=2024&term_mon=10&term_d=1&term_h=15&term_min=0&term_s=0&term_ps=&term_mp=3600" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "axfc_ticket$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${axfc_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract token link [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedUpload "$pline" "${filepath}" "${_hostCode}" "Failed to extract token link [1]" "" fi return 1 @@ -164,11 +164,11 @@ axfc_PostFile() { break else rm -f "${axfc_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Ticket url not found [1].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryUpload "$pline" "${filepath}" 
"${_hostCode}" "Ticket url not found [1]" "" fi return 1 @@ -187,11 +187,11 @@ axfc_PostFile() { -F "filedata=@$filepath" \ -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" \ "$PostUrlHost") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi response_ascii=$(mconvert_utf8_to_ascii "$response") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload_ascii" "post_url: ${PostUrlHost}"$'\n'"${response_ascii}" fi if grep -Eqi -m 1 'a href="https://www\.axfc\.net\/u\/.*\?key=1234"' <<< "${response_ascii}" ; then @@ -216,7 +216,7 @@ axfc_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=")' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_bedrive.sh b/hosts/up_bedrive.sh index b669bef..24b99a9 100644 --- a/hosts/up_bedrive.sh +++ b/hosts/up_bedrive.sh @@ -54,23 +54,23 @@ bd_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if bd_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -104,7 +104,7 @@ bd_PostFile() { -F "time=month" \ -F "files[]=@${arrFiles[@]}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"error":null,"url":"https:\\/\\/bedrive.ru\\/' <<< "${response}" ; then @@ -118,7 +118,7 @@ bd_PostFile() { return 0 else err=$(grep -oPi '(?<="error":).*?(?=,.*$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_bowfile.sh b/hosts/up_bowfile.sh index b4e5814..33b4587 100644 --- a/hosts/up_bowfile.sh +++ b/hosts/up_bowfile.sh @@ -54,23 +54,23 @@ bow_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if bow_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -109,7 +109,7 @@ bow_PostFile() { -H "Content-Type: multipart/form-data" \ -F "files[]=@$filepath" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"error":null,"url":"https:\\/\\/bowfile.com\\/' <<< "${response}" ; then @@ -124,7 +124,7 @@ bow_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/2 ).*?(?=")' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_dailyuploads.sh b/hosts/up_dailyuploads.sh index 2e0a500..bfa1f75 100644 --- a/hosts/up_dailyuploads.sh +++ b/hosts/up_dailyuploads.sh @@ -54,23 +54,23 @@ daily_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if daily_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -117,7 +117,7 @@ daily_PostFile() { -F "file_public=1" \ -F "file_0=@$filepath" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then @@ -131,7 +131,7 @@ daily_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/1.1 ).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_dashfile.sh b/hosts/up_dashfile.sh index 85ba6f7..7e57f6a 100644 --- a/hosts/up_dashfile.sh +++ b/hosts/up_dashfile.sh @@ -54,23 +54,23 @@ dash_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if dash_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -112,7 +112,7 @@ dash_PostFile() { -F "keepalive=1" \ -F "file_0=@${filepath}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then @@ -126,7 +126,7 @@ dash_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_dataupload.sh b/hosts/up_dataupload.sh index d669d22..a7b87aa 100644 --- a/hosts/up_dataupload.sh +++ b/hosts/up_dataupload.sh @@ -54,23 +54,23 @@ dup_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if dup_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -110,7 +110,7 @@ dup_PostFile() { -F "keepalive=1" \ -F "file_0=@${filepath}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'file_status":"OK"' <<< "${response}" ; then @@ -124,7 +124,7 @@ dup_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_dbree.sh b/hosts/up_dbree.sh index 49573c4..295e0e2 100644 --- a/hosts/up_dbree.sh +++ b/hosts/up_dbree.sh @@ -54,23 +54,23 @@ dbree_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if dbree_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -104,7 +104,7 @@ dbree_PostFile() { -F "file[]=@${arrFiles[@]}" \ -F "upload=Upload" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'URL: Why we block tor' <<< "${response}" ; then if ((j >= 20)); then - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. Tor Blocked.${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Upload failed. Tor Blocked." @@ -141,7 +141,7 @@ fd_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then echo -e "" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_filehaus.sh b/hosts/up_filehaus.sh index 25eb03e..d696e67 100644 --- a/hosts/up_filehaus.sh +++ b/hosts/up_filehaus.sh @@ -54,23 +54,23 @@ fh_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fh_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -108,7 +108,7 @@ fh_PostFile() { -F "file=@$filepath" \ -F "submit=Upload" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'HTTP/.* 200|https://cdn' <<< "${response}" ; then @@ -122,7 +122,7 @@ fh_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/.* ).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_fileland.sh b/hosts/up_fileland.sh index 3a36899..e72ac78 100644 --- a/hosts/up_fileland.sh +++ b/hosts/up_fileland.sh @@ -54,23 +54,23 @@ fland_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fland_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -112,7 +112,7 @@ fland_PostFile() { -F "keepalive=1" \ -F "file_0=@${filepath}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then @@ -126,7 +126,7 @@ fland_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_fireget.sh b/hosts/up_fireget.sh index 04dcb72..fa4a475 100644 --- a/hosts/up_fireget.sh +++ b/hosts/up_fireget.sh @@ -54,23 +54,23 @@ fget_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fget_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -112,7 +112,7 @@ fget_PostFile() { -F "file_1=@${filepath}" \ --cookie "lang=english" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'Location: https://fireget.com/\?&fn=' <<< "${response}" ; then @@ -126,7 +126,7 @@ fget_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_firestorage.sh b/hosts/up_firestorage.sh index 7a54097..baefeae 100644 --- a/hosts/up_firestorage.sh +++ b/hosts/up_firestorage.sh @@ -54,23 +54,23 @@ fs_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if fs_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -116,7 +116,7 @@ fs_PostFile() { -F "exp=0" \ -F "Filename=@$filepath" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi dec_response=$(urldecode "$response") @@ -131,7 +131,7 @@ fs_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/.*? ).*?(?=$)' <<< "$dec_response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_gofile.sh b/hosts/up_gofile.sh index d1d17c9..3921557 100644 --- a/hosts/up_gofile.sh +++ b/hosts/up_gofile.sh @@ -54,23 +54,23 @@ gofile_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if gofile_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -100,10 +100,10 @@ gofile_PostFile() { gofileStoreServer="store4" trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "https://api.gofile.io/servers") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "${_hostCode}_store" "${response}" fi - if [ ! -z "$response" ] ; then + if [[ ! -z "$response" ]] ; then if grep -Eqi '"status":"ok","data":{"servers":' <<< "${response}" ; then gofileStoreServer=$(grep -oPi '(?<="servers":\[{"name":").*?(?=")' <<< "${response}") echo -e "${GREEN}|${NC} Found: ${BLUE}$gofileStoreServer${NC}" @@ -130,7 +130,7 @@ gofile_PostFile() { -F "keepalive=1" \ -F "file=@$filepath" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"status":"ok"' <<< "${response}" ; then @@ -145,7 +145,7 @@ gofile_PostFile() { return 0 else err=$(grep -oPi '(?<="file_status":").*?(?=")' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_hexload.sh b/hosts/up_hexload.sh index 38035f3..3f4b636 100644 --- a/hosts/up_hexload.sh +++ b/hosts/up_hexload.sh @@ -54,23 +54,23 @@ hex_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if hex_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -118,7 +118,7 @@ hex_PostFile() { -F "keepalive=1" \ -F "file_0=@$filepath" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then @@ -132,7 +132,7 @@ hex_PostFile() { return 0 else err=$(grep -oPi '(?<="file_status":").*?(?=")' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_innocent.sh b/hosts/up_innocent.sh index d3dfa12..a956fc0 100644 --- a/hosts/up_innocent.sh +++ b/hosts/up_innocent.sh @@ -51,23 +51,23 @@ inno_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if inno_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -102,7 +102,7 @@ inno_PostFile() { -H "Origin: http://innocent5z4fg2kdd4y6q4emu5nfybfiyr2mbp7s5pwllf6sqqhqdwyd.onion" \ -F "file=@${filepath}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'Location: \?uploaded=' <<< "${response}" ; then @@ -117,7 +117,7 @@ inno_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Error: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Error: $err" diff --git a/hosts/up_isupload.sh b/hosts/up_isupload.sh index ea26aa7..ca0b370 100644 --- a/hosts/up_isupload.sh +++ b/hosts/up_isupload.sh @@ -54,23 +54,23 @@ isup_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if isup_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -114,7 +114,7 @@ isup_PostFile() { -F "keepalive=1" \ -F "file_0=@$filepath" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then @@ -128,7 +128,7 @@ isup_PostFile() { return 0 else err=$(grep -oPi '(?<="file_status":").*?(?=")' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_kouploader.sh b/hosts/up_kouploader.sh index d1f45d6..f4b09ae 100644 --- a/hosts/up_kouploader.sh +++ b/hosts/up_kouploader.sh @@ -54,23 +54,23 @@ ko_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if ko_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -106,16 +106,16 @@ ko_PostFile() { tor_identity="${RANDOM}" trap "rm -f ${UploadTicket}; rm -f ${ko_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s -b "${ko_cookie_jar}" -c "${ko_cookie_jar}" "$PostUrlHost") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "ko_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${ko_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to start an upload [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to start an upload [1]" "" fi return 1 @@ -139,7 +139,7 @@ ko_PostFile() { -F "file=@${filepath}" \ -b "${ko_cookie_jar}" -c "${ko_cookie_jar}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi rm -f ${ko_cookie_jar} @@ -155,7 +155,7 @@ ko_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_kraken.sh b/hosts/up_kraken.sh index 561dde6..40bba30 100644 --- a/hosts/up_kraken.sh +++ b/hosts/up_kraken.sh @@ -54,23 +54,23 @@ kraken_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if kraken_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -116,7 +116,7 @@ kraken_PostFile() { -H "Content-Type: multipart/form-data" \ -F "files[]=@${arrFiles[@]}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"error":""' <<< "${response}" ; then @@ -131,7 +131,7 @@ kraken_PostFile() { return 0 else err=$(grep -oPi '(?<="error":").*?(?=")' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Error: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Error: $err" diff --git a/hosts/up_lainsafe.sh b/hosts/up_lainsafe.sh index 61ef156..14801e4 100644 --- a/hosts/up_lainsafe.sh +++ b/hosts/up_lainsafe.sh @@ -54,23 +54,23 @@ lain_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if lain_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -112,11 +112,11 @@ lain_PostFile() { -H "Content-Type: multipart/form-data" \ -F "files[]=@${arrFiles[@]}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if [[ -z $response ]] || grep -Eqi 'HTTP/2 403|403 Forbidden' <<< "${response}" ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") printf "\\n" echo -e "${RED}| Upload failed. Status: ${err}${NC}" @@ -137,7 +137,7 @@ lain_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_nippy.sh b/hosts/up_nippy.sh index a172314..bbb1719 100644 --- a/hosts/up_nippy.sh +++ b/hosts/up_nippy.sh @@ -57,23 +57,23 @@ nippy_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if nippy_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -99,15 +99,15 @@ nippy_PostFile() { UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" tor_identity="${RANDOM}" - if [ "$NippyDomain" == "nippydrive" ]; then + if [[ "$NippyDomain" == "nippydrive" ]]; then PostUrlHost='https://ns05.zipcluster.com/upload.php' - elif [ "$NippyDomain" == "nippyshare" ]; then + elif [[ "$NippyDomain" == "nippyshare" ]]; then PostUrlHost='https://ns01.zipcluster.com/upload.php' - elif [ "$NippyDomain" == "nippybox" ]; then + elif [[ "$NippyDomain" == "nippybox" ]]; then PostUrlHost='https://ns04.zipcluster.com/upload.php' - elif [ "$NippyDomain" == "nippyspace" ]; then + elif [[ "$NippyDomain" == "nippyspace" ]]; then PostUrlHost='https://ns03.zipcluster.com/upload.php' - elif [ "$NippyDomain" == "nippyfile" ]; then + elif [[ "$NippyDomain" == "nippyfile" ]]; then PostUrlHost='https://ns02.zipcluster.com/upload.php' else local ar_HUP[0]='https://ns05.zipcluster.com/upload.php' # nippydrive @@ -127,7 +127,7 @@ nippy_PostFile() { -F "file[]=@${arrFiles[@]}" \ -F "upload=Upload" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}(${index})_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'URL: #! 
MaxSize: 5GB @@ -59,23 +59,23 @@ oshi_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if oshi_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -101,9 +101,9 @@ oshi_PostFile() { UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}" echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}" tor_identity="${RANDOM}" - PostUrlHost='https://oshi.at/' - if [ "$OshiUploadHostChoice" == "oshionion" ]; then - PostUrlHost='http://5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd.onion/' + PostUrlHost='http://oshi.at/nossl/' + if [[ "$OshiUploadHostChoice" == "oshionion" ]]; then + PostUrlHost='http://5ety7tpkim5me6eszuwcje7bmy25pbtrjtue7zkqqgziljwqy3rrikqd.onion/nossl/' fi arrFiles=("$filepath") trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 @@ -114,22 +114,24 @@ oshi_PostFile() { -F "randomizefn=0" \ -F "shorturl=0" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '^DL: ' <<< "${response}" ; then url=$(grep -oPi '(?<=^DL: ).*(?=$)' <<< "$response") + manageUrl=$(grep -oPi '(?<=^MANAGE: ).*(?=$)' <<< "$response") hash=$(grep -oPi '(?<='"${PostUrlHost}"').*?(?=$)' <<< "$url") filesize=$(GetFileSize "$filepath" "false") downloadLink="$url" echo -e "${GREEN}| Upload Success${NC}" echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" echo -e "| Link: ${YELLOW}${downloadLink}${NC}" - successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" + echo -e "| Manage: ${YELLOW}${manageUrl}${NC}" + successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}" "[Manage: $manageUrl]" return 0 else err=$(grep -oPi '(?<="HTTP/2 ").*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. Code: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Code: $err" diff --git a/hosts/up_pixeldrain.sh b/hosts/up_pixeldrain.sh index 04622a0..dac11ff 100644 --- a/hosts/up_pixeldrain.sh +++ b/hosts/up_pixeldrain.sh @@ -52,7 +52,7 @@ pd_UploadFile() { failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. 
($fsize > $MaxUploadSizeInBytes)" return 1 fi - if [ -z "$RandomPdKey" ]; then + if [[ -z "$RandomPdKey" ]]; then printf "\\n" echo -e "${RED}| No Api Key provided in config.${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Upload failed." "No Api key." @@ -61,23 +61,23 @@ pd_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if pd_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -111,7 +111,7 @@ pd_PostFile() { apikey="$RandomPdKey" response=$(tor_curl_upload --insecure -X PUT \ -u :"$apikey" "${PostUrlHost}" -T "${filepath}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"apikey: ${apikey}"$'\n'"${response}" fi if grep -Eqi '"success":false,"value":"ip_banned"' <<< "$response"; then @@ -141,7 +141,7 @@ pd_PostFile() { return 0 else err=$(grep -oPi '(?<="success":false,"value":").*?(?=".*$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Error: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Error: $err" diff --git a/hosts/up_quax.sh b/hosts/up_quax.sh index 9705d2d..2e1ed02 100644 --- a/hosts/up_quax.sh +++ b/hosts/up_quax.sh @@ -54,23 +54,23 @@ qx_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if qx_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -104,7 +104,7 @@ qx_PostFile() { -H "expiry=-1" \ -F "files[]=@${arrFiles[@]}" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"success": true,' <<< "${response}" ; then @@ -118,7 +118,7 @@ qx_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_ranoz.sh b/hosts/up_ranoz.sh old mode 100755 new mode 100644 index dd93f6e..0af9329 --- a/hosts/up_ranoz.sh +++ b/hosts/up_ranoz.sh @@ -54,23 +54,23 @@ rz_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if rz_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -131,7 +131,7 @@ rz_PostFile() { -d "{ \ \"filename\": \"$tmpfilename\", \ \"size\": $fsize}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_ticket" "post_url: ${PostUrlHost}"$'\n'"data: ${filepath}, ${fsize}"$'\n'"${response}" fi if grep -Eqi '"upload_url":"https://' <<< "$response" ; then @@ -140,7 +140,7 @@ rz_PostFile() { echo -e "${GREEN}| Upload url obtained...${NC}" else err=$(grep -oPi '(?<="errors":\[\{"message":").*?(?=".*$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload request failed. Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" @@ -156,7 +156,7 @@ rz_PostFile() { "${PostUrlHost}" \ --upload-file "$filepath" \ -H "Content-Length: $fsize") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'HTTP/.* 200' <<< "${response}" ; then @@ -172,7 +172,7 @@ rz_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/.*).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_sendnow.sh b/hosts/up_sendnow.sh old mode 100644 new mode 100755 index c352bc2..fbfacfa --- a/hosts/up_sendnow.sh +++ b/hosts/up_sendnow.sh @@ -54,23 +54,23 @@ snow_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if snow_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -99,11 +99,11 @@ snow_PostFile() { tor_identity="${RANDOM}" trap "rm -f ${UploadTicket}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s 'https://send.now/upload') - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_fetch" "${response}" fi if grep -Eqi "Your IP has been banned|you are banned" <<< "$response"; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Failed to upload file: Ip blocked or banned${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Ip blocked or banned" @@ -116,7 +116,7 @@ snow_PostFile() { echo -e "${GREEN}| Node found${NC}" break else - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Failed to upload file: unable to find a good Tor node${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Unable to find a good Tor node" @@ -148,13 +148,13 @@ snow_PostFile() { -F "keepalive=1" \ -F "file_0=@$filepath" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi '"file_status":"OK"' <<< "${response}" ; then hash=$(grep -oPi '(?<="file_code":").*?(?=".*$)' <<< "$response") filesize=$(GetFileSize "$filepath" "false") - downloadLink="https://isupload.com/${hash}" + downloadLink="https://send.now/${hash}" echo -e "${GREEN}| Upload Success${NC}" echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}" echo -e "| Link: ${YELLOW}${downloadLink}${NC}" @@ -162,7 +162,7 @@ snow_PostFile() { return 0 else err=$(grep -oPi '(?<="file_status":").*?(?=")' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. 
Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" diff --git a/hosts/up_shareonline.sh b/hosts/up_shareonline.sh index 1d6e551..aac963c 100644 --- a/hosts/up_shareonline.sh +++ b/hosts/up_shareonline.sh @@ -54,23 +54,23 @@ so_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if so_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -104,7 +104,7 @@ so_PostFile() { -F "file[]=@${arrFiles[@]}" \ -F "upload=Upload" \ "${PostUrlHost}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}(${index})_upload" "post_url: ${PostUrlHost}"$'\n'"${response}" fi if grep -Eqi 'URL: 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -321,15 +321,15 @@ upee_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -341,21 +341,21 @@ upee_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -367,7 +367,7 @@ upee_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/uploadev.sh b/hosts/uploadev.sh index d338362..b292bdd 100644 --- a/hosts/uploadev.sh +++ b/hosts/uploadev.sh @@ -43,24 +43,24 @@ upev_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if upev_FetchFileInfo $finalAttempt && upev_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -91,16 +91,16 @@ upev_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ -w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \ "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "upev_fetch$i" "${response}" fi if [[ -z $response ]] ; then rm -f "${upev_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -140,11 +140,11 @@ upev_FetchFileInfo() { 
post_referer=$(urlencode_literal_grouped_case_urlendingonly "$post_referer") else rm -f "${upev_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 @@ -154,11 +154,11 @@ upev_FetchFileInfo() { fi if [[ -z "$post_op" ]] || [[ -z "$post_id" ]] || [[ -z "$post_rand" ]] || [[ -z "$captcha_code" ]] ; then rm -f "${upev_cookie_jar}"; - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link [3]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [3]" "" fi return 1 @@ -181,16 +181,16 @@ upev_FetchFileInfo() { response=$(tor_curl_request --insecure -L -s -X POST \ -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ --data "$form_data" "$fixed_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "upev_post2_$i" "url: ${fixed_url}"$'\n'"form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${upev_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [4]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [4]" "" fi return 1 @@ -208,12 +208,12 @@ upev_FetchFileInfo() { return 1 fi if grep -Eqi 'you have to wait|seconds till next download' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${upev_cookie_jar}"; printf "\\n" echo -e "${RED}| Rate limited. [2].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -223,12 +223,12 @@ upev_FetchFileInfo() { fi fi if grep -Eqi 'Just a moment...' <<< "$response"; then - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${upev_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [3].${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -238,18 +238,18 @@ upev_FetchFileInfo() { fi fi download_url=$(grep -oPi '(?<=.*$)' <<< "$response") - if [ ! -z "$download_url" ]; then + if [[ ! 
-z "$download_url" ]]; then printf "\\n" echo -e "${GREEN}| Download url found.${NC}" download_url=$(urlencode_literal_grouped_case_urlendingonly "$download_url") break; else - if [ $i == $maxfetchretries ] ; then + if [[ $i == $maxfetchretries ]] ; then rm -f "${upev_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract download link [5]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [5]" "" fi return 1 @@ -266,16 +266,16 @@ upev_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${upev_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 GetRandomUA file_header=$(tor_curl_request --insecure -L --head -s "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "upev_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${upev_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [1]" "" fi return 1 @@ -285,12 +285,12 @@ upev_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/.* 200' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${upev_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file info [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract file info [2]" "" fi return 1 @@ -303,12 +303,12 @@ upev_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then rm -f "${upev_cookie_jar}"; printf "\\n" echo -e "${RED}| Failed to extract file size.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -319,15 +319,15 @@ upev_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -358,14 +358,14 @@ upev_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${upev_cookie_jar}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ @@ -376,7 +376,7 @@ upev_GetFile() { "$download_url" --continue-at - --output "$file_path" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L \ --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \ -b "${upev_cookie_jar}" -c "${upev_cookie_jar}" \ @@ -410,7 +410,7 @@ upev_GetFile() { fi fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -419,9 +419,9 @@ upev_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -433,15 +433,15 @@ upev_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -453,21 +453,21 @@ upev_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -479,7 +479,7 @@ upev_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/uploadflix.sh b/hosts/uploadflix.sh index 2bef6eb..33a798a 100644 --- a/hosts/uploadflix.sh +++ b/hosts/uploadflix.sh @@ -17,7 +17,7 @@ HostCode='uflix' HostNick='uploadflix' HostFuncPrefix='uflix' HostUrls='uploadflix.cc, uploadflix.org' -HostDomainRegex='^(http|https)://(.*\.)?uploadflix\.(com|cc|org)' +HostDomainRegex='^(http|https)://(.*\.)?uploadflix\.(com|cc|org)/' #! #! !! DO NOT UPDATE OR REMOVE !! #! This merges the Required HostAndDomainRegexes into mad.sh @@ -43,24 +43,24 @@ uflix_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if uflix_FetchFileInfo $finalAttempt && uflix_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -95,15 +95,15 @@ uflix_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "uflix_dwnpage$j" "${response}" fi if [[ -z $response ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e 
"${RED}| Failed to extract post link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -134,11 +134,11 @@ uflix_FetchFileInfo() { post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") break fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract post link (unknown).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -148,13 +148,13 @@ uflix_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=&method_premium=&adblock_detected=" response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "uflix_post" "form_data: ${form_data}"$'\n'"${response}" fi if [[ -z $response ]] ; then echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -178,7 +178,7 @@ uflix_FetchFileInfo() { if [[ -z $download_url ]] ; then echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -190,15 +190,15 @@ uflix_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "${download_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "uflix_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -207,11 +207,11 @@ uflix_FetchFileInfo() { fi fi if ! 
grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info (bad response).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -222,11 +222,11 @@ uflix_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -236,15 +236,15 @@ uflix_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" "" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -275,20 +275,20 @@ uflix_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -297,9 +297,9 @@ uflix_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
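_________________________________________________________________________
uflix_FetchFileInfo above follows the usual XFileSharing two-step: scrape the hidden form fields (op, id, rand, referer) out of the landing page with grep -oP lookarounds, POST them back to the same URL, then pull the generated link out of the response. A compressed sketch of that flow, assuming GNU grep with PCRE support; the URL and the final link pattern are placeholders, not uploadflix's real markup:

  #!/usr/bin/env bash
  page_url='https://example.test/abcd1234'          # placeholder download page
  page=$(curl -Ls "$page_url")

  # Extract one hidden <input> value by name, as the host modules do.
  get_hidden() { grep -oP "(?<=name=\"$1\" value=\").*?(?=\")" <<< "$page" | head -n1; }
  post_op=$(get_hidden op)
  post_id=$(get_hidden id)
  post_rand=$(get_hidden rand)

  form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&method_free=&method_premium="
  response=$(curl -Ls -X POST --data "$form_data" "$page_url")

  # Placeholder pattern for the generated direct link.
  grep -oP '(?<=href=")[^"]+\.(zip|rar|mp4)(?=")' <<< "$response" | head -n1
_________________________________________________________________________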
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -311,15 +311,15 @@ uflix_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -331,21 +331,21 @@ uflix_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -357,7 +357,7 @@ uflix_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/uploadhive.sh b/hosts/uploadhive.sh index 20c14db..896065a 100644 --- a/hosts/uploadhive.sh +++ b/hosts/uploadhive.sh @@ -1,6 +1,6 @@ #! Name: uploadhive.sh #! Author: kittykat -#! Version: 2025.01.06 +#! Version: 2025.02.17 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! 
@@ -43,24 +43,24 @@ uhive_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if uhive_FetchFileInfo $finalAttempt && uhive_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -86,15 +86,15 @@ uhive_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "uhive_dwnpage$j" "${response}" fi if [[ -z $response ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract post link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -118,27 +118,28 @@ uhive_FetchFileInfo() { post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") break fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract post link (unknown).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 fi done + echo -e "${GREEN}# Fetching file info…${NC}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=&method_premium=" response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "$remote_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "uhive_post" "${response}" fi if [[ -z $response ]] ; then echo -e "${RED}| Failed to extract download link [1]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [1]" "" fi return 1 @@ -157,87 +158,55 @@ uhive_FetchFileInfo() { failedRetryDownload "${remote_url}" "Error happened when generating Download Link" $(sanitize_html_tags "${err}") return 1 fi - if grep -Eqi '' <<< "$response" ; then + filename=$(grep -oPi '(?<=Filename: 
).*(?=.*$)' <<< "$response") + filename=${filename//[$'\t\r\n']} + fi + if grep -Eqi 'File Download Link Generated' <<< "$response" && grep -Eqi 'bytes\)
' <<< "$response" ; then + file_size_bytes=$(grep -oPi -m 1 '(?<= \().*?(?= bytes\)
.*$)' <<< "$response") + file_size_bytes=${file_size_bytes//[$'\t\r\n']} + fi + if [[ -z "$file_size_bytes" ]]; then + printf "\\n" + echo -e "${RED}| Failed to extract file info [3]${NC}" + warnAndRetryUnknownError=true + if [[ "${finalAttempt}" == "true" ]] ; then + failedRetryDownload "${remote_url}" "Failed to extract file info [3]" "" + fi + return 1 + fi + if grep -Eqi ')' <<< "$response") - filename=${download_url##*/} + elif grep -Eqi ')' <<< "$response") else echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "The file was not found. It could be deleted or expired" "" fi return 1 fi + echo -e "download_url: $download_url" if [[ -z $download_url ]] ; then echo -e "${RED}| Failed to extract download link [2]${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Failed to extract download link [2]" "" fi return 1 fi - echo -e "${GREEN}# Fetching file info…${NC}" - for ((j=1; j<=$maxfetchretries; j++)); do - printf " ." - CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - file_header=$(tor_curl_request --insecure --head -s -L --referer "$remote_url" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then - debugHtml "${remote_url##*/}" "uhive_head$j" "download_url: ${download_url}"$'\n'"${file_header}" - fi - if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then - printf "\\n" - echo -e "${RED}| Failed to extract file info.${NC}" - warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then - failedRetryDownload "${remote_url}" "" "" - fi - return 1 - else - continue - fi - fi - if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then - printf "\\n" - echo -e "${RED}| Failed to extract file info${NC}" - warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then - failedRetryDownload "${remote_url}" "" "" - fi - return 1 - else - continue - fi - fi - file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") - file_size_bytes=${file_size_bytes//[$'\t\r\n']} - if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then - printf "\\n" - echo -e "${RED}| Failed to extract file info.${NC}" - warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then - failedRetryDownload "${remote_url}" "" "" - fi - return 1 - else - continue - fi - fi - break #Good to go here - done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -268,20 +237,19 @@ uhive_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi GetRandomUA - tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -290,9 +258,9 @@ uhive_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -304,15 +272,15 @@ uhive_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -324,21 +292,21 @@ uhive_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -350,7 +318,7 @@ uhive_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/hosts/youdbox.sh b/hosts/youdbox.sh index 5a2db17..b8e524f 100644 --- a/hosts/youdbox.sh +++ b/hosts/youdbox.sh @@ -43,24 +43,24 @@ youd_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if youd_FetchFileInfo $finalAttempt && youd_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -93,15 +93,15 @@ youd_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -i -s "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "youd_dwnpage$j" "${response}" fi if [[ -z $response ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract post link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -125,11 +125,11 @@ youd_FetchFileInfo() { post_referer=$(grep -oP '(?<=input type="hidden" name="referer" value=").*(?=">)' <<< "$response") break fi - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract post link (unknown).${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -139,13 +139,13 @@ youd_FetchFileInfo() { trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 form_data="op=${post_op}&id=${post_id}&rand=${post_rand}&referer=${post_referer}&method_free=&method_premium=&adblock_detected=" response=$(tor_curl_request --insecure -L -s -X POST --data "$form_data" "${fixed_url}") - if [ "${DebugAllEnabled}" == "true" ] 
; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "youd_post" "${response}" fi if [[ -z $response ]] ; then echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -170,7 +170,7 @@ youd_FetchFileInfo() { if [[ -z $download_url ]] ; then echo -e "${RED}| Failed to extract download link.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -181,15 +181,15 @@ youd_FetchFileInfo() { CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 file_header=$(tor_curl_request --insecure -L --head -s --referer "${fixed_url}" "$download_url") - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "youd_head$j" "download_url: ${download_url}"$'\n'"${file_header}" fi if [[ -z $file_header ]] ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -198,11 +198,11 @@ youd_FetchFileInfo() { fi fi if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< $file_header ; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -213,11 +213,11 @@ youd_FetchFileInfo() { file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header") file_size_bytes=${file_size_bytes//[$'\t\r\n']} if [[ -z "$file_size_bytes" ]]; then - if [ $j == $maxfetchretries ] ; then + if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract file info.${NC}" warnAndRetryUnknownError=true - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 @@ -227,15 +227,15 @@ youd_FetchFileInfo() { fi break #Good to go here done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") printf "\\n" echo -e "${YELLOW}| File name:${NC}\t\"${filename}\"" - if [ -z $file_size_bytes ] ; then - if [ "${finalAttempt}" == "true" ] ; then + if [[ -z $file_size_bytes ]] ; then + if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "Filesize not found!" 
"" fi echo -e "${YELLOW}| Filesize not found… retry${NC}" @@ -266,19 +266,19 @@ youd_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L -G --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L -G "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -287,9 +287,9 @@ youd_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -301,15 +301,15 @@ youd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -321,21 +321,21 @@ youd_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." 
if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -347,7 +347,7 @@ youd_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 diff --git a/mad.sh b/mad.sh index 46e09cb..12ba72e 100644 --- a/mad.sh +++ b/mad.sh @@ -30,9 +30,19 @@ # # * Everyone who provided feedback and helped test.. and those who wish to remain anonymous -ScriptVersion=2025.02.14 +ScriptVersion=2025.02.18 #================================================= # Recent Additions +# 2025.02.18 - [uploadhive] Add handling of the new /cgi-bin/dl.cgi/ url tickets (WIP) +# (unfortunately, this is tied to the requesting ip, so downloads get "Wrong IP") +# 2025.02.18 - [up_oshi] Add Manage url as comment on uploads +# 2025.02.18 - [up_oshi / oshi] use /nossl/ url and http +# 2025.02.17 - [gofile] Add a random sleep if 429 response detected (too many requests) +# 2025.02.17 - [*ALL] Audit and update all single bracket operations +# 2025.02.17 - [filehaus] Fix downloading from fh +# 2025.02.15 - [uploadbay] Update urls regex for acceptable alternate +# 2025.02.15 - [up_sendnow] Add send.now as upload host +# 2025.02.15 - [sendnow] Fix handling of filenames with special characters in url # 2025.02.14 - [mad] Add helpful verbiage for user on MAD Randomized Extension upload urls # 2025.02.14 - [up_ranoz] Add help "[rand ext, rename to or use MAD v2025.02.13+]" to url # 2025.02.14 - [up_uploadhive] Add help "[rand ext, rename to or use MAD v2025.02.13+]" to url @@ -374,14 +384,14 @@ checkTor() { local torPort= for port in 9050 9150 ; do echo "" 2>/dev/null > /dev/tcp/${TorIp}/$port - if [ "$?" == "0" ] ; then + if [[ "$?" 
== "0" ]] ; then torPort=$port fi done printf "%d" $torPort } tor_curl_request() { - if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" else curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@" @@ -389,21 +399,21 @@ tor_curl_request() { } tor_curl_request_extended() { randomtimeout=$((30 + RANDOM % (60 - 30))) - if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" else curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@" fi } tor_curl_upload() { - if [ "${UseTorCurlImpersonate}" == "true" ]; then - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@" else "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --compressed --globoff "$@" fi else - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" else curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeoutUpload} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@" @@ -418,7 +428,7 @@ SetEnabledUploadHosts() { lstEnabledUploadHosts+="up_pixeldrain,up_quax,up_ranoz,up_skrepr,up_torup,up_turboonion,up_uploadee," lstEnabledUploadHosts+="up_uploadhive,up_uploadraja,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro," lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard," - lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe" + lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe,up_sendnow" elif [[ "$EnabledUploadHosts" == "online" ]] ; then 
lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bedrive,up_bowfile,up_depotkaz," lstEnabledUploadHosts+="up_familleflender,up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile," @@ -427,7 +437,7 @@ SetEnabledUploadHosts() { lstEnabledUploadHosts+="up_shareonline,up_skrepr,up_torup,up_turboonion,up_uploadee,up_uploadhive," lstEnabledUploadHosts+="up_uploadraja,up_yolobit,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro," lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard," - lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe" + lstEnabledUploadHosts+="up_gagneux,up_uwabaki,up_lainsafe,up_sendnow" fi } SetEnabledDownloadHosts() { @@ -457,7 +467,7 @@ GetRandomFiledotUser() { index=$(($RANDOM % $arrSize)) RandomFdotUser=${ar_fdUP[$index]} local tFdotUser=${RandomFdotUser%%\|*} - if [ -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" ]; then + if [[ -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" ]]; then lastModSeconds=$(date -r "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" +%s) currSeconds=$(date +%s) elapsedSeconds=$((currSeconds - lastModSeconds)) @@ -466,7 +476,7 @@ GetRandomFiledotUser() { rm -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" fi fi - if [ -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" ]; then + if [[ -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" ]]; then listFdotLimitReached=$(cat "${WorkDir}/.temp/_fdot-limitreached-accounts.txt") else listFdotLimitReached="" @@ -593,7 +603,7 @@ literalize_string() { GetFileSize() { local filepath=$1 local humanreadable=$2 - if [ -f "$filepath" ]; then + if [[ -f "$filepath" ]]; then if [[ "$humanreadable" == "true" ]]; then echo $(wc -c < "$filepath" | numfmt --to=iec) else @@ -616,28 +626,28 @@ LaunchTerminal() { if ! grep -Eqi "^(./|/)" <<< "$script_source" ; then script_source="${ScriptDir}/$script_source" fi - if [ -f $script_source ] ; then + if [[ -f $script_source ]] ; then printf "Spawing terminal for $script_source $selhost $selinfile\\n" - if [ "$OsType" == "Whonix-exo" ]; then - if [ "$selhost" == "allhosts" ]; then + if [[ "$OsType" == "Whonix-exo" ]]; then + if [[ "$selhost" == "allhosts" ]]; then exo-open --launch TerminalEmulator bash -c "$script_source $selinfile" >/dev/null 2>&1 else exo-open --launch TerminalEmulator bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1 fi - elif [ "$OsType" == "Linux-xterm" ]; then - if [ "$selhost" == "allhosts" ]; then + elif [[ "$OsType" == "Linux-xterm" ]]; then + if [[ "$selhost" == "allhosts" ]]; then xterm -e /bin/bash -c "$script_source $selinfile" >/dev/null 2>&1 else xterm -e /bin/bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1 fi - elif [ "$OsType" == "Linux-gnome" ]; then - if [ "$selhost" == "allhosts" ]; then + elif [[ "$OsType" == "Linux-gnome" ]]; then + if [[ "$selhost" == "allhosts" ]]; then gnome-terminal -- /bin/bash -c "$script_source $selinfile" >/dev/null 2>&1 else gnome-terminal -- /bin/bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1 fi else - if [ "$selhost" == "allhosts" ]; then + if [[ "$selhost" == "allhosts" ]]; then exo-open --launch TerminalEmulator /bin/bash -c "$script_source $selinfile" >/dev/null 2>&1 else exo-open --launch TerminalEmulator /bin/bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1 @@ -648,14 +658,14 @@ LaunchTerminal() { ReloadScript() { script_source="$0" passedArgs=$@ - if [ -z "$passedArgs" ] && [ ! -f ${InputFile} ] ; then + if [[ -z "$passedArgs" ]] && [[ ! 
-f "${InputFile}" ]] ; then InputFile="${WorkDir}/${InputFile}" fi if ! grep -Eqi "^(./|/)" <<< "$script_source"; then script_source="${ScriptDir}/$script_source" fi - if [ -f $script_source ]; then - if [ ! -z "$passedArgs" ] ; then + if [[ -f $script_source ]]; then + if [[ ! -z "$passedArgs" ]] ; then exec "$script_source" $@ else exec "$script_source" "${InputFile}" @@ -677,7 +687,7 @@ debugHtml() { local fileCntName="$1" local functionName="$2" local downloadHtml=$3 - if [ ! -d "${WorkDir}/.debug" ] ; then + if [[ ! -d "${WorkDir}/.debug" ]] ; then mkdir -p "${WorkDir}/.debug" fi echo -e "${downloadHtml}" >> "${WorkDir}/.debug/`date +%y%m%d-%H%M%S`_${fileCntName}_${functionName}.txt" @@ -700,8 +710,8 @@ successDownload() { mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [OK] url: ${url}, name: ${filename}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #OK# ${filename}>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #OK# ${filename}>g" "${InputFile}" #direct url http @@ -714,7 +724,7 @@ successDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi PostSuccessfulDownload "${url}" "${filepath}" "${filename}" "${folder}" "${filesize}" @@ -728,8 +738,8 @@ successDownloadExists() { mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [EXISTS] url: ${url}, name: ${filename}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url http @@ -742,7 +752,7 @@ successDownloadExists() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -750,14 +760,14 @@ failedRetryDownload() { local url="${1//[$'\t\r\n']}" local message=$(literalize_string "$2") local message2=$(literalize_string "$3") - if [ ! -z "$message2" ]; then + if [[ ! -z "$message2" ]]; then message="$message, $message2" fi mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [RETRY] ${url}, ${message}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! 
"$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #RETRY# ${message}>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url http @@ -770,7 +780,7 @@ failedRetryDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi PostFailRetryDownload "${url}" "${message}" "${message2}" @@ -779,14 +789,14 @@ failedDownload() { local url="${1//[$'\t\r\n']}" local message=$(literalize_string "$2") local message2=$(literalize_string "$3") - if [ ! -z "$message2" ]; then + if [[ ! -z "$message2" ]]; then message="$message, $message2" fi mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [FAIL] ${url}, ${message}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #FAIL# $message>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #RETRY# $message>g" "${InputFile}" #direct url http @@ -799,7 +809,7 @@ failedDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi PostFailedDownload "${url}" "${message}" "${message2}" @@ -807,7 +817,7 @@ failedDownload() { removedDownload() { local url="${1//[$'\t\r\n']}" local message=$(literalize_string "$2") - if [ ! -z $message ]; then + if [[ ! -z $message ]]; then message=" $message" fi mkdir -p "${WorkDir}/downloads" @@ -816,8 +826,8 @@ removedDownload() { mkdir -p "${WorkDir}/data" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [REMOVED] url: ${url}, message:$message" >> "${WorkDir}/data/downloads_completed.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #REMOVED#${message}>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url http @@ -830,7 +840,7 @@ removedDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -841,7 +851,7 @@ skipUrlDownload() { local flockpath=$4 flockpathcontents=$(cat $flockpath) CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -850,7 +860,7 @@ renameDuplicateDownload() { local filename=$(literalize_string "$2") local cTimestamp=$(date +"%Y%m%d%H%M%S%3N") local newfilename="${cTimestamp}_renamed_${filename}" - if [ ! "$UrlOnly" == "true" ]; then + if [[ ! 
"$UrlOnly" == "true" ]]; then sed -i -e "s>^$url.*>${url}|${newfilename}>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url http @@ -860,10 +870,10 @@ renameDuplicateDownload() { dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [RENAME] ${url}, ${filename}, (new filename: ${newfilename})" >> "${WorkDir}/downloads/results.txt" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi - if [ "$UrlOnly" == "true" ]; then + if [[ "$UrlOnly" == "true" ]]; then echo -e "${RED}| [FAILED]: A file already exists with the specified name. Rename it and try again.${NC}" echo -e "| Filename: \"./downloads/$filename\"" fi @@ -872,7 +882,7 @@ updateUrlDownload() { local url=$(literalize_string "${1//[$'\t\r\n']}") local newurl="$2" echo -e "1${PINK}$newurl${NC}" - if [ ! "$UrlOnly" == "true" ]; then + if [[ ! "$UrlOnly" == "true" ]]; then sed -i -e "s%^$url.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #processed url sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed) fi @@ -880,7 +890,7 @@ updateUrlDownload() { dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [UPDATE] ${url} (new url: ${newfilename})" >> "${WorkDir}/downloads/results.txt" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -892,8 +902,8 @@ droppedSizeBadDownload() { mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo "$dateStamp [DROP/BADSIZE] ${url}, ${filename}, size: ${filesize}, advertisedsize: ${altsize}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url http @@ -906,7 +916,7 @@ droppedSizeBadDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -915,8 +925,8 @@ noCdnDownload() { mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [NOCDN] ${url}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! 
"$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url http @@ -929,7 +939,7 @@ noCdnDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -941,8 +951,8 @@ passwordProtectedDownload() { mkdir -p "${WorkDir}/data" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [PASSWORD] url: ${url}, name: ${filename}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/data/downloads_completed.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^${url}.*>#& #PASSWORD#>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=${url}.*>#& #PASSWORD#>g" "${InputFile}" #direct url http @@ -955,21 +965,21 @@ passwordProtectedDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } badUrlDownload() { local url="${1//[$'\t\r\n']}" local message=$(literalize_string "$2") - if [ ! -z $message ]; then + if [[ ! -z "$message" ]]; then message=" $message" fi mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [BADURL] ${url}${message}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^${url}.*>#& #BAD-URL#${message}>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=${url}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url http @@ -982,7 +992,7 @@ badUrlDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -993,8 +1003,8 @@ filenameOrSizeNotExistDownload() { mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [FAIL] ${url}, No File / Bad size (filename: ${filename}, ${message})" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^${url}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$${url}.*>#& #RETRY# ${filename} (No filename or size. 
Try again later)>g" "${InputFile}" #direct url http @@ -1007,7 +1017,7 @@ filenameOrSizeNotExistDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -1019,8 +1029,8 @@ fileExistsButSizeTooLargeDownload() { mkdir -p "${WorkDir}/downloads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "$dateStamp [EXISTS/TOOLARGE] ${url}, ${filename}, sizeOnDisk: ${filesize} downSize: ${downloadFilesize}" >> "${WorkDir}/downloads/results.txt" - if [ ! "$UrlOnly" == "true" ]; then - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ ! "$UrlOnly" == "true" ]]; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then sed -i -e "s>^$url.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #processed url sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed) sed -i -e "s>^direct=$url.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url http @@ -1033,7 +1043,7 @@ fileExistsButSizeTooLargeDownload() { fi fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]]; then rm -f "${WorkDir}/.flocks/${CLEANSTRING}" fi } @@ -1076,8 +1086,8 @@ successUpload() { dateStamp=$(date '+%Y/%m/%d %H:%M:%S') mkdir -p "${WorkDir}/data" echo -e "$dateStamp [OK] file: ${filename}, host: ${HostCode}, dl: ${downloadLink}, ticket: ${cTicket}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/data/uploads_completed.txt" - if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then - if [ ! -z "$message" ]; then + if [[ ! -z "$InputFile" ]] && [[ ! -z "$pLine" ]]; then + if [[ ! -z "$message" ]]; then sed -i -e "s>^${pLine}.*>#& #OK# ${downloadLink//&/\\&} ${message}>g" "${InputFile}" #processed line else sed -i -e "s>^${pLine}.*>#& #OK# ${downloadLink//&/\\&}>g" "${InputFile}" #processed line @@ -1087,7 +1097,7 @@ successUpload() { mkdir -p "${WorkDir}/uploads" echo -e "$dateStamp [OK] file: ${filename}, host: ${HostCode}, dl: ${downloadLink}, ticket: ${cTicket}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt" UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}" - if [ -f "${UploadTicket}" ]; then + if [[ -f "${UploadTicket}" ]]; then rm -f "${UploadTicket}" fi PostSuccessfulUpload "${filepath}" "${HostCode}" "${filename}" "${filesize}" "${downloadLink}" @@ -1099,17 +1109,17 @@ successUploadExists() { local message=$(literalize_string "$4") local message2=$(literalize_string "$5") local filename="${filepath##*/}" - if [ ! -z "$message2" ]; then + if [[ ! -z "$message2" ]]; then message="$message, $message2" fi mkdir -p "${WorkDir}/uploads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "[EXISTS] ${filename}, ${HostCode}, ${downloadLink}" >> "${WorkDir}/uploads/results.txt" - if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then + if [[ ! -z "$InputFile" ]] && [[ ! 
-z "$pLine" ]]; then sed -i -e "s>^${pLine}.*>#& #OK# (Upload exists) ${message//&/\\&}>g" "${InputFile}" #processed line fi UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}" - if [ -f "${UploadTicket}" ]; then + if [[ -f "${UploadTicket}" ]]; then rm -f "${UploadTicket}" fi } @@ -1120,20 +1130,20 @@ failedUpload() { local message=$(literalize_string "$4") local message2=$(literalize_string "$5") local filename="${filepath##*/}" - if [ ! -z "$message2" ]; then + if [[ ! -z "$message2" ]]; then message="$message, $message2" fi mkdir -p "${WorkDir}/uploads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "[FAIL] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt" - if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then + if [[ ! -z "$InputFile" ]] && [[ ! -z "$pLine" ]]; then sed -i -e "s>^${pLine}.*>#& #FAIL# ${message//&/\\&}>g" "${InputFile}" #processed line fi dateStamp=$(date '+%Y/%m/%d %H:%M:%S') mkdir -p "${WorkDir}/uploads" echo -e "$dateStamp [FAIL] file: ${filename}, host: ${HostCode}, msg: ${message}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt" UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}" - if [ -f "${UploadTicket}" ]; then + if [[ -f "${UploadTicket}" ]]; then rm -f "${UploadTicket}" fi PostFailedUpload "$pline" "${filepath}" "${HostCode}" "${message1}" "${message2}" @@ -1144,20 +1154,20 @@ failedRetryUpload() { local HostCode=$(literalize_string "$3") local message=$(literalize_string "$4") local message2=$(literalize_string "$5") - if [ ! -z "$message2" ]; then + if [[ ! -z "$message2" ]]; then message="$message, $message2" fi mkdir -p "${WorkDir}/uploads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "[RETRY] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt" - if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then + if [[ ! -z "$InputFile" ]] && [[ ! -z "$pLine" ]]; then sed -i -e "s>^${pLine}.*>#& #RETRY# ${message//&/\\&}>g" "${InputFile}" #processed line fi dateStamp=$(date '+%Y/%m/%d %H:%M:%S') mkdir -p "${WorkDir}/uploads" echo -e "$dateStamp [RETRY] file: ${filename}, host: ${HostCode}, msg: ${message}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt" UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}" - if [ -f "${UploadTicket}" ]; then + if [[ -f "${UploadTicket}" ]]; then rm -f "${UploadTicket}" fi PostFailRetryUpload "${url}" "${message}" "${message2}" @@ -1169,33 +1179,33 @@ skipFailedUpload() { local message=$(literalize_string "$4") local message2=$(literalize_string "$5") local filename="${filepath##*/}" - if [ ! -z "$message2" ]; then + if [[ ! -z "$message2" ]]; then message="$message, $message2" fi mkdir -p "${WorkDir}/uploads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "[SKIP/FAIL] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt" - if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then + if [[ ! -z "$InputFile" ]] && [[ ! 
-z "$pLine" ]]; then sed -i -e "s>^${pLine}.*>#& #FAIL# (Skip) ${message//&/\\&}>g" "${InputFile}" #processed line fi dateStamp=$(date '+%Y/%m/%d %H:%M:%S') mkdir -p "${WorkDir}/uploads" echo -e "$dateStamp [FAIL] file: ${filename}, host: ${HostCode}, msg: ${message}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt" UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}" - if [ -f "${UploadTicket}" ]; then + if [[ -f "${UploadTicket}" ]]; then rm -f "${UploadTicket}" fi } uploadBadInputLine() { local pLine="${1//[$'\t\r\n']}" local message=$(literalize_string "$2") - if [ ! -z $message ]; then + if [[ ! -z "$message" ]]; then message=" $message" fi mkdir -p "${WorkDir}/uploads" dateStamp=$(date '+%Y/%m/%d %H:%M:%S') echo -e "[BADLINE] ${pLine}${message}" >> "${WorkDir}/uploads/results.txt" - if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then + if [[ ! -z "$InputFile" ]] && [[ ! -z "$pLine" ]]; then sed -i -e "s>^${pLine}.*>#& #RETRY# (Bad Line)${message//&/\\&}>g" "${InputFile}" #processed line fi dateStamp=$(date '+%Y/%m/%d %H:%M:%S') @@ -1240,14 +1250,14 @@ GetRandomUA() { RandomUA=${ar_UA[$index]} } LoadMadDownloadHosts() { - if [ -d "${ScriptDir}/hosts/" ]; then + if [[ -d "${ScriptDir}/hosts/" ]]; then echo -e "${GREEN}Loading Download Hosts...${NC}" SetEnabledDownloadHosts tHostFuncPrefixes="" cnthostsloaded=0 for fil in "${ScriptDir}"/hosts/*.sh ; do - if [ -f "$fil" ]; then + if [[ -f "$fil" ]]; then if [[ "$EnabledDownloadHosts" == "recommended" ]] || [[ "$EnabledDownloadHosts" == "online" ]] ; then readarray -d "," -t arrEnabledHosts <<< "${lstEnabledDownloadHosts}" isfound=false @@ -1277,7 +1287,7 @@ LoadMadDownloadHosts() { source "$fil" cnthostsloaded=$((cnthostsloaded + 1)) tHostFuncPrefixes="${tHostFuncPrefixes}:${_hostfuncprefix}:" - if [ "$VerboseLoading" == "true" ]; then + if [[ "$VerboseLoading" == "true" ]]; then echo -e "[${GREEN}OK${NC}] ${BLUE}${tfilename}${NC}" fi fi @@ -1291,14 +1301,14 @@ LoadMadDownloadHosts() { fi } LoadMadUploadHosts() { - if [ -d "${ScriptDir}/hosts/" ]; then + if [[ -d "${ScriptDir}/hosts/" ]]; then echo -e "${GREEN}Loading Upload Hosts...${NC}" SetEnabledUploadHosts tHostFuncPrefixes="" cnthostsloaded=0 for fil in "${ScriptDir}"/hosts/up_*.sh ; do - if [ -f "$fil" ]; then + if [[ -f "$fil" ]]; then if [[ "$EnabledUploadHosts" == "recommended" ]] || [[ "$EnabledUploadHosts" == "online" ]] ; then readarray -d "," -t arrEnabledHosts <<< "${lstEnabledUploadHosts}" isfound=false @@ -1325,7 +1335,7 @@ LoadMadUploadHosts() { source "$fil" cnthostsloaded=$((cnthostsloaded + 1)) tHostFuncPrefixes="${tHostFuncPrefixes}:${_hostfuncprefix}:" - if [ "$VerboseLoading" == "true" ]; then + if [[ "$VerboseLoading" == "true" ]]; then echo -e "[${GREEN}OK${NC}] ${BLUE}${tfilename}${NC}" fi fi @@ -1339,7 +1349,7 @@ LoadMadUploadHosts() { fi } LoadMadPlugins() { - if [ ! -z "${LoadPlugins}" ]; then + if [[ ! -z "${LoadPlugins}" ]]; then echo -e "${GREEN}Loading Plugins...${NC}" loadedPluginFuncsUsed="" cntplugsloaded=0 @@ -1347,16 +1357,16 @@ LoadMadPlugins() { for plugin in "${arrPlugins[@]}"; do plg="${plugin//[$'\t\r\n']}" - if [ ! -z "${plg}" ] ; then + if [[ ! 
-z "${plg}" ]] ; then plg=$(literalize_string "$plugin") - if [ -f "${ScriptDir}/plugins/${plg}" ]; then + if [[ -f "${ScriptDir}/plugins/${plg}" ]]; then currPluginFuncsUsed=$(grep '()' "${ScriptDir}/plugins/${plg}" | awk '!/#/ {print $1}') currPluginFuncsUsed="${currPluginFuncsUsed//$'\n'/, }" readarray -d ", " -t arrCurrPluginFuncsUsed <<< "$currPluginFuncsUsed" local isDupeFunc=false for cplg in "${arrCurrPluginFuncsUsed[@]}"; do - if [ ! -z "${loadedPluginFuncsUsed}" ] && [[ *"${cplg}"* == "$loadedPluginFuncsUsed" ]] ; then + if [[ ! -z "${loadedPluginFuncsUsed}" ]] && [[ *"${cplg}"* == "$loadedPluginFuncsUsed" ]] ; then isDupeFunc=true echo -e "[${RED}FAIL${NC}] ${BLUE}${plg}${NC} (dupe hook detected)${NC}" echo -e " Function already overriden: ${YELLOW}$cplg${NC}" @@ -1387,10 +1397,10 @@ LoadMadPlugins() { fi fi done - if [ "$isDupeFunc" == "false" ]; then + if [[ "$isDupeFunc" == "false" ]]; then source "${ScriptDir}/plugins/${plg}" cntplugsloaded=$((cntplugsloaded + 1)) - if [ "$VerboseLoading" == "true" ]; then + if [[ "$VerboseLoading" == "true" ]]; then echo -e "[${GREEN}OK${NC}] ${BLUE}${plg}${NC}" echo -e " --> Hooks: ${PINK}$currPluginFuncsUsed${NC}" fi @@ -1453,16 +1463,16 @@ install_curl_impersonate_lwthiker_orig() { latestBinaryUrl="" for ((j=1; j<=4; j++)); do response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest) - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "github" "lbf_inst_curlimp$j" "$response" fi - if [ ! -z "$response" ]; then + if [[ ! -z "$response" ]]; then latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response") latestBinaryDate=$(grep -oPi -m 1 '(?<= /dev/null ; then @@ -2046,7 +2056,7 @@ madStatus() { fi lineCnt=$((lineCnt+1)) done < ${InputFile} - if [ -d "${WorkDir}/downloads" ]; then + if [[ -d "${WorkDir}/downloads" ]]; then size_complete=$(du -hs --apparent-size "${WorkDir}/downloads" | awk '{print $1}') else size_complete="" @@ -2055,7 +2065,7 @@ madStatus() { echo -e "------------------------------------------------" echo -e "[Todo] (${BLUE}$cntTodo${NC}) - URLs to process" echo -e "$listTodo" - if [ ! -z $size_complete ]; then + if [[ ! -z "$size_complete" ]]; then echo -e "[OK] (${GREEN}$cntOk${NC}) - Downloads completed [size: $size_complete]" else echo -e "[OK] (${GREEN}$cntOk${NC}) - Downloads completed" @@ -2099,7 +2109,7 @@ madStatus() { cntOk=$((cntOk+1)) fi done < ${InputFile} - if [ -d "${WorkDir}/downloads" ]; then + if [[ -d "${WorkDir}/downloads" ]]; then size_complete=$(du -hs --apparent-size "${WorkDir}/downloads" | awk '{print $1}') else size_complete="" @@ -2108,7 +2118,7 @@ madStatus() { echo -e "------------------------------------------------" echo -e "[Todo] ($cntTodo${NC}) - URLs to process" echo -e "$listTodo" - if [ ! -z $size_complete ]; then + if [[ ! -z "$size_complete" ]]; then echo -e "[OK] (${GREEN}$cntOk${NC}) - URLs completed (commented out) [size: $size_complete]" else echo -e "[OK] (${GREEN}$cntOk${NC}) - URLs completed (commented out)" @@ -2119,17 +2129,17 @@ madStatus() { } madStatusUploads() { local InputFile="$1" - if [ "$arg1" == "status" ] ; then + if [[ "$arg1" == "status" ]] ; then clear fi echo -e "${BLD}" echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:" echo -e ":${NC} ${GREEN}MAD${PINK} Status${NC} : Report status of uploads in file${PINK}${BLD} :" echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n" - if [ ! 
-f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then printf "Unable to read file %s! [ms]\\n" "${InputFile}" exit 1 fi @@ -2142,7 +2152,7 @@ madStatusUploads() { cntRetry=0 listOther="" cntOther=0 - if [ "${AutoCommentOnCompletion}" == "true" ] ; then + if [[ "${AutoCommentOnCompletion}" == "true" ]] ; then while IFS= read -r line || [[ -n $line ]]; do if grep -Eqi '^$|^ ' <<< "${line}" > /dev/null || grep -Eqvi '\|' <<< "${line}" > /dev/null; then @@ -2228,7 +2238,7 @@ madHostDetails() { echo -e "" for fil in "${ScriptDir}"/hosts/*.sh ; do - if [ -f "$fil" ]; then + if [[ -f "$fil" ]]; then tfilename="${fil##*/}" if [[ "$tfilename" == "up_"* ]] ; then continue @@ -2256,7 +2266,7 @@ madHostDetails() { echo -e "" for fil in "${ScriptDir}"/hosts/up_*.sh ; do - if [ -f "$fil" ]; then + if [[ -f "$fil" ]]; then tfilename="${fil##*/}" local _hostcode=$(grep -oPi -m 1 '(?<=^HostCode='"'"').*?(?='"'"')' "$fil") local _hostnick=$(grep -oPi -m 1 '(?<=^HostNick='"'"').*?(?='"'"')' "$fil") @@ -2285,7 +2295,7 @@ madPluginDetails() { echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n" for fil in "${ScriptDir}"/plugins/*.sh ; do - if [ -f "$fil" ]; then + if [[ -f "$fil" ]]; then tfilename="${fil##*/}" local _description=$(grep -oPzi '(?<=Desc: ).*\n.*(?=#.*Usage:)' "$fil" | tr '\0' ' ') local _currPluginFuncsUsed=$(grep '()' "${fil}" | awk '!/#/ {print $1}') @@ -2302,10 +2312,10 @@ madPluginDetails() { } clipboard_monitor() { InputFile="$1" - if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then touch "${InputFile}" fi if ! hash xclip 2>/dev/null; then @@ -2354,7 +2364,7 @@ clipboard_monitor() { break fi done - if [ "$isHostMatchFound" == "true" ]; then + if [[ "$isHostMatchFound" == "true" ]]; then download_url=$(urlencode_literal_grouped_case_urlendingonly "$line") echo -e "${GREEN}$_hostnick url found:${NC} $line${NC}" echo -e "$line" >> ${InputFile} @@ -2644,8 +2654,8 @@ CheckDownloadExists () { if [[ -e "$cde_flockDownload" ]]; then fContents=$(cat $cde_flockDownload) fContents=${fContents//[$'\t\r\n']} - if [ "$AutoRenameDuplicateFilenames" == "true" ]; then - if [ "$fContents" == "${cde_remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ "$AutoRenameDuplicateFilenames" == "true" ]]; then + if [[ "$fContents" == "${cde_remote_url//[^a-zA-Z0-9]/}" ]]; then echo -e "${YELLOW}[SKIP]${NC} Lock exists for filename. Is it downloading in another terminal?${NC}" echo -e "${YELLOW}File: ${NC}${cde_filename}" echo -e "${YELLOW}Lock: ${NC}./.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock" @@ -2669,7 +2679,7 @@ CheckDownloadExists () { echo -e "${YELLOW}Lock: ${NC}./.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock" echo -e "URL: ${cde_remote_url}" printf "%s\\n" "================================================================================" - if [ ! "$fContents" == "${cde_remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ ! 
"$fContents" == "${cde_remote_url//[^a-zA-Z0-9]/}" ]]; then skipUrlDownload "${cde_remote_url}" "${cde_filename}" "./.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock" "$cde_flockDownload" fi fileAlreadyDone=true @@ -2681,8 +2691,8 @@ CheckDownloadExists () { if [[ "${existing_file_size}" -eq "${file_size_bytes}" ]]; then echo -e "${GREEN}File exists in downloads and the size matches the expected size.\nNo need to re-download.${NC}" mkdir -p "${WorkDir}/downloads" - if [ ! "${cde_MoveToFolder}" == "" ] ; then - if [ ! -d "${cde_completed_location}${cde_MoveToFolder}" ]; then + if [[ ! "${cde_MoveToFolder}" == "" ]] ; then + if [[ ! -d "${cde_completed_location}${cde_MoveToFolder}" ]]; then mkdir -p "${cde_completed_location}${cde_MoveToFolder}" fi mv "${cde_file_path}" "${cde_completed_location}${cde_MoveToFolder}/" @@ -2708,16 +2718,16 @@ CheckDownloadExists () { return 0 fi return 0 - elif [ ! -z "$file_size_bytes" ] && [[ "${existing_file_size}" -gt "${file_size_bytes}" ]]; then + elif [[ ! -z "$file_size_bytes" ]] && [[ "${existing_file_size}" -gt "${file_size_bytes}" ]]; then echo -e "${RED}ERROR: File exists in downloads folder but is larger than expected.${NC}\nThis could be due to several instances saving the same file, an old download using the same name, or host experiencing a temporary issue." fileExistsButSizeTooLargeDownload "${cde_remote_url}" "${cde_filename}" "${existing_file_size}" "${file_size_bytes}" exitDownloadError=true return 0 fi fi - if [ ! "${cde_MoveToFolder}" == "" ] ; then + if [[ ! "${cde_MoveToFolder}" == "" ]] ; then if [[ -e "${cde_completed_location}${cde_MoveToFolder}/${cde_filename}" ]] ; then - if [ "$AutoRenameDuplicateFilenames" == "true" ]; then + if [[ "$AutoRenameDuplicateFilenames" == "true" ]]; then printf "\\n%s\\n" "================================================================================" echo -e "${RED}❤${GREEN}[RENAME] Download exists and is complete. Renaming download filename.${NC}" echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}" @@ -2741,7 +2751,7 @@ CheckDownloadExists () { fi else if [[ -e "${cde_completed_location}${cde_filename}" ]] ; then - if [ "$AutoRenameDuplicateFilenames" == "true" ]; then + if [[ "$AutoRenameDuplicateFilenames" == "true" ]]; then printf "\\n%s\\n" "================================================================================" echo -e "${RED}❤${GREEN}[RENAME] Download exists and is complete. 
Renaming download filename.${NC}" echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}" @@ -2770,15 +2780,15 @@ CheckNoHtml() { local cde_remote_url=$1 local cde_filename=$2 local cde_file_path=$3 - if [ -f "${cde_file_path}" ] ; then + if [[ -f "${cde_file_path}" ]] ; then badHtml=$(grep -aoPi '^.*(||content-type|:error|not found|too many connections).*$' "${cde_file_path}") - if [ "$badHtml" == "" ]; then + if [[ "$badHtml" == "" ]]; then return 0 else - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then cp "$cde_file_path" "${WorkDir}/.debug/$cde_filename.htmldebug.txt" fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then echo -e "" printf "${YELLOW}HTML found:${NC} Details in ./.debug/_err.log" mkdir -p "${WorkDir}/.debug" @@ -2800,7 +2810,7 @@ CheckNoHtml() { CheckFileSize() { local cde_remote_url=$1 local cde_file_size_bytes=$2 - if [ -z $cde_file_size_bytes ]; then + if [[ -z $cde_file_size_bytes ]]; then return 1 fi if (( cde_file_size_bytes < MinimumAllowedFilesize )); then @@ -2820,11 +2830,11 @@ ProcessCompletedDownload() { local cde_completed_location=$6 local cde_inflight_path=$7 completed_location="${WorkDir}/downloads/" - if [ ! -d "${WorkDir}/downloads" ]; then + if [[ ! -d "${WorkDir}/downloads" ]]; then mkdir -p "${WorkDir}/downloads" fi - if [ ! "${cde_MoveToFolder}" == "" ] ; then - if [ ! -d "${cde_completed_location}${cde_MoveToFolder}" ]; then + if [[ ! "${cde_MoveToFolder}" == "" ]] ; then + if [[ ! -d "${cde_completed_location}${cde_MoveToFolder}" ]]; then mkdir -p "${cde_completed_location}${cde_MoveToFolder}" fi mv "${cde_inflight_path}" "${cde_completed_location}${cde_MoveToFolder}/" @@ -2862,26 +2872,26 @@ MadUploadFromFileTxt() { printf "Ensure Tor is setup and listening on a port between 9050 and 9150. 
Exiting...\\n" exit 1 fi - if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" else printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" fi printf "ConnectTimeout: ${GREEN}$ConnectTimeout${NC}, UploadRetries: ${GREEN}$MaxUploadRetries${NC}, " - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then printf "RateMonitor: ${GREEN}${RateMonitorEnabled}${NC}" else printf "RateMonitor: ${GREY}${RateMonitorEnabled}${NC}" fi printf "\\n" echo -e "DefaultUploadHosts: ${BLUE}$DefaultUploadHosts${NC}" - if [ "${DebugAllEnabled}" == "true" ] || [ "${DebugPluginsEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] || [[ "${DebugPluginsEnabled}" == "true" ]] ; then bDebugMsgPrintCnt=0 - if [ "${DebugAllEnabled}" == "true" ]; then + if [[ "${DebugAllEnabled}" == "true" ]]; then printf "DebugHosts: ${BLUE}${DebugAllEnabled}${NC}" bDebugMsgPrintCnt=$((bDebugMsgPrintCnt + 1)) fi - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then if ((bDebugMsgPrintCnt > 0)) ; then printf ", " fi @@ -2891,7 +2901,7 @@ MadUploadFromFileTxt() { fi echo -e "" HashFilelistTxt=$( sha1sum ${InputFile} | awk '{print $1}' ) - if [ "$VerboseLoading" == "true" ]; then + if [[ "$VerboseLoading" == "true" ]]; then printf "SHA1: %s\\n\\n" "${HashFilelistTxt}" fi mainLoopControl=true @@ -2902,7 +2912,7 @@ MadUploadFromFileTxt() { qChkLineCount=$(grep -Evi '^#|^$|#OK#|#FAIL#|#RETRY#)' "${InputFile}" | wc -l | awk '{ print $1 }') if ((qChkLineCount <= 0)) ; then if DoneProcessingAllUrls "${InputFile}" ; then - if [ "${AutoShowMadStatus}" == "true" ] ; then + if [[ "${AutoShowMadStatus}" == "true" ]] ; then echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D" madStatusUploads "${InputFile}" fi @@ -2916,12 +2926,12 @@ MadUploadFromFileTxt() { sed 's/^[[:space:]]*// ; s/[[:space:]]*$//' "${InputFile}" | while IFS= read -r line do - if [ -f "${InputFile}" ] ; then + if [[ -f "${InputFile}" ]] ; then currentHashFilelistTxt=$( sha1sum "${InputFile}" | awk '{print $1}' ) else currentHashFilelistTxt=$( sha1sum ${InputFile} | awk '{print $1}' ) fi - if ((qChkLineCount > 0)) && [ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ]; then + if ((qChkLineCount > 0)) && [[ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ]]; then printf "\\n%s\\n" "--------------------------------------------" printf "[${PINK}Reload${NC}] ${InputFile} was modified.\\n" printf "%s\\n" "--------------------------------------------" @@ -2946,21 +2956,21 @@ MadUploadFromFileTxt() { else continue fi - if [ ! -f "$upfile_filepath" ]; then + if [[ ! -f "$upfile_filepath" ]]; then upfile_filepath="${WorkDir}/uploads/$upfile_filepath" fi upfile_filename="${upfile_filepath##*\/}" upfile_fsize=$(GetFileSize "$upfile_filepath" "false") - if [ ! -f "$upfile_filepath" ] || ((upfile_fsize <= 0)) ; then + if [[ ! 
-f "$upfile_filepath" ]] || ((upfile_fsize <= 0)) ; then uploadBadInputLine "$line" "Bad input line (file: $upfile_filepath, size: $upfile_fsize)" continue fi - if [ -z "$upfile_hostcode" ] || [ -z "$upfile_filepath" ]; then + if [[ -z "$upfile_hostcode" ]] || [[ -z "$upfile_filepath" ]]; then uploadBadInputLine "$line" "Bad input line (check filename and hostcode are valid)" continue fi UploadTicket="${WorkDir}/.flocks/upload_${upfile_hostcode}_${upfile_filepath//[^a-zA-Z0-9]/}" - if [ -f "$UploadTicket" ]; then + if [[ -f "$UploadTicket" ]]; then echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/upload_${upfile_hostcode}_${upfile_filepath//[^a-zA-Z0-9]/}" fileCount=$((fileCount + 1)) continue @@ -2993,7 +3003,7 @@ MadUploadFromFileTxt() { done #loop through the file line by line qChkLineCount=$(grep -Evi '^#|^$|#OK#|#FAIL#|#RETRY#)' "${InputFile}" | wc -l | awk '{ print $1 }') if ((qChkLineCount > 0)) ; then - if [ "$LoopThroughFileUntilComplete" == "false" ]; then + if [[ "$LoopThroughFileUntilComplete" == "false" ]]; then echo -e "${NC}" echo -e "${YELLOW}Unprocessed / Skipped File / HostCode(s) Found:${NC}" echo -e "Most likely another terminal is uploading it, or a flock exists...${NC}" @@ -3037,26 +3047,26 @@ MadUploadFilesInUploadsFolder() { printf "Ensure Tor is setup and listening on a port between 9050 and 9150. Exiting...\\n" exit 1 fi - if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" else printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" fi printf "ConnectTimeout: ${GREEN}$ConnectTimeout${NC}, UploadRetries: ${GREEN}$MaxUploadRetries${NC}, " - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then printf "RateMonitor: ${GREEN}${RateMonitorEnabled}${NC}" else printf "RateMonitor: ${GREY}${RateMonitorEnabled}${NC}" fi printf "\\n" echo -e "DefaultUploadHosts: ${BLUE}$DefaultUploadHosts${NC}" - if [ "${DebugAllEnabled}" == "true" ] || [ "${DebugPluginsEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] || [[ "${DebugPluginsEnabled}" == "true" ]] ; then bDebugMsgPrintCnt=0 - if [ "${DebugAllEnabled}" == "true" ]; then + if [[ "${DebugAllEnabled}" == "true" ]]; then printf "DebugHosts: ${BLUE}${DebugAllEnabled}${NC}" bDebugMsgPrintCnt=$((bDebugMsgPrintCnt + 1)) fi - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then if ((bDebugMsgPrintCnt > 0)) ; then printf ", " fi @@ -3071,11 +3081,11 @@ MadUploadFilesInUploadsFolder() { "${WorkDir}/uploads/"*.[7][zZ] \ "${WorkDir}/uploads/"*.[0-9][0-9][0-9] ; do - if [ -d "$fil" ] || [ ! -f "$fil" ]; then + if [[ -d "$fil" ]] || [[ ! -f "$fil" ]]; then continue fi fsize=$(GetFileSize "$fil" "false") - if [ -f "$fil" ] && ((fsize > 0)); then + if [[ -f "$fil" ]] && ((fsize > 0)); then tfilename="${fil##*/}" printf "${BLUE}%10s${NC} ${GREEN}%-56s${NC}\\n" $(GetFileSize "$fil" "true") "${tfilename}" fi @@ -3094,13 +3104,13 @@ MadUploadFilesInUploadsFolder() { local itemcount=1 for hline in "${arrListUploadHosts[@]}"; do - if [ -z "$hline" ] ; then + if [[ -z "$hline" ]] ; then continue fi _hostcode=$(echo $hline|cut -f2 -d '/') _hostnick=$(echo $hline|cut -f3 -d '/') _hostfuncprefix=$(echo $hline|cut -f4 -d '/') - if [ -z "$_hostcode" ] ; then + if [[ -z "$_hostcode" ]] ; then continue fi if [[ ! 
"$_helpprochostcodes" =~ *"$_hostcode"* ]]; then @@ -3135,7 +3145,7 @@ MadUploadFilesInUploadsFolder() { "${WorkDir}/uploads/"*.[7][zZ] \ "${WorkDir}/uploads/"*.[0-9][0-9][0-9] ; do - if [ -d "$fil" ] || [ ! -f "$fil" ]; then + if [[ -d "$fil" ]] || [[ ! -f "$fil" ]]; then continue fi fsize=$(GetFileSize "$fil" "false") @@ -3149,9 +3159,9 @@ MadUploadFilesInUploadsFolder() { fi userentry_hostcode=$(TrimWhitespace "$hline") linematch="" - if [ -f "${WorkDir}/uploads/temp_upload_handler.txt" ]; then + if [[ -f "${WorkDir}/uploads/temp_upload_handler.txt" ]]; then linematch=$(grep -Eni -m 1 "[OK] file: ${tfilename}, host: ${userentry_hostcode},.*\$" "${WorkDir}/uploads/temp_upload_handler.txt") - if [ ! -z "$linematch" ] ; then + if [[ ! -z "$linematch" ]] ; then echo -e "" echo -e "${GREEN}$tfilename${NC} already uploaded to ${userentry_hostcode} in ${GREEN}temp_upload_handler.txt${NC}" echo -e "${BLUE}line${NC}: ${linematch//, /\\n}" @@ -3160,7 +3170,7 @@ MadUploadFilesInUploadsFolder() { continue fi linematch=$(grep -Eni -m 1 "[FAIL] file: ${tfilename}, host: ${userentry_hostcode},.*\$" "${WorkDir}/uploads/temp_upload_handler.txt") - if [ ! -z "$linematch" ] ; then + if [[ ! -z "$linematch" ]] ; then echo -e "" echo -e "${GREEN}$tfilename${NC} already failed upload to ${userentry_hostcode} in ${GREEN}temp_upload_handler.txt${NC}" echo -e "${BLUE}line${NC}: ${linematch//, /\\n}" @@ -3169,7 +3179,7 @@ MadUploadFilesInUploadsFolder() { continue fi linematch=$(grep -Eni -m 1 "[RETRY] file: ${tfilename}, host: ${userentry_hostcode},.*\$" "${WorkDir}/uploads/temp_upload_handler.txt") - if [ ! -z "$linematch" ] ; then + if [[ ! -z "$linematch" ]] ; then echo -e "" echo -e "${GREEN}$tfilename${NC} already failed upload to ${userentry_hostcode} in ${GREEN}temp_upload_handler.txt${NC}" echo -e "${BLUE}line${NC}: ${linematch//, /\\n}" @@ -3196,7 +3206,7 @@ MadUploadFilesInUploadsFolder() { _hostfuncprefix=$(echo $hline|cut -f4 -d '/') if [[ "$_hostcode" == "$userentry_hostcode" ]]; then UploadTicket="${WorkDir}/.flocks/upload_${_hostcode}_${fil//[^a-zA-Z0-9]/}" - if [ -f "$UploadTicket" ]; then + if [[ -f "$UploadTicket" ]]; then echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/upload_${upfile_hostcode}_${upfile_filepath//[^a-zA-Z0-9]/}" continue fi @@ -3251,12 +3261,12 @@ direct_FetchFileInfo() { [ -s "${WorkDir}/.temp/directhead" ] kill $! 2>/dev/null ) - if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then + if [[ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]]; then touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" fi rm -f "${WorkDir}/.temp/directhead" fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then urllen=${#remote_url} if ((urllen > 64)); then debugHtml "${remote_url##*/}" "direct_${RANDOM}_head" "download_url: ${download_url}"$'\n'"${file_header}" @@ -3264,7 +3274,7 @@ direct_FetchFileInfo() { debugHtml "${remote_url##*/}" "direct_${remote_url//[^a-zA-Z0-9]/}_head" "download_url: ${download_url}"$'\n'"${file_header}" fi fi - if [ ! -z "$file_header" ] ; then + if [[ ! -z "$file_header" ]] ; then if grep -Eqi 'HTTP/.*404' <<< "${file_header}" ; then printf "\\n" echo -e "${RED}| 404. 
The file was not found or has been removed.${NC}" @@ -3293,7 +3303,7 @@ direct_FetchFileInfo() { continue fi fi - if [ "$filename_override" == "" ] && [ "$filename" == "" ] ; then + if [[ "$filename_override" == "" ]] && [[ "$filename" == "" ]] ; then if grep -Eqi 'filename=' <<< "${file_header}" ; then filename=$(grep -oPi -m 1 'filename=\K.*?$' <<< "${file_header}") filename="${filename%%;*}" @@ -3301,7 +3311,7 @@ direct_FetchFileInfo() { filename=${filename##filename} filename=${filename//\"/} filename=${filename//[$'\t\r\n']} - elif [ ! -z $file_id ]; then + elif [[ ! -z "$file_id" ]]; then if grep -Eqi '(.rar|.7z|.zip|.[0-9][0-9][0-9])$' <<< ${file_id} ; then printf "\\n" echo -e "${YELLOW}| Failed to extract file name, using url name.${NC}" @@ -3342,17 +3352,17 @@ direct_FetchFileInfo() { fi fi done - touch ${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/} - if [ ! "$filename_override" == "" ] ; then + touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" + if [[ ! "$filename_override" == "" ]] ; then filename="$filename_override" fi filename=$(sanitize_file_or_folder_name "${filename}") - if [ -z "$filename" ]; then + if [[ -z "$filename" ]]; then printf "\\n" echo -e "${RED}| Unexpected or no header response [no filename]${NC}" return 1 fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then file_size_readable="${RED}Unknown filesize…${NC}" else file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")" @@ -3375,18 +3385,18 @@ direct_GetFile() { flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock" for ((j=1; j<=$MaxDownloadRetries; j++)); do pd_presize=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi - if [ -z $file_size_bytes ] ; then + if [[ -z $file_size_bytes ]] ; then echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path" rc=$? 
- if [ $rc -ne 0 ] ; then + if ((rc != 0 )) ; then printf "${RED}Download Failed (bad exit status).${NC}" - if [ -f ${file_path} ]; then + if [[ -f ${file_path} ]]; then printf "${YELLOW} Partial removed...${NC}" printf "\n\n" rm -f "${file_path}" @@ -3395,7 +3405,7 @@ direct_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -3408,7 +3418,7 @@ direct_GetFile() { else containsHtml=true fi - if [ "$containsHtml" == "true" ]; then + if [[ "$containsHtml" == "true" ]]; then if grep -Eqi 'was removed|no such file|was deleted|not found|banned' < "$file_path" ; then printf "\\n" echo -e "${RED}| The file was not found or has been removed.${NC}" @@ -3422,7 +3432,7 @@ direct_GetFile() { rm -f "${file_path}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -3434,13 +3444,13 @@ direct_GetFile() { else CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15 - if [ "${RateMonitorEnabled}" == "true" ]; then + if [[ "${RateMonitorEnabled}" == "true" ]]; then tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path" else tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path" fi received_file_size=0 - if [ -f "$file_path" ] ; then + if [[ -f "$file_path" ]] ; then received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]') fi if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then @@ -3449,9 +3459,9 @@ direct_GetFile() { containsHtml=true fi downDelta=$(( received_file_size - pd_presize )) - if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then - if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then - if [ -f "${file_path}" ] ; then + if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [[ "$containsHtml" == "true" ]]; then + if [[ "${AutoRepairBadPartials}" == "true" ]] && (( downDelta > 0 && downDelta < 1024 )) ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -3463,15 +3473,15 @@ direct_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then - if [ -f "${file_path}" ] ; then + elif [[ "${AutoRepairBadPartials}" == "true" ]] && [[ "$containsHtml" == "true" ]] ; then + if [[ -f "${file_path}" ]] ; then if ((pd_presize > 0)); then echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..." 
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size" @@ -3483,21 +3493,21 @@ direct_GetFile() { fi if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 else continue fi - elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then - if [ -f "$file_path" ] ; then + elif (( downDelta > 0 && downDelta < 1024 )) || [[ "$containsHtml" == "true" ]] ; then + if [[ -f "$file_path" ]] ; then rm -rf "$file_path" fi echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..." if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -3509,7 +3519,7 @@ direct_GetFile() { echo -e "\n${RED}Download failed, file is incomplete.${NC}" if ((j >= $MaxDownloadRetries)) ; then rm -f "$flockDownload"; - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}" fi return 1 @@ -3530,7 +3540,7 @@ direct_DownloadFile() { local remote_url="${1}" # url from urls.txt local filecnt=${2} local file_url="${3}" # Override url ie. lainsafe.onion --> lainsafe - if [ -z "$file_url" ]; then + if [[ -z "$file_url" ]]; then file_url="$remote_url" fi warnAndRetryUnknownError=false @@ -3543,24 +3553,24 @@ direct_DownloadFile() { tor_identity="${RANDOM}" finalAttempt="false" for ((z=0; z<=$MaxUrlRetries; z++)); do - if [ $z -eq $MaxUrlRetries ] ; then + if [[ $z -eq $MaxUrlRetries ]] ; then finalAttempt="true" fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if direct_FetchFileInfo $finalAttempt && direct_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then return 0 - elif [ $z -lt $MaxUrlRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUrlRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}" fi fi if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" @@ -3593,23 +3603,23 @@ jira_UploadFile() { fi finalAttempt="false" for ((z=0; z<=$MaxUploadRetries; z++)); do - if [ $z -eq $MaxUploadRetries ] ; then + if [[ $z -eq $MaxUploadRetries ]] ; then finalAttempt="true" fi trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15 if jira_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then return 0 - elif [ $z -lt $MaxUploadRetries ]; then - if [ "${fileAlreadyDone}" == "true" ] ; then + elif [[ $z -lt $MaxUploadRetries ]]; then + if [[ "${fileAlreadyDone}" == "true" ]] ; then break fi if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + 
if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}" fi fi if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue" fi rm -f "${UploadTicket}" @@ -3649,7 +3659,7 @@ jira_PostFile() { -F "files[]=@${arrFiles[@]}" \ "${jira_PostUrlHost}") fi - if [ "${DebugAllEnabled}" == "true" ] ; then + if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}" fi if grep -Eqi ' 200 ' <<< "${response}" ; then @@ -3664,7 +3674,7 @@ jira_PostFile() { return 0 else err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response") - if [ "${finalAttempt}" == "true" ] ; then + if [[ "${finalAttempt}" == "true" ]] ; then printf "\\n" echo -e "${RED}| Upload failed. Status: ${err}${NC}" failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err" @@ -3694,13 +3704,13 @@ trap "tput el; tput cnorm; exit" 0 1 2 3 6 15 CatnapCount=0 UrlOnly=false didSourceMadConfig=false -if [ -f "${ScriptDir}/mad.config" ] ; then +if [[ -f "${ScriptDir}/mad.config" ]] ; then source ${ScriptDir}/mad.config didSourceMadConfig=true fi -if [ ! -z ${WorkDirOverride} ]; then +if [[ ! -z ${WorkDirOverride} ]]; then WorkDir="${WorkDirOverride}" - if [ ! -d "${WorkDir}" ]; then + if [[ ! -d "${WorkDir}" ]]; then mkdir -p "${WorkDir}" fi fi @@ -3709,11 +3719,11 @@ if [[ "$1" == "audit" ]]; then exit 0 fi torPort=$(checkTor) -if [ "$torPort" == "" ] ; then +if [[ "$torPort" == "" ]] ; then printf "%s\\n" "Tor is not running!" exit 1 fi -if [ "${UseTorCurlImpersonate}" == "true" ]; then +if [[ "${UseTorCurlImpersonate}" == "true" ]]; then curl_impersonate=() readarray -d $'' arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1) bFoundCurlHeader=false @@ -3725,7 +3735,7 @@ if [ "${UseTorCurlImpersonate}" == "true" ]; then break fi done - if [ "$bFoundCurlHeader" == "false" ]; then + if [[ "$bFoundCurlHeader" == "false" ]]; then echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}" echo -e "Some hosts use CloudFlare to detect and block scripts (such as hexload)." echo -e "To get around it, this script needs to impersonate a browser." @@ -3757,8 +3767,8 @@ arg3="$3" # filelist arg4="$4" # moveToFolder arg5="$5" # fileCount arg6="$6" # lineCount -if [ "$#" -ne 1 ] && [ "$#" -ne 2 ] && [ "$#" -ne 3 ] && [ "$#" -ne 4 ] && [ "$#" -ne 5 ] && [ "$#" -ne 6 ] || \ - [ "$1" == "help" ] || [ "$1" == "?" ] ; then +if [[ "$#" -ne 1 ]] && [[ "$#" -ne 2 ]] && [[ "$#" -ne 3 ]] && [[ "$#" -ne 4 ]] && [[ "$#" -ne 5 ]] && [[ "$#" -ne 6 ]] || \ + [[ "$1" == "help" ]] || [[ "$1" == "?" ]] ; then LoadMadDownloadHosts echo -e "${BLUE}MAD Help -------------------------${NC}" echo -e "Supported Hosts + Keyword:" @@ -3858,30 +3868,30 @@ if [ "$#" -ne 1 ] && [ "$#" -ne 2 ] && [ "$#" -ne 3 ] && [ "$#" -ne 4 ] && [ "$# exit 0 fi if [[ "$arg1" == "upload" ]] || [[ "$arg1" == "uploads" ]]; then - if [ "$arg2" == "status" ] && [ -f "$arg3" ]; then + if [[ "$arg2" == "status" ]] && [[ -f "$arg3" ]]; then madStatusUploads "$arg3" exit 0 fi - if [ "$arg2" == "reset" ] && [ -f "$arg3" ]; then + if [[ "$arg2" == "reset" ]] && [[ -f "$arg3" ]]; then madResetUploads "$arg3" exit 0 fi - if [ ! -z "$arg2" ]; then + if [[ ! 
-z "$arg2" ]]; then InputFile="$arg2" - if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then echo -e "Unable to read file $InputFile [1]" exit 1 fi CleanInputFile $InputFile qChkLineCount=0 - if [ -f "${InputFile}" ] ; then + if [[ -f "${InputFile}" ]] ; then qChkLineCount=$(grep -Evi '^#|^$|#OK#|#FAIL#|#RETRY#)' "${InputFile}" | wc -l | awk '{ print $1 }') if ((qChkLineCount <= 0)); then if DoneProcessingAllUploads "$InputFile" ; then - if [ "${AutoShowMadStatus}" == "true" ] ; then + if [[ "${AutoShowMadStatus}" == "true" ]] ; then echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D" madStatusUploads "$InputFile" fi @@ -3926,7 +3936,7 @@ echo -e "${BLD}" echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:" echo -e ":${NC} ${GREEN}M${NC}ulti-host ${GREEN}A${NC}uto ${GREEN}D${NC}ownloader - ${YELLOW}v${ScriptVersion} ${BLUE}(by kittykat) ${PINK}:" echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n" -if [ "$didSourceMadConfig" == "true" ]; then +if [[ "$didSourceMadConfig" == "true" ]]; then echo -e "[${GREEN}LOAD${NC}] ${BLUE}mad.config${NC}" echo -e "" fi @@ -3941,40 +3951,40 @@ else printf "Ensure Tor is setup and listening on a port between 9050 and 9150. Exiting...\\n" exit 1 fi -if [ "${UseTorCurlImpersonate}" == "true" ]; then +if [[ "${UseTorCurlImpersonate}" == "true" ]]; then printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n" else printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n" fi echo -e "ConnectTimeout: ${GREEN}$ConnectTimeout${NC}, CircuitRetries: ${GREEN}$CircuitRetries${NC}, UrlRetries: ${GREEN}$MaxUrlRetries${NC}, DownloadRetries: ${GREEN}$MaxDownloadRetries${NC}" -if [ "${LoopThroughFileUntilComplete}" == "true" ]; then +if [[ "${LoopThroughFileUntilComplete}" == "true" ]]; then printf "Loop: ${GREEN}${LoopThroughFileUntilComplete}${NC}, " else printf "Loop: ${GREY}${LoopThroughFileUntilComplete}${NC}, " fi -if [ "${AutoReloadOnFilelistTxtChanges}" == "true" ]; then +if [[ "${AutoReloadOnFilelistTxtChanges}" == "true" ]]; then printf "AutoReload: ${GREEN}${AutoReloadOnFilelistTxtChanges}${NC}, " else printf "AutoReload: ${GREY}${AutoReloadOnFilelistTxtChanges}${NC}, " fi -if [ "${AutoCommentOnCompletion}" == "true" ]; then +if [[ "${AutoCommentOnCompletion}" == "true" ]]; then printf "AutoComment: ${GREEN}${AutoCommentOnCompletion}${NC}, " else printf "AutoComment: ${GREY}${AutoCommentOnCompletion}${NC}, " fi -if [ "${AutoRepairBadPartials}" == "true" ]; then +if [[ "${AutoRepairBadPartials}" == "true" ]]; then printf "AutoRepairPartials: ${GREEN}${AutoRepairBadPartials}${NC}" else printf "AutoRepairPartials: ${GREY}${AutoRepairBadPartials}${NC}" fi printf "\\n" -if [ "${RateMonitorEnabled}" == "true" ]; then +if [[ "${RateMonitorEnabled}" == "true" ]]; then printf "RateMonitor: ${GREEN}${RateMonitorEnabled}${NC}, " else printf "RateMonitor: ${GREY}${RateMonitorEnabled}${NC}, " fi -if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_pixeldrain" == "true" ]; then - if [ "${ar_pgsKey[0]}" == 'aa-bbbbb-ccccc-ddddd-eeeee-fffff' ] || [ "${ar_pgsKey[0]}" == "" ] ; then +if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [[ "$PJSCloud_pixeldrain" == "true" ]]; then + if [[ "${ar_pgsKey[0]}" == 'aa-bbbbb-ccccc-ddddd-eeeee-fffff' ]] || [[ "${ar_pgsKey[0]}" == "" ]] ; then printf " ${RED}[==>${NC} 
Setup ${BLUE}PJS apikey${NC} in pjscloud.sh ${RED}<==]${NC}" PJSCloud_pixeldrain=false printf "PDPump: ${GREY}${PJSCloud_pixeldrain}${NC}, " @@ -3984,14 +3994,14 @@ if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_pixeldrain" == "tru else printf "PDPump: ${GREY}${PJSCloud_pixeldrain}${NC}, " fi -if [ "${UsePixeldrainBypass}" == "true" ]; then +if [[ "${UsePixeldrainBypass}" == "true" ]]; then printf "PDBypass: ${GREEN}${UsePixeldrainBypass}${NC}, " else printf "PDBypass: ${GREY}${UsePixeldrainBypass}${NC}, " fi -if [ "${EnableFiledotProcessing}" == "true" ]; then +if [[ "${EnableFiledotProcessing}" == "true" ]]; then GetRandomFiledotUser - if [ "${ar_fdUP[0]}" == 'user1|pass1' ] || [ "${ar_fdUP[0]}" == "" ]; then + if [[ "${ar_fdUP[0]}" == 'user1|pass1' ]] || [[ "${ar_fdUP[0]}" == "" ]]; then printf " ${RED}[==>${NC} Setup ${BLUE}user${NC}/${BLUE}pass${NC} in script ${RED}<==]${NC}" EnableFiledotProcessing=false printf "Filedot: ${GREY}$EnableFiledotProcessing${NC}" @@ -4002,13 +4012,13 @@ else printf "Filedot: ${GREY}$EnableFiledotProcessing${NC}" fi printf "\\n" -if [ "${DebugAllEnabled}" == "true" ] || [ "${DebugPluginsEnabled}" == "true" ] ; then +if [[ "${DebugAllEnabled}" == "true" ]] || [[ "${DebugPluginsEnabled}" == "true" ]] ; then bDebugMsgPrintCnt=0 - if [ "${DebugAllEnabled}" == "true" ]; then + if [[ "${DebugAllEnabled}" == "true" ]]; then printf "DebugHosts: ${BLUE}${DebugAllEnabled}${NC}" bDebugMsgPrintCnt=$((bDebugMsgPrintCnt + 1)) fi - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then if ((bDebugMsgPrintCnt > 0)) ; then printf ", " fi @@ -4016,35 +4026,35 @@ if [ "${DebugAllEnabled}" == "true" ] || [ "${DebugPluginsEnabled}" == "true" ] fi printf "\\n" fi -if [ ! "$UrlOnly" == "true" ]; then +if [[ ! "$UrlOnly" == "true" ]]; then qChkLineCount=0 - if [ -f "${WorkDir}/$1" ] ; then + if [[ -f "${WorkDir}/$1" ]] ; then qChkLineCount=$(grep -Ei '^(http|direct=http)' "${WorkDir}/$1" | wc -l | awk '{ print $1 }') if ((qChkLineCount <= 0)); then if DoneProcessingAllUrls "$1" ; then - if [ "${AutoShowMadStatus}" == "true" ] ; then + if [[ "${AutoShowMadStatus}" == "true" ]] ; then echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D" madStatus "$1" fi exit 0 fi fi - elif [ -f "${WorkDir}/$2" ] ; then + elif [[ -f "${WorkDir}/$2" ]] ; then qChkLineCount=$(grep -Ei '^(http|direct=http)' "${WorkDir}/$2" | wc -l | awk '{ print $1 }') if ((qChkLineCount <= 0)); then if DoneProcessingAllUrls "$2" ; then - if [ "${AutoShowMadStatus}" == "true" ] ; then + if [[ "${AutoShowMadStatus}" == "true" ]] ; then echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D" madStatus "$2" fi exit 0 fi fi - elif [ -f "${WorkDir}/$3" ] ; then + elif [[ -f "${WorkDir}/$3" ]] ; then qChkLineCount=$(grep -Ei '^(http|direct=http)' "${WorkDir}/$3" | wc -l | awk '{ print $1 }') if ((qChkLineCount <= 0)); then if DoneProcessingAllUrls "$3" ; then - if [ "${AutoShowMadStatus}" == "true" ] ; then + if [[ "${AutoShowMadStatus}" == "true" ]] ; then echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D" madStatus "$3" fi @@ -4055,11 +4065,11 @@ if [ ! "$UrlOnly" == "true" ]; then fi hostOnlyOrUrl="" multiCount=0 -if [ "$UrlOnly" == "true" ]; then +if [[ "$UrlOnly" == "true" ]]; then echo -e "" echo -e "${BLUE}:-:-:-: URL Only Mode :-:-:-:${NC}" line="$arg1" - if [ ! -z "$arg2" ]; then + if [[ ! 
-z "$arg2" ]]; then filename_override="$arg2" fi if [[ $line =~ | ]] ; then @@ -4076,12 +4086,12 @@ if [ "$UrlOnly" == "true" ]; then remote_url=${remote_url/http:/https:} fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]] ; then echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}" exit 0 fi printf "\nGetting ${YELLOW}direct${NC} file ${GREEN}1${NC}\\n" - if [ ! "${filename_override}" == "" ] ; then + if [[ ! "${filename_override}" == "" ]] ; then printf "[${BLUE}FilenameOverride${NC}]: %s\\n" $filename_override fi printf "[${BLUE}DirectUrl${NC}]: %s\\n" "${remote_url}" @@ -4098,7 +4108,7 @@ if [ "$UrlOnly" == "true" ]; then remote_url=${remote_url/http:/https:} fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]] ; then echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}" exit 0 fi @@ -4123,7 +4133,7 @@ if [ "$UrlOnly" == "true" ]; then if [[ $remote_url =~ $_hostdomainregex ]]; then isHostMatchFound=true printf "\nGetting ${YELLOW}$_hostnick${NC} file ${GREEN}1${NC}\\n" - if [ ! "${filename_override}" == "" ] ; then + if [[ ! "${filename_override}" == "" ]] ; then printf "[${BLUE}FilenameOverride${NC}]: %s\\n" $filename_override fi printf "[DownloadUrl]: %s\\n" "${remote_url}" @@ -4134,7 +4144,7 @@ if [ "$UrlOnly" == "true" ]; then break fi done - if [ "$isHostMatchFound" == "false" ]; then + if [[ "$isHostMatchFound" == "false" ]]; then printf "${RED}Invalid url (bad format or unsupported host [UO]):${NC} \\n%s\\n" $remote_url badUrlDownload "${remote_url}" exit 0 @@ -4156,10 +4166,10 @@ if [[ "$arg1" == "multi" ]] && [[ "$arg2" == "auto" ]] ; then else InputFile="$3" fi - if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then printf "Unable to read file %s! [1]\\n" "$1" exit 1 else @@ -4184,14 +4194,14 @@ if [[ "$arg1" == "multi" ]] && [[ "$arg2" == "auto" ]] ; then lineCount=0 lineCount=$(grep -Ei $_hostdomainregex "${InputFile}" | wc -l | awk '{ print $1 }') if ((lineCount > 0)) ; then - if [ "$useMultiCount" == "false" ] || ( [ $useMultiCount == "true" ] && ((multiCount > 0)) ) ; then + if [[ "$useMultiCount" == "false" ]] || ( [ $useMultiCount == "true" ] && ((multiCount > 0)) ) ; then printf "%s has ${GREEN}%d ${YELLOW}$_hostnick${NC} files to download.\\n" "${InputFile}" $lineCount LaunchTerminal "$_hostcode" ${InputFile} multiCount=$((multiCount - 1)) fi fi done - if [ $useMultiCount == "true" ] && ((multiCount > 0)) ; then + if [[ $useMultiCount == "true" ]] && ((multiCount > 0)) ; then printf "Spawning ${GREEN}%d ${YELLOW}allhosts${NC} terminals.\\n" $multiCount for ((k=1; k<=$multiCount; k++)); do LaunchTerminal ${InputFile} @@ -4206,10 +4216,10 @@ elif [[ "$arg1" == "multi" ]] && \ if [[ "$ListHostAndDomainRegexes" == *"$arg2"* ]]; then InputFile="$arg4" multiCount=$arg3 - if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then printf "Unable to read file %s! 
[2]\\n" "$1" exit 1 else @@ -4229,7 +4239,7 @@ elif [[ "$arg1" == "multi" ]] && \ _hostnick=$(echo $chnk1|cut -f3 -d '/') _hostfuncprefix=$(echo $chnk1|cut -f4 -d '/') _hostdomainregex="${hline#*\:}" - if [ "$arg2" == "$_hostcode" ] ; then + if [[ "$arg2" == "$_hostcode" ]] ; then lineCount=$(grep -Ei $_hostdomainregex "${InputFile}" | wc -l | awk '{ print $1 }') printf "%s has ${GREEN}%d ${YELLOW}$_hostnick${NC} files to download.\\n" "${InputFile}" $lineCount for ((k=1; k<=$multiCount; k++)); do @@ -4246,10 +4256,10 @@ elif [[ "$arg1" == "multi" ]] && \ "$arg2" == "5" || "$arg2" == "6" || "$arg2" == "7" || "$arg2" == "8" ]] ; then multiCount=$arg2 fi - if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then printf "Unable to read file %s! [3]\\n" "$1" exit 1 else @@ -4270,10 +4280,10 @@ elif [[ "$arg1" == "multi" ]] && \ _hostnick=$(echo $chnk1|cut -f3 -d '/') _hostfuncprefix=$(echo $chnk1|cut -f4 -d '/') _hostdomainregex="${hline#*\:}" - if [ "$_hostfuncprefix" == "direct" ]; then + if [[ "$_hostfuncprefix" == "direct" ]]; then lineCount2=$(grep -Ei '^direct=http' "${InputFile}" | wc -l | awk '{ print $1 }') lineCount=$((lineCount + lineCount2)) - elif [ "$_hostcode" == "fdot" ] && [ "${EnableFiledotProcessing}" == "true" ]; then + elif [[ "$_hostcode" == "fdot" ]] && [[ "${EnableFiledotProcessing}" == "true" ]]; then lineCount2=$(grep -Ei $_hostdomainregex "${InputFile}" | wc -l | awk '{ print $1 }') lineCount=$((lineCount + lineCount2)) else @@ -4307,13 +4317,13 @@ else _hostnick=$(echo $chnk1|cut -f3 -d '/') _hostfuncprefix=$(echo $chnk1|cut -f4 -d '/') _hostdomainregex="${hline#*\:}" - if [ "$arg1" == "$_hostcode" ]; then + if [[ "$arg1" == "$_hostcode" ]]; then hostOnlyOrUrl="$1" InputFile="$2" - if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then printf "Unable to read file %s! [host]\\n" "$1" exit 1 else @@ -4324,14 +4334,14 @@ else isHostMatchFound=true fi done - if [ "$isHostMatchFound" == "true" ]; then + if [[ "$isHostMatchFound" == "true" ]]; then printf "%s has ${GREEN}%d ${YELLOW}$foundhostnick${NC} files to download.\\n" "${InputFile}" $lineCount else InputFile="$1" - if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]] && [[ -f "${WorkDir}/${InputFile}" ]]; then InputFile="${WorkDir}/${InputFile}" fi - if [ ! -f "${InputFile}" ]; then + if [[ ! -f "${InputFile}" ]]; then printf "Unable to read file %s! [main]\\n" "$1" exit 1 fi @@ -4341,7 +4351,7 @@ else fi CleanInputFile $InputFile HashFilelistTxt=$( sha1sum ${InputFile} | awk '{print $1}' ) -if [ "$VerboseLoading" == "true" ]; then +if [[ "$VerboseLoading" == "true" ]]; then printf "SHA1: %s\\n\\n" "${HashFilelistTxt}" fi mainLoopControl=true @@ -4352,7 +4362,7 @@ do qChkLineCount=$(grep -Ei '^(http|direct=http)' "${InputFile}" | wc -l | awk '{ print $1 }') if ((qChkLineCount <= 0)) ; then if DoneProcessingAllUrls "${InputFile}" ; then - if [ "${AutoShowMadStatus}" == "true" ] ; then + if [[ "${AutoShowMadStatus}" == "true" ]] ; then echo -e "${RED}❤${GREEN}Done! 
${YELLOW}Me0W!${NC} :D" madStatus "${InputFile}" fi @@ -4372,51 +4382,51 @@ do filename_override="${line##*\|}" line="${line%%\|*}" fi - if [ -f "${InputFile}" ] ; then + if [[ -f "${InputFile}" ]] ; then currentHashFilelistTxt=$( sha1sum "${InputFile}" | awk '{print $1}' ) else currentHashFilelistTxt=$( sha1sum ${InputFile} | awk '{print $1}' ) fi - if [ -f "${WorkDir}/clear" ] ; then + if [[ -f "${WorkDir}/clear" ]] ; then clear printf "[${PINK}ClearScreen${NC}] Clear file was found. (removing and clearing)\\n" - if [ -f "${WorkDir}/clear" ] ; then + if [[ -f "${WorkDir}/clear" ]] ; then rm -f "${WorkDir}/clear" fi - elif ((qChkLineCount > 0)) && [ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ] && \ - [ "${ClearScreenOnAutoReload}" == "true" ] && [ "${AutoReloadOnFilelistTxtChanges}" == "true" ] ; then + elif ((qChkLineCount > 0)) && [[ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ]] && \ + [[ "${ClearScreenOnAutoReload}" == "true" ]] && [[ "${AutoReloadOnFilelistTxtChanges}" == "true" ]] ; then clear printf "[${PINK}ClearScreen${NC}] Auto-clearing screen. (${InputFile} change detected)\\n" fi - if [ -f "${WorkDir}/stop" ] ; then + if [[ -f "${WorkDir}/stop" ]] ; then printf "\\n%s\\n" "--------------------------------------------" printf "[${PINK}Stop${NC}] stop file was found.\\n" printf "%s\\n" "--------------------------------------------" rm -f "${WorkDir}/stop" exit 1 fi - if [ -f "${WorkDir}/restart" ] ; then + if [[ -f "${WorkDir}/restart" ]] ; then printf "\\n%s\\n" "--------------------------------------------" printf "[${PINK}Restart${NC}] restart file was found.\\n" printf "%s\\n" "--------------------------------------------" rm -f "${WorkDir}/restart" - if [ -f "${WorkDir}/reload" ] ; then + if [[ -f "${WorkDir}/reload" ]] ; then rm -f "${WorkDir}/reload" fi ReloadScript "$@" exit 1 - elif [ -f "${WorkDir}/reload" ] ; then + elif [[ -f "${WorkDir}/reload" ]] ; then printf "\\n%s\\n" "--------------------------------------------" printf "[${PINK}Reload${NC}] reload file was found.\\n" printf "%s\\n" "--------------------------------------------" rm -f "${WorkDir}/reload" - if [ -f "${WorkDir}/restart" ] ; then + if [[ -f "${WorkDir}/restart" ]] ; then rm -f "${WorkDir}/restart" fi ReloadScript "$@" exit 1 - elif ((qChkLineCount > 0)) && [ "${AutoReloadOnFilelistTxtChanges}" == "true" ] && \ - [ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ]; then + elif ((qChkLineCount > 0)) && [[ "${AutoReloadOnFilelistTxtChanges}" == "true" ]] && \ + [[ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ]]; then printf "\\n%s\\n" "--------------------------------------------" printf "[${PINK}Reload${NC}] ${InputFile} was modified.\\n" printf "%s\\n" "--------------------------------------------" @@ -4446,7 +4456,7 @@ do continue fi fi - if [ ! -z "${hostOnlyOrUrl}" ] ; then + if [[ ! 
-z "${hostOnlyOrUrl}" ]] ; then _hostcode="" _hostnick="" _hostfuncprefix="" @@ -4480,10 +4490,10 @@ do _hostdomainregex="${hline#*\:}" if [[ $remote_url =~ $_hostdomainregex ]] ; then isSupportedHost=true - if [ "${hostOnlyOrUrl}" == "$_hostcode" ] ; then + if [[ "${hostOnlyOrUrl}" == "$_hostcode" ]] ; then isHostMatchFound=true CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]] ; then if ((CatnapCount <= 0)) ; then echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}" fi @@ -4492,10 +4502,10 @@ do break fi printf "\nGetting ${YELLOW}$_hostnick${NC} file ${GREEN}%d${NC} of ${GREEN}%d${NC} ${PINK}($_hostcode urls)${NC}\\n" $fileCount $lineCount - if [ ! "${filename_override}" == "" ] ; then + if [[ ! "${filename_override}" == "" ]] ; then printf "[${BLUE}FilenameOverride${NC}]: %s\\n" $filename_override fi - if [ ! "${MoveToFolder}" == "" ] ; then + if [[ ! "${MoveToFolder}" == "" ]] ; then printf "[${BLUE}MoveToFolder${NC}]: %s\\n" $MoveToFolder fi printf "[DownloadUrl]: %s\\n" "${remote_url}" @@ -4508,9 +4518,9 @@ do fi fi done - if [ "$isSkipOkay" == "true" ]; then + if [[ "$isSkipOkay" == "true" ]]; then continue - elif [ "$isSupportedHost" == "false" ]; then + elif [[ "$isSupportedHost" == "false" ]]; then printf "${RED}Invalid url (bad format or unsupported host [m1]):${NC} \\n%s\\n" $remote_url badUrlDownload "${remote_url}" continue @@ -4524,7 +4534,7 @@ do remote_url=${remote_url/http:/https:} fi CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]] ; then if ((CatnapCount <= 0)) ; then echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}" fi @@ -4532,10 +4542,10 @@ do continue fi printf "\nGetting ${YELLOW}direct${NC} file ${GREEN}%d${NC} of ${GREEN}%d${NC}\\n" $fileCount $lineCount - if [ ! "${filename_override}" == "" ] ; then + if [[ ! "${filename_override}" == "" ]] ; then printf "[${BLUE}FilenameOverride${NC}]: %s\\n" $filename_override fi - if [ ! "${MoveToFolder}" == "" ] ; then + if [[ ! "${MoveToFolder}" == "" ]] ; then printf "[${BLUE}MoveToFolder${NC}]: %s\\n" $MoveToFolder fi printf "[${BLUE}DirectUrl${NC}]: %s\\n" "${remote_url}" @@ -4574,7 +4584,7 @@ do if [[ $remote_url =~ $_hostdomainregex ]] ; then isHostMatchFound=true CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} - if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then + if [[ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]] ; then if ((CatnapCount <= 0)) ; then echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}" fi @@ -4583,10 +4593,10 @@ do break fi printf "\nGetting ${YELLOW}$_hostnick${NC} file ${GREEN}%d${NC} of ${GREEN}%d${NC}\\n" $fileCount $lineCount - if [ ! "${filename_override}" == "" ] ; then + if [[ ! "${filename_override}" == "" ]] ; then printf "[${BLUE}FilenameOverride${NC}]: %s\\n" $filename_override fi - if [ ! "${MoveToFolder}" == "" ] ; then + if [[ ! 
"${MoveToFolder}" == "" ]] ; then printf "[${BLUE}MoveToFolder${NC}]: %s\\n" $MoveToFolder fi printf "[DownloadUrl]: %s\\n" "${remote_url}" @@ -4598,9 +4608,9 @@ do break fi done - if [ "$isSkipOkay" == "true" ]; then + if [[ "$isSkipOkay" == "true" ]]; then continue - elif [ "$isHostMatchFound" == "false" ]; then + elif [[ "$isHostMatchFound" == "false" ]]; then printf "${RED}Invalid url or disabled host (bad format or unsupported host [m*]):${NC} \\n%s\\n" $remote_url badUrlDownload "${remote_url}" continue @@ -4609,7 +4619,7 @@ do done #loop through the file line by line qChkLineCount=$(grep -Ei '^(http|direct=http)' "${InputFile}" | wc -l | awk '{ print $1 }') if ((qChkLineCount > 0)) ; then - if [ ! -z $hostOnlyOrUrl ] ; then + if [[ ! -z $hostOnlyOrUrl ]] ; then echo -e "${NC}" echo -e "${YELLOW}Unprocessed / Skipped URL(s) Found:${NC}" echo -e "Most likely from a different host than ${YELLOW}$hostOnlyOrUrl${NC}, another terminal is downloading it, or a flock exists...${NC}" @@ -4617,8 +4627,8 @@ do ReloadScript "" exit 0 else - if [ "$LoopThroughFileUntilComplete" == "false" ]; then - if [ ! -z $hostOnlyOrUrl ] ; then + if [[ "$LoopThroughFileUntilComplete" == "false" ]]; then + if [[ ! -z $hostOnlyOrUrl ]] ; then echo -e "${NC}" echo -e "${YELLOW}Unprocessed / Skipped URL(s) Found:${NC}" echo -e "Most likely from a different host than ${YELLOW}$hostOnlyOrUrl${NC}, another terminal is downloading it, or a flock exists...${NC}" diff --git a/plugins/AutoResetAndRetryDownloads.sh b/plugins/AutoResetAndRetryDownloads.sh old mode 100755 new mode 100644 index d64d529..c049f5f --- a/plugins/AutoResetAndRetryDownloads.sh +++ b/plugins/AutoResetAndRetryDownloads.sh @@ -1,6 +1,6 @@ #! Name: AutoResetAndRetryDownloads.sh #! Author: kittykat -#! Version: 2024.09.27 +#! Version: 2025.02.18 #! Desc: Runs mad.sh reset after processing all urls and then relaunches MAD #! Usage: Edit LoadPlugin="" line in mad.sh or mad.config #! LoadPlugin="AutoResetAndRetryDownloads.sh" @@ -30,25 +30,25 @@ DoneProcessingAllUrls_AutoResetAndRetryDownloads() { local plugName='AutoResetAndRetryDownloads' local plugFunc='DoneProcessingAllUrls_AutoResetAndRetryDownloads' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pInputFile="$1" arard_ticketactivetimeout=60 arard_ticketexpirytimeout=540 - if [ "$LoopThroughFileUntilComplete" == "true" ] && [ ! -z $CatnapDuration ] && ((CatnapDuration > 0)); then + if [[ "$LoopThroughFileUntilComplete" == "true" ]] && [[ ! -z $CatnapDuration ]] && ((CatnapDuration > 0)); then arard_ticketactivetimeout=$(( arard_ticketactivetimeout + (CatnapDuration * 60) )) arard_ticketexpirytimeout=$(( arard_ticketexpirytimeout + (CatnapDuration * 60) )) fi if grep -Eqi '^#(http.*#RETRY#|direct=http.*#RETRY#).*$' "$pInputFile" ; then - if [ ! -f "${WorkDir}/.temp/AutoResetAndRetryDownloadsarard_startTime.txt" ]; then + if [[ ! 
-f "${WorkDir}/.temp/AutoResetAndRetryDownloadsarard_startTime.txt" ]]; then echo $(date +%s) > "${WorkDir}/.temp/AutoResetAndRetryDownloadsarard_startTime.txt" else arard_startTime=$(cat "${WorkDir}/.temp/AutoResetAndRetryDownloadsarard_startTime.txt") arard_currTime=$(date +%s) arard_durationseconds=$((arard_currTime - arard_startTime)) if ((arard_durationseconds > arard_ticketexpirytimeout)); then - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Ticket expiry occurred ($arard_durationseconds > $arard_ticketexpirytimeout)..${NC}" fi echo $(date +%s) > "${WorkDir}/.temp/AutoResetAndRetryDownloadsarard_startTime.txt" @@ -56,7 +56,7 @@ DoneProcessingAllUrls_AutoResetAndRetryDownloads() { arard_currTime=$(date +%s) arard_durationseconds=$((arard_currTime - arard_startTime)) elif ((arard_durationseconds > arard_ticketactivetimeout)); then - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Ticket inactive ($arard_durationseconds > $arard_ticketactivetimeout)..${NC}" fi for ((a=1; a<=2; a++)); do @@ -67,7 +67,7 @@ DoneProcessingAllUrls_AutoResetAndRetryDownloads() { echo -e "${YELLOW}Reset&Retry${NC}: Done processing... ${BLUE}$((arard_durationseconds/60))${NC} min(s). Expiry ${PINK}$(( (arard_ticketexpirytimeout - arard_durationseconds)/60 ))${NC} min(s).${NC}" return 0 fi - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Ticket active ($arard_durationseconds)..${NC}" fi for ((a=1; a<=2; a++)); do @@ -78,7 +78,7 @@ DoneProcessingAllUrls_AutoResetAndRetryDownloads() { echo -e "${YELLOW}Reset&Retry${NC}: Dozing ${BLUE}1${NC} min. Processing for ${BLUE}$((arard_durationseconds/60))${NC} of ${BLUE}$((arard_ticketactivetimeout/60))${NC} min(s)${NC}. Expiry ${PINK}$(( (arard_ticketexpirytimeout - arard_durationseconds)/60 ))${NC} min(s).${NC}" sleep 1m fi - if [ ! -f "${WorkDir}/.temp/AutoResetAndRetryDownloadsCnt.lk" ]; then + if [[ ! -f "${WorkDir}/.temp/AutoResetAndRetryDownloadsCnt.lk" ]]; then touch "${WorkDir}/.temp/AutoResetAndRetryDownloadsCnt.lk" trap "rm -f ${WorkDir}/.temp/AutoResetAndRetryDownloadsCnt.lk; echo ""; tput cnorm; exit" 0 1 2 3 6 15 madReset "$pInputFile" "true" "false" "false" "true" diff --git a/plugins/CatnapCtrlC.sh b/plugins/CatnapCtrlC.sh old mode 100755 new mode 100644 index 99c5cc7..4c9c3ef --- a/plugins/CatnapCtrlC.sh +++ b/plugins/CatnapCtrlC.sh @@ -1,6 +1,6 @@ #! Name: CatnapCtrlC.sh #! Author: kittykat -#! Version: 2024.09.21 +#! Version: 2025.02.18 #! Desc: Keeps mad.sh running until Ctrl-C, waiting for urls.txt updates #! Usage: Edit LoadPlugin="" line in mad.sh or mad.config #! LoadPlugin="CatnapCtrlC.sh" @@ -34,7 +34,7 @@ DoneProcessingAllUrls_CatnapCtrlC() { local plugName='CatnapCtrlC' local plugFunc='DoneProcessingAllUrls_CatnapCtrlC' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pInputFile="$1" diff --git a/plugins/SkipUrlsInDownloadsCompletedTxt.sh b/plugins/SkipUrlsInDownloadsCompletedTxt.sh old mode 100755 new mode 100644 index 4ce8d88..1c37c90 --- a/plugins/SkipUrlsInDownloadsCompletedTxt.sh +++ b/plugins/SkipUrlsInDownloadsCompletedTxt.sh @@ -1,6 +1,6 @@ #! Name: SkipUrlsInDownloadsCompletedTxt.sh #! Author: kittykat / beautfar -#! 
Version: 2024.11.06 +#! Version: 2025.02.18 #! Desc: Skips urls that already exist in downloads_completed.txt #! This is similar to the SkipOkUrlsInResultsTxt.sh script, except it is better -- #! - Creates "./data/downloads_completed.txt with only completed urls (less noise / clutter). @@ -37,7 +37,7 @@ PreProcessUrl_SkipUrlsInDownloadsCompletedTxt() { local plugName='SkipUrlsInDownloadsCompletedTxt' local plugFunc='PreProcessUrl_SkipUrlsInDownloadsCompletedTxt' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pFullLine="$1" @@ -48,25 +48,25 @@ PreProcessUrl_SkipUrlsInDownloadsCompletedTxt() { if grep -Eqi '^direct=' <<< "${pUrlOnly}" ; then pUrlOnly=${pUrlOnly/direct=/} fi - if [ -z "$pFullLine" ]; then + if [[ -z "$pFullLine" ]]; then return 0 fi local tUrl="${pUrlOnly##*\:\/\/}" - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: pFullLine: $pFullLine${NC}" echo -e "[${PINK}DEBUG${NC}]: pUrlOnly: $pUrlOnly${NC}" echo -e "[${PINK}DEBUG${NC}]: tUrl: $tUrl${NC}" fi - if [ ! -z "$tUrl" ]; then + if [[ ! -z "$tUrl" ]]; then linematch="" - if [ -f "${WorkDir}/data/downloads_completed.txt" ]; then + if [[ -f "${WorkDir}/data/downloads_completed.txt" ]]; then linematch=$(grep -Eni "[OK].*url:.*${tUrl}.*\$" "${WorkDir}/data/downloads_completed.txt") else - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: File not found: ${BLUE}${WorkDir}/data/downloads_completed.txt${NC}" fi fi - if [ ! -z "$linematch" ] ; then + if [[ ! -z "$linematch" ]] ; then echo -e "" echo -e "${GREEN}$pUrlOnly${NC} has already been downloaded in ${GREEN}downloads_completed.txt${NC}" echo -e "${BLUE}line${NC}:${NC}" @@ -75,12 +75,12 @@ PreProcessUrl_SkipUrlsInDownloadsCompletedTxt() { return 1 fi linematch="" - if [ -f "${WorkDir}/data/downloads_completed.txt" ]; then + if [[ -f "${WorkDir}/data/downloads_completed.txt" ]]; then linematch=$(grep -Eni "[REMOVED].*url:.*${tUrl}.*\$" "${WorkDir}/data/downloads_completed.txt") fi - if [ ! -z "$linematch" ] ; then + if [[ ! -z "$linematch" ]] ; then echo -e "" - echo -e "${RED}$pUrlOnly${NC} already mareked removed in ${GREEN}downloads_completed.txt${NC}" + echo -e "${RED}$pUrlOnly${NC} already marked removed in ${GREEN}downloads_completed.txt${NC}" echo -e "${BLUE}line${NC}: ${linematch//, /\\n}" removedDownload "$pUrlOnly" "URL found in ./data/downloads_completed.txt (line#: ${linematch%%:*})" return 1 diff --git a/plugins/examples/ExampleAddNewFuncAndCallOnSuccessfulDownload.sh b/plugins/examples/ExampleAddNewFuncAndCallOnSuccessfulDownload.sh old mode 100755 new mode 100644 index d680a6d..e0d9c57 --- a/plugins/examples/ExampleAddNewFuncAndCallOnSuccessfulDownload.sh +++ b/plugins/examples/ExampleAddNewFuncAndCallOnSuccessfulDownload.sh @@ -1,6 +1,6 @@ #! Name: ExampleAddNewFuncAndCallOnSuccessfulDownload.sh #! Author: kittykat -#! Version: 2024.09.21 +#! Version: 2025.02.18 #! Desc: Add a new function to MAD and call it whenever a file is successfully downloaded #! Usage: Edit LoadPlugin="" line in mad.sh or mad.config #! 
LoadPlugin="ExampleAddNewFuncAndCallOnSuccessfulDownload.sh" @@ -40,7 +40,7 @@ PostSuccessfulDownload_mysha256() { local plugName='ExampleAddNewFuncAndCallOnSuccessfulDownload' local plugFunc='PostSuccessfulDownload_mysha256' - if [ "${DebugAllEnabled}" == "true" ]; then + if [[ "${DebugAllEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pRemoteUrl="$1" @@ -59,7 +59,7 @@ PostSuccessfulDownload_mysha256() { hookHashFile() { local plugName='ExampleAddNewFuncAndCallOnSuccessfulDownload' local plugFunc='hookHashFile' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pFilePath="$1" diff --git a/plugins/examples/ExamplesMainHooks.sh b/plugins/examples/ExamplesMainHooks.sh old mode 100755 new mode 100644 index 787b414..a019a88 --- a/plugins/examples/ExamplesMainHooks.sh +++ b/plugins/examples/ExamplesMainHooks.sh @@ -1,6 +1,6 @@ #! Name: ExamplesMainHooks.sh #! Author: kittykat -#! Version: 2024.09.21 +#! Version: 2025.02.18 #! Desc: Just some examples on easy plugins... #! Usage: Edit LoadPlugin="" line in mad.sh or mad.config #! LoadPlugin="ExamplesMainHooks.sh" @@ -33,7 +33,7 @@ OnLoad_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='OnLoad_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pScriptSourceDir="$1" @@ -43,7 +43,7 @@ OnLoad_ExamplesMainHooks() { BeginProcessing_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='BeginProcessing_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pInputFile="$1" @@ -52,7 +52,7 @@ BeginProcessing_ExamplesMainHooks() { PreProcessUrl_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='PreProcessUrl_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pFullLine="$1" @@ -69,7 +69,7 @@ PreProcessUrl_ExamplesMainHooks() { PostSuccessfulDownload_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='PostSuccessfulDownload_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pRemoteUrl="$1" @@ -82,7 +82,7 @@ PostSuccessfulDownload_ExamplesMainHooks() { PostFailedDownload_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='PostFailedDownload_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pRemoteUrl="$1" @@ -93,7 +93,7 @@ PostFailedDownload_ExamplesMainHooks() { PostFailRetryDownload_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='PostFailRetryDownload_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ 
"${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pRemoteUrl="$1" @@ -104,7 +104,7 @@ PostFailRetryDownload_ExamplesMainHooks() { DoneProcessingAllUrls_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='DoneProcessingAllUrls_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pInputFile="$1" @@ -118,7 +118,7 @@ DoneProcessingAllUrls_ExamplesMainHooks() { PostSuccessfulUpload_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='PostSuccessfulUpload_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pFilePath="$1" @@ -131,7 +131,7 @@ PostSuccessfulUpload_ExamplesMainHooks() { PostFailedUpload_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='PostFailedUpload_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pFilePath="$1" @@ -143,7 +143,7 @@ PostFailedUpload_ExamplesMainHooks() { PostFailRetryUpload_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='PostFailRetryUpload_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pFilePath="$1" @@ -155,7 +155,7 @@ PostFailRetryUpload_ExamplesMainHooks() { DoneProcessingAllUploads_ExamplesMainHooks() { local plugName='ExamplesMainHooks' local plugFunc='DoneProcessingAllUploads_ExamplesMainHooks' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi local pInputFile="$1" diff --git a/plugins/ocr_captcha.sh b/plugins/ocr_captcha.sh index f2bc88a..f46c64a 100644 --- a/plugins/ocr_captcha.sh +++ b/plugins/ocr_captcha.sh @@ -1,6 +1,6 @@ #! Name: ocr_captcha.sh #! Author: kittykat -#! Version: 2025.01.14 +#! Version: 2025.02.18 #! Desc: Script to extract captcha from image using tesseract-ocr and imagemagick #! Usage: Edit LoadPlugin="" line in mad.sh or mad.config #! LoadPlugin="ocr_captcha.sh" @@ -38,13 +38,13 @@ CaptchaOcrImage() { local plugName='ocr_captcha' local plugFunc='CaptchaOcrImage' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi DEPENDENCIES=(convert) for DEPENDENCY in ${DEPENDENCIES[@]} ; do - if [ -z $(which $DEPENDENCY) ] ; then - if [ "$DEPENDENCY" == "convert" ]; then + if [[ -z $(which $DEPENDENCY) ]] ; then + if [[ "$DEPENDENCY" == "convert" ]]; then echo "imagemagick not installed. Aborting" else echo "$DEPENDENCY not installed. Aborting" @@ -56,7 +56,7 @@ CaptchaOcrImage() { data_type="$2" imagemagick_extra_params="$3" local captcha_image_filename="${captcha_image_filepath##*/}" - if [ ! -f "$captcha_image_filepath" ]; then + if [[ ! 
-f "$captcha_image_filepath" ]]; then echo -e "Image not found." return 1 fi @@ -93,13 +93,13 @@ CaptchaOcrImage() { CaptchaOcrImageTesseract() { local plugName='ocr_captcha' local plugFunc='CaptchaOcrImageTesseract' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi DEPENDENCIES=(tesseract convert) for DEPENDENCY in ${DEPENDENCIES[@]} ; do if [ -z $(which $DEPENDENCY) ] ; then - if [ "$DEPENDENCY" == "convert" ]; then + if [[ "$DEPENDENCY" == "convert" ]]; then echo "imagemagick not installed. Aborting" else echo "$DEPENDENCY not installed. Aborting" @@ -113,7 +113,7 @@ CaptchaOcrImageTesseract() { data_type="$2" imagemagick_extra_params="$3" local captcha_image_filename="${captcha_image_filepath##*/}" - if [ ! -f "$captcha_image_filepath" ]; then + if [[ ! -f "$captcha_image_filepath" ]]; then echo -e "Image not found." return 1 fi diff --git a/plugins/pjscloud.sh b/plugins/pjscloud.sh old mode 100755 new mode 100644 index 1db4a0d..c9d9e34 --- a/plugins/pjscloud.sh +++ b/plugins/pjscloud.sh @@ -1,6 +1,6 @@ #! Name: pjscloud.sh #! Author: kittykat -#! Version: 2024.10.14 +#! Version: 2025.02.18 #! Desc: Wrapper to use PhantomJSCloud to retrieve url response #! Usage: Edit LoadPlugin="" line in mad.sh or mad.config #! LoadPlugin="pjscloud.sh" @@ -27,21 +27,21 @@ pjscloud_tor_request() { local plugName='pjscloud' local plugFunc='pjscloud_tor_request' - if [ "${DebugPluginsEnabled}" == "true" ]; then + if [[ "${DebugPluginsEnabled}" == "true" ]]; then echo -e "[${PINK}DEBUG${NC}]: Running ${PINK}$plugFunc${NC} in ${BLUE}$plugName${NC} ...${NC}" fi pjs_targeturl="$1" pjs_data="$2" - if [ -z "$pjs_targeturl" ]; then + if [[ -z "$pjs_targeturl" ]]; then echo "Aborting: No target url specified." fi GetRandomPjsKey - if [ ! -z "$pjs_targeturl" ] && [ ! -z "$pjs_data" ] ; then + if [[ ! -z "$pjs_targeturl" ]] && [[ ! -z "$pjs_data" ]] ; then pjs_urldata='{"url":"'"$pjs_targeturl"'", urlSettings:{"operation":"POST", "data":"'"$pjs_data"'"}, "renderType":"plainText"}' else pjs_urldata='{"url":"'"$pjs_targeturl"'", "renderType":"plainText"}' fi - if [ "${UseTorCurlImpersonate}" == "true" ]; then + if [[ "${UseTorCurlImpersonate}" == "true" ]]; then response=$("${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" \ --connect-timeout ${ConnectTimeout} \ --insecure -k -s \