#!/bin/bash

# Copyright 2023-2025 kittykat (kittykat@morke.org) | (luvherpurr@torbox3uiot6wchz.onion)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
#
# * Some lines were taken and modified from the following scripts:
#   - 1fichier.sh by SoupeAuLait@Rindexxx
#   - download1f.sh by GotGap
#   - hexlove.sh by AnonymousCapybara
#   - 1flove.sh provided by Sweeties and the NK crew
#   - Developers of ZTools for testing hosts and apis, and providing a helpful resource
#
# Special thanks for contributions and collaboration:
# * beautfar - code fixes, bad html detection, several areas of code updates, and ideas
# * MisterFL - unzip after completion code and ideas
# * stacktrough - click captcha workarounds and pixel websock code
# * oldfart - code suggestions, helpful feedback, clipmon implementation, WorkDir, fixes
# * ysDyx - code suggestions, alternative imagemagick ocr implementation
#
# Thanks for testing, feedback, bug reports, suggestions, and encouragement:
# * zaire, Rexmaxx, TinyPanties56, klonkerz, Stifflove, samiam22, peachbutler
#
# * Everyone who provided feedback and helped test... and those who wish to remain anonymous

ScriptVersion=2025.02.02
#=================================================
# Recent Additions
# 2025.02.02 - [mad] Add function to handle urlencode of cyrillic / kanji / latin / etc
# 2025.02.02 - [ranoz] Fix handling filenames containing cyrillic / kanji / latin chars
# 2025.02.02 - [all] Reduced character processing for urlencode to special url characters
# 2025.01.30 - [isupload] Add handling of 404 Not Found on initial page fetch
# 2025.01.23 - [mad] Do not check for supported host on "direct=" lines
# 2025.01.19 - [fileditch] Add direct download url processing for fileditchfiles.me (though they block Tor now)
# 2025.01.18 - [up_nantes] Update the post retention to "week" (host removed "month" option)
# 2025.01.18 - [mad] Updates to url_encode function and addition of a utf8-to-ascii conversion function
# 2025.01.17 - [ranoz] Server's response to resume changed; set as no-resume type for now
# 2025.01.17 - [uwabaki] Add download handling for uwabaki onion address urls
# 2025.01.16 - [ranoz] Fix filenames with unicode chars in the download url
# 2025.01.16 - [up_axfc] Move convert utf8 to ascii to mad function
# 2025.01.16 - [up_uwabaki] Add uwabaki.party as upload host (1GB, no expiration, no DMCA, no logs)
# 2025.01.14 - [gagneux / up_gagneux] Add fichier.gagneux.info as upload / download host
# 2025.01.14 - [uwabaki] Add uwabaki.party as download host
# 2025.01.14 - [fileblade] Additional retries and handling for blocked Tor ips (until alternative)
# 2025.01.13 - [ocr_captcha] Create imagemagick OCR function for testing without tesseract
# 2025.01.13 - [anonfile, dailyuploads] Update ocr call to use tesseract function
# 2025.01.13 - [up_anonfile] Modify to use new upload url
# 2025.01.12 - [ateasystems] Update 404 Not Found response
# 2025.01.11 - [mad] Update direct head response handling
# 2025.01.11 - [ranoz] Add 404 Not Found handling on head
# 2025.01.09 - [ranoz] Add handling of "NEXT_NOT_FOUND" response
# 2025.01.09 - [fileblade] Fix cdn url parsing
# 2025.01.08 - [up_pixeldrain] Fix success response from pixeldrain
# 2025.01.08 - [ramsgaard / up_ramsgaard] Add data.ramsgaard.me as upload / download host
# 2025.01.08 - [euromussels / up_euromussels] Add uploads.euromussels.eu as upload / download host
# 2025.01.07 - [up_fileland] Add fileland.io as upload host
# 2025.01.07 - [up_fireget] Add fireget.com as upload host
# 2025.01.06 - [uploadhive] Update the removed / gone response detection
# 2025.01.06 - [fileblade] Add "user does not allow free downloads over 100MB" response (and warnings)
# 2025.01.06 - [desiupload] Add desiupload as download host
# 2025.01.05 - [isupload] Fix filename detection
# 2025.01.03 - [gofile] Detect "Bulk download is a Premium feature" response (no children)
# 2025.01.02 - [up_axfc] Update PUT response check to handle kanji chars (remove)
# 2025.01.02 - [dashfile] Add response 'This file reached max downloads limit'. New cookie on captcha fail
# 2024.12.28 - [dashfile] Update captcha code check
# 2024.12.28 - [anonfile] Add new download link href response
# 2024.12.28 - [fileblade] Add additional response handling (subsequent downloads, unknown warnings)
# 2024.12.28 - [eternalhosting] Update eternal.onion to handle landing page (eternal.onion/file/)
# 2024.12.26 - [up_kouploader / up_axfc / up_torup] Fixed failedRetryUpload (was using download logging)
#              * Thanks Belky
# 2024.12.26 - [anonfile / up_anonfile] Add anonfile.de as upload / download host
# 2024.12.25 - [dashfile / up_dashfile] Add dashfile.net as upload / download host
# 2024.12.25 - [isupload] Change to use tor_curl_request_extended (server response is often slow)
#              - Accept 200 OK response to continue (do not require filename / filesize)
# 2024.12.25 - [fileblade] Add response handling for free file download disallowed > 100MB.
#              "The file owner does not allow FREE users to download files which are over 100 MB"
# 2024.12.25 - [mad] Add "UploadHiveRandomizeExt" option to config with default=true
# 2024.12.25 - [uploadhive / up_uploadhive] Update renaming random ext files to their original names
#              - Handle multipart 7z & rar (abc.7z.###, abc.part#.rar)
# 2024.12.25 - [syspro / up_syspro] Add share.syspro.com.br as upload / download host
# 2024.12.24 - [mad] Add EnabledUploadHosts / EnabledDownloadHosts setting to fine-tune which hosts to use
#              ** Options: recommended, online, all (Default=recommended)
#              - recommended: loads all hosts verified working with MAD
#              - online: loads all hosts available online / working (includes captcha / js restricted)
#              - all: loads all hosts in hosts folder
# 2024.12.24 - [up_ateasystems / ateasystems] Add share.ateasystems.com as upload / download host
# 2024.12.23 - [up_uploadbay / uploadbay] Add uploadbay.net as upload / download host
# 2024.12.23 - [up_herbolistique / herbolistique] Add transfert.herbolistique.com as upload / download host
# 2024.12.23 - [uploadhive] Auto-rename random extension downloads
# 2024.12.23 - [up_uploadhive] Change upload file extension to random 3 letters (uhive blocks .7z, .zip, .rar now)
# 2024.12.23 - [up_offshorecat] Fixed upload. Updated apikey.
# 2024.12.23 - [up_fileditch] Fixed upload. Added response handling for Tor blocked node (retries)
# 2024.12.23 - [up_freesocial / freesocial] Add files.freesocial.co as upload / download host
# 2024.12.23 - [up_cyssoux / cyssoux] Add partage.cyssoux.fr as upload / download host
# 2024.12.22 - [mad] Add jira_Upload function -- used for all jirafeau hosts
# 2024.12.22 - [up_*AllJiraHosts*] Consolidated / moved all hosts upload functions to mad.sh
#              - Minimized jira host code (~6000 lines of duplicates removed)
#              - Jira hosts: acid, anarchaserver, depotkaz, dictvm, eddowding, familleflender, filesquid,
#                free4e, harrault, linxx, moocloud, nantes, netlib, skrepr, soyjak
# 2024.12.20 - [fileblade / up_fileblade] Add fileblade.com as upload / download host
# 2024.12.20 - [isupload / up_isupload] Add isupload.com as upload / download host
# 2024.12.15 - [mediafire] Add mediafire download link processing

# -- See ./documentation/!Changelog (Historical).txt for further changes -- #


#=================================================
# SCRIPT GLOBALS Section
#-------------------

# TorIp (local network ip tor traffic is routed through)
# @Default=127.0.0.1
TorIp=127.0.0.1

# UseTorCurlImpersonate: This is helpful to circumvent the Cloudflare script protection of hexload, filedot, and other hosts.
# If "true", the curl-impersonate dependencies are required to be in the script directory [curl-impersonate-ff, curl_ff109].
# Instructions on downloading them are displayed if run without them while this is set to "true".
# If "false", the normal curl is used.
# ** Both route through Tor and are safe
# @Default=false
UseTorCurlImpersonate=false

# Overrides the location for all the working files (urls.txt, downloads, completed, .temp, .flocks, .debug)
# * The mad.sh script can reside anywhere (optional mad.config must be in the same folder if being used)
# * The WorkDirOverride path is where everything else will reside
# @Default= <blank> (no override / use the same folder as the mad.sh script)
WorkDirOverride=""

# VerboseLoading: Determines verbosity of display for Hosts / Plugins loading
# If "true", all hosts and plugins will be displayed as they load
# If "false" or blank, only "FAIL" status messages are shown
# @Default=true
VerboseLoading=true

# [RateMonitor]: Enable the Download / Upload Speed Rate Monitor that restarts downloads / uploads
# if they stay below X bytes/sec for X seconds.
# DownloadSpeedMin / DownloadTimeoutInterval / UploadSpeedMin / UploadTimeoutInterval are specified below
# @Default=true
RateMonitorEnabled=true

# Plugin System: This allows writing a script file that will be loaded / overlaid upon launch, allowing users to
# add functions and to override (hook) functions already in mad.sh with additional functionality
# (ie. unzip after successful download, skip urls that are already in completed.txt)
## Hook functions provided for ease of overriding (see the example sketch below the LoadPlugins setting):
# * OnLoad(): Occurs after load mad.config / load plugins (prior to processing).
# * BeginProcessing(): Occurs immediately after beginning processing of urls.txt (loops with Catnaps).
# * PreProcessUrl(): Occurs immediately after reading in an unprocessed url (^http) line to process.
# * PostSuccessfulDownload(): Occurs after a download success (is marked #OK# in the urls.txt).
# * PostFailedDownload(): Occurs after a download fails (is marked #FAIL# in the urls.txt).
# * PostFailRetryDownload(): Occurs after a download fails with a retry (is marked #RETRY# in the urls.txt).
# * DoneProcessingAllUrls(): Occurs after all the urls have finished processing (no flocks or other terminals downloading).
# * PostSuccessfulUpload(): Occurs after an upload success (after upload completed ticket is created in ./completed/).
# * PostFailedUpload(): Occurs after an upload fails definitively -- #FAIL# in the uploads_processed.txt
# * PostFailRetryUpload(): Occurs after an upload fails with a retry (network drop, unexpected result)
# * DoneProcessingAllUploads(): Occurs after all the files have finished processing
# @Default=""
# @Recommended="pjscloud.sh,ocr_captcha.sh"
# @MyCurrent="pjscloud.sh,ocr_captcha.sh,SkipUrlsInDownloadsCompletedTxt.sh"
LoadPlugins=""

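# Example plugin (a minimal sketch, not shipped with mad.sh): a file listed in
# LoadPlugins is loaded / overlaid at launch and can simply redefine any hook above.
# The plugin filename and echo message are illustrative only; the parameter
# order matches the PostSuccessfulDownload call made by successDownload() below.
#
#   --- MyNotify.sh ---
#   PostSuccessfulDownload() {
#     local url="$1" filepath="$2" filename="$3" folder="$4" filesize="$5"
#     echo "[MyNotify] ${filename} (${filesize} bytes) saved to ${filepath}"
#   }
#
# Enable it with: LoadPlugins="MyNotify.sh"
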
# Enabled Hosts: (Upload / Download) [ "recommended", "online", "all" ]
# -- Last Checked / Updated: 2024.12.23
# -- Available options --
# * "recommended" -- Loads hosts that currently work with MAD
# * "online" -- Loads hosts that are available online (includes captcha / js restricted)
# * "all" -- Loads all hosts (blank/unknown is also "all")
# @Default="recommended" (only load hosts that are verified working with MAD)
EnabledUploadHosts="recommended"
EnabledDownloadHosts="recommended"

#=================================================
# UPLOAD SECTION
#-------------------

# Upload file retries per host when uploading
# @Default=4
MaxUploadRetries=4

# Selected upload hosts
# @Default=1f,uhive,oshi
DefaultUploadHosts='1f,oshi,gofile'

# [RateMonitor - UploadSpeedMin]: Minimum required Upload Speed in bytes (used in coordination with UploadTimeoutInterval)
# This helps ensure an upload doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true)
# ie. curl: (28) Operation too slow. Less than 10 bytes/sec transferred the last 300 seconds
# @Default=10
UploadSpeedMin=10

# [RateMonitor - UploadTimeoutInterval]: Amount of time in seconds a transfer can remain below the UploadSpeedMin before it times out.
# This helps ensure an upload doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true)
# ie. curl: (28) Operation too slow. Less than 10 bytes/sec transferred the last 300 seconds
# @Default=300 (5 min)
UploadTimeoutInterval=300


#=================================================
# DOWNLOAD SECTION
#-------------------

# Number of 1F circuit retries per instance to get an empty slot (# of CRs x 10 instances)
# @Default=100
CircuitRetries=100

# Connection attempt timeout (tor_curl/tor_curl_impersonate)
# @Default=18
ConnectTimeout=18

# Url processing retries (through drops, bad nodes, captchas, unrecognized error codes, disconnects, etc.)
# This is a complete restart of a failed url (e.g. failure to fetch file info or pre-download url info).
# @Default=15
MaxUrlRetries=15

# Download file retries (through drops, bad nodes, captchas, unrecognized error codes, disconnects, etc.)
# This is a quick restart of a failed download (we have the cdn or download url and file info).
# @Default=8
MaxDownloadRetries=8

# [RateMonitor - DownloadSpeedMin]: Minimum required Download Speed in bytes (used in coordination with DownloadTimeoutInterval)
# This helps ensure a download doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true)
# ie. curl: (28) Operation too slow. Less than 5000 bytes/sec transferred the last 60 seconds
# @Default=5000
DownloadSpeedMin=5000

# [RateMonitor - DownloadTimeoutInterval]: Amount of time in seconds a transfer can remain below the DownloadSpeedMin before it times out.
# This helps ensure a download doesn't go stale and hit a speed of 0 for too long. (! Requires RateMonitorEnabled=true)
# ie. curl: (28) Operation too slow. Less than 5000 bytes/sec transferred the last 60 seconds
# @Default=60
DownloadTimeoutInterval=60

# Minimum download size in bytes: Require hosts to report > x bytes [0=disable]
# @Default=100 bytes
MinimumAllowedFilesize=100

# Automatically repair partial downloads with bad data (bad node, too many connections from IP, blocked, etc.)
# True: Allows resuming through a bad node.
# False: Restarts from the beginning on a bad node.
# @Default=true
AutoRepairBadPartials=true

# Auto rename duplicate filenames being downloaded
# This will prepend the filename with a timestamp ie. MyFile.rar --> 20240801124552305_renamed_MyFile.rar
# @Default=false
AutoRenameDuplicateFilenames=false

# Loop through urls.txt until all urls are processed (# commented out)
# - Retry skipped urls, duplicate files from different hosts with the same name, failures, etc.
# @Default=true
LoopThroughFileUntilComplete=true

# Reload the urls.txt if it changes
# @Default=true
AutoReloadOnFilelistTxtChanges=true

# Auto Comment out Success / Failed Downloads in urls.txt ("url" --> "#url #STATUS# filename/message")
# @Default=true
AutoCommentOnCompletion=true


#=================================================
# SCRIPT EXTRAS SECTION
#-------------------

# [OsType]: Used to determine how to launch terminals (multi arguments) [exo-launcher, xterm, gnome-terminal]
# Options: Whonix-exo, Linux-xterm, Linux-gnome
# @Default=Whonix-exo
OsType="Whonix-exo"

# Auto show "Mad Status" after complete
# @Default=true
AutoShowMadStatus=true

# Clear the console on urls.txt changes
# @Default=false
ClearScreenOnAutoReload=false

# Time to wait catnapping while waiting for urls to process or the list to complete (in minutes)
# @Default=1
CatnapDuration=1


#=================================================
# HOST SPECIFIC SECTION
#-------------------

# [uploadhive]: Randomize extension (bypass 7z, zip, rar block)
# [{"file_code":"undef","file_status":"unallowed extension"}]
UploadHiveRandomizeExt=true

# [Oshi]: Control BaseUrl Override (none, oshiat, oshionion)
# none: Will download from whatever url base is passed in
# oshiat: Will convert all oshi urls to oshi.at (clearnet, faster)
# oshionion: Will convert all oshi urls to oshi.onion (onion, slower)
# @Default=oshiat
OshiBaseUrlOverride="oshiat"

# [PixelDrain]: Use bypass method (may not be as fast, and with ViewPump enabled, not necessary)
# @Default=false
UsePixeldrainBypass=false

# [FileDot]: Enable processing of filedot urls (must add account details below)
# @Default=false
EnableFiledotProcessing=false

# [FileDot - Filedot User / Pass list]
# ! If you wish to use filedot processing, you must create an account or accounts and enter the user/pass details below.
# - Setup free accounts on the signup page (use any email address as they are not verified). Add the account information
#   below and remove/add any lines for accounts.
# - The accounts are randomly selected for every download (each account has a 5GB/day download limit).
ar_fdUP[0]="user1|pass1"
#ar_fdUP[1]="user2|pass2" # Uncomment line to use a 2nd account
#ar_fdUP[2]="user3|pass3" # Uncomment line to use a 3rd account
#ar_fdUP[3]="user4|pass4" # Uncomment line to use a 4th account
#ar_fdUP[4]="user5|pass5" # Uncomment line to use a 5th account

# [OffShore.cat Upload ApiKeys]
# ! If you wish to use OffShore.cat uploading, you must create an account and get an apikey (or use the general public
#   one shared here).
# - Setup free accounts: https://files.offshore.cat/register (use any username/pass - not verified)
# - Get apikey: https://files.offshore.cat/dashboard/account (use login created above)
# - The accounts are randomly selected for every upload.
ar_oscKey[0]='4GDsorzK4e1yowrCiZaBnS992uKjiZVnXbByJr0kHmaAxarP26LkRV79MbKACXt0' # Shared general
#ar_oscKey[1]='apikey' # Uncomment line to use a 2nd
#ar_oscKey[2]='apikey' # Uncomment line to use a 3rd
#ar_oscKey[3]='apikey' # Uncomment line to use a 4th
#ar_oscKey[4]='apikey' # Uncomment line to use a 5th

# [pixeldrain.com Upload ApiKeys]
# ! If you wish to use pixeldrain uploading, you must create an account and get an apikey (or use the general public
#   one shared here).
# - Setup free accounts: https://pixeldrain.com/register (use any username/pass - not verified)
# - Get apikey: https://pixeldrain.com/user/api_keys (use login created above)
# - The accounts are randomly selected for every upload.
ar_pdKey[0]='cad31e7f-676d-4d47-a41b-b32087bee0c2' # Shared general
#ar_pdKey[1]='apikey' # Uncomment line to use a 2nd
#ar_pdKey[2]='apikey' # Uncomment line to use a 3rd
#ar_pdKey[3]='apikey' # Uncomment line to use a 4th
#ar_pdKey[4]='apikey' # Uncomment line to use a 5th

# [PhantomJS Keys]: pjscloud.sh plugin
# ! Required for pixeldrain ViewPump and pjscloud functionality (enable / disable each in the pjscloud.sh file)
# * Optional: you can create additional / personal accounts and enter the apikey details below to ensure api resources.
# - Setup free accounts on the signup page (use a temporary email, and connect from proxy or VPN). Add the apikey information
#   below and remove/add any lines for apikeys.
# - The keys are randomly selected for every download (each account has a 500 requests/day limit for free accounts).
ar_pgsKey[0]='ak-shp9s-6zqr2-d30tt-9h64j-a0zkz'
ar_pgsKey[1]='ak-2dxy6-mjhk5-ypk2m-dtgvj-4vvbb' # 2nd key
ar_pgsKey[2]='ak-akvyb-b0kpd-yrxp0-3wnkg-qjwb8' # 3rd key
ar_pgsKey[3]='ak-z5y5h-s5kpr-kkxpz-rnpxw-xpet2' # 4th key
ar_pgsKey[4]='ak-ygp2m-rhp5v-znjt2-aq9x9-pz3j9' # 5th key
ar_pgsKey[5]='ak-fgdhj-j6fad-ap0j1-ta529-v5gwh' # 6th key
ar_pgsKey[6]='ak-x2ng1-cr476-k4bph-ae8ks-9eg45' # 7th key
ar_pgsKey[7]='ak-s6k8z-wb6fz-dgb37-j268v-mgspe' # 8th key
ar_pgsKey[8]='ak-msdn7-vs5jr-4kknq-3qgw7-grj57' # 9th key
ar_pgsKey[9]='ak-77pgx-g1ge9-twmhy-em51a-p8p53' # 10th key

# Global pjscloud enabled hosts
PJSCloud_pixeldrain=true # Enables pixeldrain ViewPump (pjscloud.sh plugin required)
PJSCloud_hexload=false # Enables hexload download2 javascript response handling (WIP)
PJSCloud_daily=false # Enables dailyuploads recaptcha javascript response handling (WIP)


#=================================================
# DEBUGGING
#-------------------

# DebugAllEnabled: Debug host response output to ./.debug folder
# @Default=false
DebugAllEnabled=false

# DebugPluginsEnabled: Debug plugin hook entry and flow to screen
# @Default=false
DebugPluginsEnabled=false


#=================================================
# Other Script Globals (don't change)
#-------------------------------------------------
ScriptDir="$( cd "$( dirname "$(realpath "$0")" )" && pwd )"
ScriptPid=$$
WorkDir=${ScriptDir}
ListHostAndDomainRegexes=""
ListUploadHosts=""
HashFilelistTxt=""
MoveToFolder=""
#! Global var for passing Key/Value pairs to plugins / functions (accessible in functions)
#! Can add any #key=value line in the urls.txt file and then access it via the UrlsVars array
#! UrlsVars[$key]="$val" ie. ${UrlsVars[pw]} would be the current line #pw=value
declare -A UrlsVars
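# Example (sketch): a urls.txt fragment using the key/value mechanism above.
# The password value and url are illustrative.
#
#   #pw=secret123
#   https://example.com/file/abc123
#
# While that url is processed, host handlers can read ${UrlsVars[pw]} -> "secret123".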
InputFile=""
warnAndRetryUnknownError=false
exitDownloadNotAvailable=false
fileAlreadyDone=false
file_id=""
file_url=""
filename=""
filename_override=""
file_path=""
download_url=""
listFdotLimitReached=""

checkTor() {
  local torPort=
  for port in 9050 9150 ; do
    echo "" 2>/dev/null > /dev/tcp/${TorIp}/$port
    if [ "$?" == "0" ] ; then
      torPort=$port
    fi
  done
  printf "%d" $torPort
}

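# Usage sketch (assumption: the caller stores the detected SOCKS port for the
# tor_curl_* wrappers below; checkTor prints 0 when neither port answers):
#   torPort=$(checkTor)
#   if [ "$torPort" == "0" ]; then echo "Tor not reachable on ${TorIp}:9050/9150"; fi
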
tor_curl_request() {
  if [ "${UseTorCurlImpersonate}" == "true" ]; then
    "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
  else
    curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
  fi
}

tor_curl_request_extended() {
  randomtimeout=$((30 + RANDOM % (60 - 30)))
  if [ "${UseTorCurlImpersonate}" == "true" ]; then
    "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
  else
    curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout $randomtimeout --compressed --globoff "$@"
  fi
}

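# Usage sketch (tor_identity / torPort are expected to be set elsewhere in the
# script; the url is illustrative). Extra curl options pass straight through:
#   html=$(tor_curl_request -s -L "https://example.com/file/abc123")
# tor_curl_request_extended behaves the same but picks a random 30-59s connect
# timeout, which suits hosts that answer slowly over Tor.
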
tor_curl_upload() {
  if [ "${UseTorCurlImpersonate}" == "true" ]; then
    if [ "${RateMonitorEnabled}" == "true" ]; then
      "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval --compressed --globoff "$@"
    else
      "${curl_impersonate[@]}" --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --compressed --globoff "$@"
    fi
  else
    if [ "${RateMonitorEnabled}" == "true" ]; then
      curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} --speed-limit $UploadSpeedMin --speed-time $UploadTimeoutInterval -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
    else
      curl --proxy "socks5h://${tor_identity}@${TorIp}:${torPort}" -4 --connect-timeout ${ConnectTimeout} -H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'Origin: null' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: cross-site' -H 'Sec-Fetch-User: ?1' -H 'TE: trailers' --compressed --globoff "$@"
    fi
  fi
}

SetEnabledUploadHosts() {
  if [[ "$EnabledUploadHosts" == "recommended" ]] ; then
    lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bowfile,up_depotkaz,up_familleflender,"
    lstEnabledUploadHosts+="up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile,up_harrault,"
    lstEnabledUploadHosts+="up_isupload,up_kouploader,up_moocloud,up_nantes,up_offshorecat,up_oshi,"
    lstEnabledUploadHosts+="up_pixeldrain,up_quax,up_ranoz,up_skrepr,up_torup,up_turboonion,up_uploadee,"
    lstEnabledUploadHosts+="up_uploadhive,up_uploadraja,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
    lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
    lstEnabledUploadHosts+="up_gagneux,up_uwabaki"
  elif [[ "$EnabledUploadHosts" == "online" ]] ; then
    lstEnabledUploadHosts="up_1fichier,up_anonsharing,up_axfc,up_bedrive,up_bowfile,up_depotkaz,"
    lstEnabledUploadHosts+="up_familleflender,up_fileblade,up_fileditch,up_firestorage,up_free4e,up_gofile,"
    lstEnabledUploadHosts+="up_harrault,up_hexload,up_isupload,up_kouploader,up_kraken,up_moocloud,up_nantes,"
    lstEnabledUploadHosts+="up_nippy,up_nofile,up_offshorecat,up_oshi,up_pixeldrain,up_quax,up_ranoz,"
    lstEnabledUploadHosts+="up_shareonline,up_skrepr,up_torup,up_turboonion,up_uploadee,up_uploadhive,"
    lstEnabledUploadHosts+="up_uploadraja,up_yolobit,up_herbolistique,up_uploadbay,up_ateasystems,up_syspro,"
    lstEnabledUploadHosts+="up_dashfile,up_anonfile,up_fileland,up_fireget,up_euromussels,up_ramsgaard,"
    lstEnabledUploadHosts+="up_gagneux,up_uwabaki"
  fi
}

SetEnabledDownloadHosts() {
  if [[ "$EnabledDownloadHosts" == "recommended" ]] ; then
    lstEnabledDownloadHosts="1fichier,acid,anarchaserver,anonsharing,biteblob,bowfile,cyssoux,dataupload,"
    lstEnabledDownloadHosts+="depotkaz,dictvm,downloadgg,eddowding,eternalhosting,familleflender,fileblade,"
    lstEnabledDownloadHosts+="fileditch,filedoge,filedot,filehaus,filesquid,firestorage,free4e,freesocial,"
    lstEnabledDownloadHosts+="gofile,harrault,innocent,isupload,lainsafe,lainsafe_onion,linxx,mediafire,"
    lstEnabledDownloadHosts+="moocloud,nantes,netlib,offshorecat,oshi,pixeldrain,quax,ranoz,skrepr,"
    lstEnabledDownloadHosts+="tempfileme,tempsh,torup,turboonion,up2share,uploadee,uploadev,uploadhive,"
    lstEnabledDownloadHosts+="youdbox,herbolistique,uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,"
    lstEnabledDownloadHosts+="fileland,fireget,euromussels,ramsgaard,uwabaki,gagneux"
  elif [[ "$EnabledDownloadHosts" == "online" ]] ; then
    lstEnabledDownloadHosts="1fichier,anonsharing,bedrive,biteblob,bowfile,click,cyssoux,"
    lstEnabledDownloadHosts+="dailyuploads,dataupload,depotkaz,dictvm,dosya,downloadgg,eddowding,eternalhosting,"
    lstEnabledDownloadHosts+="familleflender,fileblade,fileditch,filedoge,filedot,firestorage,"
    lstEnabledDownloadHosts+="free4e,gofile,harrault,hexload,isupload,kraken,lainsafe,"
    lstEnabledDownloadHosts+="lainsafe_onion,mediafire,moocloud,nantes,netlib,nippy,nofile,offshorecat,"
    lstEnabledDownloadHosts+="oshi,pixeldrain,quax,ranoz,shareonline,skrepr,tempfileme,tempsh,torup,"
    lstEnabledDownloadHosts+="turboonion,up2share,uploadee,uploadev,uploadhive,yolobit,youdbox,herbolistique,"
    lstEnabledDownloadHosts+="uploadbay,ateasystems,syspro,dashfile,anonfile,desiupload,fileland,fireget,"
    lstEnabledDownloadHosts+="euromussels,ramsgaard,uwabaki,gagneux"
  fi
}

GetRandomFiledotUser() {
  arrSize=${#ar_fdUP[@]}
  index=$(($RANDOM % $arrSize))
  RandomFdotUser=${ar_fdUP[$index]}
  local tFdotUser=${RandomFdotUser%%\|*}
  if [ -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" ]; then
    lastModSeconds=$(date -r "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" +%s)
    currSeconds=$(date +%s)
    elapsedSeconds=$((currSeconds - lastModSeconds))
    elapsedHours=$((elapsedSeconds / 60 / 60))
    if ((elapsedHours >= 24)); then
      rm -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt"
    fi
  fi
  if [ -f "${WorkDir}/.temp/_fdot-limitreached-accounts.txt" ]; then
    listFdotLimitReached=$(cat "${WorkDir}/.temp/_fdot-limitreached-accounts.txt")
  else
    listFdotLimitReached=""
  fi
  maxAccCheckCnt=0
  while [[ "${listFdotLimitReached}" == *"${tFdotUser}|"* ]]; do
    maxAccCheckCnt=$((maxAccCheckCnt + 1))
    index=$(($RANDOM % $arrSize))
    RandomFdotUser=${ar_fdUP[$index]}
    tFdotUser=${RandomFdotUser%%\|*}
    if (( maxAccCheckCnt >= 1000 )); then
      RandomFdotUser="#ALLACCOUNTSMAXED#|##"
      break
    fi
  done
}

mconvert_utf8_to_ascii() {
  local response_ascii=$(echo "$1" | iconv -c -f UTF-8 -t ASCII//TRANSLIT)
  printf "%s" "$response_ascii"
}

urlencode_literal_grouped_case() {
  local inputCleaned=$(echo -en "$@")
  local out=$(echo "$inputCleaned" \
    | sed \
      -e 's/%/%25/g' \
      -e 's/ /%20/g' \
      -e 's/!/%21/g' \
      -e 's/"/%22/g' \
      -e "s/'/%27/g" \
      -e 's/#/%23/g' \
      -e 's/(/%28/g' \
      -e 's/)/%29/g' \
      -e 's/+/%2b/g' \
      -e 's/,/%2c/g' \
      -e 's/-/%2d/g' \
      -e 's/:/%3a/g' \
      -e 's/;/%3b/g' \
      -e 's/?/%3f/g' \
      -e 's/@/%40/g' \
      -e 's/\$/%24/g' \
      -e 's/\&/%26/g' \
      -e 's/\*/%2a/g' \
      -e 's/\./%2e/g' \
      -e 's/\//%2f/g' \
      -e 's/\[/%5b/g' \
      -e 's/\\/%5c/g' \
      -e 's/\]/%5d/g' \
      -e 's/\^/%5e/g' \
      -e 's/_/%5f/g' \
      -e 's/`/%60/g' \
      -e 's/{/%7b/g' \
      -e 's/|/%7c/g' \
      -e 's/}/%7d/g' \
      -e 's/~/%7e/g'
  )
  echo "$out"
}

urlencode_literal_grouped_case_advanced() {
  local inputCleaned=$(echo -en "$1")
  string=$inputCleaned; format=; set --
  while
    literal=${string%%[!-._~0-9A-Za-z]*}
    case "$literal" in
      ?*)
        format=$format%s
        set -- "$@" "$literal"
        string=${string#$literal};;
    esac
    case "$string" in
      "") false;;
    esac
  do
    tail=${string#?}
    head=${string%$tail}
    format=$format%%%02x
    set -- "$@" "'$head"
    string=$tail
  done
  printf "$format\\n" "$@"
}

urlencode_literal_grouped_case_urlendingonly() {
  echo "${1%/*}""/""$(urlencode_literal_grouped_case "${1##*/}")"
}

urlencode_literal_grouped_case_advanced_urlendingonly() {
  echo "${1%/*}""/""$(urlencode_literal_grouped_case_advanced "${1##*/}")"
}

urldecode() {
  : "${*//+/ }"; echo -e "${_//%/\\x}";
}

urlencode_spaces() {
  echo "${1// /%20}"
}

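# Examples (sketch; expected outputs shown as comments):
#   urlencode_literal_grouped_case "my file (1).bin"    # -> my%20file%20%281%29%2ebin
#   urlencode_literal_grouped_case_advanced "naïve.txt" # -> na%c3%afve.txt (encodes every byte outside [-._~0-9A-Za-z])
#   urlencode_literal_grouped_case_urlendingonly "https://host/dir/a b.txt" # encodes only the final path segment
#   urldecode "my%20file"                               # -> my file
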
sanitize_file_or_folder_name() {
  local var="$*"
  var="${var//\%20/ }"
  var="${var//[\*\"\/\\\<\>\:\|\?$'\t\r\n']/_}"
  var=$(trim_string "$var")
  echo "$var"
}

sanitize_html_tags() {
  local var="$*"
  var=$(sed -e 's/<[^>]*>//g' <<< "${var}")
  echo "$var"
}

trim_string() {
  local var="$*"
  while [[ $var == ' '* ]]; do
    var="${var## }"
  done
  while [[ $var == *' ' ]]; do
    var="${var%% }"
  done
  echo "$var"
}

literalize_string() {
  lit_string=$1
  lit_string=${lit_string//\&/\\\&}
  lit_string=${lit_string//\|/\\\|}
  lit_string=${lit_string//\[/\\\[}
  lit_string=${lit_string//\]/\\\]}
  lit_string="${lit_string//[$'\t\r\n']}"
  echo "${lit_string}"
}

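# Example: literalize_string escapes &, |, [ and ] and strips tabs/newlines so
# a filename is safe on the right-hand side of the sed replacements used below:
#   literalize_string "a&b [v1].7z"   # -> a\&b \[v1\].7z
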
GetFileSize() {
  local filepath=$1
  local humanreadable=$2
  if [ -f "$filepath" ]; then
    if [[ "$humanreadable" == "true" ]]; then
      echo $(wc -c < "$filepath" | numfmt --to=iec)
    else
      echo $(wc -c < "$filepath")
    fi
  else
    echo 0
  fi
}

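# Usage sketch:
#   bytes=$(GetFileSize "./downloads/file.bin")          # -> e.g. 1048576
#   human=$(GetFileSize "./downloads/file.bin" "true")   # -> e.g. 1.0M
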
TrimWhitespace() {
  echo $(sed 's/ *$//' <<< "$1")
}

CleanInputFile() {
  sed -i 's/[ \t]*$//' "$1"
}

LaunchTerminal() {
  selhost=$1
  selinfile=$2
  script_source="$0"
  if ! grep -Eqi "^(\./|/)" <<< "$script_source" ; then
    script_source="${ScriptDir}/$script_source"
  fi
  if [ -f "$script_source" ] ; then
    printf "Spawning terminal for $script_source $selhost $selinfile\\n"
    if [ "$OsType" == "Whonix-exo" ]; then
      if [ "$selhost" == "allhosts" ]; then
        exo-open --launch TerminalEmulator bash -c "$script_source $selinfile" >/dev/null 2>&1
      else
        exo-open --launch TerminalEmulator bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1
      fi
    elif [ "$OsType" == "Linux-xterm" ]; then
      if [ "$selhost" == "allhosts" ]; then
        xterm -e /bin/bash -c "$script_source $selinfile" >/dev/null 2>&1
      else
        xterm -e /bin/bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1
      fi
    elif [ "$OsType" == "Linux-gnome" ]; then
      if [ "$selhost" == "allhosts" ]; then
        gnome-terminal -- /bin/bash -c "$script_source $selinfile" >/dev/null 2>&1
      else
        gnome-terminal -- /bin/bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1
      fi
    else
      if [ "$selhost" == "allhosts" ]; then
        exo-open --launch TerminalEmulator /bin/bash -c "$script_source $selinfile" >/dev/null 2>&1
      else
        exo-open --launch TerminalEmulator /bin/bash -c "$script_source $selhost $selinfile" >/dev/null 2>&1
      fi
    fi
  fi
}

ReloadScript() {
  script_source="$0"
  passedArgs=$@
  if [ -z "$passedArgs" ] && [ ! -f "${InputFile}" ] ; then
    InputFile="${WorkDir}/${InputFile}"
  fi
  if ! grep -Eqi "^(\./|/)" <<< "$script_source"; then
    script_source="${ScriptDir}/$script_source"
  fi
  if [ -f "$script_source" ]; then
    if [ ! -z "$passedArgs" ] ; then
      exec "$script_source" "$@"
    else
      exec "$script_source" "${InputFile}"
    fi
  fi
}

check_file_extension() {
  if [[ "$filename" == *.zip ]]; then
    echo -e "${RED}The zip file format is considered dangerous, aborting download.${NC}"
    return 1
  elif [[ "$filename" == *.exe ]]; then
    echo -e "${RED}The file is an executable program (.exe), aborting download.${NC}"
    return 1
  else
    return 0
  fi
}

debugHtml() {
  local fileCntName="$1"
  local functionName="$2"
  local downloadHtml=$3
  if [ ! -d "${WorkDir}/.debug" ] ; then
    mkdir -p "${WorkDir}/.debug"
  fi
  echo -e "${downloadHtml}" >> "${WorkDir}/.debug/`date +%y%m%d-%H%M%S`_${fileCntName}_${functionName}.txt"
}

addResultsNote() {
  local url="${1//[$'\t\r\n']}"
  local note="$2"
  mkdir -p "${WorkDir}/downloads"
  echo -e "[NOTE] ${url}, $note" >> "${WorkDir}/downloads/results.txt"
}

successDownload() {
  local url="${1//[$'\t\r\n']}"
  local filename=$(literalize_string "$2")
  local folder=$(literalize_string "$3")
  local filesize=$(literalize_string "$4")
  local filepath=$(literalize_string "$5")
  mkdir -p "${WorkDir}/data"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [OK] url: ${url}, name: ${filename}, size: ${filesize}, pw: ${UrlsVars[pw]}, path: ${filepath}" >> "${WorkDir}/data/downloads_completed.txt"
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [OK] url: ${url}, name: ${filename}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #OK# ${filename}>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #OK# ${filename}>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename}>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
  PostSuccessfulDownload "${url}" "${filepath}" "${filename}" "${folder}" "${filesize}"
}

successDownloadExists() {
  local url="${1//[$'\t\r\n']}"
  local filename=$(literalize_string "$2")
  local folder=$(literalize_string "$3")
  local filesize=$(literalize_string "$4")
  local filepath=$(literalize_string "$5")
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [EXISTS] url: ${url}, name: ${filename}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #OK# ${filename} (File exists)>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

failedRetryDownload() {
  local url="${1//[$'\t\r\n']}"
  local message=$(literalize_string "$2")
  local message2=$(literalize_string "$3")
  if [ ! -z "$message2" ]; then
    message="$message, $message2"
  fi
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [RETRY] ${url}, ${message}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #RETRY# ${message}>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${message}>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
  PostFailRetryDownload "${url}" "${message}" "${message2}"
}

failedDownload() {
  local url="${1//[$'\t\r\n']}"
  local message=$(literalize_string "$2")
  local message2=$(literalize_string "$3")
  if [ ! -z "$message2" ]; then
    message="$message, $message2"
  fi
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [FAIL] ${url}, ${message}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #FAIL# $message>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #FAIL# $message>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #FAIL# $message>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
  PostFailedDownload "${url}" "${message}" "${message2}"
}

removedDownload() {
  local url="${1//[$'\t\r\n']}"
  local message=$(literalize_string "$2")
  if [ ! -z "$message" ]; then
    message=" $message"
  fi
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [REMOVED] ${url}${message}" >> "${WorkDir}/downloads/results.txt"
  mkdir -p "${WorkDir}/data"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [REMOVED] url: ${url}, message:$message" >> "${WorkDir}/data/downloads_completed.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #REMOVED#${message}>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #REMOVED#${message}>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

skipUrlDownload() {
  local url="${1//[$'\t\r\n']}"
  local filename="$2"
  local flockshortname=$3
  local flockpath=$4
  flockpathcontents=$(cat "$flockpath")
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

renameDuplicateDownload() {
  local url=$(literalize_string "${1//[$'\t\r\n']}")
  local filename=$(literalize_string "$2")
  local cTimestamp=$(date +"%Y%m%d%H%M%S%3N")
  local newfilename="${cTimestamp}_renamed_${filename}"
  if [ ! "$UrlOnly" == "true" ]; then
    sed -i -e "s>^$url.*>${url}|${newfilename}>g" "${InputFile}" #processed url
    sed -i -e "s>^${url/https:/http:}.*>${url}|${newfilename}>g" "${InputFile}" #http (if changed)
    sed -i -e "s>^direct=$url.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url http
    sed -i -e "s>^direct=${url/https:/http:}.*>direct=${url}|${newfilename}>g" "${InputFile}" #direct url https
  fi
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [RENAME] ${url}, ${filename}, (new filename: ${newfilename})" >> "${WorkDir}/downloads/results.txt"
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
  if [ "$UrlOnly" == "true" ]; then
    echo -e "${RED}| [FAILED]: A file already exists with the specified name. Rename it and try again.${NC}"
    echo -e "| Filename: \"./downloads/$filename\""
  fi
}

updateUrlDownload() {
  local url=$(literalize_string "${1//[$'\t\r\n']}")
  local newurl="$2"
  echo -e "${PINK}$newurl${NC}"
  if [ ! "$UrlOnly" == "true" ]; then
    sed -i -e "s%^$url.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #processed url
    sed -i -e "s%^${url/https:/http:}.*%${newurl//[[:space:]]/$'\\\n'}%g" "${InputFile}" #http (if changed)
  fi
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [UPDATE] ${url} (new url: ${newurl})" >> "${WorkDir}/downloads/results.txt"
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

droppedSizeBadDownload() {
  local url="${1//[$'\t\r\n']}"
  local filename=$(literalize_string "$2")
  local filesize=$(literalize_string "$3")
  local altsize=$(literalize_string "$4")
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo "$dateStamp [DROP/BADSIZE] ${url}, ${filename}, size: ${filesize}, advertisedsize: ${altsize}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (dropped or bad size)>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

noCdnDownload() {
  local url="${1//[$'\t\r\n']}"
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [NOCDN] ${url}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# (No CDN found)>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

passwordProtectedDownload() {
  local url="${1//[$'\t\r\n']}"
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [PASSWORD] ${url}" >> "${WorkDir}/downloads/results.txt"
  mkdir -p "${WorkDir}/data"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [PASSWORD] url: ${url}, name: ${filename}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/data/downloads_completed.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^${url}.*>#& #PASSWORD#>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=${url}.*>#& #PASSWORD#>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #PASSWORD#>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

badUrlDownload() {
  local url="${1//[$'\t\r\n']}"
  local message=$(literalize_string "$2")
  if [ ! -z "$message" ]; then
    message=" $message"
  fi
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [BADURL] ${url}${message}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^${url}.*>#& #BAD-URL#${message}>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=${url}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #BAD-URL#${message}>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

filenameOrSizeNotExistDownload() {
  local url="${1//[$'\t\r\n']}"
  local filename=$(literalize_string "$2")
  local message=$(literalize_string "$3")
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [FAIL] ${url}, No File / Bad size (filename: ${filename}, ${message})" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^${url}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=${url}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (No filename or size. Try again later)>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

fileExistsButSizeTooLargeDownload() {
  local url="${1//[$'\t\r\n']}"
  local filename=$(literalize_string "$2")
  local filesize=$(literalize_string "$3")
  local downloadFilesize=$(literalize_string "$4")
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [EXISTS/TOOLARGE] ${url}, ${filename}, sizeOnDisk: ${filesize} downSize: ${downloadFilesize}" >> "${WorkDir}/downloads/results.txt"
  if [ ! "$UrlOnly" == "true" ]; then
    if [ "${AutoCommentOnCompletion}" == "true" ] ; then
      sed -i -e "s>^$url.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#& #RETRY# ${filename} (File exists and is too large--retry or delete from downloads)>g" "${InputFile}" #direct url https
    else
      sed -i -e "s>^$url.*>#&>g" "${InputFile}" #processed url
      sed -i -e "s>^${url/https:/http:}.*>#&>g" "${InputFile}" #http (if changed)
      sed -i -e "s>^direct=$url.*>#&>g" "${InputFile}" #direct url http
      sed -i -e "s>^direct=${url/https:/http:}.*>#&>g" "${InputFile}" #direct url https
    fi
  fi
  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ]; then
    rm -f "${WorkDir}/.flocks/${CLEANSTRING}"
  fi
}

truncateDownload() {
  local url="${1//[$'\t\r\n']}"
  local filename="$2"
  local truncsize=$3
  local partialsize=$4
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [TRUNC] ${url}, ${filename}, tsize:$truncsize, psize: $partialsize" >> "${WorkDir}/downloads/results.txt"
}

pumpDownload() {
  local url="${1//[$'\t\r\n']}"
  local preViews=$2
  local postViews=$3
  mkdir -p "${WorkDir}/downloads"
  dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
  echo -e "$dateStamp [PUMP] ${url}, previews:$preViews, postviews: $postViews" >> "${WorkDir}/downloads/results.txt"
}

successUpload() {
|
|
local pLine="${1//[$'\t\r\n']}"
|
|
local filepath="${2//[$'\t\r\n']}"
|
|
local HostCode=$(literalize_string "$3")
|
|
local filesize=$(literalize_string "$4")
|
|
local downloadLink="$5"
|
|
local responseHtml=$6
|
|
local filename="${filepath##*/}"
|
|
mkdir -p "${WorkDir}/uploads"
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
echo -e "[OK] ${filename}, ${HostCode}, ${downloadLink}" >> "${WorkDir}/uploads/results.txt"
|
|
mkdir -p "${WorkDir}/uploads"
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
echo -e "${filename}, ${HostCode}, ${downloadLink}" >> "${WorkDir}/uploads/result-links.txt"
|
|
mkdir -p "${WorkDir}/uploads/_tickets"
|
|
cTicket="${WorkDir}/uploads/_tickets/`date +%y%m%d-%H%M%S`_${filename}_${HostCode}_upload.txt"
|
|
echo -e "${downloadLink}\\n\\nResponse:\\n${responseHtml}" > "$cTicket"
|
|
echo -e "${BLUE}| Ticket${NC}: "$cTicket""
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
mkdir -p "${WorkDir}/data"
|
|
echo -e "$dateStamp [OK] file: ${filename}, host: ${HostCode}, dl: ${downloadLink}, ticket: ${cTicket}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/data/uploads_completed.txt"
|
|
if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
|
|
sed -i -e "s>^${pLine}.*>#& #OK# ${downloadLink//&/\\&}>g" "${InputFile}" #processed line
|
|
fi
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
mkdir -p "${WorkDir}/uploads"
|
|
echo -e "$dateStamp [OK] file: ${filename}, host: ${HostCode}, dl: ${downloadLink}, ticket: ${cTicket}, size: ${filesize}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt"
|
|
UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}"
|
|
if [ -f "${UploadTicket}" ]; then
|
|
rm -f "${UploadTicket}"
|
|
fi
|
|
PostSuccessfulUpload "${filepath}" "${HostCode}" "${filename}" "${filesize}" "${downloadLink}"
|
|
}
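# Illustrative outcome of a successful upload (all names are placeholders):
#   uploads/results.txt gains:  [OK] file.zip, HST, https://host.example/dl/abc
#   ticket written to:          uploads/_tickets/250202-140122_file.zip_HST_upload.txt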
|
|
successUploadExists() {
|
|
local pLine="${1//[$'\t\r\n']}"
|
|
local filepath="${2//[$'\t\r\n']}"
|
|
local HostCode=$(literalize_string "$3")
|
|
local message=$(literalize_string "$4")
|
|
local message2=$(literalize_string "$5")
|
|
local filename="${filepath##*/}"
|
|
if [ ! -z "$message2" ]; then
|
|
message="$message, $message2"
|
|
fi
|
|
mkdir -p "${WorkDir}/uploads"
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
echo -e "[EXISTS] ${filename}, ${HostCode}, ${downloadLink}" >> "${WorkDir}/uploads/results.txt"
|
|
if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
|
|
sed -i -e "s>^${pLine}.*>#& #OK# (Upload exists) ${message//&/\\&}>g" "${InputFile}" #processed line
|
|
fi
|
|
UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}"
|
|
if [ -f "${UploadTicket}" ]; then
|
|
rm -f "${UploadTicket}"
|
|
fi
|
|
}
|
|
failedUpload() {
|
|
local pLine="${1//[$'\t\r\n']}"
|
|
local filepath="${2//[$'\t\r\n']}"
|
|
local HostCode=$(literalize_string "$3")
|
|
local message=$(literalize_string "$4")
|
|
local message2=$(literalize_string "$5")
|
|
local filename="${filepath##*/}"
|
|
if [ ! -z "$message2" ]; then
|
|
message="$message, $message2"
|
|
fi
|
|
mkdir -p "${WorkDir}/uploads"
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
echo -e "[FAIL] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt"
|
|
if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
|
|
sed -i -e "s>^${pLine}.*>#& #FAIL# ${message//&/\\&}>g" "${InputFile}" #processed line
|
|
fi
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
mkdir -p "${WorkDir}/uploads"
|
|
echo -e "$dateStamp [FAIL] file: ${filename}, host: ${HostCode}, msg: ${message}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt"
|
|
UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}"
|
|
if [ -f "${UploadTicket}" ]; then
|
|
rm -f "${UploadTicket}"
|
|
fi
|
|
PostFailedUpload "$pline" "${filepath}" "${HostCode}" "${message1}" "${message2}"
|
|
}
|
|
failedRetryUpload() {
|
|
local pLine="${1//[$'\t\r\n']}"
|
|
local filepath="${2//[$'\t\r\n']}"
|
|
local HostCode=$(literalize_string "$3")
|
|
local message=$(literalize_string "$4")
|
|
local message2=$(literalize_string "$5")
|
|
if [ ! -z "$message2" ]; then
|
|
message="$message, $message2"
|
|
fi
|
|
mkdir -p "${WorkDir}/uploads"
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
echo -e "[RETRY] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt"
|
|
if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
|
|
sed -i -e "s>^${pLine}.*>#& #RETRY# ${message//&/\\&}>g" "${InputFile}" #processed line
|
|
fi
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
mkdir -p "${WorkDir}/uploads"
|
|
echo -e "$dateStamp [RETRY] file: ${filename}, host: ${HostCode}, msg: ${message}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt"
|
|
UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}"
|
|
if [ -f "${UploadTicket}" ]; then
|
|
rm -f "${UploadTicket}"
|
|
fi
|
|
PostFailRetryUpload "${url}" "${message}" "${message2}"
|
|
}
|
|
skipFailedUpload() {
|
|
local pLine="${1//[$'\t\r\n']}"
|
|
local filepath="${2//[$'\t\r\n']}"
|
|
local HostCode=$(literalize_string "$3")
|
|
local message=$(literalize_string "$4")
|
|
local message2=$(literalize_string "$5")
|
|
local filename="${filepath##*/}"
|
|
if [ ! -z "$message2" ]; then
|
|
message="$message, $message2"
|
|
fi
|
|
mkdir -p "${WorkDir}/uploads"
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
echo -e "[SKIP/FAIL] ${HostCode}, ${filename}, ${message}" >> "${WorkDir}/uploads/results.txt"
|
|
if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
|
|
sed -i -e "s>^${pLine}.*>#& #FAIL# (Skip) ${message//&/\\&}>g" "${InputFile}" #processed line
|
|
fi
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
mkdir -p "${WorkDir}/uploads"
|
|
echo -e "$dateStamp [FAIL] file: ${filename}, host: ${HostCode}, msg: ${message}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt"
|
|
UploadTicket="${WorkDir}/.flocks/upload_${HostCode}_${filepath//[^a-zA-Z0-9]/}"
|
|
if [ -f "${UploadTicket}" ]; then
|
|
rm -f "${UploadTicket}"
|
|
fi
|
|
}
|
|
uploadBadInputLine() {
|
|
local pLine="${1//[$'\t\r\n']}"
|
|
local message=$(literalize_string "$2")
|
|
if [ ! -z "$message" ]; then
|
|
message=" $message"
|
|
fi
|
|
mkdir -p "${WorkDir}/uploads"
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
echo -e "[BADLINE] ${pLine}${message}" >> "${WorkDir}/uploads/results.txt"
|
|
if [ ! -z "$InputFile" ] && [ ! -z "$pLine" ]; then
|
|
sed -i -e "s>^${pLine}.*>#& #RETRY# (Bad Line)${message//&/\\&}>g" "${InputFile}" #processed line
|
|
fi
|
|
dateStamp=$(date '+%Y/%m/%d %H:%M:%S')
|
|
mkdir -p "${WorkDir}/uploads"
|
|
echo -e "$dateStamp [FAIL] file: ${filename}, host: ${HostCode}, msg: ${message}, path: ${filepath}" >> "${WorkDir}/uploads/temp_upload_handler.txt"
|
|
}
|
|
yes_or_no() {
|
|
while true; do
|
|
printf "${YELLOW}"
|
|
read -p "$* [y/n]: " yn
|
|
printf "${NC}"
|
|
case $yn in
|
|
[Yy]*) return 0 ;;
|
|
[Nn]*) echo -e "Aborted" ; return 1 ;;
|
|
esac
|
|
done
|
|
}
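# Example usage (illustrative): run a destructive step only after confirmation.
#   yes_or_no "Cleanup temp folders?" && rm -rf "${WorkDir}/.temp"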
|
|
sleepRandomSecs() {
|
|
minWait=$1
|
|
maxWait=$2
|
|
sleep $((minWait + RANDOM % (maxWait - minWait)))s
|
|
}
|
|
sleepRandomMins() {
|
|
minWait=$1
|
|
maxWait=$2
|
|
sleep $((minWait + RANDOM % (maxWait - minWait)))m
|
|
}
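# Both sleep helpers pick a duration in [minWait, maxWait) via RANDOM modulo,
# e.g. (illustrative) sleep between 5 and 30 seconds:
#   sleepRandomSecs 5 30
# Caution (not enforced above): calling with maxWait equal to minWait makes
# the modulus zero and aborts the arithmetic expansion.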
|
|
GetRandomUA() {
|
|
ar_UA[0]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.3"
|
|
ar_UA[1]="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.3"
|
|
ar_UA[2]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36 Edg/124.0.0.0"
|
|
ar_UA[3]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:125.0) Gecko/20100101 Firefox/125.0"
|
|
ar_UA[4]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.3"
|
|
ar_UA[5]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.4"
|
|
ar_UA[6]="Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0"
|
|
ar_UA[7]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36 Edg/122.0.0.0"
|
|
ar_UA[8]="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 Edg/123.0.0.0"
|
|
ar_UA[9]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:126.0) Gecko/20100101 Firefox/126.0"
|
|
ar_UA[10]="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:115.0) Gecko/20100101 Firefox/115.0"
|
|
arrSize=${#ar_UA[@]}
|
|
index=$(($RANDOM % $arrSize))
|
|
RandomUA=${ar_UA[$index]}
|
|
}
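# Example usage (illustrative; assumes tor_curl_request forwards extra curl
# flags, and -A is curl's standard user-agent option):
#   GetRandomUA
#   tor_curl_request -A "$RandomUA" -Ls "$download_url"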
|
|
LoadMadDownloadHosts() {
|
|
if [ -d "${ScriptDir}/hosts/" ]; then
|
|
echo -e "${GREEN}Loading Download Hosts...${NC}"
|
|
SetEnabledDownloadHosts
|
|
tHostFuncPrefixes=""
|
|
cnthostsloaded=0
|
|
for fil in "${ScriptDir}"/hosts/*.sh ;
|
|
do
|
|
if [ -f "$fil" ]; then
|
|
if [[ "$EnabledDownloadHosts" == "recommended" ]] || [[ "$EnabledDownloadHosts" == "online" ]] ; then
|
|
readarray -d "," -t arrEnabledHosts <<< "${lstEnabledDownloadHosts}"
|
|
isfound=false
|
|
for hostfil in "${arrEnabledHosts[@]}";
|
|
do
|
|
hostfil="${hostfil//[$'\t\r\n']}"
|
|
if [[ "${fil##*/}" == "${hostfil}.sh" ]] ; then
|
|
isfound=true
|
|
break
|
|
fi
|
|
done
|
|
if [[ "$isfound" == "false" ]]; then
|
|
continue
|
|
fi
|
|
fi
|
|
tfilename="${fil##*/}"
|
|
if grep -Eq '^HostFuncPrefix='"'" "$fil" ; then
|
|
if [[ "$tfilename" == "up_"* ]] ; then
|
|
continue
|
|
fi
|
|
_hostfuncprefix=$(grep -oP -m 1 '^HostFuncPrefix='"'"'\K.*?(?='"'"')' "$fil")
|
|
if [[ ! "${_hostfuncprefix}" == "direct" ]] && \
|
|
[ ! -z "${tHostFuncPrefixes}" ] && \
|
|
grep -Eqi ":${_hostfuncprefix}:" <<< "$tHostFuncPrefixes" ; then
|
|
echo -e "[${RED}FAIL${NC}] ${BLUE}${tfilename}${NC} (dupe HostFuncPrefix ${PINK}${_hostfuncprefix}${NC} detected)${NC}"
|
|
else
|
|
source "$fil"
|
|
cnthostsloaded=$((cnthostsloaded + 1))
|
|
tHostFuncPrefixes="${tHostFuncPrefixes}:${_hostfuncprefix}:"
|
|
if [ "$VerboseLoading" == "true" ]; then
|
|
echo -e "[${GREEN}OK${NC}] ${BLUE}${tfilename}${NC}"
|
|
fi
|
|
fi
|
|
else
|
|
echo -e "[${RED}FAIL${NC}] ${BLUE}${tfilename}${NC} (not a valid host file)${NC}"
|
|
fi
|
|
fi
|
|
done
|
|
echo -e " (${YELLOW}$cnthostsloaded${NC}) loaded${NC}"
|
|
echo -e ""
|
|
fi
|
|
}
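# A minimal sketch of a download host module under ./hosts/ (illustrative
# names; only the HostFuncPrefix= line is validated by the loader above, the
# remaining fields are read by madHostDetails and the clipboard monitor):
#   HostCode='EXH'
#   HostNick='example.host'
#   HostFuncPrefix='exh'
#   HostUrls='https://example.host'
#   HostDomainRegex='^(http|https)://(www\.)?example\.host/'
#   exh_DownloadFile() { ... }   #entrypoint called as ${HostFuncPrefix}_DownloadFile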
|
|
LoadMadUploadHosts() {
|
|
if [ -d "${ScriptDir}/hosts/" ]; then
|
|
echo -e "${GREEN}Loading Upload Hosts...${NC}"
|
|
SetEnabledUploadHosts
|
|
tHostFuncPrefixes=""
|
|
cnthostsloaded=0
|
|
for fil in "${ScriptDir}"/hosts/up_*.sh ;
|
|
do
|
|
if [ -f "$fil" ]; then
|
|
if [[ "$EnabledUploadHosts" == "recommended" ]] || [[ "$EnabledUploadHosts" == "online" ]] ; then
|
|
readarray -d "," -t arrEnabledHosts <<< "${lstEnabledUploadHosts}"
|
|
isfound=false
|
|
for hostfil in "${arrEnabledHosts[@]}";
|
|
do
|
|
hostfil="${hostfil//[$'\t\r\n']}"
|
|
if [[ "${fil##*/}" == "${hostfil}.sh" ]] ; then
|
|
isfound=true
|
|
break
|
|
fi
|
|
done
|
|
if [[ "$isfound" == "false" ]]; then
|
|
continue
|
|
fi
|
|
fi
|
|
tfilename="${fil##*/}"
|
|
if grep -Eq '^HostFuncPrefix='"'" "$fil" ; then
|
|
_hostfuncprefix=$(grep -oP -m 1 '^HostFuncPrefix='"'"'\K.*?(?='"'"')' "$fil")
|
|
if [[ ! "${_hostfuncprefix}" == "direct" ]] && \
|
|
[ ! -z "${tHostFuncPrefixes}" ] && \
|
|
grep -Eqi ":${_hostfuncprefix}:" <<< "$tHostFuncPrefixes" ; then
|
|
echo -e "[${RED}FAIL${NC}] ${BLUE}${tfilename}${NC} (dupe HostFuncPrefix ${PINK}${_hostfuncprefix}${NC} detected)${NC}"
|
|
else
|
|
source "$fil"
|
|
cnthostsloaded=$((cnthostsloaded + 1))
|
|
tHostFuncPrefixes="${tHostFuncPrefixes}:${_hostfuncprefix}:"
|
|
if [ "$VerboseLoading" == "true" ]; then
|
|
echo -e "[${GREEN}OK${NC}] ${BLUE}${tfilename}${NC}"
|
|
fi
|
|
fi
|
|
else
|
|
echo -e "[${RED}FAIL${NC}] ${BLUE}${tfilename}${NC} (not a valid host file)${NC}"
|
|
fi
|
|
fi
|
|
done
|
|
echo -e " (${YELLOW}$cnthostsloaded${NC}) loaded${NC}"
|
|
echo -e ""
|
|
fi
|
|
}
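# Upload host modules (up_*.sh) follow the same layout; madHostDetails also
# reads the "#! Info:", "#! MaxSize:" and "#! Expire:" comment headers, and
# the entrypoint is ${HostFuncPrefix}_UploadFile(). Sketch (illustrative):
#   #! Info: example upload host
#   #! MaxSize: 1GB
#   #! Expire: 30 days
#   HostFuncPrefix='up_exh'
#   up_exh_UploadFile() { ... }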
|
|
LoadMadPlugins() {
|
|
if [ ! -z "${LoadPlugins}" ]; then
|
|
echo -e "${GREEN}Loading Plugins...${NC}"
|
|
loadedPluginFuncsUsed=""
|
|
cntplugsloaded=0
|
|
readarray -d "," -t arrPlugins <<< "${LoadPlugins}"
|
|
for plugin in "${arrPlugins[@]}";
|
|
do
|
|
plg="${plugin//[$'\t\r\n']}"
|
|
if [ ! -z "${plg}" ] ; then
|
|
plg=$(literalize_string "$plugin")
|
|
if [ -f "${ScriptDir}/plugins/${plg}" ]; then
|
|
currPluginFuncsUsed=$(grep '()' "${ScriptDir}/plugins/${plg}" | awk '!/#/ {print $1}')
|
|
currPluginFuncsUsed="${currPluginFuncsUsed//$'\n'/, }"
|
|
readarray -d ", " -t arrCurrPluginFuncsUsed <<< "$currPluginFuncsUsed"
|
|
local isDupeFunc=false
|
|
for cplg in "${arrCurrPluginFuncsUsed[@]}";
|
|
do
|
|
cplg="${cplg# }" #trim the leading space left by the ", " join above
|
|
if [ ! -z "${loadedPluginFuncsUsed}" ] && [[ *"${cplg}"* == "$loadedPluginFuncsUsed" ]] ; then
|
|
isDupeFunc=true
|
|
echo -e "[${RED}FAIL${NC}] ${BLUE}${plg}${NC} (dupe hook detected)${NC}"
|
|
echo -e " Function already overriden: ${YELLOW}$cplg${NC}"
|
|
break
|
|
else
|
|
if grep -Eq '^OnLoad' <<< "$cplg" ; then
|
|
lstOnLoad="$lstOnLoad""@""${cplg%%(*}"
|
|
elif grep -Eq '^BeginProcessing' <<< "$cplg" ; then
|
|
lstBeginProcessing="$lstBeginProcessing""@""${cplg%%(*}"
|
|
elif grep -Eq '^PreProcessUrl' <<< "$cplg" ; then
|
|
lstPreProcessUrl="$lstPreProcessUrl""@""${cplg%%(*}"
|
|
elif grep -Eq '^PostSuccessfulDownload' <<< "$cplg" ; then
|
|
lstPostSuccessfulDownload="$lstPostSuccessfulDownload""@""${cplg%%(*}"
|
|
elif grep -Eq '^PostFailedDownload' <<< "$cplg" ; then
|
|
lstPostFailedDownload="$lstPostFailedDownload""@""${cplg%%(*}"
|
|
elif grep -Eq '^PostFailRetryDownload' <<< "$cplg" ; then
|
|
lstPostFailRetryDownload="$lstPostFailRetryDownload""@""${cplg%%(*}"
|
|
elif grep -Eq '^DoneProcessingAllUrls' <<< "$cplg" ; then
|
|
lstDoneProcessingAllUrls="$lstDoneProcessingAllUrls""@""${cplg%%(*}"
|
|
elif grep -Eq '^PostSuccessfulUpload' <<< "$cplg" ; then
|
|
lstPostSuccessfulUpload="$lstPostSuccessfulUpload""@""${cplg%%(*}"
|
|
elif grep -Eq '^PostFailedUpload' <<< "$cplg" ; then
|
|
lstPostFailedUpload="$lstPostFailedUpload""@""${cplg%%(*}"
|
|
elif grep -Eq '^PostFailRetryUpload' <<< "$cplg" ; then
|
|
lstPostFailRetryUpload="$lstPostFailRetryUpload""@""${cplg%%(*}"
|
|
elif grep -Eq '^DoneProcessingAllUploads' <<< "$cplg" ; then
|
|
lstDoneProcessingAllUploads="$lstDoneProcessingAllUploads""@""${cplg%%(*}"
|
|
fi
|
|
fi
|
|
done
|
|
if [ "$isDupeFunc" == "false" ]; then
|
|
source "${ScriptDir}/plugins/${plg}"
|
|
cntplugsloaded=$((cntplugsloaded + 1))
|
|
if [ "$VerboseLoading" == "true" ]; then
|
|
echo -e "[${GREEN}OK${NC}] ${BLUE}${plg}${NC}"
|
|
echo -e " --> Hooks: ${PINK}$currPluginFuncsUsed${NC}"
|
|
fi
|
|
loadedPluginFuncsUsed="${loadedPluginFuncsUsed}${currPluginFuncsUsed}"
|
|
else
|
|
continue
|
|
fi
|
|
else
|
|
echo -e "[${RED}FAIL${NC}] ${BLUE}${plg}${NC} (not found)${NC}"
|
|
fi
|
|
fi
|
|
done
|
|
echo -e " (${YELLOW}$cntplugsloaded${NC}) loaded${NC}"
|
|
echo -e ""
|
|
fi
|
|
}
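# A minimal plugin sketch (illustrative names): any function whose name starts
# with a hook name is registered for that hook, so a file in ./plugins/ with
#   PostSuccessfulDownloadLogIt() {
#       local pFilePath="$2" #hook args: url, path, name, folder, size
#       echo "done: ${pFilePath}" >> "${WorkDir}/data/my_plugin_log.txt"
#   }
# runs after each completed download once listed in LoadPlugins.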
|
|
install_curl_impersonate() {
|
|
clear
|
|
echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original dev, but it is relatively inactive."
|
|
echo -e "- Currently uses curl v8.1.1."
|
|
echo -e "+ Supports Firefox, Chrome, Edge, and Safari impersonation."
|
|
echo -e "+ (Recommended)"
|
|
echo -e ""
|
|
echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
|
|
echo -e "+ Currently uses curl v8.7.1"
|
|
echo -e "+ Supports Chrome, Edge, and Safari impersonation."
|
|
echo -e "+ (beta, not tested)"
|
|
echo -e ""
|
|
PS3='Please select which curl_impersonate to install: '
|
|
options=("lwthiker (orig, ff/chrome/edge/safari, not active) *Currently recommended*" \
|
|
"lexiforest (fork, chrome/edge/safari, active)" \
|
|
"Quit")
|
|
select opt in "${options[@]}"
|
|
do
|
|
case $opt in
|
|
"lwthiker (orig, ff/chrome/edge/safari, not active) *Currently recommended*")
|
|
install_curl_impersonate_lwthiker_orig
|
|
break
|
|
;;
|
|
"lexiforest (fork, chrome/edge/safari, active)")
|
|
install_curl_impersonate_lexiforest_fork
|
|
break
|
|
;;
|
|
"Quit")
|
|
exit 0
|
|
;;
|
|
*) echo "Invalid option $REPLY";;
|
|
esac
|
|
done
|
|
}
|
|
install_curl_impersonate_lwthiker_orig() {
|
|
clear
|
|
mkdir -p "${WorkDir}/downloads"
|
|
mkdir -p "${WorkDir}/.temp"
|
|
echo -e "${BLUE}lwthiker curl_impersonate${NC} is the original curl_impersonate."
|
|
echo -e "+ Currently uses curl v8.1.1, and has low activity for updates"
|
|
echo -e "+ Supports FireFox, Chrome, Edge, and Safari impersonation."
|
|
echo -e ""
|
|
echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lwthiker curl_impersonate${NC} info from github...${NC}"
|
|
latestBinaryUrl=""
|
|
for ((j=1; j<=4; j++)); do
|
|
response=$(tor_curl_request --insecure -L -s https://github.com/lwthiker/curl-impersonate/releases/latest)
|
|
if [ "${DebugAllEnabled}" == "true" ] ; then
|
|
debugHtml "github" "lbf_inst_curlimp$j" "$response"
|
|
fi
|
|
if [ ! -z "$response" ]; then
|
|
latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
|
|
latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
|
|
break
|
|
fi
|
|
done
|
|
if [ -z "$latestTag" ]; then
|
|
echo -e "${RED}| FAILED: Unable to query the latest release from github${NC}"
|
|
exit 1
|
|
fi
|
|
echo -e "${GREEN}| Info${NC}: latest release ${GREEN}$latestTag${NC} ($latestBinaryDate)${NC}"
|
|
yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
|
|
echo -e "${GREEN}| Downloading${NC}: latest release ${GREEN}$latestTag${NC}${NC}"
|
|
download_url='https://github.com/lwthiker/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
|
|
filename="${download_url##*\/}"
|
|
for ((j=1; j<=8; j++)); do
|
|
file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
|
|
if [ "${DebugAllEnabled}" == "true" ] ; then
|
|
debugHtml "github" "head_inst_curlimp$j" "${file_header}"
|
|
fi
|
|
if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< "$file_header" ; then
|
|
if ((j == 8)) ; then
|
|
return 1
|
|
else
|
|
continue
|
|
fi
|
|
fi
|
|
if grep -Eqi 'location:' <<< "$file_header" ; then
|
|
if ((j == 8)) ; then
|
|
return 1
|
|
else
|
|
download_url=$(grep -oPi '(?<=location: ).*' <<< "$file_header")
|
|
download_url=${download_url//[$'\t\r\n']}
|
|
continue
|
|
fi
|
|
fi
|
|
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
|
|
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
|
|
file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
|
|
download_inflight_path="${WorkDir}/.inflight/"
|
|
extract_location="${WorkDir}/.temp/"
|
|
file_path="${download_inflight_path}${filename}"
|
|
if [ ! -z "$file_size_bytes" ]; then
|
|
break
|
|
fi
|
|
done
|
|
if [ -z "$file_size_bytes" ]; then
|
|
echo -e "${RED}| FAILED: Unable to retrieve filesize${NC}"
|
|
exit 1
|
|
fi
|
|
echo -e ""
|
|
echo -e "${YELLOW}| [Filename]${NC} $filename"
|
|
echo -e "${YELLOW}| [Filesize]${NC} $file_size_readable"
|
|
echo -e ""
|
|
if [[ -e "${file_path}" ]]; then
|
|
existing_file_size=$(stat --format="%s" "${file_path}" | tr -d '[:space:]')
|
|
if ((existing_file_size == file_size_bytes)); then
|
|
echo -e "${GREEN}File exists in downloads and the size matches the expected size.\nNo need to re-download.${NC}"
|
|
elif ((existing_file_size > file_size_bytes)); then
|
|
echo -e "${RED}| ERROR: File exists in downloads folder but is larger than expected.${NC}\nThis could be due to several instances saving the same file, an old download using the same name, or host experiencing a temporary issue."
|
|
exit 1
|
|
fi
|
|
else
|
|
for ((j=1; j<=$MaxDownloadRetries; j++)); do
|
|
tor_identity="${RANDOM}"
|
|
trap "echo ""; tput cnorm; exit" 0 1 2 3 6 15
|
|
tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
|
|
received_file_size=0
|
|
if [ -f "$file_path" ] ; then
|
|
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
|
|
fi
|
|
if ((received_file_size == file_size_bytes)) ; then
|
|
break
|
|
elif ((received_file_size < file_size_bytes)) ; then
|
|
if ((j >= MaxDownloadRetries)) ; then
|
|
echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
|
|
exit 1
|
|
else
|
|
echo -e "${YELLOW}| RETRY: Size mismatch after downloading.${NC}"
|
|
continue
|
|
fi
|
|
elif ((received_file_size > file_size_bytes)) ; then
|
|
echo -e "${RED}| FAILED: Size is too large after downloading.${NC}"
|
|
exit 1
|
|
else
|
|
echo -e "${RED}| FAILED: Unknown error downloading.${NC}"
|
|
exit 1
|
|
fi
|
|
done
|
|
fi
|
|
if [ -f "$file_path" ] ; then
|
|
echo -e ""
|
|
echo -e "${GREEN}| [OK]${NC} Download complete.${NC}"
|
|
mv "${file_path}" "$extract_location"
|
|
final_tarpath="$extract_location/$filename"
|
|
echo -e "| Extracting curl_impersonate..."
|
|
tarOutput=$(tar -xvzf "${final_tarpath}" -C "$extract_location")
|
|
rm -f "${ScriptDir}"/curl*
|
|
mv "$extract_location/curl-impersonate-ff" "${ScriptDir}/"
|
|
mv "$extract_location/curl_ff109" "${ScriptDir}/"
|
|
echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
|
|
exit 0
|
|
else
|
|
echo -e "${RED}| Failed: Unable to find ./.temp/${filename}${NC}"
|
|
exit 1
|
|
fi
|
|
}
|
|
}
|
|
install_curl_impersonate_lexiforest_fork() {
|
|
clear
|
|
mkdir -p "${WorkDir}/downloads"
|
|
mkdir -p "${WorkDir}/.temp"
|
|
echo -e "${BLUE}lexiforest curl_impersonate${NC} is an active fork of curl_impersonate."
|
|
echo -e "+ Currently uses curl v8.7.1, and is patched for latest CVEs"
|
|
echo -e "+ Supports Chrome, Edge, and Safari impersonation."
|
|
echo -e ""
|
|
echo -e "${GREEN}| Fetching:${NC} latest ${BLUE}lexiforest curl_impersonate fork${NC} info from github...${NC}"
|
|
latestBinaryUrl=""
|
|
for ((j=1; j<=4; j++)); do
|
|
response=$(tor_curl_request --insecure -L -s https://github.com/lexiforest/curl-impersonate/releases/latest)
|
|
if [ "${DebugAllEnabled}" == "true" ] ; then
|
|
debugHtml "github" "lbf_inst_curlimp$j" "$response"
|
|
fi
|
|
if [ ! -z "$response" ]; then
|
|
latestTag=$(grep -oPi -m 1 '(?<=/curl-impersonate/releases/tag/).*?(?=")' <<< "$response")
|
|
latestBinaryDate=$(grep -oPi -m 1 '(?<=<relative-time class="no-wrap" prefix="" datetime=").*?(?=T)' <<< "$response")
|
|
break
|
|
fi
|
|
done
|
|
if [ -z "$latestTag" ]; then
|
|
echo -e "${RED}| FAILED: Unable to query the latest release from github${NC}"
|
|
exit 1
|
|
fi
|
|
echo -e "${GREEN}| Info${NC}: latest release ${GREEN}$latestTag${NC} ($latestBinaryDate)${NC}"
|
|
yes_or_no "Do you wish to download and extract curl_impersonate $latestTag (using tor+curl)?" && {
|
|
echo -e "${GREEN}| Downloading${NC}: latest release ${GREEN}$latestTag${NC}${NC}"
|
|
download_url='https://github.com/lexiforest/curl-impersonate/releases/download/'"$latestTag"'/curl-impersonate-'"$latestTag"'.x86_64-linux-gnu.tar.gz'
|
|
filename="${download_url##*\/}"
|
|
for ((j=1; j<=8; j++)); do
|
|
file_header=$(tor_curl_request --insecure --head -Ls "$download_url")
|
|
if [ "${DebugAllEnabled}" == "true" ] ; then
|
|
debugHtml "github" "head_inst_curlimp$j" "${file_header}"
|
|
fi
|
|
if ! grep -Eqi 'HTTP/2 200|HTTP/1.1 200|200 OK' <<< "$file_header" ; then
|
|
if ((j == 8)) ; then
|
|
return 1
|
|
else
|
|
continue
|
|
fi
|
|
fi
|
|
if grep -Eqi 'location:' <<< "$file_header" ; then
|
|
if ((j == 8)) ; then
|
|
return 1
|
|
else
|
|
download_url=$(grep -oPi '(?<=location: ).*' <<< "$file_header")
|
|
download_url=${download_url//[$'\t\r\n']}
|
|
continue
|
|
fi
|
|
fi
|
|
file_size_bytes=$(grep -oPi '(?<=content-length: ).*' <<< "$file_header")
|
|
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
|
|
file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
|
|
download_inflight_path="${WorkDir}/.inflight/"
|
|
extract_location="${WorkDir}/.temp/"
|
|
file_path="${download_inflight_path}${filename}"
|
|
if [ ! -z "$file_size_bytes" ]; then
|
|
break
|
|
fi
|
|
done
|
|
if [ -z "$file_size_bytes" ]; then
|
|
echo -e "${RED}| FAILED: Unable to retrieve filesize${NC}"
|
|
exit 1
|
|
fi
|
|
echo -e ""
|
|
echo -e "${YELLOW}| [Filename]${NC} $filename"
|
|
echo -e "${YELLOW}| [Filesize]${NC} $file_size_readable"
|
|
echo -e ""
|
|
if [[ -e "${file_path}" ]]; then
|
|
existing_file_size=$(stat --format="%s" "${file_path}" | tr -d '[:space:]')
|
|
if ((existing_file_size == file_size_bytes)); then
|
|
echo -e "${GREEN}File exists in downloads and the size matches the expected size.\nNo need to re-download.${NC}"
|
|
elif ((existing_file_size > file_size_bytes)); then
|
|
echo -e "${RED}| ERROR: File exists in downloads folder but is larger than expected.${NC}\nThis could be due to several instances saving the same file, an old download using the same name, or host experiencing a temporary issue."
|
|
exit 1
|
|
fi
|
|
else
|
|
for ((j=1; j<=$MaxDownloadRetries; j++)); do
|
|
tor_identity="${RANDOM}"
|
|
trap "echo ""; tput cnorm; exit" 0 1 2 3 6 15
|
|
tor_curl_request --insecure -L "$download_url" --continue-at - --output "$file_path"
|
|
received_file_size=0
|
|
if [ -f "$file_path" ] ; then
|
|
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
|
|
fi
|
|
if ((received_file_size == file_size_bytes)) ; then
|
|
break
|
|
elif ((received_file_size < file_size_bytes)) ; then
|
|
if ((j >= MaxDownloadRetries)) ; then
|
|
echo -e "${RED}| FAILED: Size mismatch after downloading${NC}"
|
|
exit 1
|
|
else
|
|
echo -e "${YELLOW}| RETRY: Size mismatch after downloading.${NC}"
|
|
continue
|
|
fi
|
|
elif ((received_file_size > file_size_bytes)) ; then
|
|
echo -e "${RED}| FAILED: Size is too large after downloading.${NC}"
|
|
exit 1
|
|
else
|
|
echo -e "${RED}| FAILED: Unknown error downloading.${NC}"
|
|
exit 1
|
|
fi
|
|
done
|
|
fi
|
|
if [ -f "$file_path" ] ; then
|
|
echo -e ""
|
|
echo -e "${GREEN}| [OK]${NC} Download complete.${NC}"
|
|
mv "${file_path}" "$extract_location"
|
|
final_tarpath="$extract_location/$filename"
|
|
echo -e "| Extracting curl_impersonate..."
|
|
tarOutput=$(tar -xvzf "${final_tarpath}" -C "$extract_location")
|
|
rm -f "${ScriptDir}"/curl*
|
|
mv "$extract_location/curl-impersonate-chrome" "${ScriptDir}/"
|
|
mv "$extract_location/curl_chrome131" "${ScriptDir}/"
|
|
echo -e "${GREEN}| Done.${NC} Update ${BLUE}\"UseTorCurlImpersonate=true\"${NC} in script to use..."
|
|
exit 0
|
|
else
|
|
echo -e "${RED}| Failed: Unable to find ./.temp/${filename}${NC}"
|
|
exit 1
|
|
fi
|
|
}
|
|
}
|
|
madReset() {
|
|
local InputFile="$1"
|
|
local noprompt=false
|
|
local autoclean=false
|
|
local autoclose=true
|
|
local noclear=false
|
|
if [ "$2" == "true" ]; then
|
|
noprompt=true
|
|
fi
|
|
if [ "$3" == "true" ]; then
|
|
autoclean=true
|
|
fi
|
|
if [ "$4" == "false" ]; then
|
|
autoclose=false
|
|
fi
|
|
if [ "$5" == "true" ]; then
|
|
noclear=true
|
|
fi
|
|
if [ "$noclear" == "false" ]; then
|
|
clear
|
|
fi
|
|
echo -e "${BLD}"
|
|
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
|
|
echo -e ":${NC} ${GREEN}MAD${PINK} Reset${NC} : Revert URLs marked with #RETRY#${PINK}${BLD} :"
|
|
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"
|
|
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
|
|
InputFile="${WorkDir}/${InputFile}"
|
|
fi
|
|
if [ ! -f "${InputFile}" ]; then
|
|
printf "Unable to read file %s! [mr]\\n" "${InputFile}"
|
|
else
|
|
fileCount=1
|
|
lineCnt=1
|
|
while IFS= read -r line || [[ -n $line ]];
|
|
do
|
|
if [[ "$line" =~ ^\#(http|https|direct=http|direct=https):// ]] && [[ "$line" =~ '#RETRY#' ]]; then
|
|
remote_url=$(grep -oPi '^#\K.*?(?= #RETRY#)' <<< "$line")
|
|
printf "Reset Line ${GREEN}#${lineCnt}${NC}: %s\\n" "${remote_url}"
|
|
sed -i -e "s|^$(literalize_string "${line}")|$(literalize_string "${remote_url}")|gi" "${InputFile}" #processed url
|
|
fi
|
|
lineCnt=$((lineCnt+1))
|
|
done < "${InputFile}"
|
|
fi
|
|
if [ -d "${WorkDir}/.flocks" ] || [ -d "${WorkDir}/.temp" ] || [ -d "${WorkDir}/.debug" ] ; then
|
|
if [ "$autoclean" == "true" ]; then
|
|
if [ -d "${WorkDir}/.flocks" ]; then
|
|
rm -rf "${WorkDir}/.flocks"
|
|
fi
|
|
if [ -d "${WorkDir}/.temp" ]; then
|
|
rm -rf "${WorkDir}/.temp"
|
|
fi
|
|
if [ -d "${WorkDir}/.debug" ]; then
|
|
rm -rf "${WorkDir}/.debug"
|
|
fi
|
|
else
|
|
if [ "$noprompt" == "false" ]; then
|
|
printf "\\n"
|
|
yes_or_no "Cleanup temp folders? (choose 'n' if currently processing in other terminals)" && {
|
|
if [ -d "${WorkDir}/.flocks" ]; then
|
|
rm -rf "${WorkDir}/.flocks"
|
|
fi
|
|
if [ -d "${WorkDir}/.temp" ]; then
|
|
rm -rf "${WorkDir}/.temp"
|
|
fi
|
|
if [ -d "${WorkDir}/.debug" ]; then
|
|
rm -rf "${WorkDir}/.debug"
|
|
fi
|
|
}
|
|
fi
|
|
fi
|
|
fi
|
|
if [ "$autoclose" == "true" ]; then
|
|
exit 0
|
|
fi
|
|
}
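# Illustrative call: reset #RETRY# lines in links.txt without prompting,
# auto-clean the temp folders, and return to the caller instead of exiting:
#   madReset "links.txt" true true false false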
|
|
madResetUploads() {
|
|
local InputFile="$1"
|
|
local noprompt=false
|
|
local autoclean=false
|
|
local autoclose=true
|
|
local noclear=false
|
|
if [ "$2" == "true" ]; then
|
|
noprompt=true
|
|
fi
|
|
if [ "$3" == "true" ]; then
|
|
autoclean=true
|
|
fi
|
|
if [ "$4" == "false" ]; then
|
|
autoclose=false
|
|
fi
|
|
if [ "$5" == "true" ]; then
|
|
noclear=true
|
|
fi
|
|
if [ "$noclear" == "false" ]; then
|
|
clear
|
|
fi
|
|
echo -e "${BLD}"
|
|
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
|
|
echo -e ":${NC} ${GREEN}MAD${PINK} Reset${NC} : Revert Upload files marked with #RETRY#${PINK}${BLD} :"
|
|
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"
|
|
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
|
|
InputFile="${WorkDir}/${InputFile}"
|
|
fi
|
|
if [ ! -f "${InputFile}" ]; then
|
|
printf "Unable to read file %s! [mr]\\n" "${InputFile}"
|
|
exit 1
|
|
fi
|
|
fileCount=1
|
|
lineCnt=1
|
|
while IFS= read -r line || [[ -n $line ]];
|
|
do
|
|
if grep -Eqi '^\#.*\|.*#RETRY#' <<< "$line" ; then
|
|
fline=$(grep -oPi '^#\K.*?(?= #RETRY#)' <<< "$line")
|
|
printf "Reset Line ${GREEN}#${lineCnt}${NC}: %s\\n" "${fline}"
|
|
sed -i -e "s|^$(literalize_string "${line}")|$(literalize_string "${fline}")|gi" "${InputFile}" #processed url
|
|
fi
|
|
lineCnt=$((lineCnt+1))
|
|
done < "${InputFile}"
|
|
if [ -d "${WorkDir}/.flocks" ] || [ -d "${WorkDir}/.temp" ] || [ -d "${WorkDir}/.debug" ] ; then
|
|
if [ "$autoclean" == "true" ]; then
|
|
if [ -d "${WorkDir}/.flocks" ]; then
|
|
rm -rf "${WorkDir}/.flocks"
|
|
fi
|
|
if [ -d "${WorkDir}/.temp" ]; then
|
|
rm -rf "${WorkDir}/.temp"
|
|
fi
|
|
if [ -d "${WorkDir}/.debug" ]; then
|
|
rm -rf "${WorkDir}/.debug"
|
|
fi
|
|
else
|
|
if [ "$noprompt" == "false" ]; then
|
|
printf "\\n"
|
|
yes_or_no "Cleanup temp folders? (choose 'n' if currently processing in other terminals)" && {
|
|
if [ -d "${WorkDir}/.flocks" ]; then
|
|
rm -rf "${WorkDir}/.flocks"
|
|
fi
|
|
if [ -d "${WorkDir}/.temp" ]; then
|
|
rm -rf "${WorkDir}/.temp"
|
|
fi
|
|
if [ -d "${WorkDir}/.debug" ]; then
|
|
rm -rf "${WorkDir}/.debug"
|
|
fi
|
|
}
|
|
fi
|
|
fi
|
|
fi
|
|
if [ "$autoclose" == "true" ]; then
|
|
exit 0
|
|
fi
|
|
}
|
|
madAudit() {
|
|
clear
|
|
echo -e "${BLD}"
|
|
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
|
|
echo -e ":${NC} ${GREEN}MAD${PINK} Audit${NC} : Reports usage of http & curl in scripts${PINK}${BLD} :"
|
|
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"
|
|
if [ "$ScriptDir" == "$WorkDir" ]; then
|
|
cd "$ScriptDir"
|
|
readarray -t arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
|
|
for fil in "${arrFiles[@]}";
|
|
do
|
|
maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep --color='always' -Ei '(http|https):')
|
|
maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep --color='always' -Ei 'curl')
|
|
maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep -A 12 --color='always' -Ei 'tor_curl')
|
|
echo -e "Files:"
|
|
echo -e "${BLUE}${fil}${NC}"
|
|
echo -e ""
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_http"
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_curl"
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_torcurl"
|
|
echo -e ""
|
|
echo -e ""
|
|
done
|
|
else
|
|
cd "$ScriptDir"
|
|
readarray -t arrFiles < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
|
|
cd "$WorkDir"
|
|
readarray -t arrFiles2 < <(find . -name "*.sh" -printf '%p\n' | sort -Vk1)
|
|
for fil in "${arrFiles[@]}";
|
|
do
|
|
maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep --color='always' -Ei '(http|https):')
|
|
maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep --color='always' -Ei 'curl')
|
|
maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep -A 12 --color='always' -Ei 'tor_curl')
|
|
echo -e "Files:"
|
|
echo -e "${BLUE}${fil}${NC}"
|
|
echo -e ""
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_http"
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl \"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_curl"
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_torcurl"
|
|
echo -e ""
|
|
done
|
|
for fil in "${arrFiles2[@]}";
|
|
do
|
|
maud_http=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep --color='always' -Ei '(http|https):')
|
|
maud_curl=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep --color='always' -Ei 'curl')
|
|
maud_torcurl=$(grep -n -vxE '[[:blank:]]*([#].*)?' "$fil" | grep -A 12 --color='always' -Ei 'tor_curl')
|
|
echo -e "Files:"
|
|
echo -e "${BLUE}${fil}${NC}"
|
|
echo -e ""
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of http lines:${NC} (${GREEN}grep \"http:\" or \"https:\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_http"
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of curl:${NC} (${GREEN}grep \"curl\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_curl"
|
|
echo -e ""
|
|
echo -e "${PINK}MAD Audit of tor_curl (+12 lines after):${NC} (${GREEN}grep \"tor_curl\"${NC})"
|
|
echo -e "_________________________________________________________________________"
|
|
echo -e "$maud_torcurl"
|
|
echo -e ""
|
|
done
|
|
fi
|
|
}
|
|
madStatus() {
|
|
local InputFile="$1"
|
|
if [ "$arg1" == "status" ] ; then
|
|
clear
|
|
fi
|
|
echo -e "${BLD}"
|
|
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
|
|
echo -e ":${NC} ${GREEN}MAD${PINK} Status${NC} : Reports status of urls in file${PINK}${BLD} :"
|
|
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"
|
|
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
|
|
InputFile="${WorkDir}/${InputFile}"
|
|
fi
|
|
if [ ! -f "${InputFile}" ]; then
|
|
printf "Unable to read file %s! [ms]\\n" "${InputFile}"
|
|
exit 1
|
|
fi
|
|
lineCnt=1
|
|
listTodo=""
|
|
cntTodo=0
|
|
listOk=""
|
|
cntOk=0
|
|
listRetry=""
|
|
cntRetry=0
|
|
listRemoved=""
|
|
cntRemoved=0
|
|
listOther=""
|
|
cntOther=0
|
|
if [ "${AutoCommentOnCompletion}" == "true" ] ; then
|
|
while IFS= read -r line || [[ -n $line ]];
|
|
do
|
|
if ! grep -Eqi '^(http|direct=http|#http|#direct=http)' <<< "${line}" > /dev/null ; then
|
|
lineCnt=$((lineCnt+1))
|
|
continue
|
|
fi
|
|
tline=$line
|
|
if grep -Eqi '^(http|direct=http)' <<< "${line}" > /dev/null ; then
|
|
if grep -Eqi '^direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
listTodo="$listTodo$lineCnt\t${NC}${tline%%\|*}${NC}\\n"
|
|
else
|
|
listTodo="$listTodo$lineCnt\t${NC}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntTodo=$((cntTodo+1))
|
|
elif grep -Pqi '^(#http|#direct=http).*(?=.*#OK#)' <<< "${line}" > /dev/null ; then
|
|
if grep -Eqi '^#direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${tline}" > /dev/null ; then
|
|
tline=${tline#*#}
|
|
listOk="$listOk$lineCnt\t${GREEN}${tline%%\|*}${NC}\\n"
|
|
else
|
|
tline=${tline%%' #OK#'*}
|
|
tline=${tline#*#}
|
|
listOk="$listOk$lineCnt\t${GREEN}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntOk=$((cntOk+1))
|
|
elif grep -Pqi '^(#http|#direct=http).*(?=.*#RETRY#)' <<< "${line}" > /dev/null ; then
|
|
if grep -Eqi '^#direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
tline=${tline##*/#}
|
|
listRetry="$listRetry$lineCnt\t${YELLOW}${tline%%\|*}${NC}\\n"
|
|
else
|
|
tline=${tline##*/#}
|
|
tline=${tline%%\ #RETRY#*}
|
|
listRetry="$listRetry$lineCnt\t${YELLOW}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntRetry=$((cntRetry+1))
|
|
elif grep -Pqi '^(#http|#direct=http).*(?=.*#REMOVED#)' <<< "${line}" > /dev/null ; then
|
|
if grep -Eqi '^#direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
tline=${tline##*/#}
|
|
listRemoved="$listRemoved$lineCnt\t${RED}${tline%%\|*}${NC}\\n"
|
|
else
|
|
tline=${tline##*/#}
|
|
tline=${tline%%\ #REMOVED#*}
|
|
listRemoved="$listRemoved$lineCnt\t${RED}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntRemoved=$((cntRemoved+1))
|
|
elif grep -Pqi '^(#http|#direct=http).*(?=.*#FAIL#)' <<< "${line}" > /dev/null ; then
|
|
if grep -Eqi '^#direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
tline=${tline##*/#}
|
|
listOther="$listOther$lineCnt\t${RED}${tline%%\|*}${NC}\\n"
|
|
else
|
|
tline=${tline##*/#}
|
|
tline=${tline%%\ #FAIL#*}
|
|
listOther="$listOther$lineCnt\t${RED}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntOther=$((cntOther+1))
|
|
elif grep -Pqi '^(#http|#direct=http).*(?=.*#BAD-URL#)' <<< "${line}" > /dev/null ; then
|
|
if grep -Eqi '^#direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
tline=${tline##*/#}
|
|
listOther="$listOther$lineCnt\t${RED}${tline%%\|*}${NC}\\n"
|
|
else
|
|
tline=${tline##*/#}
|
|
tline=${tline%%\ #BAD-URL#*}
|
|
listOther="$listOther$lineCnt\t${RED}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntOther=$((cntOther+1))
|
|
elif grep -Pqi '^(#http|#direct=http).*(?=.*#PASSWORD#)' <<< "${line}" > /dev/null ; then
|
|
if grep -Eqi '^#direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
tline=${tline##*/#}
|
|
listOther="$listOther$lineCnt\t${RED}${tline%%\|*}${NC}\\n"
|
|
else
|
|
tline=${tline##*/#}
|
|
tline=${tline%%\ #PASSWORD#*}
|
|
listOther="$listOther$lineCnt\t${RED}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntOther=$((cntOther+1))
|
|
fi
|
|
lineCnt=$((lineCnt+1))
|
|
done < "${InputFile}"
|
|
if [ -d "${WorkDir}/downloads" ]; then
|
|
size_complete=$(du -hs --apparent-size "${WorkDir}/downloads" | awk '{print $1}')
|
|
else
|
|
size_complete=""
|
|
fi
|
|
echo -e "${PINK}Line #\tUrl${NC}"
|
|
echo -e "------------------------------------------------"
|
|
echo -e "[Todo] (${BLUE}$cntTodo${NC}) - URLs to process"
|
|
echo -e "$listTodo"
|
|
if [ ! -z "$size_complete" ]; then
|
|
echo -e "[OK] (${GREEN}$cntOk${NC}) - Downloads completed [size: $size_complete]"
|
|
else
|
|
echo -e "[OK] (${GREEN}$cntOk${NC}) - Downloads completed"
|
|
fi
|
|
echo -e "$listOk"
|
|
echo -e "[Retry] (${YELLOW}$cntRetry${NC}) - Downloads that failed and need a reset and retry"
|
|
echo -e "$listRetry"
|
|
echo -e "[Removed] (${RED}$cntRemoved${NC}) - Downloads that have been removed from the host"
|
|
echo -e "$listRemoved"
|
|
echo -e "[Other] (${RED}$cntOther${NC}) - Downloads with a bad url, require a password, or are unavailable"
|
|
echo -e "$listOther"
|
|
else
|
|
while IFS= read -r line || [[ -n $line ]];
|
|
do
|
|
if ! grep -Eqi '^(http|direct=http|#http|#direct=http)' <<< "${line}" > /dev/null ; then
|
|
continue
|
|
fi
|
|
tline=$line
|
|
if grep -Eqi '^(http|direct=http)' <<< "${line}" > /dev/null ; then
|
|
if grep -q -e '^direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
listTodo="$listTodo$lineCnt\t${NC}${tline%%\|*}${NC}\\n"
|
|
else
|
|
listTodo="$listTodo$lineCnt\t${NC}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntTodo=$((cntTodo+1))
|
|
elif grep -q -P '^(#http|#direct=http)' <<< "${line}" > /dev/null ; then
|
|
if grep -q -e '^#direct=' <<< "${line}" > /dev/null ; then
|
|
tline=${tline/direct=/}
|
|
fi
|
|
if grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
tline=${tline#*#}
|
|
listOk="$listOk$lineCnt\t${GREEN}${tline%%\|*}${NC}\\n"
|
|
else
|
|
tline=${tline%%' #OK#'*}
|
|
tline=${tline#*#}
|
|
listOk="$listOk$lineCnt\t${GREEN}${tline//[$'\t\r\n']}${NC}\\n"
|
|
fi
|
|
cntOk=$((cntOk+1))
|
|
fi
|
|
done < "${InputFile}"
|
|
if [ -d "${WorkDir}/downloads" ]; then
|
|
size_complete=$(du -hs --apparent-size "${WorkDir}/downloads" | awk '{print $1}')
|
|
else
|
|
size_complete=""
|
|
fi
|
|
echo -e "${PINK}Line #\tUrl${NC}"
|
|
echo -e "------------------------------------------------"
|
|
echo -e "[Todo] ($cntTodo${NC}) - URLs to process"
|
|
echo -e "$listTodo"
|
|
if [ ! -z "$size_complete" ]; then
|
|
echo -e "[OK] (${GREEN}$cntOk${NC}) - URLs completed (commented out) [size: $size_complete]"
|
|
else
|
|
echo -e "[OK] (${GREEN}$cntOk${NC}) - URLs completed (commented out)"
|
|
fi
|
|
echo -e "$listOk"
|
|
fi
|
|
exit 0
|
|
}
|
|
madStatusUploads() {
|
|
local InputFile="$1"
|
|
if [ "$arg1" == "status" ] ; then
|
|
clear
|
|
fi
|
|
echo -e "${BLD}"
|
|
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
|
|
echo -e ":${NC} ${GREEN}MAD${PINK} Status${NC} : Report status of uploads in file${PINK}${BLD} :"
|
|
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"
|
|
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
|
|
InputFile="${WorkDir}/${InputFile}"
|
|
fi
|
|
if [ ! -f "${InputFile}" ]; then
|
|
printf "Unable to read file %s! [ms]\\n" "${InputFile}"
|
|
exit 1
|
|
fi
|
|
lineCnt=1
|
|
listTodo=""
|
|
cntTodo=0
|
|
listOk=""
|
|
cntOk=0
|
|
listRetry=""
|
|
cntRetry=0
|
|
listOther=""
|
|
cntOther=0
|
|
if [ "${AutoCommentOnCompletion}" == "true" ] ; then
|
|
while IFS= read -r line || [[ -n $line ]];
|
|
do
|
|
if grep -Eqi '^$|^ ' <<< "${line}" > /dev/null || grep -Eqvi '\|' <<< "${line}" > /dev/null; then
|
|
lineCnt=$((lineCnt+1))
|
|
continue
|
|
fi
|
|
tline="$line"
|
|
if grep -Eqi '^[^#]' <<< "${line}" > /dev/null && grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
tline="${tline//|/ [}""]"
|
|
listTodo="$listTodo$lineCnt\t${NC}${tline//[$'\t\r\n']}${NC}\\n"
|
|
cntTodo=$((cntTodo+1))
|
|
elif grep -Pqi '^#.*(?=#OK#)' <<< "${line}" > /dev/null ; then
|
|
tline="${tline#*\#}"
|
|
tline="${tline//|/ [}"
|
|
tline="${tline// #OK#/]}"
|
|
listOk="$listOk$lineCnt\t${NC}${GREEN}${tline//[$'\t\r\n']}${NC}\\n"
|
|
cntOk=$((cntOk+1))
|
|
elif grep -Pqi '^#.*(?=#RETRY#)' <<< "${line}" > /dev/null ; then
|
|
tline="${tline#*\#}"
|
|
tline="${tline//|/ [}"
|
|
tline="${tline// #RETRY#/]}"
|
|
listRetry="$listRetry$lineCnt\t${NC}${YELLOW}${tline//[$'\t\r\n']}${NC}\\n"
|
|
cntRetry=$((cntRetry+1))
|
|
elif grep -Pqi '^#.*(?=#FAIL#)' <<< "${line}" > /dev/null ; then
|
|
tline="${tline#*\#}"
|
|
tline="${tline//|/ [}"
|
|
tline="${tline// #FAIL#/]}"
|
|
listOther="$listOther$lineCnt\t${RED}${tline//[$'\t\r\n']}${NC}\\n"
|
|
cntOther=$((cntOther+1))
|
|
elif grep -Pqi '^#.*(?=#BAD-LINE#)' <<< "${line}" > /dev/null ; then
|
|
tline="${tline#*\#}"
|
|
tline="${tline//|/ [}"
|
|
tline="${tline// #BAD-LINE#/]}"
|
|
listOther="$listOther$lineCnt\t${RED}${tline//[$'\t\r\n']}${NC}\\n"
|
|
cntOther=$((cntOther+1))
|
|
fi
|
|
lineCnt=$((lineCnt+1))
|
|
done < "${InputFile}"
|
|
echo -e "${PINK}Line #\tFilename\tHostCode${NC}"
|
|
echo -e "------------------------------------------------"
|
|
echo -e "[Todo] (${BLUE}$cntTodo${NC}) - Uploads to process"
|
|
echo -e "$listTodo"
|
|
echo -e "[OK] (${GREEN}$cntOk${NC}) - Uploads completed"
|
|
echo -e "$listOk"
|
|
echo -e "[Retry] (${YELLOW}$cntRetry${NC}) - Uploads that failed and need a reset and retry"
|
|
echo -e "$listRetry"
|
|
echo -e "[Fail] (${RED}$cntOther${NC}) - Uploads that failed are have a bad line in file"
|
|
echo -e "$listOther"
|
|
else
|
|
while IFS= read -r line || [[ -n $line ]];
|
|
do
|
|
if grep -Eqi '^$|^ ' <<< "${line}" > /dev/null || grep -Eqvi '\|' <<< "${line}" > /dev/null; then
|
|
lineCnt=$((lineCnt+1))
|
|
continue
|
|
fi
|
|
tline=$line
|
|
if grep -Eqi '^[^#]' <<< "${line}" > /dev/null && grep -Eqi '\|' <<< "${line}" > /dev/null ; then
|
|
listTodo="$listTodo$lineCnt\t${NC}${tline//[$'\t\r\n']}${NC}\\n"
|
|
cntTodo=$((cntTodo+1))
|
|
elif grep -Pqi '^#.*(?=\|)' <<< "${line}" > /dev/null ; then
|
|
listOk="$listOk$lineCnt\t${GREEN}${tline//[$'\t\r\n']}${NC}\\n"
|
|
cntOk=$((cntOk+1))
|
|
fi
|
|
done < "${InputFile}"
|
|
echo -e "${PINK}Line #\tFilename\tHostCode${NC}"
|
|
echo -e "------------------------------------------------"
|
|
echo -e "[Todo] ($cntTodo${NC}) - Uploads to process"
|
|
echo -e "$listTodo"
|
|
echo -e "[OK] (${GREEN}$cntOk${NC}) - Uploads completed (commented out)"
|
|
echo -e "$listOk"
|
|
fi
|
|
exit 0
|
|
}
|
|
madHostDetails() {
|
|
clear
|
|
echo -e "${BLD}"
|
|
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
|
|
echo -e ":${NC} ${GREEN}MAD${PINK} Host Details${NC} :: Available hosts & functions${PINK}${BLD} :"
|
|
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"
|
|
echo -e ""
|
|
echo -e "${YELLOW}Download Hosts:${NC}"
|
|
echo -e "----------------------------------------------------------------${NC}"
|
|
echo -e ""
|
|
for fil in "${ScriptDir}"/hosts/*.sh ;
|
|
do
|
|
if [ -f "$fil" ]; then
|
|
tfilename="${fil##*/}"
|
|
if [[ "$tfilename" == "up_"* ]] ; then
|
|
continue
|
|
fi
|
|
local _hostcode=$(grep -oPi -m 1 '(?<=^HostCode='"'"').*?(?='"'"')' "$fil")
|
|
local _hostnick=$(grep -oPi -m 1 '(?<=^HostNick='"'"').*?(?='"'"')' "$fil")
|
|
local _hostfuncprefix=$(grep -oPi -m 1 '(?<=^HostFuncPrefix='"'"').*?(?='"'"')' "$fil")
|
|
local _hosturls=$(grep -oPi -m 1 '(?<=^HostUrls='"'"').*?(?='"'"')' "$fil")
|
|
local _hostdomainregex=$(grep -oPi -m 1 '(?<=^HostDomainRegex='"'"').*?(?='"'"')' "$fil")
|
|
local _currHostsFuncsUsed=$(grep '()' "${fil}" | awk '!/#/ {print $1}')
|
|
local chfuCleaned=${_currHostsFuncsUsed//$'\n'/, }
|
|
echo -e "[${YELLOW}$_hostcode${NC}] ${GREEN}$_hostnick${NC}"
|
|
echo -e "Module: ./hosts/${BLUE}${tfilename}${NC}"
|
|
echo -e "Urls: ${NC}$_hosturls${NC}"
|
|
echo -e "Regex: ${RED}${_hostdomainregex}${NC}"
|
|
echo -e "Functions: ${PINK}$chfuCleaned${NC}"
|
|
echo -e "Entrypoint: ${BLUE}${_hostfuncprefix}_DownloadFile()${NC}"
|
|
echo -e ""
|
|
fi
|
|
done
|
|
echo -e ""
|
|
echo -e ""
|
|
echo -e "${YELLOW}Upload Hosts:${NC}"
|
|
echo -e "----------------------------------------------------------------${NC}"
|
|
echo -e ""
|
|
for fil in "${ScriptDir}"/hosts/up_*.sh ;
|
|
do
|
|
if [ -f "$fil" ]; then
|
|
tfilename="${fil##*/}"
|
|
local _hostcode=$(grep -oPi -m 1 '(?<=^HostCode='"'"').*?(?='"'"')' "$fil")
|
|
local _hostnick=$(grep -oPi -m 1 '(?<=^HostNick='"'"').*?(?='"'"')' "$fil")
|
|
local _hostfuncprefix=$(grep -oPi -m 1 '(?<=^HostFuncPrefix='"'"').*?(?='"'"')' "$fil")
|
|
local _info=$(grep -oPi -m 1 '(?<=^#! Info: ).*$' "$fil")
|
|
local _msize=$(grep -oPi -m 1 '(?<=^#! MaxSize: ).*$' "$fil")
|
|
local _expire=$(grep -oPi -m 1 '(?<=^#! Expire: ).*$' "$fil")
|
|
local _currHostsFuncsUsed=$(grep '()' "${fil}" | awk '!/#/ {print $1}')
|
|
local chfuCleaned=${_currHostsFuncsUsed//$'\n'/, }
|
|
echo -e "[${YELLOW}$_hostcode${NC}] ${GREEN}$_hostnick${NC}"
|
|
echo -e "Module: ./hosts/${BLUE}${tfilename}${NC}"
|
|
echo -e "Info: ${NC}$_info${NC}"
|
|
echo -e "MaxSize: ${BLUE}${_msize}${NC}"
|
|
echo -e "Expire: ${RED}${_expire}${NC}"
|
|
echo -e "Functions: ${PINK}$chfuCleaned${NC}"
|
|
echo -e "Entrypoint: ${BLUE}${_hostfuncprefix}_UploadFile()${NC}"
|
|
echo -e ""
|
|
fi
|
|
done
|
|
}
|
|
madPluginDetails() {
|
|
clear
|
|
echo -e "${BLD}"
|
|
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
|
|
echo -e ":${NC} ${GREEN}MAD${PINK} Plugin Details${NC} :: Available plugins & functions${PINK}${BLD} :"
|
|
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"
|
|
for fil in "${ScriptDir}"/plugins/*.sh ;
|
|
do
|
|
if [ -f "$fil" ]; then
|
|
tfilename="${fil##*/}"
|
|
local _description=$(grep -oPzi '(?<=Desc: ).*\n.*(?=#.*Usage:)' "$fil" | tr '\0' ' ')
|
|
local _currPluginFuncsUsed=$(grep '()' "${fil}" | awk '!/#/ {print $1}')
|
|
local chfuCleaned=${_currPluginFuncsUsed//$'\n'/, }
|
|
echo -e "Plugin: ${YELLOW}${tfilename}${NC}${NC}"
|
|
echo -e "Functions: ${PINK}$chfuCleaned${NC}"
|
|
if [[ ! -z $_description ]]; then
|
|
echo -e "Desc: ${BLUE}${_description}${NC}"
|
|
else
|
|
echo -e ""
|
|
fi
|
|
fi
|
|
done
|
|
}
|
|
clipboard_monitor() {
|
|
InputFile="$1"
|
|
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
|
|
InputFile="${WorkDir}/${InputFile}"
|
|
fi
|
|
if [ ! -f "${InputFile}" ]; then
|
|
touch "${InputFile}"
|
|
fi
|
|
if ! hash xclip 2>/dev/null; then
|
|
echo -e "${YELLOW}xclip is required for clipmon. Use \"sudo apt install xclip\" to install.${NC}"
|
|
exit 1
|
|
fi
|
|
echo -e "${BLD}"
|
|
echo -e "${GREEN}MAD${PINK} Clipboard Monitor: ${GREEN}Enabled${NC}"
|
|
echo -e "| Capture File: ${BLUE}${InputFile}${NC}"
|
|
echo -e "| Log file: ${WorkDir}/.temp/_clipmonlog.txt${NC}"
|
|
echo -e
|
|
clipold=$(xclip -o 2>/dev/null)
|
|
exitcode=$?
|
|
if ! [ $exitcode -eq 0 ]; then
|
|
echo "" | xclip
|
|
clipold=$(xclip -o 2>/dev/null)
|
|
fi
|
|
while true; do
|
|
clipnew=$(xclip -o 2>/dev/null)
|
|
if ! [ "$clipold" == "$clipnew" ]; then
|
|
mkdir -p "${WorkDir}/.temp"
|
|
echo "$clipnew" >> "${WorkDir}/.temp/_clipmonlog.txt"
|
|
clipold="$clipnew"
|
|
sed 's/^[[:space:]]*// ; s/[[:space:]]*$//' <<< "${clipnew}" |
|
|
while IFS= read -r line
|
|
do
|
|
line="${line//[$'\t\r\n']}"
|
|
isHostMatchFound=false
|
|
_hostcode=""
|
|
_hostnick=""
|
|
_hostfuncprefix=""
|
|
_hostdomainregex=""
|
|
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
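# Each ListHostAndDomainRegexes entry is assumed to look like
# "/HOSTCODE/HostNick/funcprefix:domain-regex", with entries joined by '@'
# (inferred from the '/'-field cuts and the ':' split below).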
|
|
for hline in "${arrHostAndDomainRegexes[@]}";
|
|
do
|
|
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
|
|
continue
|
|
fi
|
|
chnk1="${hline%%:*}"
|
|
_hostcode=$(echo "$chnk1" | cut -f2 -d '/')
|
|
_hostnick=$(echo "$chnk1" | cut -f3 -d '/')
|
|
_hostfuncprefix=$(echo "$chnk1" | cut -f4 -d '/')
|
|
_hostdomainregex="${hline#*\:}"
|
|
if [[ "$line" =~ $_hostdomainregex ]]; then
|
|
isHostMatchFound=true
|
|
break
|
|
fi
|
|
done
|
|
if [ "$isHostMatchFound" == "true" ]; then
|
|
download_url=$(urlencode_literal_grouped_case_urlendingonly "$line")
|
|
echo -e "${GREEN}$_hostnick url found:${NC} $line${NC}"
|
|
echo -e "$line" >> ${InputFile}
|
|
fi
|
|
done
|
|
fi
|
|
sleep 1s
|
|
done
|
|
}
|
|
lstOnLoad=""
|
|
lstBeginProcessing=""
|
|
lstPreProcessUrl=""
|
|
lstPostSuccessfulDownload=""
|
|
lstPostFailedDownload=""
|
|
lstPostFailRetryDownload=""
|
|
lstDoneProcessingAllUrls=""
|
|
lstPostSuccessfulUpload=""
|
|
lstPostFailedUpload=""
|
|
lstPostFailRetryUpload=""
|
|
lstDoneProcessingAllUploads=""
|
|
OnLoad() {
|
|
local pScriptSourceDir="$1"
|
|
local pScriptArguments="$2"
|
|
if [[ -z "$lstOnLoad" ]]; then
|
|
return 0
|
|
fi
|
|
isHookMatch=false
|
|
hookReturnCode=0
|
|
readarray -d '@' -t arrHookList <<< "${lstOnLoad}"
|
|
for hookFunc in "${arrHookList[@]}";
|
|
do
|
|
if [[ "$hookFunc" == "" ]]; then
|
|
continue
|
|
fi
|
|
$hookFunc "$pScriptSourceDir" "$pScriptArguments"
|
|
ret=$?
|
|
if ((ret != 0)) && ((ret >hookReturnCode)); then
|
|
hookReturnCode=$ret
|
|
fi
|
|
done
|
|
return $hookReturnCode
|
|
}
|
|
BeginProcessing() {
|
|
local pInputFile="$1"
|
|
if [[ -z "$lstBeginProcessing" ]]; then
|
|
return 0
|
|
fi
|
|
isHookMatch=false
|
|
hookReturnCode=0
|
|
readarray -d '@' -t arrHookList <<< "${lstBeginProcessing}"
|
|
for hookFunc in "${arrHookList[@]}";
|
|
do
|
|
if [[ "$hookFunc" == "" ]]; then
|
|
continue
|
|
fi
|
|
$hookFunc "$pInputFile"
|
|
ret=$?
|
|
if ((ret != 0)) && ((ret >hookReturnCode)); then
|
|
hookReturnCode=$ret
|
|
fi
|
|
done
|
|
return $hookReturnCode
|
|
}
|
|
PreProcessUrl() {
|
|
local pFullLine="$1"
|
|
local pUrlOnly="${1%%\ *}"
|
|
if grep -Eqi '\|' <<< "${pUrlOnly}" ; then
|
|
pUrlOnly="${pUrlOnly%%\|*}"
|
|
fi
|
|
if grep -Eqi '^direct=' <<< "${pUrlOnly}" ; then
|
|
pUrlOnly="${pUrlOnly%%\=*}"
|
|
fi
|
|
if [[ -z "$lstPreProcessUrl" ]]; then
|
|
return 0
|
|
fi
|
|
isHookMatch=false
|
|
hookReturnCode=0
|
|
readarray -d '@' -t arrHookList <<< "${lstPreProcessUrl}"
|
|
for hookFunc in "${arrHookList[@]}";
|
|
do
|
|
if [[ "$hookFunc" == "" ]]; then
|
|
continue
|
|
fi
|
|
$hookFunc "$pFullLine" "$pUrlOnly"
|
|
ret=$?
|
|
if ((ret != 0)) && ((ret >hookReturnCode)); then
|
|
hookReturnCode=$ret
|
|
fi
|
|
done
|
|
return $hookReturnCode
|
|
}
|
|
PostSuccessfulDownload() {
|
|
local pRemoteUrl="$1"
|
|
local pFilePath="$2"
|
|
local pFileName="$3"
|
|
local pFolderName="$4"
|
|
local pFileSize="$5"
|
|
isHookMatch=false
|
|
hookReturnCode=0
|
|
if [[ -z "$lstPostSuccessfulDownload" ]]; then
|
|
return 0
|
|
fi
|
|
readarray -d '@' -t arrHookList <<< "${lstPostSuccessfulDownload}"
|
|
for hookFunc in "${arrHookList[@]}";
|
|
do
|
|
if [[ -z "$hookFunc" ]]; then
|
|
continue
|
|
fi
|
|
$hookFunc "$pRemoteUrl" "$pFilePath" "$pFileName" "$pFolderName" "$pFileSize"
|
|
ret=$?
|
|
if ((ret != 0)) && ((ret >hookReturnCode)); then
|
|
hookReturnCode=$ret
|
|
fi
|
|
done
|
|
return $hookReturnCode
|
|
}
|
|
PostFailedDownload() {
|
|
local pRemoteUrl="$1"
|
|
local pMessage1="$2"
|
|
local pMessage2="$3"
|
|
isHookMatch=false
|
|
hookReturnCode=0
|
|
if [[ -z "$lstPostFailedDownload" ]]; then
|
|
return 0
|
|
fi
|
|
readarray -d '@' -t arrHookList <<< "${lstPostFailedDownload}"
|
|
for hookFunc in "${arrHookList[@]}";
|
|
do
|
|
if [[ -z "$hookFunc" ]]; then
|
|
continue
|
|
fi
|
|
$hookFunc "$pRemoteUrl" "$pMessage1" "$pMessage2"
|
|
ret=$?
|
|
if ((ret != 0)) && ((ret >hookReturnCode)); then
|
|
hookReturnCode=$ret
|
|
fi
|
|
done
|
|
return $hookReturnCode
|
|
}

PostFailRetryDownload() {
  local pRemoteUrl="$1"
  local pFilePath="$2"
  local pFileName="$3"
  local pFolderName="$4"

  isHookMatch=false
  hookReturnCode=0

  if [[ -z "$lstPostFailRetryDownload" ]]; then
    return 0
  fi

  readarray -d '@' -t arrHookList <<< "${lstPostFailRetryDownload}"
  for hookFunc in "${arrHookList[@]}"; do
    if [[ -z "$hookFunc" ]]; then
      continue
    fi
    $hookFunc "$pRemoteUrl" "$pFilePath" "$pFileName" "$pFolderName"
    ret=$?
    if ((ret != 0)) && ((ret > hookReturnCode)); then
      hookReturnCode=$ret
    fi
  done

  return $hookReturnCode
}

DoneProcessingAllUrls() {
  local pInputFile="$1"

  isHookMatch=false
  hookReturnCode=0

  if [[ -z "$lstDoneProcessingAllUrls" ]]; then
    return 0
  fi

  readarray -d '@' -t arrHookList <<< "${lstDoneProcessingAllUrls}"
  for hookFunc in "${arrHookList[@]}"; do
    if [[ -z "$hookFunc" ]]; then
      continue
    fi
    $hookFunc "$pInputFile"
    ret=$?
    if ((ret != 0)) && ((ret > hookReturnCode)); then
      hookReturnCode=$ret
    fi
  done

  return $hookReturnCode
}

PostSuccessfulUpload() {
  local pFilePath="$1"
  local pHostCode="$2"
  local pFileName="$3"
  local pFileSize="$4"
  local pDownloadLink="$5"

  isHookMatch=false
  hookReturnCode=0

  if [[ -z "$lstPostSuccessfulUpload" ]]; then
    return 0
  fi

  readarray -d '@' -t arrHookList <<< "${lstPostSuccessfulUpload}"
  for hookFunc in "${arrHookList[@]}"; do
    if [[ -z "$hookFunc" ]]; then
      continue
    fi
    $hookFunc "$pFilePath" "$pHostCode" "$pFileName" "$pFileSize" "$pDownloadLink"
    ret=$?
    if ((ret != 0)) && ((ret > hookReturnCode)); then
      hookReturnCode=$ret
    fi
  done

  return $hookReturnCode
}

PostFailedUpload() {
  local pFilePath="$1"
  local pHostCode="$2"
  local pMessage1="$3"
  local pMessage2="$4"

  isHookMatch=false
  hookReturnCode=0

  if [[ -z "$lstPostFailedUpload" ]]; then
    return 0
  fi

  readarray -d '@' -t arrHookList <<< "${lstPostFailedUpload}"
  for hookFunc in "${arrHookList[@]}"; do
    if [[ -z "$hookFunc" ]]; then
      continue
    fi
    $hookFunc "$pFilePath" "$pHostCode" "$pMessage1" "$pMessage2"
    ret=$?
    if ((ret != 0)) && ((ret > hookReturnCode)); then
      hookReturnCode=$ret
    fi
  done

  return $hookReturnCode
}

PostFailRetryUpload() {
  local pFilePath="$1"
  local pHostCode="$2"
  local pMessage1="$3"
  local pMessage2="$4"

  isHookMatch=false
  hookReturnCode=0

  if [[ -z "$lstPostFailRetryUpload" ]]; then
    return 0
  fi

  readarray -d '@' -t arrHookList <<< "${lstPostFailRetryUpload}"
  for hookFunc in "${arrHookList[@]}"; do
    if [[ -z "$hookFunc" ]]; then
      continue
    fi
    $hookFunc "$pFilePath" "$pHostCode" "$pMessage1" "$pMessage2"
    ret=$?
    if ((ret != 0)) && ((ret > hookReturnCode)); then
      hookReturnCode=$ret
    fi
  done

  return $hookReturnCode
}

DoneProcessingAllUploads() {
  local pInputFile="$1"

  isHookMatch=false
  hookReturnCode=0

  if [[ -z "$lstDoneProcessingAllUploads" ]]; then
    return 0
  fi

  readarray -d '@' -t arrHookList <<< "${lstDoneProcessingAllUploads}"
  for hookFunc in "${arrHookList[@]}"; do
    if [[ -z "$hookFunc" ]]; then
      continue
    fi
    $hookFunc "$pInputFile"
    ret=$?
    if ((ret != 0)) && ((ret > hookReturnCode)); then
      hookReturnCode=$ret
    fi
  done

  return $hookReturnCode
}

CheckDownloadExists () {
  local cde_remote_url=$1
  local cde_MoveToFolder=$2
  local cde_filecnt=$3
  local cde_filename=$4
  local cde_file_path=$5
  local cde_completed_location=$6

  local cde_flockDownload="${WorkDir}/.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  if [[ -e "$cde_flockDownload" ]]; then
    fContents=$(cat "$cde_flockDownload")
    fContents=${fContents//[$'\t\r\n']}
    if [ "$AutoRenameDuplicateFilenames" == "true" ]; then
      if [ "$fContents" == "${cde_remote_url//[^a-zA-Z0-9]/}" ]; then
        echo -e "${YELLOW}[SKIP]${NC} Lock exists for filename. Is it downloading in another terminal?${NC}"
        echo -e "${YELLOW}File: ${NC}${cde_filename}"
        echo -e "${YELLOW}Lock: ${NC}./.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock"
        echo -e "URL: ${cde_remote_url}"
        printf "%s\\n" "================================================================================"
        fileAlreadyDone=true
        return 0
      else
        echo -e "${YELLOW}[RENAME]${NC} Lock exists for filename. Renaming download filename.${NC}"
        echo -e "${YELLOW}File: ${NC}${cde_filename}"
        echo -e "${YELLOW}Lock: ${NC}./.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock"
        echo -e "URL: ${cde_remote_url}"
        printf "%s\\n" "================================================================================"
        renameDuplicateDownload "${cde_remote_url}" "${cde_filename}"
        fileAlreadyDone=true
        return 0
      fi
    else
      echo -e "${YELLOW}[SKIP]${NC} Lock exists for filename. Is it downloading in another terminal?${NC}"
      echo -e "${YELLOW}File: ${NC}${cde_filename}"
      echo -e "${YELLOW}Lock: ${NC}./.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock"
      echo -e "URL: ${cde_remote_url}"
      printf "%s\\n" "================================================================================"
      if [ ! "$fContents" == "${cde_remote_url//[^a-zA-Z0-9]/}" ]; then
        skipUrlDownload "${cde_remote_url}" "${cde_filename}" "./.flocks/${cde_filename//[^a-zA-Z0-9\.\_\-]/}.flock" "$cde_flockDownload"
      fi
      fileAlreadyDone=true
      return 0
    fi
  fi

  if [[ -e "${cde_file_path}" ]]; then
    existing_file_size=$(stat --format="%s" "${cde_file_path}" | tr -d '[:space:]')
    if [[ "${existing_file_size}" -eq "${file_size_bytes}" ]]; then
      echo -e "${GREEN}File exists in downloads and the size matches the expected size.\nNo need to re-download.${NC}"
      mkdir -p "${WorkDir}/downloads"
      if [ ! "${cde_MoveToFolder}" == "" ] ; then
        if [ ! -d "${cde_completed_location}${cde_MoveToFolder}" ]; then
          mkdir -p "${cde_completed_location}${cde_MoveToFolder}"
        fi
        mv "${cde_file_path}" "${cde_completed_location}${cde_MoveToFolder}/"
        printf "\\n%s\\n" "================================================================================"
        echo -e "${RED}❤${GREEN}[OK] Active download exists and is complete.${NC}"
        echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
        echo -e "URL: ${cde_remote_url}"
        echo -e "Location: \"./downloads/${cde_MoveToFolder}/${cde_filename}\""
        printf "%s\\n" "================================================================================"
        successDownloadExists "${cde_remote_url}" "${cde_filename}" "${cde_MoveToFolder}" "${existing_file_size}" "${cde_completed_location}${cde_MoveToFolder}/${cde_filename}"
        fileAlreadyDone=true
        return 0
      else
        mv "${cde_file_path}" "${cde_completed_location}/"
        printf "\\n%s\\n" "================================================================================"
        echo -e "${RED}❤${GREEN}[OK] Active download exists and is complete.${NC}"
        echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
        echo -e "URL: ${cde_remote_url}"
        echo -e "Location: \"./downloads/${cde_filename}\""
        printf "%s\\n" "================================================================================"
        successDownloadExists "${cde_remote_url}" "${cde_filename}" "./" "${existing_file_size}" "${cde_completed_location}${cde_filename}"
        fileAlreadyDone=true
        return 0
      fi
    elif [ ! -z "$file_size_bytes" ] && [[ "${existing_file_size}" -gt "${file_size_bytes}" ]]; then
      echo -e "${RED}ERROR: File exists in downloads folder but is larger than expected.${NC}\nThis could be due to several instances saving the same file, an old download using the same name, or the host experiencing a temporary issue."
      fileExistsButSizeTooLargeDownload "${cde_remote_url}" "${cde_filename}" "${existing_file_size}" "${file_size_bytes}"
      exitDownloadError=true
      return 0
    fi
  fi

  if [ ! "${cde_MoveToFolder}" == "" ] ; then
    if [[ -e "${cde_completed_location}${cde_MoveToFolder}/${cde_filename}" ]] ; then
      if [ "$AutoRenameDuplicateFilenames" == "true" ]; then
        printf "\\n%s\\n" "================================================================================"
        echo -e "${RED}❤${GREEN}[RENAME] Download exists and is complete. Renaming download filename.${NC}"
        echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
        echo -e "URL: ${cde_remote_url}"
        echo -e "Location: \"./downloads/${cde_MoveToFolder}/${cde_filename}\""
        printf "%s\\n" "================================================================================"
        renameDuplicateDownload "${cde_remote_url}" "${cde_filename}"
        fileAlreadyDone=true
        return 0
      else
        printf "\\n%s\\n" "================================================================================"
        echo -e "${RED}❤${GREEN}[OK] Download exists and is complete.${NC}"
        echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
        echo -e "URL: ${cde_remote_url}"
        echo -e "Location: \"./downloads/${cde_MoveToFolder}/${cde_filename}\""
        printf "%s\\n" "================================================================================"
        successDownloadExists "${cde_remote_url}" "${cde_filename}" "${cde_MoveToFolder}" "${existing_file_size}" "${cde_completed_location}${cde_MoveToFolder}/${cde_filename}"
        fileAlreadyDone=true
        return 0
      fi
    fi
  else
    if [[ -e "${cde_completed_location}${cde_filename}" ]] ; then
      if [ "$AutoRenameDuplicateFilenames" == "true" ]; then
        printf "\\n%s\\n" "================================================================================"
        echo -e "${RED}❤${GREEN}[RENAME] Download exists and is complete. Renaming download filename.${NC}"
        echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
        echo -e "URL: ${cde_remote_url}"
        echo -e "Location: \"./downloads/${cde_filename}\""
        printf "%s\\n" "================================================================================"
        renameDuplicateDownload "${cde_remote_url}" "${cde_filename}"
        fileAlreadyDone=true
        return 0
      else
        printf "\\n%s\\n" "================================================================================"
        echo -e "${RED}❤${GREEN}[OK] Download exists and is complete.${NC}"
        echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
        echo -e "URL: ${cde_remote_url}"
        echo -e "Location: \"./downloads/${cde_filename}\""
        printf "%s\\n" "================================================================================"
        successDownloadExists "${cde_remote_url}" "${cde_filename}" "./" "${existing_file_size}" "${cde_completed_location}${cde_filename}"
        fileAlreadyDone=true
        return 0
      fi
    fi
  fi

  return 1
}
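
# Lock names are the filename stripped to [a-zA-Z0-9._-]; for example a file
# named "My file (2020).part1.rar" would lock as
# ./.flocks/Myfile2020.part1.rar.flock. The lock body holds the
# similarly-stripped source url, so duplicates of the *same* url are skipped
# while same-named files coming from other urls get renamed.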

CheckNoHtml() {
  local cde_remote_url=$1
  local cde_filename=$2
  local cde_file_path=$3

  if [ -f "${cde_file_path}" ] ; then
    badHtml=$(grep -aoPi '^.*(</HTML>|</script>|content-type|:error|not found|too many connections).*$' "${cde_file_path}")
    if [ "$badHtml" == "" ]; then
      return 0
    else
      if [ "${DebugAllEnabled}" == "true" ] ; then
        mkdir -p "${WorkDir}/.debug"
        cp "$cde_file_path" "${WorkDir}/.debug/$cde_filename.htmldebug.txt"
        echo -e ""
        printf "${YELLOW}HTML found:${NC} Details in ./.debug/_err.log"
        echo -e "-----------------------------------------------------------------" >> "${WorkDir}/.debug/_err.log"
        echo -e "DateTime: $(date +%y%m%d-%H%M%S)" >> "${WorkDir}/.debug/_err.log"
        echo -e "Message: Bad html found" >> "${WorkDir}/.debug/_err.log"
        echo -e "RemoteUrl: ${cde_remote_url}" >> "${WorkDir}/.debug/_err.log"
        echo -e "Filename: ${cde_filename}" >> "${WorkDir}/.debug/_err.log"
        echo -e "FilePath: ${cde_file_path}" >> "${WorkDir}/.debug/_err.log"
        echo -e "BadHtml: ${badHtml}" >> "${WorkDir}/.debug/_err.log"
        echo -e ""
      fi
      return 1
    fi
  else
    return 0
  fi
}
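
# CheckNoHtml is a heuristic: a healthy binary archive should never contain
# literal "</HTML>", "</script>" or "content-type" lines, so any match is
# treated as an error page or a chunk poisoned by a bad node.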

CheckFileSize() {
  local cde_remote_url=$1
  local cde_file_size_bytes=$2

  if [ -z "$cde_file_size_bytes" ]; then
    return 1
  fi

  if (( cde_file_size_bytes < MinimumAllowedFilesize )); then
    echo -e "${RED}| ERROR: File less than MinimumAllowedFilesize${NC} (${MinimumAllowedFilesize})${NC}"
    failedDownload "${cde_remote_url}" "File less than MinimumAllowedFilesize. (min: ${MinimumAllowedFilesize})" "${cde_file_size_bytes}"
    fileAlreadyDone=true
    return 0
  fi

  return 1
}
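
# Note the inverted return convention shared by CheckDownloadExists and
# CheckFileSize: 0 means "handled, stop processing this url" (callers bail
# out), while 1 means "nothing to do, continue with the download".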

ProcessCompletedDownload() {
  local cde_remote_url=$1
  local cde_MoveToFolder=$2
  local cde_filecnt=$3
  local cde_filename=$4
  local cde_file_size_bytes=$5
  local cde_completed_location=$6
  local cde_inflight_path=$7

  completed_location="${WorkDir}/downloads/"
  if [ ! -d "${WorkDir}/downloads" ]; then
    mkdir -p "${WorkDir}/downloads"
  fi

  if [ ! "${cde_MoveToFolder}" == "" ] ; then
    if [ ! -d "${cde_completed_location}${cde_MoveToFolder}" ]; then
      mkdir -p "${cde_completed_location}${cde_MoveToFolder}"
    fi
    mv "${cde_inflight_path}" "${cde_completed_location}${cde_MoveToFolder}/"
    printf "\\n%s\\n" "================================================================================"
    echo -e "${RED}❤${GREEN}[OK] Download complete.${NC}"
    echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
    echo -e "URL: ${cde_remote_url}"
    echo -e "Location: \"./downloads/${cde_MoveToFolder}/${cde_filename}\""
    printf "%s\\n" "================================================================================"
    successDownload "${cde_remote_url}" "${cde_filename}" "${cde_MoveToFolder}" "${cde_file_size_bytes}" "${cde_completed_location}${cde_MoveToFolder}/${cde_filename}"
  else
    mv "${cde_inflight_path}" "${WorkDir}/downloads/"
    printf "\\n%s\\n" "================================================================================"
    echo -e "${RED}❤${GREEN}[OK] Download complete.${NC}"
    echo -e "${GREEN}File: ${NC}#${cde_filecnt}, ${cde_filename}"
    echo -e "URL: ${cde_remote_url}"
    echo -e "Location: \"./downloads/${cde_filename}\""
    printf "%s\\n" "================================================================================"
    successDownload "${cde_remote_url}" "${cde_filename}" "./" "${cde_file_size_bytes}" "${cde_completed_location}${cde_filename}"
  fi
}
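
# Upload queue format: each non-comment line in uploads.txt is "path|hostcode"
# (the path may be absolute, or relative to ${WorkDir}/uploads/). For example,
# with a stock hostcode such as oshi:
#
#   archive.part1.rar|oshi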

MadUploadFromFileTxt() {
  local pInFile="$1"

  mkdir -p "${WorkDir}/uploads"
  clear
  echo -e "${BLD}"
  echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
  echo -e ":${NC} ${GREEN}MAD${PINK} Uploader${NC} :: Transfer files in uploads.txt${PINK}${BLD} :"
  echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"

  OnLoad "$0" "$@"

  if ((torPort >= 9050 && torPort <= 9150)); then
    printf "TorIp: ${GREEN}$TorIp${NC}, Tor listening on port ${GREEN}$torPort${NC}, "
  else
    printf "TorIp: ${RED}$TorIp${NC}, ${RED}Tor port not found !!${NC}.\\n"
    printf "Ensure Tor is setup and listening on a port between 9050 and 9150. Exiting...\\n"
    exit 1
  fi

  if [ "${UseTorCurlImpersonate}" == "true" ]; then
    printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
  else
    printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
  fi

  printf "ConnectTimeout: ${GREEN}$ConnectTimeout${NC}, UploadRetries: ${GREEN}$MaxUploadRetries${NC}, "
  if [ "${RateMonitorEnabled}" == "true" ]; then
    printf "RateMonitor: ${GREEN}${RateMonitorEnabled}${NC}"
  else
    printf "RateMonitor: ${GREY}${RateMonitorEnabled}${NC}"
  fi
  printf "\\n"
  echo -e "DefaultUploadHosts: ${BLUE}$DefaultUploadHosts${NC}"

  if [ "${DebugAllEnabled}" == "true" ] || [ "${DebugPluginsEnabled}" == "true" ] ; then
    bDebugMsgPrintCnt=0
    if [ "${DebugAllEnabled}" == "true" ]; then
      printf "DebugHosts: ${BLUE}${DebugAllEnabled}${NC}"
      bDebugMsgPrintCnt=$((bDebugMsgPrintCnt + 1))
    fi
    if [ "${DebugPluginsEnabled}" == "true" ]; then
      if ((bDebugMsgPrintCnt > 0)) ; then
        printf ", "
      fi
      printf "DebugPlugins: ${BLUE}${DebugPluginsEnabled}${NC}"
    fi
    printf "\\n"
  fi
  echo -e ""

  HashFilelistTxt=$( sha1sum "${InputFile}" | awk '{print $1}' )
  if [ "$VerboseLoading" == "true" ]; then
    printf "SHA1: %s\\n\\n" "${HashFilelistTxt}"
  fi

  mainLoopControl=true
  while ${mainLoopControl}
  do
    mainLoopControl="$LoopThroughFileUntilComplete"
    BeginProcessing "${InputFile}"

    qChkLineCount=$(grep -Evi '^#|^$|#OK#|#FAIL#|#RETRY#' "${InputFile}" | wc -l | awk '{ print $1 }')
    if ((qChkLineCount <= 0)) ; then
      if DoneProcessingAllUrls "${InputFile}" ; then
        if [ "${AutoShowMadStatus}" == "true" ] ; then
          echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D"
          madStatusUploads "${InputFile}"
        fi
        exit 0
      else
        continue
      fi
    fi

    fileCount=1
    lockCount=0
    sed 's/^[[:space:]]*// ; s/[[:space:]]*$//' "${InputFile}" |
    while IFS= read -r line
    do
      currentHashFilelistTxt=$( sha1sum "${InputFile}" 2>/dev/null | awk '{print $1}' )
      if ((qChkLineCount > 0)) && [ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ]; then
        printf "\\n%s\\n" "--------------------------------------------"
        printf "[${PINK}Reload${NC}] ${InputFile} was modified.\\n"
        printf "%s\\n" "--------------------------------------------"
        HashFilelistTxt="$currentHashFilelistTxt"
        mainLoopControl=false
        exec "$0" "upload" "${InputFile}"
        exit 1
      fi

      upfile_filepath=""
      upfile_filename=""
      upfile_hostcode=""
      upfile_fsize=0

      line="${line//[$'\t\r\n']}"
      if [[ ${line} =~ ^# ]] ; then
        continue
      elif grep -Eqvi '\|' <<< "$line" ; then
        continue
      fi

      if [[ $line =~ \| ]] ; then
        upfile_filepath="${line%%\|*}"
        upfile_hostcode="${line##*\|}"
      else
        continue
      fi

      if [ ! -f "$upfile_filepath" ]; then
        upfile_filepath="${WorkDir}/uploads/$upfile_filepath"
      fi
      upfile_filename="${upfile_filepath##*\/}"
      upfile_fsize=$(GetFileSize "$upfile_filepath" "false")

      if [ ! -f "$upfile_filepath" ] || ((upfile_fsize <= 0)) ; then
        uploadBadInputLine "$line" "Bad input line (file: $upfile_filepath, size: $upfile_fsize)"
        continue
      fi
      if [ -z "$upfile_hostcode" ] || [ -z "$upfile_filepath" ]; then
        uploadBadInputLine "$line" "Bad input line (check filename and hostcode are valid)"
        continue
      fi

      UploadTicket="${WorkDir}/.flocks/upload_${upfile_hostcode}_${upfile_filepath//[^a-zA-Z0-9]/}"
      if [ -f "$UploadTicket" ]; then
        echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/upload_${upfile_hostcode}_${upfile_filepath//[^a-zA-Z0-9]/}"
        fileCount=$((fileCount + 1))
        continue
      fi

      _hostcode=""
      _hostnick=""
      _hostfuncprefix=""
      _hostdomainregex=""
      readarray -d '@' -t arrUploadHosts <<< "${ListUploadHosts}"
      for hline in "${arrUploadHosts[@]}"; do
        if [[ "$hline" == "" ]] || [[ ! "$hline" == *"/"* ]]; then
          continue
        fi
        _hostcode=$(echo "$hline" | cut -f2 -d '/')
        _hostnick=$(echo "$hline" | cut -f3 -d '/')
        _hostfuncprefix=$(echo "$hline" | cut -f4 -d '/')
        if [[ "$upfile_hostcode" == "$_hostcode" ]] ; then
          printf "\nUploading ${BLUE}%s${NC} to ${YELLOW}%s${NC} ${GREEN}%d${NC} of ${GREEN}%d${NC}\\n" "$upfile_filename" "$_hostcode" "$fileCount" "$lineCount"
          printf "|Size: %d\\n" "$upfile_fsize"
          printf "|Path: %s\\n" "$upfile_filepath"
          mkdir -p "${WorkDir}/.flocks"
          touch "${UploadTicket}"
          echo -e "-----------------------------------------------------------------------"
          ${_hostfuncprefix}_UploadFile "${upfile_hostcode}" "${upfile_filepath}" "${fileCount}" "$line"
          echo -e ""
          break
        fi
      done
    done #loop through the file line by line

    qChkLineCount=$(grep -Evi '^#|^$|#OK#|#FAIL#|#RETRY#' "${InputFile}" | wc -l | awk '{ print $1 }')
    if ((qChkLineCount > 0)) ; then
      if [ "$LoopThroughFileUntilComplete" == "false" ]; then
        echo -e "${NC}"
        echo -e "${YELLOW}Unprocessed / Skipped File / HostCode(s) Found:${NC}"
        echo -e "Most likely another terminal is uploading it, or a flock exists...${NC}"
        if DoneProcessingAllUploads "${InputFile}" ; then
          exit 0
        else
          continue
        fi
      else
        for ((a=1; a<=4; a++)); do
          printf "\033[1A\r"
          printf "\33[2K\r"
        done
        echo -e "${NC}"
        echo -e "${YELLOW}Unprocessed / Skipped File / HostCode(s) Found:${NC}"
        echo -e "Most likely another terminal is uploading it, or a flock exists...${NC}"
        echo -e "${YELLOW}Catnapping${NC} for ${CatnapDuration} mins to allow ${YELLOW}$qChkLineCount${NC} upload(s) to finish... ${YELLOW}zZzZzZ${NC} ${BLUE}x$CatnapCount${NC}"
        sleep ${CatnapDuration}m
        continue
      fi
    else
      if DoneProcessingAllUploads "${InputFile}" ; then
        exit 0
      else
        continue
      fi
    fi
  done #loop until done
}

MadUploadFilesInUploadsFolder() {
  clear
  echo -e "${BLD}"
  echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
  echo -e ":${NC} ${GREEN}MAD${PINK} Uploader${NC} : Transfer files in ./uploads folder${PINK}${BLD} :"
  echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"

  OnLoad "$0" "$@"

  if ((torPort >= 9050 && torPort <= 9150)); then
    printf "TorIp: ${GREEN}$TorIp${NC}, Tor listening on port ${GREEN}$torPort${NC}, "
  else
    printf "TorIp: ${RED}$TorIp${NC}, ${RED}Tor port not found !!${NC}.\\n"
    printf "Ensure Tor is setup and listening on a port between 9050 and 9150. Exiting...\\n"
    exit 1
  fi

  if [ "${UseTorCurlImpersonate}" == "true" ]; then
    printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
  else
    printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
  fi

  printf "ConnectTimeout: ${GREEN}$ConnectTimeout${NC}, UploadRetries: ${GREEN}$MaxUploadRetries${NC}, "
  if [ "${RateMonitorEnabled}" == "true" ]; then
    printf "RateMonitor: ${GREEN}${RateMonitorEnabled}${NC}"
  else
    printf "RateMonitor: ${GREY}${RateMonitorEnabled}${NC}"
  fi
  printf "\\n"
  echo -e "DefaultUploadHosts: ${BLUE}$DefaultUploadHosts${NC}"

  if [ "${DebugAllEnabled}" == "true" ] || [ "${DebugPluginsEnabled}" == "true" ] ; then
    bDebugMsgPrintCnt=0
    if [ "${DebugAllEnabled}" == "true" ]; then
      printf "DebugHosts: ${BLUE}${DebugAllEnabled}${NC}"
      bDebugMsgPrintCnt=$((bDebugMsgPrintCnt + 1))
    fi
    if [ "${DebugPluginsEnabled}" == "true" ]; then
      if ((bDebugMsgPrintCnt > 0)) ; then
        printf ", "
      fi
      printf "DebugPlugins: ${BLUE}${DebugPluginsEnabled}${NC}"
    fi
    printf "\\n"
  fi
  echo -e ""

  echo -e " Filesize Filename (./uploads/ folder)"
  echo -e "-------------------------------------------------"
  fileCount=1
  for fil in "${WorkDir}/uploads/"*.[Rr][aA][rR] \
             "${WorkDir}/uploads/"*.[7][zZ] \
             "${WorkDir}/uploads/"*.[0-9][0-9][0-9] ; do
    if [ -d "$fil" ] || [ ! -f "$fil" ]; then
      continue
    fi
    fsize=$(GetFileSize "$fil" "false")
    if [ -f "$fil" ] && ((fsize > 0)); then
      tfilename="${fil##*/}"
      printf "${BLUE}%10s${NC} ${GREEN}%-56s${NC}\\n" "$(GetFileSize "$fil" "true")" "${tfilename}"
    fi
  done
  printf "\\n"

  yes_or_no "Are you sure you wish to continue uploading the following files? " && {
    local lstUploadto=""
    echo -e ""
    echo -e "Available ${YELLOW}HostCodes${NC} of hosts to upload to..."
    _hostcode=""
    _hostnick=""
    _hostfuncprefix=""
    isHostMatchFound=false
    _helpprochostcodes=""
    readarray -d '@' -t arrListUploadHosts <<< "${ListUploadHosts}"
    local itemcount=1
    for hline in "${arrListUploadHosts[@]}"; do
      if [ -z "$hline" ] ; then
        continue
      fi
      _hostcode=$(echo "$hline" | cut -f2 -d '/')
      _hostnick=$(echo "$hline" | cut -f3 -d '/')
      _hostfuncprefix=$(echo "$hline" | cut -f4 -d '/')
      if [ -z "$_hostcode" ] ; then
        continue
      fi
      if [[ ! "$_helpprochostcodes" == *":${_hostcode}:"* ]]; then
        _helpprochostcodes="${_helpprochostcodes}:${_hostcode}:"
        if ((itemcount <= 2)); then
          printf "${YELLOW}%9s${NC} %-15s${NC}" "${_hostcode}" "${_hostnick}"
          itemcount=$((itemcount + 1))
        else
          printf "${YELLOW}%9s${NC} %-15s${NC}\\n" "${_hostcode}" "${_hostnick}"
          itemcount=1
        fi
      fi
    done
    if ((itemcount <= 1)); then
      printf "\\n"
    else
      printf "\\n\\n"
    fi

    echo -e "Enter hostcodes for the hosts you wish to upload to (comma separated list):"
    echo -e "ie. ${YELLOW}1f,kraken,oshi${NC}, ${RED}q${NC} or ${RED}e${NC} to quit, or ${BLUE}d${NC} to use the default hosts"
    echo -e "${BLUE}Default Hosts: ${YELLOW}${DefaultUploadHosts}${NC}"
    read -p "--> " lstUploadto
    if [[ "$lstUploadto" =~ ^(q|Q|quit|e|E|exit)$ ]] ; then
      exit 0
    fi
    if [[ "$lstUploadto" == 'd' ]] || [[ "$lstUploadto" == 'D' ]]; then
      lstUploadto="$DefaultUploadHosts"
    fi
    echo -e ""

    for fil in "${WorkDir}/uploads/"*.[Rr][aA][rR] \
               "${WorkDir}/uploads/"*.[7][zZ] \
               "${WorkDir}/uploads/"*.[0-9][0-9][0-9] ; do
      if [ -d "$fil" ] || [ ! -f "$fil" ]; then
        continue
      fi
      fsize=$(GetFileSize "$fil" "false")
      if ((fsize > 0)); then
        tfilename="${fil##*/}"
        readarray -d ',' -t arrUploadHostCodes <<< "${lstUploadto}"
        for hline in "${arrUploadHostCodes[@]}"; do
          if [[ "$hline" == "" ]] ; then
            continue
          fi
          userentry_hostcode=$(TrimWhitespace "$hline")
          linematch=""
          if [ -f "${WorkDir}/uploads/temp_upload_handler.txt" ]; then
            linematch=$(grep -Eni -m 1 "\[OK\] file: ${tfilename}, host: ${userentry_hostcode},.*\$" "${WorkDir}/uploads/temp_upload_handler.txt")
            if [ ! -z "$linematch" ] ; then
              echo -e ""
              echo -e "${GREEN}$tfilename${NC} already uploaded to ${userentry_hostcode} in ${GREEN}temp_upload_handler.txt${NC}"
              echo -e "${BLUE}line${NC}: ${linematch//, /\\n}"
              echo -e ""
              successUploadExists "$tfilename" "$userentry_hostcode" "File/Host (OK) found in ./uploads/temp_upload_handler.txt (line#: ${linematch%%:*})"
              continue
            fi
            linematch=$(grep -Eni -m 1 "\[FAIL\] file: ${tfilename}, host: ${userentry_hostcode},.*\$" "${WorkDir}/uploads/temp_upload_handler.txt")
            if [ ! -z "$linematch" ] ; then
              echo -e ""
              echo -e "${GREEN}$tfilename${NC} already failed upload to ${userentry_hostcode} in ${GREEN}temp_upload_handler.txt${NC}"
              echo -e "${BLUE}line${NC}: ${linematch//, /\\n}"
              echo -e ""
              skipfailedUpload "$pline" "$tfilename" "$userentry_hostcode" "File/Host (FAIL) found in ./uploads/temp_upload_handler.txt (line#: ${linematch%%:*})"
              continue
            fi
            linematch=$(grep -Eni -m 1 "\[RETRY\] file: ${tfilename}, host: ${userentry_hostcode},.*\$" "${WorkDir}/uploads/temp_upload_handler.txt")
            if [ ! -z "$linematch" ] ; then
              echo -e ""
              echo -e "${GREEN}$tfilename${NC} already failed upload to ${userentry_hostcode} in ${GREEN}temp_upload_handler.txt${NC}"
              echo -e "${BLUE}line${NC}: ${linematch//, /\\n}"
              echo -e ""
              skipfailedUpload "$pline" "$tfilename" "$userentry_hostcode" "File/Host (FAIL or RETRY) found in ./uploads/temp_upload_handler.txt (line#: ${linematch%%:*})"
              continue
            fi
          fi

          if [[ "$ListUploadHosts" == *"/$userentry_hostcode/"* ]]; then
            _hostcode=""
            _hostnick=""
            _hostfuncprefix=""
            isHostMatchFound=false
            _helpprochostcodes=""
            readarray -d '@' -t arrListUploadHosts <<< "${ListUploadHosts}"
            local itemcount=1
            for hline in "${arrListUploadHosts[@]}"; do
              if [[ "$hline" == "" ]] ; then
                continue
              fi
              _hostcode=$(echo "$hline" | cut -f2 -d '/')
              _hostnick=$(echo "$hline" | cut -f3 -d '/')
              _hostfuncprefix=$(echo "$hline" | cut -f4 -d '/')
              if [[ "$_hostcode" == "$userentry_hostcode" ]]; then
                UploadTicket="${WorkDir}/.flocks/upload_${_hostcode}_${fil//[^a-zA-Z0-9]/}"
                if [ -f "$UploadTicket" ]; then
                  echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/upload_${_hostcode}_${fil//[^a-zA-Z0-9]/}"
                  continue
                fi
                mkdir -p "${WorkDir}/.flocks"
                touch "${UploadTicket}"
                echo -e "-----------------------------------------------------------------------"
                ${_hostfuncprefix}_UploadFile "${userentry_hostcode}" "${fil}" "${fileCount}"
                echo -e ""
                break
              fi
            done
          fi
        done
      fi
    done
  }
  exit 0
}
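
# direct_* handlers accept any url verbatim. direct_FetchFileInfo alternates
# probe styles across attempts: odd attempts use a timed HEAD request, even
# attempts use a GET that discards the body (-o /dev/null), since some hosts
# answer one style but hang on the other.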

direct_FetchFileInfo() {
  echo -e "${GREEN}# Fetching file info…${NC}"
  finalAttempt=$1

  CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
  trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ''; tput cnorm; exit" 0 1 2 3 6 15

  download_url=${file_url}
  if grep -Eqi '.onion' <<< "$download_url" && grep -Eqi 'https://' <<< "$download_url" ; then
    echo -e "${PINK}| Reverting .onion address to http...${NC}"
    download_url="${download_url/https/http}"
  fi

  file_id=$(grep -oP '[^\/]*$' <<< "$file_url")
  maxretries=4
  for ((j=1; j<=maxretries; j++)); do
    mkdir -p "${WorkDir}/.temp"
    if ((j > 1)); then
      tput rc; tput el;
    fi
    tput sc
    filename=""
    file_size_bytes=""
    tor_identity="${RANDOM}"
    if ((j % 2 == 0)); then
      printf "| Retrieving Get/Head: attempt #$j"
      file_header=$(tor_curl_request --insecure -m 8 -s -D - -o /dev/null \
        -H "Connection: keep-alive" \
        -w 'EffectiveUrl=%{url_effective}' \
        "$download_url")
    else
      printf "| Retrieving Head: attempt #$j"
      rm -f "${WorkDir}/.temp/directhead"
      file_header=$(tor_curl_request --insecure --head -H "Connection: keep-alive" -L -s -i "$download_url" |
        tee "${WorkDir}/.temp/directhead" &
        sleep 6
        [ -s "${WorkDir}/.temp/directhead" ]
        kill $! 2>/dev/null
      )
      if [ ! -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}" ]; then
        touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
      fi
      rm -f "${WorkDir}/.temp/directhead"
    fi

    if [ "${DebugAllEnabled}" == "true" ] ; then
      urllen=${#remote_url}
      if ((urllen > 64)); then
        debugHtml "${remote_url##*/}" "direct_${RANDOM}_head" "download_url: ${download_url}"$'\n'"${file_header}"
      else
        debugHtml "${remote_url##*/}" "direct_${remote_url//[^a-zA-Z0-9]/}_head" "download_url: ${download_url}"$'\n'"${file_header}"
      fi
    fi

    if [ ! -z "$file_header" ] ; then
      if grep -Eqi 'HTTP/.*404' <<< "${file_header}" ; then
        printf "\\n"
        echo -e "${RED}| 404. The file was not found or has been removed.${NC}"
        removedDownload "${remote_url}"
        exitDownloadNotAvailable=true
        return 1
      fi
      if ! grep -Eqi 'HTTP/.*200|HTTP/.*302' <<< "${file_header}" ; then
        hResponse=$(grep -oPi 'HTTP/.*? \K.*$' <<< "${file_header}")
        if ((j >= maxretries)); then
          printf "\\n"
          echo -e "${RED}| Unexpected header response: ${hResponse}${NC}"
          failedRetryDownload "${remote_url}" "Unexpected header response: ${hResponse}" ""
          exitDownloadNotAvailable=true
          return 1
        else
          printf "\\n"
          echo -e "${YELLOW}| Unexpected header response: ${hResponse}. Retrying...${NC}"
          continue
        fi
      fi

      if [ "$filename_override" == "" ] && [ "$filename" == "" ] ; then
        if grep -Eqi 'filename=' <<< "${file_header}" ; then
          filename=$(grep -oPi -m 1 'filename=\K.*?$' <<< "${file_header}")
          filename="${filename%%;*}"
          filename=${filename//filename*=UTF-8\'\'/}
          filename=${filename##filename}
          filename=${filename//\"/}
          filename=${filename//[$'\t\r\n']}
        elif [ ! -z "$file_id" ]; then
          if grep -Eqi '(.rar|.7z|.zip|.[0-9][0-9][0-9])$' <<< "${file_id}" ; then
            printf "\\n"
            echo -e "${YELLOW}| Failed to extract file name, using url name.${NC}"
            filename="$file_id"
          else
            printf "\\n"
            echo -e "${YELLOW}| Failed to extract file name. Using url fileid.${NC}"
            filename="${file_id}.renameMe"
          fi
        else
          printf "\\n"
          echo -e "${YELLOW}| Failed to extract file name. Using url.${NC}"
          filename="${remote_url//[^a-zA-Z0-9]/}.renameMe"
          filename="${filename/https/}"
        fi
      fi

      if grep -Eqi 'content-length:' <<< "${file_header}" ; then
        file_size_bytes=$(grep -oPi '(?<=content-length: ).*?(?=$)' <<< "$file_header")
        file_size_bytes=${file_size_bytes//[$'\t\r\n']}
      else
        printf "\\n"
        echo -e "${YELLOW}| Failed to retrieve filesize${NC}"
        if ((j >= maxretries)); then
          file_size_bytes=""
        else
          continue
        fi
      fi
      printf "\\n"
      break
    else
      if ((j >= maxretries)); then
        printf "\\n"
        echo -e "${YELLOW}| No response. Attempting direct download without head${NC}"
        return 1
      else
        continue
      fi
    fi
  done

  touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"

  if [ ! "$filename_override" == "" ] ; then
    filename="$filename_override"
  fi
  filename=$(sanitize_file_or_folder_name "${filename}")
  if [ -z "$filename" ]; then
    printf "\\n"
    echo -e "${RED}| Unexpected or no header response [no filename]${NC}"
    return 1
  fi

  if [ -z "$file_size_bytes" ] ; then
    file_size_readable="${RED}Unknown filesize…${NC}"
  else
    file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
  fi
  echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"

  file_path="${download_inflight_path}${filename}"
  echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""

  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
    return 1
  fi
  echo "${remote_url//[^a-zA-Z0-9]/}" > "$flockDownload"
}
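
# direct_GetFile resumes with --continue-at - when the expected size is known.
# With an unknown size it downloads from scratch and deletes any partial on a
# bad exit status, since a resume offset can't be validated against anything.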

direct_GetFile() {
  echo -e "${GREEN}# Downloading…"
  echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
  fileCnt=$1
  retryCnt=$2
  finalAttempt=$3

  flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
  for ((j=1; j<=MaxDownloadRetries; j++)); do
    pd_presize=0
    if [ -f "$file_path" ] ; then
      pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
    fi

    if [ -z "$file_size_bytes" ] ; then
      echo -e "${BLUE}| No Resume Fetch${NC} (unknown filesize)"
      CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
      trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ''; tput cnorm; exit" 0 1 2 3 6 15
      tor_curl_request --insecure -L --referer "$file_url" "$download_url" --output "$file_path"
      rc=$?
      if [ $rc -ne 0 ] ; then
        printf "${RED}Download Failed (bad exit status).${NC}"
        if [ -f "${file_path}" ]; then
          printf "${YELLOW} Partial removed...${NC}"
          printf "\n\n"
          rm -f "${file_path}"
        else
          printf "\n\n"
        fi
        if ((j >= MaxDownloadRetries)) ; then
          rm -f "$flockDownload"
          if [ "${finalAttempt}" == "true" ] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi

      if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
        containsHtml=false
      else
        containsHtml=true
      fi
      if [ "$containsHtml" == "true" ]; then
        echo -e "${YELLOW}Download Failed (contains html)${NC} partial removed..."
        rm -f "${file_path}"
        if ((j >= MaxDownloadRetries)) ; then
          rm -f "$flockDownload"
          if [ "${finalAttempt}" == "true" ] ; then
            droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
          fi
          return 1
        else
          continue
        fi
      fi
      break
    else
      CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
      trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ''; tput cnorm; exit" 0 1 2 3 6 15
      if [ "${RateMonitorEnabled}" == "true" ]; then
        tor_curl_request --insecure -L --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval "$download_url" --continue-at - --output "$file_path"
      else
        tor_curl_request --insecure -L --referer "$file_url" "$download_url" --continue-at - --output "$file_path"
      fi

      received_file_size=0
      if [ -f "$file_path" ] ; then
        received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
      fi
      if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
        containsHtml=false
      else
        containsHtml=true
      fi

      downDelta=$(( received_file_size - pd_presize ))
      if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
        if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
          if [ -f "${file_path}" ] ; then
            if ((pd_presize > 0)); then
              echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
              truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
              truncate -s $pd_presize "${file_path}"
            else
              echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
              rm -f "${file_path}"
            fi
          fi
          if ((j >= MaxDownloadRetries)) ; then
            rm -f "$flockDownload"
            if [ "${finalAttempt}" == "true" ] ; then
              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
            fi
            return 1
          else
            continue
          fi
        elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
          if [ -f "${file_path}" ] ; then
            if ((pd_presize > 0)); then
              echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
              truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
              truncate -s $pd_presize "${file_path}"
            else
              echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
              rm -f "${file_path}"
            fi
          fi
          if ((j >= MaxDownloadRetries)) ; then
            rm -f "$flockDownload"
            if [ "${finalAttempt}" == "true" ] ; then
              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
            fi
            return 1
          else
            continue
          fi
        elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
          if [ -f "$file_path" ] ; then
            rm -f "$file_path"
          fi
          echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
          if ((j >= MaxDownloadRetries)) ; then
            rm -f "$flockDownload"
            if [ "${finalAttempt}" == "true" ] ; then
              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
            fi
            return 1
          else
            continue
          fi
        fi

        if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
          echo -e "\n${RED}Download failed, file is incomplete.${NC}"
          if ((j >= MaxDownloadRetries)) ; then
            rm -f "$flockDownload"
            if [ "${finalAttempt}" == "true" ] ; then
              droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
            fi
            return 1
          else
            continue
          fi
        fi
      else
        break
      fi
    fi
  done

  rm -f "$flockDownload"
  ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
  return 0
}
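
# AutoRepairBadPartials treats a tiny growth of the partial (under 1 KiB in a
# whole attempt) as a bad-node response rather than real progress: the file is
# truncated back to its pre-attempt size so the next resume starts from
# known-good bytes instead of baking an error page into the archive.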

direct_DownloadFile() {
  local remote_url="${1}" # url from urls.txt
  local filecnt=${2}
  local file_url="${3}"   # Override url ie. lainsafe.onion --> lainsafe

  if [ -z "$file_url" ]; then
    file_url="$remote_url"
  fi

  warnAndRetryUnknownError=false
  exitDownloadError=false
  exitDownloadNotAvailable=false
  fileAlreadyDone=false

  download_inflight_path="${WorkDir}/.inflight/"
  mkdir -p "$download_inflight_path"
  completed_location="${WorkDir}/downloads/"
  tor_identity="${RANDOM}"

  finalAttempt="false"
  for ((z=0; z<=MaxUrlRetries; z++)); do
    if [ $z -eq $MaxUrlRetries ] ; then
      finalAttempt="true"
    fi
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ''; tput cnorm; exit" 0 1 2 3 6 15
    if direct_FetchFileInfo $finalAttempt && direct_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
      return 0
    elif [ $z -lt $MaxUrlRetries ]; then
      if [ "${fileAlreadyDone}" == "true" ] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
        fi
      fi
      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
        break
      fi
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
      sleep 3
    fi
  done

  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
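
# jira_* below is a host handler built on a plain multipart POST. It relies on
# globals set elsewhere in the script (jira_PostUrlHost, jira_timeval,
# jira_filetype, jira_downloadLinkPrefix, jira_MaxUploadSizeInBytes); none of
# them are defined in this section.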

jira_UploadFile() {
  local _hostCode=${1}
  local filepath=${2}
  local filecnt=${3}
  local pline=${4}
  local filename="${filepath##*/}"

  warnAndRetryUnknownError=false
  exitUploadError=false
  exitUploadNotAvailable=false
  fileAlreadyDone=false
  tor_identity="${RANDOM}"

  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  fsize=$(GetFileSize "$filepath" "false")
  if ((fsize > jira_MaxUploadSizeInBytes)); then
    rm -f "${UploadTicket}"
    echo -e "${YELLOW}| SKIP${NC}: The size of $filename is too large for $_hostCode. ($fsize > $jira_MaxUploadSizeInBytes)"
    failedUpload "$pline" "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is too large for $_hostCode. ($fsize > $jira_MaxUploadSizeInBytes)"
    return 1
  fi

  finalAttempt="false"
  for ((z=0; z<=MaxUploadRetries; z++)); do
    if [ $z -eq $MaxUploadRetries ] ; then
      finalAttempt="true"
    fi
    trap "rm -f ${UploadTicket}; echo ''; tput cnorm; exit" 0 1 2 3 6 15
    if jira_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt "$pline" ; then
      return 0
    elif [ $z -lt $MaxUploadRetries ]; then
      if [ "${fileAlreadyDone}" == "true" ] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
        fi
      fi
      if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
        if [ "${DebugAllEnabled}" == "true" ] ; then
          debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${UploadTicket}"
        break
      fi
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
      sleep 3
    fi
  done

  rm -f "${UploadTicket}"
}

jira_PostFile() {
  local filepath=$1
  local _hostCode=$2
  local filename=$3
  local fileCnt=$4
  local retryCnt=$5
  local finalAttempt=$6
  local pline=${7}

  UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
  echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
  tor_identity="${RANDOM}"
  arrFiles=("$filepath")
  trap "rm -f ${UploadTicket}; echo ''; tput cnorm; exit" 0 1 2 3 6 15

  if ((jira_filetype == 1)) ; then
    response=$(tor_curl_upload --insecure -i \
      -H "Content-Type: multipart/form-data" \
      -F "key=" \
      -F "time=$jira_timeval" \
      -F "file=@${filepath}" \
      "${jira_PostUrlHost}")
  else
    response=$(tor_curl_upload --insecure -i \
      -H "Content-Type: multipart/form-data" \
      -F "key=" \
      -F "time=$jira_timeval" \
      -F "files[]=@${arrFiles[@]}" \
      "${jira_PostUrlHost}")
  fi

  if [ "${DebugAllEnabled}" == "true" ] ; then
    debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${jira_PostUrlHost}"$'\n'"${response}"
  fi

  if grep -Eqi ' 200 ' <<< "${response}" ; then
    hash=$(echo "$response" | tail -2 | head -1)
    hash=${hash//[$'\t\r\n']}
    filesize=$(GetFileSize "$filepath" "false")
    downloadLink="${jira_downloadLinkPrefix}${hash}&p=1"
    echo -e "${GREEN}| Upload Success${NC}"
    echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
    echo -e "| Link: ${YELLOW}${downloadLink}${NC}"
    successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "${response}"
    return 0
  else
    err=$(grep -oPi '(?<=HTTP/).*?(?=$)' <<< "$response")
    if [ "${finalAttempt}" == "true" ] ; then
      printf "\\n"
      echo -e "${RED}| Upload failed. Status: ${err}${NC}"
      failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
      exitUploadError=true
      return 1
    else
      return 1
    fi
  fi
}
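
# ---- script entry point: session setup, config load, and arg dispatch ----
# mad.config (if present next to the script) is sourced over the built-in
# defaults, so any global can be overridden there; a minimal sketch (the
# values are illustrative, not recommendations):
#
#   UseTorCurlImpersonate=true
#   WorkDirOverride="/mnt/storage/mad"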

backupIFS=$IFS
IFS=$'\n\b'

PINK=$(tput setaf 5)
BLD=$(tput bold) # Bold text
WHITE="\033[1;37m"
CYAN="\033[1;36m"
BLUE="\033[1;34m"
YELLOW="\033[1;33m"
GREEN="\033[1;32m"
RED="\033[1;31m"
GREY="\033[1;30m"
NC="\033[0m" # No color

tput civis # Hide the cursor
trap "tput el; tput cnorm; exit" 0 1 2 3 6 15

CatnapCount=0
UrlOnly=false

didSourceMadConfig=false
if [ -f "${ScriptDir}/mad.config" ] ; then
  source "${ScriptDir}/mad.config"
  didSourceMadConfig=true
fi

if [ ! -z "${WorkDirOverride}" ]; then
  WorkDir="${WorkDirOverride}"
  if [ ! -d "${WorkDir}" ]; then
    mkdir -p "${WorkDir}"
  fi
fi

if [[ "$1" == "audit" ]]; then
  madAudit
  exit 0
fi

torPort=$(checkTor)
if [ "$torPort" == "" ] ; then
  printf "%s\\n" "Tor is not running!"
  exit 1
fi

if [ "${UseTorCurlImpersonate}" == "true" ]; then
  curl_impersonate=()
  readarray -t arrFiles < <(find "$ScriptDir" -maxdepth 1 -name "curl_*" -printf '%p\n' | sort -Vk1)
  bFoundCurlHeader=false
  for fil in "${arrFiles[@]}"; do
    if which "$fil" >/dev/null; then
      curl_impersonate=("$fil")
      bFoundCurlHeader=true
      break
    fi
  done
  if [ "$bFoundCurlHeader" == "false" ]; then
    echo -e "${RED}[ERROR] Missing dependency \"curl-impersonate\"!${NC}"
    echo -e "Some hosts use CloudFlare to detect and block scripts (such as hexload)."
    echo -e "To get around it, this script needs to impersonate a browser."
    echo -e "You'll need to download ${GREEN}\"curl-impersonate\"${NC}."
    echo -e ""
    echo -e "[Manual Installation]"
    echo -e "The latest binary can be obtained on GitHub, search for \"curl-impersonate\""
    echo -e "To access the releases on GitHub without javascript, do the following:"
    echo -e " 1. Visit the page of curl-impersonate and add \"/releases/latest/\" at the end of the URL."
    echo -e " 2. You'll be redirected to the latest version, e.g: \"/releases/tag/v0.5.4\""
    echo -e " 3. In the URL replace \"tag\" with \"expanded_assets\""
    echo -e "    e.g. \"/releases/expanded_assets/v0.6.1\""
    echo -e " 4. Download archive ${GREEN}\"curl-impersonate-vX.Y.Z.x86_64-linux-gnu.tar.gz\"${YELLOW}."
    echo -e " 5. Extract files ${GREEN}\"curl-impersonate-ff\"${NC} and ${GREEN}\"curl_ff109\"${NC} next to this script."
    echo -e ""
    echo -e "[Auto Install]"
    echo -e "run $0 install_curl_impersonate\\n"
    echo -e ""
    yes_or_no "Do you wish to download and extract latest curl_impersonate (using tor+curl)?" && {
      UseTorCurlImpersonate=false
      install_curl_impersonate
    }
    exit 0
  fi
fi

arg1="$1" # filelist, multi, hex, 1f, pd, reset, status
arg2="$2" # auto, filelist, <https://url>
arg3="$3" # filelist
arg4="$4" # moveToFolder
arg5="$5" # fileCount
arg6="$6" # lineCount

if (( $# < 1 || $# > 6 )) || [ "$1" == "help" ] || [ "$1" == "?" ] ; then
  LoadMadDownloadHosts
  echo -e "${BLUE}MAD Help -------------------------${NC}"
  echo -e "Supported Hosts + Keyword:"
  _hostcode=""
  _hostnick=""
  _hostfuncprefix=""
  _hosturls=""
  _hostdomainregex=""
  _helpprochostcodes=""
  readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
  for hline in "${arrHostAndDomainRegexes[@]}"; do
    if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
      continue
    fi
    chnk1="${hline%%:*}"
    _hostcode=$(echo "$chnk1" | cut -f2 -d '/')
    _hostnick=$(echo "$chnk1" | cut -f3 -d '/')
    _hostfuncprefix=$(echo "$chnk1" | cut -f4 -d '/')
    _hosturls=$(echo "$chnk1" | cut -f5 -d '/')
    _hostdomainregex="${hline#*\:}"
    if [[ ! "$_helpprochostcodes" == *":${_hostcode}:"* ]]; then
      echo -e " [${YELLOW}$_hostcode${NC}] $_hosturls"
      _helpprochostcodes="${_helpprochostcodes}:${_hostcode}:"
    fi
  done
  echo -e " [${YELLOW}direct=${NC}] any direct url or cdn url${NC}"
  echo -e " * Script supports multiple terminals/instances running."
  echo -e ""
  echo -e "${BLUE}Download Usage ----------------------------${NC}"
  echo -e "[${YELLOW}Process Downloads${NC}]: 1 terminal, process list"
  printf " %s urls.txt\\n" "$0"
  echo -e ""
  echo -e "[${YELLOW}Specific Host${NC}]: (1f, hex, kraken, pd, oshi, fh, dosya, upee, uhive, uflix)"
  printf " %s 1f urls.txt\\n" "$0"
  printf " %s oshi urls.txt\\n" "$0"
  echo -e ""
  echo -e "[${YELLOW}Multi Spawn${NC}]: Launch X terminals to process downloads (OS dependent)"
  printf " %s multi [2-8] urls.txt\\n" "$0"
  echo -e ""
  echo -e "[${YELLOW}Multi Spawn, Specific Host${NC}]: Launch X terminals to process specified host (OS dependent)"
  printf " %s multi hex [2-8] urls.txt\\n" "$0"
  printf " %s multi pd [2-8] urls.txt\\n" "$0"
  echo -e ""
  echo -e "[${YELLOW}Multi Auto Spawn${NC}]: Launch 1 terminal per host with downloads (OS dependent)"
  printf " %s multi auto urls.txt\\n" "$0"
  echo -e ""
  echo -e "[${YELLOW}Multi Auto Spawn [# terms]${NC}]: Launch 1 terminal per host with downloads [x total] (OS dependent)"
  printf " %s multi auto [2-8] urls.txt\\n" "$0"
  echo -e ""
  echo -e "${BLUE}Other Download Commands --------------------${NC}"
  echo -e "[${YELLOW}Status${NC}]: Displays the status of urls in the download urls.txt"
  printf " %s status urls.txt\\n" "$0"
  echo -e "[${YELLOW}Reset Urls${NC}]: Resets lines with #RETRY# comment in the download urls.txt"
  echo -e " - Only works when AutoCommentOnCompletion=true"
  printf " %s reset urls.txt\\n" "$0"
  echo -e ""
  echo -e "${BLUE}Upload Usage ----------------------------${NC}"
  echo -e "[${YELLOW}Process Uploads${NC}]: Uploads each file & HostCode line in uploads.txt"
  printf " %s upload uploads.txt\\n" "$0"
  echo -e "[${YELLOW}MAD Uploader UI${NC}]: Uploads files in the ./uploads folder via user prompts"
  printf " %s upload\\n" "$0"
  echo -e ""
  echo -e "${BLUE}Other Upload Commands --------------------${NC}"
  echo -e "[${YELLOW}Upload Status${NC}]: Displays the status of file uploads in the uploads.txt"
  printf " %s upload status uploads.txt\\n" "$0"
  echo -e "[${YELLOW}Reset Urls${NC}]: Resets lines with #RETRY# comment in the uploads.txt"
  echo -e " - Only works when AutoCommentOnCompletion=true"
  printf " %s upload reset uploads.txt\\n" "$0"
  echo -e ""
  echo -e "${BLUE}Other MAD Commands --------------------${NC}"
  echo -e "[${YELLOW}Install curl_impersonate${NC}]: Downloads the latest binary for curl_impersonate from github repo (3 choices)"
  printf " %s install_curl_impersonate\\n" "$0"
  echo -e "[${YELLOW}ClipboardMonitor${NC}]: Monitors clipboard for urls of supported download hosts and adds to urls.txt"
  printf " %s clipmon urls.txt\\n" "$0"
  echo -e ""
  echo -e "${BLUE}Information -----------------------${NC}"
  echo -e "[${YELLOW}Configurables${NC}]: Set optional params in the global section (top of script)"
  echo -e " - AutoReloadOnFilelistTxtChanges: Reloads urls.txt if modified to get additions"
  echo -e " - AutoCommentOnCompletion: Comments completed lines and adds status at end"
  echo -e "   #OK#, #RETRY#, #REMOVED#, #FAILED#, etc -- Filename / Reason"
  echo -e " - ClearScreenOnAutoReload: Clears terminal session whenever the urls.txt is reloaded"
  echo -e " - AutoRepairBadPartials: Automatically repairs partial downloads with bad data to allow resuming"
  echo -e "   through bad nodes"
  echo -e ""
  echo -e "${BLUE}Special Commands allowed in urls.txt ---${NC}"
  echo -e "[${YELLOW}MoveToFolder${NC}]: Sets the folder to move downloads to (blank resets to leave in downloads)"
  echo -e " - folder=My folder name (2020)"
  echo -e "[${YELLOW}FilenameOverride${NC}]: Sets the specified filename to save the download as per url"
  echo -e "   (Add \"|filename.ext\" to end of url, no double quotes)"
  echo -e " - http://oshi.at/abcd/origAABB.rar|My specified file.part1.rar"
  echo -e "[${YELLOW}DirectUrl${NC}]: Will attempt to download directly from the url or cdn link"
  echo -e " - direct=http://pomf2.lain.la/f/abcd00zz.7z"
  echo -e " - If the direct url doesn't end in the filename, it is highly recommended to override it with |filename.ext"
  echo -e '   ie. direct=http://somehost.onion/abcD|filename.part1.rar'
  echo -e ""
  exit 0
fi

if [[ "$arg1" == "upload" ]] || [[ "$arg1" == "uploads" ]]; then
  if [ "$arg2" == "status" ] && [ -f "$arg3" ]; then
    madStatusUploads "$arg3"
    exit 0
  fi
  if [ "$arg2" == "reset" ] && [ -f "$arg3" ]; then
    madResetUploads "$arg3"
    exit 0
  fi
  if [ ! -z "$arg2" ]; then
    InputFile="$arg2"
    if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
      InputFile="${WorkDir}/${InputFile}"
    fi
    if [ ! -f "${InputFile}" ]; then
      echo -e "Unable to read file $InputFile [1]"
      exit 1
    fi
    CleanInputFile "$InputFile"
    qChkLineCount=0
    if [ -f "${InputFile}" ] ; then
      qChkLineCount=$(grep -Evi '^#|^$|#OK#|#FAIL#|#RETRY#' "${InputFile}" | wc -l | awk '{ print $1 }')
      if ((qChkLineCount <= 0)); then
        if DoneProcessingAllUploads "$InputFile" ; then
          if [ "${AutoShowMadStatus}" == "true" ] ; then
            echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D"
            madStatusUploads "$InputFile"
          fi
          exit 0
        fi
        exit 0
      fi
      LoadMadUploadHosts
      MadUploadFromFileTxt "$InputFile"
      exit 0
    else
      echo -e "Unable to read file $InputFile [2]"
      exit 1
    fi
  else
    LoadMadUploadHosts
    MadUploadFilesInUploadsFolder
    exit 0
  fi
  echo -e "Nothing to process.."
  exit 0
elif [[ "$arg1" == "reset" ]] && [[ ! -z "$arg2" ]]; then
  madReset "$arg2"
elif [[ "$arg1" == "status" ]] && [[ ! -z "$arg2" ]]; then
  madStatus "$arg2"
elif [[ "$arg1" == "clipmon" ]] && [[ ! -z "$arg2" ]]; then
  clipboard_monitor "$arg2"
elif [[ "$arg1" == "install_curl_impersonate" ]]; then
  install_curl_impersonate
  exit 0
elif [[ "$arg1" == "hosts" ]]; then
  madHostDetails
  exit 0
elif [[ "$arg1" == "plugins" ]]; then
  madPluginDetails
  exit 0
fi
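
# Anything not handled as a subcommand falls through to here. A bare url (or a
# "direct=" url) on the command line switches into single-url mode further
# below; otherwise the argument is treated as a urls.txt style file list.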

if grep -Eqi "^(http|direct=http)" <<< "$1" ; then
  UrlOnly=true
fi

echo -e "${BLD}"
echo -e "${PINK}:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:"
echo -e ":${NC} ${GREEN}M${NC}ulti-host ${GREEN}A${NC}uto ${GREEN}D${NC}ownloader - ${YELLOW}v${ScriptVersion} ${BLUE}(by kittykat) ${PINK}:"
echo -e ":-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:-:${NC}\\n"

if [ "$didSourceMadConfig" == "true" ]; then
  echo -e "[${GREEN}LOAD${NC}] ${BLUE}mad.config${NC}"
  echo -e ""
fi

LoadMadDownloadHosts
LoadMadUploadHosts
LoadMadPlugins
OnLoad "$0" "$@"

if ((torPort >= 9050 && torPort <= 9150)); then
  printf "TorIp: ${GREEN}$TorIp${NC}, Tor listening on port ${GREEN}$torPort${NC}, "
else
  printf "TorIp: ${RED}$TorIp${NC}, ${RED}Tor port not found !!${NC}.\\n"
  printf "Ensure Tor is setup and listening on a port between 9050 and 9150. Exiting...\\n"
  exit 1
fi

if [ "${UseTorCurlImpersonate}" == "true" ]; then
  printf "client: ${GREEN}Tor${NC} + ${BLUE}curl_impersonate${NC}\\n"
else
  printf "client: ${GREEN}Tor${NC} + ${GREEN}curl${NC}\\n"
fi

echo -e "ConnectTimeout: ${GREEN}$ConnectTimeout${NC}, CircuitRetries: ${GREEN}$CircuitRetries${NC}, UrlRetries: ${GREEN}$MaxUrlRetries${NC}, DownloadRetries: ${GREEN}$MaxDownloadRetries${NC}"

if [ "${LoopThroughFileUntilComplete}" == "true" ]; then
  printf "Loop: ${GREEN}${LoopThroughFileUntilComplete}${NC}, "
else
  printf "Loop: ${GREY}${LoopThroughFileUntilComplete}${NC}, "
fi
if [ "${AutoReloadOnFilelistTxtChanges}" == "true" ]; then
  printf "AutoReload: ${GREEN}${AutoReloadOnFilelistTxtChanges}${NC}, "
else
  printf "AutoReload: ${GREY}${AutoReloadOnFilelistTxtChanges}${NC}, "
fi
if [ "${AutoCommentOnCompletion}" == "true" ]; then
  printf "AutoComment: ${GREEN}${AutoCommentOnCompletion}${NC}, "
else
  printf "AutoComment: ${GREY}${AutoCommentOnCompletion}${NC}, "
fi
if [ "${AutoRepairBadPartials}" == "true" ]; then
  printf "AutoRepairPartials: ${GREEN}${AutoRepairBadPartials}${NC}"
else
  printf "AutoRepairPartials: ${GREY}${AutoRepairBadPartials}${NC}"
fi
printf "\\n"

if [ "${RateMonitorEnabled}" == "true" ]; then
  printf "RateMonitor: ${GREEN}${RateMonitorEnabled}${NC}, "
else
  printf "RateMonitor: ${GREY}${RateMonitorEnabled}${NC}, "
fi

if grep -Eq "pjscloud.sh" <<< "$LoadPlugins" && [ "$PJSCloud_pixeldrain" == "true" ]; then
  if [ "${ar_pgsKey[0]}" == 'aa-bbbbb-ccccc-ddddd-eeeee-fffff' ] || [ "${ar_pgsKey[0]}" == "" ] ; then
    printf " ${RED}[==>${NC} Setup ${BLUE}PJS apikey${NC} in pjscloud.sh ${RED}<==]${NC}"
    PJSCloud_pixeldrain=false
    printf "PDPump: ${GREY}${PJSCloud_pixeldrain}${NC}, "
  else
    printf "PDPump: ${GREEN}${PJSCloud_pixeldrain}${NC}, "
  fi
else
  printf "PDPump: ${GREY}${PJSCloud_pixeldrain}${NC}, "
fi
if [ "${UsePixeldrainBypass}" == "true" ]; then
  printf "PDBypass: ${GREEN}${UsePixeldrainBypass}${NC}, "
else
  printf "PDBypass: ${GREY}${UsePixeldrainBypass}${NC}, "
fi
if [ "${EnableFiledotProcessing}" == "true" ]; then
  GetRandomFiledotUser
  if [ "${ar_fdUP[0]}" == 'user1|pass1' ] || [ "${ar_fdUP[0]}" == "" ]; then
    printf " ${RED}[==>${NC} Setup ${BLUE}user${NC}/${BLUE}pass${NC} in script ${RED}<==]${NC}"
    EnableFiledotProcessing=false
    printf "Filedot: ${GREY}$EnableFiledotProcessing${NC}"
  else
    printf "Filedot: ${GREEN}$EnableFiledotProcessing${NC}"
  fi
else
  printf "Filedot: ${GREY}$EnableFiledotProcessing${NC}"
fi
printf "\\n"

if [ "${DebugAllEnabled}" == "true" ] || [ "${DebugPluginsEnabled}" == "true" ] ; then
  bDebugMsgPrintCnt=0
  if [ "${DebugAllEnabled}" == "true" ]; then
    printf "DebugHosts: ${BLUE}${DebugAllEnabled}${NC}"
    bDebugMsgPrintCnt=$((bDebugMsgPrintCnt + 1))
  fi
  if [ "${DebugPluginsEnabled}" == "true" ]; then
    if ((bDebugMsgPrintCnt > 0)) ; then
      printf ", "
    fi
    printf "DebugPlugins: ${BLUE}${DebugPluginsEnabled}${NC}"
  fi
  printf "\\n"
fi

if [ ! "$UrlOnly" == "true" ]; then
  qChkLineCount=0
  if [ -f "${WorkDir}/$1" ] ; then
    qChkLineCount=$(grep -Ei '^(http|direct=http)' "${WorkDir}/$1" | wc -l | awk '{ print $1 }')
    if ((qChkLineCount <= 0)); then
      if DoneProcessingAllUrls "$1" ; then
        if [ "${AutoShowMadStatus}" == "true" ] ; then
          echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D"
          madStatus "$1"
        fi
        exit 0
      fi
    fi
  elif [ -f "${WorkDir}/$2" ] ; then
    qChkLineCount=$(grep -Ei '^(http|direct=http)' "${WorkDir}/$2" | wc -l | awk '{ print $1 }')
    if ((qChkLineCount <= 0)); then
      if DoneProcessingAllUrls "$2" ; then
        if [ "${AutoShowMadStatus}" == "true" ] ; then
          echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D"
          madStatus "$2"
        fi
        exit 0
      fi
    fi
  elif [ -f "${WorkDir}/$3" ] ; then
    qChkLineCount=$(grep -Ei '^(http|direct=http)' "${WorkDir}/$3" | wc -l | awk '{ print $1 }')
    if ((qChkLineCount <= 0)); then
      if DoneProcessingAllUrls "$3" ; then
        if [ "${AutoShowMadStatus}" == "true" ] ; then
          echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D"
          madStatus "$3"
        fi
        exit 0
      fi
    fi
  fi
fi
|
|
hostOnlyOrUrl=""
|
|
multiCount=0
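# URL-only mode handles a single url passed on the command line instead of a filelist.
# Usage sketch (urls are illustrative, not real links):
#   ./mad.sh "https://host.example/abcdef"            # download one url
#   ./mad.sh "https://host.example/abcdef" "out.bin"  # with a filename override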
if [ "$UrlOnly" == "true" ]; then
echo -e ""
echo -e "${BLUE}:-:-:-: URL Only Mode :-:-:-:${NC}"
line="$arg1"
if [ ! -z "$arg2" ]; then
filename_override="$arg2"
fi
if [[ $line =~ \| ]] ; then
line="${line%%\|*}"
fi
if [[ ${line} =~ ^# ]] ; then
exit 0
elif [[ ${line} =~ ^direct=http ]]; then
if ! PreProcessUrl "$line" ; then
exit 0
fi
remote_url=${line/direct=/}
if [[ ${remote_url} =~ ^http: ]] ; then
remote_url=${remote_url/http:/https:}
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
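# Lock files are named by stripping every non-alphanumeric character from the url,
# e.g. "https://host.example/ab-1" -> ".flocks/httpshostexampleab1" (illustrative),
# so concurrent terminals can cheaply test whether a url is already being handled.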
if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then
echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}"
exit 0
fi
printf "\nGetting ${YELLOW}direct${NC} file ${GREEN}1${NC}\\n"
if [ ! "${filename_override}" == "" ] ; then
printf "[${BLUE}FilenameOverride${NC}]: %s\\n" "$filename_override"
fi
printf "[${BLUE}DirectUrl${NC}]: %s\\n" "${remote_url}"
mkdir -p "${WorkDir}/.flocks"
touch "${WorkDir}/.flocks/${CLEANSTRING}"
mkdir -p "${WorkDir}/.inflight"
direct_DownloadFile "${remote_url}" "${fileCount}"
elif [[ ${line} =~ ^http ]] ; then
if ! PreProcessUrl "$line" ; then
exit 0
fi
remote_url=${line}
if [[ ${remote_url} =~ ^http: ]] ; then
remote_url=${remote_url/http:/https:}
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then
echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}"
exit 0
fi
isHostMatchFound=false
_hostcode=""
_hostnick=""
_hostfuncprefix=""
_hosturls=""
_hostdomainregex=""
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
for hline in "${arrHostAndDomainRegexes[@]}";
do
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
continue
fi
chnk1="${hline%%:*}"
_hostcode=$(echo "$chnk1"|cut -f2 -d '/')
_hostnick=$(echo "$chnk1"|cut -f3 -d '/')
_hostfuncprefix=$(echo "$chnk1"|cut -f4 -d '/')
_hosturls=$(echo "$chnk1"|cut -f5 -d '/')
_hostdomainregex="${hline#*\:}"
if [[ $remote_url =~ $_hostdomainregex ]]; then
isHostMatchFound=true
printf "\nGetting ${YELLOW}$_hostnick${NC} file ${GREEN}1${NC}\\n"
if [ ! "${filename_override}" == "" ] ; then
printf "[${BLUE}FilenameOverride${NC}]: %s\\n" "$filename_override"
fi
printf "[DownloadUrl]: %s\\n" "${remote_url}"
mkdir -p "${WorkDir}/.flocks"
touch "${WorkDir}/.flocks/${CLEANSTRING}"
mkdir -p "${WorkDir}/.inflight"
${_hostfuncprefix}_DownloadFile "${remote_url}" "${fileCount}"
break
fi
done
if [ "$isHostMatchFound" == "false" ]; then
printf "${RED}Invalid url (bad format or unsupported host [UO]):${NC} \\n%s\\n" "$remote_url"
badUrlDownload "${remote_url}"
exit 0
fi
else
printf "Ignore garbage line.\\n" > /dev/null
exit 0
fi
exit 0
fi
if [[ "$arg1" == "multi" ]] && [[ "$arg2" == "auto" ]] ; then
useMultiCount=false
multiCount=2 # Default ./mad.sh multi # urls.txt
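# "multi auto" scans the filelist and spawns one terminal per host that has pending
# urls, e.g. ./mad.sh multi auto urls.txt -- or ./mad.sh multi auto 4 urls.txt to
# cap the count and top up the remainder with allhosts terminals.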
if [[ "$arg3" == "2" || "$arg3" == "3" || "$arg3" == "4" || \
"$arg3" == "5" || "$arg3" == "6" || "$arg3" == "7" || "$arg3" == "8" ]] ; then
useMultiCount=true
multiCount=$((arg3))
InputFile="$4"
else
InputFile="$3"
fi
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
InputFile="${WorkDir}/${InputFile}"
fi
if [ ! -f "${InputFile}" ]; then
printf "Unable to read file %s! [1]\\n" "${InputFile}"
exit 1
else
isHostMatchFound=false
_hostcode=""
_hostnick=""
_hostfuncprefix=""
_hosturls=""
_hostdomainregex=""
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
for hline in "${arrHostAndDomainRegexes[@]}";
do
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
continue
fi
chnk1="${hline%%:*}"
_hostcode=$(echo "$chnk1"|cut -f2 -d '/')
_hostnick=$(echo "$chnk1"|cut -f3 -d '/')
_hostfuncprefix=$(echo "$chnk1"|cut -f4 -d '/')
_hosturls=$(echo "$chnk1"|cut -f5 -d '/')
_hostdomainregex="${hline#*\:}"
lineCount=0
lineCount=$(grep -Ei "$_hostdomainregex" "${InputFile}" | wc -l | awk '{ print $1 }')
if ((lineCount > 0)) ; then
if [ "$useMultiCount" == "false" ] || ( [ "$useMultiCount" == "true" ] && ((multiCount > 0)) ) ; then
printf "%s has ${GREEN}%d ${YELLOW}$_hostnick${NC} files to download.\\n" "${InputFile}" $lineCount
LaunchTerminal "$_hostcode" "${InputFile}"
multiCount=$((multiCount - 1))
fi
fi
done
if [ "$useMultiCount" == "true" ] && ((multiCount > 0)) ; then
printf "Spawning ${GREEN}%d ${YELLOW}allhosts${NC} terminals.\\n" $multiCount
for ((k=1; k<=$multiCount; k++)); do
LaunchTerminal "allhosts" "${InputFile}"
done
fi
fi
exit 0
elif [[ "$arg1" == "multi" ]] && \
[[ "$arg3" == "2" || "$arg3" == "3" || "$arg3" == "4" || \
"$arg3" == "5" || "$arg3" == "6" || "$arg3" == "7" || "$arg3" == "8" ]] ; then
multiCount=2 # Default ./mad.sh multi # urls.txt
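# "multi <hostcode> <2-8> <file>" spawns N terminals pinned to a single host,
# e.g. ./mad.sh multi 1f 4 urls.txt (hostcode "1f" is illustrative).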
if [[ "$ListHostAndDomainRegexes" == *"$arg2"* ]]; then
InputFile="$arg4"
multiCount=$arg3
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
InputFile="${WorkDir}/${InputFile}"
fi
if [ ! -f "${InputFile}" ]; then
printf "Unable to read file %s! [2]\\n" "${InputFile}"
exit 1
else
isHostMatchFound=false
_hostcode=""
_hostnick=""
_hostfuncprefix=""
_hostdomainregex=""
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
for hline in "${arrHostAndDomainRegexes[@]}";
do
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
continue
fi
chnk1="${hline%%:*}"
_hostcode=$(echo "$chnk1"|cut -f2 -d '/')
_hostnick=$(echo "$chnk1"|cut -f3 -d '/')
_hostfuncprefix=$(echo "$chnk1"|cut -f4 -d '/')
_hostdomainregex="${hline#*\:}"
if [ "$arg2" == "$_hostcode" ] ; then
lineCount=$(grep -Ei "$_hostdomainregex" "${InputFile}" | wc -l | awk '{ print $1 }')
printf "%s has ${GREEN}%d ${YELLOW}$_hostnick${NC} files to download.\\n" "${InputFile}" $lineCount
for ((k=1; k<=$multiCount; k++)); do
LaunchTerminal "$_hostcode" "${InputFile}"
done
exit 0
fi
done
fi
else
InputFile="$arg3"
if [[ "$arg1" == "multi" ]] && \
[[ "$arg2" == "2" || "$arg2" == "3" || "$arg2" == "4" || \
"$arg2" == "5" || "$arg2" == "6" || "$arg2" == "7" || "$arg2" == "8" ]] ; then
multiCount=$arg2
fi
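# Fallback form: "multi <2-8> <file>" with no hostcode spawns N allhosts terminals,
# e.g. ./mad.sh multi 3 urls.txt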
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
InputFile="${WorkDir}/${InputFile}"
fi
if [ ! -f "${InputFile}" ]; then
printf "Unable to read file %s! [3]\\n" "${InputFile}"
exit 1
else
isHostMatchFound=false
_hostcode=""
_hostnick=""
_hostfuncprefix=""
_hostdomainregex=""
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
lineCount=0
for hline in "${arrHostAndDomainRegexes[@]}";
do
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
continue
fi
chnk1="${hline%%:*}"
_hostcode=$(echo "$chnk1"|cut -f2 -d '/')
_hostnick=$(echo "$chnk1"|cut -f3 -d '/')
_hostfuncprefix=$(echo "$chnk1"|cut -f4 -d '/')
_hostdomainregex="${hline#*\:}"
if [ "$_hostfuncprefix" == "direct" ]; then
lineCount2=$(grep -Ei '^direct=http' "${InputFile}" | wc -l | awk '{ print $1 }')
lineCount=$((lineCount + lineCount2))
elif [ "$_hostcode" == "fdot" ] && [ "${EnableFiledotProcessing}" == "true" ]; then
lineCount2=$(grep -Ei "$_hostdomainregex" "${InputFile}" | wc -l | awk '{ print $1 }')
lineCount=$((lineCount + lineCount2))
else
lineCount2=$(grep -Ei "$_hostdomainregex" "${InputFile}" | wc -l | awk '{ print $1 }')
lineCount=$((lineCount + lineCount2))
fi
done
printf "%s has ${GREEN}%d${NC} files to download.\\n" "${InputFile}" $lineCount
for ((k=1; k<=$multiCount; k++)); do
LaunchTerminal "allhosts" "${InputFile}"
done
exit 0
fi
fi
else
isHostMatchFound=false
_hostcode=""
_hostnick=""
_hostfuncprefix=""
_hostdomainregex=""
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
lineCount=0
foundhostnick=""
for hline in "${arrHostAndDomainRegexes[@]}";
do
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
continue
fi
chnk1="${hline%%:*}"
_hostcode=$(echo "$chnk1"|cut -f2 -d '/')
_hostnick=$(echo "$chnk1"|cut -f3 -d '/')
_hostfuncprefix=$(echo "$chnk1"|cut -f4 -d '/')
_hostdomainregex="${hline#*\:}"
if [ "$arg1" == "$_hostcode" ]; then
hostOnlyOrUrl="$1"
InputFile="$2"
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
InputFile="${WorkDir}/${InputFile}"
fi
if [ ! -f "${InputFile}" ]; then
printf "Unable to read file %s! [host]\\n" "${InputFile}"
exit 1
else
lineCount2=$(grep -Ei "$_hostdomainregex" "${InputFile}" | wc -l | awk '{ print $1 }')
lineCount=$((lineCount + lineCount2))
foundhostnick="$_hostnick"
fi
isHostMatchFound=true
fi
done
if [ "$isHostMatchFound" == "true" ]; then
printf "%s has ${GREEN}%d ${YELLOW}$foundhostnick${NC} files to download.\\n" "${InputFile}" $lineCount
else
InputFile="$1"
if [ ! -f "${InputFile}" ] && [ -f "${WorkDir}/${InputFile}" ]; then
InputFile="${WorkDir}/${InputFile}"
fi
if [ ! -f "${InputFile}" ]; then
printf "Unable to read file %s! [main]\\n" "${InputFile}"
exit 1
fi
lineCount=$(grep -Ei '^(http|direct=http)' "${InputFile}" | wc -l | awk '{ print $1 }')
printf "%s has ${GREEN}%d${NC} files to download.\\n" "${InputFile}" $lineCount
fi
fi
CleanInputFile "$InputFile"
HashFilelistTxt=$( sha1sum "${InputFile}" | awk '{print $1}' )
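# The filelist SHA1 is kept so the per-line loop below can detect edits to the
# list mid-run and trigger an auto-reload when AutoReloadOnFilelistTxtChanges is on.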
if [ "$VerboseLoading" == "true" ]; then
printf "SHA1: %s\\n\\n" "${HashFilelistTxt}"
fi
mainLoopControl=true
while ${mainLoopControl}
do
mainLoopControl="$LoopThroughFileUntilComplete"
BeginProcessing "${InputFile}"
qChkLineCount=$(grep -Ei '^(http|direct=http)' "${InputFile}" | wc -l | awk '{ print $1 }')
if ((qChkLineCount <= 0)) ; then
if DoneProcessingAllUrls "${InputFile}" ; then
if [ "${AutoShowMadStatus}" == "true" ] ; then
echo -e "${RED}❤${GREEN}Done! ${YELLOW}Me0W!${NC} :D"
madStatus "${InputFile}"
fi
exit 0
else
continue
fi
fi
fileCount=1
lockCount=0
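# Main per-line reader: sed trims surrounding whitespace, then the while reads the
# filelist line by line. Note the pipe runs the while body in a subshell, so an
# "exit" inside it ends only this reader pass; the outer loop then re-checks for
# remaining urls.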
sed 's/^[[:space:]]*// ; s/[[:space:]]*$//' "${InputFile}" |
while IFS= read -r line
do
filename_override=""
line="${line//[$'\t\r\n']}"
if [[ $line =~ \| ]] && grep -Eqi '^(http|direct=)' <<< "$line" && grep -Eqvi '^#' <<< "$line" ; then
filename_override="${line##*\|}"
line="${line%%\|*}"
fi
currentHashFilelistTxt=$( sha1sum "${InputFile}" | awk '{print $1}' )
if [ -f "${WorkDir}/clear" ] ; then
clear
printf "[${PINK}ClearScreen${NC}] Clear file was found. (removing and clearing)\\n"
if [ -f "${WorkDir}/clear" ] ; then
rm -f "${WorkDir}/clear"
fi
elif ((qChkLineCount > 0)) && [ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ] && \
[ "${ClearScreenOnAutoReload}" == "true" ] && [ "${AutoReloadOnFilelistTxtChanges}" == "true" ] ; then
clear
printf "[${PINK}ClearScreen${NC}] Auto-clearing screen. (${InputFile} change detected)\\n"
fi
if [ -f "${WorkDir}/stop" ] ; then
printf "\\n%s\\n" "--------------------------------------------"
printf "[${PINK}Stop${NC}] stop file was found.\\n"
printf "%s\\n" "--------------------------------------------"
rm -f "${WorkDir}/stop"
exit 1
fi
if [ -f "${WorkDir}/restart" ] ; then
printf "\\n%s\\n" "--------------------------------------------"
printf "[${PINK}Restart${NC}] restart file was found.\\n"
printf "%s\\n" "--------------------------------------------"
rm -f "${WorkDir}/restart"
if [ -f "${WorkDir}/reload" ] ; then
rm -f "${WorkDir}/reload"
fi
ReloadScript "$@"
exit 1
elif [ -f "${WorkDir}/reload" ] ; then
printf "\\n%s\\n" "--------------------------------------------"
printf "[${PINK}Reload${NC}] reload file was found.\\n"
printf "%s\\n" "--------------------------------------------"
rm -f "${WorkDir}/reload"
if [ -f "${WorkDir}/restart" ] ; then
rm -f "${WorkDir}/restart"
fi
ReloadScript "$@"
exit 1
elif ((qChkLineCount > 0)) && [ "${AutoReloadOnFilelistTxtChanges}" == "true" ] && \
[ ! "${HashFilelistTxt}" == "${currentHashFilelistTxt}" ]; then
printf "\\n%s\\n" "--------------------------------------------"
printf "[${PINK}Reload${NC}] ${InputFile} was modified.\\n"
printf "%s\\n" "--------------------------------------------"
ReloadScript "$@"
exit 1
fi
if [[ $line =~ ^STOP! ]] ; then
printf "\\n%s\\n" "----------------------------------------------------"
printf "Stopping -- STOP! keyword encountered.\\n"
printf "%s\\n" "----------------------------------------------------"
exit 1
fi
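# Besides urls, the filelist supports inline control lines (examples illustrative):
#   STOP!                - stop processing when this line is reached
#   folder=MyFolder      - move subsequent downloads into MyFolder
#   #SomeKey=SomeValue   - stash a key/value into UrlsVars for later use
#   url|filename.ext     - per-url filename override (split on the '|')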
if grep -ie '^folder=' <<< "$line" > /dev/null ; then
MoveToFolder=${line:7} #get everything starting at pos 7+
MoveToFolder=$(sanitize_file_or_folder_name "${MoveToFolder}")
continue ## resume next line ##
fi
if grep -ie '^#[^ ].*=.\+' <<< "$line" > /dev/null ; then
line=${line:1}
key=${line%%=*}
val=${line#*=}
UrlsVars[$key]="$val"
continue ## resume next line ##
fi
if [[ ${line} =~ ^(http|https|direct=http|direct=https):// ]] ; then
if ! PreProcessUrl "$line" ; then
continue
fi
fi
if [ ! -z "${hostOnlyOrUrl}" ] ; then
_hostcode=""
_hostnick=""
_hostfuncprefix=""
_hostdomainregex=""
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
isHostMatchFound=false
isSupportedHost=false
isSkipOkay=false
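# In host-only mode a url can be supported overall yet belong to a different host
# than the one this terminal is pinned to; isSupportedHost vs isHostMatchFound keeps
# those cases apart, so foreign-host urls are left for an allhosts pass instead of
# being flagged as bad.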
if [[ ${line} =~ ^# ]] ; then
continue
elif [[ ${line} =~ ^direct=(http|https):// ]] ; then
continue
elif [[ ${line} =~ ^http ]] ; then
remote_url=${line}
if [[ ${remote_url} =~ ^http: ]] ; then
remote_url=${remote_url/http:/https:}
fi
else
printf "Ignore garbage line.\\n" > /dev/null
continue
fi
for hline in "${arrHostAndDomainRegexes[@]}";
do
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
continue
fi
chnk1="${hline%%:*}"
_hostcode=$(echo "$chnk1"|cut -f2 -d '/')
_hostnick=$(echo "$chnk1"|cut -f3 -d '/')
_hostfuncprefix=$(echo "$chnk1"|cut -f4 -d '/')
_hostdomainregex="${hline#*\:}"
if [[ $remote_url =~ $_hostdomainregex ]] ; then
isSupportedHost=true
if [ "${hostOnlyOrUrl}" == "$_hostcode" ] ; then
isHostMatchFound=true
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then
if ((CatnapCount <= 0)) ; then
echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}"
fi
fileCount=$((fileCount + 1))
isSkipOkay=true
break
fi
printf "\nGetting ${YELLOW}$_hostnick${NC} file ${GREEN}%d${NC} of ${GREEN}%d${NC} ${PINK}($_hostcode urls)${NC}\\n" $fileCount $lineCount
if [ ! "${filename_override}" == "" ] ; then
printf "[${BLUE}FilenameOverride${NC}]: %s\\n" "$filename_override"
fi
if [ ! "${MoveToFolder}" == "" ] ; then
printf "[${BLUE}MoveToFolder${NC}]: %s\\n" "$MoveToFolder"
fi
printf "[DownloadUrl]: %s\\n" "${remote_url}"
mkdir -p "${WorkDir}/.flocks"
touch "${WorkDir}/.flocks/${CLEANSTRING}"
mkdir -p "${WorkDir}/.inflight"
${_hostfuncprefix}_DownloadFile "${remote_url}" "${fileCount}"
fileCount=$((fileCount + 1))
break
fi
fi
done
if [ "$isSkipOkay" == "true" ]; then
continue
elif [ "$isSupportedHost" == "false" ]; then
printf "${RED}Invalid url (bad format or unsupported host [m1]):${NC} \\n%s\\n" "$remote_url"
badUrlDownload "${remote_url}"
continue
fi
else
if [[ ${line} =~ ^# ]] ; then
continue
elif [[ ${line} =~ ^direct=http ]]; then
remote_url=${line/direct=/}
if [[ ${remote_url} =~ ^http: ]] ; then
remote_url=${remote_url/http:/https:}
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then
if ((CatnapCount <= 0)) ; then
echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}"
fi
fileCount=$((fileCount + 1))
continue
fi
printf "\nGetting ${YELLOW}direct${NC} file ${GREEN}%d${NC} of ${GREEN}%d${NC}\\n" $fileCount $lineCount
if [ ! "${filename_override}" == "" ] ; then
printf "[${BLUE}FilenameOverride${NC}]: %s\\n" "$filename_override"
fi
if [ ! "${MoveToFolder}" == "" ] ; then
printf "[${BLUE}MoveToFolder${NC}]: %s\\n" "$MoveToFolder"
fi
printf "[${BLUE}DirectUrl${NC}]: %s\\n" "${remote_url}"
mkdir -p "${WorkDir}/.flocks"
touch "${WorkDir}/.flocks/${CLEANSTRING}"
mkdir -p "${WorkDir}/.inflight"
direct_DownloadFile "${remote_url}" "${fileCount}"
fileCount=$((fileCount + 1))
continue
elif [[ ${line} =~ ^http ]] ; then
remote_url=${line}
if [[ ${remote_url} =~ ^http: ]] ; then
remote_url=${remote_url/http:/https:}
fi
else
printf "Ignore garbage line.\\n" > /dev/null
continue
fi
_hostcode=""
_hostnick=""
_hostfuncprefix=""
_hostdomainregex=""
readarray -d '@' -t arrHostAndDomainRegexes <<< "${ListHostAndDomainRegexes}"
isHostMatchFound=false
isSkipOkay=false
for hline in "${arrHostAndDomainRegexes[@]}";
do
if [[ "$hline" == "" ]] || [[ ! "$hline" == *":"* ]]; then
continue
fi
chnk1="${hline%%:*}"
_hostcode=$(echo "$chnk1"|cut -f2 -d '/')
_hostnick=$(echo "$chnk1"|cut -f3 -d '/')
_hostfuncprefix=$(echo "$chnk1"|cut -f4 -d '/')
_hostdomainregex="${hline#*\:}"
if [[ $remote_url =~ $_hostdomainregex ]] ; then
isHostMatchFound=true
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
if [ -f "${WorkDir}/.flocks/${CLEANSTRING}" ] ; then
if ((CatnapCount <= 0)) ; then
echo -e "${YELLOW}Lock Exists (Skipping)${NC}: ./.flocks/${CLEANSTRING}"
fi
fileCount=$((fileCount + 1))
isSkipOkay=true
break
fi
printf "\nGetting ${YELLOW}$_hostnick${NC} file ${GREEN}%d${NC} of ${GREEN}%d${NC}\\n" $fileCount $lineCount
if [ ! "${filename_override}" == "" ] ; then
printf "[${BLUE}FilenameOverride${NC}]: %s\\n" "$filename_override"
fi
if [ ! "${MoveToFolder}" == "" ] ; then
printf "[${BLUE}MoveToFolder${NC}]: %s\\n" "$MoveToFolder"
fi
printf "[DownloadUrl]: %s\\n" "${remote_url}"
mkdir -p "${WorkDir}/.flocks"
touch "${WorkDir}/.flocks/${CLEANSTRING}"
mkdir -p "${WorkDir}/.inflight"
${_hostfuncprefix}_DownloadFile "${remote_url}" "${fileCount}"
fileCount=$((fileCount + 1))
break
fi
done
if [ "$isSkipOkay" == "true" ]; then
continue
elif [ "$isHostMatchFound" == "false" ]; then
printf "${RED}Invalid url or disabled host (bad format or unsupported host [m*]):${NC} \\n%s\\n" "$remote_url"
badUrlDownload "${remote_url}"
continue
fi
fi
done #loop through the file line by line
qChkLineCount=$(grep -Ei '^(http|direct=http)' "${InputFile}" | wc -l | awk '{ print $1 }')
if ((qChkLineCount > 0)) ; then
if [ ! -z "$hostOnlyOrUrl" ] ; then
echo -e "${NC}"
echo -e "${YELLOW}Unprocessed / Skipped URL(s) Found:${NC}"
echo -e "Most likely from a different host than ${YELLOW}$hostOnlyOrUrl${NC}, another terminal is downloading it, or a flock exists...${NC}"
echo -e "Switching back to processing ${YELLOW}all host${NC} urls...${NC}"
ReloadScript ""
exit 0
else
if [ "$LoopThroughFileUntilComplete" == "false" ]; then
if [ ! -z "$hostOnlyOrUrl" ] ; then
echo -e "${NC}"
echo -e "${YELLOW}Unprocessed / Skipped URL(s) Found:${NC}"
echo -e "Most likely from a different host than ${YELLOW}$hostOnlyOrUrl${NC}, another terminal is downloading it, or a flock exists...${NC}"
fi
if DoneProcessingAllUrls "${InputFile}" ; then
exit 0
else
continue
fi
else
CatnapCount=$((CatnapCount + 1))
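# Catnap: locked/unfinished urls remain, so sleep CatnapDuration minutes and rescan.
# On repeat passes the four ANSI printf pairs below (cursor-up + erase-line) redraw
# the 4-line status block in place instead of scrolling the terminal.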
if ((CatnapCount > 1)); then
for ((a=1; a<=4; a++)); do
printf "\033[1A\r"
printf "\033[2K\r"
done
fi
echo -e "${NC}"
echo -e "${YELLOW}Unprocessed / Skipped URL(s) Found:${NC}"
echo -e "Most likely another terminal is downloading it or a flock exists...${NC}"
echo -e "${YELLOW}Catnapping${NC} for ${CatnapDuration} mins to allow ${YELLOW}$qChkLineCount${NC} download(s) to finish... ${YELLOW}zZzZzZ${NC} ${BLUE}x$CatnapCount${NC}"
sleep ${CatnapDuration}m
continue
fi
fi
else
if DoneProcessingAllUrls "${InputFile}" ; then
exit 0
else
continue
fi
fi
done #loop until all urls processed
IFS=$backupIFS