#! Name: biteblob.sh #! Author: kittykat #! Version: 2024.09.13 #! Desc: Add support for downloading and processing of urls for a new host #! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder #! #! #! ------------ REQUIRED SECTION --------------- #! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data #! Format: '/HostCode/HostNick/HostFuncPrefix:HostDomainRegex@' #! HostCode: (ie. 'fh' for filehaus -- cannot be used by other hosts) #! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed) #! HostFuncPrefix: (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. ) #! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno) #! HostDomainRegex: The regex used to verify matching urls HostCode='bite' HostNick='biteblob' HostFuncPrefix='bite' HostUrls='biteblob.com' HostDomainRegex='^(http|https)://(.*\.)?biteblob\.(com|org)' #! #! !! DO NOT UPDATE OR REMOVE !! #! This merges the Required HostAndDomainRegexes into mad.sh ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@' #! #! #! ------------ (1) Host Main Download Function --------------- # #! #! @REQUIRED: Host Main Download function #! Must be named specifically as such: #! 
#!      _DownloadFile()
#!
#! Orchestrate the full download of one url: fetch the file info/download
#! link, then fetch the file itself, retrying up to MaxUrlRetries times.
#! Args:
#!   $1 - remote url to download
#!   $2 - file counter (passed through to bite_GetFile for display)
#! Globals read: WorkDir, MaxUrlRetries, DebugAllEnabled, YELLOW, NC
#! Globals written (consumed by bite_FetchFileInfo / bite_GetFile via bash
#!   dynamic scoping): remote_url, file_url, filecnt, warnAndRetryUnknownError,
#!   exitDownloadError, exitDownloadNotAvailable, fileAlreadyDone,
#!   download_inflight_path, completed_location, tor_identity, finalAttempt
#! Returns: 0 on successful download; otherwise the status of the final
#!   cleanup command (failure is signalled through the flag globals).
bite_DownloadFile() {
  # NOTE: these are 'local' to this function but still visible inside the
  # bite_* helpers called below (dynamic scoping) -- do not rename.
  local remote_url=${1}
  local file_url=${1}
  local filecnt=${2}
  # Per-attempt status flags, set by bite_FetchFileInfo / bite_GetFile.
  warnAndRetryUnknownError=false
  exitDownloadError=false
  exitDownloadNotAvailable=false
  fileAlreadyDone=false
  # Staging area for in-progress downloads; completed files go to downloads/.
  download_inflight_path="${WorkDir}/.inflight/"
  mkdir -p "$download_inflight_path"
  completed_location="${WorkDir}/downloads/"
  # Random token -- presumably selects a fresh tor circuit per url; verify
  # against tor_curl_request in mad.sh.
  tor_identity="${RANDOM}"
  finalAttempt="false"
  # z counts attempts 0..MaxUrlRetries; the last pass is flagged "final".
  for ((z=0; z<=$MaxUrlRetries; z++)); do
    if [[ $z -eq $MaxUrlRetries ]] ; then
      finalAttempt="true"
    fi
    # Lock-file name: url reduced to alphanumerics only.
    CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
    # On exit/interrupt: drop this url's lock and restore the cursor.
    # (Variables expand now, at trap-set time, so the lock path is frozen.)
    trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
    if bite_FetchFileInfo $finalAttempt && bite_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
      return 0
    elif [[ $z -lt $MaxUrlRetries ]]; then
      # File already present locally -- nothing left to retry.
      if [[ "${fileAlreadyDone}" == "true" ]] ; then
        break
      fi
      if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
        fi
      fi
      # Unrecoverable (file removed / unavailable): release the lock and stop.
      if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
        if [[ "${DebugAllEnabled}" == "true" ]] ; then
          debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
        fi
        rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
        break
      fi
      # Recoverable error: brief backoff before the next attempt.
      echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
      sleep 3
    fi
  done
  # All attempts exhausted (or loop broken out of): release this url's lock.
  rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
#!
#! ------------- (2) Fetch File Info Function ----------------- #
#!
bite_FetchFileInfo() { finalAttempt=$1 maxfetchretries=5 fixed_url=${remote_url} if grep -Eqi "biteblob.org" <<< "$fixed_url"; then fixed_url=${remote_url/biteblob.org/biteblob.com} fi if grep -Eqi "biteblob.com/Download/" <<< "$fixed_url"; then fixed_url=${remote_url/biteblob.com\/Download/biteblob.com\/Information} fi download_url=${fixed_url/Information/Download} echo -e "${GREEN}# Fetching download link…${NC}" for ((j=1; j<=$maxfetchretries; j++)); do mkdir -p "${WorkDir}/.temp" printf " ." tor_identity="${RANDOM}" CLEANSTRING=${remote_url//[^a-zA-Z0-9]/} trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15 response=$(tor_curl_request --insecure -L -s "${fixed_url}") if [[ "${DebugAllEnabled}" == "true" ]] ; then debugHtml "${remote_url##*/}" "bite_dwnpage$j" "url: $fixed_url"$'\n'"${response}" fi if [[ -z $response ]] ; then if [[ $j == $maxfetchretries ]] ; then printf "\\n" echo -e "${RED}| Failed to extract download link${NC}" warnAndRetryUnknownError=true if [[ "${finalAttempt}" == "true" ]] ; then failedRetryDownload "${remote_url}" "" "" fi return 1 else continue fi fi if grep -Eqi 'Not Found|Invalid Request|Link Unauthorized|No download available|file was removed|file has been deleted' <<< "$response"; then printf "\\n" echo -e "${RED}| The file was not found. It could be deleted or expired.${NC}" exitDownloadError=true removedDownload "${remote_url}" return 1 fi if grep -Eqi '