v2024.11.06

kittykat 2024-11-08 09:32:54 +00:00
parent 1f87d598a4
commit 808d64768b
118 changed files with 28958 additions and 0 deletions

hosts/dosya.sh Executable file

@@ -0,0 +1,525 @@
#! Name: dosya.sh
#! Author: kittykat
#! Version: 2024.10.25
#! Desc: Add support for downloading and processing of urls for a new host
#! Usage: Copy this file into the ./${ScriptDir}/hosts/ folder
#!
#!
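#! How loading works: mad.sh is assumed to source every script found in
#! ./${ScriptDir}/hosts/ at startup, so copying this file there is all that
#! is needed to register the host. A minimal sketch of such a loader
#! (illustrative only, not mad.sh's actual code):
#!   for host_script in "${ScriptDir}"/hosts/*.sh; do
#!     [ -r "$host_script" ] && . "$host_script"
#!   done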
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] HostAndDomainRegexes: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix/HostUrls:HostDomainRegex@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output (ie. 'filehaus' -- "urls.txt has 10 filehaus.." will be displayed)
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> (ie. 'fh' -- fh_DownloadFile(), fh_FetchFileInfo() .. )
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
#! HostUrls: The domain used to match urls for this host (ie. 'dosyaupload.com')
#! HostDomainRegex: The regex used to verify matching urls
HostCode='dosya'
HostNick='dosyaupload'
HostFuncPrefix='dosya'
HostUrls='dosyaupload.com'
HostDomainRegex='^(http|https)://(.*\.)?dosyaupload\.com'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListHostAndDomainRegexes=${ListHostAndDomainRegexes}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'/'${HostUrls}':'${HostDomainRegex}'@'
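#! For illustration: mad.sh is expected to split the merged string on '@' to
#! get one record per host, then split each record into its fields. A hedged
#! sketch of such a parser (names are illustrative, not mad.sh's actual code):
#!   IFS='@' read -r -a host_records <<< "${ListHostAndDomainRegexes}"
#!   for record in "${host_records[@]}"; do      # '/code/nick/prefix/urls:regex'
#!     host_regex="${record#*:}"                 # everything after the first ':'
#!     IFS='/' read -r _ code nick prefix urls <<< "${record%%:*}"
#!   done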
#!
#!
#! ------------ (1) Host Main Download Function --------------- #
#!
#! @REQUIRED: Host Main Download function
#! Must be named specifically as such:
#! <HostFuncPrefix>_DownloadFile()
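#! Because every host module follows this naming rule, mad.sh can dispatch to
#! a host it has never seen before. A hypothetical call site (illustrative only):
#!   download_func="${HostFuncPrefix}_DownloadFile"
#!   "${download_func}" "${url}" "${filecnt}"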
dosya_DownloadFile() {
local remote_url=${1}
local file_url=${1}
local filecnt=${2}
warnAndRetryUnknownError=false
exitDownloadError=false
exitDownloadNotAvailable=false
fileAlreadyDone=false
download_inflight_path="${WorkDir}/.inflight/"
mkdir -p "$download_inflight_path"
completed_location="${WorkDir}/downloads/"
local failed_responses=0
tor_identity="${RANDOM}"
finalAttempt="false"
for ((z=0; z<=$MaxUrlRetries; z++)); do
if [ $z -eq $MaxUrlRetries ] ; then
finalAttempt="true"
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if dosya_FetchFileInfo $finalAttempt && dosya_GetFile "${filecnt}" $((z+1)) $finalAttempt ; then
return 0
elif [ $z -lt $MaxUrlRetries ]; then
if [ "${fileAlreadyDone}" == "true" ] ; then
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUrlRetries}"
fi
fi
if [[ "${exitDownloadError}" == "true" || "${exitDownloadNotAvailable}" == "true" ]] ; then
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUrlRetries}${NC}"
sleep 3
fi
done
rm -f "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
}
#!
#! ------------- (2) Fetch File Info Function ----------------- #
#!
dosya_FetchFileInfo() {
finalAttempt=$1
local dosya_cookie_jar="" dosya_token=""
if ((failed_responses >= 4)) ; then
rm -f "${dosya_cookie_jar}";
if ((i > 1)) ; then
printf "\\n"
fi
echo -e "${RED}| Too many failed responses. Try again later.${NC}"
failedRetryDownload "${remote_url}" "Too many failed responses. Try again later." ""
exitDownloadNotAvailable=true
return 1
fi
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
maxI=1
for ((i=1; i<=$maxI; i++)); do
dos_url=""
mkdir -p "${WorkDir}/.temp"
dosya_cookie_jar=$(mktemp "${WorkDir}/.temp/dosya_cookies${instance_no}.XXXXXX")
if ((i <= 1)) ; then
echo -e "${GREEN}# Fetching dosya link and file info…${NC}"
else
echo -e ""
echo -e "${GREEN}# Fetching new dosya link and file info…${NC}"
fi
tor_identity="${RANDOM}"
GetRandomUA
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dosya_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if [ "${UseTorCurlImpersonate}" == "true" ]; then
PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
-c "${dosya_cookie_jar}" \
"${remote_url}")
else
PAGE=$(tor_curl_request --insecure --max-time "$ConnectTimeout" -L -s \
-H "User-Agent: $RandomUA" \
-H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \
-H 'Accept-Language: en-US,en;q=0.5' \
-H 'Accept-Encoding: gzip, deflate, br' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' \
-H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \
-c "${dosya_cookie_jar}" \
"${remote_url}")
fi
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "dos_fetch_$i" "remote_url: ${remote_url}"$'\n'"User-Agent: $RandomUA"$'\n'"${PAGE}"
fi
if grep -Eqi '<i class="fa fa-exclamation-triangle margin-right-20">|Hata indir|Dosya silindi|Oops - Sayfa Bulunamadı!' <<< "${PAGE}" ; then
rm -f "${dosya_cookie_jar}";
if ((i > 1)) ; then
printf "\\n"
fi
echo -e "${RED}| The file has been removed${NC}"
removedDownload "${remote_url}"
exitDownloadNotAvailable=true
return 1
fi
dos_url=$(grep -oP '(?<='"'"'btn btn-default'"'"' href='"'"')[^<]*?(?='"'"'>DOSYAYI)' <<< "${PAGE}")
if [[ -z $dos_url ]] ; then
rm -f "${dosya_cookie_jar}";
failed_responses=$((failed_responses + 1))
if ((i >= $maxI)) ; then
echo -e "${YELLOW}| Dosya url not found${NC}"
return 1
fi
printf "${YELLOW}. ${NC}"
continue
fi
maxJ=20
for ((j=1; j<=$maxJ; j++)); do
dosya_token_url=""
if grep -Eqi 'download_token=' <<< $dos_url ; then
printf "${YELLOW}${NC}"
else
printf "${YELLOW}. ${NC}"
tor_identity="${RANDOM}"
GetRandomUA
sleep 3s
fi
form_data="${dos_url##*\?}"
url_parent="${dos_url%%\?*}"
if grep -Eqi 'download_token=' <<< $dos_url ; then
dos_down_fileid="${url_parent##*dosyaupload.com\/}"
file_path="${dos_down_fileid##*\/}"
dos_down_fileid="${dos_down_fileid%%\/*}"
else
dos_down_fileid="${url_parent##*\/}"
fi
cookie_filehosting=$(grep -oP -m 1 '(?<=filehosting).*$' "${dosya_cookie_jar}")
cookie_filehosting=${cookie_filehosting//[$'\t\r\n']}
if grep -Eqi 'https://dosyaupload.com' <<< "$dos_url" ; then
dos_url=${dos_url/dosyaupload.com/www.dosyaupload.com}
fi
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f ${dosya_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if [ "${UseTorCurlImpersonate}" == "true" ]; then
file_header=$(tor_curl_request_extended --insecure --head -L -s \
-H "Cookie: filehosting=$cookie_filehosting" \
-H "Host: www.dosyaupload.com" \
-w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \
-b "${dosya_cookie_jar}" -c "${dosya_cookie_jar}" "$dos_url")
else
file_header=$(tor_curl_request_extended --insecure --head -L -s \
-w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \
-H "User-Agent: $RandomUA" \
-H "Cookie: filehosting=$cookie_filehosting" \
-H "Host: www.dosyaupload.com" \
-H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' \
-H 'Accept-Language: en-US,en;q=0.5' \
-H 'Accept-Encoding: gzip, deflate, br' \
-H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' \
-H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' \
-b "${dosya_cookie_jar}" -c "${dosya_cookie_jar}" "$dos_url")
fi
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "dos_head_""$i"_"$j" "dos_url: ${dos_url}"$'\n'"User-Agent: $RandomUA"$'\n\n'"${file_header}"
fi
if [ ! -z "$file_header" ] ; then
bIsLocation=false
bIsContentLength=false
if grep -Eqi 'download_token=' <<< $dos_url ; then
bIsLocation=true
if grep -Eqi 'HTTP_CODE: 000' <<< "$file_header" ; then
if ((j >= 6)) && ((i >= $maxI)) ; then
rm -f "${dosya_cookie_jar}";
printf "${YELLOW}${NC}"
if ((j > 1)) ; then
printf "\\n"
fi
echo -e "${RED}| The cdn url returned no response or disconnected client. Try again later.${NC}"
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "The cdn url returned no response or disconnected client. Try again later." ""
fi
return 1
elif ((j >= 6)) ; then
rm -f "${dosya_cookie_jar}";
printf "${YELLOW}${NC}"
if ((j > 1)) ; then
printf "\\n"
fi
echo -e "${YELLOW}| The cdn url returned no response or disconnected client. Try again later.${NC}"
break
else
continue
fi
fi
else
if grep -Eqi 'HTTP_CODE: 302' <<< "$file_header" ; then
if grep -Eqi 'download_token=' <<< "$file_header" ; then
bIsLocation=true
printf "L \n"
j=1
dosya_token_url=$(grep -oP 'EFFECTIVE_URL: \K.*$' <<< "${file_header}")
dosya_token_url=${dosya_token_url//[$'\t\r\n']}
download_url=${dosya_token_url}
fi
elif grep -Eqi 'location:' <<< "${file_header}" ; then
bIsLocation=true
printf "L \n"
j=1
dosya_token_url=$(grep -oPi '(?<=location: ).*?(?=$)' <<< "$file_header")
dosya_token_url=${dosya_token_url//[$'\t\r\n']}
download_url=${dosya_token_url}
fi
fi
if grep -Eqi 'filename="' <<< "${file_header}" ; then
dosya_fname=$(grep -oPi '(?<=filename=").*?(?=")' <<< "$file_header")
dosya_fname=${dosya_fname//[$'\t\r\n']}
else
dosya_fname=${dosya_token_url%%\?*}
dosya_fname=${dosya_fname##*/}
fi
if grep -Eqi 'Content-Length' <<< "${file_header}" ; then
bIsContentLength=true
file_size_bytes=$(grep -oPi '(?<=content-length: ).*?$' <<< "$file_header")
file_size_bytes=${file_size_bytes//[$'\t\r\n']}
fi
if ! $bIsLocation ; then
if ((j >= $maxJ)) && ((i >= $maxI)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
echo -e "${RED}| Failed to find pt url${NC}"
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
elif ((j >= $maxJ)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
break
else
continue
fi
elif ! $bIsContentLength ; then
if ((j >= $maxJ)) && ((i >= $maxI)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
echo -e "${RED}| Failed to extract file size${NC}"
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
elif ((j >= $maxJ)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
break
else
dos_url=$download_url
continue
fi
else
if [ -n "$dosya_token_url" ]; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
echo -e "${GREEN}|Dosya url found${NC}"
break
else
if ((j >= $maxJ)) && ((i >= $maxI)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
echo -e "${RED}|Dosya url not found${NC}"
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
elif ((j >= $maxJ)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
break
else
continue
fi
fi
fi
else
if ((j >= $maxJ)) && ((i >= $maxI)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
echo -e "${RED}| No response. Try again later."
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "" ""
fi
return 1
elif ((j >= $maxJ)) ; then
rm -f "${dosya_cookie_jar}";
if ((j > 1)) ; then
printf "\\n"
fi
break
else
continue
fi
fi
done #head
if [ -n "$dosya_token_url" ]; then
rm -f "${dosya_cookie_jar}";
break
fi
done #fetch
touch "${WorkDir}/.flocks/${remote_url//[^a-zA-Z0-9]/}"
if [ ! "$filename_override" == "" ] ; then
filename="$filename_override"
else
filename=$dosya_fname
fi
filename=$(sanitize_file_or_folder_name "${filename}")
if [ -z "$file_size_bytes" ] ; then
if [ "${finalAttempt}" == "true" ] ; then
failedRetryDownload "${remote_url}" "Filesize not found!" ""
fi
echo -e "${YELLOW}| Filesize not found… retry${NC}"
return 1
else
file_size_readable="$(numfmt --to=iec --from=auto --format "%.2f" <<< "$file_size_bytes")"
fi
echo -e "${YELLOW}| File size:${NC}\t${file_size_readable}"
file_path="${download_inflight_path}${filename}"
echo -e "${YELLOW}| File name:${NC}\t\"${filename}\""
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
if CheckFileSize "${remote_url}" "${file_size_bytes}" ; then
return 1
fi
if CheckDownloadExists "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_path" "$completed_location" ; then
return 1
fi
echo "${remote_url//[^a-zA-Z0-9]/}" > $flockDownload
}
#!
#! ----------- (3) Fetch File / Download File Function --------------- #
#!
dosya_GetFile() {
echo -e "${GREEN}# Downloading… (may take up to 60 seconds for server to begin sending)"
echo -e "${YELLOW}| File path:${NC}\t./.inflight/${filename}\n"
fileCnt=$1
retryCnt=$2
finalAttempt=$3
flockDownload="${WorkDir}/.flocks/${filename//[^a-zA-Z0-9\.\_\-]/}.flock"
if [ "${DebugAllEnabled}" == "true" ] ; then
debugHtml "${remote_url##*/}" "dos_down_UA" "User-Agent: ${RandomUA}"
fi
for ((j=1; j<=$MaxDownloadRetries; j++)); do
pd_presize=0
if [ -f "$file_path" ] ; then
pd_presize=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
fi
GetRandomUA
CLEANSTRING=${remote_url//[^a-zA-Z0-9]/}
trap "rm -f ${WorkDir}/.flocks/${CLEANSTRING}; rm -f $flockDownload; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if [ "${RateMonitorEnabled}" == "true" ]; then
tor_curl_request -L -G --insecure \
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/jxl,image/webp,*/*;q=0.8" \
-H "Accept-Language: en-US,en;q=0.5" \
-H "Accept-Encoding: gzip, deflate, br" \
-H "Connection: keep-alive" \
-H "Upgrade-Insecure-Requests: 1" \
-H "Sec-Fetch-Dest: document" \
-H "Sec-Fetch-Mode: navigate" \
-H "Sec-Fetch-Site: none" \
-H "Sec-Fetch-User: ?1" \
-H "DNT: 1" \
-H "Sec-GPC: 1" \
-b "${dosya_cookie_jar}" --speed-limit $DownloadSpeedMin --speed-time $DownloadTimeoutInterval \
"$download_url" --continue-at - --output "$file_path"
else
tor_curl_request -L -G --insecure \
-H "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/jxl,image/webp,*/*;q=0.8" \
-H "Accept-Language: en-US,en;q=0.5" \
-H "Accept-Encoding: gzip, deflate, br" \
-H "Connection: keep-alive" \
-H "Upgrade-Insecure-Requests: 1" \
-H "Sec-Fetch-Dest: document" \
-H "Sec-Fetch-Mode: navigate" \
-H "Sec-Fetch-Site: none" \
-H "Sec-Fetch-User: ?1" \
-H "DNT: 1" \
-H "Sec-GPC: 1" \
-b "${dosya_cookie_jar}" \
"$download_url" --continue-at - --output "$file_path"
fi
received_file_size=0
if [ -f "$file_path" ] ; then
received_file_size=$(stat --format="%s" "$file_path" | tr -d '[:space:]')
fi
if CheckNoHtml "$remote_url" "$filename" "$file_path" ; then
containsHtml=false
else
containsHtml=true
fi
downDelta=$(( received_file_size - pd_presize ))
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]] || [ "$containsHtml" == "true" ]; then
if [ "${AutoRepairBadPartials}" == "true" ] && (( downDelta > 0 && downDelta < 1024 )) ; then
if [ -f "${file_path}" ] ; then
if ((pd_presize > 0)); then
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
truncate -s $pd_presize "${file_path}"
else
echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
rm -f "${file_path}"
fi
fi
if ((j >= $MaxDownloadRetries)) ; then
rm -f "${dosya_cookie_jar}";
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
elif [ "${AutoRepairBadPartials}" == "true" ] && [ "$containsHtml" == "true" ] ; then
if [ -f "${file_path}" ] ; then
if ((pd_presize > 0)); then
echo -e "${YELLOW}Bad node / HTML found:${NC} reverting to previous file..."
truncateDownload "$remote_url" "$filename" "$pd_presize" "$received_file_size"
truncate -s $pd_presize "${file_path}"
else
echo -e "${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
rm -f "${file_path}"
fi
fi
if ((j >= $MaxDownloadRetries)) ; then
rm -f "${dosya_cookie_jar}";
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
elif (( downDelta > 0 && downDelta < 1024 )) || [ "$containsHtml" == "true" ] ; then
if [ -f "$file_path" ] ; then
rm -rf "$file_path"
fi
echo -e "\n${YELLOW}Bad node / HTML found:${NC} tainted partial removed..."
if ((j >= $MaxDownloadRetries)) ; then
rm -f "${dosya_cookie_jar}";
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
fi
if [[ "${received_file_size}" -ne "${file_size_bytes}" ]]; then
echo -e "\n${RED}Download failed, file is incomplete.${NC}"
if ((j >= $MaxDownloadRetries)) ; then
rm -f "${dosya_cookie_jar}";
rm -f "$flockDownload";
if [ "${finalAttempt}" == "true" ] ; then
droppedSizeBadDownload "${remote_url}" "${filename}" "${received_file_size}"
fi
return 1
else
continue
fi
fi
else
break
fi
done
rm -f "$flockDownload";
rm -f "${dosya_cookie_jar}";
ProcessCompletedDownload "$remote_url" "$MoveToFolder" "$filecnt" "$filename" "$file_size_bytes" "$completed_location" "$file_path"
return 0
}
#!
#! --------------- Host Extra Functions ------------------- #
#!