mad/hosts/up_axfc.sh
# 2025.02.18 - [uploadhive] Add handling of the new /cgi-bin/dl.cgi/ url tickets (WIP)
#               (unfortunately, this is tied to the requesting ip, so downloads get "Wrong IP")
# 2025.02.18 - [up_oshi] Add Manage url as comment on uploads
# 2025.02.18 - [up_oshi / oshi] use /nossl/ url and http
# 2025.02.17 - [gofile] Add a random sleep if 429 response detected (too many requests)
# 2025.02.17 - [*ALL] Audit and update all single bracket operations
# 2025.02.17 - [filehaus] Fix downloading from fh
# 2025.02.15 - [uploadbay] Update urls regex for acceptable alternate
# 2025.02.15 - [up_sendnow] Add send.now as upload host
# 2025.02.15 - [sendnow] Fix handling of filenames with special characters in url

#! Name: up_axfc.sh
#! Author: kittykat
#! Version: 2025.01.16
#! Desc: Add support for uploading files to a new host
#! Info: Files are accessible at https://www.axfc.net/<hash>
#! MaxSize: 2GB
#! Expire: ?
#! Usage: Copy this file into the ./${WorkDir}/hosts_upload/ folder
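#!        e.g. (illustration only): cp up_axfc.sh "./${WorkDir}/hosts_upload/"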
#!
#!
#! ------------ REQUIRED SECTION ---------------
#! @[UPDATE] ListUploadHosts: This string is loaded into mad.sh and allows dynamic handling of new url data
#! Format: '/HostCode/HostNick/HostFuncPrefix@'
#! HostCode: <aUniqueCodeForHost> (ie. 'fh' for filehaus -- cannot be used by other hosts)
#! HostNick: What is displayed throughout MAD output
#! HostFuncPrefix: <aUniqueStringThatMustPrefixHostFunctions> ie. 'fh' -- fh_UploadFile()
#! * Note: Must begin with a letter a-z (functions beginning with numbers are no bueno)
HostCode='axfc'
HostNick='axfc.net'
HostFuncPrefix='axfc'
#!
#! !! DO NOT UPDATE OR REMOVE !!
#! This merges the Required HostAndDomainRegexes into mad.sh
ListUploadHosts=${ListUploadHosts}'/'${HostCode}'/'${HostNick}'/'${HostFuncPrefix}'@'
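#! (With the values above, the entry appended here is '/axfc/axfc.net/axfc@'.)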
#!
#!
#! Configurables
#! -------------
#!
#! ------------ (1) Host Main Upload Function --------------- #
#!
#! @REQUIRED: Host Main Upload function
#! Must be named specifically as such:
#! <HostFuncPrefix>_UploadFile()
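#! For reference, mad.sh is expected to call it roughly as:
#!   axfc_UploadFile "$HostCode" "/path/to/file" <fileCount> <parsedLine>
#! (argument order inferred from the locals below -- an illustration, not the dispatcher's exact call)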
axfc_UploadFile() {
local _hostCode=${1}
local filepath=${2}
local filecnt=${3}
local pline=${4}
local filename="${filepath##*/}"
warnAndRetryUnknownError=false
exitUploadError=false
exitUploadNotAvailable=false
fileAlreadyDone=false
tor_identity="${RANDOM}"
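# UploadTicket doubles as a simple lock/marker file: host code plus the file path stripped to alphanumerics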
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
MaxUploadSizeInBytes=2147483648
fsize=$(GetFileSize "$filepath" "false")
if ((fsize > MaxUploadSizeInBytes)); then
rm -f "${UploadTicket}"
echo -e "${YELLOW}| SKIP${NC}: The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
failedUpload "${filepath}" "${_hostCode}" "Skipping upload. The size of $filename is to large for $_hostCode. ($fsize > $MaxUploadSizeInBytes)"
return 1
fi
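# Retry loop: attempt the upload up to MaxUploadRetries+1 times; the flags initialized above
# decide whether a failed attempt is retried (recoverable) or aborted (unrecoverable/already done)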
finalAttempt="false"
for ((z=0; z<=$MaxUploadRetries; z++)); do
if [[ $z -eq $MaxUploadRetries ]] ; then
finalAttempt="true"
fi
trap "rm -f "${UploadTicket}"; echo ""; tput cnorm; exit" 0 1 2 3 6 15
if axfc_PostFile "${filepath}" "${_hostCode}" "${filename}" "${filecnt}" $((z+1)) $finalAttempt $pline ; then
return 0
elif [[ $z -lt $MaxUploadRetries ]]; then
if [[ "${fileAlreadyDone}" == "true" ]] ; then
rm -f "${UploadTicket}"
break
fi
if [[ "${warnAndRetryUnknownError}" == "true" ]] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "error" "Retry due to an unknown issue: attempt #$((z+1)) of ${MaxUploadRetries}"
fi
fi
if [[ "${exitUploadError}" == "true" || "${exitUploadNotAvailable}" == "true" ]] ; then
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "error" "Exit due to unrecoverable issue"
fi
rm -f "${UploadTicket}"
break
fi
echo -e "\n${YELLOW}A recoverable error occurred, retry attempt $((z+1))/${MaxUploadRetries}${NC}"
sleep 3
fi
done
rm -f "${UploadTicket}"
}
#!
#! ----------- (2) Post File / Upload File Function --------------- #
#!
axfc_PostFile() {
local filepath=$1
local _hostCode=$2
local filename=$3
local fileCnt=$4
local retryCnt=$5
local finalAttempt=$6
local pline=${7}
UploadTicket="${WorkDir}/.flocks/upload_${_hostCode}_${filepath//[^a-zA-Z0-9]/}"
echo -e "[${YELLOW}${_hostCode}${NC}] Uploading ${GREEN}${filename}${NC}"
tor_identity="${RANDOM}"
fixed_url='https://www.axfc.net/u/post_m.pl'
maxfetchretries=3
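# Step 1: fetch the upload form page, mainly to seed the cookie jar for this session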
echo -e "${GREEN}# Fetching upload post…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
mkdir -p "${WorkDir}/.temp"
axfc_cookie_jar=$(mktemp "${WorkDir}/.temp/axfc_cookies${instance_no}.XXXXXX")
printf " ."
tor_identity="${RANDOM}"
trap "rm -f ${UploadTicket}; rm -f ${axfc_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_request --insecure -L -s -b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" "$fixed_url")
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "axfc_fetch$i" "${response}"
fi
if [[ -z $response ]] ; then
rm -f "${axfc_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to start an upload [1]${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to start an upload [1]" ""
fi
return 1
else
continue
fi
fi
printf "\\n"
break
done
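# Step 2: POST the upload metadata (random delete password, keyword '1234', expiry fields)
# and extract the per-upload ACTION url that the file itself must be posted to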
maxfetchretries=3
echo -e "${GREEN}# Fetching token info and token url…${NC}"
for ((i=1; i<=$maxfetchretries; i++)); do
printf " ."
randelkey=$(( RANDOM % (9999 - 1000 + 1) + 1000 ))
trap "rm -f ${UploadTicket}; rm -f ${axfc_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -L -s -X POST \
-H 'Referer: https://www.axfc.net/u/post_m.pl' \
-H 'Content-Type: application/x-www-form-urlencoded' \
-H 'Origin: https://www.axfc.net' \
-H 'Connection: keep-alive' \
-b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" \
--data-raw "method=upload&ext=ext&filename=1&comment=&address=&delpass=$randelkey&keyword=1234&count=&term=0&term_y=2024&term_mon=10&term_d=1&term_h=15&term_min=0&term_s=0&term_ps=&term_mp=3600" \
-w "\n\nHTTP_CODE: %{http_code}\nEFFECTIVE_URL: %{url_effective}\n" \
"$fixed_url")
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "axfc_ticket$i" "${response}"
fi
if [[ -z $response ]] ; then
rm -f "${axfc_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Failed to extract token link [1].${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
failedUpload "$pline" "${filepath}" "${_hostCode}" "Failed to extract token link [1]" ""
fi
return 1
else
continue
fi
fi
if grep -Eqi 'ACTION\="https\://' <<< "$response"; then
PostUrlHost=$(grep -oPi -m 1 '(?<=ACTION\=").*?(?=">.*$)' <<< "$response")
break
else
rm -f "${axfc_cookie_jar}";
if [[ $i == $maxfetchretries ]] ; then
printf "\\n"
echo -e "${RED}| Ticket url not found [1].${NC}"
warnAndRetryUnknownError=true
if [[ "${finalAttempt}" == "true" ]] ; then
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Ticket url not found [1]" ""
fi
return 1
else
continue
fi
fi
done
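# Step 3: POST the file as multipart/form-data to the ACTION url extracted above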
arrFiles=("$filepath")
printf "\\n"
echo -e "${GREEN}# Posting file…${NC}"
trap "rm -f ${UploadTicket}; rm -f ${axfc_cookie_jar}; echo ""; tput cnorm; exit" 0 1 2 3 6 15
response=$(tor_curl_upload --insecure -L -i -X POST \
-H "Content-Type: multipart/form-data" \
-H 'Connection: keep-alive' \
-F "filedata=@$filepath" \
-b "${axfc_cookie_jar}" -c "${axfc_cookie_jar}" \
"$PostUrlHost")
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload" "post_url: ${PostUrlHost}"$'\n'"${response}"
fi
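# Normalize the UTF-8 response to ASCII; the first success pattern below matches against this copy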
response_ascii=$(mconvert_utf8_to_ascii "$response")
if [[ "${DebugAllEnabled}" == "true" ]] ; then
debugHtml "${filepath##*/}" "${_hostCode}_upload_ascii" "post_url: ${PostUrlHost}"$'\n'"${response_ascii}"
fi
if grep -Eqi -m 1 'a href="https://www\.axfc\.net\/u\/.*\?key=1234"' <<< "${response_ascii}" ; then
url=$(grep -oPi -m 1 '(?<=a href="https://www\.axfc\.net\/u\/).*?(?=".*$)' <<< "$response_ascii")
key=$(grep -oPi -m 1 '(?<=\?key=).*?(?=".*$)' <<< "$response_ascii")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="https://www.axfc.net/u/${url%%$'\n'*}?key=${key%%$'\n'*}"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link [1]: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
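# Fallback: match the success page in its original encoding
# (投稿完了 = "post complete", キーワード付きURL = "URL with keyword", QueryString無しVer = "version without query string")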
elif grep -Eqi 'Axfc Uploader -投稿完了.*キーワード付きURL.*a href="https://www.axfc.net.*(QueryString無しVer)' <<< "${response}" ; then
subSearch=$(awk '/Axfc Uploader -投稿完了/,/(QueryString無しVer)/' <<< "$response")
url=$(grep -oPi -m 1 '(?<=キーワード付きURL\</td\>\</tr\>\<tr\>\<td\>\<a href\=").*?(?=" target="_blank">ファイルIDのみ.*$)' <<< "$subSearch")
filesize=$(GetFileSize "$filepath" "false")
downloadLink="${url%%$'\n'*}"
echo -e "${GREEN}| Upload Success${NC}"
echo -e "| Size: ${BLUE}${filesize}${NC} bytes${NC}"
echo -e "| Link [2]: ${YELLOW}${downloadLink}${NC}"
successUpload "$pline" "${filepath}" "${_hostCode}" "${filesize}" "${downloadLink}" "{$response}"
return 0
else
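# Neither success pattern matched: pull whatever status text is available and report the failure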
err=$(grep -oPi '(?<=HTTP/).*?(?=")' <<< "$response")
if [[ "${finalAttempt}" == "true" ]] ; then
printf "\\n"
echo -e "${RED}| Upload failed. Status: ${err}${NC}"
failedRetryUpload "$pline" "${filepath}" "${_hostCode}" "Failed to upload file" "Status: $err"
exitUploadError=true
return 1
else
return 1
fi
fi
}
#!
#! --------------- Host Extra Functions ------------------- #
#!