Making variable checks a requirement and allowing overrides
parent 01422d0bd7
commit 62071280a5
@@ -52,6 +52,8 @@ function display_help_and_exit() {
 
 function program_checks() {
 
+    echo_debug "Completing program checks. program_checks()"
+
     if [ ! -d "${processing_dir}" ]
     then
         echo_error "The \"${processing_dir}\" is required but is not defined."
@@ -86,12 +88,54 @@ function program_checks() {
 
 }
 
+#################################################
+# Program Checks
+
+function check_variable_is_correct() {
+
+    echo_debug "Checking variables ${*}. check_variable_is_correct()"
+
+    for argument in "$@"
+    do
+        case "${argument}" in
+            working_dir)
+                if [[ ! -d "${working_dir}" || -z "${working_dir}" ]]
+                then
+                    echo_error "The \"working_dir\" variable is missing."
+                fi
+                ;;
+            ep_num)
+                if [ -z "${ep_num}" ]
+                then
+                    echo_error "The \"ep_num\" variable is missing."
+                fi
+                if [ "$( echo "${ep_num}" | grep --perl-regexp '^(0{0,3}[1-9]\d{0,2}|[1-9]\d{0,3})$' | wc --lines )" -eq "0" ]
+                then
+                    echo_error "The \"ep_num\" variable is not a valid number between 1 and 9999."
+                fi
+                ;;
+            shownotes_edited)
+                if [ ! -s "${shownotes_edited}" ]
+                then
+                    echo_debug "The \"shownotes_edited\" variable is missing."
+                    return
+                fi
+                if [ "$( file --brief --mime-type "${shownotes_edited}" | grep --count 'text/html' )" -ne "1" ]
+                then
+                    echo_error "The \"shownotes_edited\" variable does not have a valid \"text/html\" mime type."
+                fi
+                ;;
+        esac
+    done
+
+}
+
 #################################################
 # Get the next show in the queue
 
 function get_working_dir_from_hpr_hub() {
 
-    echo_debug "Processing the next HPR Show in the queue"
+    echo_debug "Processing the next HPR Show in the queue. get_working_dir_from_hpr_hub()"
 
     if [ "$( curl --silent --netrc-file ${HOME}/.netrc --write-out '%{http_code}' https://hub.hackerpublicradio.org/cms/status.php --output "${processing_dir}/status.csv" )" != 200 ]
     then
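
For reference: the new check_variable_is_correct() helper above is called with the names of the variables to validate, and the ep_num pattern accepts whole numbers from 1 to 9999 (up to three leading zeros are tolerated). A minimal usage sketch with illustrative values, mirroring the call sites added later in this commit:

    working_dir="/home/ken/hpr/hpr4371/"    # placeholder path, for illustration only
    ep_num="0042"                           # matches the regex; "0" or "10000" would not
    check_variable_is_correct working_dir ep_num
    check_variable_is_correct working_dir ep_num shownotes_edited
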
@@ -147,7 +191,7 @@ function get_working_dir_from_hpr_hub() {
 
 function get_working_dir_from_local_dir() {
 
-    echo_debug "Processing a local directory"
+    echo_debug "Processing a local directory. get_working_dir_from_local_dir()"
 
     if [[ ! -d "${working_dir}" || -z "${working_dir}" ]]
     then
@@ -197,7 +241,7 @@ function get_working_dir_from_local_dir() {
 
 function get_working_dir() {
 
-    echo_debug "Getting working directory and populating show information. $#"
+    echo_debug "Getting working directory and populating show information. get_working_dir()"
 
     if [ $# -eq 0 ]
     then
@@ -229,7 +273,7 @@ function get_working_dir() {
 
 function get_episode_metadata() {
 
-    echo_debug "Extracting the episode metadata"
+    echo_debug "Extracting the episode metadata. get_episode_metadata()"
 
     if [[ -s "${working_dir}/shownotes.json" && "$( file --brief --mime-type "${working_dir}/shownotes.json" | grep --count "application/json" )" -eq 0 ]]
     then
@@ -238,6 +282,7 @@ function get_episode_metadata() {
 
     shownotes_json="${working_dir}/shownotes.json"
     shownotes_html="${working_dir}/shownotes.html"
+    shownotes_edited="${working_dir}/shownotes_edited.html"
 
     hostid="$( jq --raw-output '.host.Host_ID' ${shownotes_json} )"
     host_name="$( jq --raw-output '.host.Host_Name' ${shownotes_json} )"
@@ -315,11 +360,11 @@ function get_episode_metadata() {
 
 function extract_images_brute_force() {
 
-    echo_debug "Extracting images with grep."
+    echo_debug "Extracting images with grep. extract_images_brute_force()"
 
-    if [ -s "${shownotes_html%.*}_edited.html" ]
+    if [ -s "${shownotes_edited}" ]
     then
-        echo_debug "There is already an edited version of the shownotes at \"${shownotes_html%.*}_edited.html\", skipping image extraction."
+        echo_debug "There is already an edited version of the shownotes at \"${shownotes_edited}\", skipping image extraction."
         return
     fi
 
@@ -461,6 +506,8 @@ function extract_images_brute_force() {
 
 function media_checks() {
 
+    echo_debug "Running media checks. media_checks()"
+
     if [[ -n "${remote_media}" && "${remote_media}" != "null" ]]
     then
         echo_debug "Fetching remote media from \"${remote_media}\""
@@ -484,7 +531,7 @@ function media_checks() {
         echo_error "Could not create the media_basename \"${media_basename}/\""
     fi
 
-    if [ "$( echo "${media}" | wc -l )" -ne 1 ]
+    if [ "$( echo "${media}" | wc --lines )" -ne 1 ]
     then
         echo "Multiple files found. Which one do you want to use ?"
         select this_media in $( echo "${media}" )
@@ -558,7 +605,7 @@ function media_checks() {
 
 function generate_initial_report() {
 
-    echo_debug "Generating the initial report."
+    echo_debug "Generating the initial report. generate_initial_report()"
 
     # TODO list the images.
 
@@ -672,7 +719,7 @@ $(cat "${shownotes_srt}" )
 
 function manual_shownotes_review() {
 
-    echo_debug "Validating the initial report."
+    echo_debug "Validating the initial report. manual_shownotes_review()"
 
     if [[ -z "${shownotes_html}" || ! -s "${shownotes_html}" || ! -s "${working_dir}/processing/${media_basename%.*}_media_report.html" ]]
     then
@@ -681,23 +728,23 @@ function manual_shownotes_review() {
         echo_error "The files needed to generate the initial report information are not available."
     fi
 
-    if [ -s "${shownotes_html%.*}_edited.html" ]
+    if [ -s "${shownotes_edited}" ]
     then
-        echo_debug "There is already an edited version of the shownotes at \"${shownotes_html%.*}_edited.html\"."
+        echo_debug "There is already an edited version of the shownotes at \"${shownotes_edited}\"."
         return
     fi
 
-    cp -v "${shownotes_html}" "${shownotes_html%.*}_edited.html"
+    cp -v "${shownotes_html}" "${shownotes_edited}"
 
-    if [ ! -s "${shownotes_html%.*}_edited.html" ]
+    if [ ! -s "${shownotes_edited}" ]
     then
-        echo_error "The edited shownotes are missing \"${shownotes_html%.*}_edited.html\"."
+        echo_error "The edited shownotes are missing \"${shownotes_edited}\"."
     fi
 
-    kate "${shownotes_html%.*}_edited.html" >/dev/null 2>&1 &
+    kate "${shownotes_edited}" >/dev/null 2>&1 &
     librewolf "${working_dir}/processing/${media_basename%.*}_media_report.html" >/dev/null 2>&1 &
-    seamonkey "${shownotes_html%.*}_edited.html" >/dev/null 2>&1 &
+    seamonkey "${shownotes_edited}" >/dev/null 2>&1 &
-    # # # # bluefish "${shownotes_html%.*}_edited.html" >/dev/null 2>&1 &
+    # # # # bluefish "${shownotes_edited}" >/dev/null 2>&1 &
     # https://markdowntohtml.com/
 
     read -p "Does the metadata look ok ? (N|y) ? " -n 1 -r
@@ -708,14 +755,14 @@ function manual_shownotes_review() {
     fi
 
     # remove extra wrappers that seamonkey adds
-    grep --invert-match --perl-regexp '<!DOCTYPE|<html|html>|head>|<meta|body>' "${shownotes_html%.*}_edited.html" | sponge "${shownotes_html%.*}_edited.html"
+    grep --invert-match --perl-regexp '<!DOCTYPE|<html|html>|head>|<meta|body>' "${shownotes_edited}" | sponge "${shownotes_edited}"
 
     # Check to see if images have been linked TODO make a loop for found images
-    if [ "$( find "${working_dir}" -type f -iname "*_image_*" | wc -l )" -ne "0" ]
+    if [ "$( find "${working_dir}" -type f -iname "*_image_*" | wc --lines )" -ne "0" ]
     then
-        if [ "$( grep --count "_image_" "${shownotes_html%.*}_edited.html" )" -eq "0" ]
+        if [ "$( grep --count "_image_" "${shownotes_edited}" )" -eq "0" ]
         then
-            echo_error "The extracted images were not linked in the shownotes \"${shownotes_html%.*}_edited.html\"."
+            echo_error "The extracted images were not linked in the shownotes \"${shownotes_edited}\"."
         fi
     fi
 }
@@ -725,17 +772,19 @@ function manual_shownotes_review() {
 
 function post_show_to_hpr_db() {
 
+    echo_debug "Posting the show to the HPR DB. post_show_to_hpr_db()"
+
     if [ "$( curl --silent --netrc --write-out '%{http_code}' https://hub.hackerpublicradio.org/cms/say.php?id=${ep_num} --output /dev/null )" == 200 ]
     then
         echo "WARN: The Episode hpr${ep_num} has already been posted"
         return
     fi
 
-    if [ ! -s "${shownotes_html%.*}_edited.html" ]
+    if [ ! -s "${shownotes_edited}" ]
     then
-        echo_error "Failed to find the extracted shownote html file \"${shownotes_html%.*}_edited.html\""
+        echo_error "Failed to find the extracted shownote html file \"${shownotes_edited}\""
     fi
-    notes="$( cat "${shownotes_html%.*}_edited.html" | jq --slurp --raw-input @uri | sed -e 's/%0A"$//g' -e 's/^"//g' )"
+    notes="$( cat "${shownotes_edited}" | jq --slurp --raw-input @uri | sed -e 's/%0A"$//g' -e 's/^"//g' )"
     host_profile_encoded="$( echo "${host_profile}" | jq --slurp --raw-input @uri | sed -e 's/%0A"$//g' -e 's/^"//g' )"
 
 
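
Aside: the curl idiom used in this function reports only the HTTP status code, which is what allows the one-line "already posted" check. A minimal standalone sketch, with the episode number hard-coded purely for illustration:

    http_code="$( curl --silent --netrc --write-out '%{http_code}' --output /dev/null \
        "https://hub.hackerpublicradio.org/cms/say.php?id=4371" )"
    if [ "${http_code}" == "200" ]
    then
        echo "hpr4371 has already been posted"
    fi
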
@@ -799,7 +848,9 @@ function post_show_to_hpr_db() {
 #################################################
 # Generate text to speech summary
 
-function create_tts_summary {
+function create_tts_summary() {
 
+    echo_debug "Creating Text to Speech summary. create_tts_summary()"
+
     if [ "$( curl --silent --netrc --write-out '%{http_code}' https://hub.hackerpublicradio.org/cms/say.php?id=${ep_num} --output "${working_dir}/episode_summary.json" )" != 200 ]
     then
@@ -838,9 +889,9 @@ function create_tts_summary {
 #################################################
 # Generate Intro
 
-function generate_intro {
+function generate_intro() {
 
-    echo_debug "Generating the intro."
+    echo_debug "Generating the intro. generate_intro()"
 
     if [[ ! -s "${working_dir}/processing/episode_tts.wav" || ! -s "${theme}" || ! -s "${media}" || ! -s "${outro_flac}" || ! -d "${working_dir}/processing/" ]]
     then
@@ -868,9 +919,9 @@ function generate_intro {
 #################################################
 # Generate parent audio - the sandwich
 
-function generate_parent_audio {
+function generate_parent_audio() {
 
-    echo_debug "Generating the parent audio - the sandwich."
+    echo_debug "Generating the parent audio - the sandwich. generate_parent_audio()"
 
     if [[ ! -s "${working_dir}/processing/episode_intro.flac" || ! -s "${media}" || ! -s "${outro_flac}" ]]
     then
@@ -892,9 +943,9 @@ function generate_parent_audio {
 #################################################
 # Generate derived media
 
-function generate_derived_media {
+function generate_derived_media() {
 
-    echo_debug "Generating derived audio."
+    echo_debug "Generating derived audio. generate_derived_media()"
 
     if [[ ! -s "${working_dir}/processing/episode_final.flac" ]]
     then
@@ -907,9 +958,9 @@ function generate_derived_media {
 
     # https://wiki.multimedia.cx/index.php?title=FFmpeg_Metadata
 
-    for ext in flac wav mp3 ogg opus
+    for extension in flac wav mp3 ogg opus
     do
-        echo_debug "Generating \"hpr${ep_num}.${ext}\"."
+        echo_debug "Generating \"hpr${ep_num}.${extension}\"."
         ffmpeg -hide_banner -loglevel error -y -i "${working_dir}/processing/episode_final.flac" \
             -metadata title="${title}" \
             -metadata artist="${host_name}" \
@@ -921,14 +972,14 @@ function generate_derived_media {
             -metadata genre="Podcast" \
             -metadata language="English" \
             -metadata copyright="${episode_license}" \
-            "${working_dir}/hpr${ep_num}.${ext}"
+            "${working_dir}/hpr${ep_num}.${extension}"
 
-        fix_tags -album="Hacker Public Radio" -artist="${host_name}" -comment="${episode_comment} The license is ${episode_license}" -genre="Podcast" -title="${title}" -track="${ep_num}" -year="${episode_year}" "${working_dir}/hpr${ep_num}.${ext}"
+        fix_tags -album="Hacker Public Radio" -artist="${host_name}" -comment="${episode_comment} The license is ${episode_license}" -genre="Podcast" -title="${title}" -track="${ep_num}" -year="${episode_year}" "${working_dir}/hpr${ep_num}.${extension}"
 
-        if [[ ! -s "${working_dir}/hpr${ep_num}.${ext}" ]]
+        if [[ ! -s "${working_dir}/hpr${ep_num}.${extension}" ]]
         then
-            echo_error "Failed to generate \"${working_dir}/hpr${ep_num}.${ext}\"."
-            ls -al "${working_dir}/hpr${ep_num}.${ext}"
+            echo_error "Failed to generate \"${working_dir}/hpr${ep_num}.${extension}\"."
+            ls -al "${working_dir}/hpr${ep_num}.${extension}"
         fi
 
     done
@@ -948,7 +999,7 @@ function generate_derived_media {
 
 function generate_show_transcript() {
 
-    echo_debug "Generate show transcript and subtitles."
+    echo_debug "Generate show transcript and subtitles. generate_show_transcript()"
 
     # TODO Currently processed elsewhere by hpr-get-and-transcode.bash and uploaded to hpr:upload/ to be synced with media above
 
@@ -1028,7 +1079,7 @@ function generate_show_transcript() {
 
     cat "${working_dir}/processing/episode.srt" | while read this_line
     do
-        if [ "$( echo "${this_line}" | grep -c -P '^[0-9]+$' )" -eq "1" ]
+        if [ "$( echo "${this_line}" | grep -c --perl-regexp '^[0-9]+$' )" -eq "1" ]
         then
            echo "${count}"
            count=$((count+1))
@@ -1054,7 +1105,7 @@ function generate_show_transcript() {
 
 function generate_final_report() {
 
-    echo_debug "Generating the final report."
+    echo_debug "Generating the final report. generate_final_report()"
 
     final_report="${working_dir}/processing/hpr${ep_num}_report.html"
 
@@ -1159,7 +1210,7 @@ $( cat "${working_dir}/processing/hpr${ep_num}_${this_file_extension_to_check}_f
 
 <hr />
 
-$(cat "${shownotes_html%.*}_edited.html" )
+$(cat "${shownotes_edited}" )
 
 <hr />
 
@@ -1190,7 +1241,7 @@ $(cat "${working_dir}/hpr${ep_num}.txt" )
 
 function manual_final_review() {
 
-    echo_debug "Validating the final report."
+    echo_debug "Validating the final report. manual_final_review()"
 
     if [[ -z "${final_report}" || ! -s "${final_report}" ]]
     then
@@ -1199,7 +1250,7 @@ function manual_final_review() {
     fi
 
     librewolf "${final_report}" >/dev/null 2>&1 &
-    # # # # bluefish "${shownotes_html%.*}_edited.html" >/dev/null 2>&1 &
+    # # # # bluefish "${shownotes_edited}" >/dev/null 2>&1 &
     # https://markdowntohtml.com/
 
     read -p "Does the metadata look ok ? (N|y) ? " -n 1 -r
@@ -1217,7 +1268,7 @@ function manual_final_review() {
 
 function register_assets() {
 
-    echo_debug "Registering the assets with the hpr database"
+    echo_debug "Registering the assets with the hpr database. register_assets()"
 
     if [[ -s "${working_dir}/hpr${ep_num}_assets.csv" ]]
     then
@@ -1289,7 +1340,10 @@ function register_assets() {
 # Register the assets with the hpr database
 
 function copy_files_to_origin_server() {
-    echo_debug "Copying the files to the origin server"
+
+    echo_debug "Copying the files to the origin server. copy_files_to_origin_server()"
+
+    check_variable_is_correct working_dir ep_num
 
     # TODO get an origin server capable of storing all the files
     for this_asset in hpr${ep_num}.mp3 hpr${ep_num}.ogg hpr${ep_num}.opus hpr${ep_num}.srt hpr${ep_num}.txt $( find "${working_dir}/" -type f -iname "hpr${ep_num}_image_*.*" )
@@ -1305,7 +1359,7 @@ function copy_files_to_origin_server() {
         ls -al "${this_file}"
     fi
 
-    if [ "$( ssh rsync.net ls hpr/eps/hpr${ep_num}/ 2>/dev/null | wc -l )" -eq "0" ]
+    if [ "$( ssh rsync.net ls hpr/eps/hpr${ep_num}/ 2>/dev/null | wc --lines )" -eq "0" ]
     then
         echo_debug "Creating \"hpr/eps/hpr${ep_num}/\" on the origin server."
         ssh rsync.net mkdir hpr/eps/hpr${ep_num}/ 2>/dev/null
@@ -1333,37 +1387,39 @@ function copy_files_to_origin_server() {
 # Send the derived files to the server borg to be sent to borg
 function copy_derived_files_to_borg() {
 
-    echo_debug "Sending the derived files to the admin server borg"
+    echo_debug "Sending the derived files to the admin server borg. copy_derived_files_to_borg()"
 
-    for ext in flac mp3 ogg opus wav
+    check_variable_is_correct working_dir ep_num shownotes_html
+
+    for extension in flac mp3 ogg opus wav
     do
-        if [[ ! -s "${working_dir}/hpr${ep_num}.${ext}" ]]
+        if [[ ! -s "${working_dir}/hpr${ep_num}.${extension}" ]]
         then
-            echo_error "The derived files to the admin server borg is missing \"hpr${ep_num}.${ext}\"."
-            ls -al "${working_dir}/hpr${ep_num}.${ext}"
+            echo_error "The derived files to the admin server borg is missing \"hpr${ep_num}.${extension}\"."
+            ls -al "${working_dir}/hpr${ep_num}.${extension}"
         fi
     done
 
-    echo_debug "rsync -ave ssh --partial --progress \"${working_dir}/hpr${ep_num}.{flac,mp3,ogg,opus,wav}\" borg:/data/IA/uploads/"
-    rsync -ave ssh --partial --progress "${working_dir}/hpr${ep_num}".{flac,mp3,ogg,opus,wav} borg:/data/IA/uploads/
+    echo_debug "rsync -ave ssh --partial --progress \"${working_dir}/hpr${ep_num}.{flac,mp3,ogg,opus,wav}\" hpradmin@borg:/data/IA/uploads/"
+    rsync -ave ssh --partial --progress "${working_dir}/hpr${ep_num}".{flac,mp3,ogg,opus,wav} hpradmin@borg:/data/IA/uploads/
     rsync_error="${?}"
     if [ "${rsync_error}" -ne "0" ]
     then
-        echo_error "rsync to \"borg:/data/IA/uploads/\" failed with error ${rsync_error}"
+        echo_error "rsync to \"hpradmin@borg:/data/IA/uploads/\" failed with error ${rsync_error}"
     fi
 
-    rsync -ave ssh --partial --progress "${working_dir}/hpr${ep_num}".{txt,srt} borg:/data/IA/uploads/hpr${ep_num}/
+    rsync -ave ssh --partial --progress "${working_dir}/hpr${ep_num}".{txt,srt} hpradmin@borg:/data/IA/uploads/hpr${ep_num}/
     rsync_error="${?}"
     if [ "${rsync_error}" -ne "0" ]
     then
-        echo_error "rsync to \"borg:/data/IA/uploads/hpr${ep_num}/\" failed with error ${rsync_error}"
+        echo_error "rsync to \"hpradmin@borg:/data/IA/uploads/hpr${ep_num}/\" failed with error ${rsync_error}"
     fi
 
-    rsync -ave ssh --partial --progress "${shownotes_html%.*}_edited.html" borg:/data/IA/uploads/hpr${ep_num}/shownotes.html
+    rsync -ave ssh --partial --progress "${shownotes_edited}" hpradmin@borg:/data/IA/uploads/hpr${ep_num}/shownotes.html
     rsync_error="${?}"
     if [ "${rsync_error}" -ne "0" ]
     then
-        echo_error "rsync to \"borg:/data/IA/uploads/hpr${ep_num}/shownotes.html\" failed with error ${rsync_error}"
+        echo_error "rsync to \"hpradmin@borg:/data/IA/uploads/hpr${ep_num}/shownotes.html\" failed with error ${rsync_error}"
     fi
 
     # Get the current status
@@ -1373,10 +1429,10 @@ function copy_derived_files_to_borg() {
     fi
 
     # Check the current status is correct SHOW_POSTED
-    response=$( grep -P ",${ep_num},.*SHOW_POSTED," "${processing_dir}/status.csv" | head -1 | sed 's/,/ /g' )
+    response=$( grep --perl-regexp ",${ep_num},.*SHOW_POSTED," "${processing_dir}/status.csv" | head -1 | sed 's/,/ /g' )
     if [ -z "${response}" ]
     then
-        grep -P ",${ep_num},.*SHOW_POSTED," "${processing_dir}/status.csv"
+        grep --perl-regexp ",${ep_num},.*SHOW_POSTED," "${processing_dir}/status.csv"
         echo_debug "The show \"${ep_num}\" hasn't the correct status of \"SHOW_POSTED\" in the database."
     fi
 
@@ -1393,10 +1449,10 @@ function copy_derived_files_to_borg() {
     fi
 
     # Check the current status is correct MEDIA_TRANSCODED
-    response=$( grep -P ",${ep_num},.*MEDIA_TRANSCODED," "${processing_dir}/status.csv" | head -1 | sed 's/,/ /g' )
+    response=$( grep --perl-regexp ",${ep_num},.*MEDIA_TRANSCODED," "${processing_dir}/status.csv" | head -1 | sed 's/,/ /g' )
     if [ -z "${response}" ]
     then
-        grep -P ",${ep_num},.*MEDIA_TRANSCODED," "${processing_dir}/status.csv"
+        grep --perl-regexp ",${ep_num},.*MEDIA_TRANSCODED," "${processing_dir}/status.csv"
         echo_error "The show \"${ep_num}\" hasn't the correct status of \"MEDIA_TRANSCODED\" in the database."
     fi
 
@@ -1412,7 +1468,8 @@ function copy_derived_files_to_borg() {
 # Wait for the Internet Archive to finish processing
 
 function wait_for_the_internet_archive_to_process() {
-    echo_debug "Sending the derived files to Internet Archive"
+
+    echo_debug "Waiting for the Internet Archive to finish processing. wait_for_the_internet_archive_to_process()"
 
     if [ -z "${ep_num}x" ]
     then
@@ -1424,9 +1481,9 @@ function wait_for_the_internet_archive_to_process() {
 # Send the derived files to the server borg to be sent to the Internet Archive
 
 function upload_to_the_internet_archive() {
-    echo_debug "Sending the derived files to Internet Archive"
+    echo_debug "Sending the derived files to Internet Archive. upload_to_the_internet_archive()"
 
-    continue
+    check_variable_is_correct working_dir ep_num shownotes_html
 
 # hpr4371.flac
 # hpr4371.mp3
@@ -1437,16 +1494,20 @@ function upload_to_the_internet_archive() {
 # hpr4371.txt
 # hpr4371.wav
 #
-# for ext in flac mp3 ogg opus wav
-# do
-# if [[ ! -s "${working_dir}/hpr${ep_num}.${ext}" ]]
-# then
-# echo_error "The derived files to the Internet Archive are missing \"hpr${ep_num}.${ext}\"."
-# ls -al "${working_dir}/hpr${ep_num}.${ext}"
-# fi
-# done
+
+find ${working_dir} -mindepth 1 -maxdepth 1 -type f \( -iname "hpr${ep_num}.flac" -or -iname "hpr${ep_num}.mp3" -or -iname "hpr${ep_num}.ogg" -or -iname "hpr${ep_num}.opus" -or -iname "hpr${ep_num}_source.*" -or -iname "hpr${ep_num}.srt" -or -iname "hpr${ep_num}.txt" -or -iname "hpr${ep_num}.wav" \)
+
+for extension in flac mp3 ogg opus _source.* srt txt wav
+do
+    if [[ ! -s "${working_dir}/hpr${ep_num}.${extension}" ]]
+    then
+        echo_error "The derived files to the Internet Archive are missing \"hpr${ep_num}.${extension}\"."
+        ls -al "${working_dir}/hpr${ep_num}.${extension}"
+    fi
+done
 #
-# "${working_dir}/hpr${ep_num}.${ext}"
+# "${working_dir}/hpr${ep_num}.${extension}"
 #
 # ep_num="4371"
 # ia upload hpr${ep_num} "${working_dir}/hpr${ep_num}.txt" --metadata=mediatype:audio --metadata="contributor:HackerPublicRadio" --metadata="creator:HPR Volunteers" --metadata="date:2025-05-05" --metadata="description:This show is a placeholder and will be updated soon." --metadata="language:eng" --metadata="licenseurl:http://creativecommons.org/licenses/by-sa/4.0" --metadata="title:A placeholder for hpr${ep_num}." --metadata=reviews-allowed:none --header x-archive-keep-old-version:0 --retries=5 --no-derive --no-backup
@@ -1460,7 +1521,7 @@ function upload_to_the_internet_archive() {
 # locate placeholder
 
 
-# for extension in .flac .mp3 .ogg .opus _source.ogg .srt .txt .wav;
+# for extension in .flac .mp3 .ogg .opus _source.ogg .srt .txt .wav
 # do
 # echo ia upload hpr${ep_num} "${working_dir}/hpr${ep_num}${extension}" --header "x-archive-keep-old-version:0" --retries=5 --no-derive --no-backup
 # done
@@ -1470,10 +1531,10 @@ function upload_to_the_internet_archive() {
 
 # # # # for ext in flac mp3 ogg opus wav
 # # # # do
-# # # # if [[ ! -s "${working_dir}/hpr${ep_num}.${ext}" ]]
+# # # # if [[ ! -s "${working_dir}/hpr${ep_num}.${extension}" ]]
 # # # # then
-# # # # echo_error "The derived files to the IA are missing \"hpr${ep_num}.${ext}\"."
-# # # # ls -al "${working_dir}/hpr${ep_num}.${ext}"
+# # # # echo_error "The derived files to the IA are missing \"hpr${ep_num}.${extension}\"."
+# # # # ls -al "${working_dir}/hpr${ep_num}.${extension}"
 # # # # fi
 # # # # done
 # # # #
@@ -1555,22 +1616,23 @@ function upload_to_the_internet_archive() {
 # TODO copy the files to the backup disk
 
 # Get supplied working dir and ep_num if provided
-for argument in "$@"
+if [ $# -gt 0 ]
+then
+    declare -A hash
+    for argument
     do
-        if [ "$( echo "${argument}" | grep --count "working_dir=" )" -eq "1" ]
+        if [[ $argument =~ ^[^=]+=.*$ ]]
         then
-            working_dir="$( realpath $( echo "${argument}" | sed 's/^.*working_dir=//g' | awk '{print $1}' ) )/"
-            echo_debug "Using supplied 'working_dir' of \"${working_dir}\""
+            key="${argument%=*}"
+            value="${argument#*=}"
+            eval "${key}=${value}"
+            echo_debug "Using supplied \"${key}\" of \"${value}\""
+            check_variable_is_correct ${key}
         fi
 
-        if [ "$( echo "${argument}" | grep --count "ep_num=" )" -eq "1" ]
-        then
-            ep_num="$( echo "${argument}" | sed 's/^.*ep_num=//g' | awk '{print $1}' )"
-            echo_debug "Using supplied 'ep_num' of \"${ep_num}\""
-        fi
-
     done
+fi
 
 
 # program_checks # We know that all the programs and variables are set
 #
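
Illustration: with the rewritten loop, every key=value argument is assigned via eval and then validated, so working_dir, ep_num, and other known variables can be overridden from the command line. A hypothetical invocation; the script name and path are placeholders:

    ./process_episode.bash working_dir=/home/ken/hpr/hpr4371/ ep_num=4371
    # for each argument this effectively runs, e.g.:
    #   eval "working_dir=/home/ken/hpr/hpr4371/"
    #   check_variable_is_correct working_dir
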
@@ -1604,11 +1666,13 @@ done
 #
 # register_assets
 
-copy_files_to_origin_server
+# copy_files_to_origin_server
+
+#check_variable_is_correct working_dir ep_num shownotes_edited
 
 # copy_derived_files_to_borg
 
-# upload_to_the_internet_archive
+upload_to_the_internet_archive
 
 #for i in {4301..4305};do echo ${i};/home/ken/sourcecode/personal/bin/hpr-check-ccdn-links.bash ${i};done
 