Added SQL rss feed, Fixed regex for image, and scp files to hpr server
@@ -18,6 +18,7 @@ last_update_txt="${sql_save_dir}/last_update.txt"
 hpr_full_sql="${sql_save_dir}/hpr_hpr_full.sql"
 hpr_site_sql="${sql_save_dir}/hpr.sql"
 full_mysqldump_sql="${sql_save_dir}/mysqldump.sql"
+hpr_database_file="hackerpublicradio.org/public_html/hpr.sql"
 
 sync_delay_seconds="300" # 5 minutes
 
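
Note: the new hpr_database_file value is a path without a leading slash, which lets the same string be resolved against $HOME for the local copy on the web host and against the remote login's home directory when used as an rsync/scp destination. Both usages appear in the next hunk:

    cp -v "${hpr_site_sql}" "$HOME/${hpr_database_file}"                      # on whp01: copies into the local public_html
    rsync -av --partial --progress ${hpr_site_sql} hpr:${hpr_database_file}   # elsewhere: same relative path under the remote home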
@@ -143,14 +144,51 @@ fi
 mysqldump --defaults-file="${credential_file}" --tz-utc --add-drop-database --databases hpr_hpr> "${full_mysqldump_sql}"
 tail "${full_mysqldump_sql}" | grep 'Dump completed on'
 
+echo "<?xml version=\"1.0\" encoding=\"UTF-8\"?>
+<rss xmlns:atom=\"https://www.w3.org/2005/Atom\" version=\"2.0\">
+<channel>
+<title>Hacker Public Radio ~ Database Feed</title>
+<link>http://hackerpublicradio.org/about.html</link>
+<description>This Feed provides information the latest version of the HPR database.</description>
+<language>en-us</language>
+<copyright>Creative Commons Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) License</copyright>
+<managingEditor>feedback.nospam@nospam.hackerpublicradio.org (HPR Feedback)</managingEditor>
+<webMaster>admin.nospam@nospam.hackerpublicradio.org (HPR Webmaster)</webMaster>
+<generator>https://repo.anhonesthost.net/HPR/hpr-tools/src/branch/main/workflow/hpr_db_backup.bash</generator>
+<docs>https://www.rssboard.org/rss-specification</docs>
+<ttl>15</ttl>
+<image>
+<url>http://hackerpublicradio.org/images/hpr_feed_small.png</url>
+<title>Hacker Public Radio ~ Database Feed</title>
+<link>http://hackerpublicradio.org/about.html</link>
+<description>The Hacker Public Radio Old Microphone Logo</description>
+<height>164</height>
+<width>144</width>
+</image>
+<atom:link href=\"https://hackerpublicradio.org/hpr.sql.rss\" rel=\"self\" type=\"application/rss+xml\"/>
+<pubDate>$( date --utc --rfc-email )</pubDate>
+<item>
+<title>Export of the Public mariadb SQL for ${live_db_last_update_iso8601}</title>
+<author>admin.nospam@nospam.hackerpublicradio.org (Janitors)</author>
+<link>http://hackerpublicradio.org/hpr.sql#${live_db_last_update_iso8601}</link>
+<description/>
+<pubDate>$( date --utc --rfc-email )</pubDate>
+<enclosure url=\"http://hackerpublicradio.org/hpr.sql#${live_db_last_update_iso8601}\" length=\"$( ls -al "${hpr_site_sql}" | awk '{print $5}' )\" type=\"application/sql\"/>
+<guid isPermaLink=\"false\">sha1sum:$( sha1sum "${hpr_site_sql}" | awk '{print $1}' ),md5sum:$( md5sum "${hpr_site_sql}" | awk '{print $1}' )</guid>
+</item>
+</channel>
+</rss>" > "${hpr_site_sql}.rss"
 
 if [ $HOSTNAME = "whp01.cloud-hosting.io" ]
 then
-cp -v "${hpr_site_sql}" $HOME/hackerpublicradio.org/public_html/hpr.sql
+cp -v "${hpr_site_sql}" "$HOME/${hpr_database_file}"
+cp -v "${hpr_site_sql}.rss" "$HOME/${hpr_database_file}.rss"
 else
-rsync -av --partial --progress ${hpr_site_sql} hpr:hackerpublicradio.org/public_html/hpr.sql
+rsync -av --partial --progress ${hpr_site_sql} hpr:${hpr_database_file}
+rsync -av --partial --progress ${hpr_site_sql}.rss hpr:${hpr_database_file}.rss
 fi
 
 echo "${live_db_last_update_iso8601}" > "${last_update_txt}"
 
+echo "Finished export of \"${live_db_last_update_iso8601}\""
 
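
Note on the generated feed: the enclosure length comes from the fifth column of ls -al (the file size in bytes), and the <guid> packs the SHA-1 and MD5 of the dump so feed readers treat each new export as a new item. A hedged sketch of equivalent manual checks (none of these lines are part of the commit):

    stat --format='%s' "${hpr_site_sql}"          # same byte count as: ls -al "${hpr_site_sql}" | awk '{print $5}'
    sha1sum "${hpr_site_sql}" | awk '{print $1}'  # the hash embedded in <guid>
    xmllint --noout "${hpr_site_sql}.rss"         # optional: confirm the generated feed is well-formed XML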
@@ -869,7 +869,7 @@ function extract_images_brute_force() {
 # Download referenced images
 image_count_external="1"
 
-for image in $( grep --color=never --perl-regexp --only-matching '<img.*src.*http.*>' "${shownotes_html}" | awk -F 'src=' '{print $2}' | awk -F '"' '{print $2}' )
+for image in $( grep --color=never --perl-regexp --only-matching '<img.*src.*http.*://.*>' "${shownotes_html}" | awk -F 'src=' '{print $2}' | awk -F '"' '{print $2}' )
 do
 this_image="${working_dir}/hpr${ep_num}_image_ext_${image_count_external}"
 wget "${image}" --output-document=${this_image}
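
Note: the added "://" requirement means an <img> tag is only treated as an external image when its markup contains a URL scheme separator, so local files whose names merely contain "http" no longer trigger a wget. A quick illustration with a made-up input line (bash):

    line='<img src="httpd_logo.png" alt="local file">'
    grep --perl-regexp --only-matching '<img.*src.*http.*>' <<< "$line"        # old pattern: matches, a false positive
    grep --perl-regexp --only-matching '<img.*src.*http.*://.*>' <<< "$line"   # new pattern: no match
    grep --perl-regexp --only-matching '<img.*src.*http.*://.*>' <<< '<img src="https://example.org/pic.png">'   # absolute URLs still match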
@@ -1967,7 +1967,7 @@ function register_assets() {
 }
 
 #################################################
-# Register the assets with the hpr database
+# Copy the files to the origin server
 
 function copy_files_to_origin_server() {
 
@@ -2013,6 +2013,48 @@ function copy_files_to_origin_server() {
 done
 }
 
+#################################################
+# Copy the images to the hpr server
+
+function copy_files_to_hpr_server() {
+
+echo_debug "Copying the files to the hpr server. copy_files_to_hpr_server()"
+
+check_variable_is_correct working_dir ep_num
+
+# Get the file list from the HPR db
+if [ -z "${files_json}" ]
+then
+files_json="${working_dir}/hpr${ep_num}_files.json"
+fi
+
+if [ -s "${files_json}" ]
+then
+echo_debug "Removing stale \"files_json\" file \"${files_json}\"."
+rm -v "${files_json}"
+fi
+
+echo_debug "Getting the \"files_json\" file from the HPR site."
+if [ "$( curl --silent --netrc --write-out '%{http_code}' https://hub.hackerpublicradio.org/cms/assets.php?id=${ep_num} --output "${files_json}" )" != 200 ]
+then
+echo_error "The Episode hpr${ep_num} has not been posted"
+fi
+check_variable_is_correct files_json
+
+ssh hpr -t "mkdir -v hackerpublicradio.org/public_html/eps/hpr${ep_num}"
+
+for this_file in $( jq --raw-output ".hpr${ep_num} | keys | @tsv" "${files_json}" )
+do
+skip_file="$( echo "$this_file" | grep --count --perl-regexp "hpr${ep_num}.flac|hpr${ep_num}.mp3|hpr${ep_num}.ogg|hpr${ep_num}.opus|hpr${ep_num}.spx|hpr${ep_num}.srt|hpr${ep_num}.txt|hpr${ep_num}.wav" )"
+if [ "${skip_file}" -eq "1" ]
+then
+continue
+fi
+scp "$this_file" hpr:hackerpublicradio.org/public_html/eps/hpr${ep_num}/
+done
+
+}
+
 #################################################
 # Send the derived files to the server borg to be sent to borg
 
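
Note: copy_files_to_hpr_server() loops over the keys of the JSON returned by assets.php and scp's everything except the audio/transcript files matched by the skip_file grep. The payload of assets.php is not shown in this diff; assuming it has the shape implied by the jq filter, the key extraction works roughly like this (episode number and file names below are made up):

    echo '{ "hpr1234": { "hpr1234.png": {}, "hpr1234_chart.svg": {} } }' > /tmp/hpr1234_files.json
    jq --raw-output '.hpr1234 | keys | @tsv' /tmp/hpr1234_files.json
    # output: hpr1234.png	hpr1234_chart.svg
    # the unquoted $( ... ) in the for loop word-splits this into one file name per iteration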
@@ -2334,6 +2376,8 @@ copy_files_to_origin_server
 
 check_variable_is_correct working_dir ep_num shownotes_edited
 
+copy_files_to_hpr_server
+
 copy_derived_files_to_borg
 
 create_item_on_the_internet_archive