WordPress: Adding last modified date to Twenty Seventeen

How to add Post’s last modified date to the Twenty Seventeen theme.

Step 1.) Add custom style for text color
/wp-content/themes/twentyseventeen/style.css

/* Styles for the published/modified dates output by the customized
   twentyseventeen_time_link() (see inc/template-tags.php below). */

/* Change to fit your needs */
.entry-date.published {
    color: #e1e1e1;
}
/* The "Updated: <date>" note shown when a post was modified after publishing */
.modified-date {
    color: #9ecbff;
    margin-left: 8px; /* gap between published date and the updated note */
    font-style: italic;
}

/* Add a subtle separator */
/* NOTE(review): the PHP snippet below never emits an element with class
   "date-separator" — confirm this rule is actually used before keeping it. */
.date-separator {
    margin: 0 5px;
    color: #666;
}


Step 2.) Modify function twentyseventeen_time_link()
/wp-content/themes/twentyseventeen/inc/template-tags.php

if ( ! function_exists( 'twentyseventeen_time_link' ) ) :

/**
 * Gets a nicely formatted string for the published date, appending the
 * last modified date when the post was updated after publishing.
 *
 * Wraps the date(s) in a permalink and prefaces them with screen-reader
 * text, matching the parent theme's behavior.
 *
 * @return string HTML markup for the post date(s).
 */
function twentyseventeen_time_link() {
    $modified_time = get_the_modified_time('U');
    $posted_time = get_the_time('U');

    if ($modified_time > $posted_time) {
        // Published + modified date. The machine-readable datetime attributes
        // (required for valid <time> semantics) are kept, and the "Updated:"
        // label goes through the theme's text domain so it can be translated.
        $time_string = sprintf(
            '<time class="entry-date published" datetime="%1$s">%2$s</time><span class="modified-date">%3$s <time class="updated" datetime="%4$s">%5$s</time></span>',
            esc_attr(get_the_date(DATE_W3C)),
            get_the_date(),
            esc_html__('Updated:', 'twentyseventeen'),
            esc_attr(get_the_modified_date(DATE_W3C)),
            get_the_modified_date()
        );
    } else {
        // Format for published date only.
        $time_string = sprintf(
            '<time class="entry-date published" datetime="%1$s">%2$s</time>',
            esc_attr(get_the_date(DATE_W3C)),
            get_the_date()
        );
    }

    // Wrap the time string in a link, and preface it with 'Posted on'.
    return sprintf(
        '<span class="screen-reader-text">%1$s</span><a href="%2$s" rel="bookmark">%3$s</a>',
        _x('Posted on', 'post date', 'twentyseventeen'),
        esc_url(get_permalink()),
        $time_string
    );
}
endif;

Python: Backup files to Google Drive using API

Purpose:
Backing up a file from a hosting server to Google Drive as a contingency plan.

Prerequisites:
Google service account key. (Create a Google Service Account and a key for API access)

Step 1.) Install python and necessary packages.

pip install --upgrade google-api-python-client google-auth-httplib2 google-auth

Step 2.) Set Variables
SCRIPT_DIRECTORY = "/home/user/docker_planka"
SERVICE_ACCOUNT_FILE = "/home/user/GoogleDrive-serviceAccount.json"
DRIVE_FOLDER_ID = "googleDriveFolderID"
FILE_SUFFIX = "filename.tgz" (Used to detect the file for upload and deletion)

The script will upload the single file and remove the old file as long as the names are not identical.

import os
import glob
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from google.oauth2 import service_account

# ====== CONFIGURATION =====
# Change working directory for cron compatibility
# NOTE(review): this chdir runs at import time, so every relative path below
# (the glob in get_latest_backup_file, the upload path) is resolved against
# SCRIPT_DIRECTORY no matter where the script is launched from.
SCRIPT_DIRECTORY = "/home/user/docker_planka"
os.chdir(SCRIPT_DIRECTORY)

# Google Service Account Key
SERVICE_ACCOUNT_FILE = "/home/user/GoogleDrive-serviceAccount.json"

# Google Drive folder ID
# (presumably this folder is shared with the service account's email so it
# can create/delete files there — verify in the Drive sharing settings)
DRIVE_FOLDER_ID = "1NXqKQi69mOx3FpgnXmcdjhZfjE-xQfaL"

# File name's ending (suffix)
FILE_SUFFIX = "planka.tgz"

# drive.file scope: per the Drive API docs, limits access to files this
# app created itself — it cannot see or delete other files in the folder.
SCOPES = ['https://www.googleapis.com/auth/drive.file']

def get_latest_backup_file(suffix):
    """Return the most recently modified file in the CWD ending with *suffix*.

    Args:
        suffix: File name ending to match (e.g. "planka.tgz").

    Returns:
        The name of the newest matching file.

    Raises:
        FileNotFoundError: If no file in the current directory matches.
    """
    # Sort matches oldest-to-newest by modification time; the newest is last.
    candidates = sorted(glob.glob(f"*{suffix}"), key=os.path.getmtime)
    if not candidates:
        raise FileNotFoundError(f"No files matching '*{suffix}' found in current directory.")
    return candidates[-1]

def upload_to_drive(service, local_file_path, drive_folder_id):
    """Upload a local file to Google Drive via a resumable upload.

    Args:
        service: Authenticated Drive v3 service object.
        local_file_path: Path of the file to upload.
        drive_folder_id: Destination folder ID; a falsy value uploads to
            the service account's root ("My Drive").

    Returns:
        Tuple of (file_id, file_name) for the newly created Drive file.
    """
    metadata = {'name': os.path.basename(local_file_path)}
    if drive_folder_id:
        # Place the file inside the requested Drive folder.
        metadata['parents'] = [drive_folder_id]

    upload = MediaFileUpload(local_file_path, resumable=True)
    created = service.files().create(
        body=metadata,
        media_body=upload,
        fields='id, name'
    ).execute()

    print(f"Uploaded '{local_file_path}' as '{created.get('name')}' (ID: {created.get('id')})")
    return created.get('id'), created.get('name')

def delete_old_backups_in_drive(service, folder_id, suffix, exclude_name=None):
    """Delete Drive files in *folder_id* whose names contain *suffix*.

    Files named exactly *exclude_name* (normally the backup that was just
    uploaded) are kept.

    Args:
        service: Authenticated Drive v3 service object.
        folder_id: Drive folder to search in.
        suffix: Substring to match in file names (Drive 'name contains').
        exclude_name: File name to keep, or None to delete every match.
    """
    query = f"'{folder_id}' in parents and name contains '{suffix}' and trashed = false"
    # files().list is paginated (default 100 results per page); without
    # following nextPageToken, older backups past the first page would
    # never be deleted.
    page_token = None
    while True:
        results = service.files().list(
            q=query,
            fields="nextPageToken, files(id, name)",
            pageToken=page_token,
        ).execute()
        for file in results.get('files', []):
            if file['name'] != exclude_name:
                print(f"Deleting old backup '{file['name']}' (ID: {file['id']}) from Google Drive.")
                service.files().delete(fileId=file['id']).execute()
        page_token = results.get('nextPageToken')
        if page_token is None:
            break

def main():
    """Upload the newest local backup to Drive, then prune older Drive copies."""
    creds = service_account.Credentials.from_service_account_file(
        SERVICE_ACCOUNT_FILE, scopes=SCOPES
    )
    drive = build('drive', 'v3', credentials=creds)

    # Locate the newest local backup, push it to Drive, then remove every
    # matching Drive file except the one just uploaded.
    latest_backup = get_latest_backup_file(FILE_SUFFIX)
    _, uploaded_name = upload_to_drive(drive, latest_backup, DRIVE_FOLDER_ID)
    delete_old_backups_in_drive(drive, DRIVE_FOLDER_ID, FILE_SUFFIX, exclude_name=uploaded_name)

if __name__ == "__main__":
    main()

Docker MongoDB: mongodump backup

This is a script to run mongodump in a running MongoDB container, and backup to the local server as a tgz file.
1.) Runs mongodump
2.) Creates a tgz file where the script is run
3.) Removes any old tgz files that are in the same directory

// My MongoDB container is running in Docker Swarm mode to use Docker Secrets.

#!/bin/bash
#
# Dump a MongoDB database running in a Docker (Swarm) container and archive
# it locally as <timestamp>-btcMongo.tgz, pruning older archives.

set -euo pipefail

# Create a temp directory for the backup.
# Single-quoted trap so $TMPDIR is expanded when the trap fires (not at
# definition time) and the path is quoted against whitespace.
TMPDIR="$(mktemp -d)"
trap 'rm -rf "$TMPDIR"' EXIT

# Get the running MongoDB container ID (first match on name).
CONTAINER_ID=$(docker ps --filter "name=btc_mongodb" --format "{{.ID}}" | head -n 1)
if [ -z "$CONTAINER_ID" ]; then
  echo "No running container found with 'btc_mongodb' in its name."
  exit 1
fi

# Retrieve MongoDB credentials from secrets in the container.
USERNAME=$(docker exec "$CONTAINER_ID" cat /run/secrets/mongo_root_username)
PASSWORD=$(docker exec "$CONTAINER_ID" cat /run/secrets/mongo_root_password)

# Run mongodump inside the container, outputting to /tmp.
# NOTE(review): the password is visible in the process list via --uri while
# mongodump runs; consider a --config file or environment variable instead.
docker exec "$CONTAINER_ID" mongodump \
  --uri="mongodb://$USERNAME:$PASSWORD@localhost:27017/" \
  --out /tmp/mongodump

# Copy the mongodump from the container to the host's temp directory.
docker cp "$CONTAINER_ID":/tmp/mongodump "$TMPDIR"

# Remove the dump inside the container so it doesn't accumulate in /tmp.
docker exec "$CONTAINER_ID" rm -rf /tmp/mongodump

# Create the archive in the current directory.
# Fixed date format: %Y-%m-%d-%H%M (the original %Y-%m%d dropped the dash
# between month and day, producing names like 2024-0115-...).
BACKUP_FILENAME="$(date +%Y-%m-%d-%H%M)-btcMongo.tgz"
tar -czf "$BACKUP_FILENAME" -C "$TMPDIR/mongodump" .

# Remove any previous *-btcMongo.tgz except the newly created one.
for f in *-btcMongo.tgz; do
  [ -e "$f" ] || continue  # skip the literal pattern if nothing matched
  if [[ "$f" != "$BACKUP_FILENAME" ]]; then
    rm -f -- "$f"
  fi
done

echo "Backup complete: $BACKUP_FILENAME"