WordPress: Adding last modified date to Twenty Seventeen

How to add a post's last modified date to the Twenty Seventeen theme. Note that edits made directly to the theme's files are lost when the theme updates, so use a child theme if you want the change to survive updates.

Step 1.) Add custom styles for the date text and separator
/wp-content/themes/twentyseventeen/style.css

/* Change to fit your needs */
.entry-date.published {
    color: #e1e1e1;
}
.modified-date {
    color: #9ecbff;
    margin-left: 8px;
    font-style: italic;
}

/* Separator between the published and modified dates (output by the code in Step 2) */
.date-separator {
    margin: 0 5px;
    color: #666;
}


Step 2.) Modify function twentyseventeen_time_link()
/wp-content/themes/twentyseventeen/inc/template-tags.php

if ( ! function_exists( 'twentyseventeen_time_link' ) ) :

function twentyseventeen_time_link() {
    $modified_time = get_the_modified_time( 'U' );
    $posted_time   = get_the_time( 'U' );

    if ( $modified_time > $posted_time ) {
        // Published date, a separator, and the last modified date.
        $time_string = sprintf(
            '<time class="entry-date published" datetime="%1$s">%2$s</time><span class="date-separator">&bull;</span><span class="modified-date">Updated: %3$s</span>',
            esc_attr( get_the_date( DATE_W3C ) ),
            get_the_date(),
            get_the_modified_date()
        );
    } else {
        // Published date only.
        $time_string = sprintf(
            '<time class="entry-date published" datetime="%1$s">%2$s</time>',
            esc_attr( get_the_date( DATE_W3C ) ),
            get_the_date()
        );
    }

    // Wrap the time string in a link, and preface it with 'Posted on'.
    return sprintf(
        '<span class="screen-reader-text">%1$s</span><a href="%2$s" rel="bookmark">%3$s</a>',
        _x('Posted on', 'post date', 'twentyseventeen'),
        esc_url(get_permalink()),
        $time_string
    );
}
endif;
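
For a post that has been edited after publishing, the function would return markup along these lines (the URL and dates are illustrative, and line breaks are added here for readability):

<span class="screen-reader-text">Posted on</span>
<a href="https://example.com/sample-post/" rel="bookmark">
    <time class="entry-date published" datetime="2024-06-01T10:00:00+00:00">June 1, 2024</time>
    <span class="date-separator">&bull;</span>
    <span class="modified-date">Updated: June 15, 2024</span>
</a>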

Python: Backup files to Google Drive using the API

Purpose:
Back up a file from a hosting server to Google Drive as a contingency plan.

Update: Uses OAuth user credentials, because a Google Service Account has limitations and will eventually be denied access.

Prerequisites:
A Google OAuth token (the oauth-token.json file referenced below) is required for this method; a one-time script to generate it is sketched after Step 1.

Step 1.) Install python and necessary packages.

pip install --upgrade google-api-python-client google-auth-httplib2 google-auth
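
If you don't already have oauth-token.json, a one-time script like the following can generate it. This is a minimal sketch, assuming a "Desktop app" OAuth client whose client_secret.json has been downloaded from the Google Cloud Console; it also needs the google-auth-oauthlib package, which is not in the pip command above. Run it on a machine with a browser, then copy the resulting oauth-token.json into SCRIPT_DIRECTORY.

# generate_token.py -- hypothetical one-time helper, not part of the backup script
from google_auth_oauthlib.flow import InstalledAppFlow

SCOPES = ['https://www.googleapis.com/auth/drive.file']

# Opens a browser window for user consent and returns OAuth credentials.
flow = InstalledAppFlow.from_client_secrets_file('client_secret.json', SCOPES)
creds = flow.run_local_server(port=0)

# Save the token in the format the backup script reads back.
with open('oauth-token.json', 'w') as token:
    token.write(creds.to_json())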

Step 2.) Set Variables
SCRIPT_DIRECTORY = "/home/user/docker_planka"
TOKEN_FILE = os.path.join(SCRIPT_DIRECTORY, "oauth-token.json")
DRIVE_FOLDER_ID = "googleDriveFolderID"
FILE_SUFFIX = "filename.tgz" (used to detect the file for upload and deletion)

The script will upload the single newest matching file and remove the old remote file, as long as the names are not identical.

import os
import glob
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from googleapiclient.errors import HttpError
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
import logging
import time

# =================================================================================
# SCRIPT CONFIGURATION
# =================================================================================
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# --- Your Settings ---
SCRIPT_DIRECTORY = "/home/btro/docker_planka"
os.chdir(SCRIPT_DIRECTORY)

TOKEN_FILE = os.path.join(SCRIPT_DIRECTORY, "oauth-token.json")
DRIVE_FOLDER_ID = "1NXqKQi69mOx3FpgnXmcdjhZfjE-xQfaL"
FILE_SUFFIX = "planka.tgz"
SCOPES = ['https://www.googleapis.com/auth/drive.file']
# =================================================================================


def get_drive_credentials():
    """Load OAuth credentials from TOKEN_FILE, refreshing and re-saving them if expired."""
    creds = None
    if os.path.exists(TOKEN_FILE):
        creds = Credentials.from_authorized_user_file(TOKEN_FILE, SCOPES)
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            try:
                logging.info("Credentials expired. Refreshing token...")
                creds.refresh(Request())
                with open(TOKEN_FILE, 'w') as token:
                    token.write(creds.to_json())
            except Exception as e:
                logging.error(f"FATAL: Could not refresh token. Please regenerate it. Error: {e}")
                return None
        else:
            logging.error(f"FATAL: Token file '{TOKEN_FILE}' is invalid or missing. Please generate a new one.")
            return None
    return creds


def get_latest_backup_file(suffix):
    """Return the most recently modified local file ending with the given suffix."""
    logging.info(f"Finding latest local file ending with '{suffix}'...")
    files = glob.glob(f"*{suffix}")
    if not files:
        raise FileNotFoundError(f"No local files matching '*{suffix}' found.")
    latest_file = max(files, key=os.path.getmtime)
    logging.info(f"Found local file: {latest_file}")
    return latest_file


def upload_to_drive(service, local_file_path, drive_folder_id):
    """Upload the local file into the given Drive folder and return its (id, name)."""
    filename = os.path.basename(local_file_path)
    logging.info(f"Uploading '{filename}'...")
    file_metadata = {'name': filename, 'parents': [drive_folder_id]}
    media = MediaFileUpload(local_file_path, resumable=True)
    file = service.files().create(
        body=file_metadata,
        media_body=media,
        fields='id, name'
    ).execute()
    logging.info(f"Successfully uploaded '{file.get('name')}' (ID: {file.get('id')})")
    return file.get('id'), file.get('name')


def delete_old_backups_in_drive(service, folder_id, suffix, exclude_name=None):
    """Delete every matching backup in the Drive folder except exclude_name."""
    logging.info(f"Starting cleanup. Will exclude any file named: '{exclude_name}'")

    # Query for all backups matching the suffix.
    query = f"'{folder_id}' in parents and name contains '{suffix}' and trashed = false"
    try:
        results = service.files().list(q=query, fields="files(id, name)").execute()
        files = results.get('files', [])

        if not files:
            logging.info("No remote files found to check for deletion.")
            return

        logging.info(f"Found {len(files)} remote backup file(s) to check.")
        for file in files:
            if file['name'] != exclude_name:
                logging.info(f"DELETING old backup: '{file['name']}' (ID: {file['id']})")
                service.files().delete(fileId=file['id']).execute()
            else:
                logging.info(f"KEEPING current backup: '{file['name']}'")
    except HttpError as error:
        logging.error(f"An error occurred during cleanup: {error}")


def main():
    logging.info("====== Starting Backup Job (Using Your Proven Logic) ======")
    credentials = get_drive_credentials()
    if not credentials:
        return

    try:
        service = build('drive', 'v3', credentials=credentials)

        # Step 1: Find latest backup file
        backup_file = get_latest_backup_file(FILE_SUFFIX)

        # Step 2: Upload the latest backup
        uploaded_file_id, uploaded_file_name = upload_to_drive(service, backup_file, DRIVE_FOLDER_ID)

        # Wait before cleanup: this gives the Google Drive API time to update
        # its search index so the delete query below can see the new file.
        logging.info("Waiting 10 seconds for the API to index changes before cleanup...")
        time.sleep(10)

        # Step 3: Delete old backups from Drive, except the just-uploaded one
        delete_old_backups_in_drive(service, DRIVE_FOLDER_ID, FILE_SUFFIX, exclude_name=uploaded_file_name)

    except FileNotFoundError as e:
        logging.error(f"Operation failed: {e}")
    except Exception as e:
        logging.error(f"An unexpected error occurred: {e}", exc_info=True)

    logging.info("====== Backup Job Finished ======")


if __name__ == "__main__":
    main()

Docker MongoDB: mongodump backup

This is a script that runs mongodump inside a running MongoDB container and backs the dump up to the local server as a tgz file. It:
1.) Runs mongodump
2.) Creates a tgz file in the directory where the script is run
3.) Removes any old tgz files in the same directory

// My MongoDB container runs in Docker Swarm mode so that it can use Docker Secrets.

#!/bin/bash

set -euo pipefail

# Create a temp directory for the backup
TMPDIR="$(mktemp -d)"
trap "rm -rf $TMPDIR" EXIT

# Get the running MongoDB container ID
CONTAINER_ID=$(docker ps --filter "name=btc_mongodb" --format "{{.ID}}" | head -n 1)
if [ -z "$CONTAINER_ID" ]; then
  echo "No running container found with 'btc_mongodb' in its name."
  exit 1
fi

# Retrieve MongoDB credentials from secrets in the container
USERNAME=$(docker exec "$CONTAINER_ID" cat /run/secrets/mongo_root_username)
PASSWORD=$(docker exec "$CONTAINER_ID" cat /run/secrets/mongo_root_password)

# Run mongodump inside the container, outputting to /tmp
docker exec "$CONTAINER_ID" mongodump \
  --uri="mongodb://$USERNAME:$PASSWORD@localhost:27017/" \
  --out /tmp/mongodump

# Copy the mongodump from the container to the host's temp directory,
# then remove the dump from the container
docker cp "$CONTAINER_ID":/tmp/mongodump "$TMPDIR"
docker exec "$CONTAINER_ID" rm -rf /tmp/mongodump

# Create the archive in the current directory
BACKUP_FILENAME="$(date +%Y-%m-%d-%H%M)-btcMongo.tgz"
tar -czf "$BACKUP_FILENAME" -C "$TMPDIR/mongodump" .

# Remove any previous *-btcMongo.tgz except the newly created one
for f in *-btcMongo.tgz; do
  if [[ "$f" != "$BACKUP_FILENAME" ]]; then
    rm -f -- "$f"
  fi
done

echo "Backup complete: $BACKUP_FILENAME"