Purpose: 
Backing up a file from a hosting server to google drive as a contingency plan.
Update: Uses OAuth because a Google Service Account has limitations and will be denied access after a while.
Prerequisites: 
Google OAuth token is required for this method.
Step 1.) Install python and necessary packages.
pip install --upgrade google-api-python-client google-auth-httplib2 google-auth
Step 2.) Set Variables
SCRIPT_DIRECTORY = "/home/user/docker_planka"
TOKEN_FILE = os.path.join(SCRIPT_DIRECTORY, "oauth-token.json")
DRIVE_FOLDER_ID = "googleDriveFolderID"
FILE_SUFFIX = "filename.tgz" (Used to detect the file for upload and deletion)
The script will upload the single file and remove the old file as long as the names are not identical.
import os
import glob
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from googleapiclient.errors import HttpError
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
import logging
import time
# =================================================================================
# SCRIPT CONFIGURATION
# =================================================================================
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# --- Your Settings ---
# Directory that holds the backup archives and the OAuth token file.
SCRIPT_DIRECTORY = "/home/btro/docker_planka"
# NOTE(review): chdir at import time makes all relative paths (the glob in
# get_latest_backup_file) resolve against SCRIPT_DIRECTORY -- intentional,
# but it is a module-level side effect.
os.chdir(SCRIPT_DIRECTORY)
# Cached OAuth user credentials (JSON produced by the initial auth flow).
TOKEN_FILE = os.path.join(SCRIPT_DIRECTORY, "oauth-token.json")
# Destination Google Drive folder ID for uploads.
DRIVE_FOLDER_ID = "1NXqKQi69mOx3FpgnXmcdjhZfjE-xQfaL"
# Filename suffix used both to find the local archive and to match remote backups.
FILE_SUFFIX = "planka.tgz"
# drive.file scope: access limited to files this app created or opened.
SCOPES = ['https://www.googleapis.com/auth/drive.file']
# =================================================================================
def get_drive_credentials():
    """Load OAuth credentials from TOKEN_FILE, refreshing them when expired.

    Returns the Credentials object on success, or None when the token file
    is missing/invalid or the refresh attempt fails.
    """
    creds = None
    if os.path.exists(TOKEN_FILE):
        creds = Credentials.from_authorized_user_file(TOKEN_FILE, SCOPES)

    # Already usable -- nothing more to do.
    if creds and creds.valid:
        return creds

    # An expired token can only be renewed when a refresh token is present.
    if not (creds and creds.expired and creds.refresh_token):
        logging.error(f"FATAL: Token file '{TOKEN_FILE}' is invalid or missing. Please generate a new one.")
        return None

    try:
        logging.info("Credentials expired. Refreshing token...")
        creds.refresh(Request())
        # Persist the refreshed token so subsequent runs can reuse it.
        with open(TOKEN_FILE, 'w') as token:
            token.write(creds.to_json())
    except Exception as e:
        logging.error(f"FATAL: Could not refresh token. Please regenerate it. Error: {e}")
        return None
    return creds
def get_latest_backup_file(suffix):
    """Return the most recently modified file in the CWD ending with *suffix*.

    Raises:
        FileNotFoundError: if no file matching ``*{suffix}`` exists.
    """
    logging.info(f"Finding latest local file ending with '{suffix}'...")
    candidates = glob.glob(f"*{suffix}")
    if not candidates:
        raise FileNotFoundError(f"No local files matching '*{suffix}' found.")
    # Explicit scan for the newest mtime (equivalent to max(..., key=getmtime)).
    newest = None
    newest_mtime = None
    for name in candidates:
        mtime = os.path.getmtime(name)
        if newest is None or mtime > newest_mtime:
            newest = name
            newest_mtime = mtime
    logging.info(f"Found local file: {newest}")
    return newest
def upload_to_drive(service, local_file_path, drive_folder_id):
    """Upload one local file into the given Google Drive folder.

    Args:
        service: authorized Drive v3 service object (googleapiclient).
        local_file_path: path of the file to upload.
        drive_folder_id: ID of the destination Drive folder.

    Returns:
        Tuple of (file_id, file_name) for the newly created Drive file.
    """
    filename = os.path.basename(local_file_path)
    # BUG FIX: this log line previously printed the literal text '(unknown)'
    # instead of interpolating the actual filename.
    logging.info(f"Uploading '{filename}'...")
    file_metadata = {'name': filename, 'parents': [drive_folder_id]}
    # resumable=True lets large archives survive transient network hiccups.
    media = MediaFileUpload(local_file_path, resumable=True)
    file = service.files().create(
        body=file_metadata,
        media_body=media,
        fields='id, name'
    ).execute()
    logging.info(f"Successfully uploaded '{file.get('name')}' (ID: {file.get('id')})")
    return file.get('id'), file.get('name')
def delete_old_backups_in_drive(service, folder_id, suffix, exclude_name=None):
    """Delete every backup in the Drive folder matching *suffix*, except one.

    Args:
        service: authorized Drive v3 service object (googleapiclient).
        folder_id: Drive folder ID to search.
        suffix: substring used to match backup filenames.
        exclude_name: exact filename to keep (the just-uploaded backup).

    HttpErrors from the Drive API are logged, not raised, so a failed cleanup
    never aborts a successful upload.
    """
    logging.info(f"Starting cleanup. Will exclude any file named: '{exclude_name}'")
    # Query for all backups matching the suffix.
    query = f"'{folder_id}' in parents and name contains '{suffix}' and trashed = false"
    try:
        # ROBUSTNESS FIX: files().list returns at most one page (default 100
        # entries). Without pageToken handling, older backups beyond the first
        # page would never be seen and thus never deleted.
        files = []
        page_token = None
        while True:
            results = service.files().list(
                q=query,
                fields="nextPageToken, files(id, name)",
                pageToken=page_token,
            ).execute()
            files.extend(results.get('files', []))
            page_token = results.get('nextPageToken')
            if not page_token:
                break
        if not files:
            logging.info("No remote files found to check for deletion.")
            return
        logging.info(f"Found {len(files)} remote backup file(s) to check.")
        for file in files:
            if file['name'] != exclude_name:
                logging.info(f"DELETING old backup: '{file['name']}' (ID: {file['id']})")
                service.files().delete(fileId=file['id']).execute()
            else:
                logging.info(f"KEEPING current backup: '{file['name']}'")
    except HttpError as error:
        logging.error(f"An error occurred during cleanup: {error}")
def main():
    """Run the full backup cycle: authenticate, upload, then prune old copies."""
    logging.info("====== Starting Backup Job (Using Your Proven Logic) ======")
    creds = get_drive_credentials()
    if not creds:
        return
    try:
        drive = build('drive', 'v3', credentials=creds)
        # Locate the newest local archive, then push it to Drive.
        latest_local = get_latest_backup_file(FILE_SUFFIX)
        _uploaded_id, uploaded_name = upload_to_drive(drive, latest_local, DRIVE_FOLDER_ID)
        # Give the Drive search index a moment to catch up, so the cleanup
        # query below can see the file that was just uploaded.
        logging.info("Waiting 10 seconds for API to process changes before cleanup...")
        time.sleep(10)
        # Remove every other remote backup, keeping only the new upload.
        delete_old_backups_in_drive(drive, DRIVE_FOLDER_ID, FILE_SUFFIX, exclude_name=uploaded_name)
    except FileNotFoundError as e:
        logging.error(f"Operation failed: {e}")
    except Exception as e:
        logging.error(f"An unexpected error occurred: {e}", exc_info=True)
    logging.info("====== Backup Job Finished ======")
# Run the backup only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
