automation

Bash: back up a PostgreSQL database and push it to S3

SQL backup to S3 and GitHub

Prerequisites: AWS CLI configured; pg_dump and gzip available on the host.

#!/bin/bash

# ==========================
# PostgreSQL Backup to S3
# ==========================
# Dumps a PostgreSQL database, gzips it, uploads the archive to S3,
# and prunes local backups older than RETENTION_DAYS.
# Prerequisites: pg_dump, gzip, and a configured AWS CLI.

# Fail fast: -e exits on error, -u flags unset variables, and
# -o pipefail makes the dump pipeline fail when pg_dump fails —
# without it, `$?` after `pg_dump | gzip` reflects only gzip's status,
# so a failed dump would be silently uploaded.
set -euo pipefail

# ---------- Configuration ----------
PG_HOST="localhost"
PG_PORT="5432"
PG_DB="your_database"
PG_USER="your_user"
# SECURITY: prefer ~/.pgpass or an injected environment variable over
# hardcoding credentials in the script.
PG_PASSWORD="your_password"

S3_BUCKET="s3://your-bucket-name"
BACKUP_DIR="/tmp/pg_backups"
RETENTION_DAYS=7   # Number of days to keep local backups

# ---------- Date & Filename ----------
DATE=$(date +'%Y-%m-%d_%H-%M-%S')
BACKUP_FILE="${BACKUP_DIR}/${PG_DB}_backup_${DATE}.sql.gz"
LOG_FILE="${BACKUP_DIR}/backup_log_${DATE}.log"

# ---------- Logging helper ----------
# Timestamped message to stdout and the log file.
log() {
    echo "[$(date)] $*" | tee -a "$LOG_FILE"
}

# ---------- Ensure backup directory exists ----------
mkdir -p "$BACKUP_DIR"

# ---------- Export password to avoid prompt ----------
export PGPASSWORD="$PG_PASSWORD"
# Clear the password from the environment on EVERY exit path
# (a plain `unset` at the bottom is skipped when we `exit 1` early).
trap 'unset PGPASSWORD' EXIT

log "Starting backup for database: $PG_DB"

# ---------- Backup PostgreSQL Database ----------
# Thanks to pipefail, this branch catches pg_dump failures even though
# gzip is the last stage of the pipeline.
if pg_dump -h "$PG_HOST" -p "$PG_PORT" -U "$PG_USER" "$PG_DB" | gzip > "$BACKUP_FILE"; then
    log "Backup successful: $BACKUP_FILE"
else
    log "ERROR: PostgreSQL backup failed!"
    rm -f -- "$BACKUP_FILE"   # don't leave a truncated dump behind
    exit 1
fi

# ---------- Upload to S3 ----------
if aws s3 cp "$BACKUP_FILE" "$S3_BUCKET/"; then
    log "Backup successfully uploaded to S3: $S3_BUCKET"
else
    log "ERROR: Failed to upload backup to S3!"
    exit 1
fi

# ---------- Cleanup old backups ----------
find "$BACKUP_DIR" -type f -name "*.sql.gz" -mtime +"$RETENTION_DAYS" -exec rm -f -- {} +
log "Old backups older than $RETENTION_DAYS days removed"

# ---------- Finish ----------
log "PostgreSQL backup process completed"

Things to remember

Date variable

DATE=$(date +'%Y-%m-%d_%H-%M-%S')
# use variable
backup_file="backup_${DATE}.sql.gz"

If the previous command failed, print an error and exit:
if [ $? -ne 0 ]; then
    echo "[$(date)] ERROR: Failed to upload backup to S3!" | tee -a "$LOG_FILE"
    exit 1
else
    echo "[$(date)] Backup successfully uploaded to S3: $S3_BUCKET" | tee -a "$LOG_FILE"
fi

PowerShell: download and install packages

# Download and silently install Cloudflare WARP.

# Source URL — Invoke-WebRequest requires an absolute URI, so the
# scheme (https://) must be included.
$url = "https://1111-releases.cloudflareclient.com/windows/Cloudflare_WARP_Release-x64.msi"

# Destination file — make sure the target folder exists first.
$dest = "C:\warp\Cloudflare_WARP_Release-x64.msi"
New-Item -ItemType Directory -Path (Split-Path $dest) -Force | Out-Null

# Download the file
Invoke-WebRequest -Uri $url -OutFile $dest

# For .exe installers use the vendor's silent flags, e.g.:
#   Start-Process -FilePath $dest -ArgumentList "/silent /norestart" -Wait
# (Not used here — WARP ships as an MSI; install it via msiexec below.)

# For .msi installers: run through msiexec and WAIT for it to finish.
# Calling msiexec directly returns immediately, so the completion
# message would print before the install actually finished.
# Extra public properties (PROPERTY=value) can be appended to the
# argument list if the package supports them.
Start-Process -FilePath "msiexec.exe" `
    -ArgumentList "/i `"$dest`" /quiet /qn /norestart /log C:\warp\warp.log" `
    -Wait

Write-Host "WARP installation completed"

# Create an empty text file
New-Item -ItemType File -Path "C:\MyFolder\myfile.txt"

# Create nested directories (like mkdir -p)
New-Item -ItemType Directory -Path "C:\MyFolder\SubFolder" -Force

Windows disk cleanup

Disk-cleanup commands — the first group is cmd.exe syntax, the second is PowerShell.

# Using cleanmgr (Disk Cleanup Tool)
# Open Disk Cleanup GUI for C: drive.
# NOTE(review): /d only selects the drive for the GUI; for unattended
# runs, save settings once with "cleanmgr /sageset:1" and then schedule
# "cleanmgr /sagerun:1".
cleanmgr /d C:

# Remove files in Temp folder.
# Run from cmd.exe: %TEMP% expansion and the del switches are cmd
# syntax, not PowerShell. Files currently held open by running
# processes are skipped with an error message.
del /q /f /s %TEMP%\*
del /q /f /s C:\Windows\Temp\*

/q → Quiet (no confirmation)
/f → Force deletion of read-only files
/s → Include subfolders

# Delete temp files (PowerShell).
# -ErrorAction SilentlyContinue lets the cleanup continue past files
# that are locked by running processes (always present in a live temp
# directory) instead of emitting an error for each one.
Remove-Item -Path "$env:TEMP\*" -Recurse -Force -ErrorAction SilentlyContinue
Remove-Item -Path "C:\Windows\Temp\*" -Recurse -Force -ErrorAction SilentlyContinue

Use Task Scheduler on Windows (the equivalent of crontab on Linux) to run these scripts automatically.

Last updated