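# A quick way to deploy and smoke-test these CronJobs (a sketch; the file
# name below is an assumption, adjust it to your layout):
#
#   kubectl apply -f piefed-cronjobs.yaml
#   kubectl get cronjobs -n piefed-application
#
#   # Trigger a one-off run without waiting for the schedule:
#   kubectl create job --from=cronjob/piefed-daily-maintenance \
#     piefed-daily-maintenance-manual -n piefed-application
#   kubectl logs -f job/piefed-daily-maintenance-manual -n piefed-application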
---
# Daily maintenance tasks
apiVersion: batch/v1
kind: CronJob
metadata:
  name: piefed-daily-maintenance
  namespace: piefed-application
  labels:
    app.kubernetes.io/name: piefed
    app.kubernetes.io/component: cronjob
spec:
  schedule: "0 2 * * *" # Daily at 2 AM UTC
  successfulJobsHistoryLimit: 1
  failedJobsHistoryLimit: 1
  concurrencyPolicy: Forbid
  jobTemplate:
    spec:
      template:
        spec:
          imagePullSecrets:
            - name: harbor-pull-secret
          containers:
            - name: daily-maintenance
              image: <YOUR_REGISTRY_URL>/library/piefed-web:latest
              command:
                - /bin/sh
                - -c
                - |
                  set -e # propagate command failures so the Job is marked failed
                  echo "Running daily maintenance tasks..."
                  export FLASK_APP=pyfedi.py
                  cd /app

                  # Set up dual logging (file + stdout) for OpenObserve. The setup
                  # is written to a sitecustomize module on PYTHONPATH (assumes
                  # /tmp is writable) so the flask process below imports it at
                  # interpreter startup; an inline `python -c` would only configure
                  # a throwaway interpreter and leave the flask process untouched.
                  mkdir -p /tmp/dual-logging
                  cat > /tmp/dual-logging/sitecustomize.py <<'EOF'
                  import logging
                  import sys

                  def setup_dual_logging():
                      '''Add stdout handlers to existing loggers without disrupting file logging'''
                      # Create a shared console handler
                      console_handler = logging.StreamHandler(sys.stdout)
                      console_handler.setLevel(logging.INFO)
                      console_handler.setFormatter(logging.Formatter(
                          '%(asctime)s [%(name)s] %(levelname)s: %(message)s'
                      ))

                      # Add the console handler to key loggers, alongside their existing file handlers
                      loggers_to_enhance = [
                          'flask.app',      # Flask application logger
                          'werkzeug',       # Web server logger
                          'celery',         # Celery base logger
                          'celery.task',    # Celery task logger
                          'celery.worker',  # Celery worker logger
                          '',               # Root logger
                      ]

                      for logger_name in loggers_to_enhance:
                          logger = logging.getLogger(logger_name)
                          logger.setLevel(logging.INFO)

                          # Skip loggers that already log to stdout
                          has_stdout_handler = any(
                              isinstance(h, logging.StreamHandler) and h.stream == sys.stdout
                              for h in logger.handlers
                          )
                          if not has_stdout_handler:
                              logger.addHandler(console_handler)

                      print('Dual logging configured: file + stdout for OpenObserve')

                  setup_dual_logging()
                  EOF
                  export PYTHONPATH="/tmp/dual-logging${PYTHONPATH:+:$PYTHONPATH}"

                  # Run the daily maintenance command with dual logging active
                  flask daily-maintenance-celery
                  echo "Daily maintenance completed"
              envFrom:
                - configMapRef:
                    name: piefed-config
                - secretRef:
                    name: piefed-secrets
              resources:
                requests:
                  cpu: 100m
                  memory: 256Mi
                limits:
                  cpu: 500m
                  memory: 512Mi
              volumeMounts:
                - name: app-storage
                  mountPath: /app/media
                  subPath: media
          volumes:
            - name: app-storage
              persistentVolumeClaim:
                claimName: piefed-app-storage
          restartPolicy: OnFailure
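# The schedules in this file are interpreted as UTC, the controller default
# when no time zone is set. On Kubernetes 1.27+ the CronJob timeZone field
# is stable and can pin this explicitly; a sketch, only valid if your
# cluster is on such a version:
#
#   spec:
#     schedule: "0 2 * * *"
#     timeZone: "Etc/UTC"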
---
# Remove orphan files
apiVersion: batch/v1
kind: CronJob
metadata:
  name: piefed-remove-orphans
  namespace: piefed-application
  labels:
    app.kubernetes.io/name: piefed
    app.kubernetes.io/component: cronjob
spec:
  schedule: "0 3 * * 0" # Weekly on Sunday at 3 AM UTC
  successfulJobsHistoryLimit: 1
  failedJobsHistoryLimit: 1
  concurrencyPolicy: Forbid
  jobTemplate:
    spec:
      template:
        spec:
          imagePullSecrets:
            - name: harbor-pull-secret
          containers:
            - name: remove-orphans
              image: <YOUR_REGISTRY_URL>/library/piefed-web:latest
              command:
                - /bin/sh
                - -c
                - |
                  set -e # propagate command failures so the Job is marked failed
                  echo "Removing orphaned files..."
                  export FLASK_APP=pyfedi.py
                  cd /app

                  # Set up dual logging (file + stdout) for OpenObserve. The setup
                  # is written to a sitecustomize module on PYTHONPATH (assumes
                  # /tmp is writable) so the flask process below imports it at
                  # interpreter startup; an inline `python -c` would only configure
                  # a throwaway interpreter and leave the flask process untouched.
                  mkdir -p /tmp/dual-logging
                  cat > /tmp/dual-logging/sitecustomize.py <<'EOF'
                  import logging
                  import sys

                  def setup_dual_logging():
                      '''Add stdout handlers to existing loggers without disrupting file logging'''
                      # Create a shared console handler
                      console_handler = logging.StreamHandler(sys.stdout)
                      console_handler.setLevel(logging.INFO)
                      console_handler.setFormatter(logging.Formatter(
                          '%(asctime)s [%(name)s] %(levelname)s: %(message)s'
                      ))

                      # Add the console handler to key loggers, alongside their existing file handlers
                      loggers_to_enhance = [
                          'flask.app',      # Flask application logger
                          'werkzeug',       # Web server logger
                          'celery',         # Celery base logger
                          'celery.task',    # Celery task logger
                          'celery.worker',  # Celery worker logger
                          '',               # Root logger
                      ]

                      for logger_name in loggers_to_enhance:
                          logger = logging.getLogger(logger_name)
                          logger.setLevel(logging.INFO)

                          # Skip loggers that already log to stdout
                          has_stdout_handler = any(
                              isinstance(h, logging.StreamHandler) and h.stream == sys.stdout
                              for h in logger.handlers
                          )
                          if not has_stdout_handler:
                              logger.addHandler(console_handler)

                      print('Dual logging configured: file + stdout for OpenObserve')

                  setup_dual_logging()
                  EOF
                  export PYTHONPATH="/tmp/dual-logging${PYTHONPATH:+:$PYTHONPATH}"

                  # Run the remove-orphan-files command with dual logging active
                  flask remove_orphan_files
                  echo "Orphan cleanup completed"
              envFrom:
                - configMapRef:
                    name: piefed-config
                - secretRef:
                    name: piefed-secrets
              resources:
                requests:
                  cpu: 100m
                  memory: 256Mi
                limits:
                  cpu: 500m
                  memory: 512Mi
              volumeMounts:
                - name: app-storage
                  mountPath: /app/media
                  subPath: media
          volumes:
            - name: app-storage
              persistentVolumeClaim:
                claimName: piefed-app-storage
          restartPolicy: OnFailure
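# During maintenance windows the weekly cleanup can be paused without
# deleting it, via the CronJob suspend flag (a sketch):
#
#   kubectl patch cronjob piefed-remove-orphans -n piefed-application \
#     -p '{"spec":{"suspend":true}}'
#   # ...and patch it back to false to resume scheduling.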
---
# Send queued notifications
apiVersion: batch/v1
kind: CronJob
metadata:
  name: piefed-send-queue
  namespace: piefed-application
  labels:
    app.kubernetes.io/name: piefed
    app.kubernetes.io/component: cronjob
spec:
  schedule: "*/10 * * * *" # Every 10 minutes
  successfulJobsHistoryLimit: 1
  failedJobsHistoryLimit: 1
  concurrencyPolicy: Forbid
  jobTemplate:
    spec:
      template:
        spec:
          imagePullSecrets:
            - name: harbor-pull-secret
          containers:
            - name: send-queue
              image: <YOUR_REGISTRY_URL>/library/piefed-web:latest
              command:
                - /bin/sh
                - -c
                - |
                  set -e # propagate command failures so the Job is marked failed
                  echo "Processing notification queue..."
                  export FLASK_APP=pyfedi.py
                  cd /app

                  # Set up dual logging (file + stdout) for OpenObserve. The setup
                  # is written to a sitecustomize module on PYTHONPATH (assumes
                  # /tmp is writable) so the flask process below imports it at
                  # interpreter startup; an inline `python -c` would only configure
                  # a throwaway interpreter and leave the flask process untouched.
                  mkdir -p /tmp/dual-logging
                  cat > /tmp/dual-logging/sitecustomize.py <<'EOF'
                  import logging
                  import sys

                  def setup_dual_logging():
                      '''Add stdout handlers to existing loggers without disrupting file logging'''
                      # Create a shared console handler
                      console_handler = logging.StreamHandler(sys.stdout)
                      console_handler.setLevel(logging.INFO)
                      console_handler.setFormatter(logging.Formatter(
                          '%(asctime)s [%(name)s] %(levelname)s: %(message)s'
                      ))

                      # Add the console handler to key loggers, alongside their existing file handlers
                      loggers_to_enhance = [
                          'flask.app',      # Flask application logger
                          'werkzeug',       # Web server logger
                          'celery',         # Celery base logger
                          'celery.task',    # Celery task logger
                          'celery.worker',  # Celery worker logger
                          '',               # Root logger
                      ]

                      for logger_name in loggers_to_enhance:
                          logger = logging.getLogger(logger_name)
                          logger.setLevel(logging.INFO)

                          # Skip loggers that already log to stdout
                          has_stdout_handler = any(
                              isinstance(h, logging.StreamHandler) and h.stream == sys.stdout
                              for h in logger.handlers
                          )
                          if not has_stdout_handler:
                              logger.addHandler(console_handler)

                      print('Dual logging configured: file + stdout for OpenObserve')

                  setup_dual_logging()
                  EOF
                  export PYTHONPATH="/tmp/dual-logging${PYTHONPATH:+:$PYTHONPATH}"

                  # Run the send-queue command with dual logging active
                  flask send-queue
                  echo "Queue processing completed"
              envFrom:
                - configMapRef:
                    name: piefed-config
                - secretRef:
                    name: piefed-secrets
              resources:
                requests:
                  cpu: 50m
                  memory: 128Mi
                limits:
                  cpu: 200m
                  memory: 256Mi
          restartPolicy: Never
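# With concurrencyPolicy: Forbid and a 10-minute schedule, a run that takes
# longer than 10 minutes causes the next run to be skipped rather than
# overlap. If a skipped run should still be started once the previous one
# finishes, the optional startingDeadlineSeconds field gives the controller
# a grace window (a sketch; the value is an assumption, tune to your load):
#
#   spec:
#     startingDeadlineSeconds: 300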
---
# Send email notifications
apiVersion: batch/v1
kind: CronJob
metadata:
  name: piefed-email-notifications
  namespace: piefed-application
  labels:
    app.kubernetes.io/name: piefed
    app.kubernetes.io/component: cronjob
spec:
  schedule: "1 */6 * * *" # Every 6 hours at minute 1
  successfulJobsHistoryLimit: 1
  failedJobsHistoryLimit: 1
  concurrencyPolicy: Forbid
  jobTemplate:
    spec:
      template:
        spec:
          imagePullSecrets:
            - name: harbor-pull-secret
          containers:
            - name: email-notifications
              image: <YOUR_REGISTRY_URL>/library/piefed-web:latest
              command:
                - /bin/sh
                - -c
                - |
                  set -e # propagate command failures so the Job is marked failed
                  echo "Processing email notifications..."
                  export FLASK_APP=pyfedi.py
                  cd /app

                  # Set up dual logging (file + stdout) for OpenObserve. The setup
                  # is written to a sitecustomize module on PYTHONPATH (assumes
                  # /tmp is writable) so the flask processes below import it at
                  # interpreter startup; an inline `python -c` would only configure
                  # a throwaway interpreter and leave the flask processes untouched.
                  mkdir -p /tmp/dual-logging
                  cat > /tmp/dual-logging/sitecustomize.py <<'EOF'
                  import logging
                  import sys

                  def setup_dual_logging():
                      '''Add stdout handlers to existing loggers without disrupting file logging'''
                      # Create a shared console handler
                      console_handler = logging.StreamHandler(sys.stdout)
                      console_handler.setLevel(logging.INFO)
                      console_handler.setFormatter(logging.Formatter(
                          '%(asctime)s [%(name)s] %(levelname)s: %(message)s'
                      ))

                      # Add the console handler to key loggers, alongside their existing file handlers
                      loggers_to_enhance = [
                          'flask.app',      # Flask application logger
                          'werkzeug',       # Web server logger
                          'celery',         # Celery base logger
                          'celery.task',    # Celery task logger
                          'celery.worker',  # Celery worker logger
                          '',               # Root logger
                      ]

                      for logger_name in loggers_to_enhance:
                          logger = logging.getLogger(logger_name)
                          logger.setLevel(logging.INFO)

                          # Skip loggers that already log to stdout
                          has_stdout_handler = any(
                              isinstance(h, logging.StreamHandler) and h.stream == sys.stdout
                              for h in logger.handlers
                          )
                          if not has_stdout_handler:
                              logger.addHandler(console_handler)

                      print('Dual logging configured: file + stdout for OpenObserve')

                  setup_dual_logging()
                  EOF
                  export PYTHONPATH="/tmp/dual-logging${PYTHONPATH:+:$PYTHONPATH}"

                  # Run the notification and cleanup commands with dual logging active
                  echo "Sending missed notifications..."
                  flask send_missed_notifs

                  echo "Processing email bounces..."
                  flask process_email_bounces

                  echo "Cleaning up old activities..."
                  flask clean_up_old_activities

                  echo "Email notification processing completed"
              envFrom:
                - configMapRef:
                    name: piefed-config
                - secretRef:
                    name: piefed-secrets
              resources:
                requests:
                  cpu: 50m
                  memory: 128Mi
                limits:
                  cpu: 200m
                  memory: 256Mi
          restartPolicy: Never
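# Each CronJob above keeps one successful and one failed Job (the history
# limits). To inspect the most recent email-notification run (a sketch;
# the Job name suffix is generated per run, so look it up first):
#
#   kubectl get jobs -n piefed-application | grep piefed-email-notifications
#   kubectl logs job/<job-name> -n piefed-application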