# site-profile/.gitea/workflows/tag-old-pull-requests.yaml

name: tag-old-pull-requests
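
# Runs daily on a cron schedule; workflow_dispatch also allows manual runs.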
on:
  schedule:
    - cron: '@daily'
  workflow_dispatch:

jobs:
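  # Label open pull requests older than DAYS_OLD with TAG_NAME.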
  tag-old-pull-requests:
    runs-on: ubuntu-latest
    steps:
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.13'
      - name: Install dependencies
        run: pip install requests
      - name: Tag Old Pull Requests
        env:
          BOT_TOKEN: ${{ secrets.BOT_TOKEN }}
          INSTANCE_URL: ${{ vars.INSTANCE_URL }}
          REPOSITORY: ${{ gitea.repository }}
          TAG_NAME: 'stale'
          DAYS_OLD: 3
          EXCLUDE_TAG_NAME: ''
          REQUIRED_TAG: 'automerge'
        run: |
          python - <<EOF
          import os
          import sys
          import requests
          from datetime import datetime, timedelta, timezone
          from urllib.parse import quote

          def main():
              """
              Main function to fetch old pull requests from a Gitea repository and tag them.
              """
              # --- 1. Get configuration from environment variables ---
              try:
                  instance_url = os.environ['INSTANCE_URL'].rstrip('/')
                  repository = os.environ['REPOSITORY']
                  token = os.environ['BOT_TOKEN']
                  days_old = int(os.environ.get('DAYS_OLD', '30'))
                  tag_name = os.environ['TAG_NAME']
                  exclude_tag_name = os.environ.get('EXCLUDE_TAG_NAME')
                  required_tag = os.environ.get('REQUIRED_TAG')
              except KeyError as e:
                  print(f"Error: Missing required environment variable: {e}", file=sys.stderr)
                  sys.exit(1)

              # --- 2. Set up API headers and base URL ---
              headers = {
                  'Authorization': f'token {token}',
                  'Accept': 'application/json',
                  'Content-Type': 'application/json'
              }
              base_api_url = f"{instance_url}/api/v1/repos/{repository}/pulls"

              # --- 3. Conditionally build the request parameters ---
              params = {'state': 'open'}
              if required_tag:
                  print(f">> Filtering for pull requests with the required tag: {required_tag}")
                  params['labels'] = required_tag
              else:
                  print(">> No required tag specified. Checking all open pull requests.")

              # --- 4. Fetch pull requests ---
              print(">> Fetching pull requests ...")
              try:
                  response = requests.get(base_api_url, headers=headers, params=params, timeout=30)
                  response.raise_for_status()
                  pull_requests = response.json()
              except requests.exceptions.RequestException as e:
                  print(f"Error fetching pull requests: {e}", file=sys.stderr)
                  sys.exit(1)

              if not pull_requests:
                  print(">> No open pull requests found, exiting.")
                  sys.exit(0)
              print(f">> Processing {len(pull_requests)} open pull requests ...")

              # --- 5. Filter and tag old pull requests ---
              older_than_date = datetime.now(timezone.utc) - timedelta(days=days_old)
              print(f">> Filtering for pull requests older than {older_than_date.strftime('%Y-%m-%dT%H:%M:%SZ')}")
              for pr in pull_requests:
                  pr_number = pr['number']
                  pr_created_at = datetime.fromisoformat(pr['created_at'].replace('Z', '+00:00'))
                  pr_current_labels = {label['name'] for label in pr.get('labels', [])}
                  if pr_created_at < older_than_date:
                      if exclude_tag_name and exclude_tag_name in pr_current_labels:
                          print(f">> Skipping PR #{pr_number} because it has the '{exclude_tag_name}' tag.")
                          continue
                      if tag_name not in pr_current_labels:
                          print(f">> Tagging PR #{pr_number} with '{tag_name}'")
                          updated_labels = list(pr_current_labels | {tag_name})
                          payload = {'labels': updated_labels}
update_url = f"{base_api_url}/{pr_number}/labels"
                          try:
                              update_response = requests.put(update_url, headers=headers, json=payload, timeout=30)
                              update_response.raise_for_status()
                              print(f">> Successfully updated labels for PR #{pr_number}")
                          except requests.exceptions.RequestException as e:
                              print(f"Error updating labels for PR #{pr_number}: {e}", file=sys.stderr)
                      else:
                          print(f">> Skipping PR #{pr_number} because it already has the '{tag_name}' tag.")

              print(">> Finished processing pull requests.")

          if __name__ == "__main__":
              main()
          EOF