# Copyright (C) 2025 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# You may use this file under the terms of the 3-clause BSD license.
# See the file LICENSE in qt/qtrepotools for details.
#
"""This script listens for incoming webhook requests of patchset-created type
from Gerrit for qt/qtbase on the dev branch. If Wayland-related files are
modified and the commit message includes 'Pick-to:' targets for versions 6.9
or older, it posts a 'Sanity-Review: -1' comment to Gerrit.
"""

import asyncio
import base64
import json
import logging
import os
import re
import sys
import traceback
from functools import wraps
from logging.handlers import TimedRotatingFileHandler

import aiohttp
from aiohttp import web

# Configure logging
LOG_DIR = "logging"
os.makedirs(LOG_DIR, exist_ok=True)
LOG_FILE = os.path.join(LOG_DIR, "qtwaylandbot.log")
handler = TimedRotatingFileHandler(LOG_FILE, when='midnight', backupCount=90)
handler.setFormatter(logging.Formatter(
    '%(asctime)s - %(levelname)s - %(message)s'))
logging.basicConfig(level=logging.INFO, handlers=[handler])
log = logging.getLogger()

GERRIT_USERNAME = os.environ.get('QTWAYLANDBOT_GERRIT_USERNAME')
GERRIT_PASSWORD = os.environ.get('QTWAYLANDBOT_GERRIT_PASSWORD')
if not GERRIT_USERNAME or not GERRIT_PASSWORD:
    log.error('Please set the QTWAYLANDBOT_GERRIT_USERNAME and'
              ' QTWAYLANDBOT_GERRIT_PASSWORD environment variables.')
    sys.exit(1)

# Base64-encode the username and password for HTTP Basic authentication
GERRIT_AUTH = GERRIT_USERNAME + ':' + GERRIT_PASSWORD
GERRIT_AUTH = GERRIT_AUTH.encode('utf-8')
GERRIT_AUTH = base64.b64encode(GERRIT_AUTH).decode('utf-8')

CONFIG = {
    'MAX_RETRIES': 2,
    'RETRY_DELAY': 5,
    'TEAMS_URL': os.environ.get('QTWAYLANDBOT_TEAMS_WEBHOOK_URL'),
    'TEAMS_ERROR_URL': os.environ.get('QTWAYLANDBOT_TEAMS_ERROR_WEBHOOK_URL'),
}


def log_errors(f):
    """Decorator to log any unhandled errors in a function."""
    @wraps(f)
    async def wrapper(*args, **kwargs):
        try:
            return await f(*args, **kwargs)
        except Exception as e:
            log.error("Error in %s: %s\n%s", f.__name__,
                      str(e), traceback.format_exc())
            # Re-raise so web handlers propagate the error and return a 500.
            raise
    return wrapper
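
# Illustrative only: the 'review' argument passed to post_review() below is a
# Gerrit-style review dict. The shape used by this bot is built in handle();
# a minimal sketch (message text made up) looks like:
#
#     {
#         "message": "Heads up! ...",
#         "labels": {"Sanity-Review": -1}
#     }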


async def post_review(data, review, retry=0):
    """Post the review to Gerrit."""
    change_number = data['change']['number']
    revision = data['patchSet']['revision']
    url = f"https://codereview.qt-project.org/a/changes/{change_number}/revisions/{revision}/review"
    headers = {'Content-Type': 'application/json;charset=UTF-8',
               'Authorization': 'Basic ' + GERRIT_AUTH}
    log.info("%s: Posting review", change_number)
    log.info('%s: Review data: %s', change_number, json.dumps(review))
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(url, json=review, headers=headers) as response:
                if response.status == 409 and retry < CONFIG['MAX_RETRIES']:
                    log.info('%s: Retrying due to 409 Lock Failure (%s/%s)...',
                             change_number, retry + 1, CONFIG['MAX_RETRIES'])
                    await asyncio.sleep(CONFIG['RETRY_DELAY'])
                    await post_review(data, review, retry + 1)
                elif response.status >= 400:
                    response_text = await response.text()
                    log.error('Error posting review to %s: %s %s',
                              change_number, response.status, response_text)
                else:
                    log.info('%s: Review posted successfully.', change_number)
    except aiohttp.ClientError as e:
        log.error('Error posting review to %s: %s', change_number, str(e))
        if retry < CONFIG['MAX_RETRIES']:
            log.info('%s: Retrying due to client error (%s/%s)...',
                     change_number, retry + 1, CONFIG['MAX_RETRIES'])
            await asyncio.sleep(CONFIG['RETRY_DELAY'])
            await post_review(data, review, retry + 1)
        else:
            log.error('%s: Max retries reached for posting review after client error.',
                      change_number)
    except Exception as e:
        log.error('%s: Unexpected error in post_review: %s\n%s',
                  change_number, str(e), traceback.format_exc())


async def get_files_from_gerrit(change_number, revision, retry=0):
    """Fetch the list of files for a given change and revision from Gerrit."""
    url = f"https://codereview.qt-project.org/changes/{change_number}/revisions/{revision}/files"
    headers = {'Authorization': 'Basic ' + GERRIT_AUTH}
    log.info("%s: Fetching files for revision %s", change_number, revision)
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers) as response:
                # Gerrit returns a non-JSON response starting with ")]}'" to prevent XSSI.
                # We need to strip this prefix before parsing JSON.
                response_text = await response.text()
                if response_text.startswith(")]}'"):
                    response_text = response_text[4:]
                if response.status == 200:
                    files_data = json.loads(response_text)
                    # files_data is a dict where keys are file paths and values are
                    # objects with details (e.g., status if a file was DELETED)
                    log.info("%s: Successfully fetched %d files for revision %s.",
                             change_number, len(files_data), revision)
                    return files_data  # Return the whole dict
                elif response.status >= 400:
                    log.error('Error fetching files from Gerrit for %s revision %s: %s %s',
                              change_number, revision, response.status, response_text)
                    if retry < CONFIG['MAX_RETRIES']:
                        log.info('%s: Retrying file fetch (%s/%s)...',
                                 change_number, retry + 1, CONFIG['MAX_RETRIES'])
                        await asyncio.sleep(CONFIG['RETRY_DELAY'])
                        return await get_files_from_gerrit(change_number, revision, retry + 1)
                    else:
                        log.error('%s: Max retries reached for fetching files.',
                                  change_number)
                        return None  # Indicate failure after retries
    except aiohttp.ClientError as e:
        log.error('Client error fetching files for %s revision %s: %s',
                  change_number, revision, str(e))
        if retry < CONFIG['MAX_RETRIES']:
            log.info('%s: Retrying file fetch due to client error (%s/%s)...',
                     change_number, retry + 1, CONFIG['MAX_RETRIES'])
            await asyncio.sleep(CONFIG['RETRY_DELAY'])
            return await get_files_from_gerrit(change_number, revision, retry + 1)
        else:
            log.error('%s: Max retries reached for fetching files after client error.',
                      change_number)
            return None  # Indicate failure after retries
    except json.JSONDecodeError as e:
        # Log part of the response
        log.error('Error decoding JSON from Gerrit file list for %s revision %s: %s. Response text: %s',
                  change_number, revision, str(e), response_text[:200])
        return None  # Indicate failure
    except Exception as e:
        log.error('%s: Unexpected error in get_files_from_gerrit for revision %s: %s\n%s',
                  change_number, revision, str(e), traceback.format_exc())
        return None  # Indicate failure
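
# Illustrative only: after stripping the ")]}'" prefix, the file-list response
# parsed by get_files_from_gerrit() is a JSON object keyed by file path. The
# example below is made up; it shows only the fields this script reads
# ("/COMMIT_MSG" key, "status", "lines_inserted"):
#
#     {
#         "/COMMIT_MSG": {"lines_inserted": 12},
#         "src/plugins/platforms/wayland/example.cpp": {"lines_inserted": 5},
#         "src/old/removed_file.cpp": {"status": "DELETED"}
#     }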


async def post_teams_error_message(data, custom_text, retry=0):
    """Post an error message to Teams with the change details for diagnostic purposes."""
    if not CONFIG['TEAMS_ERROR_URL']:
        log.info('Teams error webhook URL not set. Skipping error message posting.')
        return
    headers = {
        'Content-Type': 'application/json'
    }
    # Ensure data and data['change'] exist before trying to access sub-keys
    change_subject = "Unknown Subject"
    change_number_val = "Unknown Change Number"
    project_val = "Unknown Project"
    branch_val = "Unknown Branch"
    change_url_val = "#"
    if data and 'change' in data:
        change_data = data['change']
        change_subject = change_data.get('subject', 'Unknown Subject')
        change_number_val = change_data.get('number', 'Unknown Change Number')
        project_val = change_data.get('project', 'Unknown Project')
        branch_val = change_data.get('branch', 'Unknown Branch')
        change_url_val = change_data.get('url', '#')

    card = {
        "@type": "MessageCard",
        "@context": "http://schema.org/extensions",
        "summary": "Error in QtWaylandBot",
        "themeColor": "D70000",  # Red for error
        "title": f"Error processing: {change_subject}",
        "text": custom_text,
        "sections": [{
            "activityTitle": change_subject,
            "activitySubtitle": f"Change number: {change_number_val}",
            "facts": [{
                "name": "Project:",
                "value": project_val
            }, {
                "name": "Branch:",
                "value": branch_val
            }],
            "markdown": True
        }],
        "potentialAction": [{
            "@type": "OpenUri",
            "name": "View Change",
            "targets": [{
                "os": "default",
                "uri": change_url_val
            }]
        }]
    }
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(CONFIG['TEAMS_ERROR_URL'], json=card, headers=headers) as res:
                if res.status == 408 and retry < CONFIG['MAX_RETRIES']:
                    log.info('%s: Retrying Teams error message due to 408 Request Timeout...',
                             change_number_val)
                    await asyncio.sleep(CONFIG['RETRY_DELAY'])
                    await post_teams_error_message(data, custom_text, retry + 1)
                elif res.status >= 400:
                    response_text = await res.text()
                    log.error('Error posting Teams error message: %s %s',
                              res.status, response_text)
                else:
                    log.info('%s: Teams error message posted successfully.',
                             change_number_val)
    except aiohttp.ClientError as e:
        log.error('Error posting Teams error message: %s', str(e))
        if retry < CONFIG['MAX_RETRIES']:
            log.info('%s: Retrying Teams error message due to client error...',
                     change_number_val)
            await asyncio.sleep(CONFIG['RETRY_DELAY'])
            await post_teams_error_message(data, custom_text, retry + 1)
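
# Illustrative only: a minimal sketch of the Gerrit 'patchset-created' payload
# that handle() below expects. Only fields actually read by this script are
# shown; all values are made up:
#
#     {
#         "type": "patchset-created",
#         "change": {
#             "number": 123456,
#             "status": "NEW",
#             "project": "qt/qtbase",
#             "branch": "dev",
#             "subject": "Example subject",
#             "url": "https://codereview.qt-project.org/c/qt/qtbase/+/123456",
#             "commitMessage": "Example subject\n\n...\n\nPick-to: 6.10 6.9"
#         },
#         "patchSet": {"revision": "abc123..."}
#     }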


@log_errors
async def handle(request):
    """Handle the incoming webhook request."""
    data = None  # Initialize data and body for the error-handling scope
    body = ""
    try:
        body = await request.text()
        data = json.loads(body)

        # Validate request
        required_fields = ['type', 'change', 'patchSet']
        if not all(field in data for field in required_fields):
            log.warning("Missing required fields in request data.")
            return web.Response(status=400, text="Missing required fields")

        # Make sure the change is in state NEW
        if data['change'].get('status') != 'NEW':
            log.info("Change %s status is %s, not NEW. Skipping.",
                     data['change'].get('number', 'N/A'),
                     data['change'].get('status', 'N/A'))
            return web.Response(status=201, text="Not a NEW change.")

        # Make sure it's a patchset-created event
        if data.get('type') != 'patchset-created':
            log.info("Event type is %s, not patchset-created. Skipping.",
                     data.get('type', 'N/A'))
            return web.Response(status=201, text="Not a patchset-created event.")

        # Filter for qt/qtbase project and dev branch
        if data['change'].get('project') != 'qt/qtbase':
            log.info("Project is %s, not qt/qtbase. Skipping.",
                     data['change'].get('project', 'N/A'))
            return web.Response(status=201, text="Not qt/qtbase project.")
        if data['change'].get('branch') != 'dev':
            log.info("Branch is %s, not dev. Skipping.",
                     data['change'].get('branch', 'N/A'))
            return web.Response(status=201, text="Not dev branch.")

        change_number = data['change']['number']
        revision = data['patchSet']['revision']
        log.info("%s: Received patchset-created event for %s/%s on branch %s.",
                 change_number, data['change']['project'], revision,
                 data['change']['branch'])

        # Fetch files from Gerrit
        files_from_gerrit = await get_files_from_gerrit(change_number, revision)
        if files_from_gerrit is None:
            log.error("%s: Failed to fetch files from Gerrit for revision %s. Aborting processing.",
                      change_number, revision)
            # Notify Teams, as this failure is worth diagnosing
            await post_teams_error_message(
                data, f"Failed to fetch files from Gerrit for revision {revision}."
                      " Processing aborted.")
            return web.Response(status=500, text="Failed to fetch files from Gerrit.")
        if not files_from_gerrit:
            log.info("%s: No files found in revision %s according to Gerrit. Skipping.",
                     change_number, revision)
            return web.Response(status=201, text="No files in revision according to Gerrit.")

        # Identify whether any files are in relevant qtwayland paths.
        # files_from_gerrit is a dict:
        # {"/COMMIT_MSG": {"lines_inserted": ...}, "path/to/file.c": {"lines_inserted": ...}}
        # We are interested in the keys (file paths) and their status
        # (if available and not 'DELETED').
        wayland_patterns = ["/wayland/", "/qtwaylandscanner/",
                            "cmake/test_waylandclient"]
        changed_wayland_filepaths = []
        for file_path, file_details in files_from_gerrit.items():
            if file_path == "/COMMIT_MSG":  # Skip commit message pseudo-file
                continue
            # Check if the 'status' field exists and is 'DELETED'.
            # If 'status' is not present, assume the file was modified/added in this revision.
            if file_details and file_details.get('status') == 'DELETED':
                continue  # Skip deleted files
            if any(p in file_path for p in wayland_patterns):
                changed_wayland_filepaths.append(file_path)

        if changed_wayland_filepaths:
            log.info("%s: Wayland-related file changes detected in: %s",
                     change_number, ", ".join(changed_wayland_filepaths))
        else:
            # This covers both no Wayland files changed and all Wayland files being DELETED
            log.info("%s: No active Wayland-related file changes found (checked patterns: %s). Skipping.",
                     change_number, wayland_patterns)
            return web.Response(status=201, text="No Wayland file changes.")

        # Read commit message and search for the Pick-to footer
        commit_message = data.get('change', {}).get('commitMessage', '')
        pick_to_match = re.search(
            r"(?:^|\n)Pick-to:(?:\s+(.+))", commit_message, re.MULTILINE)
        if not pick_to_match:
            log.info("%s: No 'Pick-to:' footer found in commit message. Skipping.",
                     change_number)
            return web.Response(status=201, text="No Pick-to footer.")

        pick_targets_str = pick_to_match.group(1)
        potential_targets = pick_targets_str.split()
        old_pick_targets_found = []
        for target in potential_targets:
            # Parses 6.10, 6.9, 6.9.0
            match = re.match(r"(\d+)\.(\d+)(?:\.(\d+))?", target)
            if match:
                major = int(match.group(1))
                minor = int(match.group(2))
                # Patch version not strictly needed for this comparison.
                # Check if version is <= 6.9 (older than 6.10)
                if major < 6 or (major == 6 and minor <= 9):
                    old_pick_targets_found.append(target)

        if old_pick_targets_found:
            log.info("%s: Found old Pick-to targets (%s). Posting review.",
                     change_number, ", ".join(old_pick_targets_found))
            review_message = (
                "Heads up! This change includes 'Pick-to:' targets for Qt versions 6.9 or older: "
                f"{', '.join(old_pick_targets_found)}.\n\n"
                "Wayland-related changes targeting these older versions (older than 6.10) "
                "must be manually backported to the standalone qt/qtwayland repository,"
                " and the Pick-to footer cannot be used for these versions.\n\n"
            )
            review_payload = {
                "message": review_message,
                "labels": {
                    "Sanity-Review": -1
                }
            }
            await post_review(data, review_payload)
        else:
            log.info("%s: No Pick-to targets <= 6.9 found. No action needed.",
                     change_number)

        return web.Response(status=200, text="Processing complete.")

    except json.JSONDecodeError:
        log.error("Invalid JSON received.")
        # data might be None here, or malformed. Send part of the body.
        await post_teams_error_message(data if data else {},
                                       f"Invalid JSON received.\nBody: {body[:500]}...")
        return web.Response(status=400, text="Invalid JSON")
    except Exception as e:
        log.error("Unhandled error in handle function: %s\n%s",
                  str(e), traceback.format_exc())
        # data might be None or partially populated if the error occurred early
        error_context = f"Error: {str(e)}\nTraceback:\n{traceback.format_exc()}"
        await post_teams_error_message(data if data else {}, error_context)
        return web.Response(status=500, text="Internal Server Error")
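
# Illustrative only: how the Pick-to parsing in handle() above classifies
# targets (the footer below is made up). Given "Pick-to: 6.10 6.9 6.8.2 dev",
# the version check flags 6.9 and 6.8.2 as <= 6.9; 6.10 passes, and "dev" does
# not match the numeric pattern at the start of the token, so it is ignored.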


async def handle_status(req):
    """Handle the status request."""
    log.info("Status check received.")
    return web.Response(status=200, text="QtWaylandBot is running.")


async def run_web_server():
    """Run the web server."""
    app = web.Application()
    app.add_routes([web.get('/status', handle_status)])
    # Assuming root path for Gerrit webhooks
    app.add_routes([web.post('/', handle)])
    runner = web.AppRunner(app)
    await runner.setup()
    port = os.environ.get("QTWAYLANDBOT_PORT") or 8089
    site = web.TCPSite(runner, '0.0.0.0', int(port))
    try:
        await site.start()
        log.info("QtWaylandBot web server started on port %s", port)
        # Keep the server running
        while True:
            await asyncio.sleep(3600)
    except Exception as e:
        log.error(f"Failed to start web server: {e}")
    finally:
        await runner.cleanup()


def main():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        log.info("Starting QtWaylandBot...")
        loop.run_until_complete(run_web_server())
    except KeyboardInterrupt:
        log.info("QtWaylandBot shutting down...")
    except Exception as e:
        log.critical(
            f"QtWaylandBot failed to start or run: {e}\n{traceback.format_exc()}")
    finally:
        log.info("Closing event loop.")
        # Gracefully shut down running tasks
        tasks = [t for t in asyncio.all_tasks(loop)
                 if t is not asyncio.current_task(loop)]
        if tasks:
            log.info(f"Cancelling {len(tasks)} outstanding tasks...")
            for task in tasks:
                task.cancel()
            loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
            log.info("Outstanding tasks cancelled.")
        loop.close()
        log.info("Event loop closed.")


if __name__ == "__main__":
    main()
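
# Illustrative only: one way to run the bot locally. Environment variable names,
# the /status route, and the default port 8089 come from the code above; the
# script filename and the placeholder values are assumptions:
#
#     export QTWAYLANDBOT_GERRIT_USERNAME=example-bot-user
#     export QTWAYLANDBOT_GERRIT_PASSWORD=example-http-password
#     export QTWAYLANDBOT_TEAMS_ERROR_WEBHOOK_URL=https://example.webhook.office.com/...
#     python3 qtwaylandbot.py          # assumed filename
#     curl http://localhost:8089/status  # health check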