#!/usr/bin/env python3
"""
Disable or restore GitLab pipeline schedules in bulk.

This script targets the GitLab v4 REST API and uses only the Python standard
library (urllib). It disables all *active* pipeline schedules across all
projects owned by a group namespace (including subgroups). Any schedules
successfully disabled are recorded in a JSON state file for later restore.

Configuration:
- GITLAB_URL: Base URL, e.g. https://gitlab.example.com
- GITLAB_TOKEN: Personal access token (or other token) with API access

Usage:
- Disable schedules: disable-pipelines.py disable <namespace>
- Restore schedules: disable-pipelines.py enable <state-file>
"""
from __future__ import annotations

import argparse
import json
import os
import sys
import time
import urllib.error
import urllib.parse
import urllib.request
from dataclasses import dataclass
from pathlib import Path
from typing import Any


class _NoRedirect(urllib.request.HTTPRedirectHandler):
    """Disable redirects for urllib requests."""

    def redirect_request(
        self,
        req: urllib.request.Request,
        fp: Any,
        code: int,
        msg: str,
        headers: Any,
        newurl: str,
    ) -> None:
        raise urllib.error.HTTPError(
            req.full_url,
            code,
            f"Redirects disabled: {msg}",
            headers,
            fp,
        )


@dataclass(frozen=True)
class _HttpResponse:
    """HTTP response container."""

    status: int
    headers: dict[str, str]
    body: bytes


class GitLabClient:
    """Minimal GitLab API client based on urllib."""

    def __init__(self, base_url: str, token: str, timeout_s: float = 30.0):
        """Create a client."""
        normalized = base_url.rstrip("/")
        if not normalized:
            raise ValueError("base_url is empty")
        self._base_url = normalized
        self._token = token
        self._timeout_s = timeout_s
        self._opener = urllib.request.build_opener(_NoRedirect())

    def get_json(
        self, path: str, query: dict[str, str] | None = None
    ) -> tuple[Any, dict[str, str]]:
        """GET and parse JSON."""
        response = self._request("GET", path, query=query)
        return self._parse_json(response), response.headers

    def put_form_json(
        self, path: str, form: dict[str, str]
    ) -> tuple[Any, dict[str, str]]:
        """PUT form-encoded data and parse JSON."""
        body = urllib.parse.urlencode(form).encode("utf-8")
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        response = self._request("PUT", path, body=body, headers=headers)
        return self._parse_json(response), response.headers

    def _request(
        self,
        method: str,
        path: str,
        *,
        query: dict[str, str] | None = None,
        body: bytes | None = None,
        headers: dict[str, str] | None = None,
        retries: int = 3,
    ) -> _HttpResponse:
        """Perform a minimal HTTP request."""
        url = self._build_url(path, query)
        request_headers: dict[str, str] = {
            "Accept": "application/json",
            "PRIVATE-TOKEN": self._token,
        }
        if headers:
            request_headers.update(headers)
        request = urllib.request.Request(
            url,
            data=body,
            headers=request_headers,
            method=method,
        )
        last_error: Exception | None = None
        for attempt in range(retries + 1):
            try:
                with self._opener.open(
                    request, timeout=self._timeout_s
                ) as resp:
                    status = int(resp.getcode())
                    raw_headers = {
                        k: v for (k, v) in resp.headers.items() if k and v
                    }
                    body_bytes = resp.read() if resp is not None else b""
                if 200 <= status < 300:
                    return _HttpResponse(
                        status=status, headers=raw_headers, body=body_bytes
                    )
                raise RuntimeError(
                    f"Unexpected HTTP status {status} for {method} {url}"
                )
            except urllib.error.HTTPError as exc:
                status = int(getattr(exc, "code", 0) or 0)
                retry_after_s = _retry_after_seconds(exc.headers)
                # Retry rate limiting and transient server errors, honouring
                # Retry-After when present; otherwise back off exponentially.
                is_retryable = status in {429, 500, 502, 503, 504}
                if attempt < retries and is_retryable:
                    sleep_s = retry_after_s
                    if sleep_s is None:
                        sleep_s = 1.0 * (2**attempt)
                    time.sleep(sleep_s)
                    last_error = exc
                    continue
                message = _http_error_message(exc)
                raise RuntimeError(
                    f"HTTP {status} for {method} {url}: {message}"
                ) from exc
            except urllib.error.URLError as exc:
                if attempt < retries:
                    time.sleep(1.0 * (2**attempt))
                    last_error = exc
                    continue
                raise RuntimeError(
                    f"Network error for {method} {url}: {exc}"
                ) from exc
        raise RuntimeError(f"Request failed: {last_error}")

    def _build_url(self, path: str, query: dict[str, str] | None) -> str:
        """Build absolute GitLab API URL."""
        if not path.startswith("/"):
            raise ValueError("path must start with '/'")
        base = f"{self._base_url}/api/v4{path}"
        if not query:
            return base
        qs = urllib.parse.urlencode(query)
        return f"{base}?{qs}"

    def _parse_json(self, response: _HttpResponse) -> Any:
        """Parse JSON body, allowing empty responses."""
        if not response.body:
            return None
        return json.loads(response.body.decode("utf-8"))
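

# For orientation only: a minimal, hypothetical use of the client above.
# The host and the project id 42 are made-up examples.
#
#   client = GitLabClient("https://gitlab.example.com", token="...")
#   schedules, headers = client.get_json("/projects/42/pipeline_schedules")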


def _retry_after_seconds(headers: Any) -> float | None:
    """Parse Retry-After header if present."""
    if headers is None:
        return None
    value = headers.get("Retry-After")
    if not value:
        return None
    try:
        return float(value)
    except ValueError:
        return None


def _http_error_message(exc: urllib.error.HTTPError) -> str:
    """Extract a useful error message from an HTTPError."""
    try:
        raw = exc.read()
    except Exception:
        raw = b""
    if not raw:
        return str(exc)
    try:
        decoded = raw.decode("utf-8")
    except UnicodeDecodeError:
        return raw[:200].decode("latin-1", errors="replace")
    try:
        parsed = json.loads(decoded)
    except json.JSONDecodeError:
        return decoded[:200]
    if isinstance(parsed, dict):
        message = parsed.get("message")
        if isinstance(message, str):
            return message
        if message is not None:
            return json.dumps(message, ensure_ascii=False)[:200]
    return decoded[:200]


def _require_env(name: str) -> str:
    """Read a required environment variable."""
    value = os.environ.get(name)
    if not value:
        raise RuntimeError(f"Missing environment variable: {name}")
    return value


def _paginate(
    client: GitLabClient,
    path: str,
    query: dict[str, str],
) -> list[Any]:
    """Fetch all pages for a GitLab list endpoint."""
    items: list[Any] = []
    page = 1
    while True:
        page_query = dict(query)
        page_query["page"] = str(page)
        payload, headers = client.get_json(path, query=page_query)
        if payload is None:
            break
        if not isinstance(payload, list):
            raise RuntimeError(f"Expected list response for {path}")
        items.extend(payload)
        # GitLab reports the next page number in the X-Next-Page header;
        # an empty value means the final page has been reached.
        next_page = headers.get("X-Next-Page")
        if not next_page:
            break
        page = int(next_page)
    return items


def _encode_group_path(namespace: str) -> str:
    """Encode a group full path for use in /groups/:id APIs."""
    return urllib.parse.quote(namespace, safe="")


def disable_schedules(
    client: GitLabClient,
    namespace: str,
    state_file: str,
    dry_run: bool,
) -> int:
    """Disable active pipeline schedules and record changes."""
    group_encoded = _encode_group_path(namespace)
    group, _ = client.get_json(f"/groups/{group_encoded}")
    if not isinstance(group, dict) or "id" not in group:
        raise RuntimeError("Failed to resolve namespace to a group")
    group_id = int(group["id"])
    projects = _paginate(
        client,
        f"/groups/{group_id}/projects",
        {
            "include_subgroups": "true",
            "with_shared": "false",
            "archived": "false",
            "per_page": "100",
        },
    )
    changed: list[dict[str, Any]] = []
    failures = 0
    projects_scanned = 0
    schedules_disabled = 0
    for project in projects:
        if not isinstance(project, dict):
            continue
        project_id = project.get("id")
        project_path = project.get("path_with_namespace")
        if not isinstance(project_id, int):
            continue
        projects_scanned += 1
        schedules = _paginate(
            client,
            f"/projects/{project_id}/pipeline_schedules",
            {"scope": "active", "per_page": "100"},
        )
        for schedule in schedules:
            if not isinstance(schedule, dict):
                continue
            schedule_id = schedule.get("id")
            active = schedule.get("active")
            if not isinstance(schedule_id, int):
                continue
            if active is False:
                continue
            description = schedule.get("description")
            ref = schedule.get("ref")
            if dry_run:
                print(
                    "Would disable schedule"
                    f" project_id={project_id}"
                    f" schedule_id={schedule_id}"
                    f" project={project_path}"
                )
                continue
            try:
                client.put_form_json(
                    f"/projects/{project_id}/pipeline_schedules/{schedule_id}",
                    {"active": "false"},
                )
            except Exception as exc:
                failures += 1
                print(
                    "Failed to disable schedule"
                    f" project_id={project_id}"
                    f" schedule_id={schedule_id}: {exc}",
                    file=sys.stderr,
                )
                continue
            schedules_disabled += 1
            changed.append(
                {
                    "project_id": project_id,
                    "schedule_id": schedule_id,
                    "project": project_path,
                    "description": description,
                    "ref": ref,
                }
            )
    if not dry_run:
        with Path(state_file).open("w", encoding="utf-8") as f:
            json.dump(changed, f, indent=2, sort_keys=True)
            f.write("\n")
    print(
        f"Projects scanned: {projects_scanned}; "
        f"Schedules disabled: {schedules_disabled}; "
        f"Failures: {failures}"
    )
    if dry_run:
        print("Dry-run: no changes were made.")
    else:
        print(f"State file written: {state_file}")
    return 1 if failures else 0


def enable_schedules(
    client: GitLabClient, state_file: str, dry_run: bool
) -> int:
    """Re-enable schedules recorded in a state file."""
    with Path(state_file).open(encoding="utf-8") as f:
        payload = json.load(f)
    if not isinstance(payload, list):
        raise RuntimeError("State file must contain a JSON list")
    failures = 0
    enabled = 0
    for entry in payload:
        if not isinstance(entry, dict):
            continue
        project_id = entry.get("project_id")
        schedule_id = entry.get("schedule_id")
        if not isinstance(project_id, int) or not isinstance(schedule_id, int):
            failures += 1
            print(
                f"Invalid entry in state file: {entry}",
                file=sys.stderr,
            )
            continue
        if dry_run:
            print(
                "Would enable schedule"
                f" project_id={project_id}"
                f" schedule_id={schedule_id}"
            )
            continue
        try:
            client.put_form_json(
                f"/projects/{project_id}/pipeline_schedules/{schedule_id}",
                {"active": "true"},
            )
        except Exception as exc:
            failures += 1
            print(
                "Failed to enable schedule"
                f" project_id={project_id}"
                f" schedule_id={schedule_id}: {exc}",
                file=sys.stderr,
            )
            continue
        enabled += 1
    print(f"Schedules enabled: {enabled}; Failures: {failures}")
    if dry_run:
        print("Dry-run: no changes were made.")
    return 1 if failures else 0


def _build_parser() -> argparse.ArgumentParser:
    """Build CLI argument parser."""
    parser = argparse.ArgumentParser(
        prog="disable-pipelines.py",
        description=(
            "Disable or restore GitLab pipeline schedules across a namespace."
        ),
    )
    subparsers = parser.add_subparsers(dest="command", required=True)
    disable_parser = subparsers.add_parser(
        "disable",
        help="Disable all active schedules in a namespace",
    )
    disable_parser.add_argument(
        "namespace",
        help="GitLab group full path, e.g. my-org/platform/team",
    )
    disable_parser.add_argument(
        "--state-file",
        default="disabled-schedules.json",
        help="Path to write the JSON state file",
    )
    disable_parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print actions without applying changes",
    )
    enable_parser = subparsers.add_parser(
        "enable",
        help="Re-enable schedules recorded in a state file",
    )
    enable_parser.add_argument(
        "state_file",
        help="JSON file created by the disable command",
    )
    enable_parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print actions without applying changes",
    )
    return parser


def main(argv: list[str]) -> int:
    """Program entry point."""
    parser = _build_parser()
    args = parser.parse_args(argv)
    base_url = _require_env("GITLAB_URL")
    token = _require_env("GITLAB_TOKEN")
    client = GitLabClient(base_url=base_url, token=token)
    if args.command == "disable":
        return disable_schedules(
            client,
            namespace=str(args.namespace),
            state_file=str(args.state_file),
            dry_run=bool(args.dry_run),
        )
    if args.command == "enable":
        return enable_schedules(
            client,
            state_file=str(args.state_file),
            dry_run=bool(args.dry_run),
        )
    raise RuntimeError("Unreachable")


if __name__ == "__main__":
    raise SystemExit(main(sys.argv[1:]))