Improved error processing (#1590)

daveoconnor
2025-01-10 13:19:11 -08:00
committed by GitHub
parent b8c04c42df
commit e2a2caebce
8 changed files with 58 additions and 22 deletions

View File

@@ -1,3 +1,5 @@
+import os
+
 import pytest
 import tempfile
 from PIL import Image
@@ -41,3 +43,16 @@ def pytest_collection_modifyitems(config, items):
     for item in items:
         if "asciidoctor" in item.keywords:
             item.add_marker(skip_asciidoctor)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def ensure_github_token_env_variable():
+    # I wanted to use pytest_env but skip_if_set=true only applies if the env var
+    # is not set at all, not if the env var is empty, so this is needed anyway.
+    VAR_NAME = "GITHUB_TOKEN"
+    VAR_DEFAULT_VALUE = "top-secret"
+    current_value = os.getenv(VAR_NAME)
+    if not current_value:
+        os.environ[VAR_NAME] = VAR_DEFAULT_VALUE
+        print(f"Env variable '{VAR_NAME}' not set. Forced to {VAR_DEFAULT_VALUE=}.")

View File

@@ -160,6 +160,15 @@ def get_s3_client():
     )
 
 
+def does_s3_key_exist(client, bucket_name, s3_key):
+    try:
+        client.head_object(Bucket=bucket_name, Key=s3_key.lstrip("/"))
+        return True
+    except ClientError:
+        logger.debug(f"{s3_key} does not exist in {bucket_name}")
+        return False
+
+
 def get_s3_keys(content_path, config_filename=None):
     """
     Get the S3 key for a given content path
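A brief usage sketch for the new helper, assuming ClientError is already imported from botocore.exceptions in this module; the bucket and key values are placeholders:

s3_client = get_s3_client()
bucket = "example-static-content-bucket"  # placeholder bucket name
key = "/release-notes/master/example.adoc"  # placeholder key
# head_object is cheaper than downloading the object just to learn it is missing.
if does_s3_key_exist(s3_client, bucket, key):
    data = get_file_data(s3_client, bucket, key)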

View File

@@ -62,6 +62,8 @@ class GithubAPIClient:
             "cmake",
             "more",
         ]
+        if not self.token:
+            raise ValueError("No GitHub token provided or set in environment.")
 
     def initialize_api(self) -> GhApi:
         """
@@ -311,9 +313,7 @@ class GithubAPIClient:
         # This usually happens because the library does not have a `meta/libraries.json`
         # in the requested tag. More likely to happen with older versions of libraries.
         except requests.exceptions.HTTPError:
-            self.logger.exception(
-                "get_library_metadata_failed", repo=repo_slug, url=url
-            )
+            self.logger.warning(f"get_library_metadata_failed {repo_slug=}, {url=}")
             return None
         else:
             return response.json()
@@ -357,7 +357,14 @@ class GithubAPIClient:
         repo_slug = self.repo_slug
         if not ref:
             ref = self.ref
-        return self.api.git.get_ref(owner=self.owner, repo=repo_slug, ref=ref)
+        try:
+            ref_response = self.api.git.get_ref(
+                owner=self.owner, repo=repo_slug, ref=ref
+            )
+        except OSError as e:
+            logger.warning("get_ref_failed", repo=repo_slug, ref=ref, exc_msg=str(e))
+            raise ValueError(f"Could not get ref for {repo_slug} and {ref}")
+        return ref_response
 
     def get_repo(self, repo_slug: str = None) -> dict:
         """

View File

@@ -14,7 +14,7 @@ DJANGO_DEBUG=1
 # Don't use this secret key in production obviously
 SECRET_KEY="top-secret"
-GITHUB_TOKEN="top-secret"
+GITHUB_TOKEN=
 # AWS_ACCESS_KEY_ID="changeme"
 # AWS_SECRET_ACCESS_KEY="changeme"

View File

@@ -126,3 +126,6 @@ alias shell := console
 @pip-compile-upgrade: ## Upgrade existing Python dependencies to their latest versions
     just pip-compile --upgrade
+
+@manage args:
+    docker compose run --rm web python manage.py {{ args }}
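The new manage recipe forwards a single management command to Django inside the web container, so running, for example, just manage migrate or just manage createsuperuser replaces the longer docker compose invocation.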

View File

@@ -74,9 +74,8 @@ def get_and_store_library_version_documentation_urls_for_version(version_pk):
             library_version.save()
         except LibraryVersion.DoesNotExist:
             logger.info(
-                "get_library_version_documentation_urls_version_does_not_exist",
-                library_name=library_name,
-                version_slug=version.slug,
+                f"get_library_version_documentation_urls_version_does_not_exist"
+                f"{library_name=} {version.slug=}",
             )
             continue
         except LibraryVersion.MultipleObjectsReturned:

View File

@@ -9,7 +9,7 @@ from jsoncomment import JsonComment
 from django.conf import settings
 from core.asciidoc import convert_adoc_to_html
-from core.boostrenderer import get_file_data, get_s3_client
+from core.boostrenderer import get_file_data, get_s3_client, does_s3_key_exist
 from core.htmlhelper import modernize_release_notes
 from core.models import RenderedContent
@@ -188,15 +188,16 @@ def get_release_notes_for_version_s3(version_pk):
     bucket_name = settings.STATIC_CONTENT_BUCKET_NAME
     primary_key = f"release-notes/master/{filename}.adoc"
-    response = get_file_data(s3_client, bucket_name, primary_key)
-    if not response:
-        # Some beta release notes end in _x.html instead of _0.html; try that.
-        fallback_filename = filename.rsplit("_", 1)[0] + "_x"
-        fallback_key = f"release-notes/master/{fallback_filename}.adoc"
-        response = get_file_data(s3_client, bucket_name, fallback_key)
-    if response:
-        return response["content"].decode()
-    return ""
+    fallback_key = f"release-notes/master/{filename.rsplit('_', 1)[0] + '_x'}.adoc"
+
+    response = None
+    if does_s3_key_exist(s3_client, bucket_name, primary_key):
+        response = get_file_data(s3_client, bucket_name, primary_key)
+    elif does_s3_key_exist(s3_client, bucket_name, fallback_key):
+        response = get_file_data(s3_client, bucket_name, fallback_key)
+    else:
+        logger.info(f"no release notes found for {filename=}")
+    return response["content"].decode() if response else ""
 
 
 def get_release_notes_for_version_github(version_pk):
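A worked example of the fallback key derivation, using a made-up filename: the trailing segment is swapped for _x, matching the beta release notes that end in _x instead of _0:

filename = "boost_1_87_0"  # hypothetical value
filename.rsplit("_", 1)[0] + "_x"  # -> "boost_1_87_x"
# Keys tried in order:
#   release-notes/master/boost_1_87_0.adoc   (primary_key)
#   release-notes/master/boost_1_87_x.adoc   (fallback_key)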

View File

@@ -75,7 +75,7 @@ def import_versions(
 def import_release_notes():
     """Imports release notes from the existing rendered
     release notes in the repository."""
-    for version in Version.objects.active():
+    for version in Version.objects.exclude(name__in=["master", "develop"]).active():
         store_release_notes_task.delay(str(version.pk))
     store_release_notes_in_progress_task.delay()
@@ -262,10 +262,12 @@ def import_library_versions(version_name, token=None, version_type="tag"):
     # Get the gitmodules file for the version, which contains library data
     # The master and develop branches are not tags, so we retrieve their data
     # from the heads/ namespace instead of tags/
-    if version_type == "tag":
-        ref = client.get_ref(ref=f"tags/{version_name}")
-    else:
-        ref = client.get_ref(ref=f"heads/{version_name}")
+    ref_s = f"tags/{version_name}" if version_type == "tag" else f"heads/{version_name}"
+    try:
+        ref = client.get_ref(ref=ref_s)
+    except ValueError:
+        logger.info(f"import_library_versions_invalid_ref {ref_s=}")
+        return
 
     raw_gitmodules = client.get_gitmodules(ref=ref)
     if not raw_gitmodules: