diff --git a/.envrc b/.envrc
new file mode 100644
index 00000000..e8627187
--- /dev/null
+++ b/.envrc
@@ -0,0 +1,8 @@
+# we have checks to account for first time setup
+if [ -f ".env" ]; then
+ dotenv
+fi
+if [ -f "$HOME/.config/nix/nix.conf" ] || [ -f "/etc/nix/nix.conf" ]; then
+ echo "Loading nix shell environment."
+ use flake
+fi
diff --git a/.gitignore b/.gitignore
index ff1a33c0..7940db74 100644
--- a/.gitignore
+++ b/.gitignore
@@ -147,7 +147,7 @@ media
python.log
# direnv
-.envrc
+.direnv
.local_env
.DS_Store
diff --git a/README.md b/README.md
index f20eb1c5..a00645d1 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ A Django based website that will power a new Boost website. See the [documentati
Links:
-- https://www.stage.boost.cppalliance.org/ - staging
+- https://stage.boost.org/ - staging
- https://www.boost.org/ - production
---
@@ -15,54 +15,66 @@ Links:
This project will use Python 3.13, Docker, and Docker Compose.
-Instructions to install those packages are included in [development_setup_notes.md](docs/development_setup_notes.md).
+There are two development setup options: Nix-based and native. The native setup doesn't require Nix to be installed. The Nix-based setup works in a similar way to a Python venv, with everything encapsulated for the project. Note: it is not a VM. It handles installation of all of the application's and development environment's dependencies automatically:
-**NOTE**: All of these various `docker compose` commands, along with other helpful
-developer utility commands, are codified in our `justfile` and can be ran with
-less typing.
+- The python version relevant to the project
+- awscli
+- gdk
+- just
+- opentofu
+- nodejs
+- yarn
+- asciidoctor
+- asciidoctor-boost gem
+- pre-commit
+- black
+- isort
+- pip-tools
-You will need to install `just`, by [following the documentation](https://just.systems/man/en/)
-**Environment Variables**: Copy file `env.template` to `.env` and adjust values to match your local environment. See [Environment Variables](docs/env_vars.md) for more information.
+1. Development System setup
+ 1. Give your ssh key to Sam so he can add it to the boost.cpp.al server.
+ 1. Basic Setup Options:
+ 1. [Nix-based](docs/development_setup_notes_nix.md)
+ 1. [Native](docs/development_setup_notes_native.md)
+ 1. [Initial Data Setup](docs/first_time_data_import.md)
+ 1. [Allauth Social Login Setup](docs/allauth_setup.md)
+1. System Concepts
+ 1. [GitHub Data Syncing](docs/syncing_data_with_github.md)
+ 1. [Rendered Content Caching](docs/caching_rendered_content.md)
+ 1. [Static Content from S3](docs/static_content.md)
+ 1. ['manage' Commands](docs/commands.md)
+ 1. [API](docs/api.md)
+ 1. [RSS Feeds](docs/rss_feeds.md)
+ 1. [Env Vars](docs/env_vars.md)
+ 1. [Calendar](docs/calendar.md)
+ 1. [News](docs/news.md)
+1. Instructional
+ 1. [Dependency Updates](docs/dependencies.md)
+ 1. [Release Reports](docs/release_reports.md)
+ 1. [User Management](docs/user_management.md)
+ 1. [Non-Dev Server Allauth Setup](docs/non-dev-server-allauth-setup.md)
+ 1. [Admin Features](docs/admin.md)
+ 1. [Mailing List Setup](docs/mailing_list.md)
-```shell
-$ cp env.template .env
-```
-
-**NOTE**: Double check that the exposed port assigned to the PostgreSQL
-container does not clash with a database or other server you have running
-locally.
-
-Then run:
+After going through the "Development System setup" steps above to create the Docker image, install dependencies, and start the services in `docker-compose.yml`, run:
```shell
# start our services (and build them if necessary)
$ docker compose up
# to create database migrations
-$ docker compose run --rm web python manage.py makemigrations
+$ just makemigrations
# to run database migrations
-$ docker compose run --rm web python manage.py migrate
+$ just migrate
# to create a superuser
-$ docker compose run --rm web python manage.py createsuperuser
+$ just manage createsuperuser
```
-This will create the Docker image, install dependencies, start the services
-defined in `docker-compose.yml`, and start the webserver.
-
styles.css is still missing in a local docker-compose environment. Steps to add it:
-```
-# One-time setup
-curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.0/install.sh | bash
-. ~/.bashrc
-nvm install 20
-nvm use 20
-npm install -g yarn
-```
-
```
# Each time - rebuild styles.css
yarn
@@ -90,13 +102,13 @@ $ docker compose down
To run the tests, execute:
```shell
-$ docker compose run --rm web pytest
+$ just test
```
or run:
```shell
-$ just test
+$ docker compose run --rm web pytest
```
## Yarn and Tailwind
@@ -125,23 +137,10 @@ currently generated by `docker compose build` and is included in the Docker imag
---
-## Generating Local Data
-
-To **add real Boost libraries and sync all the data from GitHub and S3**, set appropriate values in your new .env file according to [Environment Variables](docs/env_vars.md) for `GITHUB_TOKEN`, `STATIC_CONTENT_AWS_ACCESS_KEY_ID`, `STATIC_CONTENT_AWS_SECRET_ACCESS_KEY`, `STATIC_CONTENT_BUCKET_NAME`, `STATIC_CONTENT_REGION`, `STATIC_CONTENT_AWS_S3_ENDPOINT_URL` then run:
-
-```bash
-docker compose run --rm web python manage.py update_libraries --local
-```
-
-Those values can be gotten from another developer in the `#boost-website` Slack channel.
-
-The `--local` flag speeds up the command a lot by excluding the retrieval of data you generally don't need. For more information, see `update_libraries` in [Management Commands](docs/commands.md).
-
-Then as a superuser log into the admin interface, go to "Versions" and click on the "import new releases" button in the top right.
-
----
-
## Setting up Mailman/Hyperkitty locally
+
+Note: This is optional — it is not required for the website to function, and is included here for reference in case a need arises.
+
``shell
sudo apt-get install sassc
git clone git@gitlab.com:mailman/hyperkitty.git
@@ -152,7 +151,7 @@ pip install psycopg2-binary
``
change settings.py to use postgres database:
'ENGINE': 'django.db.backends.postgresql_psycopg2',
-Update database values in settings to use the same host, user, password, and the database name as in the .env file value for `HYPERKITTY_DATABASE_NAME`.
+Update database values in settings to use the same host, user, password, and the database name as in the .env file value for `HYPERKITTY_DATABASE_NAME` (`lists_production_web` by default).
run `django-admin migrate --pythonpath example_project --settings settings`
@@ -172,7 +171,7 @@ psql -U postgres -W hyperkitty_db < /lists_stage_web.staging-db1-2.2025-02-06-08
To work with mailinglist data locally, the django application expects to be
able to query a copy of the hyperkitty database from HYPERKITTY_DATABASE_NAME.
-Then, the `sync_mailinglist_stats` management command can be run.
+Then, the `just manage sync_mailinglist_stats` management command can be run.
## Deploying
@@ -186,7 +185,7 @@ TDB
## Pre-commit Hooks
-We use [pre-commit hooks](https://pre-commit.com/) to check code for style, syntax, and other issues. They help to maintain consistent code quality and style across the project, and prevent issues from being introduced into the codebase..
+We use [pre-commit hooks](https://pre-commit.com/) to check code for style, syntax, and other issues. They help to maintain consistent code quality and style across the project, and prevent issues from being introduced into the codebase.
| Pre-commit Hook | Description |
| --------------- | ----------- |
@@ -194,17 +193,6 @@ We use [pre-commit hooks](https://pre-commit.com/) to check code for style, synt
| [Ruff](https://github.com/charliermarsh/ruff) | Wrapper around `flake8` and `isort`, among other linters |
| [Djhtml](https://github.com/rtts/djhtml) | Auto-formats Django templates |
-### Setup and Usage
-
-| Description | Command |
-| ---- | ------- |
-| 1. Install the `pre-commit` package using `pip` | `pip install pre-commit` |
-| 2. Install our list of pre-commit hooks locally | `pre-commit install` |
-| 3. Run all hooks for changed files before commit | `pre-commit run` |
-| 4. Run specific hook before commit | `pre-commit run {hook}` |
-| 5. Run hooks for all files, even unchanged ones | `pre-commit run --all-files` |
-| 6. Commit without running pre-commit hooks | `git commit -m "Your commit message" --no-verify` |
-
Example commands for running specific hooks:
| Hook | Example |
diff --git a/docs/README.md b/docs/README.md
index 600ca72d..48485ef1 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -4,7 +4,7 @@
- [Boost Mailing List](./mailing_list.md) -Includes OAuth instructions
- [Caching and the `RenderedContent` model](./caching_rendered_content.md)
- [Dependency Management](./dependencies.md)
-- [Development Setup Notes](./development_setup_notes.md)
+- [Development Setup Notes](./development_setup_notes_native.md)
- [Environment Variables](./env_vars.md)
- [Events Calendar](./calendar.md)
- [Example Files](./examples/README.md) - Contains samples of `libraries.json`. `.gitmodules`, and other files that Boost data depends on
diff --git a/docs/allauth_setup.md b/docs/allauth_setup.md
new file mode 100644
index 00000000..ca4e847d
--- /dev/null
+++ b/docs/allauth_setup.md
@@ -0,0 +1,59 @@
+### Social Login with django-allauth
+
+Follow these instructions to use the social logins through django-allauth on your local machine.
+
+See https://testdriven.io/blog/django-social-auth/ for more information.
+
+#### Github
+- Go to https://github.com/settings/applications/new and add a new OAuth application
+- Set `http://localhost:8000` as the Homepage URL
+- Set `http://localhost:8000/accounts/github/login/callback/` as the Callback URL
+- Click whether you want to enable the device flow
+ -
+- On completion copy the Client ID and Client Secret to the `.env` file as values of `GITHUB_OAUTH_CLIENT_ID` and `GITHUB_OAUTH_CLIENT_SECRET`.
+- Run `direnv allow` and restart your docker containers.
+
+Setup should be complete and you should be able to see an option to "Use Github" on the sign up page.
+
+To test the flow including authorizing Github for the Boost account, log into your GitHub account settings and click **Applications** in the left menu. Find the "Boost" authorization and delete it. The next time you log into Boost with this GitHub account, you will have to re-authorize it.
+
+
+
+This setup process is not something that can currently be automated through terraform because of a lack of relevant Github API endpoints to create Oauth credentials.
+
+#### Google
+
+More detailed instructions at:
+
+https://docs.allauth.org/en/latest/socialaccount/providers/google.html
+
+1. Update the `.env` file with values for:
+ 1. `TF_VAR_google_cloud_email` (the email address of your Google Cloud account)
+ 2. `TF_VAR_google_organization_domain` (usually the domain of your Google Cloud account, e.g. "boost.org" if you will be using a @boost.org email address)
+ 3. `TF_VAR_google_cloud_project_name` (optional, default: localboostdev) - needs to change if destroyed and a setup is needed within 30 days
+2. Run `just development-tofu-init` to initialize tofu.
+3. Run `just development-tofu-plan` to confirm the planned changes.
+4. Run `just development-tofu-apply` to apply the changes.
+5. Go to https://console.developers.google.com/
+ 1. Search for the newly created project, named "Boost Development" (ID: localboostdev by default).
+ 2. Type "credentials" in the search input at the top of the page.
+ 3. Select "Credentials" under "APIs & Services".
+ 1. Click "+ CREATE CREDENTIALS"
+ 2. Select "OAuth Client ID"
+ 3. Select Application Type: "Web application"
+ 4. Name: "Boost Development" (arbitrary)
+ 5. For "Authorized Javascript Origins" use:`http://localhost:8000`
+ 6. For "Authorized Redirect URIs" use:
+ * `http://localhost:8000/accounts/google/login/callback/`
+ * `http://localhost:8000/accounts/google/login/callback/?flowName=GeneralOAuthFlow`
+ 7. Save
+6. From the page that's displayed, update the `.env` file with values for the following:
+ - `GOOGLE_OAUTH_CLIENT_ID` should be similar to "k235bn2b1l1(...)asdsk.apps.googleusercontent.com"
+ - `GOOGLE_OAUTH_CLIENT_SECRET` should be similar to "LAJACO(...)KLAI612ANAD"
+
+Point 5 above cannot be automated through terraform because of a lack of relevant Google Cloud API endpoints to create Oauth credentials.
+
+Setup should be complete and you should be able to see an option to "Use Google" on the sign up page.
+
+#### Additional Notes on allauth login flows:
+**Working locally**: If you need to run through the login flows multiple times, create a superuser so you can log into the admin. Then, log into the admin and delete your "Social Account" from the admin. This will test a fresh connection to GitHub for your logged-in GitHub user.
diff --git a/docs/development_setup_notes.md b/docs/development_setup_notes_native.md
similarity index 69%
rename from docs/development_setup_notes.md
rename to docs/development_setup_notes_native.md
index ecc9884c..e0fd4ad3 100644
--- a/docs/development_setup_notes.md
+++ b/docs/development_setup_notes_native.md
@@ -271,73 +271,25 @@ cd website-v2
cp env.template .env
```
-Edit the .env, adding AWS keys.
+Edit the .env, adding AWS keys, and adjust values to match your local environment. See [Environment Variables](docs/env_vars.md) for more information.
+
+**NOTE**: Double check that the exposed port assigned to the PostgreSQL
+container does not clash with a database or other server you have running
+locally.
+
+### Pre-commit Hooks Setup
+
+| Description | Command |
+| ---- | ------- |
+| 1. Install the `pre-commit` package using `pip` | `pip install pre-commit` |
+| 2. Install our list of pre-commit hooks locally | `pre-commit install` |
+| 3. Run all hooks for changed files before commit | `pre-commit run` |
+| 4. Run specific hook before commit | `pre-commit run {hook}` |
+| 5. Run hooks for all files, even unchanged ones | `pre-commit run --all-files` |
+| 6. Commit without running pre-commit hooks | `git commit -m "Your commit message" --no-verify` |
Continue to the instructions in the top-level README.md file.
-## Local Development
-
-### Social Login with django-allauth
-
-Follow these instructions to use the social logins through django-allauth on your local machine.
-
-See https://testdriven.io/blog/django-social-auth/ for more information.
-
-#### Github
-- Go to https://github.com/settings/applications/new and add a new OAuth application
-- Set `http://localhost:8000` as the Homepage URL
-- Set `http://localhost:8000/accounts/github/login/callback/` as the Callback URL
-- Click whether you want to enable the device flow
- -
-- On completion copy the Client ID and Client Secret to the `.env` file as values of `GITHUB_OAUTH_CLIENT_ID` and `GITHUB_OAUTH_CLIENT_SECRET`.
-- Run `direnv allow` and restart your docker containers.
-
-Setup should be complete and you should be able to see an option to "Use Github" on the sign up page.
-
-To test the flow including authorizing Github for the Boost account, log into your GitHub account settings and click **Applications** in the left menu. Find the "Boost" authorization and delete it. The next time you log into Boost with this GitHub account, you will have to re-authorize it.
-
-
-
-This setup process is not something that can currently be automated through terraform because of a lack of relevant Github API endpoints to create Oauth credentials.
-
-#### Google
-
-More detailed instructions at:
-
-https://docs.allauth.org/en/latest/socialaccount/providers/google.html
-
-1. Update the `.env` file with values for:
- 1. `TF_VAR_google_cloud_email` (the email address of your Google Cloud account)
- 2. `TF_VAR_google_organization_domain` (usually the domain of your Google Cloud account, e.g. "boost.org" if you will be using an @boost.org email address)
- 3. `TF_VAR_google_cloud_project_name` (optional, default: localboostdev) - needs to change if destroyed and a setup is needed within 30 days
-2. Run `just development-tofu-init` to initialize tofu.
-3. Run `just development-tofu-plan` to confirm the planned changes.
-4. Run `just development-tofu-apply` to apply the changes.
-5. Go to https://console.developers.google.com/
- 1. Search for the newly created project, named "Boost Development" (ID: localboostdev by default).
- 2. Type "credentials" in the search input at the top of the page.
- 3. Select "Credentials" under "APIs & Services".
- 1. Click "+ CREATE CREDENTIALS"
- 2. Select "OAuth Client ID"
- 3. Select Application Type: "Web application"
- 4. Name: "Boost Development" (arbitrary)
- 5. For "Authorized Javascript Origins" use:`http://localhost:8000`
- 6. For "Authorized Redirect URIs" use:
- * `http://localhost:8000/accounts/google/login/callback/`
- * `http://localhost:8000/accounts/google/login/callback/?flowName=GeneralOAuthFlow`
- 7. Save
-6. From the page that's displayed, update the `.env` file with values for the following:
- - `GOOGLE_OAUTH_CLIENT_ID` should be similar to "k235bn2b1l1(...)asdsk.apps.googleusercontent.com"
- - `GOOGLE_OAUTH_CLIENT_SECRET` should be similar to "LAJACO(...)KLAI612ANAD"
-7. Run `direnv allow` and restart your docker containers.
-
-Point 5 above can not be automated through terraform because of a lack of relevant Google Cloud API endpoints to create Oauth credentials.
-
-Setup should be complete and you should be able to see an option to "Use Google" on the sign up page.
-
-#### Additional Notes:
-**Working locally**: If you need to run through the login flows multiple times, create a superuser so you can log into the admin. Then, log into the admin and delete your "Social Account" from the admin. This will test a fresh connection to GitHub for your logged-in GitHub user.
-
### Debugging
For local development there is Django Debug Toolbar, and the option to set a debugger.
diff --git a/docs/development_setup_notes_nix.md b/docs/development_setup_notes_nix.md
new file mode 100644
index 00000000..1b5012fd
--- /dev/null
+++ b/docs/development_setup_notes_nix.md
@@ -0,0 +1,74 @@
+# Boost.org Website
+
+## Overview
+
+A Django based website that will power a new Boost website. See the [documentation](./README.md) for more information about maintaining this project.
+
+Links:
+
+- https://stage.boost.org/ - staging
+- https://www.boost.org/ - production
+
+---
+
+## Local Development Setup
+
+This project uses Python 3.13, Docker, and Docker Compose.
+
+This document describes how to set up a development environment using Nix, which is a package manager that allows for reproducible builds and development environments, like a better encapsulated declarative cross-platform Homebrew.
+
+For a basic rundown on Nix, this video could be useful https://www.youtube.com/watch?v=yQwW8dkuHqw
+
+1. Install the following according to the instructions for your platform if not already installed:
+ 1. Direnv - https://direnv.net/docs/installation.html (don't install OS packaged version, must be >= 2.35.0) and then configure your shell to add the hook as per the direnv docs.
+ 2. Docker Engine
+ * Linux - https://docs.docker.com/engine/install/
+ * MacOS - https://orbstack.dev/ or https://github.com/abiosoft/colima ?
+ * Windows - ?
+ 3. Just - https://just.systems/man/en/packages.html
+ 4. Nix - https://nixos.org/download/ (multi-user, y to all options)
+2. Ask Sam to provide permissions on the database backup drive to an email address associated with Google that you provide.
+3. Clone this https://github.com/boostorg/website-v2.git repository to your machine.
+4. cd into the repository directory.
+ * This should trigger a process which has you log in to your Google account to allow permissions for the production database load later. Use the email address provided to Sam in point 2.
+5. In a terminal run `just bootstrap-nix` in the root of the checked out repository to install the necessary development dependencies and generate the .env file.
+6. Update the generated .env file with the necessary environment variables. Where you can't retrieve these yourself, you can ask someone for some in #boost-website on the slack server at https://ccplang.slack.com. The minimum that must be set is:
+ * GITHUB_TOKEN - a personal access token for the GitHub API, from your profile
+ * STATIC_CONTENT_AWS_ACCESS_KEY_ID - ask for this
+ * STATIC_CONTENT_AWS_SECRET_ACCESS_KEY - ask for this
+7. Run `just setup` to build services, and build the JS and CSS assets. If docker fails with permissions errors, reboot your machine.
+8. Run `just load_production_data` to download live data from the backup server for the website and mailman/hyperkitty. That script can be run regularly to update data from the live servers. You will lose all local data.
+9. Run `docker compose up` to start the server.
+
+
+## Debugging
+For local development there is Django Debug Toolbar, and the option to set a debugger.
+
+In your env:
+- Django Debug Toolbar, enabled by default, can be disabled by setting DEBUG_TOOLBAR=False
+- IDE Debugging, disabled by default, can be enabled by uncommenting `PYTHONBREAKPOINT` in your .env file.
+
+### Set Up Pycharm
+You can set up your IDE with a new "Python Debug Server" configuration as:
+
+
+
+### Debugger Usage
+To use the debugger add `breakpoint()` on a line in the code before you want to start debugging and then add breakpoints by clicking on the gutter. The debugger will stop at these points; you can then step through and inspect the variables.
+
+## Troubleshooting
+
+### Docker
+Keep in mind if there are issues with docker that the host docker daemon on your machine and the docker daemon in the nix setup may not match. It's a good idea to keep both up to date.
+
+### Direnv
+When you switch to the directory you may see direnv exporting a bunch of environment variables as below.
+
+The installer configures direnv to suppress those but it's a recent configuration option, so may be worth checking for an update if you see them.
+
+## Disk space
+Should you find you're running short on disk space, to delete previous versioned store data you can run `nix-collect-garbage -d`. Reentering the directory will then reinstall all the current dependencies again. It's probably a good idea to run that periodically.
+
+```shell
+direnv: export +ALLOWED_HOSTS +AR +AS...
+```
diff --git a/docs/first_time_data_import.md b/docs/first_time_data_import.md
index f5cc8125..dff78883 100644
--- a/docs/first_time_data_import.md
+++ b/docs/first_time_data_import.md
@@ -1,10 +1,20 @@
# Populating the Database for the First Time
+- [Development Environments](#development-environments)
- [Deployed Environments](#deployed-environments)
- [Further Reading](#further-reading)
This document contains information about importing Boost Versions (also called Releases), Libraries, and the data associated with those objects. It is concerned with importing data in **deployed environments**, but at the bottom of the page there is a section on importing data for **local development**.
+## Development Environments
+
+Download the production data for the website and mailman/hyperkitty.
+```bash
+just load_production_data
+```
+
+That script can be run regularly to update data from the live servers. You will lose all local data.
+
## Deployed Environments
There are several steps to populating the database with historical Boost data, because we retrieve Boost data from multiple sources.
diff --git a/docs/non-dev-server-allauth-setup.md b/docs/non-dev-server-allauth-setup.md
index fbc6f1b6..0bc0abe2 100644
--- a/docs/non-dev-server-allauth-setup.md
+++ b/docs/non-dev-server-allauth-setup.md
@@ -1,6 +1,6 @@
# Production/Staging Server setup for allauth
-For development see [development_setup_notes.md](development_setup_notes.md).
+For development see [development_setup_notes_native.md](development_setup_notes_native.md).
For this setup adjustments will need to be made to the values as applicable for each server and service.
diff --git a/env.template b/env.template
index c2c0dbc6..1340d656 100644
--- a/env.template
+++ b/env.template
@@ -33,7 +33,7 @@ PROD_MEDIA_CONTENT_REGION=$STATIC_CONTENT_REGION
PROD_MEDIA_CONTENT_AWS_S3_ENDPOINT_URL=$STATIC_CONTENT_AWS_S3_ENDPOINT_URL
# Mailman database settings
-HYPERKITTY_DATABASE_NAME="hyperkitty_db"
+HYPERKITTY_DATABASE_NAME="lists_production_web"
DATABASE_URL="postgresql://postgres@db:5432/postgres"
DATABASE_TYPE="postgres"
DATABASE_CLASS="mailman.database.postgresql.PostgreSQLDatabase"
diff --git a/flake.lock b/flake.lock
new file mode 100644
index 00000000..e13c6442
--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,61 @@
+{
+ "nodes": {
+ "flake-utils": {
+ "inputs": {
+ "systems": "systems"
+ },
+ "locked": {
+ "lastModified": 1731533236,
+ "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+ "type": "github"
+ },
+ "original": {
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "type": "github"
+ }
+ },
+ "nixpkgs": {
+ "locked": {
+ "lastModified": 1736755442,
+ "narHash": "sha256-a3MMEY7i/wdF0gb7WFNTn6onzaiMOvwj7OerRVenA8o=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "ef56e777fedaa4da8c66a150081523c5de1e0171",
+ "type": "github"
+ },
+ "original": {
+ "owner": "NixOS",
+ "ref": "nixpkgs-unstable",
+ "repo": "nixpkgs",
+ "type": "github"
+ }
+ },
+ "root": {
+ "inputs": {
+ "flake-utils": "flake-utils",
+ "nixpkgs": "nixpkgs"
+ }
+ },
+ "systems": {
+ "locked": {
+ "lastModified": 1681028828,
+ "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+ "owner": "nix-systems",
+ "repo": "default",
+ "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-systems",
+ "repo": "default",
+ "type": "github"
+ }
+ }
+ },
+ "root": "root",
+ "version": 7
+}
diff --git a/flake.nix b/flake.nix
new file mode 100644
index 00000000..9ffc6744
--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,96 @@
+{
+ description = "Boost.org development environment.";
+
+ inputs = {
+ nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
+ flake-utils.url = "github:numtide/flake-utils";
+ };
+
+ outputs = { self, nixpkgs, flake-utils, ... }@inputs:
+ flake-utils.lib.eachDefaultSystem (system:
+ let
+ pkgs = import nixpkgs {
+ inherit system;
+ };
+ # https://nixos.wiki/wiki/Google_Cloud_SDK
+ gdk = pkgs.google-cloud-sdk.withExtraComponents( with pkgs.google-cloud-sdk.components; [
+ gke-gcloud-auth-plugin
+ ]);
+ # Install a Ruby gem from rubygems.org
+ asciidoctorBoostGem = pkgs.stdenv.mkDerivation rec {
+ pname = "asciidoctor-boost";
+ version = "0.1.7";
+ sha = "ce139448812a9848219ce4cdb521c83c16009406a9d16efbc90bb24e94a46c24";
+
+ src = pkgs.fetchurl {
+ url = "https://rubygems.org/downloads/${pname}-${version}.gem";
+ sha256 = "${sha}";
+ };
+ dontUnpack = true;
+ nativeBuildInputs = [ pkgs.ruby ];
+ buildPhase = "true"; # Nothing to compile.
+ installPhase = ''
+ # Create a temporary gem directory
+ mkdir -p $out
+ # Set GEM_HOME to install gems locally under $out.
+ export GEM_HOME=$out
+ # Install the gem into GEM_HOME.
+ ${pkgs.ruby}/bin/gem install ${src} --no-document --ignore-dependencies
+ '';
+ meta = {
+ description = "Asciidoctor Boost Ruby Gem installed from rubygems.org";
+ homepage = "https://rubygems.org/gems/asciidoctor-boost";
+ license = "BSL-1.0";
+ };
+ };
+
+ in {
+ devShells.default = pkgs.mkShell {
+ buildInputs = with pkgs; [
+ # general system
+ # e.g. this could contain docker client if we wanted that to be consistent,
+ # though we need the daemon on the host anyway so it's redundant
+ # general project
+ awscli
+ gdk
+ just
+ opentofu
+ # frontend
+ nodejs_22 # matches Dockerfile, due for upgrade?
+ yarn
+ # backend
+ asciidoctor
+ asciidoctorBoostGem
+ pre-commit
+ python313 # matches Dockerfile, due for upgrade?
+ python313.pkgs.black
+ python313.pkgs.isort
+ python313.pkgs.pip-tools
+ ];
+ # Host system installation workflow goes into the bootstrap justfile target.
+ # Project specific installation and execution workflow should go here.
+ shellHook = ''
+ if [ ! -f .git/hooks/pre-commit ]; then
+ pre-commit install
+ fi
+ if [ ! -d .venv ]; then
+ python3.13 -m venv .venv
+ . .venv/bin/activate
+ pip install -r requirements.txt -r requirements-dev.txt
+ else
+ . .venv/bin/activate
+ fi
+ if [ ! -f .env ]; then
+ cp env.template .env
+ echo ".env created, you should update its contents"
+ fi
+ # google cloud login
+ gcloud auth list --format="value(account)" | grep -q . || {
+ echo "Not logged in. Running gcloud auth login..."
+ gcloud auth login
+ }
+ '';
+ };
+ }
+ );
+}
diff --git a/justfile b/justfile
index 1c1b8c1e..a351e075 100644
--- a/justfile
+++ b/justfile
@@ -12,14 +12,44 @@ DJANGO_VERSION := "5.2"
# - https://github.blog/2015-06-30-scripts-to-rule-them-all/
# ----
-@bootstrap: ## installs/updates all dependencies
- #!/usr/bin/env bash
- set -euo pipefail
- if [ ! -f "{{ENV_FILE}}" ]; then
- echo "{{ENV_FILE}} created"
- cp env.template {{ENV_FILE}}
+@bootstrap-nix: ## installs/updates all dependencies
+ command -v direnv >/dev/null 2>&1 || { echo >&2 "Direnv is required but not installed. see: https://direnv.net/docs/installation.html - Aborting."; exit 1; }
+ command -v nix >/dev/null 2>&1 || { echo >&2 "Nix is required but not installed. see: https://nixos.org/download.html - Aborting."; exit 1; }
+ command -v just >/dev/null 2>&1 || { echo >&2 "Just is required but not installed. see: https://just.systems/man/en/packages.html - Aborting."; exit 1; }
+ command -v docker >/dev/null 2>&1 || { echo >&2 "Docker is required but not installed. see: docs for links - Aborting."; exit 1; }
+
+ shell_name=$(basename "$SHELL") && \
+ echo $shell_name && \
+ if [ "$shell_name" = "zsh" ] && command -v zsh >/dev/null; then \
+ zsh -i -c 'echo ${precmd_functions} | grep -q _direnv_hook' || { echo "❌ direnv hook is NOT installed in Zsh"; exit 1; }; \
+ elif ([ "$shell_name" = "pwsh" ] || [ "$shell_name" = "powershell" ]) && command -v "$shell_name" >/dev/null; then \
+ "$shell_name" -NoProfile -Command '$function:prompt.ToString() | grep -q direnv' || { echo "❌ direnv hook is NOT installed in PowerShell"; exit 1; }; \
+ else \
+ echo "ℹ️ Unsupported shell for checking direnv hook: $shell_name. Ensure you have the direnv shell hook eval set up correctly if there are problems."; \
fi
- docker compose --file {{COMPOSE_FILE}} build --force-rm
+
+ if [ ! -f $HOME/.config/direnv/direnv.toml ]; then \
+ mkdir -p $HOME/.config/direnv; \
+ printf "[global]\nhide_env_diff = true\nload_dotenv = true\n" > $HOME/.config/direnv/direnv.toml; \
+ fi
+ if [ ! -d $HOME/.config/nix ]; then \
+ mkdir -p $HOME/.config/nix; \
+ printf "experimental-features = nix-command flakes\n" > $HOME/.config/nix/nix.conf; \
+ fi
+ # check if the docker group exists, create if not
+ if [ ! $(getent group docker) ]; then \
+ echo "ℹ️ Adding docker group..."; \
+ sudo groupadd docker; \
+ fi
+
+ # check if user is in docker group, add if not
+ if [ $(id -Gn | grep -c docker) -eq 0 ]; then \
+ echo "ℹ️ Adding docker group"; \
+ sudo usermod -aG docker $USER; \
+ echo "ℹ️ Added docker user. Please close the shell and open a new one."; \
+ fi
+ echo "Bootstrapping complete, update your .env and run 'just setup'"
+ echo "If you have issues with docker permissions running just setup try restarting your machine."
@rebuild: ## rebuilds containers
docker compose kill
@@ -32,7 +62,7 @@ DJANGO_VERSION := "5.2"
@build: ## builds containers
docker compose pull
- DOCKER_BUILDKIT=1 docker compose build
+ docker compose build
@cibuild: ## invoked by continuous integration servers to run tests
python -m pytest
@@ -50,6 +80,8 @@ alias shell := console
@setup: ## sets up a project to be used for the first time
docker compose --file {{COMPOSE_FILE}} build --force-rm
docker compose --file docker-compose.yml run --rm web python manage.py migrate --noinput
+ npm install
+ npm run build
@test_pytest *args: ## runs pytest (optional: test file/pattern, -v for verbose, -vv for very verbose)
-docker compose run --rm -e DEBUG_TOOLBAR="False" web pytest -s --create-db {{ args }}
diff --git a/requirements.txt b/requirements.txt
index aad1a1d2..e465a2dd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -42,17 +42,17 @@ beautifulsoup4==4.14.2
# via
# -r ./requirements.in
# wagtail
-billiard==4.2.2
+billiard==4.2.4
# via celery
black==25.11.0
# via -r ./requirements.in
bleach==4.1.0
# via wagtail-markdown
-boto3==1.40.74
+boto3==1.42.0
# via
# -r ./requirements.in
# django-bakery
-botocore==1.40.74
+botocore==1.41.6
# via
# boto3
# s3transfer
@@ -98,7 +98,7 @@ colorama==0.4.6
# via interrogate
contourpy==1.3.3
# via matplotlib
-coverage[toml]==7.11.3
+coverage[toml]==7.12.0
# via pytest-cov
cryptography==46.0.3
# via
@@ -109,10 +109,10 @@ cycler==0.12.1
# via matplotlib
decorator==5.2.1
# via ipython
-diff-match-patch==20241021
- # via django-import-export
defusedxml==0.7.1
# via willow
+diff-match-patch==20241021
+ # via django-import-export
distlib==0.4.0
# via virtualenv
distro==1.9.0
@@ -152,7 +152,7 @@ django==5.2.8
# wagtail
django-admin-env-notice==1.0.1
# via -r ./requirements.in
-django-allauth[socialaccount]==65.13.0
+django-allauth[socialaccount]==65.13.1
# via -r ./requirements.in
django-anymail[mailgun]==13.1
# via -r ./requirements.in
@@ -180,7 +180,7 @@ django-health-check==3.20.0
# via -r ./requirements.in
django-imagekit==6.0.0
# via -r ./requirements.in
-django-import-export==4.3.13
+django-import-export==4.3.14
# via -r ./requirements.in
django-js-asset==3.1.2
# via django-mptt
@@ -228,7 +228,7 @@ et-xmlfile==2.0.0
# via openpyxl
executing==2.2.1
# via stack-data
-faker==38.0.0
+faker==38.2.0
# via -r ./requirements.in
fastcore==1.8.16
# via ghapi
@@ -334,7 +334,7 @@ oauthlib==3.3.1
# via
# django-allauth
# django-oauth-toolkit
-openai==2.8.0
+openai==2.8.1
# via -r ./requirements.in
openpyxl==3.1.5
# via wagtail
@@ -401,7 +401,7 @@ pycparser==2.23
# via cffi
pycryptodome==3.23.0
# via minio
-pydantic==2.12.4
+pydantic==2.12.5
# via
# algoliasearch
# openai
@@ -464,7 +464,7 @@ requests==2.32.5
# wagtail
responses==0.25.8
# via -r ./requirements.in
-s3transfer==0.14.0
+s3transfer==0.16.0
# via boto3
setuptools==80.9.0
# via fs
@@ -474,7 +474,7 @@ six==1.17.0
# django-bakery
# fs
# python-dateutil
-slack-sdk==3.38.0
+slack-sdk==3.39.0
# via -r ./requirements.in
sniffio==1.3.1
# via
@@ -532,7 +532,7 @@ urllib3==2.5.0
# minio
# requests
# responses
-uv==0.9.9
+uv==0.9.14
# via -r ./requirements.in
vine==5.1.0
# via
@@ -541,7 +541,7 @@ vine==5.1.0
# kombu
virtualenv==20.35.4
# via pre-commit
-wagtail==7.2
+wagtail==7.2.1
# via
# -r ./requirements.in
# wagtail-markdown
@@ -563,5 +563,5 @@ yarl==1.22.0
# via aiohttp
zope-event==6.1
# via gevent
-zope-interface==8.1
+zope-interface==8.1.1
# via gevent
diff --git a/scripts/load_production_data.sh b/scripts/load_production_data.sh
index d9a4d725..1a02fdfc 100755
--- a/scripts/load_production_data.sh
+++ b/scripts/load_production_data.sh
@@ -11,7 +11,7 @@ set -eu
#
# READ IN COMMAND-LINE OPTIONS
-TEMP=$(getopt -o h:: --long help::,lists::,only-lists:: -- "$@")
+TEMP=$(getopt -o h:: --long help::,no-web::,no-lists:: -- "$@")
eval set -- "$TEMP"
# extract options and their arguments into variables.
@@ -19,14 +19,14 @@ while true ; do
case "$1" in
-h|--help)
helpmessage="""
-usage: load_production_data.sh [-h] [--lists] [--only-lists]
+usage: load_production_data.sh [-h] [--no-web] [--no-lists]
-Load production data. By default this will import the main website database.
+Load production data. By default this will import both the main website database and mailing list databases.
optional arguments:
-h, --help Show this help message and exit
- --lists Import mailing list dbs also.
- --only-lists Import mailing list database and not the main web database.
+ --no-web Skip importing the main website database.
+ --no-lists Skip importing mailing list databases.
"""
echo ""
@@ -34,10 +34,10 @@ optional arguments:
echo ""
exit 0
;;
- --lists)
- lists_option="yes" ; shift 2 ;;
- --only-lists)
- lists_option="yes" ; skip_web_option="yes" ; shift 2 ;;
+ --no-web)
+ skip_web_option="yes" ; shift 2 ;;
+ --no-lists)
+ skip_lists_option="yes" ; shift 2 ;;
--) shift ; break ;;
*) echo "Internal error!" ; exit 1 ;;
esac
@@ -49,11 +49,12 @@ source .env
download_media_file() {
# download all files from the PROD_MEDIA_CONTENT bucket and copy to Docker container
- [ -z "$PROD_MEDIA_CONTENT_AWS_ACCESS_KEY_ID" ] && {
+ # todo: remove the changeme check and remove 'changeme' as the default, use nothing instead
+ [[ -z "$PROD_MEDIA_CONTENT_AWS_ACCESS_KEY_ID" || "$PROD_MEDIA_CONTENT_AWS_ACCESS_KEY_ID" == "changeme" ]] && {
echo "Error: PROD_MEDIA_CONTENT_AWS_ACCESS_KEY_ID not set in .env";
return 1;
}
- [ -z "$PROD_MEDIA_CONTENT_AWS_SECRET_ACCESS_KEY" ] && {
+ [[ -z "$PROD_MEDIA_CONTENT_AWS_SECRET_ACCESS_KEY" || "$PROD_MEDIA_CONTENT_AWS_SECRET_ACCESS_KEY" == "changeme" ]] && {
echo "Error: PROD_MEDIA_CONTENT_AWS_SECRET_ACCESS_KEY not set in .env";
return 1;
}
@@ -187,7 +188,7 @@ download_latest_db_dump() {
echo "Deleting all rows from socialaccount_socialapp table and setting fake passwords..."
docker compose exec web bash -c "./manage.py shell -c 'from allauth.socialaccount.models import SocialApp; SocialApp.objects.all().delete()'"
just manage "set_fake_passwords --password=test"
- docker compose exec web bash -c "DJANGO_SUPERUSER_USERNAME=superadmin DJANGO_SUPERUSER_EMAIL=superadmin@boost.org DJANGO_SUPERUSER_PASSWORD=foobarone ./manage.py createsuperuser --noinput" || true
+ echo 'from django.contrib.auth import get_user_model; u=get_user_model().objects.get(email="superadmin@boost.org"); u.set_password("foobarone"); u.save()' | docker compose exec -T web python manage.py shell || echo "Warning: could not reset superadmin password (user may not exist in the imported dump)"
echo "Database restored successfully from $DUMP_FILENAME"
return 0
@@ -200,7 +201,7 @@ if [ "${skip_web_option:-}" != "yes" ]; then
}
fi
-if [ "${lists_option:-}" = "yes" ]; then
+if [ "${skip_lists_option:-}" != "yes" ]; then
download_latest_db_dump lists_web_db || {
echo "Failed to download and restore latest lists_web_db dump";
exit 1;