diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 3ba13e0cec..0b2ccf668e 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1 +1,8 @@
blank_issues_enabled: false
+contact_links:
+ - name: Studio GitHub Discussions
+ url: https://github.com/learningequality/studio/discussions
+ about: Please ask general questions about contributing to Studio or report development server issues here.
+ - name: Learning Equality Community Forum
+ url: https://community.learningequality.org/
+ about: Ask and answer questions about Learning Equality's products and tools, share your experiences using Kolibri, and connect with users around the world.
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 663fdde54c..24349f8d83 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -7,19 +7,30 @@ updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
- interval: "daily"
+ interval: "weekly"
+ day: "wednesday"
time: "00:00"
# Maintain dependencies for Javascript
- package-ecosystem: "npm"
directory: "/"
schedule:
- interval: "daily"
+ interval: "weekly"
+ day: "wednesday"
time: "00:00"
+ groups:
+ babel:
+ patterns:
+ - "@babel/*"
# Maintain dependencies for Github Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
- interval: "daily"
+ interval: "weekly"
+ day: "wednesday"
time: "00:00"
+ groups:
+ github:
+ patterns:
+ - "actions/*"
diff --git a/.github/workflows/containerbuild.yml b/.github/workflows/containerbuild.yml
new file mode 100644
index 0000000000..361b0fad36
--- /dev/null
+++ b/.github/workflows/containerbuild.yml
@@ -0,0 +1,105 @@
+name: Container Build
+
+on:
+ push:
+ branches:
+ - unstable
+ - hotfixes
+ - master
+ tags:
+ - 'v*'
+ pull_request:
+
+jobs:
+ pre_postgres:
+ name: Path match check - postgres
+ runs-on: ubuntu-latest
+ # Map a step output to a job output
+ outputs:
+ should_skip: ${{ steps.skip_check.outputs.should_skip }}
+ steps:
+ - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v5
+ with:
+ skip_after_successful_duplicate: false
+ github_token: ${{ github.token }}
+ paths: '["docker/Dockerfile.postgres.dev", ".github/workflows/containerbuild.yml"]'
+
+ build_and_push_postgres:
+ name: Postgres - build and push Docker image to GitHub Container Registry
+ needs: pre_postgres
+ if: ${{ needs.pre_postgres.outputs.should_skip != 'true' }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout codebase
+ uses: actions/checkout@v4
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
+      - name: Log in to GitHub Container Registry
+ if: github.event_name != 'pull_request'
+ uses: docker/login-action@v3
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Extract metadata (tags, labels) for Docker
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ghcr.io/learningequality/postgres
+ env:
+ DOCKER_METADATA_ANNOTATIONS_LEVELS: manifest,index
+
+ - name: Build and push Docker image
+ uses: docker/build-push-action@v5
+ with:
+ context: ./docker
+ file: ./docker/Dockerfile.postgres.dev
+ platforms: linux/amd64,linux/arm64
+ push: ${{ github.event_name != 'pull_request' }}
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+ annotations: ${{ steps.meta.outputs.annotations }}
+
+ pre_nginx:
+ name: Path match check - nginx
+ runs-on: ubuntu-latest
+ # Map a step output to a job output
+ outputs:
+ should_skip: ${{ steps.skip_check.outputs.should_skip }}
+ steps:
+ - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v5
+ with:
+ skip_after_successful_duplicate: false
+ github_token: ${{ github.token }}
+ paths: '["k8s/images/nginx/*", ".github/workflows/containerbuild.yml"]'
+
+ build_nginx:
+ name: nginx - test build of nginx Docker image
+ needs: pre_nginx
+ if: ${{ needs.pre_nginx.outputs.should_skip != 'true' }}
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout codebase
+ uses: actions/checkout@v4
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v3
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+
+ - name: Build Docker image
+ uses: docker/build-push-action@v5
+ with:
+ context: ./
+ file: ./k8s/images/nginx/Dockerfile
+ platforms: linux/amd64
+ push: false
diff --git a/.github/workflows/deploytest.yml b/.github/workflows/deploytest.yml
index 8935b4f4c7..71b3b9296c 100644
--- a/.github/workflows/deploytest.yml
+++ b/.github/workflows/deploytest.yml
@@ -29,11 +29,11 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Use Node.js
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
node-version: '16.x'
- name: Cache Node.js modules
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
@@ -52,12 +52,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - name: Set up Python 3.9
- uses: actions/setup-python@v4
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
with:
- python-version: 3.9
+ python-version: '3.10'
- name: pip cache
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pyprod-${{ hashFiles('requirements.txt') }}
@@ -69,11 +69,11 @@ jobs:
pip install pip-tools
pip-sync requirements.txt
- name: Use Node.js
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
node-version: '16.x'
- name: Cache Node.js modules
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
diff --git a/.github/workflows/frontendlint.yml b/.github/workflows/frontendlint.yml
index 03bcc2f474..c28a80937a 100644
--- a/.github/workflows/frontendlint.yml
+++ b/.github/workflows/frontendlint.yml
@@ -29,11 +29,11 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Use Node.js
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
node-version: '16.x'
- name: Cache Node.js modules
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
@@ -49,7 +49,7 @@ jobs:
if: github.event.pull_request && github.event.pull_request.head.repo.full_name == github.repository
id: git-check
run: echo ::set-output name=modified::$(git diff-index --name-only HEAD)
- - uses: tibdex/github-app-token@v1
+ - uses: tibdex/github-app-token@v2
if: github.event.pull_request && github.event.pull_request.head.repo.full_name == github.repository && steps.git-check.outputs.modified != ''
id: generate-token
with:
diff --git a/.github/workflows/frontendtest.yml b/.github/workflows/frontendtest.yml
index fe5a2968a9..e83ac316d8 100644
--- a/.github/workflows/frontendtest.yml
+++ b/.github/workflows/frontendtest.yml
@@ -29,11 +29,11 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Use Node.js
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
with:
node-version: '16.x'
- name: Cache Node.js modules
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: '**/node_modules'
key: ${{ runner.OS }}-node-${{ hashFiles('**/yarn.lock') }}
diff --git a/.github/workflows/notify_team_new_comment.yml b/.github/workflows/notify_team_new_comment.yml
new file mode 100644
index 0000000000..6b6c1e21d7
--- /dev/null
+++ b/.github/workflows/notify_team_new_comment.yml
@@ -0,0 +1,35 @@
+name: Send a slack notification when a contributor comments on issue
+
+on:
+ issue_comment:
+ types: [created]
+
+jobs:
+ contributor_issue_comment:
+ name: Contributor issue comment
+
+ if: >-
+ ${{
+ !github.event.issue.pull_request &&
+ github.event.comment.author_association != 'MEMBER' &&
+ github.event.comment.author_association != 'OWNER'
+ }}
+
+ runs-on: ubuntu-latest
+ steps:
+ - name: Escape title double quotes
+ id: escape_title
+ env:
+ ISSUE_TITLE: ${{ github.event.issue.title }}
+ run: echo "ISSUE_TITLE=${ISSUE_TITLE//\"/\\\"}" >> "$GITHUB_OUTPUT"
+
+ - name: Send message to Slack channel
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+ SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
+ uses: slackapi/slack-github-action@v1.27.0
+ with:
+ payload: |
+ {
+ "text": "*[Studio] New comment on issue: <${{ github.event.issue.html_url }}#issuecomment-${{ github.event.comment.id }}|${{ steps.escape_title.outputs.ISSUE_TITLE }} by ${{ github.event.comment.user.login }}>*"
+ }
diff --git a/.github/workflows/pythontest.yml b/.github/workflows/pythontest.yml
index 5c7ca726ec..443e445b4e 100644
--- a/.github/workflows/pythontest.yml
+++ b/.github/workflows/pythontest.yml
@@ -70,12 +70,12 @@ jobs:
-v /tmp/minio_data:/data \
-v /tmp/minio_config:/root/.minio \
minio/minio server /data
- - name: Set up Python 3.9
- uses: actions/setup-python@v4
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
with:
- python-version: 3.9
+ python-version: '3.10'
- name: pip cache
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pytest-${{ hashFiles('requirements.txt', 'requirements-dev.txt') }}
diff --git a/.gitignore b/.gitignore
index 8d869357f8..64e2dc5733 100644
--- a/.gitignore
+++ b/.gitignore
@@ -128,7 +128,7 @@ webpack-stats\.json
storybook-static/
# i18n
-/contentcuration/locale/CSV_FILES/*
+/contentcuration/locale/**/LC_MESSAGES/*.csv
# pyenv
.python-version
diff --git a/.readthedocs.yml b/.readthedocs.yml
deleted file mode 100644
index 1c905e7dff..0000000000
--- a/.readthedocs.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-# .readthedocs.yml
-# Read the Docs configuration file
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
-
-# Required
-version: 2
-
-formats: all
-
-# Optionally set the version of Python and requirements required to build your docs
-python:
- version: 3.6
- install:
- - requirements: requirements.txt
- - requirements: requirements-docs.txt
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000..35d0e2c4c1
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+
+## How can I contribute?
+
+1. 📖 **Skim through the [Developer documentation](./docs/_index.md)** to understand where to refer later on.
+2. 💻 **Follow the [Local development instructions](./docs/local_dev_docker.md) to set up your development server.**
+3. 🔍 **Search for issues tagged as [help wanted](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22+no%3Aassignee) or [good first issue](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22+no%3Aassignee).**
+4. 🗣️ **Ask us for an assignment in the comments of an issue you've chosen.** Please request assignment of a reasonable amount of issues at a time. Once you finish your current issue or two, you are welcome to ask for more.
+
+**❓ Where to ask questions**
+
+- For anything development related, refer to the [Developer documentation](./docs/_index.md) at first. Some answers may already be there.
+- For questions related to a specific issue or assignment requests, use the corresponding issue's comments section.
+- Visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to ask about anything related to contributing or to troubleshoot development server issues.
+
+**👥 How to connect**
+
+- We encourage you to visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to connect with the Learning Equality team as well as with other contributors.
+- If you'd like to contribute on a regular basis, we are happy to invite you to our open-source community Slack channel. Get in touch with us at info@learningequality.org to receive an invitation.
+
+---
+
+🕐 Please allow us a few days to reply to your comments. If you don't hear from us within a week, reach out via [GitHub Discussions](https://github.com/learningequality/studio/discussions).
+
+As soon as you open a pull request, it may take us a week or two to review it as we're a small team. We appreciate your contribution and will provide feedback.
+
+---
+
+*Thank you for your interest in contributing! Learning Equality was founded by volunteers dedicated to helping make educational materials more accessible to those in need, and every contribution makes a difference.*
diff --git a/Makefile b/Makefile
index be4a0b7b51..619fcee41e 100644
--- a/Makefile
+++ b/Makefile
@@ -38,7 +38,7 @@ migrate:
# 4) Remove the management command from this `deploy-migrate` recipe
# 5) Repeat!
deploy-migrate:
- echo "Nothing to do here!"
+ python contentcuration/manage.py rectify_incorrect_contentnode_source_fields
contentnodegc:
python contentcuration/manage.py garbage_collect
@@ -136,7 +136,9 @@ dummyusers:
hascaptions:
python contentcuration/manage.py set_orm_based_has_captions
-export COMPOSE_PROJECT_NAME=studio_$(shell git rev-parse --abbrev-ref HEAD)
+BRANCH_NAME := $(shell git rev-parse --abbrev-ref HEAD | sed 's/[^a-zA-Z0-9_-]/-/g')
+
+export COMPOSE_PROJECT_NAME=studio_$(BRANCH_NAME)
purge-postgres: .docker/pgpass
-PGPASSFILE=.docker/pgpass dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost
diff --git a/README.md b/README.md
index aa0a9ad8d5..5f8b9f8427 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# Kolibri Studio
-[](https://github.com/learningequality/studio/actions/workflows/pythontest.yml) [](https://github.com/learningequality/studio/actions/workflows/frontendtest.yml) [](http://codecov.io/github/learningequality/studio?branch=develop])
+[](https://github.com/learningequality/studio/actions/workflows/pythontest.yml) [](https://github.com/learningequality/studio/actions/workflows/frontendtest.yml)
[Kolibri Studio](https://studio.learningequality.org) is a web application designed to deliver educational materials to [Kolibri](http://learningequality.org/kolibri/). It supports:
@@ -13,258 +13,31 @@ Kolibri Studio uses the [Django framework](https://www.djangoproject.com/) for t
If you are looking for help setting up custom content channels, uploading and organizing resources using Kolibri Studio, please refer to the [User Guide](https://kolibri-studio.readthedocs.io/en/latest/).
-## Local development instructions
-The following guide utilizes docker and docker-compose to run select services required for Studio to function. If you would rather install these services on your host, please follow the [host-setup guide](docs/host_services_setup.md).
+
+## How can I contribute?
-### Prerequisites
-Please install these prerequisites, or alternatives for setting up your local development environment:
-- [volta](https://docs.volta.sh/guide/getting-started) or a different node.js manager
-- [pyenv](https://kolibri-dev.readthedocs.io/en/develop/howtos/installing_pyenv.html) and [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv#installation)
-- [docker](https://docs.docker.com/install/) and [docker-compose](https://docs.docker.com/compose/install/)
+1. 📖 **Skim through the [Developer documentation](./docs/_index.md)** to understand where to refer later on.
+2. 💻 **Follow the [Local development instructions](./docs/local_dev_docker.md) to set up your development server.**
+3. 🔍 **Search for issues tagged as [help wanted](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22+no%3Aassignee) or [good first issue](https://github.com/learningequality/studio/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22+no%3Aassignee).**
+4. 🗣️ **Ask us for an assignment in the comments of an issue you've chosen.** Please request assignment of a reasonable amount of issues at a time. Once you finish your current issue or two, you are welcome to ask for more.
+**❓ Where to ask questions**
-### Build your python virtual environment
-To determine the preferred version of Python, you can check the `runtime.txt` file:
-```bash
-$ cat runtime.txt
-# This is the required version of Python to run Studio currently.
-# This is determined by the default Python 3 version that is installed
-# inside Ubuntu Bionic, which is used to build images for Studio.
-# We encode it here so that it can be picked up by Github's dependabot
-# to manage automated package upgrades.
-python-3.9.13
-```
-Use `pyenv` to install the version of Python listed in that file, and to also set up a virtual environment:
-```bash
-pyenv install 3.9.13
-pyenv virtualenv 3.9.13 studio-py3.9
-pyenv activate studio-py3.9
-```
-Now you may install Studio's Python dependencies:
-```bash
-pip install -r requirements.txt -r requirements-dev.txt
-```
-To deactivate the virtual environment, when you're finished developing on Studio for the time being:
-```bash
-pyenv deactivate
-```
+- For anything development related, refer to the [Developer documentation](./docs/_index.md) at first. Some answers may already be there.
+- For questions related to a specific issue or assignment requests, use the corresponding issue's comments section.
+- Visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to ask about anything related to contributing or to troubleshoot development server issues.
-#### A note about dependencies on Apple Silicon M1+
-If you run into an error with `pip install` related to the `grcpio` package, it is because it currently [does not support M1 with the version for `grcpio` Studio uses](https://github.com/grpc/grpc/issues/25082). In order to fix it, you will need to add the following environmental variables before running `pip install`:
-```bash
-export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1
-export GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1
-export CFLAGS="-I/opt/homebrew/opt/openssl/include"
-export LDFLAGS="-L/opt/homebrew/opt/openssl/lib"
-```
+**👥 How to connect**
-### Install frontend dependencies
-Install the version of node.js supported by Studio, and install `yarn` version 1.x:
-```bash
-volta install node@16
-volta install yarn@1
-```
-After installing `yarn`, you may now install frontend dependencies:
-```bash
-yarn install
-```
+- We encourage you to visit [GitHub Discussions](https://github.com/learningequality/studio/discussions) to connect with the Learning Equality team as well as with other contributors.
+- If you'd like to contribute on a regular basis, we are happy to invite you to our open-source community Slack channel. Get in touch with us at info@learningequality.org to receive an invitation.
-### Install and run services
+---
-Studio requires some background services to be running:
+🕐 Please allow us a few days to reply to your comments. If you don't hear from us within a week, reach out via [GitHub Discussions](https://github.com/learningequality/studio/discussions).
-* Minio - a local S3 storage emulation
-* PostgreSQL (postgres) - a relational database
-* Redis - a fast key/value store useful for caching
-* Celery - the task manager and executor, which relies on the Studio codebase
+As soon as you open a pull request, it may take us a week or two to review it as we're a small team. We appreciate your contribution and will provide feedback.
-Generally speaking, you'll want to open a separate terminal/terminal-tab to run the services. With docker and docker-compose installed, running the above services is as easy as:
-```bash
-make run-services
-```
+---
-The above command may take longer the first time it's run. It includes starting the `celery` workers, and the other dependent services through docker, which can be done separately with the following two commands:
-
-```bash
-make dcservicesup
-make devceleryworkers
-```
-
-To confirm that docker-based services are running, you should see three containers when executing `docker ps`. For example:
-
-```bash
-> docker ps
-CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
-e09c5c203b93 redis:6.0.9 "docker-entrypoint.sā¦" 51 seconds ago Up 49 seconds 0.0.0.0:6379->6379/tcp studio_vue-refactor_redis_1
-6164371efb6b minio/minio "minio server /data" 51 seconds ago Up 49 seconds 0.0.0.0:9000->9000/tcp studio_vue-refactor_minio_1
-c86bbfa3a59e postgres:12.10 "docker-entrypoint.sā¦" 51 seconds ago Up 49 seconds 0.0.0.0:5432->5432/tcp studio_vue-refactor_postgres_1
-```
-
-To stop the services, press Ctrl + C in the terminal where you ran `make run-services` (or `dcservicesup`). Once you've done that, you may run the following command to remove the docker containers (they will be recreated when you run `run-services` or `dcservicesup` again):
-```bash
-make dcservicesdown
-```
-
-### Initializing Studio
-With the services running, in a separate terminal/terminal-tab, we can now initialize the database for Studio development purposes. The command below will initialize the database tables, import constants, and a user account for development:
-```bash
-yarn run devsetup
-```
-
-### Running the development server
-With the services running, in a separate terminal/terminal-tab, and the database initialized, we can start the dev server:
-```bash
-yarn run devserver:hot # with Vue hot module reloading
-# or
-yarn run devserver # without hot module reloading
-```
-
-Either of the above commands will take a few moments to build the frontend. When it finishes, you can sign in with the account created by the `yarn run devsetup` command:
-- url: `http://localhost:8080/accounts/login/`
-- username: `a@a.com`
-- password: `a`
-
-### Running the celery service
-Studio uses `celery` for executing asynchronous tasks, which are integral to Studio's channel editing architecture. The celery service does not reload when there are Python changes like the Django devserver does, so it's often preferred to run it separately. If you are developing changes against a task or the celery configuration, you'll need to use `make dcservicesup` to run only the docker-based services.
-
-In a separate terminal/terminal-tab, run the following to start the service and press Ctrl + C to stop it:
-```bash
-make devceleryworkers
-```
-
-Stop and restart the above to reload your changes.
-
-## Adding or updating dependencies
-
-We use `pip-tools` to ensure all our dependencies use the same versions on all deployments.
-
-To add a dependency, add it to either `requirements.in` or `requirements-dev.in`, then
-run `pip-compile requirements[-dev|-docs].in` to generate the .txt file. Please make sure that
-both the `.in` and `.txt` file changes are part of the commit when updating dependencies.
-
-To update a dependency, use `pip-compile --upgrade-package [package-name] requirements[-dev|-docs].in`
-
-For more details, please see the [pip-tools docs on Github](https://github.com/jazzband/pip-tools).
-
-## Additional tools
-
-### Running tests
-
-With Studio's services running, you may run tests with the following commands:
-
-```bash
-# backend
-make test
-# frontend
-yarn run test
-```
-
-View [more testing tips](docs/running_tests.md)
-
-### Linting
-
-Front-end linting is run using:
-
-```bash
-yarn run lint-frontend
-```
-
-Some linting errors can be fixed automatically by running:
-
-```bash
-yarn run lint-frontend:format
-```
-
-Make sure you've set up pre-commit hooks as described above. This will ensure that linting is automatically run on staged changes before every commit.
-
-### Profiling and local production testing
-
-If you want to test the performance of your changes, you can start up a local server with settings closer to a production environment like so:
-
-```bash
-# build frontend dependencies
-yarn run build
-# run the server (no webpack)
-yarn run runserver
-# or for profiling production more closely
-yarn run runserver:prod-profiling
-```
-
-Once the local production server is running, you can also use Locust to test your changes under scenarios of high demand like so:
-
-```bash
-cd deploy/chaos/loadtest
-make timed_run
-make stop_slaves # mac: killall python
-```
-
-#### Profiling
-
-In case you need to profile the application to know which part of the code are more time consuming, there are two different profilers available to work in two different modes. Both will store the profiling output in a directory that's determined by the `PROFILE_DIR` env variable. If this variable is not set, the output files will be store in a folder called profiler inside the OS temp folder (`/tmp/profile` usually)
-Note that both profiling modes are incompatible: you can either use one or the other, but not both at the same time. In case the env variables are set for both modes, _All request profiling mode_ will be used.
-
-##### All requests profiling mode
-
-This mode will create interactive html files with all the profiling information for every request the Studio server receives. The name of the files will contain the total execution time, the endpoint name and a timestamp.
-
-To activate it an env variable called `PROFILE_STUDIO_FULL` must be set.
-
-Example of use:
-
-`PROFILE_STUDIO_FULL=y yarn runserver`
-
-Afterwards no further treatment of the generated files is needed. You can open directly the html files in your browser.
-
-##### Endpoint profiling mode
-
-When using the _all requests mode_ it's usual that the profile folder is soon full of information for requests that are not interesting for the developer, obscuring the files for specific endpoints.
-
-If an env variable called `PROFILE_STUDIO_FILTER` is used, the profiler will be executed only on the http requests containing the text stated by the variable.
-
-Example of use:
-
-`PROFILE_STUDIO_FILTER=edit yarn localprodserver`
-
-For this case, only html requests having the text _edit_ in their request path will be profiled. The profile folder will not have html files, but binary dump files (with the timestamp as filename) of the profiler information that can be later seen by different profiling tools (`snakeviz` that can be installed using pip is recommended). Also while the server is running, the ten most time consuming lines of code of the filtered request will be shown in the console where Studio has been launched.
-
-Example of snakeviz use:
-
-`snakeviz /tmp/profile/studio\:20200909161405011678.prof`
-
-will open the browser with an interactive diagram with all the profiling information
-
-### Storybook
-
-Storybook is a development environment for UI components. If this is your first encounter with this tool, you can check [this presentation](https://docs.google.com/presentation/d/10JL4C9buygWsTbT62Ym149Yh9zSR9nY_ZqFumBKUY0o/edit?usp=sharing) or [its website](https://storybook.js.org/). You are encouraged to use it any time you need to develop a new UI component. It is especially suitable for smaller to middle size components that represent basic UI building blocks.
-
-An example is worth a thousand words so please have a look at these simple [stories of an example component](./contentcuration/contentcuration/frontend/shared/views/details/DetailsRow.stories.js) to see how to write yours. For detailed information on writing stories you can [go through this tutorial](https://www.learnstorybook.com/intro-to-storybook/).
-
-You can also check [official addons](https://storybook.js.org/addons/).
-
-**Run development server**
-
-```bash
-yarn run storybook
-```
-
-With detailed webpack information (useful when debugging loaders, addons and similar):
-
-```bash
-yarn run storybook:debug
-```
-
-**Bundle**
-
-```bash
-yarn run storybook:build
-```
-
-The output is saved to *storybook-static/*.
-
-### Current usage notes
-
-We've decided not to push our stories to the codebase and keep them locally in the near future. Although this limits the number of advantages Storybook provides, it allows us to start using it as soon as possible without the need to agree on all conventions and it also gives the whole team enough time to test the development workflow so we can decide later if we want to adopt this tool in a larger scale.
-
-Taking into account the above-mentioned, all stories except of example *DetailsRow.stories.js* will be ignored by git as long as you use a naming convention for Storybook source files: *\*.stories.js*.
-
-Although we don't share stories at this point, Storybook is installed and configured in the codebase to prevent the need for everyone to configure everything locally. If you update Storybook Webpack settings, install a new plugin and similar, you are welcome to share such updates with other members of the team.
+*Thank you for your interest in contributing! Learning Equality was founded by volunteers dedicated to helping make educational materials more accessible to those in need, and every contribution makes a difference.*
diff --git a/bin/run_minio.py b/bin/run_minio.py
deleted file mode 100755
index 42adf31562..0000000000
--- a/bin/run_minio.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import pathlib
-import subprocess
-
-MINIO_RUN_TYPES = ["LOCAL", "GCS_PROXY"]
-
-MINIO_LOCAL_HOME_STORAGE = pathlib.Path("/app") / "contentworkshop_content"
-
-MINIO_CONFIG_DIR = MINIO_LOCAL_HOME_STORAGE / ".minio"
-
-GOOGLE_APPLICATION_CREDENTIALS_PATH = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
-
-GOOGLE_GCS_PROJECT_ID = os.getenv("GOOGLE_GCS_PROJECT_ID")
-
-
-if __name__ == "__main__":
-
- run_type = os.getenv("MINIO_RUN_TYPE")
-
- if run_type not in MINIO_RUN_TYPES:
- raise AssertionError("MINIO_RUN_TYPE must be one of {}".format(MINIO_RUN_TYPES))
-
- if run_type == "LOCAL":
- cmd = ["minio", "server", "-C", str(MINIO_CONFIG_DIR), str(MINIO_LOCAL_HOME_STORAGE)]
- elif run_type == "GCS_PROXY":
-
- if not os.path.exists(GOOGLE_APPLICATION_CREDENTIALS_PATH):
- raise AssertionError("the env var GOOGLE_APPLICATION_CREDENTIALS must be defined," " and pointing to a credentials file for your project.")
-
- if not GOOGLE_GCS_PROJECT_ID:
- raise AssertionError("$GOOGLE_GCS_PROJECT_ID must be defined with the project" " id where you store your objects.")
- cmd = ["minio", "gateway", "gcs", GOOGLE_GCS_PROJECT_ID]
- else:
- raise Exception("Unhandled run_type type: {}".format(run_type))
-
- subprocess.check_call(cmd)
-
-
diff --git a/cloudbuild-production.yaml b/cloudbuild-production.yaml
index f703827174..3ff333a67f 100644
--- a/cloudbuild-production.yaml
+++ b/cloudbuild-production.yaml
@@ -11,6 +11,7 @@ steps:
- -c
- >
docker build
      --build-arg COMMIT_SHA=$COMMIT_SHA
-f k8s/images/app/Dockerfile
--cache-from gcr.io/$PROJECT_ID/learningequality-studio-app:latest
-t gcr.io/$PROJECT_ID/learningequality-studio-app:$COMMIT_SHA
diff --git a/contentcuration/automation/tests/appnexus/__init__.py b/contentcuration/automation/tests/appnexus/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/contentcuration/automation/tests/appnexus/test_base.py b/contentcuration/automation/tests/appnexus/test_base.py
new file mode 100644
index 0000000000..7944e00e4f
--- /dev/null
+++ b/contentcuration/automation/tests/appnexus/test_base.py
@@ -0,0 +1,48 @@
+import pytest
+
+from automation.utils.appnexus.base import Adapter
+from automation.utils.appnexus.base import Backend
+
+
+class MockBackend(Backend):
+ def connect(self) -> None:
+ return super().connect()
+
+ def make_request(self, request):
+ return super().make_request(request)
+
+ @classmethod
+ def _create_instance(cls) -> 'MockBackend':
+ return cls()
+
+
+class MockAdapter(Adapter):
+ def mockoperation(self):
+ pass
+
+
+def test_backend_error():
+ with pytest.raises(NotImplementedError) as error:
+ Backend.get_instance()
+ assert "Subclasses should implement the creation of instance" in str(error.value)
+
+def test_backend_singleton():
+ b1, b2 = MockBackend.get_instance(), MockBackend.get_instance()
+ assert id(b1) == id(b2)
+
+
+def test_adapter_creation():
+ a = MockAdapter(backend=MockBackend)
+ assert isinstance(a, Adapter)
+
+
+def test_adapter_backend_default():
+ b = MockBackend()
+ adapter = Adapter(backend=b)
+ assert isinstance(adapter.backend, Backend)
+
+
+def test_adapter_backend_custom():
+ b = MockBackend()
+ a = Adapter(backend=b)
+ assert a.backend is b
diff --git a/contentcuration/automation/utils/appnexus/APILayer.md b/contentcuration/automation/utils/appnexus/APILayer.md
new file mode 100644
index 0000000000..4e82e5b3f3
--- /dev/null
+++ b/contentcuration/automation/utils/appnexus/APILayer.md
@@ -0,0 +1,161 @@
+## API Layer Documentation
+
+### Overview
+
+Within the `contentcuration` app in Studio, we want to build an API layer that acts as a communication bridge with different backends like Docker Images, Google Cloud Platform's Vertex AI, and VM instances, cloud storage services, etc. The goal is to make sure this API layer can work with these backends, regardless of where or how they do the job. As long as the input and output formats stay the same, this setup provides flexibility in choosing and using backend resources.
+
+### Description and outcomes
+
+The stand-alone deployed backend service(s) will not have direct access to `contentcuration` models or the database for that matter, so this API layer facilitates access to these resources by receiving and returning standardized requests and responses, irrespective of the backend interacted with.
+
+#### The Architecture
+
+
+
+The key components of this architecture are as follows:
+
+#### 1. Creating the Backend Interface
+
+The Backend class serves as an abstract interface that outlines the operations all backends must support. It implements the Singleton pattern to ensure that only one instance of each backend type can exist. The methods defined by the Backend class are:
+
+```python
+ABSTRACT CLASS Backend:
+ _instance = None # Private variable to hold the instance
+
+ ABSTRACT METHOD connect()
+ # Provides blueprint to connect
+ pass
+
+ ABSTRACT METHOD make_request(params)
+ # provides blueprint to make request
+ pass
+
+ ABSTRACT METHOD request(params)
+ # provides blueprint for the request object
+ pass
+
+ ABSTRACT METHOD response(params)
+ # provides blueprint for the response object
+ pass
+
+ CLASS METHOD get_instance(cls)
+ IF cls._instance is None:
+ cls._instance = cls._create_instance()
+ return cls._instance
+
+ CLASS METHOD _create_instance(cls)
+ raise NotImplementedError # concrete class must implement
+```
+
+Different backends can now be created by implementing the base `Backend` class:
+
+```python
+# Implement CONCRETE CLASS using ABSTRACT Backend class
+CLASS GCS IMPLEMENTS Backend:
+ METHOD make_request(request):
+ # make request to Google Cloud Storage services
+
+ METHOD connect(params):
+ # Implement the connect method for GCS
+
+ CLASS METHOD _create_instance(cls)
+ # initialize a GCS Backend instance
+
+CLASS ML IMPLEMENTS Backend:
+ METHOD make_request(request):
+ # make request to DeepLearning models hosted as service
+
+ METHOD connect(params):
+ # Implement the connect method for hosted ML service
+
+ CLASS METHOD _create_instance(cls)
+ # initialize a ML Backend instance
+
+CLASS OtherBackend IMPLEMENTS Backend:
+ ...
+ [you get the idea]
+```
+
+To create an instance of a backend, using the `ML` class as an example, use the `get_instance()` method:
+
+```python
+>>> backend = ML.get_instance()
+```
+
+To centralize the creation of `Backend` instances based on specific Django settings (e.g. dev vs. production environments), create a `BackendFactory` class. This should follow the Factory Design Pattern.
+
+```python
+# Factory to instantiate the Backend based on Django Settings
+CLASS BackendFactory:
+ METHOD create_backend(self, backend=None) -> Backend
+ IF backend:
+ return backend
+ ELSE:
+ # Create an Adapter instance based on Django settings
+ IF DjangoSettings is 'SomeSetting':
+ backend = GCS.get_instance() # Use of Singleton pattern
+ ELSE IF DjangoSettings is 'AnotherSetting':
+ backend = ML.get_instance()
+ ELSE
+ RAISE ValueError
+ # Return the created Backend instance
+ RETURN backend
+```
+The `BackendFactory`'s `create_backend` method optionally allows a `Backend` instance to be injected into the factory instead of relying solely on Django settings. This is particularly useful if we want to explicitly specify the backend to use.
+
+### Creating Adapter that accepts any Backend
+
+The **`Adapter`** class can be initialized with a `Backend` instance (optional), and provides a `make_request` method that forwards requests to the chosen `Backend`, while adhering to its specific `request` and `response` formats.
+
+```python
+CLASS Adapter:
+
+ METHOD __init__(self, backend(Optional) defaults None)
+ # Initialize the Backend with BackendFactory
+ backend_factory = BackendFactory()
+ SET backend = backend_factory.create_backend(backend)
+
+ METHOD request(self):
+ # something
+ return self.backend.request()
+
+ METHOD response(self):
+ # something
+ return self.backend.response()
+```
+
+With this `Adapter` class in place, we can create adapters that are able to interact with any backend we need.
+
+```python
+CLASS Recommendation INHERITS ADAPTER:
+ METHOD generateEmbeddings(self, request) -> Boolean
+ # [ Implementation ]
+
+ METHOD getRecommendation(self, request) -> Array
+ # [ Implementation ]
+
+CLASS Transcription INHERITS ADAPTER:
+ METHOD generateCaption(self, request) -> Array
+ # [ Implementation ]
+
+CLASS OtherAdapter INHERITS ADAPTER:
+ METHOD someOperation(self, request) -> Any
+ # Operation that any backend wants
+```
+
+Below is a sample use case, using the `ML` backend as an example:
+
+```python
+>>> backend = ML.get_instance()
+>>> adapter = Transcription(backend)
+```
+
+To access specific methods within the adapter:
+
+```python
+>>> adapter.generateCaption(...)
+```
+
+### Resources
+
+[OOP Design patterns](https://refactoring.guru/design-patterns/catalog)
diff --git a/contentcuration/automation/utils/appnexus/__init__.py b/contentcuration/automation/utils/appnexus/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/contentcuration/automation/utils/appnexus/base.py b/contentcuration/automation/utils/appnexus/base.py
new file mode 100644
index 0000000000..ab9e6d5096
--- /dev/null
+++ b/contentcuration/automation/utils/appnexus/base.py
@@ -0,0 +1,62 @@
+from abc import ABC
+from abc import abstractmethod
+from builtins import NotImplementedError
+
+
+class BackendRequest(object):
+ """ Class that should be inherited by specific backend for its requests"""
+ pass
+
+
+class BackendResponse(object):
+ """ Class that should be inherited by specific backend for its responses"""
+ pass
+
+
+class Backend(ABC):
+ """ An abstract base class for backend interfaces that also implements the singleton pattern """
+ _instance = None
+
+ def __new__(class_, *args, **kwargs):
+ if not isinstance(class_._instance, class_):
+ class_._instance = object.__new__(class_, *args, **kwargs)
+ return class_._instance
+
+ @abstractmethod
+ def connect(self) -> None:
+ """ Establishes a connection to the backend service. """
+ pass
+
+ @abstractmethod
+ def make_request(self, request) -> BackendResponse:
+ """ Make a request based on "request" """
+ pass
+
+ @classmethod
+ def get_instance(cls) -> 'Backend':
+ """ Returns existing instance, if not then create one. """
+ return cls._instance if cls._instance else cls._create_instance()
+
+ @classmethod
+ def _create_instance(cls) -> 'Backend':
+ """ Returns the instance after creating it. """
+ raise NotImplementedError("Subclasses should implement the creation of instance")
+
+
+class BackendFactory(ABC):
+ @abstractmethod
+ def create_backend(self) -> Backend:
+ """ Create a Backend instance from the given backend. """
+ pass
+
+
+class Adapter:
+ """
+ Base class for adapters that interact with a backend interface.
+
+ This class should be inherited by adapter classes that facilitate
+ interaction with different backend implementations.
+ """
+
+ def __init__(self, backend: Backend) -> None:
+ self.backend = backend
diff --git a/contentcuration/contentcuration/constants/feedback.py b/contentcuration/contentcuration/constants/feedback.py
new file mode 100644
index 0000000000..178c4a99ab
--- /dev/null
+++ b/contentcuration/contentcuration/constants/feedback.py
@@ -0,0 +1,8 @@
+FEEDBACK_TYPE_CHOICES = (
+ ('IMPORTED', 'Imported'),
+ ('REJECTED', 'Rejected'),
+ ('PREVIEWED', 'Previewed'),
+ ('SHOWMORE', 'Show More'),
+ ('IGNORED', 'Ignored'),
+ ('FLAGGED', 'Flagged'),
+)
diff --git a/contentcuration/contentcuration/forms.py b/contentcuration/contentcuration/forms.py
index d9dc781f61..8e9320d85f 100644
--- a/contentcuration/contentcuration/forms.py
+++ b/contentcuration/contentcuration/forms.py
@@ -1,5 +1,4 @@
import json
-from builtins import object
from django import forms
from django.conf import settings
@@ -7,6 +6,7 @@
from django.contrib.auth.forms import UserChangeForm
from django.contrib.auth.forms import UserCreationForm
from django.core import signing
+from django.core.exceptions import ValidationError
from django.db.models import Q
from django.template.loader import render_to_string
@@ -16,23 +16,16 @@
REGISTRATION_SALT = getattr(settings, 'REGISTRATION_SALT', 'registration')
-class ExtraFormMixin(object):
-
- def check_field(self, field, error):
- if not self.cleaned_data.get(field):
- self.errors[field] = self.error_class()
- self.add_error(field, error)
- return False
- return self.cleaned_data.get(field)
-
-
# LOGIN/REGISTRATION FORMS
#################################################################
-class RegistrationForm(UserCreationForm, ExtraFormMixin):
+class RegistrationForm(UserCreationForm):
+ CODE_ACCOUNT_ACTIVE = 'account_active'
+ CODE_ACCOUNT_INACTIVE = 'account_inactive'
+
first_name = forms.CharField(required=True)
last_name = forms.CharField(required=True)
- email = forms.CharField(required=True)
- password1 = forms.CharField(required=True)
+ email = forms.EmailField(required=True)
+ password1 = forms.CharField(required=True, min_length=8)
password2 = forms.CharField(required=True)
uses = forms.CharField(required=True)
other_use = forms.CharField(required=False)
@@ -45,22 +38,18 @@ class RegistrationForm(UserCreationForm, ExtraFormMixin):
locations = forms.CharField(required=True)
def clean_email(self):
- email = self.cleaned_data['email'].strip().lower()
- if User.objects.filter(Q(is_active=True) | Q(deleted=True), email__iexact=email).exists():
- raise UserWarning
+ # ensure email is lower case
+ email = self.cleaned_data["email"].strip().lower()
+ user_qs = User.objects.filter(email__iexact=email)
+ if user_qs.exists():
+ if user_qs.filter(Q(is_active=True) | Q(deleted=True)).exists():
+ raise ValidationError("Account already active", code=self.CODE_ACCOUNT_ACTIVE)
+ else:
+ raise ValidationError("Already registered.", code=self.CODE_ACCOUNT_INACTIVE)
return email
- def clean(self):
- super(RegistrationForm, self).clean()
-
- # Errors should be caught on the frontend
- # or a warning should be thrown if the account exists
- self.errors.clear()
- return self.cleaned_data
-
def save(self, commit=True):
- user = super(RegistrationForm, self).save(commit=commit)
- user.set_password(self.cleaned_data["password1"])
+ user = super(RegistrationForm, self).save(commit=False)
user.first_name = self.cleaned_data["first_name"]
user.last_name = self.cleaned_data["last_name"]
user.information = {
@@ -165,7 +154,7 @@ def save(self, user):
return user
-class StorageRequestForm(forms.Form, ExtraFormMixin):
+class StorageRequestForm(forms.Form):
# Nature of content
storage = forms.CharField(required=True)
kind = forms.CharField(required=True)
@@ -194,7 +183,7 @@ class Meta:
"audience", "import_count", "location", "uploading_for", "organization_type", "time_constraint", "message")
-class IssueReportForm(forms.Form, ExtraFormMixin):
+class IssueReportForm(forms.Form):
operating_system = forms.CharField(required=True)
browser = forms.CharField(required=True)
channel = forms.CharField(required=False)
@@ -204,7 +193,7 @@ class Meta:
fields = ("operating_system", "browser", "channel", "description")
-class DeleteAccountForm(forms.Form, ExtraFormMixin):
+class DeleteAccountForm(forms.Form):
email = forms.CharField(required=True)
def __init__(self, user, *args, **kwargs):
@@ -214,5 +203,5 @@ def __init__(self, user, *args, **kwargs):
def clean_email(self):
email = self.cleaned_data['email'].strip().lower()
if self.user.is_admin or self.user.email.lower() != self.cleaned_data['email']:
- raise UserWarning
+ raise ValidationError("Not allowed")
return email
diff --git a/contentcuration/contentcuration/frontend/RecommendedResourceCard/components/RecommendedResourceCard.vue b/contentcuration/contentcuration/frontend/RecommendedResourceCard/components/RecommendedResourceCard.vue
new file mode 100644
index 0000000000..4052c50a77
--- /dev/null
+++ b/contentcuration/contentcuration/frontend/RecommendedResourceCard/components/RecommendedResourceCard.vue
@@ -0,0 +1,79 @@
+
+
+
+
+
+
+ Practice
+
+
+
+
+
+
+
+ below title slot section for the KCard component:
+ below title slot section for the KCard component
+