Skip to content

Commit 2bacb01

Browse files
authored
feat(python-examples): Python S3 examples for both containers and functions (#72)
* Clean up python API S3 example * Add Python container example * Add link to README * Add 200 response on GET * Typo * Review comments
1 parent 400fa5f commit 2bacb01

File tree

16 files changed

+281
-47
lines changed

16 files changed

+281
-47
lines changed

.gitignore

+5
Original file line numberDiff line numberDiff line change
@@ -16,3 +16,8 @@ node_modules/
1616

1717
# Python
1818
venv/
19+
__pycache__/
20+
21+
# Python API framework
22+
package/
23+
.scw

README.md

+1
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,7 @@ Table of Contents:
6666
| **[NGINX CORS Private](containers/nginx-cors-private-python/README.md)** <br/> An NGINX proxy to allow CORS requests to a private container. | Python Flask | [Terraform] |
6767
| **[NGINX hello world](containers/nginx-hello-world/README.md)** <br/> A minimal example running the base NGINX image in a serverless container. | N/A | [Serverless Framework] |
6868
| **[Python hello world](containers/python-hello-world/README.md)** <br/> A minimal example running a Flask HTTP server in a serverless container. | N/A | [Serverless Framework] |
69+
| **[Python S3 upload](containers/python-s3-upload/README.md)** <br/> A Python + Flask HTTP server that receives file uploads and writes them to S3. | N/A | [Terraform] |
6970
| **[Terraform NGINX hello world](containers/terraform-nginx-hello-world/README.md)** <br/> A minimal example running the base NGINX image in a serverless container deployed with Terraform. | N/A | [Terraform] |
7071
| **[Triggers with Terraform](containers/terraform-triggers/README.md)** <br/> Configuring two SQS triggers, used to trigger two containers, one public, one private. | N/A | [Terraform] |
7172

containers/python-s3-upload/README.md

+48
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
# Container used to upload files to S3
2+
3+
This container does the following:
4+
5+
* Read a file from an HTTP request form
6+
* Store the file in S3
7+
8+
## Requirements
9+
10+
- You have an account and are logged into the [Scaleway console](https://console.scaleway.com)
11+
- You have created an API key in the [console](https://console.scaleway.com/iam/api-keys), with at least the `ObjectStorageFullAccess`, `ContainerRegistryFullAccess`, and `FunctionsFullAccess` permissions, plus access to the relevant project for Object Storage
12+
- You have [Terraform](https://registry.terraform.io/providers/scaleway/scaleway/latest/docs) installed on your machine
13+
- You have logged in to the Scaleway Container Registry (`scw registry login`)
14+
15+
## Deploy on Scaleway
16+
17+
First you need to set the following environment variables:
18+
19+
```bash
20+
export TF_VAR_access_key=<your API access key>
21+
export TF_VAR_secret_key=<your API secret key>
22+
export TF_VAR_project_id=<your project id>
23+
```
24+
25+
Deployment can be done by running:
26+
27+
```bash
28+
terraform init
29+
30+
terraform plan
31+
32+
terraform apply
33+
```
34+
35+
You can then query your function by running:
36+
37+
```bash
38+
# Upload the requirements file to the bucket
39+
curl -F file=@requirements.txt $(terraform output -raw endpoint)
40+
```
41+
42+
You can get the bucket name with:
43+
44+
```bash
45+
terraform output -raw bucket_name
46+
```
47+
48+
You should then see the `requirements.txt` file uploaded to your bucket.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
# Image for the Flask S3-upload container example.
FROM python:3.10

WORKDIR /app

# Install dependencies first so this layer is cached across code changes.
RUN pip3 install --upgrade pip
COPY requirements.txt .
RUN pip3 install -r requirements.txt --target .

# Copy the application last; only this layer rebuilds on code edits.
COPY app.py .

CMD [ "python3", "./app.py" ]
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
"""Flask HTTP server that stores uploaded files in Scaleway Object Storage."""
import logging
import os

import boto3
from flask import Flask, request

# S3-compatible Object Storage endpoint for the fr-par region.
REGION = "fr-par"
S3_URL = "https://s3.fr-par.scw.cloud"

# Credentials and target bucket are injected through the container's
# environment variables (see the Terraform configuration).
SCW_ACCESS_KEY = os.environ["ACCESS_KEY"]
SCW_SECRET_KEY = os.environ["SECRET_KEY"]
BUCKET_NAME = os.environ["BUCKET_NAME"]

logging.basicConfig(level=logging.INFO)

app = Flask(__name__)
17+
18+
19+
@app.route("/", methods=["GET"])
def hello():
    """Health-check endpoint confirming the container is up."""
    greeting = "Hello from the container!"
    return {"statusCode": 200, "body": greeting}
25+
26+
27+
@app.route("/", methods=["POST"])
def upload():
    """Read a file from the request form and store it in the S3 bucket.

    Expects a multipart form with a ``file`` field. Returns a 200 dict on
    success, or a 400 response if no usable file was provided.
    """
    uploaded_file = request.files.get("file")
    # Guard against a missing field or an empty filename (e.g. an empty
    # file input): without this, a missing key aborts the request and an
    # empty filename would attempt an S3 put with an empty object key.
    if uploaded_file is None or not uploaded_file.filename:
        return {
            "statusCode": 400,
            "body": "No file provided in the 'file' form field",
        }, 400

    s3 = boto3.client(
        "s3",
        region_name=REGION,
        use_ssl=True,
        endpoint_url=S3_URL,
        aws_access_key_id=SCW_ACCESS_KEY,
        aws_secret_access_key=SCW_SECRET_KEY,
    )

    file_body = uploaded_file.read()

    logging.info(f"Uploading to {BUCKET_NAME}/{uploaded_file.filename}")

    s3.put_object(Key=uploaded_file.filename, Bucket=BUCKET_NAME, Body=file_body)

    return {
        "statusCode": 200,
        "body": f"Successfully uploaded {uploaded_file.filename} to bucket!",
    }
49+
50+
51+
if __name__ == "__main__":
    # Bind to all interfaces on the port declared in the Terraform container
    # config. Debug mode is deliberately disabled: the Werkzeug interactive
    # debugger must never be exposed from a deployed container, as it allows
    # arbitrary code execution.
    app.run(host="0.0.0.0", port=8080)
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
boto3==1.34.30
2+
chardet==4.0.0
3+
Flask==2.2.2
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
# Serverless Containers namespace grouping this example's containers.
# NOTE: labels are quoted to match `terraform fmt` conventions.
resource "scaleway_container_namespace" "main" {
  name = "python-s3-example"
}

# The container running the Flask uploader built from ../container.
resource "scaleway_container" "main" {
  name           = "python-s3-example"
  description    = "S3 file uploader"
  namespace_id   = scaleway_container_namespace.main.id
  registry_image = docker_image.main.name
  port           = 8080
  cpu_limit      = 1000
  memory_limit   = 1024
  min_scale      = 0
  max_scale      = 1
  privacy        = "public"
  deploy         = true

  environment_variables = {
    "BUCKET_NAME" = scaleway_object_bucket.main.name
  }

  # Credentials go in as secrets so they are not shown in plain text.
  secret_environment_variables = {
    "ACCESS_KEY" = var.access_key
    "SECRET_KEY" = var.secret_key
  }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
# Container Registry namespace; the random suffix keeps the name unique.
resource "scaleway_registry_namespace" "main" {
  name       = "s3-example-${random_string.suffix.result}"
  region     = "fr-par"
  project_id = var.project_id
}

# Build the container image locally from ../container, tagged for the
# Scaleway registry.
resource "docker_image" "main" {
  name = "${scaleway_registry_namespace.main.endpoint}/s3-example:${var.image_version}"

  build {
    context = "${path.cwd}/../container"
  }

  # The docker provider only builds the image here; push it explicitly so
  # the container resource can pull it.
  provisioner "local-exec" {
    command = "docker push ${docker_image.main.name}"
  }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
output "bucket_name" {
  description = "Name of the Object Storage bucket that receives the uploads"
  value       = scaleway_object_bucket.main.name
}

output "endpoint" {
  description = "Public domain name of the deployed container"
  value       = scaleway_container.main.domain_name
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
# Scaleway provider, authenticated with the API key passed via TF_VAR_* vars.
provider "scaleway" {
  zone       = "fr-par-1"
  region     = "fr-par"
  access_key = var.access_key
  secret_key = var.secret_key
  project_id = var.project_id
}

# Local Docker daemon, logged in against the Scaleway Container Registry.
provider "docker" {
  host = "unix:///var/run/docker.sock"

  registry_auth {
    address = scaleway_registry_namespace.main.endpoint
    # Scaleway registries accept any username; the secret key is the password.
    username = "nologin"
    password = var.secret_key
  }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Bucket that receives the uploaded files; the random suffix avoids
# collisions, since bucket names must be globally unique.
resource "scaleway_object_bucket" "main" {
  name = "python-s3-example-${random_string.suffix.result}"
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
variable "access_key" {
  description = "Scaleway API access key"
  type        = string
  # Marked sensitive so the value is redacted from plan/apply output.
  sensitive = true
}

variable "secret_key" {
  description = "Scaleway API secret key"
  type        = string
  sensitive   = true
}

variable "project_id" {
  description = "Scaleway project ID to deploy the resources into"
  type        = string
}

variable "image_version" {
  description = "Tag used for the container image"
  type        = string
  default     = "0.0.2"
}

# Random suffix shared by resources that need globally-unique names
# (registry namespace, bucket).
resource "random_string" "suffix" {
  length  = 8
  upper   = false
  special = false
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# Provider requirements: the Scaleway provider plus the kreuzwerker Docker
# provider used to build and push the container image.
terraform {
  required_providers {
    scaleway = {
      source = "scaleway/scaleway"
    }
    docker = {
      source  = "kreuzwerker/docker"
      version = "3.0.2"
    }
  }

  required_version = ">= 0.13"
}

functions/python-upload-file-s3-multipart/README.md

+40-19
Original file line numberDiff line numberDiff line change
@@ -3,42 +3,63 @@
33
This function does the following steps:
44

55
* Read a file from an HTTP request form
6-
* Send the file to long-term storage with Glacier for S3
6+
* Store the file in S3
77

88
## Requirements
99

10-
This example uses the [Python API Framework](https://github.com/scaleway/serverless-api-project) to deploy the function.
10+
This example uses the [Python API Framework](https://github.com/scaleway/serverless-api-framework-python) to build and deploy the function.
1111

12-
If needed, create a bucket and provide the following variables in your environment:
12+
First you need to:
1313

14-
```env
15-
export SCW_ACCESS_KEY =
16-
export SCW_SECRET_KEY =
17-
export BUCKET_NAME =
18-
```
14+
- Create an API key in the [console](https://console.scaleway.com/iam/api-keys), with at least the `ObjectStorageFullAccess` and `FunctionsFullAccess` permissions, and access to the relevant project for Object Storage access
15+
- Get the access key and secret key for this API key
16+
- Get your project ID
17+
- Create an S3 bucket
1918

20-
## Running
19+
You then need to set the following environment variables:
2120

22-
### Running locally
21+
```bash
22+
export SCW_ACCESS_KEY=<your access key>
23+
export SCW_SECRET_KEY=<your secret key>
24+
export SCW_DEFAULT_PROJECT_ID=<your project id>
25+
export BUCKET_NAME=<bucket name>
26+
```
2327

24-
This examples uses [Serverless Functions Python Framework](https://github.com/scaleway/serverless-functions-python) and can be executed locally:
28+
## Deploy on Scaleway
29+
30+
Deployment can be done with `scw_serverless`:
2531

2632
```bash
27-
pip install -r requirements-dev.txtbash
28-
python app.py
33+
pip install --user -r requirements.txt
34+
35+
scw-serverless deploy app.py
2936
```
3037

31-
The upload endpoint allows you to upload files to Glacier via the `file` form-data key:
38+
This will then print out your function's URL. You can use this to test your function with:
3239

3340
```bash
34-
echo -e 'Hello world!\n My contents will be stored in a bunker!' > myfile.dat
35-
curl -F file=@myfile.dat localhost:8080
41+
# Upload the requirements file
42+
curl -F file=@requirements.txt <your function URL>
3643
```
3744

38-
### Deploying with the API Framework
45+
You should then see the `requirements.txt` file uploaded to your bucket.
3946

40-
Deployment can be done with `scw_serverless`:
47+
_Warning_: when deploying the function, do not create a virtual environment directory in the project root, as it will be included in the deployment zip and make it too large.
48+
49+
## Running it locally
50+
51+
You can test your function locally thanks to the [Serverless Functions Python Framework](https://github.com/scaleway/serverless-functions-python). To do this, you can run:
52+
53+
```bash
54+
pip install --user -r requirements-dev.txt
55+
56+
python app.py
57+
```
58+
59+
This starts the function locally, allowing you to upload files to S3 via the `file` form-data key:
4160

4261
```bash
43-
scw_serverless deploy app.py
62+
# Upload the requirements file
63+
curl -F file=@requirements.txt localhost:8080
4464
```
65+

functions/python-upload-file-s3-multipart/app.py

+17-23
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,19 @@
1-
from typing import TYPE_CHECKING
21
import logging
32
import os
43

54
from scw_serverless import Serverless
6-
if TYPE_CHECKING:
7-
from scaleway_functions_python.framework.v1.hints import Context, Event, Response
85

96
import boto3
107
from streaming_form_data import StreamingFormDataParser
118
from streaming_form_data.targets import ValueTarget
129

10+
REGION = "fr-par"
11+
S3_URL = "https://s3.fr-par.scw.cloud"
12+
1313
SCW_ACCESS_KEY = os.environ["SCW_ACCESS_KEY"]
1414
SCW_SECRET_KEY = os.environ["SCW_SECRET_KEY"]
1515
BUCKET_NAME = os.environ["BUCKET_NAME"]
1616

17-
# Files will be uploaded to cold storage
18-
# See: https://www.scaleway.com/en/glacier-cold-storage/
19-
STORAGE_CLASS = "GLACIER"
20-
2117
app = Serverless(
2218
"s3-utilities",
2319
secret={
@@ -30,23 +26,21 @@
3026
},
3127
)
3228

33-
s3 = boto3.resource(
34-
"s3",
35-
region_name="fr-par",
36-
use_ssl=True,
37-
endpoint_url="https://s3.fr-par.scw.cloud",
38-
aws_access_key_id=SCW_ACCESS_KEY,
39-
aws_secret_access_key=SCW_SECRET_KEY,
40-
)
41-
42-
bucket = s3.Bucket(BUCKET_NAME)
43-
4429
logging.basicConfig(level=logging.INFO)
4530

4631

47-
@app.func()
48-
def upload(event: "Event", _context: "Context") -> "Response":
49-
"""Upload form data to S3 Glacier."""
32+
@app.func(memory_limit=512)
33+
def upload(event, _context):
34+
"""Upload form data to S3"""
35+
36+
s3 = boto3.client(
37+
"s3",
38+
region_name=REGION,
39+
use_ssl=True,
40+
endpoint_url=S3_URL,
41+
aws_access_key_id=SCW_ACCESS_KEY,
42+
aws_secret_access_key=SCW_SECRET_KEY,
43+
)
5044

5145
headers = event["headers"]
5246
parser = StreamingFormDataParser(headers=headers)
@@ -63,8 +57,8 @@ def upload(event: "Event", _context: "Context") -> "Response":
6357

6458
name = target.multipart_filename
6559

66-
logging.info("Uploading file %s to Glacier on %s", name, bucket.name)
67-
bucket.put_object(Key=name, Body=target.value, StorageClass=STORAGE_CLASS)
60+
logging.info(f"Uploading to {BUCKET_NAME}/{name}")
61+
s3.put_object(Bucket=BUCKET_NAME, Key=name, Body=target.value)
6862

6963
return {"statusCode": 200, "body": f"Successfully uploaded {name} to bucket!"}
7064

0 commit comments

Comments
 (0)