# .env.example
ENVIRONMENT=production
# You can generate a new secret key by running the following command:
# $ python3 -c "import secrets; print(secrets.token_urlsafe(32))"
# SECRET_KEY is very important; do not share it with others.
# SECRET_KEY must be at least 32 characters long.
SECRET_KEY=
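# A quick way to fill it in (a sketch, assuming a POSIX shell and that you have
# copied this file to .env; remove the empty SECRET_KEY= line above afterwards):
# $ echo "SECRET_KEY=$(python3 -c 'import secrets; print(secrets.token_urlsafe(32))')" >> .env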
# Replace with your own Sentry DSN; leave it commented out if you don't want to use Sentry.
# SENTRY_DSN=https://xxxxx@xxxxx.ingest.sentry.io/xxxxxx
# Replace with your own TiDB cluster connection information.
# TiDB Serverless is recommended; you can quickly create a cluster at https://tidbcloud.com/
TIDB_HOST=xxxxx.prod.aws.tidbcloud.com
TIDB_USER=
TIDB_PASSWORD=
TIDB_DATABASE=
TIDB_SSL=true
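# To sanity-check the values above by hand, a rough sketch using the mysql client
# (assuming the TiDB default port 4000, a MySQL 8 client that supports --ssl-mode,
# and the values substituted in or exported; this is only a manual check, not how
# the application itself connects):
# $ mysql -h "$TIDB_HOST" -P 4000 -u "$TIDB_USER" -p"$TIDB_PASSWORD" -D "$TIDB_DATABASE" --ssl-mode=VERIFY_IDENTITY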
# CAUTION: Do not change EMBEDDING_DIMS after initializing the database.
# Changing the embedding dimensions requires recreating the database and tables.
# The default EMBEDDING_DIMS and EMBEDDING_MAX_TOKENS are set for the OpenAI text-embedding-3-small model.
# If using a different embedding model, adjust these values according to the model's specifications.
# For example:
# openai/text-embedding-3-small: EMBEDDING_DIMS=1536, EMBEDDING_MAX_TOKENS=8191
# maidalun1020/bce-embedding-base_v1: EMBEDDING_DIMS=768, EMBEDDING_MAX_TOKENS=512
# BAAI/bge-m3: EMBEDDING_DIMS=1024, EMBEDDING_MAX_TOKENS=8192
EMBEDDING_DIMS=1536
# EMBEDDING_MAX_TOKENS should be less than or equal to the embedding model's max tokens;
# it sets the maximum size (in tokens) of document chunks.
EMBEDDING_MAX_TOKENS=8191
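# If you switch models and are unsure of the right values, one way to check them
# (a sketch, assuming the sentence-transformers package is installed and the model
# is available on Hugging Face; prints the embedding dimension and max sequence length):
# $ python3 -c "from sentence_transformers import SentenceTransformer; m = SentenceTransformer('BAAI/bge-m3'); print(m.get_sentence_embedding_dimension(), m.max_seq_length)"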