Commit f425942d authored by Wade

milvus and graph volume

parent 2c60b355
# Host directories for the Milvus and graph example volumes; the compose files below
# reference them as ${MILVUS_DOCKER_VOLUME_DIRECTORY:-.} and ${GRAPH_DATA_VOLUME_DIRECTORY:-.},
# falling back to the compose file's own directory when unset.
MILVUS_DOCKER_VOLUME_DIRECTORY=./volume/milvus/
GRAPH_DATA_VOLUME_DIRECTORY=./volume/graph/
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
log_level = "INFO"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[app]
temperature = 0.6
[[app.configs]]
name = "chat_excel"
temperature = 0.1
duckdb_extensions_dir = []
force_install = true
[[app.configs]]
name = "chat_normal"
memory = {type="token", max_token_limit=20000}
[[app.configs]]
name = "chat_with_db_qa"
schema_retrieve_top_k = 50
memory = {type="token", max_token_limit=20000}
# Model Configurations
[models]
[[models.llms]]
name = "${env:LLM_MODEL_NAME:-gpt-4o}"
provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}"
api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}"
api_key = "${env:OPENAI_API_KEY}"
[[models.embeddings]]
name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}"
provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}"
api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}"
api_key = "${env:OPENAI_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
log_level = "INFO"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag]
chunk_size=1000
chunk_overlap=100
similarity_top_k=5
similarity_score_threshold=0.0
max_chunks_once_load=10
max_threads=1
rerank_top_k=3
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
[rag.storage.full_text]
type = "elasticsearch"
host="127.0.0.1"
port=9200
# Model Configurations
[models]
[[models.llms]]
name = "${env:LLM_MODEL_NAME:-gpt-4o}"
provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}"
api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}"
api_key = "${env:OPENAI_API_KEY}"
[[models.embeddings]]
name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}"
provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}"
api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}"
api_key = "${env:OPENAI_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
log_level = "INFO"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[[serves]]
type = "file"
# Default backend for file server
default_backend = "s3"
[[serves.backends]]
type = "oss"
endpoint = "https://oss-cn-beijing.aliyuncs.com"
region = "oss-cn-beijing"
access_key_id = "${env:OSS_ACCESS_KEY_ID}"
access_key_secret = "${env:OSS_ACCESS_KEY_SECRET}"
fixed_bucket = "{your_bucket_name}"
[[serves.backends]]
# Use Tencent COS's S3-compatible API as the file server
type = "s3"
endpoint = "https://cos.ap-beijing.myqcloud.com"
region = "ap-beijing"
access_key_id = "${env:COS_SECRETID}"
access_key_secret = "${env:COS_SECRETKEY}"
fixed_bucket = "{your_bucket_name}"
# Model Configurations
[models]
[[models.llms]]
name = "${env:LLM_MODEL_NAME:-gpt-4o}"
provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}"
api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}"
api_key = "${env:OPENAI_API_KEY}"
[[models.embeddings]]
name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}"
provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}"
api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}"
api_key = "${env:OPENAI_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
log_level = "INFO"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag]
chunk_size=1000
chunk_overlap=0
similarity_top_k=5
similarity_score_threshold=0.0
max_chunks_once_load=10
max_threads=1
rerank_top_k=3
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
[rag.storage.graph]
type = "tugraph"
host="127.0.0.1"
port=7687
username="admin"
password="73@TuGraph"
# enable_summary="True"
# community_topk=20
# community_score_threshold=0.3
# triplet_graph_enabled="True"
# extract_topk=20
# document_graph_enabled="True"
# knowledge_graph_chunk_search_top_size=20
# knowledge_graph_extraction_batch_size=20
# enable_similarity_search="True"
# knowledge_graph_embedding_batch_size=20
# similarity_search_topk=5
# extract_score_threshold=0.7
# enable_text_search="True"
# text2gql_model_enabled="True"
# text2gql_model_name="qwen2.5:latest"
# Model Configurations
[models]
[[models.llms]]
name = "${env:LLM_MODEL_NAME:-gpt-4o}"
provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}"
api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}"
api_key = "${env:OPENAI_API_KEY}"
[[models.embeddings]]
name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}"
provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}"
api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}"
api_key = "${env:OPENAI_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "THUDM/glm-4-9b-chat-hf"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/THUDM/glm-4-9b-chat-hf"
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/BAAI/bge-large-zh-v1.5"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "DeepSeek-R1-Distill-Qwen-1.5B"
provider = "llama.cpp.server"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF
# path = "the-model-path-in-the-local-file-system"
path = "models/DeepSeek-R1-Distill-Qwen-1.5B-Q4_K_M.gguf"
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/BAAI/bge-large-zh-v1.5"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "DeepSeek-R1-Distill-Qwen-1.5B"
provider = "llama.cpp"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-1.5B-GGUF
# path = "the-model-path-in-the-local-file-system"
path = "models/DeepSeek-R1-Distill-Qwen-1.5B-Q4_K_M.gguf"
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/BAAI/bge-large-zh-v1.5"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "Qwen2.5-Coder-0.5B-Instruct"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/Qwen2.5-Coder-0.5B-Instruct"
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/BAAI/bge-large-zh-v1.5"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "Qwen/Qwen3-14B"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
# To force the model to run in non-thinking mode, uncomment the following line:
# reasoning_model = false
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "DeepSeek-R1-Distill-Qwen-1.5B"
provider = "vllm"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/DeepSeek-R1-Distill-Qwen-1.5B"
# dtype = "float32"
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "/data/models/bge-large-zh-v1.5"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[service.model.worker]
host = "127.0.0.1"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "deepseek-reasoner"
# name = "deepseek-chat"
provider = "proxy/deepseek"
api_key = "your_deepseek_api_key"
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "hf"
# If not provided, the model will be downloaded from the Hugging Face model hub
# uncomment the following line to specify the model path in the local file system
# path = "the-model-path-in-the-local-file-system"
path = "models/bge-large-zh-v1.5"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[service.model.worker]
host = "127.0.0.1"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "deepseek-v3"
provider = "proxy/infiniai"
api_key = "${env:INFINIAI_API_KEY}"
[[models.embeddings]]
name = "bge-m3"
provider = "proxy/openai"
api_url = "https://cloud.infini-ai.com/maas/v1/embeddings"
api_key = "${env:INFINIAI_API_KEY}"
[[models.rerankers]]
type = "reranker"
name = "bge-reranker-v2-m3"
provider = "proxy/infiniai"
api_key = "${env:INFINIAI_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-en}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "deepseek-r1:1.5b"
provider = "proxy/ollama"
api_base = "http://localhost:11434"
api_key = ""
[[models.embeddings]]
name = "bge-m3:latest"
provider = "proxy/ollama"
api_url = "http://localhost:11434"
api_key = ""
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-en}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "${env:LLM_MODEL_NAME:-gpt-4o}"
provider = "${env:LLM_MODEL_PROVIDER:-proxy/openai}"
api_base = "${env:OPENAI_API_BASE:-https://api.openai.com/v1}"
api_key = "${env:OPENAI_API_KEY}"
[[models.embeddings]]
name = "${env:EMBEDDING_MODEL_NAME:-text-embedding-3-small}"
provider = "${env:EMBEDDING_MODEL_PROVIDER:-proxy/openai}"
api_url = "${env:EMBEDDING_MODEL_API_URL:-https://api.openai.com/v1/embeddings}"
api_key = "${env:OPENAI_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "mysql"
host = "${env:MYSQL_HOST:-127.0.0.1}"
port = "${env:MYSQL_PORT:-3306}"
database = "${env:MYSQL_DATABASE:-dbgpt}"
user = "${env:MYSQL_USER:-root}"
password = "${env:MYSQL_PASSWORD:-aa123456}"
[service.model.worker]
host = "127.0.0.1"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "Qwen/Qwen2.5-Coder-32B-Instruct"
provider = "proxy/siliconflow"
api_key = "${env:SILICONFLOW_API_KEY}"
[[models.embeddings]]
name = "BAAI/bge-m3"
provider = "proxy/siliconflow"
api_key = "${env:SILICONFLOW_API_KEY}"
[[models.rerankers]]
type = "reranker"
name = "BAAI/bge-reranker-v2-m3"
provider = "proxy/siliconflow"
api_key = "${env:SILICONFLOW_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-zh}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[service.model.worker]
host = "127.0.0.1"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "Qwen/Qwen2.5-Coder-32B-Instruct"
provider = "proxy/siliconflow"
api_key = "${env:SILICONFLOW_API_KEY}"
[[models.embeddings]]
name = "BAAI/bge-large-zh-v1.5"
provider = "proxy/openai"
api_url = "https://api.siliconflow.cn/v1/embeddings"
api_key = "${env:SILICONFLOW_API_KEY}"
[[models.rerankers]]
type = "reranker"
name = "BAAI/bge-reranker-v2-m3"
provider = "proxy/siliconflow"
api_key = "${env:SILICONFLOW_API_KEY}"
[system]
# Load language from environment variable (it is set by the hook)
language = "${env:DBGPT_LANG:-en}"
api_keys = []
encrypt_key = "your_secret_key"
# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670
[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"
[rag.storage]
[rag.storage.vector]
type = "chroma"
persist_path = "pilot/data"
# Model Configurations
[models]
[[models.llms]]
name = "qwen-plus"
provider = "${env:LLM_MODEL_PROVIDER:proxy/tongyi}"
api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1"
api_key = "${env:DASHSCOPE_API_KEY}"
[[models.embeddings]]
name = "text-embedding-v3"
provider = "${env:EMBEDDING_MODEL_PROVIDER:proxy/tongyi}"
api_url = "https://dashscope.aliyuncs.com/compatible-mode/v1/embeddings"
api_key = "${env:DASHSCOPE_API_KEY}"
# For advice on how to change settings please see
# http://dev.mysql.com/doc/refman/8.0/en/server-configuration-defaults.html
[mysqld]
#
# Remove leading # and set to the amount of RAM for the most important data
# cache in MySQL. Start at 70% of total RAM for dedicated server, else 10%.
# innodb_buffer_pool_size = 128M
#
# Remove leading # to turn on a very important data integrity option: logging
# changes to the binary log between backups.
# log_bin
#
# Remove leading # to set options mainly useful for reporting servers.
# The server defaults are faster for transactions and fast SELECTs.
# Adjust sizes as needed, experiment to find the optimal values.
# join_buffer_size = 128M
# sort_buffer_size = 2M
# read_rnd_buffer_size = 2M
# Remove leading # to revert to previous value for default_authentication_plugin,
# this will increase compatibility with older clients. For background, see:
# https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_default_authentication_plugin
# default-authentication-plugin=mysql_native_password
skip-host-cache
skip-name-resolve
datadir=/var/lib/mysql
socket=/var/lib/mysql/mysql.sock
secure-file-priv=/var/lib/mysql-files
user=mysql
pid-file=/var/run/mysqld/mysqld.pid
# add example config
default-authentication-plugin=mysql_native_password
character_set_server=utf8mb4
collation-server=utf8mb4_unicode_ci
init_connect='SET NAMES utf8mb4'
[mysql]
default-character-set=utf8mb4
[client]
default-character-set=utf8mb4
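These character-set settings force utf8mb4 end to end, which matters because the sample data below contains Chinese text. A quick way to confirm the server picked them up (a sketch with `mysql-connector-python`; credentials follow the compose file's defaults):

```python
import mysql.connector

conn = mysql.connector.connect(host="127.0.0.1", port=3306, user="root", password="aa123456")
cur = conn.cursor()
cur.execute("SHOW VARIABLES LIKE 'character_set_server'")
print(cur.fetchone())  # expect ('character_set_server', 'utf8mb4')
cur.close()
conn.close()
```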
create database case_1_student_manager character set utf8;
use case_1_student_manager;
CREATE TABLE students (
student_id INT PRIMARY KEY,
student_name VARCHAR(100) COMMENT '学生姓名',
major VARCHAR(100) COMMENT '专业',
year_of_enrollment INT COMMENT '入学年份',
student_age INT COMMENT '学生年龄'
) COMMENT '学生信息表';
CREATE TABLE courses (
course_id INT PRIMARY KEY,
course_name VARCHAR(100) COMMENT '课程名称',
credit FLOAT COMMENT '学分'
) COMMENT '课程信息表';
CREATE TABLE scores (
student_id INT,
course_id INT,
score INT COMMENT '得分',
semester VARCHAR(50) COMMENT '学期',
PRIMARY KEY (student_id, course_id),
FOREIGN KEY (student_id) REFERENCES students(student_id),
FOREIGN KEY (course_id) REFERENCES courses(course_id)
) COMMENT '学生成绩表';
INSERT INTO students (student_id, student_name, major, year_of_enrollment, student_age) VALUES
(1, '张三', '计算机科学', 2020, 20),
(2, '李四', '计算机科学', 2021, 19),
(3, '王五', '物理学', 2020, 21),
(4, '赵六', '数学', 2021, 19),
(5, '周七', '计算机科学', 2022, 18),
(6, '吴八', '物理学', 2020, 21),
(7, '郑九', '数学', 2021, 19),
(8, '孙十', '计算机科学', 2022, 18),
(9, '刘十一', '物理学', 2020, 21),
(10, '陈十二', '数学', 2021, 19);
INSERT INTO courses (course_id, course_name, credit) VALUES
(1, '计算机基础', 3),
(2, '数据结构', 4),
(3, '高等物理', 3),
(4, '线性代数', 4),
(5, '微积分', 5),
(6, '编程语言', 4),
(7, '量子力学', 3),
(8, '概率论', 4),
(9, '数据库系统', 4),
(10, '计算机网络', 4);
INSERT INTO scores (student_id, course_id, score, semester) VALUES
(1, 1, 90, '2020年秋季'),
(1, 2, 85, '2021年春季'),
(2, 1, 88, '2021年秋季'),
(2, 2, 90, '2022年春季'),
(3, 3, 92, '2020年秋季'),
(3, 4, 85, '2021年春季'),
(4, 3, 88, '2021年秋季'),
(4, 4, 86, '2022年春季'),
(5, 1, 90, '2022年秋季'),
(5, 2, 87, '2023年春季');
CREATE TABLE students (
student_id INTEGER PRIMARY KEY,
student_name VARCHAR(100),
major VARCHAR(100),
year_of_enrollment INTEGER,
student_age INTEGER
);
CREATE TABLE courses (
course_id INTEGER PRIMARY KEY,
course_name VARCHAR(100),
credit REAL
);
CREATE TABLE scores (
student_id INTEGER,
course_id INTEGER,
score INTEGER,
semester VARCHAR(50),
PRIMARY KEY (student_id, course_id),
FOREIGN KEY (student_id) REFERENCES students(student_id),
FOREIGN KEY (course_id) REFERENCES courses(course_id)
);
INSERT INTO students (student_id, student_name, major, year_of_enrollment, student_age) VALUES
(1, '张三', '计算机科学', 2020, 20),
(2, '李四', '计算机科学', 2021, 19),
(3, '王五', '物理学', 2020, 21),
(4, '赵六', '数学', 2021, 19),
(5, '周七', '计算机科学', 2022, 18),
(6, '吴八', '物理学', 2020, 21),
(7, '郑九', '数学', 2021, 19),
(8, '孙十', '计算机科学', 2022, 18),
(9, '刘十一', '物理学', 2020, 21),
(10, '陈十二', '数学', 2021, 19);
INSERT INTO courses (course_id, course_name, credit) VALUES
(1, '计算机基础', 3),
(2, '数据结构', 4),
(3, '高等物理', 3),
(4, '线性代数', 4),
(5, '微积分', 5),
(6, '编程语言', 4),
(7, '量子力学', 3),
(8, '概率论', 4),
(9, '数据库系统', 4),
(10, '计算机网络', 4);
INSERT INTO scores (student_id, course_id, score, semester) VALUES
(1, 1, 90, '2020年秋季'),
(1, 2, 85, '2021年春季'),
(2, 1, 88, '2021年秋季'),
(2, 2, 90, '2022年春季'),
(3, 3, 92, '2020年秋季'),
(3, 4, 85, '2021年春季'),
(4, 3, 88, '2021年秋季'),
(4, 4, 86, '2022年春季'),
(5, 1, 90, '2022年秋季'),
(5, 2, 87, '2023年春季');
CREATE SCHEMA case_1_student_manager;
COMMENT ON SCHEMA case_1_student_manager is '学校管理系统';
SET SEARCH_PATH = case_1_student_manager;
CREATE TABLE students (
student_id INT PRIMARY KEY,
student_name VARCHAR(100),
major VARCHAR(100),
year_of_enrollment INT,
student_age INT
);
COMMENT ON TABLE students IS '学生信息表';
COMMENT ON COLUMN students.student_name IS '学生姓名';
COMMENT ON COLUMN students.major IS '专业';
COMMENT ON COLUMN students.year_of_enrollment IS '入学年份';
COMMENT ON COLUMN students.student_age IS '学生年龄';
CREATE TABLE courses (
course_id INT PRIMARY KEY,
course_name VARCHAR(100),
credit FLOAT
);
COMMENT ON TABLE courses IS '课程信息表';
COMMENT ON COLUMN courses.course_name IS '课程名称';
COMMENT ON COLUMN courses.credit IS '学分';
CREATE TABLE scores (
student_id INT,
course_id INT,
score INT,
semester VARCHAR(50),
PRIMARY KEY (student_id, course_id),
FOREIGN KEY (student_id) REFERENCES students(student_id),
FOREIGN KEY (course_id) REFERENCES courses(course_id)
);
COMMENT ON TABLE scores IS '学生成绩表';
COMMENT ON COLUMN scores.score IS '得分';
COMMENT ON COLUMN scores.semester IS '学期';
INSERT INTO students (student_id, student_name, major, year_of_enrollment, student_age) VALUES
(1, '张三', '计算机科学', 2020, 20),
(2, '李四', '计算机科学', 2021, 19),
(3, '王五', '物理学', 2020, 21),
(4, '赵六', '数学', 2021, 19),
(5, '周七', '计算机科学', 2022, 18),
(6, '吴八', '物理学', 2020, 21),
(7, '郑九', '数学', 2021, 19),
(8, '孙十', '计算机科学', 2022, 18),
(9, '刘十一', '物理学', 2020, 21),
(10, '陈十二', '数学', 2021, 19);
INSERT INTO courses (course_id, course_name, credit) VALUES
(1, '计算机基础', 3),
(2, '数据结构', 4),
(3, '高等物理', 3),
(4, '线性代数', 4),
(5, '微积分', 5),
(6, '编程语言', 4),
(7, '量子力学', 3),
(8, '概率论', 4),
(9, '数据库系统', 4),
(10, '计算机网络', 4);
INSERT INTO scores (student_id, course_id, score, semester) VALUES
(1, 1, 90, '2020年秋季'),
(1, 2, 85, '2021年春季'),
(2, 1, 88, '2021年秋季'),
(2, 2, 90, '2022年春季'),
(3, 3, 92, '2020年秋季'),
(3, 4, 85, '2021年春季'),
(4, 3, 88, '2021年秋季'),
(4, 4, 86, '2022年春季'),
(5, 1, 90, '2022年秋季'),
(5, 2, 87, '2023年春季');
COMMIT;
create database case_2_ecom character set utf8;
use case_2_ecom;
CREATE TABLE users (
user_id INT PRIMARY KEY,
user_name VARCHAR(100) COMMENT '用户名',
user_email VARCHAR(100) COMMENT '用户邮箱',
registration_date DATE COMMENT '注册日期',
user_country VARCHAR(100) COMMENT '用户国家'
) COMMENT '用户信息表';
CREATE TABLE products (
product_id INT PRIMARY KEY,
product_name VARCHAR(100) COMMENT '商品名称',
product_price FLOAT COMMENT '商品价格'
) COMMENT '商品信息表';
CREATE TABLE orders (
order_id INT PRIMARY KEY,
user_id INT,
product_id INT,
quantity INT COMMENT '数量',
order_date DATE COMMENT '订单日期',
FOREIGN KEY (user_id) REFERENCES users(user_id),
FOREIGN KEY (product_id) REFERENCES products(product_id)
) COMMENT '订单信息表';
INSERT INTO users (user_id, user_name, user_email, registration_date, user_country) VALUES
(1, 'John', 'john@gmail.com', '2020-01-01', 'USA'),
(2, 'Mary', 'mary@gmail.com', '2021-01-01', 'UK'),
(3, 'Bob', 'bob@gmail.com', '2020-01-01', 'USA'),
(4, 'Alice', 'alice@gmail.com', '2021-01-01', 'UK'),
(5, 'Charlie', 'charlie@gmail.com', '2020-01-01', 'USA'),
(6, 'David', 'david@gmail.com', '2021-01-01', 'UK'),
(7, 'Eve', 'eve@gmail.com', '2020-01-01', 'USA'),
(8, 'Frank', 'frank@gmail.com', '2021-01-01', 'UK'),
(9, 'Grace', 'grace@gmail.com', '2020-01-01', 'USA'),
(10, 'Helen', 'helen@gmail.com', '2021-01-01', 'UK');
INSERT INTO products (product_id, product_name, product_price) VALUES
(1, 'iPhone', 699),
(2, 'Samsung Galaxy', 599),
(3, 'iPad', 329),
(4, 'Macbook', 1299),
(5, 'Apple Watch', 399),
(6, 'AirPods', 159),
(7, 'Echo', 99),
(8, 'Kindle', 89),
(9, 'Fire TV Stick', 39),
(10, 'Echo Dot', 49);
INSERT INTO orders (order_id, user_id, product_id, quantity, order_date) VALUES
(1, 1, 1, 1, '2022-01-01'),
(2, 1, 2, 1, '2022-02-01'),
(3, 2, 3, 2, '2022-03-01'),
(4, 2, 4, 1, '2022-04-01'),
(5, 3, 5, 2, '2022-05-01'),
(6, 3, 6, 3, '2022-06-01'),
(7, 4, 7, 2, '2022-07-01'),
(8, 4, 8, 1, '2022-08-01'),
(9, 5, 9, 2, '2022-09-01'),
(10, 5, 10, 3, '2022-10-01');
CREATE TABLE users (
user_id INTEGER PRIMARY KEY,
user_name VARCHAR(100),
user_email VARCHAR(100),
registration_date DATE,
user_country VARCHAR(100)
);
CREATE TABLE products (
product_id INTEGER PRIMARY KEY,
product_name VARCHAR(100),
product_price REAL
);
CREATE TABLE orders (
order_id INTEGER PRIMARY KEY,
user_id INTEGER,
product_id INTEGER,
quantity INTEGER,
order_date DATE,
FOREIGN KEY (user_id) REFERENCES users(user_id),
FOREIGN KEY (product_id) REFERENCES products(product_id)
);
INSERT INTO users (user_id, user_name, user_email, registration_date, user_country) VALUES
(1, 'John', 'john@gmail.com', '2020-01-01', 'USA'),
(2, 'Mary', 'mary@gmail.com', '2021-01-01', 'UK'),
(3, 'Bob', 'bob@gmail.com', '2020-01-01', 'USA'),
(4, 'Alice', 'alice@gmail.com', '2021-01-01', 'UK'),
(5, 'Charlie', 'charlie@gmail.com', '2020-01-01', 'USA'),
(6, 'David', 'david@gmail.com', '2021-01-01', 'UK'),
(7, 'Eve', 'eve@gmail.com', '2020-01-01', 'USA'),
(8, 'Frank', 'frank@gmail.com', '2021-01-01', 'UK'),
(9, 'Grace', 'grace@gmail.com', '2020-01-01', 'USA'),
(10, 'Helen', 'helen@gmail.com', '2021-01-01', 'UK');
INSERT INTO products (product_id, product_name, product_price) VALUES
(1, 'iPhone', 699),
(2, 'Samsung Galaxy', 599),
(3, 'iPad', 329),
(4, 'Macbook', 1299),
(5, 'Apple Watch', 399),
(6, 'AirPods', 159),
(7, 'Echo', 99),
(8, 'Kindle', 89),
(9, 'Fire TV Stick', 39),
(10, 'Echo Dot', 49);
INSERT INTO orders (order_id, user_id, product_id, quantity, order_date) VALUES
(1, 1, 1, 1, '2022-01-01'),
(2, 1, 2, 1, '2022-02-01'),
(3, 2, 3, 2, '2022-03-01'),
(4, 2, 4, 1, '2022-04-01'),
(5, 3, 5, 2, '2022-05-01'),
(6, 3, 6, 3, '2022-06-01'),
(7, 4, 7, 2, '2022-07-01'),
(8, 4, 8, 1, '2022-08-01'),
(9, 5, 9, 2, '2022-09-01'),
(10, 5, 10, 3, '2022-10-01');
CREATE SCHEMA case_2_ecom;
COMMENT ON SCHEMA case_2_ecom is '电子商务系统';
SET SEARCH_PATH = case_2_ecom;
CREATE TABLE users (
user_id INT PRIMARY KEY,
user_name VARCHAR(100),
user_email VARCHAR(100),
registration_date DATE,
user_country VARCHAR(100)
);
COMMENT ON TABLE users IS '用户信息表';
COMMENT ON COLUMN users.user_name IS '用户名';
COMMENT ON COLUMN users.user_email IS '用户邮箱';
COMMENT ON COLUMN users.registration_date IS '注册日期';
COMMENT ON COLUMN users.user_country IS '用户国家';
CREATE TABLE products (
product_id INT PRIMARY KEY,
product_name VARCHAR(100),
product_price FLOAT
);
COMMENT ON TABLE products IS '商品信息表';
COMMENT ON COLUMN products.product_name IS '商品名称';
COMMENT ON COLUMN products.product_price IS '商品价格';
CREATE TABLE orders (
order_id INT PRIMARY KEY,
user_id INT,
product_id INT,
quantity INT,
order_date DATE,
FOREIGN KEY (user_id) REFERENCES users(user_id),
FOREIGN KEY (product_id) REFERENCES products(product_id)
);
COMMENT ON TABLE orders IS '订单信息表';
COMMENT ON COLUMN orders.quantity IS '数量';
COMMENT ON COLUMN orders.order_date IS '订单日期';
INSERT INTO users (user_id, user_name, user_email, registration_date, user_country) VALUES
(1, 'John', 'john@gmail.com', '2020-01-01', 'USA'),
(2, 'Mary', 'mary@gmail.com', '2021-01-01', 'UK'),
(3, 'Bob', 'bob@gmail.com', '2020-01-01', 'USA'),
(4, 'Alice', 'alice@gmail.com', '2021-01-01', 'UK'),
(5, 'Charlie', 'charlie@gmail.com', '2020-01-01', 'USA'),
(6, 'David', 'david@gmail.com', '2021-01-01', 'UK'),
(7, 'Eve', 'eve@gmail.com', '2020-01-01', 'USA'),
(8, 'Frank', 'frank@gmail.com', '2021-01-01', 'UK'),
(9, 'Grace', 'grace@gmail.com', '2020-01-01', 'USA'),
(10, 'Helen', 'helen@gmail.com', '2021-01-01', 'UK');
INSERT INTO products (product_id, product_name, product_price) VALUES
(1, 'iPhone', 699),
(2, 'Samsung Galaxy', 599),
(3, 'iPad', 329),
(4, 'Macbook', 1299),
(5, 'Apple Watch', 399),
(6, 'AirPods', 159),
(7, 'Echo', 99),
(8, 'Kindle', 89),
(9, 'Fire TV Stick', 39),
(10, 'Echo Dot', 49);
INSERT INTO orders (order_id, user_id, product_id, quantity, order_date) VALUES
(1, 1, 1, 1, '2022-01-01'),
(2, 1, 2, 1, '2022-02-01'),
(3, 2, 3, 2, '2022-03-01'),
(4, 2, 4, 1, '2022-04-01'),
(5, 3, 5, 2, '2022-05-01'),
(6, 3, 6, 3, '2022-06-01'),
(7, 4, 7, 2, '2022-07-01'),
(8, 4, 8, 1, '2022-08-01'),
(9, 5, 9, 2, '2022-09-01'),
(10, 5, 10, 3, '2022-10-01');
COMMIT;
# Test Questions
## Scenario 1
A school management system, mainly testing the SQL assistant's join queries, conditional filtering, and sorting.
The database has three tables: students, courses, and scores. We want to test whether the SQL assistant can handle complex SQL queries, including joining multiple tables, filtering data by given conditions, and sorting the results.
### Q1
Query every student's name, major, and score, sorted by score in descending order.
SQL:
```sql
SELECT students.student_name, students.major, scores.score
FROM students
JOIN scores ON students.student_id = scores.student_id
ORDER BY scores.score DESC;
```
### Q2
查询 "计算机科学" 专业的学生的平均成绩
SQL:
```sql
SELECT AVG(scores.score) as avg_score
FROM students
JOIN scores ON students.student_id = scores.student_id
WHERE students.major = '计算机科学';
```
### Q3
Query the students whose total course credits in the '2023年春季' (Spring 2023) semester exceed 2.
SQL:
```sql
SELECT students.student_name
FROM students
JOIN scores ON students.student_id = scores.student_id
JOIN courses ON scores.course_id = courses.course_id
WHERE scores.semester = '2023年春季'
GROUP BY students.student_id
HAVING SUM(courses.credit) > 2;
```
## Scenario 2
An e-commerce system, mainly testing the SQL assistant's aggregation and grouping.
The database has three tables: users, products, and orders. We want to test whether the SQL assistant can handle complex SQL queries, including aggregating and grouping data.
### Q1
Query the total number of orders for each user.
SQL:
```sql
SELECT users.user_name, COUNT(orders.order_id) as order_count
FROM users
JOIN orders ON users.user_id = orders.user_id
GROUP BY users.user_id;
```
### Q2
Query the total sales revenue for each product.
SQL:
```sql
SELECT products.product_name, SUM(products.product_price * orders.quantity) as total_sales
FROM products
JOIN orders ON products.product_id = orders.product_id
GROUP BY products.product_id;
```
### Q3
Query the most popular product of 2023 (the product with the most orders).
SQL:
```sql
SELECT products.product_name
FROM products
JOIN orders ON products.product_id = orders.product_id
WHERE YEAR(orders.order_date) = 2023
GROUP BY products.product_id
ORDER BY COUNT(orders.order_id) DESC
LIMIT 1;
```
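One portability note on the questions above: `YEAR(orders.order_date)` is MySQL syntax, while the SQLite variant of the schema needs `strftime('%Y', order_date)`. A small in-memory sketch of the adapted Q3 (the sample data contains only 2022 orders, so the result is empty):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
CREATE TABLE products (product_id INTEGER PRIMARY KEY, product_name VARCHAR(100), product_price REAL);
CREATE TABLE orders (order_id INTEGER PRIMARY KEY, user_id INTEGER, product_id INTEGER,
                     quantity INTEGER, order_date DATE);
INSERT INTO products VALUES (1, 'iPhone', 699);
INSERT INTO orders VALUES (1, 1, 1, 1, '2022-01-01');
""")
rows = conn.execute("""
SELECT products.product_name
FROM products JOIN orders ON products.product_id = orders.product_id
WHERE strftime('%Y', orders.order_date) = '2023'
GROUP BY products.product_id
ORDER BY COUNT(orders.order_id) DESC
LIMIT 1;
""").fetchall()
print(rows)  # [] - no 2023 orders in the sample data
```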
create database test_case_info character set utf8;
use test_case_info;
CREATE TABLE test_cases (
case_id INT AUTO_INCREMENT PRIMARY KEY,
scenario_name VARCHAR(100) COMMENT '场景名称',
scenario_description TEXT COMMENT '场景描述',
test_question VARCHAR(500) COMMENT '测试问题',
expected_sql TEXT COMMENT '预期SQL',
correct_output TEXT COMMENT '正确输出'
) COMMENT '测试用例表';
INSERT INTO test_cases (scenario_name, scenario_description, test_question, expected_sql, correct_output) VALUES
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询所有学生的姓名,专业和成绩,按成绩降序排序', 'SELECT students.student_name, students.major, scores.score FROM students JOIN scores ON students.student_id = scores.student_id ORDER BY scores.score DESC;', '返回所有学生的姓名,专业和成绩,按成绩降序排序的结果'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询计算机科学专业的学生的平均成绩', 'SELECT AVG(scores.score) as avg_score FROM students JOIN scores ON students.student_id = scores.student_id WHERE students.major = ''计算机科学'';', '返回计算机科学专业学生的平均成绩'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询哪些学生在2023年秋季学期的课程学分总和超过15', 'SELECT students.student_name FROM students JOIN scores ON students.student_id = scores.student_id JOIN courses ON scores.course_id = courses.course_id WHERE scores.semester = ''2023年秋季'' GROUP BY students.student_id HAVING SUM(courses.credit) > 15;', '返回在2023年秋季学期的课程学分总和超过15的学生的姓名'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每个用户的总订单数量', 'SELECT users.user_name, COUNT(orders.order_id) as order_count FROM users JOIN orders ON users.user_id = orders.user_id GROUP BY users.user_id;', '返回每个用户的总订单数量'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每种商品的总销售额', 'SELECT products.product_name, SUM(products.product_price * orders.quantity) as total_sales FROM products JOIN orders ON products.product_id = orders.product_id GROUP BY products.product_id;', '返回每种商品的总销售额'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询2023年最受欢迎的商品(订单数量最多的商品)', 'SELECT products.product_name FROM products JOIN orders ON products.product_id = orders.product_id WHERE YEAR(orders.order_date) = 2023 GROUP BY products.product_id ORDER BY COUNT(orders.order_id) DESC LIMIT 1;', '返回2023年最受欢迎的商品(订单数量最多的商品)的名称');
CREATE TABLE test_cases (
case_id INTEGER PRIMARY KEY AUTOINCREMENT,
scenario_name VARCHAR(100),
scenario_description TEXT,
test_question VARCHAR(500),
expected_sql TEXT,
correct_output TEXT
);
INSERT INTO test_cases (scenario_name, scenario_description, test_question, expected_sql, correct_output) VALUES
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询所有学生的姓名,专业和成绩,按成绩降序排序', 'SELECT students.student_name, students.major, scores.score FROM students JOIN scores ON students.student_id = scores.student_id ORDER BY scores.score DESC;', '返回所有学生的姓名,专业和成绩,按成绩降序排序的结果'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询计算机科学专业的学生的平均成绩', 'SELECT AVG(scores.score) as avg_score FROM students JOIN scores ON students.student_id = scores.student_id WHERE students.major = ''计算机科学'';', '返回计算机科学专业学生的平均成绩'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询哪些学生在2023年秋季学期的课程学分总和超过15', 'SELECT students.student_name FROM students JOIN scores ON students.student_id = scores.student_id JOIN courses ON scores.course_id = courses.course_id WHERE scores.semester = ''2023年秋季'' GROUP BY students.student_id HAVING SUM(courses.credit) > 15;', '返回在2023年秋季学期的课程学分总和超过15的学生的姓名'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每个用户的总订单数量', 'SELECT users.user_name, COUNT(orders.order_id) as order_count FROM users JOIN orders ON users.user_id = orders.user_id GROUP BY users.user_id;', '返回每个用户的总订单数量'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每种商品的总销售额', 'SELECT products.product_name, SUM(products.product_price * orders.quantity) as total_sales FROM products JOIN orders ON products.product_id = orders.product_id GROUP BY products.product_id;', '返回每种商品的总销售额'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询2023年最受欢迎的商品(订单数量最多的商品)', 'SELECT products.product_name FROM products JOIN orders ON products.product_id = orders.product_id WHERE YEAR(orders.order_date) = 2023 GROUP BY products.product_id ORDER BY COUNT(orders.order_id) DESC LIMIT 1;', '返回2023年最受欢迎的商品(订单数量最多的商品)的名称');
CREATE SCHEMA test_case_info;
COMMENT ON SCHEMA test_case_info is '测试用例信息';
SET SEARCH_PATH = test_case_info;
CREATE TABLE test_cases (
case_id SERIAL /*INT AUTO_INCREMENT*/ PRIMARY KEY,
scenario_name VARCHAR(100),
scenario_description VARCHAR(6500),
test_question VARCHAR(500),
expected_sql VARCHAR(6500),
correct_output VARCHAR(6500)
);
COMMENT ON TABLE test_cases IS '测试用例表';
COMMENT ON COLUMN test_cases.scenario_name IS '场景名称';
COMMENT ON COLUMN test_cases.scenario_description IS '场景描述';
COMMENT ON COLUMN test_cases.test_question IS '测试问题';
COMMENT ON COLUMN test_cases.expected_sql IS '预期SQL';
COMMENT ON COLUMN test_cases.correct_output IS '正确输出';
INSERT INTO test_cases (scenario_name, scenario_description, test_question, expected_sql, correct_output) VALUES
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询所有学生的姓名,专业和成绩,按成绩降序排序', 'SELECT students.student_name, students.major, scores.score FROM students JOIN scores ON students.student_id = scores.student_id ORDER BY scores.score DESC;', '返回所有学生的姓名,专业和成绩,按成绩降序排序的结果'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询计算机科学专业的学生的平均成绩', 'SELECT AVG(scores.score) as avg_score FROM students JOIN scores ON students.student_id = scores.student_id WHERE students.major = ''计算机科学'';', '返回计算机科学专业学生的平均成绩'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询哪些学生在2023年秋季学期的课程学分总和超过15', 'SELECT students.student_name FROM students JOIN scores ON students.student_id = scores.student_id JOIN courses ON scores.course_id = courses.course_id WHERE scores.semester = ''2023年秋季'' GROUP BY students.student_id HAVING SUM(courses.credit) > 15;', '返回在2023年秋季学期的课程学分总和超过15的学生的姓名'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每个用户的总订单数量', 'SELECT users.user_name, COUNT(orders.order_id) as order_count FROM users JOIN orders ON users.user_id = orders.user_id GROUP BY users.user_id;', '返回每个用户的总订单数量'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每种商品的总销售额', 'SELECT products.product_name, SUM(products.product_price * orders.quantity) as total_sales FROM products JOIN orders ON products.product_id = orders.product_id GROUP BY products.product_id;', '返回每种商品的总销售额'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询2023年最受欢迎的商品(订单数量最多的商品)', 'SELECT products.product_name FROM products JOIN orders ON products.product_id = orders.product_id WHERE YEAR(orders.order_date) = 2023 GROUP BY products.product_id ORDER BY COUNT(orders.order_id) DESC LIMIT 1;', '返回2023年最受欢迎的商品(订单数量最多的商品)的名称');
COMMIT;
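Because each `test_cases` row pairs a question with its expected SQL, the table can drive an automated check. A minimal harness sketch that replays every `expected_sql` over a DB-API connection (assumes the connection points at a database where both the test_cases table and the case schemas are loaded):

```python
def replay_test_cases(conn):
    """Execute each expected_sql from test_cases and report pass/fail."""
    cur = conn.cursor()
    cur.execute("SELECT case_id, test_question, expected_sql FROM test_cases")
    for case_id, question, expected_sql in cur.fetchall():
        try:
            cur.execute(expected_sql)
            cur.fetchall()
            print(f"case {case_id}: OK  ({question})")
        except Exception as exc:  # report and continue with the next case
            print(f"case {case_id}: FAILED - {exc}")
    cur.close()
```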
USE mysql;
UPDATE user SET Host='%' WHERE User='root';
FLUSH PRIVILEGES;
# To run this docker compose file, prepare the SiliconFlow API key in your environment:
# SILICONFLOW_API_KEY=${SILICONFLOW_API_KEY} docker compose up -d
services:
  db:
    image: mysql/mysql-server
    environment:
      MYSQL_USER: 'user'
      MYSQL_PASSWORD: 'password'
      MYSQL_ROOT_PASSWORD: 'aa123456'
    ports:
      - 3306:3306
    volumes:
      # - dbgpt-myql-db:/var/lib/mysql
      - ${GRAPH_DATA_VOLUME_DIRECTORY:-.}/dbgpt-myql-db:/var/lib/mysql
      - ./config/graph/examples/my.cnf:/etc/my.cnf
      - ./config/graph/examples/sqls:/docker-entrypoint-initdb.d
      - ./config/graph/assets/dbgpt.sql:/docker-entrypoint-initdb.d/dbgpt.sql
    restart: unless-stopped
    networks:
      - dbgptnet
  webserver:
    image: eosphorosai/dbgpt-openai:latest
    command: dbgpt start webserver --config /app/configs/dbgpt-graphrag.toml
    environment:
      - SILICONFLOW_API_KEY=${SILICONFLOW_API_KEY}
      - MYSQL_PASSWORD=aa123456
      - MYSQL_HOST=db
      - MYSQL_PORT=3306
      - MYSQL_DATABASE=dbgpt
      - MYSQL_USER=root
      - OPENAI_API_KEY=sk-UIpD9DohtE0Ok4wtFdC21668Dc3241629e8aA05d5dAeFdA1
    volumes:
      - ./config/graph/configs:/app/configs
      - ${GRAPH_DATA_VOLUME_DIRECTORY:-.}/data:/data
      # You can also mount your local models into the container
      - ${GRAPH_DATA_VOLUME_DIRECTORY:-.}/data/models:/app/models
      # - dbgpt-data:/app/pilot/data
      - ${GRAPH_DATA_VOLUME_DIRECTORY:-.}/dbgpt-data:/app/pilot/data
      # - dbgpt-message:/app/pilot/message
      - ${GRAPH_DATA_VOLUME_DIRECTORY:-.}/dbgpt-message:/app/pilot/message
    depends_on:
      - db
      - tugraph
    ports:
      - 5670:5670/tcp
    # The webserver may fail on first start; it has to wait until all SQL scripts in /docker-entrypoint-initdb.d finish executing.
    restart: unless-stopped
    networks:
      - dbgptnet
    ipc: host
  tugraph:
    image: tugraph/tugraph-runtime-centos7:4.5.1
    command: lgraph_server -d run --enable_plugin true
    ports:
      - 7070:7070
      - 7687:7687
      - 9090:9090
    container_name: tugraph_demo
    restart: unless-stopped
    networks:
      - dbgptnet
networks:
  dbgptnet:
    driver: bridge
    name: dbgptnet
@@ -9,7 +9,7 @@ services:
       - ETCD_QUOTA_BACKEND_BYTES=4294967296
       - ETCD_SNAPSHOT_COUNT=50000
     volumes:
-      - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/etcd:/etcd
+      - ${MILVUS_DOCKER_VOLUME_DIRECTORY:-.}/etcd:/etcd
     command: etcd -advertise-client-urls=http://etcd:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd
     healthcheck:
       test: ["CMD", "etcdctl", "endpoint", "health"]
@@ -26,7 +26,7 @@ services:
       - "9001:9001"
       - "9000:9000"
     volumes:
-      - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/minio:/minio_data
+      - ${MILVUS_DOCKER_VOLUME_DIRECTORY:-.}/minio:/minio_data
     command: minio server /minio_data --console-address ":9001"
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
@@ -43,7 +43,7 @@ services:
       ETCD_ENDPOINTS: etcd:2379
       MINIO_ADDRESS: minio:9000
     volumes:
-      - ${DOCKER_VOLUME_DIRECTORY:-.}/volumes/milvus:/var/lib/milvus
+      - ${MILVUS_DOCKER_VOLUME_DIRECTORY:-.}/milvus:/var/lib/milvus
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:9091/healthz"]
       interval: 30s
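With the `.env` entry `MILVUS_DOCKER_VOLUME_DIRECTORY=./volume/milvus/`, these mounts now resolve to `./volume/milvus/etcd`, `./volume/milvus/minio`, and `./volume/milvus/milvus` instead of `./volumes/...`, so all Milvus-stack state lives under one configurable host directory; the graph compose file gets the same treatment through `GRAPH_DATA_VOLUME_DIRECTORY`.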