Commit e3ddfa73 authored by Wade

add test

parent c3c1a456
@@ -2,13 +2,26 @@ version: '3.8'
 services:
   locust:
     image: locustio/locust:latest
-    container_name: locust
+    container_name: locust-master
     ports:
       - "8089:8089" # Expose Locust web UI
     volumes:
       - ./config/test/locustfile.py:/mnt/locust/locustfile.py # Mount the Locust script
     environment:
-      - LOCUST_HOST=http://agentchat:8000 # Replace with your API server host
+      - LOCUST_HOST=http://agentchat:8000
     command: -f /mnt/locust/locustfile.py --master
     depends_on:
       - agentchat
+  locust-worker:
+    image: locustio/locust:latest
+    container_name: locust-worker
+    volumes:
+      - ./config/test/locustfile.py:/mnt/locust/locustfile.py
+    environment:
+      - LOCUST_HOST=http://agentchat:8000
+    command: -f /mnt/locust/locustfile.py --worker --master-host locust-master
+    depends_on:
+      - locust
+      - agentchat
+    deploy:
+      replicas: 1 # Run 1 worker instance (adjust as needed)
package main

import (
	"context"
	"fmt"
	"log"
	"strings"

	"github.com/firebase/genkit/go/ai"
	"github.com/firebase/genkit/go/genkit"
	"github.com/firebase/genkit/go/plugins/googlegenai"
)
// ModelRouter maps model names to factory functions that create ai.Model instances.
type ModelRouter struct {
	models map[string]func(apiKey string) ai.Model
}

// NewModelRouter initializes the model router. The Genkit instance is required
// so the Google AI factory can resolve models registered by the googlegenai plugin.
func NewModelRouter(g *genkit.Genkit) *ModelRouter {
	return &ModelRouter{
		models: map[string]func(apiKey string) ai.Model{
			"gemini-1.5-flash": func(apiKey string) ai.Model {
				// Note: this key currently resolves to the Gemini 2.5 Pro preview model.
				return googlegenai.GoogleAIModel(g, "gemini-2.5-pro-preview-03-25")
			},
			"deepseek": func(apiKey string) ai.Model {
				// Placeholder until a DeepSeek plugin is wired in, e.g.:
				// return deepseek.Model("deepseek-model", deepseek.WithAPIKey(apiKey))
				log.Printf("DeepSeek not implemented, using placeholder")
				return nil // replace with the actual DeepSeek plugin
			},
		},
	}
}
// GetModel returns the model for the given model name and API key.
func (mr *ModelRouter) GetModel(modelName, apiKey string) (ai.Model, error) {
	if modelName == "" {
		return nil, fmt.Errorf("model parameter is required")
	}
	modelFunc, ok := mr.models[modelName]
	if !ok {
		return nil, fmt.Errorf("unsupported model: %s", modelName)
	}
	model := modelFunc(apiKey)
	if model == nil {
		return nil, fmt.Errorf("failed to initialize model: %s", modelName)
	}
	return model, nil
}
// Input is the request payload accepted by the chat flow.
type Input struct {
	Content  []*ai.Part `json:"content,omitempty"`
	Model    string     `json:"model,omitempty"`
	APIKey   string     `json:"apiKey,omitempty"`
	Username string     `json:"username,omitempty"`
	UserID   string     `json:"user_id,omitempty"`
}
func defineChatFlow(g *genkit.Genkit, mr *ModelRouter) {
	genkit.DefineFlow(g, "chat", func(ctx context.Context, input Input) (string, error) {
		// Resolve the requested model.
		model, err := mr.GetModel(input.Model, input.APIKey)
		if err != nil {
			return "", fmt.Errorf("failed to get model: %v", err)
		}

		// Add user context to the prompt if available.
		userContext := ""
		if input.Username != "" {
			userContext = fmt.Sprintf("User %s ", input.Username)
		}
		if input.UserID != "" {
			userContext += fmt.Sprintf("(ID: %s) ", input.UserID)
		}

		// Concatenate the text parts of the request into a single prompt string,
		// so that []*ai.Part is not rendered as raw pointers by %s.
		var content strings.Builder
		for _, part := range input.Content {
			if part != nil {
				content.WriteString(part.Text)
			}
		}

		resp, err := genkit.Generate(ctx, g,
			ai.WithModel(model),
			ai.WithPrompt(fmt.Sprintf("%sasks: %s", userContext, content.String())))
		if err != nil {
			log.Printf("chat generation failed: %v", err)
			return "", err
		}
		log.Printf("resp.Text(): %s", resp.Text())
		return resp.Text(), nil
	})
}
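For context, here is a minimal sketch of how these pieces might be wired together in a separate main function of the same package. It assumes the genkit.Init/WithPlugins initialization pattern from the Genkit Go quickstart, that the googlegenai plugin picks up its API key from the environment, and that no other main exists in the package; none of this is part of the commit itself and it may need adjusting to the pinned genkit version.

package main

import (
	"context"
	"log"

	"github.com/firebase/genkit/go/genkit"
	"github.com/firebase/genkit/go/plugins/googlegenai"
)

func main() {
	ctx := context.Background()

	// Assumption: the googlegenai plugin reads the Gemini API key from the environment.
	g, err := genkit.Init(ctx, genkit.WithPlugins(&googlegenai.GoogleAI{}))
	if err != nil {
		log.Fatalf("failed to initialize Genkit: %v", err)
	}

	// Build the router with the Genkit instance and register the chat flow.
	router := NewModelRouter(g)
	defineChatFlow(g, router)

	// Block so the registered flow stays available, e.g. to the Genkit dev tooling.
	select {}
}

With this wiring, the "chat" flow receives the Input payload shown above, routes to the requested model via ModelRouter, and returns the generated text.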