Commit 8864a9d6 authored by Wade's avatar Wade

graphrag ok

parent 060c9918
......@@ -96,6 +96,17 @@ curl -d '{"content": "What is the capital of UK?"}' http://localhost:8000/chat
curl -X POST \
-H "Content-Type: application/json" \
-d '{"content": "What is the capital of UK?", "username": "Alice", "user_id": "user123"}' \
http://localhost:8000/chat
curl -X POST \
-H "Content-Type: application/json" \
-d '{"content": "What is the capital of UK?", "username": "Bob", "user_id": "user456"}' \
http://localhost:8000/chat
curl -d '{"content": "What is the capital of UK?"}' http://localhost:8000/indexDocuments
......@@ -105,7 +116,7 @@ curl -d '{"content": "What is the capital of UK?"}' http://localhost:8000/indexD
curl -X POST http://localhost:8000/indexDocuments \
-H "Content-Type: application/json" \
-d '{"content": "What is the capital of UK?", "metadata": {"user_id": "user456", "username": "Bob"}}'
{"result": "Document indexed successfully"}
......@@ -120,9 +131,25 @@ curl -X POST http://localhost:8000/indexDocuments \
curl -X POST http://localhost:8000/indexGraph \
-H "Content-Type: application/json" \
-d '{"user_id": "user456", "username": "Bob", "content": "What is the capital of UK?", "metadata": {}}'
-d '{"user_id": "user456", "username": "Bob", "content": "Paris is the capital of France", "metadata": {}}'
curl -X POST http://localhost:8000/indexGraph \
-H "Content-Type: application/json" \
-d '{"user_id": "user456", "username": "Bob", "content": "USA is the largest importer of coffee", "metadata": {}}'
curl -X POST http://localhost:8000/indexGraph \
-H "Content-Type: application/json" \
-d '{"user_id": "user456", "username": "Bob", "content": "Water exists in 3 states - solid, liquid and gas", "metadata": {}}'
{"result": "Document indexed successfully"}
......@@ -173,3 +200,75 @@ curl -X POST http://localhost:8000/indexDocuments \
curl -X 'POST' \
'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"conv_uid": "",
"user_input": "",
"user_name": "string",
"chat_mode": "",
"app_code": "",
"temperature": 0.5,
"max_new_tokens": 4000,
"select_param": "string",
"model_name": "string",
"incremental": false,
"sys_code": "string",
"prompt_code": "string",
"ext_info": {}
}'
curl -X 'POST' \
'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"user_input": "111111111",
"chat_mode": "chat_knowledge",
"app_code": "chat_knowledge",
"temperature": 0.5,
"max_new_tokens": 4000,
"select_param": "user456",
"incremental": false,
"model_name": "Qwen/Qwen2.5-Coder-32B-Instruct"
}'
curl -X 'POST' \
'http://54.92.111.204:5670/api/v2/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"model": "Qwen/Qwen2.5-Coder-32B-Instruct",
"messages": "What is the capital of UK?",
"temperature": 0.7,
"top_p": 1,
"top_k": -1,
"n": 1,
"max_tokens": 0,
"stream": false,
"repetition_penalty": 1,
"frequency_penalty": 0,
"presence_penalty": 0,
"chat_mode": "chat_knowledge",
"chat_param": "user456",
"incremental": true,
"enable_vis": true
}'
{"id":"feb02244-deac-4955-97f8-175f1f541d20","object":"chat.completion","created":1748949140,"model":"Qwen/Qwen2.5-Coder-32B-Instruct","choices":[{"index":0,"message":{"role":"assistant","content":"知识库中提供的内容不足以回答此问题\n\n<references title=\"References\" references=\"[]\" />","reasoning_content":null},"finish_reason":null}],"usage":{"prompt_tokens":0,"total_tokens":0,"completion_tokens":0}}%wade@WadeLeedeMacBook-Pro graphrag %
......@@ -20,6 +20,8 @@ import (
httpSwagger "github.com/swaggo/http-swagger"
_ "github.com/wade-liwei/agentchat/docs" // 导入生成的 Swagger 文档
"github.com/wade-liwei/agentchat/util"
)
// GraphKnowledge
......@@ -189,7 +191,12 @@ func main() {
fmt.Println("input-------------------------------", string(inputAsJson))
dRequest := ai.DocumentFromText(input.Content, nil)
metaData := make(map[string]any)
metaData[util.UserIdKey] = input.UserID
metaData[util.UserNameKey] = input.Username
dRequest := ai.DocumentFromText(input.Content, metaData)
response, err := ai.Retrieve(ctx, retriever, ai.WithDocs(dRequest))
if err != nil {
return "", err
......@@ -199,12 +206,24 @@ func main() {
fmt.Println("d.Content[0].Text", d.Content[0].Text)
}
graphResponse, err := ai.Retrieve(ctx, graphRetriever, ai.WithDocs(dRequest))
if err != nil {
return "", err
}
for _, d := range graphResponse.Documents {
fmt.Println("d.Content[0].Text", d.Content[0].Text)
}
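// Note: the graph retriever's documents are only printed above for inspection;
// the prompt context assembled below uses only the vector retriever's response.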
var sb strings.Builder
for _, d := range response.Documents {
sb.WriteString(d.Content[0].Text)
sb.WriteByte('\n')
}
promptInput := &simpleQaPromptInput{
Query: input.Content,
Context: sb.String(),
......
......@@ -32,6 +32,8 @@ import (
"github.com/firebase/genkit/go/ai"
"github.com/firebase/genkit/go/genkit"
"github.com/wade-liwei/agentchat/util"
)
// Client is the knowledge base client
......@@ -277,7 +279,7 @@ func parseAddr(addr string) (string, int) {
func DefineIndexerAndRetriever(ctx context.Context, g *genkit.Genkit) (ai.Indexer, ai.Retriever, error) {
spaceID := ""
modelName := ""
modelName := "Qwen/Qwen2.5-Coder-32B-Instruct"
k := genkit.LookupPlugin(g, provider)
if k == nil {
......@@ -604,71 +606,246 @@ func (ds *docStore) Index(ctx context.Context, req *ai.IndexerRequest) error {
// return nil
// }
// RetrieverOptions for Knowledge retrieval.
type RetrieverOptions struct {
Count int `json:"count,omitempty"` // Max documents to retrieve.
MetricType string `json:"metric_type,omitempty"` // Similarity metric (e.g., "L2", "IP").
// // RetrieverOptions for Knowledge retrieval.
// type RetrieverOptions struct {
// Count int `json:"count,omitempty"` // Max documents to retrieve.
// MetricType string `json:"metric_type,omitempty"` // Similarity metric (e.g., "L2", "IP").
// }
// // Assuming ai.Part has a Text() method or Text field to get the string content
// func partsToString(parts []*ai.Part) string {
// var texts []string
// for _, part := range parts {
// // Adjust this based on the actual ai.Part structure
// // If ai.Part has a Text() method:
// texts = append(texts, part.Text)
// // OR if ai.Part has a Text field:
// // texts = append(texts, part.Text)
// }
// return strings.Join(texts, " ")
// }
// // Retrieve implements the Retriever.Retrieve method.
// func (ds *docStore) Retrieve(ctx context.Context, req *ai.RetrieverRequest) (*ai.RetrieverResponse, error) {
// // count := 3
// // metricTypeStr := "L2"
// // if req.Options != nil {
// // ropt, ok := req.Options.(*RetrieverOptions)
// // if !ok {
// // return nil, fmt.Errorf("graphrag.Retrieve options have type %T, want %T", req.Options, &RetrieverOptions{})
// // }
// // if ropt.Count > 0 {
// // count = ropt.Count
// // }
// // if ropt.MetricType != "" {
// // metricTypeStr = ropt.MetricType
// // }
// // }
// queryContent := partsToString(req.Query.Content)
// // Format query for retrieval.
// queryText := fmt.Sprintf("Search for: %s", queryContent)
// username := "Alice" // Default, override if metadata available.
// if req.Query.Metadata != nil {
// if uname, ok := req.Query.Metadata["username"].(string); ok {
// username = uname
// }
// }
// // Prepare request for chat completions endpoint.
// url := fmt.Sprintf("%s/api/v1/chat/completions", ds.client.BaseURL)
// chatReq := struct {
// ConvUID string `json:"conv_uid"`
// UserInput string `json:"user_input"`
// UserName string `json:"user_name"`
// ChatMode string `json:"chat_mode"`
// AppCode string `json:"app_code"`
// Temperature float32 `json:"temperature"`
// MaxNewTokens int `json:"max_new_tokens"`
// SelectParam string `json:"select_param"`
// ModelName string `json:"model_name"`
// Incremental bool `json:"incremental"`
// SysCode string `json:"sys_code"`
// PromptCode string `json:"prompt_code"`
// ExtInfo map[string]interface{} `json:"ext_info"`
// }{
// ConvUID: "",
// UserInput: queryText,
// UserName: username,
// ChatMode: "",
// AppCode: "",
// Temperature: 0.5,
// MaxNewTokens: 4000,
// SelectParam: "",
// ModelName: ds.modelName,
// Incremental: false,
// SysCode: "",
// PromptCode: "",
// ExtInfo: map[string]interface{}{
// "space_id": ds.spaceID,
// //"k": count,
// },
// }
// body, err := json.Marshal(chatReq)
// if err != nil {
// return nil, fmt.Errorf("marshal chat request: %w", err)
// }
// httpReq, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewBuffer(body))
// if err != nil {
// return nil, fmt.Errorf("create chat request: %w", err)
// }
// httpReq.Header.Set("Accept", "application/json")
// httpReq.Header.Set("Content-Type", "application/json")
// client := &http.Client{}
// resp, err := client.Do(httpReq)
// if err != nil {
// return nil, fmt.Errorf("send chat request: %w", err)
// }
// defer resp.Body.Close()
// if resp.StatusCode != http.StatusOK {
// body, _ := io.ReadAll(resp.Body)
// return nil, fmt.Errorf("chat completion failed with status %d: %s", resp.StatusCode, string(body))
// }
// // Parse response
// var chatResp struct {
// Success bool `json:"success"`
// Data struct {
// Answer []struct {
// Content string `json:"content"`
// DocID int `json:"doc_id"`
// Score float64 `json:"score"`
// Metadata map[string]interface{} `json:"metadata_map"`
// } `json:"answer"`
// } `json:"data"`
// }
// if err := json.NewDecoder(resp.Body).Decode(&chatResp); err != nil {
// return nil, fmt.Errorf("decode chat response: %w", err)
// }
// var docs []*ai.Document
// for _, doc := range chatResp.Data.Answer {
// metadata := doc.Metadata
// if metadata == nil {
// metadata = make(map[string]interface{})
// }
// // Ensure metadata includes user_id and username.
// if _, ok := metadata["user_id"]; !ok {
// metadata["user_id"] = "user123"
// }
// if _, ok := metadata["username"]; !ok {
// metadata["username"] = username
// }
// aiDoc := ai.DocumentFromText(doc.Content, metadata)
// docs = append(docs, aiDoc)
// }
// return &ai.RetrieverResponse{
// Documents: docs,
// }, nil
// }
// ChatRequest defines the request body, matching the curl request used in the unit test
type ChatRequest struct {
Model string `json:"model"`
Messages string `json:"messages"`
Temperature float64 `json:"temperature"`
TopP float64 `json:"top_p"`
TopK int `json:"top_k"`
N int `json:"n"`
MaxTokens int `json:"max_tokens"`
Stream bool `json:"stream"`
RepetitionPenalty float64 `json:"repetition_penalty"`
FrequencyPenalty float64 `json:"frequency_penalty"`
PresencePenalty float64 `json:"presence_penalty"`
ChatMode string `json:"chat_mode"`
ChatParam string `json:"chat_param"`
EnableVis bool `json:"enable_vis"`
}
// ChatResponse defines the response body, matching the API response seen in the unit test
type ChatResponse struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Choices []struct {
Index int `json:"index"`
Message struct {
Role string `json:"role"`
Content string `json:"content"`
ReasoningContent interface{} `json:"reasoning_content"`
} `json:"message"`
FinishReason interface{} `json:"finish_reason"`
} `json:"choices"`
Usage struct {
PromptTokens int `json:"prompt_tokens"`
TotalTokens int `json:"total_tokens"`
CompletionTokens int `json:"completion_tokens"`
} `json:"usage"`
}
// partsToString joins the text of all query parts into a single string.
// This assumes each ai.Part exposes its content via the Text field.
func partsToString(parts []*ai.Part) string {
	var texts []string
	for _, part := range parts {
		texts = append(texts, part.Text)
	}
	return strings.Join(texts, " ")
}
// Retrieve implements the Retriever.Retrieve method.
func (ds *docStore) Retrieve(ctx context.Context, req *ai.RetrieverRequest) (*ai.RetrieverResponse, error) {
// count := 3
// metricTypeStr := "L2"
// if req.Options != nil {
// ropt, ok := req.Options.(*RetrieverOptions)
// if !ok {
// return nil, fmt.Errorf("graphrag.Retrieve options have type %T, want %T", req.Options, &RetrieverOptions{})
// }
// if ropt.Count > 0 {
// count = ropt.Count
// }
// if ropt.MetricType != "" {
// metricTypeStr = ropt.MetricType
// }
// }
// Format query for retrieval.
queryText := fmt.Sprintf("Search for: %s", req.Query.Content)
username := "Alice" // Default, override if metadata available.
if req.Query.Metadata != nil {
if uname, ok := req.Query.Metadata["username"].(string); ok {
username = uname
}
queryContent := partsToString(req.Query.Content)
queryText := fmt.Sprintf("Search for: %s", queryContent)
if req.Query.Metadata == nil {
// Metadata is required: it must carry the username and user_id keys extracted below.
return nil, fmt.Errorf("req.Query.Metadata must not be nil")
}
for k, v := range req.Query.Metadata {
fmt.Println("k", k, "v", v)
}
// Extract username and user_id from req.Query.Metadata
userName, ok := req.Query.Metadata[util.UserNameKey].(string)
if !ok {
return nil, fmt.Errorf("req.Query.Metadata must provide username key")
}
userId, ok := req.Query.Metadata[util.UserIdKey].(string)
if !ok {
return nil, fmt.Errorf("req.Query.Metadata must provide user_id key")
}
// Prepare request for chat completions endpoint.
url := fmt.Sprintf("%s/api/v1/chat/completions", ds.client.BaseURL)
chatReq := struct {
ConvUID string `json:"conv_uid"`
UserInput string `json:"user_input"`
UserName string `json:"user_name"`
ChatMode string `json:"chat_mode"`
AppCode string `json:"app_code"`
Temperature float32 `json:"temperature"`
MaxNewTokens int `json:"max_new_tokens"`
SelectParam string `json:"select_param"`
ModelName string `json:"model_name"`
Incremental bool `json:"incremental"`
SysCode string `json:"sys_code"`
PromptCode string `json:"prompt_code"`
ExtInfo map[string]interface{} `json:"ext_info"`
}{
ConvUID: "",
UserInput: queryText,
UserName: username,
ChatMode: "",
AppCode: "",
Temperature: 0.5,
MaxNewTokens: 4000,
SelectParam: "",
ModelName: ds.modelName,
Incremental: false,
SysCode: "",
PromptCode: "",
ExtInfo: map[string]interface{}{
"space_id": ds.spaceID,
//"k": count,
},
url := fmt.Sprintf("%s/api/v2/chat/completions", ds.client.BaseURL)
chatReq := ChatRequest{
Model: ds.modelName,
Messages: queryText,
Temperature: 0.7,
TopP: 1,
TopK: -1,
N: 1,
MaxTokens: 0,
Stream: false,
RepetitionPenalty: 1,
FrequencyPenalty: 0,
PresencePenalty: 0,
ChatMode: "chat_knowledge",
ChatParam: userId,
EnableVis: true,
}
body, err := json.Marshal(chatReq)
......@@ -696,35 +873,21 @@ func (ds *docStore) Retrieve(ctx context.Context, req *ai.RetrieverRequest) (*ai
}
// Parse response
var chatResp struct {
Success bool `json:"success"`
Data struct {
Answer []struct {
Content string `json:"content"`
DocID int `json:"doc_id"`
Score float64 `json:"score"`
Metadata map[string]interface{} `json:"metadata_map"`
} `json:"answer"`
} `json:"data"`
}
var chatResp ChatResponse
if err := json.NewDecoder(resp.Body).Decode(&chatResp); err != nil {
return nil, fmt.Errorf("decode chat response: %w", err)
// The decoder may already have consumed part of the body, so this captures
// only the unread remainder for debugging.
raw, _ := io.ReadAll(resp.Body)
return nil, fmt.Errorf("decode chat response: %w, raw response: %s", err, string(raw))
}
// Convert response to ai.Document
var docs []*ai.Document
for _, doc := range chatResp.Data.Answer {
metadata := doc.Metadata
if metadata == nil {
metadata = make(map[string]interface{})
}
// Ensure metadata includes user_id and username.
if _, ok := metadata["user_id"]; !ok {
metadata["user_id"] = "user123"
}
if _, ok := metadata["username"]; !ok {
metadata["username"] = username
if len(chatResp.Choices) > 0 {
content := chatResp.Choices[0].Message.Content
metadata := map[string]interface{}{
util.UserIdKey: userId,
util.UserNameKey: userName,
}
aiDoc := ai.DocumentFromText(doc.Content, metadata)
aiDoc := ai.DocumentFromText(content, metadata)
docs = append(docs, aiDoc)
}
......
package graphrag
import (
"bytes"
"encoding/json"
"io"
"net/http"
"testing"
)
// TestChatCompletions exercises the v2 chat completions endpoint against a live server.
func TestChatCompletions(t *testing.T) {
// Prepare the request data
reqBody := ChatRequest{
Model: "Qwen/Qwen2.5-Coder-32B-Instruct",
Messages: "What is the capital of UK?",
Temperature: 0.7,
TopP: 1,
TopK: -1,
N: 1,
MaxTokens: 0,
Stream: false,
RepetitionPenalty: 1,
FrequencyPenalty: 0,
PresencePenalty: 0,
ChatMode: "chat_knowledge",
ChatParam: "user456",
EnableVis: true,
}
bodyBytes, err := json.Marshal(reqBody)
if err != nil {
t.Fatalf("Failed to marshal request body: %v", err)
}
// Create the HTTP request
url := "http://54.92.111.204:5670/api/v2/chat/completions"
req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(bodyBytes))
if err != nil {
t.Fatalf("Failed to create request: %v", err)
}
req.Header.Set("Accept", "application/json")
req.Header.Set("Content-Type", "application/json")
// Send the request
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
t.Fatalf("Failed to send request: %v", err)
}
defer resp.Body.Close()
// Verify the response status code
if resp.StatusCode != http.StatusOK {
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
t.Fatalf("Failed to read response body: %v", err)
}
t.Errorf("Expected status code 200, got %d. Response body: %s", resp.StatusCode, string(bodyBytes))
}
// Read the response body for debugging
bodyBytes, err = io.ReadAll(resp.Body)
if err != nil {
t.Fatalf("Failed to read response body: %v", err)
}
bodyString := string(bodyBytes)
t.Logf("Response body: %s", bodyString)
// Parse the response body
var respBody ChatResponse
if err := json.Unmarshal(bodyBytes, &respBody); err != nil {
t.Fatalf("Failed to decode response body: %v. Raw response: %s", err, bodyString)
}
// Verify the response content
if len(respBody.Choices) == 0 {
t.Fatal("Expected at least one choice in response, got none")
}
expectedContent := "知识库中提供的内容不足以回答此问题\n\n<references title=\"References\" references=\"[]\" />"
if respBody.Choices[0].Message.Content != expectedContent {
t.Errorf("Expected response content %q, got %q", expectedContent, respBody.Choices[0].Message.Content)
}
}
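// Note: the expected content above is the knowledge base's "insufficient content"
// fallback answer. Once documents have been indexed for user456 the model's answer
// will differ and this exact-match assertion will fail; asserting on a non-empty
// Content field may be a more robust check.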
/*
curl 'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'Accept-Language: zh-CN,zh;q=0.9' \
-H 'Connection: keep-alive' \
-H 'Content-Type: application/json' \
-b 'sb-eowcghempsnzqalhkois-auth-token=base64-eyJhY2Nlc3NfdG9rZW4iOiJleUpoYkdjaU9pSklVekkxTmlJc0ltdHBaQ0k2SWpkemVpdEtXQzlCU0ZaSFFUWnBhVE1pTENKMGVYQWlPaUpLVjFRaWZRLmV5SnBjM01pT2lKb2RIUndjem92TDJWdmQyTm5hR1Z0Y0hOdWVuRmhiR2hyYjJsekxuTjFjR0ZpWVhObExtTnZMMkYxZEdndmRqRWlMQ0p6ZFdJaU9pSXpaamRoWVdRMU1TMHpNVGd3TFRSbVlXRXRPV0ZsTnkwMlpXRm1PV001Tm1JNFpHVWlMQ0poZFdRaU9pSmhkWFJvWlc1MGFXTmhkR1ZrSWl3aVpYaHdJam94TnpRNE16TXpOREV5TENKcFlYUWlPakUzTkRnek1qazRNVElzSW1WdFlXbHNJam9pYkdsM1pXbGZkMkZrWlVCcFkyeHZkV1F1WTI5dElpd2ljR2h2Ym1VaU9pSWlMQ0poY0hCZmJXVjBZV1JoZEdFaU9uc2ljSEp2ZG1sa1pYSWlPaUpsYldGcGJDSXNJbkJ5YjNacFpHVnljeUk2V3lKbGJXRnBiQ0pkZlN3aWRYTmxjbDl0WlhSaFpHRjBZU0k2ZXlKbGJXRnBiQ0k2SW14cGQyVnBYM2RoWkdWQWFXTnNiM1ZrTG1OdmJTSXNJbVZ0WVdsc1gzWmxjbWxtYVdWa0lqcDBjblZsTENKd2FHOXVaVjkyWlhKcFptbGxaQ0k2Wm1Gc2MyVXNJbk4xWWlJNklqTm1OMkZoWkRVeExUTXhPREF0TkdaaFlTMDVZV1UzTFRabFlXWTVZemsyWWpoa1pTSjlMQ0p5YjJ4bElqb2lZWFYwYUdWdWRHbGpZWFJsWkNJc0ltRmhiQ0k2SW1GaGJERWlMQ0poYlhJaU9sdDdJbTFsZEdodlpDSTZJbkJoYzNOM2IzSmtJaXdpZEdsdFpYTjBZVzF3SWpveE56UTJOVE0zTXpNNWZWMHNJbk5sYzNOcGIyNWZhV1FpT2lKa1lUTmhOelpqTmkweVlXVXdMVFEyWVdNdFlUaGxOUzFtWVRCallUTXpObUl5TlRZaUxDSnBjMTloYm05dWVXMXZkWE1pT21aaGJITmxmUS52MEhKdFRHN3EzSnc4QkRqdlFrWm9pOTVKcnNVZGNUZi1FWjBvc2d6OEk0IiwidG9rZW5fdHlwZSI6ImJlYXJlciIsImV4cGlyZXNfaW4iOjM2MDAsImV4cGlyZXNfYXQiOjE3NDgzMzM0MTIsInJlZnJlc2hfdG9rZW4iOiJscmpzdGl5MnM0a2giLCJ1c2VyIjp7ImlkIjoiM2Y3YWFkNTEtMzE4MC00ZmFhLTlhZTctNmVhZjljOTZiOGRlIiwiYXVkIjoiYXV0aGVudGljYXRlZCIsInJvbGUiOiJhdXRoZW50aWNhdGVkIiwiZW1haWwiOiJsaXdlaV93YWRlQGljbG91ZC5jb20iLCJlbWFpbF9jb25maXJtZWRfYXQiOiIyMDI1LTAyLTEyVDExOjA0OjQwLjIxNzkzOVoiLCJwaG9uZSI6IiIsImNvbmZpcm1hdGlvbl9zZW50X2F0IjoiMjAyNS0wMi0xMlQxMTowNDowMC41Mzk2MjhaIiwiY29uZmlybWVkX2F0IjoiMjAyNS0wMi0xMlQxMTowNDo0MC4yMTc5MzlaIiwibGFzdF9zaWduX2luX2F0IjoiMjAyNS0wNS0wNlQxMzo0NToyMC45MDA0MTRaIiwiYXBwX21ldGFkYXRhIjp7InByb3ZpZGVyIjoiZW1haWwiLCJwcm92aWRlcnMiOlsiZW1haWwiXX0sInVzZXJfbWV0YWRhdGEiOnsiZW1haWwiOiJsaXdlaV93YWRlQGljbG91ZC5jb20iLCJlbWFpbF92ZXJpZmllZCI6dHJ1ZSwicGhvbmVfdmVyaWZpZWQiOmZhbHNlLCJzdWIiOiIzZjdhYWQ1MS0zMTgwLTRmYWEtOWFlNy02ZWFmOWM5NmI4ZGUifSwiaWRlbnRpdGllcyI6W3siaWRlbnRpdHlfaWQiOiI2MjFiYTUxZi0yYzYzLTQxOWMtOWI2OS0zYzUzYTc5NDlhMzkiLCJpZCI6IjNmN2FhZDUxLTMxODAtNGZhYS05YWU3LTZlYWY5Yzk2YjhkZSIsInVzZXJfaWQiOiIzZjdhYWQ1MS0zMTgwLTRmYWEtOWFlNy02ZWFmOWM5NmI4ZGUiLCJpZGVudGl0eV9kYXRhIjp7ImVtYWlsIjoibGl3ZWlfd2FkZUBpY2xvdWQuY29tIiwiZW1haWxfdmVyaWZpZWQiOnRydWUsInBob25lX3ZlcmlmaWVkIjpmYWxzZSwic3ViIjoiM2Y3YWFkNTEtMzE4MC00ZmFhLTlhZTctNmVhZjljOTZiOGRlIn0sInByb3ZpZGVyIjoiZW1haWwiLCJsYXN0X3NpZ25faW5fYXQiOiIyMDI1LTAyLTEyVDExOjA0OjAwLjUxMTAwOVoiLCJjcmVhdGVkX2F0IjoiMjAyNS0wMi0xMlQxMTowNDowMC41MTExNDRaIiwidXBkYXRlZF9hdCI6IjIwMjUtMDItMTJUMTE6MDQ6MDAuNTExMTQ0WiIsImVtYWlsIjoibGl3ZWlfd2FkZUBpY2xvdWQuY29tIn1dLCJjcmVhdGVkX2F0IjoiMjAyNS0wMi0xMlQxMTowNDowMC40NDYwMzdaIiwidXBkYXRlZF9hdCI6IjIwMjUtMDUtMjdUMDc6MTA6MTIuMjM0MDUyWiIsImlzX2Fub255bW91cyI6ZmFsc2V9fQ' \
-H 'Origin: http://54.92.111.204:5670' \
-H 'Referer: http://54.92.111.204:5670/chat?scene=chat_knowledge&id=f62c8044-4054-11f0-b9d7-36eb2f648a81&knowledge_id=user456' \
-H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36' \
-H 'accept: text/event-stream' \
-H 'user-id;' \
--data-raw '{"chat_mode":"chat_knowledge","model_name":"Qwen/Qwen2.5-Coder-32B-Instruct","user_input":"1111111111111111111111111","app_code":"chat_knowledge","temperature":0.6,"max_new_tokens":4000,"select_param":"user456","conv_uid":"f62c8044-4054-11f0-b9d7-36eb2f648a81"}' \
--insecure
curl -X 'POST' \
'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"user_input": "111111111",
"chat_mode": "chat_knowledge",
"app_code": "chat_knowledge",
"temperature": 0.5,
"max_new_tokens": 4000,
"select_param": "user456",
"model_name": "Qwen/Qwen2.5-Coder-32B-Instruct"
}'
curl -X 'POST' \
'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"user_input": "111111111",
"chat_mode": "chat_knowledge",
"app_code": "chat_knowledge",
"temperature": 0.5,
"max_new_tokens": 4000,
"incremental": false,
"select_param": "user456",
"model_name": "Qwen/Qwen2.5-Coder-32B-Instruct"
}'
*/
......@@ -51,3 +51,91 @@ curl -X 'POST' \
/*
curl 'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'Accept-Language: zh-CN,zh;q=0.9' \
-H 'Connection: keep-alive' \
-H 'Content-Type: application/json' \
-b 'sb-eowcghempsnzqalhkois-auth-token=base64-eyJhY2Nlc3NfdG9rZW4iOiJleUpoYkdjaU9pSklVekkxTmlJc0ltdHBaQ0k2SWpkemVpdEtXQzlCU0ZaSFFUWnBhVE1pTENKMGVYQWlPaUpLVjFRaWZRLmV5SnBjM01pT2lKb2RIUndjem92TDJWdmQyTm5hR1Z0Y0hOdWVuRmhiR2hyYjJsekxuTjFjR0ZpWVhObExtTnZMMkYxZEdndmRqRWlMQ0p6ZFdJaU9pSXpaamRoWVdRMU1TMHpNVGd3TFRSbVlXRXRPV0ZsTnkwMlpXRm1PV001Tm1JNFpHVWlMQ0poZFdRaU9pSmhkWFJvWlc1MGFXTmhkR1ZrSWl3aVpYaHdJam94TnpRNE16TXpOREV5TENKcFlYUWlPakUzTkRnek1qazRNVElzSW1WdFlXbHNJam9pYkdsM1pXbGZkMkZrWlVCcFkyeHZkV1F1WTI5dElpd2ljR2h2Ym1VaU9pSWlMQ0poY0hCZmJXVjBZV1JoZEdFaU9uc2ljSEp2ZG1sa1pYSWlPaUpsYldGcGJDSXNJbkJ5YjNacFpHVnljeUk2V3lKbGJXRnBiQ0pkZlN3aWRYTmxjbDl0WlhSaFpHRjBZU0k2ZXlKbGJXRnBiQ0k2SW14cGQyVnBYM2RoWkdWQWFXTnNiM1ZrTG1OdmJTSXNJbVZ0WVdsc1gzWmxjbWxtYVdWa0lqcDBjblZsTENKd2FHOXVaVjkyWlhKcFptbGxaQ0k2Wm1Gc2MyVXNJbk4xWWlJNklqTm1OMkZoWkRVeExUTXhPREF0TkdaaFlTMDVZV1UzTFRabFlXWTVZemsyWWpoa1pTSjlMQ0p5YjJ4bElqb2lZWFYwYUdWdWRHbGpZWFJsWkNJc0ltRmhiQ0k2SW1GaGJERWlMQ0poYlhJaU9sdDdJbTFsZEdodlpDSTZJbkJoYzNOM2IzSmtJaXdpZEdsdFpYTjBZVzF3SWpveE56UTJOVE0zTXpNNWZWMHNJbk5sYzNOcGIyNWZhV1FpT2lKa1lUTmhOelpqTmkweVlXVXdMVFEyWVdNdFlUaGxOUzFtWVRCallUTXpObUl5TlRZaUxDSnBjMTloYm05dWVXMXZkWE1pT21aaGJITmxmUS52MEhKdFRHN3EzSnc4QkRqdlFrWm9pOTVKcnNVZGNUZi1FWjBvc2d6OEk0IiwidG9rZW5fdHlwZSI6ImJlYXJlciIsImV4cGlyZXNfaW4iOjM2MDAsImV4cGlyZXNfYXQiOjE3NDgzMzM0MTIsInJlZnJlc2hfdG9rZW4iOiJscmpzdGl5MnM0a2giLCJ1c2VyIjp7ImlkIjoiM2Y3YWFkNTEtMzE4MC00ZmFhLTlhZTctNmVhZjljOTZiOGRlIiwiYXVkIjoiYXV0aGVudGljYXRlZCIsInJvbGUiOiJhdXRoZW50aWNhdGVkIiwiZW1haWwiOiJsaXdlaV93YWRlQGljbG91ZC5jb20iLCJlbWFpbF9jb25maXJtZWRfYXQiOiIyMDI1LTAyLTEyVDExOjA0OjQwLjIxNzkzOVoiLCJwaG9uZSI6IiIsImNvbmZpcm1hdGlvbl9zZW50X2F0IjoiMjAyNS0wMi0xMlQxMTowNDowMC41Mzk2MjhaIiwiY29uZmlybWVkX2F0IjoiMjAyNS0wMi0xMlQxMTowNDo0MC4yMTc5MzlaIiwibGFzdF9zaWduX2luX2F0IjoiMjAyNS0wNS0wNlQxMzo0NToyMC45MDA0MTRaIiwiYXBwX21ldGFkYXRhIjp7InByb3ZpZGVyIjoiZW1haWwiLCJwcm92aWRlcnMiOlsiZW1haWwiXX0sInVzZXJfbWV0YWRhdGEiOnsiZW1haWwiOiJsaXdlaV93YWRlQGljbG91ZC5jb20iLCJlbWFpbF92ZXJpZmllZCI6dHJ1ZSwicGhvbmVfdmVyaWZpZWQiOmZhbHNlLCJzdWIiOiIzZjdhYWQ1MS0zMTgwLTRmYWEtOWFlNy02ZWFmOWM5NmI4ZGUifSwiaWRlbnRpdGllcyI6W3siaWRlbnRpdHlfaWQiOiI2MjFiYTUxZi0yYzYzLTQxOWMtOWI2OS0zYzUzYTc5NDlhMzkiLCJpZCI6IjNmN2FhZDUxLTMxODAtNGZhYS05YWU3LTZlYWY5Yzk2YjhkZSIsInVzZXJfaWQiOiIzZjdhYWQ1MS0zMTgwLTRmYWEtOWFlNy02ZWFmOWM5NmI4ZGUiLCJpZGVudGl0eV9kYXRhIjp7ImVtYWlsIjoibGl3ZWlfd2FkZUBpY2xvdWQuY29tIiwiZW1haWxfdmVyaWZpZWQiOnRydWUsInBob25lX3ZlcmlmaWVkIjpmYWxzZSwic3ViIjoiM2Y3YWFkNTEtMzE4MC00ZmFhLTlhZTctNmVhZjljOTZiOGRlIn0sInByb3ZpZGVyIjoiZW1haWwiLCJsYXN0X3NpZ25faW5fYXQiOiIyMDI1LTAyLTEyVDExOjA0OjAwLjUxMTAwOVoiLCJjcmVhdGVkX2F0IjoiMjAyNS0wMi0xMlQxMTowNDowMC41MTExNDRaIiwidXBkYXRlZF9hdCI6IjIwMjUtMDItMTJUMTE6MDQ6MDAuNTExMTQ0WiIsImVtYWlsIjoibGl3ZWlfd2FkZUBpY2xvdWQuY29tIn1dLCJjcmVhdGVkX2F0IjoiMjAyNS0wMi0xMlQxMTowNDowMC40NDYwMzdaIiwidXBkYXRlZF9hdCI6IjIwMjUtMDUtMjdUMDc6MTA6MTIuMjM0MDUyWiIsImlzX2Fub255bW91cyI6ZmFsc2V9fQ' \
-H 'Origin: http://54.92.111.204:5670' \
-H 'Referer: http://54.92.111.204:5670/chat?scene=chat_knowledge&id=f62c8044-4054-11f0-b9d7-36eb2f648a81&knowledge_id=user456' \
-H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36' \
-H 'accept: text/event-stream' \
-H 'user-id;' \
--data-raw '{"chat_mode":"chat_knowledge","model_name":"Qwen/Qwen2.5-Coder-32B-Instruct","user_input":"1111111111111111111111111","app_code":"chat_knowledge","temperature":0.6,"max_new_tokens":4000,"select_param":"user456","conv_uid":"f62c8044-4054-11f0-b9d7-36eb2f648a81"}' \
--insecure
curl -X 'POST' \
'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"user_input": "111111111",
"chat_mode": "chat_knowledge",
"app_code": "chat_knowledge",
"temperature": 0.5,
"max_new_tokens": 4000,
"select_param": "user456",
"model_name": "Qwen/Qwen2.5-Coder-32B-Instruct"
}'
curl -X 'POST' \
'http://54.92.111.204:5670/api/v1/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"user_input": "111111111",
"chat_mode": "chat_knowledge",
"app_code": "chat_knowledge",
"temperature": 0.5,
"max_new_tokens": 4000,
"incremental": false,
"select_param": "user456",
"model_name": "Qwen/Qwen2.5-Coder-32B-Instruct"
}'
*/
curl -X 'POST' \
'http://54.92.111.204:5670/api/v2/chat/completions' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d '{
"model": "Qwen/Qwen2.5-Coder-32B-Instruct",
"messages": "What is the capital of UK?",
"temperature": 0.7,
"top_p": 1,
"top_k": -1,
"n": 1,
"max_tokens": 0,
"stream": false,
"repetition_penalty": 1,
"frequency_penalty": 0,
"presence_penalty": 0,
"chat_mode": "chat_knowledge",
"chat_param": "user456",
"incremental": true,
"enable_vis": true
}'
{"id":"feb02244-deac-4955-97f8-175f1f541d20","object":"chat.completion","created":1748949140,"model":"Qwen/Qwen2.5-Coder-32B-Instruct","choices":[{"index":0,"message":{"role":"assistant","content":"知识库中提供的内容不足以回答此问题\n\n<references title=\"References\" references=\"[]\" />","reasoning_content":null},"finish_reason":null}],"usage":{"prompt_tokens":0,"total_tokens":0,"completion_tokens":0}}
package util
const UserNameKey = "username"
const UserIdKey = "user_id"