From bf14de64c15d7798c0d0252fd641085b6eac1fff Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=BB=84=E6=8C=AF?= <huangzhen@styd.cn>
Date: Thu, 6 Feb 2025 19:10:22 +0800
Subject: [PATCH 1/3] openai: add DeepSeek API type and reasoning-content streaming

---
 examples/openai_example/openaillm_test.go    |  42 ++++++
 go.mod                                       |   2 +-
 go.sum                                       |   4 +-
 openai/internal/openaiclient/api_chat.go     | 127 +++++++++++++++++--
 openai/internal/openaiclient/openaiclient.go |  20 ++-
 openai/openaillm.go                          |  28 ++--
 openai/openaillm_option.go                   |   7 +-
 options.go                                   |  20 +++
 schema/chat_message.go                       |   3 +-
 9 files changed, 221 insertions(+), 32 deletions(-)

diff --git a/examples/openai_example/openaillm_test.go b/examples/openai_example/openaillm_test.go
index 6217bdf..f2393c2 100644
--- a/examples/openai_example/openaillm_test.go
+++ b/examples/openai_example/openaillm_test.go
@@ -23,6 +23,7 @@ import (
 )
 
 var baseUrl = "https://apiagent.kaopuai.com/v1"
+var deepseekBaseUrl = "https://api.deepseek.com/chat/completions"
 var token = os.Getenv("OPENAI_API_KEY")
 
 // Test chat invocation
@@ -60,6 +61,7 @@ func TestLLM_Chat(t *testing.T) {
 // Test streaming responses
 func TestLLM_Stream(t *testing.T) {
 	ctx := context.Background()
+	token = "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"
 	llm, err := openai.New(openai.WithToken(token), openai.WithBaseURL(baseUrl))
 	if err != nil {
 		fmt.Println(err.Error())
@@ -480,3 +482,43 @@ func CallAzure(userMessage string) {
 	}
 
 }
+
+func TestDeepseek_Stream(t *testing.T) {
+	ctx := context.Background()
+	llm, err := openai.New(openai.WithToken(token), openai.WithModel("deepseek-reasoner"), openai.WithBaseURL(deepseekBaseUrl), openai.WithAPIType(openai.APITypeDeepseek))
+	if err != nil {
+		fmt.Println(err.Error())
+		return
+	}
+	messages := []*schema.ChatMessage{
+		{
+			Role:    schema.RoleSystem,
+			Content: "You are an AI assistant",
+		},
+		{
+			Role:    schema.RoleUser,
+			Content: "Hello there",
+		},
+	}
+	resp, err := llm.Chat(ctx, messages,
+		kpllms.WithReasonStreamingFunc(func(ctx context.Context, content kpllms.ReasonContent, innerErr error) error {
+			fmt.Println("type: ", content.Type, "content: ", content.Content)
+			return nil
+		}),
+		//kpllms.WithStreamingFunc(func(ctx context.Context, chunk []byte, innerErr error) error {
+		//	fmt.Println(string(chunk))
+		//	return nil
+		//}),
+	)
+
+	if err != nil {
+		fmt.Println(err.Error())
+		return
+	}
+	b, err := json.Marshal(resp)
+	if err != nil {
+		fmt.Println(err.Error())
+		return
+	}
+	fmt.Println(string(b))
+}
diff --git a/go.mod b/go.mod
index 992cbfa..16eadb7 100644
--- a/go.mod
+++ b/go.mod
@@ -10,7 +10,7 @@ require (
 	github.com/pkoukk/tiktoken-go v0.1.7
 	github.com/tcolgate/mp3 v0.0.0-20170426193717-e79c5a46d300
 	github.com/volcengine/volc-sdk-golang v1.0.158
-	github.com/volcengine/volcengine-go-sdk v1.0.178
+	github.com/volcengine/volcengine-go-sdk v1.0.179
 )
 
 require (
diff --git a/go.sum b/go.sum
index 2db0faa..50f6de7 100644
--- a/go.sum
+++ b/go.sum
@@ -2660,8 +2660,8 @@ github.com/vishvananda/netns v0.0.4/go.mod h1:SpkAiCQRtJ6TvvxPnOSyH3BMl6unz3xZla
 github.com/volcengine/volc-sdk-golang v1.0.23/go.mod h1:AfG/PZRUkHJ9inETvbjNifTDgut25Wbkm2QoYBTbvyU=
 github.com/volcengine/volc-sdk-golang v1.0.158 h1:7VHA5SpMNFjVpskDl9uEzKNdeQXBzqB+kaY+azX0FnU=
 github.com/volcengine/volc-sdk-golang v1.0.158/go.mod h1:+epuCKzDYSqsciTxQdDOY5DCt8dNsmj2gRN6yI//fQo=
-github.com/volcengine/volcengine-go-sdk v1.0.178 h1:lCu2JuWOoIZAjNfJSBi/KLTWVFdvejLXsBrNE1wgCIU=
-github.com/volcengine/volcengine-go-sdk v1.0.178/go.mod h1:gfEDc1s7SYaGoY+WH2dRrS3qiuDJMkwqyfXWCa7+7oA=
+github.com/volcengine/volcengine-go-sdk v1.0.179 h1:jYVfrw9Fw3I2jQTNogu19gs7TI+3569PLXyiDnYXJOc=
+github.com/volcengine/volcengine-go-sdk v1.0.179/go.mod h1:gfEDc1s7SYaGoY+WH2dRrS3qiuDJMkwqyfXWCa7+7oA=
 github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
 github.com/willf/bitset v1.1.11/go.mod h1:83CECat5yLh5zVOf4P1ErAgKA5UDvKtgyUABdr3+MjI=
 github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
diff --git a/openai/internal/openaiclient/api_chat.go b/openai/internal/openaiclient/api_chat.go
index 9cbdf57..2796c52 100644
--- a/openai/internal/openaiclient/api_chat.go
+++ b/openai/internal/openaiclient/api_chat.go
@@ -5,6 +5,7 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"resp.kaopuai.com/lib/kpllms"
 	"resp.kaopuai.com/lib/kpllms/internal/httputils"
 	"strings"
 )
@@ -40,10 +41,12 @@ type ChatRequest struct {
 	Tools []Tool `json:"tools,omitempty"`
 
 	// Specifies how tools are invoked: a string or a ToolChoice, e.g. "auto" for automatic selection or a specific call
-	ToolChoice any `json:"tool_choice,omitempty"`
+	ToolChoice    any            `json:"tool_choice,omitempty"`
+	StreamOptions *StreamOptions `json:"stream_options,omitempty"`
 
 	// Callback for streamed responses
-	StreamingFunc func(ctx context.Context, chunk []byte, innerErr error) error `json:"-"`
+	StreamingFunc       func(ctx context.Context, chunk []byte, innerErr error) error                 `json:"-"`
+	ReasonStreamingFunc func(ctx context.Context, content kpllms.ReasonContent, innerErr error) error `json:"-"`
 }
 
 type ToolType string
@@ -80,6 +83,10 @@ type ResponseFormat struct {
 	Type string `json:"type"`
 }
 
+type StreamOptions struct {
+	IncludeUsage bool `json:"include_usage,omitempty"`
+}
+
 // ChatMessage is a message in a chat request.
 type ChatMessage struct { //nolint:musttag
 	Role    string `json:"role"`
@@ -96,9 +103,10 @@ type ChatMessage struct { //nolint:musttag
 
 // ChatMessageResponse is a message in a chat request.
 type ChatMessageResponse struct { //nolint:musttag
-	Role    string `json:"role"`
-	Content string `json:"content,omitempty"`
-	Name    string `json:"name,omitempty"`
+	Role             string `json:"role"`
+	Content          string `json:"content,omitempty"`
+	ReasoningContent string `json:"reasoning_content,omitempty"`
+	Name             string `json:"name,omitempty"`
 
 	// Tool call list
 	ToolCalls []ToolCall `json:"tool_calls,omitempty"`
@@ -164,12 +172,14 @@ type StreamedChatResponsePayload struct {
 	Choices []struct {
 		Index float64 `json:"index,omitempty"`
 		Delta struct {
-			Role      string     `json:"role,omitempty"`
-			Content   string     `json:"content,omitempty"`
-			ToolCalls []ToolCall `json:"tool_calls,omitempty"`
+			Role             string     `json:"role,omitempty"`
+			Content          string     `json:"content,omitempty"`
+			ReasoningContent string     `json:"reasoning_content,omitempty"`
+			ToolCalls        []ToolCall `json:"tool_calls,omitempty"`
 		} `json:"delta,omitempty"`
 		FinishReason FinishReason `json:"finish_reason,omitempty"`
 	} `json:"choices,omitempty"`
+	Usage ChatUsage `json:"usage,omitempty"`
 }
 
 // FunctionDefinition is a definition of a function that can be called by the model.
@@ -203,9 +213,6 @@ type FunctionCall struct {
 }
 
 func (c *Client) createChat(ctx context.Context, payload *ChatRequest) (*ChatCompletionResponse, error) {
-	if payload.StreamingFunc != nil {
-		payload.Stream = true
-	}
 	var response ChatCompletionResponse
 	// Handle the streaming response
 	if payload.StreamingFunc != nil {
@@ -291,3 +298,101 @@ func (c *Client) createChat(ctx context.Context, payload *ChatRequest) (*ChatCom
 	}
 	return &response, nil
 }
+
+func (c *Client) createReasonChat(ctx context.Context, payload *ChatRequest) (*ChatCompletionResponse, error) {
+	var response ChatCompletionResponse
+	// Handle the streaming response
+	if payload.ReasonStreamingFunc != nil {
+		// Pre-initialize the choices slice so later assignments don't hit a nil pointer
+		response.Choices = []*ChatCompletionChoice{
+			{},
+		}
+		err := httputils.HttpStream(ctx, c.buildURL("/chat/completions", c.Model), payload, c.setHeaders(), func(ctx context.Context, line string) error {
+			//fmt.Println(line)
+			// The callback receives every line of the streamed response; process each line in turn
+			if line == "" {
+				// Skip blank lines; they separate SSE events
+				return nil
+			}
+			if !strings.HasPrefix(line, "data:") {
+				return fmt.Errorf("unexpected line: %v", line)
+			}
+			data := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
+			// End of stream
+			if data == "[DONE]" {
+				return nil
+			}
+			fmt.Println(string(data))
+			// Parse the payload
+			var streamResponse StreamedChatResponsePayload
+			err := json.Unmarshal([]byte(data), &streamResponse)
+			if err != nil {
+				return err
+			}
+			if len(streamResponse.Choices) == 0 {
+				return nil
+			}
+
+			response.Usage = ChatUsage{
+				PromptTokens:     streamResponse.Usage.PromptTokens,
+				CompletionTokens: streamResponse.Usage.CompletionTokens,
+				TotalTokens:      streamResponse.Usage.TotalTokens,
+			}
+
+			// Record the finish reason from the latest chunk
+			response.Choices[0].FinishReason = streamResponse.Choices[0].FinishReason
+			// Non-function-call path
+			if streamResponse.Choices[0].Delta.ToolCalls == nil {
+				// Skip empty chunks
+				if streamResponse.Choices[0].Delta.ReasoningContent != "" {
+					// Non-function call
+					// Accumulate the full content
+					response.Choices[0].Message.ReasoningContent += streamResponse.Choices[0].Delta.ReasoningContent
+					return payload.ReasonStreamingFunc(ctx, kpllms.ReasonContent{Type: kpllms.ContentTypeReason, Content: streamResponse.Choices[0].Delta.ReasoningContent}, nil)
+				} else if streamResponse.Choices[0].Delta.Content != "" {
+					// Accumulate the full content
+					response.Choices[0].Message.Content += streamResponse.Choices[0].Delta.Content
+					return payload.ReasonStreamingFunc(ctx, kpllms.ReasonContent{Type: kpllms.ContentTypeResult, Content: streamResponse.Choices[0].Delta.Content}, nil)
+				} else {
+					return nil
+				}
+
+			}
+
+			// Function-call path: append a new entry whenever type=function appears; OpenAI can return tool calls in parallel
+			// Index of the tool call this chunk belongs to
+			toolCallIndex := streamResponse.Choices[0].Delta.ToolCalls[0].Index
+			if streamResponse.Choices[0].Delta.ToolCalls[0].Type == "function" {
+				response.Choices[0].Message.ToolCalls = append(response.Choices[0].Message.ToolCalls, ToolCall{})
+				response.Choices[0].Message.ToolCalls[toolCallIndex].Index = toolCallIndex
+				response.Choices[0].Message.ToolCalls[toolCallIndex].ID = streamResponse.Choices[0].Delta.ToolCalls[0].ID
+				response.Choices[0].Message.ToolCalls[toolCallIndex].Type = streamResponse.Choices[0].Delta.ToolCalls[0].Type
+				response.Choices[0].Message.ToolCalls[toolCallIndex].Function.Name = streamResponse.Choices[0].Delta.ToolCalls[0].Function.Name
+				//fmt.Println(len(response.Choices[0].Message.ToolCalls))
+			}
+			response.Choices[0].Message.ToolCalls[toolCallIndex].Function.Arguments += streamResponse.Choices[0].Delta.ToolCalls[0].Function.Arguments
+
+			// Function calls are not streamed to the callback, to avoid emitting malformed output
+
+			return nil
+		})
+		//PromptTokens := NumTokensFromMessages(payload.Messages, "gpt-4o")
+		//CompletionTokens := CountTokens("gpt-4o", response.Choices[0].Message.Content)
+		//response.Usage = ChatUsage{
+		//	PromptTokens:     PromptTokens,
+		//	CompletionTokens: CompletionTokens,
+		//	TotalTokens:      PromptTokens + CompletionTokens,
+		//}
+		if err != nil {
+			return nil, err
+		}
+
+	} else {
+		// Handle the non-streaming response
+		err := httputils.HttpPost(ctx, c.buildURL("/chat/completions", c.Model), payload, c.setHeaders(), &response)
+		if err != nil {
+			return nil, err
+		}
+	}
+	return &response, nil
+}
diff --git a/openai/internal/openaiclient/openaiclient.go b/openai/internal/openaiclient/openaiclient.go
index 89dbe51..fab3b37 100644
--- a/openai/internal/openaiclient/openaiclient.go
+++ b/openai/internal/openaiclient/openaiclient.go
@@ -22,9 +22,10 @@ var ErrEmptyResponse = errors.New("empty response")
 type APIType string
 
 const (
-	APITypeOpenAI  APIType = "OPEN_AI"
-	APITypeAzure   APIType = "AZURE"
-	APITypeAzureAD APIType = "AZURE_AD"
+	APITypeOpenAI   APIType = "OPEN_AI"
+	APITypeAzure    APIType = "AZURE"
+	APITypeAzureAD  APIType = "AZURE_AD"
+	APITypeDeepseek APIType = "Deepseek"
 )
 
 // Client is a client for the OpenAI API.
@@ -142,7 +143,13 @@ func (c *Client) CreateChat(ctx context.Context, r *ChatRequest) (*ChatCompletio
 			r.Model = c.Model
 		}
 	}
-	resp, err := c.createChat(ctx, r)
+	var resp *ChatCompletionResponse
+	var err error
+	if r.ReasonStreamingFunc != nil {
+		resp, err = c.createReasonChat(ctx, r)
+	} else {
+		resp, err = c.createChat(ctx, r)
+	}
 	if err != nil {
 		return nil, err
 	}
@@ -159,7 +166,7 @@ func IsAzure(apiType APIType) bool {
 func (c *Client) setHeaders() map[string]string {
 	m := map[string]string{}
 	m["Content-Type"] = "application/json"
-	if c.apiType == APITypeOpenAI || c.apiType == APITypeAzureAD {
+	if c.apiType == APITypeOpenAI || c.apiType == APITypeAzureAD || c.apiType == APITypeDeepseek {
 		m["Authorization"] = "Bearer " + c.token
 	} else {
 		m["api-key"] = c.token
@@ -175,6 +182,9 @@ func (c *Client) buildURL(suffix string, model string) string {
 		return c.buildAzureURL(suffix, model)
 	}
 
+	if c.apiType == APITypeDeepseek {
+		return fmt.Sprintf("%s%s", "https://api.deepseek.com", suffix)
+	}
 	// OpenAI implementation:
 	return fmt.Sprintf("%s%s", c.baseURL, suffix)
 }
diff --git a/openai/openaillm.go b/openai/openaillm.go
index b33d9f1..54d9b0d 100644
--- a/openai/openaillm.go
+++ b/openai/openaillm.go
@@ -143,13 +143,14 @@ func (o *LLM) Chat(ctx context.Context, messages []*schema.ChatMessage, options
 
 	// Build the request parameters; OpenAI does not support realtime search
 	req := &openaiclient.ChatRequest{
-		Model:         opts.Model,
-		Messages:      chatMsgs,
-		StreamingFunc: opts.StreamingFunc,
-		Temperature:   opts.Temperature,
-		MaxTokens:     opts.MaxTokens,
-		TopP:          opts.TopP,
-		StopWords:     opts.StopWords,
+		Model:               opts.Model,
+		Messages:            chatMsgs,
+		StreamingFunc:       opts.StreamingFunc,
+		Temperature:         opts.Temperature,
+		MaxTokens:           opts.MaxTokens,
+		TopP:                opts.TopP,
+		StopWords:           opts.StopWords,
+		ReasonStreamingFunc: opts.ReasonStreamingFunc,
 	}
 
 	// Return responses in JSON format
@@ -157,6 +158,14 @@ func (o *LLM) Chat(ctx context.Context, messages []*schema.ChatMessage, options
 		req.ResponseFormat = ResponseFormatJSON
 	}
 
+	if req.StreamingFunc != nil || req.ReasonStreamingFunc != nil {
+		req.Stream = true
+	}
+
+	if req.ReasonStreamingFunc != nil {
+		req.StreamOptions = &openaiclient.StreamOptions{IncludeUsage: true}
+	}
+
 	// Assemble the tools
 	for _, tool := range opts.Tools {
 		t, err := toolFromTool(tool)
@@ -191,8 +200,9 @@ func (o *LLM) Chat(ctx context.Context, messages []*schema.ChatMessage, options
 	for i, c := range result.Choices {
 		choices = append(choices, &schema.ContentChoice{
 
-			Content:    c.Message.Content,
-			StopReason: fmt.Sprint(c.FinishReason),
+			Content:          c.Message.Content,
+			ReasoningContent: c.Message.ReasoningContent,
+			StopReason:       fmt.Sprint(c.FinishReason),
 			Usage: &schema.Usage{
 				PromptTokens:     result.Usage.PromptTokens,
 				CompletionTokens: result.Usage.CompletionTokens,
diff --git a/openai/openaillm_option.go b/openai/openaillm_option.go
index 3822262..cc4a25a 100644
--- a/openai/openaillm_option.go
+++ b/openai/openaillm_option.go
@@ -15,9 +15,10 @@ const (
 type APIType openaiclient.APIType
 
 const (
-	APITypeOpenAI  APIType = APIType(openaiclient.APITypeOpenAI)
-	APITypeAzure           = APIType(openaiclient.APITypeAzure)
-	APITypeAzureAD         = APIType(openaiclient.APITypeAzureAD)
+	APITypeOpenAI   APIType = APIType(openaiclient.APITypeOpenAI)
+	APITypeAzure            = APIType(openaiclient.APITypeAzure)
+	APITypeAzureAD          = APIType(openaiclient.APITypeAzureAD)
+	APITypeDeepseek         = APIType(openaiclient.APITypeDeepseek)
 )
 
 const (
diff --git a/options.go b/options.go
index 7eae7cd..7d37012 100644
--- a/options.go
+++ b/options.go
@@ -2,6 +2,13 @@ package kpllms
 
 import "context"
 
+type ContentType string
+
+const (
+	ContentTypeReason ContentType = "reason"
+	ContentTypeResult ContentType = "result"
+)
+
 /*
 Common parameters for LLM calls
 */
@@ -31,6 +38,8 @@ type CallOptions struct {
 	JsonProperties       map[string]any
 	// Whether to return the full message when streaming
 	StreamFullMessage bool
+	// Streaming callback for reasoning models
+	ReasonStreamingFunc func(ctx context.Context, content ReasonContent, innerErr error) error
 }
 
 type ResponseFormat struct {
@@ -58,6 +67,11 @@ type ToolChoiceFunction struct {
 	Name string
 }
 
+type ReasonContent struct {
+	Type    ContentType `json:"type"`
+	Content string      `json:"content"`
+}
+
 func WithModel(model string) CallOption {
 	return func(o *CallOptions) {
 		o.Model = model
@@ -128,3 +142,9 @@ func WithStreamFullMessage(is bool) CallOption {
 		o.StreamFullMessage = is
 	}
 }
+
+func WithReasonStreamingFunc(streamingFunc func(ctx context.Context, content ReasonContent, innerErr error) error) CallOption {
+	return func(o *CallOptions) {
+		o.ReasonStreamingFunc = streamingFunc
+	}
+}
diff --git a/schema/chat_message.go b/schema/chat_message.go
index fe7fbc5..b9420b5 100644
--- a/schema/chat_message.go
+++ b/schema/chat_message.go
@@ -75,7 +75,8 @@ type ContentResponse struct {
 }
 
 type ContentChoice struct {
-	Content string
+	Content          string
+	ReasoningContent string
 
 	StopReason string
 
-- 
GitLab
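
A note on the stream_options change above: when include_usage is set, OpenAI-compatible endpoints deliver the token counts in one final data chunk whose choices array is empty and whose usage field is populated. The handler therefore has to record usage before skipping chunks that carry no choices; patch 2 below reorders exactly this. A minimal sketch of the required order, reusing the ChatUsage, ChatCompletionResponse, and StreamedChatResponsePayload types from api_chat.go (handleChunk is an illustrative name, not part of the patch):

  // handleChunk shows the ordering constraint: copy usage first, because
  // the terminal usage-only chunk has an empty Choices slice.
  func handleChunk(resp *ChatCompletionResponse, chunk StreamedChatResponsePayload) {
  	resp.Usage = ChatUsage{
  		PromptTokens:     chunk.Usage.PromptTokens,
  		CompletionTokens: chunk.Usage.CompletionTokens,
  		TotalTokens:      chunk.Usage.TotalTokens,
  	}
  	// Only after capturing usage is it safe to skip chunks without deltas.
  	if len(chunk.Choices) == 0 {
  		return
  	}
  	// ...delta and tool-call handling continues as in createReasonChat...
  }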


From 8f2ce8fee27013b5ea0f12a65547cb2115ef9ee3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=BB=84=E6=8C=AF?= <huangzhen@styd.cn>
Date: Thu, 6 Feb 2025 20:00:38 +0800
Subject: [PATCH 2/3] openai: handle <think>-tagged reasoning from NVIDIA-hosted deepseek-r1; capture usage before the empty-choices check

---
 examples/openai_example/openaillm_test.go    |  5 +--
 openai/internal/openaiclient/api_chat.go     | 32 ++++++++++++++------
 openai/internal/openaiclient/openaiclient.go | 11 ++++---
 3 files changed, 32 insertions(+), 16 deletions(-)

diff --git a/examples/openai_example/openaillm_test.go b/examples/openai_example/openaillm_test.go
index f2393c2..964a8b5 100644
--- a/examples/openai_example/openaillm_test.go
+++ b/examples/openai_example/openaillm_test.go
@@ -23,7 +23,7 @@ import (
 )
 
 var baseUrl = "https://apiagent.kaopuai.com/v1"
-var deepseekBaseUrl = "https://api.deepseek.com/chat/completions"
+var deepseekBaseUrl = "https://integrate.api.nvidia.com/v1"
 var token = os.Getenv("OPENAI_API_KEY")
 
 // Test chat invocation
@@ -485,7 +485,8 @@ func CallAzure(userMessage string) {
 
 func TestDeepseek_Stream(t *testing.T) {
 	ctx := context.Background()
-	llm, err := openai.New(openai.WithToken(token), openai.WithModel("deepseek-reasoner"), openai.WithBaseURL(deepseekBaseUrl), openai.WithAPIType(openai.APITypeDeepseek))
+	token = "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"
+	llm, err := openai.New(openai.WithToken(token), openai.WithModel("deepseek-ai/deepseek-r1"), openai.WithBaseURL(deepseekBaseUrl), openai.WithAPIType(openai.APITypeDeepseek))
 	if err != nil {
 		fmt.Println(err.Error())
 		return
diff --git a/openai/internal/openaiclient/api_chat.go b/openai/internal/openaiclient/api_chat.go
index 2796c52..e93c2af 100644
--- a/openai/internal/openaiclient/api_chat.go
+++ b/openai/internal/openaiclient/api_chat.go
@@ -307,6 +307,9 @@ func (c *Client) createReasonChat(ctx context.Context, payload *ChatRequest) (*C
 		response.Choices = []*ChatCompletionChoice{
 			{},
 		}
+
+		thinkStop := false
+
 		err := httputils.HttpStream(ctx, c.buildURL("/chat/completions", c.Model), payload, c.setHeaders(), func(ctx context.Context, line string) error {
 			//fmt.Println(line)
 			// The callback receives every line of the streamed response; process each line in turn
@@ -322,16 +325,12 @@ func (c *Client) createReasonChat(ctx context.Context, payload *ChatRequest) (*C
 			if data == "[DONE]" {
 				return nil
 			}
-			fmt.Println(string(data))
 			// Parse the payload
 			var streamResponse StreamedChatResponsePayload
 			err := json.Unmarshal([]byte(data), &streamResponse)
 			if err != nil {
 				return err
 			}
-			if len(streamResponse.Choices) == 0 {
-				return nil
-			}
 
 			response.Usage = ChatUsage{
 				PromptTokens:     streamResponse.Usage.PromptTokens,
@@ -339,22 +338,35 @@ func (c *Client) createReasonChat(ctx context.Context, payload *ChatRequest) (*C
 				TotalTokens:      streamResponse.Usage.TotalTokens,
 			}
 
+			if len(streamResponse.Choices) == 0 {
+				return nil
+			}
+
 			// Record the finish reason from the latest chunk
 			response.Choices[0].FinishReason = streamResponse.Choices[0].FinishReason
 			// Non-function-call path
 			if streamResponse.Choices[0].Delta.ToolCalls == nil {
+				if streamResponse.Choices[0].Delta.Content == "<think>" {
+					return nil
+				}
+				if streamResponse.Choices[0].Delta.Content == "\n\n" {
+					return nil
+				}
+				if streamResponse.Choices[0].Delta.Content == "</think>" {
+					thinkStop = true
+					return nil
+				}
+
 				// Skip empty chunks
-				if streamResponse.Choices[0].Delta.ReasoningContent != "" {
+				if !thinkStop {
 					// Non-function call
 					// Accumulate the full content
-					response.Choices[0].Message.ReasoningContent += streamResponse.Choices[0].Delta.ReasoningContent
-					return payload.ReasonStreamingFunc(ctx, kpllms.ReasonContent{Type: kpllms.ContentTypeReason, Content: streamResponse.Choices[0].Delta.ReasoningContent}, nil)
-				} else if streamResponse.Choices[0].Delta.Content != "" {
+					response.Choices[0].Message.ReasoningContent += streamResponse.Choices[0].Delta.Content
+					return payload.ReasonStreamingFunc(ctx, kpllms.ReasonContent{Type: kpllms.ContentTypeReason, Content: streamResponse.Choices[0].Delta.Content}, nil)
+				} else {
 					// Accumulate the full content
 					response.Choices[0].Message.Content += streamResponse.Choices[0].Delta.Content
 					return payload.ReasonStreamingFunc(ctx, kpllms.ReasonContent{Type: kpllms.ContentTypeResult, Content: streamResponse.Choices[0].Delta.Content}, nil)
-				} else {
-					return nil
 				}
 
 			}
diff --git a/openai/internal/openaiclient/openaiclient.go b/openai/internal/openaiclient/openaiclient.go
index fab3b37..cb2adce 100644
--- a/openai/internal/openaiclient/openaiclient.go
+++ b/openai/internal/openaiclient/openaiclient.go
@@ -166,8 +166,11 @@ func IsAzure(apiType APIType) bool {
 func (c *Client) setHeaders() map[string]string {
 	m := map[string]string{}
 	m["Content-Type"] = "application/json"
-	if c.apiType == APITypeOpenAI || c.apiType == APITypeAzureAD || c.apiType == APITypeDeepseek {
+	if c.apiType == APITypeOpenAI || c.apiType == APITypeAzureAD {
 		m["Authorization"] = "Bearer " + c.token
+	} else if c.apiType == APITypeDeepseek {
+		m["Authorization"] = "Bearer " + c.token
+		m["accept"] = "application/json"
 	} else {
 		m["api-key"] = c.token
 	}
@@ -182,9 +185,9 @@ func (c *Client) buildURL(suffix string, model string) string {
 		return c.buildAzureURL(suffix, model)
 	}
 
-	if c.apiType == APITypeDeepseek {
-		return fmt.Sprintf("%s%s", "https://api.deepseek.com", suffix)
-	}
+	//if c.apiType == APITypeDeepseek {
+	//	return fmt.Sprintf("%s%s", "https://api.deepseek.com", suffix)
+	//}
 	// OpenAI implementation:
 	return fmt.Sprintf("%s%s", c.baseURL, suffix)
 }
-- 
GitLab
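
Patch 2 above targets NVIDIA's OpenAI-compatible endpoint, where deepseek-ai/deepseek-r1 streams its reasoning inline between <think> and </think> tags in the ordinary content field, instead of DeepSeek's dedicated reasoning_content field; the thinkStop flag splits the stream into a reasoning phase and an answer phase. A self-contained sketch of that state machine (classifyDelta and deltaKind are illustrative names, not part of the patch); note that, as in the patch, "\n\n" chunks are dropped in both phases, so paragraph breaks in the final answer are lost too:

  package main

  import "fmt"

  type deltaKind string

  const (
  	kindSkip   deltaKind = "skip"
  	kindReason deltaKind = "reason"
  	kindResult deltaKind = "result"
  )

  // classifyDelta mirrors the <think>-tag handling in createReasonChat:
  // chunks before </think> are reasoning, chunks after it are the answer.
  func classifyDelta(thinkStop *bool, content string) deltaKind {
  	switch content {
  	case "", "<think>", "\n\n":
  		return kindSkip
  	case "</think>":
  		*thinkStop = true
  		return kindSkip
  	}
  	if !*thinkStop {
  		return kindReason
  	}
  	return kindResult
  }

  func main() {
  	thinkStop := false
  	for _, c := range []string{"<think>", "The user greets me.", "</think>", "Hello!"} {
  		fmt.Printf("%-6s %q\n", classifyDelta(&thinkStop, c), c)
  	}
  }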


From 28a9f90da911702a866bb3e036446d446d4b40f0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=BB=84=E6=8C=AF?= <huangzhen@styd.cn>
Date: Thu, 6 Feb 2025 20:00:52 +0800
Subject: [PATCH 3/3] examples: remove hardcoded API token from the DeepSeek test

---
 examples/openai_example/openaillm_test.go | 1 -
 1 file changed, 1 deletion(-)

diff --git a/examples/openai_example/openaillm_test.go b/examples/openai_example/openaillm_test.go
index 964a8b5..d6773a0 100644
--- a/examples/openai_example/openaillm_test.go
+++ b/examples/openai_example/openaillm_test.go
@@ -485,7 +485,6 @@ func CallAzure(userMessage string) {
 
 func TestDeepseek_Stream(t *testing.T) {
 	ctx := context.Background()
-	token = "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"
 	llm, err := openai.New(openai.WithToken(token), openai.WithModel("deepseek-ai/deepseek-r1"), openai.WithBaseURL(deepseekBaseUrl), openai.WithAPIType(openai.APITypeDeepseek))
 	if err != nil {
 		fmt.Println(err.Error())
-- 
GitLab
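
With the series applied, a caller opts into the reasoning stream through the new WithReasonStreamingFunc option. A minimal usage sketch, assuming the module's import paths as used in the test file above and a token in OPENAI_API_KEY:

  package main

  import (
  	"context"
  	"fmt"
  	"os"

  	"resp.kaopuai.com/lib/kpllms"
  	"resp.kaopuai.com/lib/kpllms/openai"
  	"resp.kaopuai.com/lib/kpllms/schema"
  )

  func main() {
  	llm, err := openai.New(
  		openai.WithToken(os.Getenv("OPENAI_API_KEY")),
  		openai.WithModel("deepseek-ai/deepseek-r1"),
  		openai.WithBaseURL("https://integrate.api.nvidia.com/v1"),
  		openai.WithAPIType(openai.APITypeDeepseek),
  	)
  	if err != nil {
  		panic(err)
  	}
  	messages := []*schema.ChatMessage{
  		{Role: schema.RoleUser, Content: "Hello"},
  	}
  	// Reasoning chunks arrive typed as "reason", answer chunks as "result".
  	_, err = llm.Chat(context.Background(), messages,
  		kpllms.WithReasonStreamingFunc(func(ctx context.Context, c kpllms.ReasonContent, innerErr error) error {
  			fmt.Printf("[%s] %s\n", c.Type, c.Content)
  			return nil
  		}),
  	)
  	if err != nil {
  		panic(err)
  	}
  }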