Skip to content

Commit

Permalink
feat: support DeepL's model (close songquanpeng#1126)
Browse files Browse the repository at this point in the history
  • Loading branch information
songquanpeng committed Apr 27, 2024
1 parent e64e770 commit 0079062
Show file tree
Hide file tree
Showing 20 changed files with 305 additions and 10 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
+ [x] [Cohere](https://cohere.com/)
+ [x] [DeepSeek](https://www.deepseek.com/)
+ [x] [Cloudflare Workers AI](https://developers.cloudflare.com/workers-ai/)
+ [x] [DeepL](https://www.deepl.com/)
2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)
3. 支持通过**负载均衡**的方式访问多个渠道。
4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。
Expand Down
3 changes: 3 additions & 0 deletions relay/adaptor.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import (
"github.com/songquanpeng/one-api/relay/adaptor/cloudflare"
"github.com/songquanpeng/one-api/relay/adaptor/cohere"
"github.com/songquanpeng/one-api/relay/adaptor/coze"
"github.com/songquanpeng/one-api/relay/adaptor/deepl"
"github.com/songquanpeng/one-api/relay/adaptor/gemini"
"github.com/songquanpeng/one-api/relay/adaptor/ollama"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
Expand Down Expand Up @@ -52,6 +53,8 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
return &cohere.Adaptor{}
case apitype.Cloudflare:
return &cloudflare.Adaptor{}
case apitype.DeepL:
return &deepl.Adaptor{}
}
return nil
}
73 changes: 73 additions & 0 deletions relay/adaptor/deepl/adaptor.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
package deepl

import (
"errors"
"fmt"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/relay/adaptor"
"github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)

// Adaptor implements the relay adaptor interface for DeepL's translation API.
type Adaptor struct {
	// meta is the per-request relay metadata captured in Init.
	meta *meta.Meta
	// promptText caches the text sent to DeepL so DoResponse can compute
	// prompt usage without re-parsing the request.
	// NOTE(review): this makes the adaptor stateful per request — safe only
	// if every request gets its own Adaptor instance; confirm GetAdaptor is
	// invoked per request rather than shared.
	promptText string
}

// Init stores the request metadata for later use by the other adaptor methods.
func (a *Adaptor) Init(meta *meta.Meta) {
	a.meta = meta
}

// GetRequestURL returns DeepL's translate endpoint under the channel's base URL.
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
	return meta.BaseURL + "/v2/translate", nil
}

// SetupRequestHeader applies the common relay headers and DeepL's
// "DeepL-Auth-Key" authorization scheme.
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
	adaptor.SetupCommonRequestHeader(c, req, meta)
	req.Header.Set("Authorization", "DeepL-Auth-Key "+meta.APIKey)
	return nil
}

// ConvertRequest translates an OpenAI-style chat request into a DeepL
// translate request, remembering the prompt text so DoResponse can bill it.
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	convertedRequest, text := ConvertRequest(*request)
	a.promptText = text
	return convertedRequest, nil
}

// ConvertImageRequest returns the request unchanged.
// NOTE(review): DeepL exposes no image endpoint; this likely exists only to
// satisfy the adaptor interface — confirm image relay is unreachable for DeepL.
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return request, nil
}

// DoRequest sends the converted request through the shared HTTP request helper.
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
	return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

// DoResponse relays DeepL's reply to the client in OpenAI format (streamed or
// not) and builds the usage record for billing.
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
	if meta.IsStream {
		err = StreamHandler(c, resp, meta.ActualModelName)
	} else {
		err = Handler(c, resp, meta.ActualModelName)
	}
	// Only the prompt is counted; CompletionTokens stays zero — presumably
	// because DeepL bills by input characters, not output tokens.
	// NOTE(review): len() is the byte length, not the character count, so
	// multi-byte prompts (CJK — the primary use case) are overcounted;
	// consider utf8.RuneCountInString. TODO confirm the intended billing unit.
	promptTokens := len(a.promptText)
	usage = &model.Usage{
		PromptTokens: promptTokens,
		TotalTokens:  promptTokens,
	}
	return
}

// GetModelList returns the pseudo-model IDs this channel accepts.
func (a *Adaptor) GetModelList() []string {
	return ModelList
}

// GetChannelName returns the channel identifier used in logs and billing.
func (a *Adaptor) GetChannelName() string {
	return "deepl"
}
9 changes: 9 additions & 0 deletions relay/adaptor/deepl/constants.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
package deepl

// https://developers.deepl.com/docs/api-reference/translate

// ModelList holds pseudo-model IDs: DeepL has no model parameter, so the
// suffix after "deepl-" encodes the target language (see parseLangFromModelName).
var ModelList = []string{
	"deepl-zh",
	"deepl-en",
	"deepl-ja",
}
11 changes: 11 additions & 0 deletions relay/adaptor/deepl/helper.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
package deepl

import "strings"

// parseLangFromModelName extracts the target-language segment from a
// "deepl-<lang>" pseudo-model name, defaulting to "ZH" when the name has
// no dash-separated language suffix.
func parseLangFromModelName(modelName string) string {
	const defaultLang = "ZH"
	segments := strings.SplitN(modelName, "-", 3)
	if len(segments) < 2 {
		return defaultLang
	}
	return segments[1]
}
137 changes: 137 additions & 0 deletions relay/adaptor/deepl/main.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
package deepl

import (
"encoding/json"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/constant"
"github.com/songquanpeng/one-api/relay/constant/finishreason"
"github.com/songquanpeng/one-api/relay/constant/role"
"github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)

// https://developers.deepl.com/docs/getting-started/your-first-api-request

// ConvertRequest maps an OpenAI chat request onto DeepL's translate payload,
// returning the payload together with the text that will be translated.
// Only the latest message is sent; earlier turns carry no meaning for DeepL.
func ConvertRequest(textRequest model.GeneralOpenAIRequest) (*Request, string) {
	text := ""
	if n := len(textRequest.Messages); n > 0 {
		text = textRequest.Messages[n-1].StringContent()
	}
	return &Request{
		Text:       []string{text},
		TargetLang: parseLangFromModelName(textRequest.Model),
	}, text
}

// StreamResponseDeepL2OpenAI wraps a DeepL translate response in a single
// OpenAI streaming chunk carrying the whole translation.
func StreamResponseDeepL2OpenAI(deeplResponse *Response) *openai.ChatCompletionsStreamResponse {
	var choice openai.ChatCompletionsStreamResponseChoice
	choice.Delta.Role = role.Assistant
	choice.FinishReason = &constant.StopFinishReason
	if translations := deeplResponse.Translations; len(translations) > 0 {
		choice.Delta.Content = translations[0].Text
	}
	return &openai.ChatCompletionsStreamResponse{
		Object:  constant.StreamObject,
		Created: helper.GetTimestamp(),
		Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
	}
}

// ResponseDeepL2OpenAI converts a DeepL translate response into a complete
// (non-streaming) OpenAI chat completion with a single assistant choice.
func ResponseDeepL2OpenAI(deeplResponse *Response) *openai.TextResponse {
	responseText := ""
	if translations := deeplResponse.Translations; len(translations) > 0 {
		responseText = translations[0].Text
	}
	return &openai.TextResponse{
		Object:  constant.NonStreamObject,
		Created: helper.GetTimestamp(),
		Choices: []openai.TextResponseChoice{
			{
				Index: 0,
				Message: model.Message{
					Role:    role.Assistant,
					Content: responseText,
					Name:    nil,
				},
				FinishReason: finishreason.Stop,
			},
		},
	}
}

// StreamHandler reads DeepL's (non-streaming) response in full and replays it
// to the client as an OpenAI SSE stream: one data chunk followed by [DONE].
//
// Fix: the original closed resp.Body a second time at the end of the function
// even though it is already closed (and error-checked) right after ReadAll;
// the redundant close was dead code and has been removed.
func StreamHandler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
	}
	if err = resp.Body.Close(); err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
	}
	var deeplResponse Response
	if err = json.Unmarshal(responseBody, &deeplResponse); err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
	}
	fullTextResponse := StreamResponseDeepL2OpenAI(&deeplResponse)
	fullTextResponse.Model = modelName
	fullTextResponse.Id = helper.GetResponseID(c)
	jsonData, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
	}
	common.SetEventStreamHeaders(c)
	// Emit exactly two events: the payload, then the terminator. jsonData is
	// nilled after the first render so the second invocation sends [DONE].
	c.Stream(func(w io.Writer) bool {
		if jsonData != nil {
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonData)})
			jsonData = nil
			return true
		}
		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
		return false
	})
	return nil
}

// Handler reads DeepL's translate response, surfaces DeepL-reported errors,
// and writes the result to the client as an OpenAI chat completion.
//
// Fix: the original assigned the final Write error to err and then returned
// nil, leaving a dead assignment (staticcheck SA4006); the discard is now
// explicit since headers are already committed and there is no recovery path.
func Handler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
	}
	if err = resp.Body.Close(); err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
	}
	var deeplResponse Response
	if err = json.Unmarshal(responseBody, &deeplResponse); err != nil {
		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
	}
	// DeepL reports failures as a {"message": "..."} body.
	if deeplResponse.Message != "" {
		return &model.ErrorWithStatusCode{
			Error: model.Error{
				Message: deeplResponse.Message,
				Code:    "deepl_error",
			},
			StatusCode: resp.StatusCode,
		}
	}
	fullTextResponse := ResponseDeepL2OpenAI(&deeplResponse)
	fullTextResponse.Model = modelName
	fullTextResponse.Id = helper.GetResponseID(c)
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	// Response is already committed; a failed write here is not actionable.
	_, _ = c.Writer.Write(jsonResponse)
	return nil
}
16 changes: 16 additions & 0 deletions relay/adaptor/deepl/model.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
package deepl

// Request is the payload for DeepL's POST /v2/translate endpoint.
type Request struct {
	// Text holds the strings to translate; this adaptor always sends one.
	Text []string `json:"text"`
	// TargetLang is the target language code derived from the model name.
	TargetLang string `json:"target_lang"`
}

// Translation is one result entry in a translate response.
type Translation struct {
	DetectedSourceLanguage string `json:"detected_source_language,omitempty"`
	Text                   string `json:"text,omitempty"`
}

// Response is DeepL's translate response body. On failure DeepL returns a
// body containing only Message (see Handler's error path).
type Response struct {
	Translations []Translation `json:"translations,omitempty"`
	Message      string        `json:"message,omitempty"`
}
4 changes: 4 additions & 0 deletions relay/adaptor/openai/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -206,3 +206,7 @@ func CountTokenText(text string, model string) int {
tokenEncoder := getTokenEncoder(model)
return getTokenNum(tokenEncoder, text)
}

// CountToken estimates the token count of text using the gpt-3.5-turbo
// tokenizer as a model-agnostic default.
func CountToken(text string) int {
	return CountTokenInput(text, "gpt-3.5-turbo")
}
1 change: 1 addition & 0 deletions relay/apitype/define.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ const (
Coze
Cohere
Cloudflare
DeepL

Dummy // this one is only for count, do not add any channel after this
)
4 changes: 4 additions & 0 deletions relay/billing/ratio/model.go
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,10 @@ var ModelRatio = map[string]float64{
// https://platform.deepseek.com/api-docs/pricing/
"deepseek-chat": 1.0 / 1000 * RMB,
"deepseek-coder": 1.0 / 1000 * RMB,
// https://www.deepl.com/pro?cta=header-prices
"deepl-zh": 25.0 / 1000 * USD,
"deepl-en": 25.0 / 1000 * USD,
"deepl-ja": 25.0 / 1000 * USD,
}

var CompletionRatio = map[string]float64{}
Expand Down
1 change: 1 addition & 0 deletions relay/channeltype/define.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ const (
Cohere
DeepSeek
Cloudflare
DeepL

Dummy
)
2 changes: 2 additions & 0 deletions relay/channeltype/helper.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ func ToAPIType(channelType int) int {
apiType = apitype.Cohere
case Cloudflare:
apiType = apitype.Cloudflare
case DeepL:
apiType = apitype.DeepL
}

return apiType
Expand Down
1 change: 1 addition & 0 deletions relay/channeltype/url.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ var ChannelBaseURLs = []string{
"https://api.cohere.ai", // 35
"https://api.deepseek.com", // 36
"https://api.cloudflare.com", // 37
"https://api-free.deepl.com", // 38
}

func init() {
Expand Down
2 changes: 2 additions & 0 deletions relay/constant/common.go
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
package constant

// StopFinishReason is a var rather than a const because callers take its
// address (e.g. choice.FinishReason = &constant.StopFinishReason).
var StopFinishReason = "stop"

// StreamObject is the OpenAI object type for streaming completion chunks.
var StreamObject = "chat.completion.chunk"

// NonStreamObject is the OpenAI object type for full completions.
var NonStreamObject = "chat.completion"
5 changes: 5 additions & 0 deletions relay/constant/finishreason/define.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
package finishreason

const (
	// Stop is the OpenAI finish reason for a naturally completed response.
	Stop = "stop"
)
5 changes: 5 additions & 0 deletions relay/constant/role/define.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
package role

const (
	// Assistant is the OpenAI chat role for model-generated messages.
	Assistant = "assistant"
)
18 changes: 18 additions & 0 deletions relay/controller/helper.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import (
"github.com/songquanpeng/one-api/relay/relaymode"
"math"
"net/http"
"strings"
)

func getAndValidateTextRequest(c *gin.Context, relayMode int) (*relaymodel.GeneralOpenAIRequest, error) {
Expand Down Expand Up @@ -204,3 +205,20 @@ func getMappedModelName(modelName string, mapping map[string]string) (string, bo
}
return modelName, false
}

// isErrorHappened reports whether the upstream response should be treated as
// an error: a missing response, a non-200 status, or (for streaming requests)
// a JSON body where an event stream was expected.
func isErrorHappened(meta *meta.Meta, resp *http.Response) bool {
	switch {
	case resp == nil, resp.StatusCode != http.StatusOK:
		return true
	case meta.ChannelType == channeltype.DeepL:
		// DeepL never streams, so the content-type heuristic below does not apply.
		return false
	}
	return meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json")
}
Loading

0 comments on commit 0079062

Please sign in to comment.