feat: initial support of Dall-E (#148, #266)
* feat: initial support of Dall-E

* fix: N was not multiplied into the quota

---------

Co-authored-by: JustSong <[email protected]>
Co-authored-by: JustSong <[email protected]>
3 people authored Jul 15, 2023
1 parent 81c5901 commit b520b54
Showing 7 changed files with 191 additions and 13 deletions.
11 changes: 6 additions & 5 deletions README.md
@@ -81,16 +81,17 @@ _✨ All in one OpenAI API, integrating various API access methods, ready to use out of the box
12. Support displaying quota in US dollars.
13. Support publishing announcements, setting a top-up link, and setting the initial quota for new users.
14. Support model mapping to redirect the model requested by the user.
15. Support rich **customization** options,
15. Support the image generation API.
16. Support rich **customization** options,
    1. Support customizing the system name, logo, and footer.
    2. Support customizing the home page and the about page, either with HTML & Markdown or by embedding a standalone web page via an iframe.
16. Support accessing the management API via a system access token.
17. Support Cloudflare Turnstile user verification.
18. Support user management with **multiple login and registration methods**
17. Support accessing the management API via a system access token.
18. Support Cloudflare Turnstile user verification.
19. Support user management with **multiple login and registration methods**
    + Email login and registration, with password reset via email.
    + [GitHub OAuth](https://github.com/settings/applications/new)
    + WeChat Official Account authorization (requires an additional deployment of [WeChat Server](https://github.com/songquanpeng/wechat-server)).
19. Once other large models open up their APIs, they will be supported as soon as possible and wrapped in the same unified API format.
20. Once other large models open up their APIs, they will be supported as soon as possible and wrapped in the same unified API format.

## Deployment
### Deploying with Docker
1 change: 1 addition & 0 deletions common/model-ratio.go
@@ -35,6 +35,7 @@ var ModelRatio = map[string]float64{
"text-search-ada-doc-001": 10,
"text-moderation-stable": 0.1,
"text-moderation-latest": 0.1,
"dall-e": 8,
}

func ModelRatio2JSONString() string {
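The new "dall-e" ratio is consumed by the image relay added below: images are billed per generated image rather than per token, using quota = int(modelRatio * groupRatio * sizeRatio * 1000) * n. A minimal sketch of that arithmetic, assuming a group ratio of 1.0 (the formula mirrors controller/relay-image.go in this commit; the helper name imageQuota is illustrative):

```go
package main

import "fmt"

// Sketch of the per-image quota charge introduced in this commit:
// quota = int(modelRatio * groupRatio * sizeRatio * 1000) * n
func imageQuota(modelRatio, groupRatio, sizeRatio float64, n int) int {
	return int(modelRatio*groupRatio*sizeRatio*1000) * n
}

func main() {
	// "dall-e" has a model ratio of 8; assume a group ratio of 1.0.
	fmt.Println(imageQuota(8, 1.0, 1.0, 1))   // 256x256, 1 image   -> 8000
	fmt.Println(imageQuota(8, 1.0, 1.125, 2)) // 512x512, 2 images  -> 18000
	fmt.Println(imageQuota(8, 1.0, 1.25, 4))  // 1024x1024, 4 images -> 40000
}
```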
10 changes: 10 additions & 0 deletions controller/model.go
@@ -2,6 +2,7 @@ package controller

import (
"fmt"

"github.com/gin-gonic/gin"
)

@@ -53,6 +54,15 @@ func init() {
})
// https://platform.openai.com/docs/models/model-endpoint-compatibility
openAIModels = []OpenAIModels{
{
Id: "dall-e",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "dall-e",
Parent: nil,
},
{
Id: "gpt-3.5-turbo",
Object: "model",
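For context, this entry makes "dall-e" show up in the GET /v1/models listing alongside the chat models. The sketch below shows roughly the JSON shape a client would see; the modelEntry struct is a simplified stand-in for the repository's OpenAIModels type (the JSON tags are assumed to follow the OpenAI models API format, and the permission and parent fields are omitted):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-in for controller.OpenAIModels; the real type also carries
// a permission slice and a Parent pointer.
type modelEntry struct {
	Id      string `json:"id"`
	Object  string `json:"object"`
	Created int    `json:"created"`
	OwnedBy string `json:"owned_by"`
	Root    string `json:"root"`
}

func main() {
	b, _ := json.MarshalIndent(modelEntry{
		Id:      "dall-e",
		Object:  "model",
		Created: 1677649963,
		OwnedBy: "openai",
		Root:    "dall-e",
	}, "", "  ")
	fmt.Println(string(b))
	// {
	//   "id": "dall-e",
	//   "object": "model",
	//   "created": 1677649963,
	//   "owned_by": "openai",
	//   "root": "dall-e"
	// }
}
```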
161 changes: 154 additions & 7 deletions controller/relay-image.go
@@ -1,34 +1,181 @@
package controller

import (
"github.com/gin-gonic/gin"
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"one-api/common"
"one-api/model"

"github.com/gin-gonic/gin"
)

func relayImageHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
// TODO: this part is not finished
req, err := http.NewRequest(c.Request.Method, c.Request.RequestURI, c.Request.Body)
imageModel := "dall-e"

tokenId := c.GetInt("token_id")
channelType := c.GetInt("channel")
userId := c.GetInt("id")
consumeQuota := c.GetBool("consume_quota")
group := c.GetString("group")

var imageRequest ImageRequest
if consumeQuota {
err := common.UnmarshalBodyReusable(c, &imageRequest)
if err != nil {
return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
}
}

// Prompt validation
if imageRequest.Prompt == "" {
return errorWrapper(errors.New("prompt is required"), "required_field_missing", http.StatusBadRequest)
}

// Reject sizes other than "256x256", "512x512", or "1024x1024"
if imageRequest.Size != "" && imageRequest.Size != "256x256" && imageRequest.Size != "512x512" && imageRequest.Size != "1024x1024" {
return errorWrapper(errors.New("size must be one of 256x256, 512x512, or 1024x1024"), "invalid_field_value", http.StatusBadRequest)
}

// N should be between 1 and 10
if imageRequest.N != 0 && (imageRequest.N < 1 || imageRequest.N > 10) {
return errorWrapper(errors.New("n must be between 1 and 10"), "invalid_field_value", http.StatusBadRequest)
}

// map model name
modelMapping := c.GetString("model_mapping")
isModelMapped := false
if modelMapping != "" {
modelMap := make(map[string]string)
err := json.Unmarshal([]byte(modelMapping), &modelMap)
if err != nil {
return errorWrapper(err, "unmarshal_model_mapping_failed", http.StatusInternalServerError)
}
if modelMap[imageModel] != "" {
imageModel = modelMap[imageModel]
isModelMapped = true
}
}

baseURL := common.ChannelBaseURLs[channelType]
requestURL := c.Request.URL.String()

if c.GetString("base_url") != "" {
baseURL = c.GetString("base_url")
}

fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL)

var requestBody io.Reader
if isModelMapped {
jsonStr, err := json.Marshal(imageRequest)
if err != nil {
return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
}
requestBody = bytes.NewBuffer(jsonStr)
} else {
requestBody = c.Request.Body
}

modelRatio := common.GetModelRatio(imageModel)
groupRatio := common.GetGroupRatio(group)
ratio := modelRatio * groupRatio
userQuota, err := model.CacheGetUserQuota(userId)

sizeRatio := 1.0
// Size
if imageRequest.Size == "256x256" {
sizeRatio = 1
} else if imageRequest.Size == "512x512" {
sizeRatio = 1.125
} else if imageRequest.Size == "1024x1024" {
sizeRatio = 1.25
}
quota := int(ratio*sizeRatio*1000) * imageRequest.N

if consumeQuota && userQuota-quota < 0 {
return errorWrapper(err, "insufficient_user_quota", http.StatusForbidden)
}

req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody)
if err != nil {
return errorWrapper(err, "new_request_failed", http.StatusInternalServerError)
}
req.Header.Set("Authorization", c.Request.Header.Get("Authorization"))

req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
req.Header.Set("Accept", c.Request.Header.Get("Accept"))

client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return errorWrapper(err, "do_request_failed", http.StatusOK)
return errorWrapper(err, "do_request_failed", http.StatusInternalServerError)
}

err = req.Body.Close()
if err != nil {
return errorWrapper(err, "close_request_body_failed", http.StatusOK)
return errorWrapper(err, "close_request_body_failed", http.StatusInternalServerError)
}
err = c.Request.Body.Close()
if err != nil {
return errorWrapper(err, "close_request_body_failed", http.StatusInternalServerError)
}
var textResponse ImageResponse

defer func() {
if consumeQuota {
err := model.PostConsumeTokenQuota(tokenId, quota)
if err != nil {
common.SysError("error consuming token remain quota: " + err.Error())
}
err = model.CacheUpdateUserQuota(userId)
if err != nil {
common.SysError("error update user quota cache: " + err.Error())
}
if quota != 0 {
tokenName := c.GetString("token_name")
logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio)
model.RecordConsumeLog(userId, 0, 0, imageModel, tokenName, quota, logContent)
model.UpdateUserUsedQuotaAndRequestCount(userId, quota)
channelId := c.GetInt("channel_id")
model.UpdateChannelUsedQuota(channelId, quota)
}
}
}()

if consumeQuota {
responseBody, err := io.ReadAll(resp.Body)

if err != nil {
return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
}
err = resp.Body.Close()
if err != nil {
return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
}
err = json.Unmarshal(responseBody, &textResponse)
if err != nil {
return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
}

resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
}

for k, v := range resp.Header {
c.Writer.Header().Set(k, v[0])
}
c.Writer.WriteHeader(resp.StatusCode)

_, err = io.Copy(c.Writer, resp.Body)
if err != nil {
return errorWrapper(err, "copy_response_body_failed", http.StatusOK)
return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError)
}
err = resp.Body.Close()
if err != nil {
return errorWrapper(err, "close_response_body_failed", http.StatusOK)
return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
}
return nil
}
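Taken together with the router change below, this lets clients call the image endpoint through one-api exactly as they would call OpenAI. A minimal client sketch, assuming a one-api instance at http://localhost:3000 and a token created in its console (both placeholders); the body fields mirror the new ImageRequest struct in controller/relay.go:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholders: point these at your own one-api deployment and token.
	baseURL := "http://localhost:3000"
	token := "sk-your-one-api-token"

	body, _ := json.Marshal(map[string]any{
		"prompt": "a watercolor painting of a lighthouse at dawn",
		"n":      2,         // must be between 1 and 10
		"size":   "512x512", // one of 256x256, 512x512, 1024x1024
	})

	req, _ := http.NewRequest(http.MethodPost, baseURL+"/v1/images/generations", bytes.NewReader(body))
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(out)) // {"created":...,"data":[{"url":"..."},{"url":"..."}]}
}
```

Unless the channel remaps the model, the relay forwards the original request body untouched and (after reading it once for quota accounting) streams the upstream response back, so the reply is byte-for-byte what the upstream returned.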
14 changes: 14 additions & 0 deletions controller/relay.go
@@ -38,6 +38,7 @@ type GeneralOpenAIRequest struct {
N int `json:"n,omitempty"`
Input any `json:"input,omitempty"`
Instruction string `json:"instruction,omitempty"`
Size string `json:"size,omitempty"`
}

type ChatRequest struct {
@@ -54,6 +55,12 @@ type TextRequest struct {
//Stream bool `json:"stream"`
}

type ImageRequest struct {
Prompt string `json:"prompt"`
N int `json:"n"`
Size string `json:"size"`
}

type Usage struct {
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
@@ -77,6 +84,13 @@ type TextResponse struct {
Error OpenAIError `json:"error"`
}

type ImageResponse struct {
Created int `json:"created"`
Data []struct {
Url string `json:"url"`
}
}

type ChatCompletionsStreamResponse struct {
Choices []struct {
Delta struct {
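Decoding the relayed image response only needs the created timestamp and the image URLs. A small sketch of the unmarshalling, using a stand-in type that mirrors the ImageResponse above (the sample JSON is illustrative; the commit's struct omits a json:"data" tag and relies on encoding/json's case-insensitive field matching):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors controller.ImageResponse from this commit.
type imageResponse struct {
	Created int `json:"created"`
	Data    []struct {
		Url string `json:"url"`
	} `json:"data"`
}

func main() {
	sample := []byte(`{"created":1689379200,"data":[{"url":"https://example.com/a.png"},{"url":"https://example.com/b.png"}]}`)

	var r imageResponse
	if err := json.Unmarshal(sample, &r); err != nil {
		panic(err)
	}
	for _, d := range r.Data {
		fmt.Println(d.Url)
	}
}
```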
5 changes: 5 additions & 0 deletions middleware/distributor.go
@@ -79,6 +79,11 @@ func Distribute() func(c *gin.Context) {
modelRequest.Model = c.Param("model")
}
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") {
if modelRequest.Model == "" {
modelRequest.Model = "dall-e"
}
}
channel, err = model.CacheGetRandomSatisfiedChannel(userGroup, modelRequest.Model)
if err != nil {
message := "无可用渠道"
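Since image requests are routed and billed as "dall-e" by default, a channel can still point them at a differently named upstream model through the existing model-mapping setting, which relay-image.go reads as a JSON map[string]string. A small sketch of that lookup (the mapping value "dall-e-2" is purely illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Example of the per-channel model mapping that relay-image.go consumes;
	// the key must be "dall-e", the value is whatever the upstream expects.
	modelMapping := `{"dall-e": "dall-e-2"}`

	modelMap := make(map[string]string)
	if err := json.Unmarshal([]byte(modelMapping), &modelMap); err != nil {
		panic(err)
	}

	imageModel := "dall-e" // default chosen by the distributor middleware
	if mapped := modelMap[imageModel]; mapped != "" {
		imageModel = mapped // used for the ratio lookup and the consumption log
	}
	fmt.Println(imageModel) // dall-e-2
}
```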
2 changes: 1 addition & 1 deletion router/relay-router.go
@@ -21,7 +21,7 @@ func SetRelayRouter(router *gin.Engine) {
relayV1Router.POST("/completions", controller.Relay)
relayV1Router.POST("/chat/completions", controller.Relay)
relayV1Router.POST("/edits", controller.Relay)
relayV1Router.POST("/images/generations", controller.RelayNotImplemented)
relayV1Router.POST("/images/generations", controller.Relay)
relayV1Router.POST("/images/edits", controller.RelayNotImplemented)
relayV1Router.POST("/images/variations", controller.RelayNotImplemented)
relayV1Router.POST("/embeddings", controller.Relay)
