feat(AI): add salted user id
This commit is contained in:
parent
816ae68aeb
commit
b60be9ae7e
|
@ -1,6 +1,8 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/base64"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
@ -234,6 +236,7 @@ func handleAssistantConversation(c tele.Context, thread []*tele.Message) error {
|
||||||
Messages: chatReqMsgs,
|
Messages: chatReqMsgs,
|
||||||
Temperature: lo.ToPtr(0.42),
|
Temperature: lo.ToPtr(0.42),
|
||||||
MaxTokens: 2048,
|
MaxTokens: 2048,
|
||||||
|
User: assistantHashUserId(lastMsg.Sender.ID),
|
||||||
}
|
}
|
||||||
|
|
||||||
typingNotifyCh := setTyping(c)
|
typingNotifyCh := setTyping(c)
|
||||||
|
@ -272,3 +275,9 @@ func assistantRemoveMention(msg, name string) string {
|
||||||
}
|
}
|
||||||
return orig
|
return orig
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// assistantHashUserId derives a stable pseudonymous identifier from a
// Telegram user ID, suitable for the OpenAI "user" request field.
//
// The numeric ID is prefixed with a fixed salt, digested with MD5
// (pseudonymization only, not password storage — hence adequate here),
// base64url-encoded, and truncated to 22 characters.
func assistantHashUserId(uid int64) string {
	const salt = "RdnuRPqp66vtbc28QRO0ecKSLKXifz7G9UbXLoyCMpw"
	sum := md5.Sum([]byte(salt + strconv.FormatInt(uid, 10)))
	encoded := base64.URLEncoding.EncodeToString(sum[:])
	return encoded[:22]
}
|
||||||
|
|
|
@ -18,16 +18,16 @@ type ChatMessage struct {
|
||||||
type ChatRequest struct {
|
type ChatRequest struct {
|
||||||
Model string `json:"model"`
|
Model string `json:"model"`
|
||||||
Messages []ChatMessage `json:"messages"`
|
Messages []ChatMessage `json:"messages"`
|
||||||
Temperature *float64 `json:"temperature,omitempty"`
|
Temperature *float64 `json:"temperature,omitempty"` // What sampling temperature to use, between 0 and 2.
|
||||||
TopP *float64 `json:"top_p,omitempty"`
|
TopP *float64 `json:"top_p,omitempty"` // Nucleus sampling. Specify this or temperature but not both.
|
||||||
N int `json:"n,omitempty"`
|
N int `json:"n,omitempty"` // How many chat completion choices to generate for each input message.
|
||||||
Stream bool `json:"stream,omitempty"`
|
Stream bool `json:"stream,omitempty"` // If set, partial message deltas will be sent as data-only server-sent events as they become available.
|
||||||
Stop []string `json:"stop,omitempty"`
|
Stop []string `json:"stop,omitempty"` // Up to 4 sequences where the API will stop generating further tokens.
|
||||||
MaxTokens int `json:"max_tokens,omitempty"`
|
MaxTokens int `json:"max_tokens,omitempty"`
|
||||||
PresencePenalty *float64 `json:"presence_penalty,omitempty"`
|
PresencePenalty *float64 `json:"presence_penalty,omitempty"` // Number between -2.0 and 2.0.
|
||||||
FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"`
|
FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"` // Number between -2.0 and 2.0.
|
||||||
LogitBias map[string]float64 `json:"logit_bias,omitempty"`
|
LogitBias map[string]float64 `json:"logit_bias,omitempty"` // Modify the likelihood of specified tokens appearing in the completion.
|
||||||
User string `json:"user,omitempty"`
|
User string `json:"user,omitempty"` // A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
|
||||||
}
|
}
|
||||||
|
|
||||||
type ChatResponseChoice struct {
|
type ChatResponseChoice struct {
|
||||||
|
|
Loading…
Reference in New Issue