diff --git a/assistant.go b/assistant.go
index b115561..7ce9ea8 100644
--- a/assistant.go
+++ b/assistant.go
@@ -234,7 +234,7 @@ func handleAssistantConversation(c tele.Context, thread []*tele.Message) error {
 	}
 
 	req := openai.ChatRequest{
-		Model:       openai.ModelGpt41,
+		Model:       openai.ModelGpt45,
 		Messages:    chatReqMsgs,
 		Temperature: lo.ToPtr(0.42),
 		User:        assistantHashUserId(lastMsg.Sender.ID),
diff --git a/botcmd_reason.go b/botcmd_reason.go
index cfd72fb..918291a 100644
--- a/botcmd_reason.go
+++ b/botcmd_reason.go
@@ -29,7 +29,7 @@ func handleReasonCmd(c tele.Context) error {
 	}
 
 	req := openai.ChatRequest{
-		Model: openai.ModelO4Mini,
+		Model: openai.ModelO3Mini,
 		Messages: []openai.ChatMessage{
 			{
 				Role: openai.ChatRoleUser,
diff --git a/hostcmds/dig.go b/hostcmds/dig.go
index 61a41b6..cf102a8 100644
--- a/hostcmds/dig.go
+++ b/hostcmds/dig.go
@@ -26,7 +26,7 @@ var (
 		"SVCB", "TA", "TKEY", "TLSA", "TSIG", "TXT", "URI", "ZONEMD",
 	})
-	errDigInvalidArgs = fmt.Errorf("invalid request")
+	digErrInvalidArgs = fmt.Errorf("invalid request")
 
 	digIdnaMapper = idna.New(idna.MapForLookup(), idna.StrictDomainName(false))
 )
 
@@ -43,13 +43,13 @@ func NewDigRequest(req string) (*DigRequest, error) {
 	args := strings.Fields(req)
 	nArgs := len(args)
 	if nArgs == 0 || nArgs > 2 {
-		return nil, errDigInvalidArgs
+		return nil, digErrInvalidArgs
 	}
 
 	if nArgs > 1 {
 		typ := strings.ToUpper(args[1])
 		if _, ok := digValidDnsTypes[typ]; !ok {
-			return nil, errDigInvalidArgs
+			return nil, digErrInvalidArgs
 		}
 		ret.Type = typ
 	}
@@ -64,7 +64,7 @@ func NewDigRequest(req string) (*DigRequest, error) {
 
 	name, err := digIdnaMapper.ToASCII(args[0])
 	if err != nil {
-		return nil, errDigInvalidArgs
+		return nil, digErrInvalidArgs
 	}
 	ret.Name = name
 	return ret, nil
diff --git a/msgcache.go b/msgcache.go
index 6e64aec..7a4b3c4 100644
--- a/msgcache.go
+++ b/msgcache.go
@@ -69,7 +69,11 @@ func getCachedThread(msg *tele.Message) ([]*tele.Message, error) {
 
 	threadR := []*tele.Message{msg}
 	currentMsg := msg
-	for currentMsg.ReplyTo != nil {
+	for {
+		if currentMsg.ReplyTo == nil {
+			break
+		}
+
 		parentMsg, err := getCachedMessage(currentMsg.ReplyTo)
 		if err != nil {
 			return nil, err
diff --git a/openai/client.go b/openai/client.go
index 68eea1d..2802647 100644
--- a/openai/client.go
+++ b/openai/client.go
@@ -67,7 +67,7 @@ func (c *Client) ChatCompletionStream(request ChatRequest) (*ChatResponseStream,
 	}
 
 	if resp.StatusCode() != 200 {
-		defer func() { _ = rbody.Close() }()
+		defer rbody.Close()
 		var respBodyStr string
 		if respBody, err := io.ReadAll(rbody); err == nil {
 			respBodyStr = string(respBody)
@@ -83,7 +83,7 @@ func (c *Client) ChatCompletionStream(request ChatRequest) (*ChatResponseStream,
 	}
 	go func() {
 		defer func() {
-			_ = rbody.Close()
+			rbody.Close()
 			close(ret.Stream)
 			close(ret.Done)
 		}()
diff --git a/openai/models.go b/openai/models.go
index bdf8da9..8657a11 100644
--- a/openai/models.go
+++ b/openai/models.go
@@ -1,8 +1,9 @@
 package openai
 
 const (
-	ModelGpt4O  = "gpt-4o"  // The safe default, balanced model.
-	ModelO1     = "o1"      // Expensive reasoning model
-	ModelO4Mini = "o4-mini" // Cheaper yet powerful reasoning model
-	ModelGpt41  = "gpt-4.1" // OpenAI's Flagship model
+	ModelGpt4O     = "gpt-4o"          // Safe default
+	ModelO1Preview = "o1-preview"      // Expensive reasoning model
+	ModelO1Mini    = "o1-mini"         // Cheaper reasoning model
+	ModelO3Mini    = "o3-mini"         // Cheaper yet powerful reasoning model
+	ModelGpt45     = "gpt-4.5-preview" // Bleeding edge (and expensive)
 )
diff --git a/utils/net.go b/utils/net.go
index 61c393f..71d4f1b 100644
--- a/utils/net.go
+++ b/utils/net.go
@@ -11,7 +11,7 @@ func getLocalIP() (net.IP, error) {
 	if err != nil {
 		return nil, err
 	}
-	defer func() { _ = conn.Close() }()
+	defer conn.Close()
 
 	host, _, err := net.SplitHostPort(conn.LocalAddr().String())
 	if err != nil {