Compare commits

..

16 Commits

4 changed files with 150 additions and 11 deletions

10
Dockerfile Normal file
View File

@@ -0,0 +1,10 @@
# Build stage: compile a static binary (CGO disabled so it runs on musl-based alpine).
FROM golang:latest AS builder
WORKDIR /app
# Copy module files first so the download layer is cached across source edits.
COPY go.mod go.sum ./
RUN go mod download
COPY *.go ./
RUN CGO_ENABLED=0 GOOS=linux go build -o /grok-bot

# Runtime stage: minimal image with just the binary.
FROM alpine:latest
# Root TLS certificates are required for outbound HTTPS (Telegram Bot API);
# bare alpine does not include them, so API calls would fail with x509 errors.
RUN apk add --no-cache ca-certificates
COPY --from=builder /grok-bot /grok-bot
CMD ["/grok-bot"]

30
docker-compose.yml Normal file
View File

@@ -0,0 +1,30 @@
services:
  bot:
    build: .
    # Start only after the model server container exists, since
    # OPENAI_BASE_URL points at it.
    depends_on:
      - llama-server
    environment:
      # Read the secret from the host environment rather than committing it;
      # defaults to empty so existing setups behave as before.
      BOT_TOKEN: "${BOT_TOKEN:-}"
      CHAT_ID: -1003268915330
      CHANNEL_ID: -1003290014225
      OPENAI_BASE_URL: http://llama-server:8080/v1
      SYSTEM_PROMPT_PATH: /etc/sysprompt.txt
      MAX_CONCURRENT_REQUESTS: 2
    volumes:
      - ./sysprompt.txt:/etc/sysprompt.txt:ro
    restart: unless-stopped

  llama-server:
    image: ghcr.io/ggml-org/llama.cpp:server
    container_name: llama-server
    ports:
      - "8080:8080"
    volumes:
      - ~/models:/models
    deploy:
      resources:
        limits:
          memory: 2g
    # memswap_limit is a service-level attribute in the Compose spec, not a
    # deploy.resources.limits property; nesting it under limits fails schema
    # validation.
    memswap_limit: 6g
    command: >
      -m /models/qwen25_15B.gguf
      --port 8080
      --host 0.0.0.0
      -n 512

120
main.go
View File

@@ -15,17 +15,61 @@ import (
"github.com/openai/openai-go/v3" "github.com/openai/openai-go/v3"
"github.com/openai/openai-go/v3/option" "github.com/openai/openai-go/v3/option"
"github.com/openai/openai-go/v3/responses" "github.com/openai/openai-go/v3/responses"
"github.com/openai/openai-go/v3/shared"
) )
type OpenAIPrompter struct { type OpenAIPrompter struct {
cli openai.Client cli openai.Client
cfg *Config cfg *Config
log *slog.Logger
} }
func (p *OpenAIPrompter) Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error) { func composeSysPromptWithContext(systemPrompt string, req PromptRequest) string {
var b strings.Builder
b.WriteString(systemPrompt)
b.WriteString("\nИмя пользователя: ")
b.WriteString(req.Username)
if req.ReplyToContent != nil {
b.WriteString("\nПользователь отсылается на текст сообщения: ")
b.WriteString(*req.ReplyToContent)
}
return b.String()
}
func (p *OpenAIPrompter) Prompt(ctx context.Context, req PromptRequest) (*HighlyTrustedResponse, error) {
p.log.Info("new prompt request",
"req", req)
sysPrompt := composeSysPromptWithContext(p.cfg.SystemPrompt, req)
input := []responses.ResponseInputItemUnionParam{
{
OfMessage: &responses.EasyInputMessageParam{
Content: responses.EasyInputMessageContentUnionParam{
OfString: openai.String(sysPrompt),
},
Role: responses.EasyInputMessageRoleSystem,
},
},
{
OfMessage: &responses.EasyInputMessageParam{
Content: responses.EasyInputMessageContentUnionParam{
OfString: openai.String(req.Question),
},
Role: responses.EasyInputMessageRoleUser,
},
},
}
resp, err := p.cli.Responses.New(ctx, responses.ResponseNewParams{ resp, err := p.cli.Responses.New(ctx, responses.ResponseNewParams{
Input: responses.ResponseNewParamsInputUnion{ Input: responses.ResponseNewParamsInputUnion{
OfString: openai.String(question), OfInputItemList: input,
},
Reasoning: shared.ReasoningParam{
Effort: shared.ReasoningEffortXhigh,
}, },
}) })
if err != nil { if err != nil {
@@ -37,11 +81,13 @@ func (p *OpenAIPrompter) Prompt(ctx context.Context, question string) (*HighlyTr
}, nil }, nil
} }
func NewOpenAIProoooompter(cfg *Config) *OpenAIPrompter { func NewOpenAIProoooompter(cfg *Config, log *slog.Logger) *OpenAIPrompter {
return &OpenAIPrompter{ return &OpenAIPrompter{
cli: openai.NewClient( cli: openai.NewClient(
option.WithBaseURL(cfg.OpenAIBaseURL), option.WithBaseURL(cfg.OpenAIBaseURL),
), ),
cfg: cfg,
log: log,
} }
} }
@@ -49,8 +95,14 @@ type HighlyTrustedResponse struct {
Text string Text string
} }
// PromptRequest carries one user question together with the chat context
// that is folded into the system prompt.
type PromptRequest struct {
	// Username is the Telegram username of the asking user (may be empty
	// when the update has no From field).
	Username string
	// Question is the message text with the "@grok " prefix stripped.
	Question string
	// ReplyToContent is the text of the replied-to message; nil when the
	// message was not a reply.
	ReplyToContent *string
}
type Proompter interface { type Proompter interface {
Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error) Prompt(ctx context.Context, req PromptRequest) (*HighlyTrustedResponse, error)
} }
type App struct { type App struct {
@@ -67,10 +119,12 @@ func (a *App) handleMessage(ctx context.Context, msg *tgbotapi.Message) error {
} }
chatID := msg.Chat.ID chatID := msg.Chat.ID
if chatID != a.config.ChatID || !strings.HasPrefix(msg.Text, "@grok") { if chatID != a.config.ChatID || !strings.HasPrefix(msg.Text, "@grok ") {
return nil return nil
} }
question := msg.Text[len("@grok "):]
select { select {
case a.sema <- struct{}{}: case a.sema <- struct{}{}:
default: default:
@@ -79,14 +133,39 @@ func (a *App) handleMessage(ctx context.Context, msg *tgbotapi.Message) error {
} }
defer func() { <-a.sema }() defer func() { <-a.sema }()
a.log.Info("message passed all guards", "text", msg.Text) a.log.Info(
"message passed all guards",
"og_text", msg.Text,
"transformed_text", question,
)
response, err := a.proompter.Prompt(ctx, msg.Text) var repliedToContent *string
if msg.ReplyToMessage != nil {
a.log.Info("message was a reply")
repliedToContent = &msg.ReplyToMessage.Text
}
var username string
if msg.From != nil {
username = msg.From.UserName
}
response, err := a.proompter.Prompt(ctx, PromptRequest{
Question: question,
ReplyToContent: repliedToContent,
Username: username,
})
if err != nil { if err != nil {
return fmt.Errorf("prompting: %w", err) return fmt.Errorf("prompting: %w", err)
} }
_, err = a.bot.Send(tgbotapi.NewMessage(chatID, response.Text)) _, err = a.bot.Send(tgbotapi.MessageConfig{
BaseChat: tgbotapi.BaseChat{
ChatID: chatID,
ReplyToMessageID: msg.MessageID,
},
Text: response.Text,
})
if err != nil { if err != nil {
return fmt.Errorf("responding: %w", err) return fmt.Errorf("responding: %w", err)
} }
@@ -104,7 +183,7 @@ func (a *App) HandleUpdates(ctx context.Context) error {
} }
if upd.Message != nil { if upd.Message != nil {
a.log.Info("new message", "chat", upd.Message.Chat) a.log.Info("new message", "update", upd)
go func() { go func() {
if err := a.handleMessage(ctx, upd.Message); err != nil { if err := a.handleMessage(ctx, upd.Message); err != nil {
a.log.Error("handling message", "msg", upd.Message, "err", err) a.log.Error("handling message", "msg", upd.Message, "err", err)
@@ -143,15 +222,17 @@ func NewApp(cfg *Config, prompter Proompter) (*App, error) {
} }
type Config struct { type Config struct {
SystemPrompt string
OpenAIBaseURL string OpenAIBaseURL string
BotToken string BotToken string
MaxConcurrentRequests uint MaxConcurrentRequests uint
ChatID int64 ChatID int64
ChannelID int64
} }
func LoadConfig(cfg *Config) error { func LoadConfig(cfg *Config) error {
if err := godotenv.Load(".env"); err != nil { if err := godotenv.Load(".env"); err != nil {
return err slog.Warn("no env file loaded", "err", err)
} }
cfg.OpenAIBaseURL = os.Getenv("OPENAI_BASE_URL") cfg.OpenAIBaseURL = os.Getenv("OPENAI_BASE_URL")
@@ -178,6 +259,23 @@ func LoadConfig(cfg *Config) error {
cfg.ChatID = chatID cfg.ChatID = chatID
channelID, err := strconv.ParseInt(os.Getenv("CHANNEL_ID"), 10, 64)
if err != nil {
	return err
}

// BUG FIX: the zero-check must inspect the value just parsed; the original
// tested chatID, so an unset channel id was never reported.
if channelID == 0 {
	slog.Warn("channel id is not set")
}
cfg.ChannelID = channelID

sysPromptPath := os.Getenv("SYSTEM_PROMPT_PATH")
promptBytes, err := os.ReadFile(sysPromptPath)
if err != nil {
	// Best-effort: a missing prompt file is survivable, but surface why.
	slog.Warn("could not load system prompt", "path", sysPromptPath, "err", err)
} else {
	cfg.SystemPrompt = string(promptBytes)
}
return nil return nil
} }
@@ -194,7 +292,7 @@ func main() {
os.Exit(1) os.Exit(1)
} }
prompter := NewOpenAIProoooompter(&cfg) prompter := NewOpenAIProoooompter(&cfg, log)
app, err := NewApp(&cfg, prompter) app, err := NewApp(&cfg, prompter)
if err != nil { if err != nil {

1
sysprompt.txt Normal file
View File

@@ -0,0 +1 @@
Put system prompt here