Compare commits

..

14 Commits

4 changed files with 137 additions and 10 deletions

10
Dockerfile Normal file
View File

@@ -0,0 +1,10 @@
# Build stage: compile a static (CGO-free) Linux binary so it can run on
# a bare Alpine image without glibc.
FROM golang:latest AS builder
WORKDIR /app
# Copy module files first so the dependency-download layer is cached
# independently of source changes.
COPY go.mod go.sum ./
RUN go mod download
COPY *.go ./
RUN CGO_ENABLED=0 GOOS=linux go build -o /grok-bot

# Runtime stage: minimal image containing only the binary and CA certificates.
FROM alpine:latest
# The bot talks to the Telegram API over HTTPS; the alpine base image ships
# no CA bundle, so TLS verification would fail without this package.
RUN apk add --no-cache ca-certificates
COPY --from=builder /grok-bot /grok-bot
CMD ["/grok-bot"]

30
docker-compose.yml Normal file
View File

@@ -0,0 +1,30 @@
services:
  bot:
    build: .
    environment:
      # Secrets come from the host environment (or an .env file next to this
      # compose file) — never commit a real token here. Defaults to empty,
      # matching the previous placeholder value.
      BOT_TOKEN: "${BOT_TOKEN:-}"
      CHAT_ID: -1003268915330
      CHANNEL_ID: -1003290014225
      OPENAI_BASE_URL: http://llama-server:8080/v1
      SYSTEM_PROMPT_PATH: /etc/sysprompt.txt
      MAX_CONCURRENT_REQUESTS: 2
    volumes:
      - ./sysprompt.txt:/etc/sysprompt.txt:ro
    # The bot's OpenAI base URL points at llama-server, so start it first.
    depends_on:
      - llama-server
    restart: unless-stopped

  llama-server:
    image: ghcr.io/ggml-org/llama.cpp:server
    container_name: llama-server
    ports:
      - "8080:8080"
    volumes:
      - ~/models:/models
    deploy:
      resources:
        limits:
          memory: 2g
    # memswap_limit is a service-level attribute in the Compose spec; nested
    # under deploy.resources.limits (as before) it is not a recognized key
    # and the swap cap never applied.
    memswap_limit: 6g
    command: >
      -m /models/qwen25_15B.gguf
      --port 8080
      --host 0.0.0.0
      -n 512

102
main.go
View File

@@ -15,17 +15,61 @@ import (
"github.com/openai/openai-go/v3"
"github.com/openai/openai-go/v3/option"
"github.com/openai/openai-go/v3/responses"
"github.com/openai/openai-go/v3/shared"
)
// OpenAIPrompter implements Proompter against an OpenAI-compatible
// Responses API (here: a llama.cpp server, per the compose file's
// OPENAI_BASE_URL).
type OpenAIPrompter struct {
	cli openai.Client // OpenAI-compatible API client
	cfg *Config       // runtime configuration (base URL, system prompt, ...)
	log *slog.Logger  // structured logger for request tracing
}
// composeSysPromptWithContext returns the system prompt extended with
// per-request context: the asking user's name and, when the message is a
// reply, the text of the message being replied to.
func composeSysPromptWithContext(systemPrompt string, req PromptRequest) string {
	parts := []string{systemPrompt, "\nИмя пользователя: ", req.Username}
	if req.ReplyToContent != nil {
		parts = append(parts, "\nПользователь отсылается на текст сообщения: ", *req.ReplyToContent)
	}
	return strings.Join(parts, "")
}
func (p *OpenAIPrompter) Prompt(ctx context.Context, req PromptRequest) (*HighlyTrustedResponse, error) {
p.log.Info("new prompt request",
"req", req)
sysPrompt := composeSysPromptWithContext(p.cfg.SystemPrompt, req)
input := []responses.ResponseInputItemUnionParam{
{
OfMessage: &responses.EasyInputMessageParam{
Content: responses.EasyInputMessageContentUnionParam{
OfString: openai.String(sysPrompt),
},
Role: responses.EasyInputMessageRoleSystem,
},
},
{
OfMessage: &responses.EasyInputMessageParam{
Content: responses.EasyInputMessageContentUnionParam{
OfString: openai.String(req.Question),
},
Role: responses.EasyInputMessageRoleUser,
},
},
}
func (p *OpenAIPrompter) Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error) {
resp, err := p.cli.Responses.New(ctx, responses.ResponseNewParams{
Input: responses.ResponseNewParamsInputUnion{
OfString: openai.String(question),
OfInputItemList: input,
},
Reasoning: shared.ReasoningParam{
Effort: shared.ReasoningEffortXhigh,
},
})
if err != nil {
@@ -37,11 +81,13 @@ func (p *OpenAIPrompter) Prompt(ctx context.Context, question string) (*HighlyTr
}, nil
}
func NewOpenAIProoooompter(cfg *Config) *OpenAIPrompter {
func NewOpenAIProoooompter(cfg *Config, log *slog.Logger) *OpenAIPrompter {
return &OpenAIPrompter{
cli: openai.NewClient(
option.WithBaseURL(cfg.OpenAIBaseURL),
),
cfg: cfg,
log: log,
}
}
@@ -49,8 +95,14 @@ type HighlyTrustedResponse struct {
Text string
}
// PromptRequest bundles one user question with the conversational
// context needed to answer it.
type PromptRequest struct {
	Username       string  // sender's Telegram username (empty when the sender is unknown)
	Question       string  // question text forwarded to the model
	ReplyToContent *string // text of the message being replied to; nil when not a reply
}
type Proompter interface {
Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error)
Prompt(ctx context.Context, req PromptRequest) (*HighlyTrustedResponse, error)
}
type App struct {
@@ -87,7 +139,22 @@ func (a *App) handleMessage(ctx context.Context, msg *tgbotapi.Message) error {
"transformed_text", question,
)
response, err := a.proompter.Prompt(ctx, question)
var repliedToContent *string
if msg.ReplyToMessage != nil {
a.log.Info("message was a reply")
repliedToContent = &msg.ReplyToMessage.Text
}
var username string
if msg.From != nil {
username = msg.From.UserName
}
response, err := a.proompter.Prompt(ctx, PromptRequest{
Question: question,
ReplyToContent: repliedToContent,
Username: username,
})
if err != nil {
return fmt.Errorf("prompting: %w", err)
}
@@ -116,7 +183,7 @@ func (a *App) HandleUpdates(ctx context.Context) error {
}
if upd.Message != nil {
a.log.Info("new message", "chat", upd.Message.Chat)
a.log.Info("new message", "update", upd)
go func() {
if err := a.handleMessage(ctx, upd.Message); err != nil {
a.log.Error("handling message", "msg", upd.Message, "err", err)
@@ -155,15 +222,17 @@ func NewApp(cfg *Config, prompter Proompter) (*App, error) {
}
// Config holds the bot's runtime configuration, populated by LoadConfig
// from environment variables (with an optional .env file).
type Config struct {
	SystemPrompt          string // contents of the file named by SYSTEM_PROMPT_PATH; empty when unreadable
	OpenAIBaseURL         string // OPENAI_BASE_URL: base URL of the OpenAI-compatible server
	BotToken              string // Telegram bot token — presumably read from BOT_TOKEN; verify in LoadConfig
	MaxConcurrentRequests uint   // cap on in-flight prompt requests (MAX_CONCURRENT_REQUESTS in compose)
	ChatID                int64  // CHAT_ID: id of the Telegram chat the bot serves
	ChannelID             int64  // CHANNEL_ID: id of the Telegram channel
}
func LoadConfig(cfg *Config) error {
if err := godotenv.Load(".env"); err != nil {
return err
slog.Warn("no env file loaded", "err", err)
}
cfg.OpenAIBaseURL = os.Getenv("OPENAI_BASE_URL")
@@ -190,6 +259,23 @@ func LoadConfig(cfg *Config) error {
cfg.ChatID = chatID
channelID, err := strconv.ParseInt(os.Getenv("CHANNEL_ID"), 10, 64)
if err != nil {
return err
}
if chatID == 0 {
slog.Warn("channel id is not set")
}
cfg.ChannelID = channelID
sysPromptPath := os.Getenv("SYSTEM_PROMPT_PATH")
promptBytes, err := os.ReadFile(sysPromptPath)
if err != nil {
slog.Warn("could not load system prompt", "path", sysPromptPath)
} else {
cfg.SystemPrompt = string(promptBytes)
}
return nil
}
@@ -206,7 +292,7 @@ func main() {
os.Exit(1)
}
prompter := NewOpenAIProoooompter(&cfg)
prompter := NewOpenAIProoooompter(&cfg, log)
app, err := NewApp(&cfg, prompter)
if err != nil {

1
sysprompt.txt Normal file
View File

@@ -0,0 +1 @@
Put system prompt here