You've already forked grok
Compare commits
11 Commits
e17ac03f6f
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| cf7bab2923 | |||
| d3c9a7984c | |||
| bc49bedde6 | |||
| a4f156eac7 | |||
| 6332aa3c19 | |||
| 21c46ff2c1 | |||
| e60555df02 | |||
| d16d8ed906 | |||
| 6e9c21328e | |||
| ff79cfd786 | |||
| 0bb11c3bd7 |
10
Dockerfile
Normal file
10
Dockerfile
Normal file
@@ -0,0 +1,10 @@
|
||||
# Build stage: compile a fully static Linux binary so it can run on a
# minimal base image with no libc.
FROM golang:latest AS builder

WORKDIR /app

# Download dependencies first so this layer stays cached unless
# go.mod/go.sum change.
COPY go.mod go.sum ./
RUN go mod download

COPY *.go ./
RUN CGO_ENABLED=0 GOOS=linux go build -o /grok-bot

# Runtime stage. ca-certificates is required for outbound TLS — the
# Telegram Bot API is HTTPS-only, and a bare alpine image ships no CA
# trust store, so every API call would fail certificate verification.
FROM alpine:latest
RUN apk add --no-cache ca-certificates
COPY --from=builder /grok-bot /grok-bot
CMD ["/grok-bot"]
|
||||
30
docker-compose.yml
Normal file
30
docker-compose.yml
Normal file
@@ -0,0 +1,30 @@
|
||||
services:
  bot:
    build: .
    # Start the model server first; the bot's OPENAI_BASE_URL points at it.
    depends_on:
      - llama-server
    environment:
      # Supply the real token at deploy time (e.g. `BOT_TOKEN=... docker
      # compose up` or a .env file next to this compose file) — never
      # commit the secret here.
      BOT_TOKEN: ""
      CHAT_ID: -1003268915330
      CHANNEL_ID: -1003290014225
      OPENAI_BASE_URL: http://llama-server:8080/v1
      SYSTEM_PROMPT_PATH: /etc/sysprompt.txt
      MAX_CONCURRENT_REQUESTS: 2
    volumes:
      - ./sysprompt.txt:/etc/sysprompt.txt:ro
    restart: unless-stopped

  llama-server:
    image: ghcr.io/ggml-org/llama.cpp:server
    container_name: llama-server
    ports:
      - "8080:8080"
    volumes:
      - ~/models:/models
    deploy:
      resources:
        limits:
          memory: 2g
    # memswap_limit is a service-level key in the Compose spec; it is NOT
    # valid under deploy.resources.limits (only cpus/memory/pids are) and
    # would be rejected or silently ignored there.
    memswap_limit: 6g
    command: >
      -m /models/qwen25_15B.gguf
      --port 8080
      --host 0.0.0.0
      -n 512
||||
97
main.go
97
main.go
@@ -21,13 +21,52 @@ import (
|
||||
// OpenAIPrompter answers chat questions through an OpenAI-compatible
// Responses API endpoint (here a local llama.cpp server, per
// cfg.OpenAIBaseURL set in docker-compose).
type OpenAIPrompter struct {
	cli openai.Client // OpenAI SDK client, pointed at cfg.OpenAIBaseURL
	cfg *Config       // holds the system prompt and endpoint settings
	log *slog.Logger  // structured logger for request tracing
}
|
||||
|
||||
func composeSysPromptWithContext(systemPrompt string, req PromptRequest) string {
|
||||
var b strings.Builder
|
||||
|
||||
b.WriteString(systemPrompt)
|
||||
b.WriteString("\nИмя пользователя: ")
|
||||
b.WriteString(req.Username)
|
||||
|
||||
if req.ReplyToContent != nil {
|
||||
b.WriteString("\nПользователь отсылается на текст сообщения: ")
|
||||
b.WriteString(*req.ReplyToContent)
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func (p *OpenAIPrompter) Prompt(ctx context.Context, req PromptRequest) (*HighlyTrustedResponse, error) {
|
||||
p.log.Info("new prompt request",
|
||||
"req", req)
|
||||
|
||||
sysPrompt := composeSysPromptWithContext(p.cfg.SystemPrompt, req)
|
||||
|
||||
input := []responses.ResponseInputItemUnionParam{
|
||||
{
|
||||
OfMessage: &responses.EasyInputMessageParam{
|
||||
Content: responses.EasyInputMessageContentUnionParam{
|
||||
OfString: openai.String(sysPrompt),
|
||||
},
|
||||
Role: responses.EasyInputMessageRoleSystem,
|
||||
},
|
||||
},
|
||||
{
|
||||
OfMessage: &responses.EasyInputMessageParam{
|
||||
Content: responses.EasyInputMessageContentUnionParam{
|
||||
OfString: openai.String(req.Question),
|
||||
},
|
||||
Role: responses.EasyInputMessageRoleUser,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func (p *OpenAIPrompter) Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error) {
|
||||
resp, err := p.cli.Responses.New(ctx, responses.ResponseNewParams{
|
||||
Instructions: openai.String(p.cfg.SystemPrompt),
|
||||
Input: responses.ResponseNewParamsInputUnion{
|
||||
OfString: openai.String(question),
|
||||
OfInputItemList: input,
|
||||
},
|
||||
Reasoning: shared.ReasoningParam{
|
||||
Effort: shared.ReasoningEffortXhigh,
|
||||
@@ -42,12 +81,13 @@ func (p *OpenAIPrompter) Prompt(ctx context.Context, question string) (*HighlyTr
|
||||
}, nil
|
||||
}
|
||||
|
||||
func NewOpenAIProoooompter(cfg *Config) *OpenAIPrompter {
|
||||
// NewOpenAIProoooompter builds an OpenAIPrompter whose client targets the
// OpenAI-compatible endpoint configured in cfg.OpenAIBaseURL.
func NewOpenAIProoooompter(cfg *Config, log *slog.Logger) *OpenAIPrompter {
	return &OpenAIPrompter{
		cli: openai.NewClient(
			option.WithBaseURL(cfg.OpenAIBaseURL),
		),
		cfg: cfg,
		log: log,
	}
}
|
||||
|
||||
@@ -55,8 +95,14 @@ type HighlyTrustedResponse struct {
|
||||
Text string
|
||||
}
|
||||
|
||||
// PromptRequest carries a single user question plus the chat context used
// to build the system prompt (see composeSysPromptWithContext).
type PromptRequest struct {
	Username       string  // asker's Telegram username; empty when msg.From is nil
	Question       string  // question text forwarded to the model
	ReplyToContent *string // text of the replied-to message; nil when not a reply
}
|
||||
|
||||
type Proompter interface {
|
||||
Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error)
|
||||
Prompt(ctx context.Context, req PromptRequest) (*HighlyTrustedResponse, error)
|
||||
}
|
||||
|
||||
type App struct {
|
||||
@@ -93,7 +139,22 @@ func (a *App) handleMessage(ctx context.Context, msg *tgbotapi.Message) error {
|
||||
"transformed_text", question,
|
||||
)
|
||||
|
||||
response, err := a.proompter.Prompt(ctx, question)
|
||||
var repliedToContent *string
|
||||
if msg.ReplyToMessage != nil {
|
||||
a.log.Info("message was a reply")
|
||||
repliedToContent = &msg.ReplyToMessage.Text
|
||||
}
|
||||
|
||||
var username string
|
||||
if msg.From != nil {
|
||||
username = msg.From.UserName
|
||||
}
|
||||
|
||||
response, err := a.proompter.Prompt(ctx, PromptRequest{
|
||||
Question: question,
|
||||
ReplyToContent: repliedToContent,
|
||||
Username: username,
|
||||
})
|
||||
if err != nil {
|
||||
return fmt.Errorf("prompting: %w", err)
|
||||
}
|
||||
@@ -122,7 +183,7 @@ func (a *App) HandleUpdates(ctx context.Context) error {
|
||||
}
|
||||
|
||||
if upd.Message != nil {
|
||||
a.log.Info("new message", "chat", upd.Message.Chat)
|
||||
a.log.Info("new message", "update", upd)
|
||||
go func() {
|
||||
if err := a.handleMessage(ctx, upd.Message); err != nil {
|
||||
a.log.Error("handling message", "msg", upd.Message, "err", err)
|
||||
@@ -166,11 +227,12 @@ type Config struct {
|
||||
BotToken string
|
||||
MaxConcurrentRequests uint
|
||||
ChatID int64
|
||||
ChannelID int64
|
||||
}
|
||||
|
||||
func LoadConfig(cfg *Config) error {
|
||||
if err := godotenv.Load(".env"); err != nil {
|
||||
return err
|
||||
slog.Warn("no env file loaded", "err", err)
|
||||
}
|
||||
|
||||
cfg.OpenAIBaseURL = os.Getenv("OPENAI_BASE_URL")
|
||||
@@ -197,7 +259,22 @@ func LoadConfig(cfg *Config) error {
|
||||
|
||||
cfg.ChatID = chatID
|
||||
|
||||
cfg.SystemPrompt = os.Getenv("SYSTEM_PROMPT")
|
||||
channelID, err := strconv.ParseInt(os.Getenv("CHANNEL_ID"), 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if chatID == 0 {
|
||||
slog.Warn("channel id is not set")
|
||||
}
|
||||
cfg.ChannelID = channelID
|
||||
|
||||
sysPromptPath := os.Getenv("SYSTEM_PROMPT_PATH")
|
||||
promptBytes, err := os.ReadFile(sysPromptPath)
|
||||
if err != nil {
|
||||
slog.Warn("could not load system prompt", "path", sysPromptPath)
|
||||
} else {
|
||||
cfg.SystemPrompt = string(promptBytes)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -215,7 +292,7 @@ func main() {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
prompter := NewOpenAIProoooompter(&cfg)
|
||||
prompter := NewOpenAIProoooompter(&cfg, log)
|
||||
|
||||
app, err := NewApp(&cfg, prompter)
|
||||
if err != nil {
|
||||
|
||||
1
sysprompt.txt
Normal file
1
sysprompt.txt
Normal file
@@ -0,0 +1 @@
|
||||
Put system prompt here
|
||||
Reference in New Issue
Block a user