You've already forked grok
Compare commits
2 Commits
e17ac03f6f
...
ff79cfd786
| Author | SHA1 | Date | |
|---|---|---|---|
| ff79cfd786 | |||
| 0bb11c3bd7 |
10
Dockerfile
Normal file
10
Dockerfile
Normal file
@@ -0,0 +1,10 @@
|
||||
# --- Build stage: compile the bot into a standalone binary ---
FROM golang:latest AS builder

WORKDIR /app

# Copy module files first so dependency download is cached
# independently of source-code changes.
COPY go.mod go.sum ./
RUN go mod download

COPY *.go ./
# CGO disabled: produces a static binary that runs on musl-based
# alpine without glibc.
RUN CGO_ENABLED=0 GOOS=linux go build -o /grok-bot

# --- Runtime stage: minimal image containing only the binary ---
FROM alpine:latest

COPY --from=builder /grok-bot /grok-bot

CMD ["/grok-bot"]
|
||||
28
docker-compose.yml
Normal file
28
docker-compose.yml
Normal file
@@ -0,0 +1,28 @@
|
||||
services:
  bot:
    build: .
    environment:
      BOT_TOKEN: ""
      CHAT_ID: ""
      # Must be the compose service name, not localhost: inside the
      # bot container, localhost resolves to the bot itself, so the
      # llama-server API would be unreachable.
      OPENAI_BASE_URL: "http://llama-server:8080/v1"
      SYSTEM_PROMPT_PATH: /etc/sysprompt.txt
    volumes:
      - ./sysprompt.txt:/etc/sysprompt.txt:ro
    # Start the model server before the bot so the first request has
    # somewhere to go (note: waits for start, not for readiness).
    depends_on:
      - llama-server
    restart: unless-stopped

  llama-server:
    image: ghcr.io/ggml-org/llama.cpp:server
    container_name: llama-server
    ports:
      - "8080:8080"
    volumes:
      - ~/models:/models
    # memswap_limit is a service-level key in the Compose file format;
    # it is not valid under deploy.resources.limits and would be
    # rejected or silently ignored there.
    memswap_limit: 6g
    deploy:
      resources:
        limits:
          memory: 2g
    command: >
      -m /models/qwen25_15B.gguf
      --port 8080
      --host 0.0.0.0
      -n 512
|
||||
16
main.go
16
main.go
@@ -73,11 +73,11 @@ func (a *App) handleMessage(ctx context.Context, msg *tgbotapi.Message) error {
|
||||
}
|
||||
|
||||
chatID := msg.Chat.ID
|
||||
if chatID != a.config.ChatID || !strings.HasPrefix(msg.Text, "@grok") {
|
||||
if chatID != a.config.ChatID || !strings.HasPrefix(msg.Text, "@grok ") {
|
||||
return nil
|
||||
}
|
||||
|
||||
question := msg.Text[len("@grok"):]
|
||||
question := msg.Text[len("@grok "):]
|
||||
|
||||
select {
|
||||
case a.sema <- struct{}{}:
|
||||
@@ -122,7 +122,7 @@ func (a *App) HandleUpdates(ctx context.Context) error {
|
||||
}
|
||||
|
||||
if upd.Message != nil {
|
||||
a.log.Info("new message", "chat", upd.Message.Chat)
|
||||
a.log.Info("new message", "update", upd)
|
||||
go func() {
|
||||
if err := a.handleMessage(ctx, upd.Message); err != nil {
|
||||
a.log.Error("handling message", "msg", upd.Message, "err", err)
|
||||
@@ -170,7 +170,7 @@ type Config struct {
|
||||
|
||||
func LoadConfig(cfg *Config) error {
|
||||
if err := godotenv.Load(".env"); err != nil {
|
||||
return err
|
||||
slog.Warn("no env file loaded", "err", err)
|
||||
}
|
||||
|
||||
cfg.OpenAIBaseURL = os.Getenv("OPENAI_BASE_URL")
|
||||
@@ -197,7 +197,13 @@ func LoadConfig(cfg *Config) error {
|
||||
|
||||
cfg.ChatID = chatID
|
||||
|
||||
cfg.SystemPrompt = os.Getenv("SYSTEM_PROMPT")
|
||||
sysPromptPath := os.Getenv("SYSTEM_PROMPT_PATH")
|
||||
promptBytes, err := os.ReadFile(sysPromptPath)
|
||||
if err != nil {
|
||||
slog.Warn("could not load system prompt", "path", sysPromptPath)
|
||||
} else {
|
||||
cfg.SystemPrompt = string(promptBytes)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
1
sysprompt.txt
Normal file
1
sysprompt.txt
Normal file
@@ -0,0 +1 @@
|
||||
Put system prompt here
|
||||
Reference in New Issue
Block a user