absolute cinema

This commit is contained in:
2026-02-01 03:14:01 +03:00
parent 1c96676fa4
commit 5d91719154
3 changed files with 241 additions and 0 deletions

13
go.mod
View File

@@ -1,3 +1,16 @@
module git.hashlag.net/lezzercringe/grok
go 1.25.3
require (
github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1
github.com/joho/godotenv v1.5.1
github.com/openai/openai-go/v3 v3.17.0
)
require (
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
)

16
go.sum Normal file
View File

@@ -0,0 +1,16 @@
github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1 h1:wG8n/XJQ07TmjbITcGiUaOtXxdrINDz1b0J1w0SzqDc=
github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1/go.mod h1:A2S0CWkNylc2phvKXWBBdD3K0iGnDBGbzRpISP2zBl8=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/openai/openai-go/v3 v3.17.0 h1:CfTkmQoItolSyW+bHOUF190KuX5+1Zv6MC0Gb4wAwy8=
github.com/openai/openai-go/v3 v3.17.0/go.mod h1:cdufnVK14cWcT9qA1rRtrXx4FTRsgbDPW7Ia7SS5cZo=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=

212
main.go
View File

@@ -0,0 +1,212 @@
package main
import (
"context"
"errors"
"fmt"
"log/slog"
"os"
"os/signal"
"strconv"
"strings"
tgbotapi "github.com/go-telegram-bot-api/telegram-bot-api/v5"
"github.com/joho/godotenv"
"github.com/openai/openai-go/v3"
"github.com/openai/openai-go/v3/option"
"github.com/openai/openai-go/v3/responses"
)
// OpenAIPrompter implements Proompter on top of the OpenAI Responses API.
type OpenAIPrompter struct {
	// cli is the OpenAI API client used to issue requests.
	cli openai.Client
	// cfg holds app configuration; NOTE(review): the constructor below does
	// not populate this field — confirm whether it is needed at all.
	cfg *Config
}
// Prompt sends question to the OpenAI Responses API and wraps the
// concatenated output text in a HighlyTrustedResponse.
func (p *OpenAIPrompter) Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error) {
	params := responses.ResponseNewParams{
		Input: responses.ResponseNewParamsInputUnion{
			OfString: openai.String(question),
		},
	}

	resp, err := p.cli.Responses.New(ctx, params)
	if err != nil {
		return nil, err
	}

	answer := &HighlyTrustedResponse{Text: resp.OutputText()}
	return answer, nil
}
// NewOpenAIProoooompter builds an OpenAIPrompter whose client targets
// cfg.OpenAIBaseURL.
func NewOpenAIProoooompter(cfg *Config) *OpenAIPrompter {
	return &OpenAIPrompter{
		cli: openai.NewClient(
			option.WithBaseURL(cfg.OpenAIBaseURL),
		),
		// Fix: the struct declares a cfg field, but the original constructor
		// left it nil — any future use of p.cfg would nil-deref.
		cfg: cfg,
	}
}
// HighlyTrustedResponse carries the model's answer text back to the caller.
type HighlyTrustedResponse struct {
	// Text is the model's output, as returned by the Responses API.
	Text string
}
// Proompter answers a free-form question. Implementations should respect
// ctx cancellation and return a non-nil response only on success.
type Proompter interface {
	Prompt(ctx context.Context, question string) (*HighlyTrustedResponse, error)
}
// App wires the Telegram bot to a Proompter and enforces a cap on
// concurrent prompt requests.
type App struct {
	// log is the structured logger for all app events.
	log *slog.Logger
	// bot is the Telegram Bot API client.
	bot *tgbotapi.BotAPI
	// proompter answers user questions.
	proompter Proompter
	// sema is a counting semaphore (capacity = MaxConcurrentRequests)
	// limiting in-flight prompt handling.
	sema chan struct{}
	// config is the loaded runtime configuration.
	config *Config
}
// handleMessage processes one incoming Telegram message: it filters by chat
// and "@grok" prefix, enforces the concurrency limit, forwards the text to
// the prompter, and sends the answer back to the same chat.
//
// It returns nil when the message is ignored (no chat, wrong chat, missing
// prefix, or concurrency limit hit) and a wrapped error when prompting or
// replying fails.
func (a *App) handleMessage(ctx context.Context, msg *tgbotapi.Message) error {
	// Fix: the original condition was inverted (`!= nil`), which silently
	// dropped every real message and panicked on msg.Chat.ID for messages
	// without a chat. Guard against the nil case instead.
	if msg.Chat == nil {
		return nil
	}
	chatID := msg.Chat.ID
	if chatID != a.config.ChatID || !strings.HasPrefix(msg.Text, "@grok") {
		return nil
	}
	// Non-blocking acquire: drop the message rather than queue when
	// MaxConcurrentRequests handlers are already in flight.
	select {
	case a.sema <- struct{}{}:
	default:
		a.log.Info("concurrency limit hit", "msg", msg)
		return nil
	}
	defer func() { <-a.sema }()
	a.log.Info("message passed all guards", "text", msg.Text)
	response, err := a.proompter.Prompt(ctx, msg.Text)
	if err != nil {
		return fmt.Errorf("prompting: %w", err)
	}
	if _, err := a.bot.Send(tgbotapi.NewMessage(chatID, response.Text)); err != nil {
		return fmt.Errorf("responding: %w", err)
	}
	return nil
}
// HandleUpdates consumes the bot's update channel until the channel closes
// or ctx is cancelled. Each message update is handled on its own goroutine;
// handler errors are logged rather than propagated.
func (a *App) HandleUpdates(ctx context.Context) error {
	updates := a.bot.GetUpdatesChan(tgbotapi.UpdateConfig{})
	for {
		select {
		case <-ctx.Done():
			return ctx.Err()
		case upd, ok := <-updates:
			if !ok {
				return errors.New("channel closed")
			}
			msg := upd.Message
			if msg == nil {
				continue
			}
			go func() {
				if err := a.handleMessage(ctx, msg); err != nil {
					a.log.Error("handling message", "msg", msg, "err", err)
				}
			}()
		}
	}
}
// NewApp wires together the Telegram bot, the prompter, a structured logger,
// and the concurrency-limiting semaphore from cfg.
//
// It returns an error when the concurrency limit is unset or when the
// Telegram client cannot be created.
func NewApp(cfg *Config, prompter Proompter) (*App, error) {
	// Fix: validate config before the network-touching NewBotAPI call —
	// the original created the bot first and only then rejected a zero
	// concurrency limit, wasting a remote round trip.
	if cfg.MaxConcurrentRequests == 0 {
		return nil, errors.New("concurrency limit not set")
	}
	bot, err := tgbotapi.NewBotAPI(cfg.BotToken)
	if err != nil {
		return nil, fmt.Errorf("creating bot api: %w", err)
	}
	return &App{
		log:       slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{})),
		bot:       bot,
		proompter: prompter,
		sema:      make(chan struct{}, cfg.MaxConcurrentRequests),
		config:    cfg,
	}, nil
}
// Config holds all runtime configuration, populated from the environment by
// LoadConfig.
type Config struct {
	// OpenAIBaseURL is the base URL of the OpenAI-compatible API endpoint.
	OpenAIBaseURL string
	// BotToken is the Telegram Bot API token.
	BotToken string
	// MaxConcurrentRequests caps how many prompts may be in flight at once.
	MaxConcurrentRequests uint
	// ChatID is the single Telegram chat the bot responds in.
	ChatID int64
}
// LoadConfig populates cfg from a ".env" file merged with the process
// environment. It requires OPENAI_BASE_URL, BOT_TOKEN, a parseable
// MAX_CONCURRENT_REQUESTS, and a non-zero CHAT_ID, returning a descriptive
// error when any of them is missing or invalid.
func LoadConfig(cfg *Config) error {
	if err := godotenv.Load(".env"); err != nil {
		return fmt.Errorf("loading .env: %w", err)
	}
	cfg.OpenAIBaseURL = os.Getenv("OPENAI_BASE_URL")
	if cfg.OpenAIBaseURL == "" {
		return errors.New("openai base url not set")
	}
	cfg.BotToken = os.Getenv("BOT_TOKEN")
	// Fix: the original never validated BOT_TOKEN, deferring the failure to
	// a confusing Telegram API error at startup.
	if cfg.BotToken == "" {
		return errors.New("bot token not set")
	}
	// Fix: parse with the platform's int width (strconv.IntSize) instead of a
	// hard-coded 64, so the uint conversion below cannot silently truncate on
	// 32-bit platforms.
	mcr, err := strconv.ParseUint(
		os.Getenv("MAX_CONCURRENT_REQUESTS"),
		10, strconv.IntSize,
	)
	if err != nil {
		return fmt.Errorf("parsing MAX_CONCURRENT_REQUESTS: %w", err)
	}
	cfg.MaxConcurrentRequests = uint(mcr)
	chatID, err := strconv.ParseInt(os.Getenv("CHAT_ID"), 10, 64)
	if err != nil {
		return fmt.Errorf("parsing CHAT_ID: %w", err)
	}
	if chatID == 0 {
		// Fix: the original message read "non-zero chat id is unsafe" while
		// rejecting zero — the guard is against a missing/zero chat id.
		return errors.New("zero chat id is unsafe")
	}
	cfg.ChatID = chatID
	return nil
}
// main loads configuration, assembles the app, and runs the update loop
// until an interrupt arrives or the loop exits on its own.
func main() {
	ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt)
	defer cancel()

	log := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{}))
	log.Info("Starting GROK")

	var cfg Config
	if err := LoadConfig(&cfg); err != nil {
		log.Error("loading config", "err", err)
		os.Exit(1)
	}

	app, err := NewApp(&cfg, NewOpenAIProoooompter(&cfg))
	if err != nil {
		log.Error("initializing app", "err", err)
		os.Exit(1)
	}

	// Run the update loop in the background; cancel the root context when it
	// stops so the blocking receive below unblocks and main exits.
	go func() {
		if err := app.HandleUpdates(ctx); err != nil {
			log.Error("handleUpdates exited", "err", err)
		}
		cancel()
	}()

	<-ctx.Done()
}