From b4a2ee3acad858d94c0ec676720d1b0432603744 Mon Sep 17 00:00:00 2001
From: Marius Rometsch
Date: Thu, 30 Jan 2025 14:16:03 +0100
Subject: [PATCH] first commit

---
 README.md   |   3 ++
 config.json |   7 +++
 go.mod      |   3 ++
 main.go     | 128 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 141 insertions(+)
 create mode 100644 README.md
 create mode 100644 config.json
 create mode 100644 go.mod
 create mode 100644 main.go

diff --git a/README.md b/README.md
new file mode 100644
index 0000000..9039c71
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# VFN DnD AI Rule Helper
+
+A simple ChatGPT wrapper written in Go
\ No newline at end of file
diff --git a/config.json b/config.json
new file mode 100644
index 0000000..0c99c57
--- /dev/null
+++ b/config.json
@@ -0,0 +1,7 @@
+{
+  "instructions": "You are a helpful AI assistant for Tabletop Roleplaying rules. Do not be creative. You can combine information to draw conclusions, but never make up information if you are unsure. If you are unsure, provide links or extracts from relevant sources.",
+  "ruleset": "DnD 3.5",
+  "model": "gpt-3.5-turbo",
+  "apiKey": ""
+}
+ 
\ No newline at end of file
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..06c8e87
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,3 @@
+module openai-client
+
+go 1.23.5
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..6866279
--- /dev/null
+++ b/main.go
@@ -0,0 +1,128 @@
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"os"
+)
+
+type OpenAIRequest struct {
+	Model    string    `json:"model"`
+	Messages []Message `json:"messages"`
+}
+
+type Message struct {
+	Role    string `json:"role"`
+	Content string `json:"content"`
+}
+
+type OpenAIResponse struct {
+	Choices []struct {
+		Message Message `json:"message"`
+	} `json:"choices"`
+}
+
+// Config mirrors config.json; fields must be exported so encoding/json can populate them.
+type Config struct {
+	Instructions string `json:"instructions"`
+	Model        string `json:"model"`
+	Ruleset      string `json:"ruleset"`
+	ApiKey       string `json:"apiKey"`
+}
+
+func loadConfig(filename string) (Config, error) {
+	var config Config
+	file, err := os.ReadFile(filename)
+	if err != nil {
+		return config, err
+	}
+	err = json.Unmarshal(file, &config)
+	return config, err
+}
+
+func main() {
+	apiKey := os.Getenv("OPENAI_API_KEY")
+
+	config, err := loadConfig("config.json")
+	if err != nil {
+		fmt.Println("Error loading config file:", err)
+		return
+	}
+
+	// The environment variable takes precedence over the key in config.json.
+	if apiKey == "" {
+		apiKey = config.ApiKey
+	}
+	if apiKey == "" {
+		fmt.Println("Error: neither the apiKey config value nor the OPENAI_API_KEY environment variable is set.")
+		return
+	}
+
+	scanner := bufio.NewScanner(os.Stdin)
+	messages := []Message{{Role: "system", Content: config.Instructions + " Only include information relevant for " + config.Ruleset + ". Do not include information from other rulesets."}}
+
+	for {
+		fmt.Print("Enter your prompt (or type 'exit' to quit): ")
+		if !scanner.Scan() {
+			break
+		}
+		prompt := scanner.Text()
+
+		if prompt == "exit" {
+			fmt.Println("Goodbye!")
+			break
+		}
+
+		messages = append(messages, Message{Role: "user", Content: prompt})
+
+		requestBody, err := json.Marshal(OpenAIRequest{
+			Model:    config.Model,
+			Messages: messages,
+		})
+		if err != nil {
+			fmt.Println("Error creating JSON request:", err)
+			continue
+		}
+
+		req, err := http.NewRequest("POST", "https://api.openai.com/v1/chat/completions", bytes.NewBuffer(requestBody))
+		if err != nil {
+			fmt.Println("Error creating request:", err)
+			continue
+		}
+		req.Header.Set("Content-Type", "application/json")
+		req.Header.Set("Authorization", "Bearer "+apiKey)
+
+		client := &http.Client{}
+		resp, err := client.Do(req)
+		if err != nil {
+			fmt.Println("Error making request:", err)
+			continue
+		}
+
+		body, err := io.ReadAll(resp.Body)
+		// Close the body right away rather than deferring, so connections are not held open for the lifetime of the loop.
+		resp.Body.Close()
+		if err != nil {
+			fmt.Println("Error reading response:", err)
+			continue
+		}
+
+		var openAIResp OpenAIResponse
+		if err := json.Unmarshal(body, &openAIResp); err != nil {
+			fmt.Println("Error parsing JSON response:", err)
+			continue
+		}
+
+		if len(openAIResp.Choices) > 0 {
+			responseMessage := openAIResp.Choices[0].Message
+			fmt.Println("AI Response:", responseMessage.Content)
+			messages = append(messages, responseMessage)
+		} else {
+			fmt.Println("No response from AI.")
+		}
+	}
+}
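
For context on what main.go sends over the wire, a minimal standalone sketch follows (not part of the patch; the types are redeclared locally and the prompt text is illustrative). It prints the JSON body the wrapper POSTs to https://api.openai.com/v1/chat/completions:

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // Local copies of the request types from main.go above.
    type message struct {
    	Role    string `json:"role"`
    	Content string `json:"content"`
    }

    type chatRequest struct {
    	Model    string    `json:"model"`
    	Messages []message `json:"messages"`
    }

    func main() {
    	req := chatRequest{
    		Model: "gpt-3.5-turbo",
    		Messages: []message{
    			{Role: "system", Content: "You are a helpful AI assistant for Tabletop Roleplaying rules. Only include information relevant for DnD 3.5."},
    			{Role: "user", Content: "How does grappling work?"},
    		},
    	}
    	// Same shape as the body built inside main.go's prompt loop.
    	body, _ := json.MarshalIndent(req, "", "  ")
    	fmt.Println(string(body))
    }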