-rw-r--r--  .gitignore                3
-rw-r--r--  .vscode/launch.json      18
-rw-r--r--  doc/env.sample            7
-rw-r--r--  requirements.txt          2
-rwxr-xr-x  shitoutcode.sh           11
-rw-r--r--  shitoutcode/__init__.py  86
-rw-r--r--  shitoutcode/__main__.py  54
7 files changed, 181 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f42515e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+__pycache__
+
+/.env
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 0000000..770dfaa
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,18 @@
+{
+    // Use IntelliSense to learn about possible attributes.
+    // Hover to view descriptions of existing attributes.
+    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+    "version": "0.2.0",
+    "configurations": [
+        {
+            "name": "shitoutcode",
+            "type": "debugpy",
+            "request": "launch",
+            "cwd": "${workspaceFolder}",
+            "module": "shitoutcode",
+            "args": [],
+            "envFile": "${workspaceFolder}/.env",
+            "justMyCode": true
+        },
+    ]
+}
diff --git a/doc/env.sample b/doc/env.sample
new file mode 100644
index 0000000..de7d0a4
--- /dev/null
+++ b/doc/env.sample
@@ -0,0 +1,7 @@
+OPENAI_API_KEY="FIXME"
+LLM_MODEL="gpt-3.5-turbo" # optional
+OUT_TOKENS_MIN="100" # optional
+OUT_TOKENS_MAX="500" # optional
+LLM_TEMP="0" # optional
+LLM_SEED="0" # optional
+MAX_TOKENS="8000"
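
These settings reach the program in two ways: the VS Code launch configuration loads .env through envFile, and shitoutcode.sh sources .env before starting the module. A minimal sketch of how the optional variables fall back to defaults, mirroring the wrap_default helper in shitoutcode/__main__.py further down (the values in the comments are only examples):

    import os

    def wrap_default(val, t, dv=None):
        # unset variables fall back to the supplied default; set ones are coerced to t
        return dv if val is None else t(val)

    model = wrap_default(os.getenv("LLM_MODEL"), str, "gpt-4o-mini")  # e.g. "gpt-3.5-turbo" when set
    temp = wrap_default(os.getenv("LLM_TEMP"), float)                 # None means "not specified"
    max_tokens = wrap_default(os.getenv("MAX_TOKENS"), int, 8000)
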
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..712e768
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,2 @@
+pyjson5
+openai
diff --git a/shitoutcode.sh b/shitoutcode.sh
new file mode 100755
index 0000000..5d107ed
--- /dev/null
+++ b/shitoutcode.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+set -e
+. "./.env"
+export OPENAI_API_KEY
+export LLM_MODEL
+export OUT_TOKENS_MIN
+export OUT_TOKENS_MAX
+export LLM_TEMP
+export LLM_SEED
+export MAX_TOKENS
+python3 -m shitoutcode "$@"
diff --git a/shitoutcode/__init__.py b/shitoutcode/__init__.py
new file mode 100644
index 0000000..67da767
--- /dev/null
+++ b/shitoutcode/__init__.py
@@ -0,0 +1,86 @@
+import openai
+import pyjson5
+
+
+class Env:
+    def __init__(self):
+        self.model = "gpt-4o"
+        self.output_tokens_min = 250
+        self.output_tokens_max = 500
+        self.temp: float = None
+        self.seed: int = None
+        self.max_tokens = 8000
+
+class SourceFile:
+    def __init__(self):
+        self.name: str = None
+        self.category: str = None
+        self.contents: str = None
+
+class LLMAPIException (Exception): ...
+
+def __do_prompt (lang: str, extra_prompt: str, env: Env):
+    prompt = '''
+Write me the source code of a program in %s. Write anything you'd like.
+Use more than %d words, but no more than %d words in the code.''' % (
+        lang, env.output_tokens_min, env.output_tokens_max)
+    if extra_prompt:
+        prompt += ' ' + extra_prompt
+
+    messages = [
+        {
+            "role": "system",
+            "content": "You're a helpful assistant that writes "
+                + "computer programs of any kind"
+        },
+        { "role": "user", "content": prompt }
+    ]
+    functions = [
+        {
+            "name": "get_result",
+            "description": "Output the source code",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "cat": {
+                        "type": "string",
+                        "description": "one word describing what the program does"
+                    },
+                    "filename": {
+                        "type": "string",
+                        "description": "The source code file name" # TODO: strip characters not allowed in Unix filenames
+                    },
+                    "code": {
+                        "type": "string",
+                        "description": "The source code"
+                    }
+                }
+            }
+        }
+    ]
+
+    return openai.chat.completions.create(
+        model = env.model,
+        messages = messages,
+        functions = functions,
+        function_call = "auto",
+        temperature = env.temp,
+        seed = env.seed,
+        max_tokens = env.max_tokens
+    )
+
+def gen_rand_srccode (lang: str, extra_prompt: str, env: Env) -> SourceFile | str:
+    rsp = __do_prompt(lang, extra_prompt, env)
+    match rsp.choices[0].finish_reason:
+        case 'stop':
+            return rsp.choices[0].message.content
+        case 'function_call':
+            choice = pyjson5.loads(rsp.choices[0].message.function_call.arguments)
+
+            ret = SourceFile()
+            ret.name = choice["filename"]
+            ret.category = choice.get("cat")
+            ret.contents = choice["code"]
+
+            return ret
+        case _: raise LLMAPIException(rsp.choices[0].finish_reason)
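
A minimal usage sketch of gen_rand_srccode outside the command-line entry point, assuming OPENAI_API_KEY is already set in the environment; the language and extra prompt below are only example values:

    from shitoutcode import Env, SourceFile, gen_rand_srccode

    env = Env()  # defaults: gpt-4o, 250-500 words, max_tokens 8000
    result = gen_rand_srccode("Python", "Make it a command line tool.", env)
    if isinstance(result, SourceFile):   # the model answered via the get_result function call
        print(result.category, result.name)
        print(result.contents)
    else:                                # the model answered with plain text instead
        print(result)
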
diff --git a/shitoutcode/__main__.py b/shitoutcode/__main__.py
new file mode 100644
index 0000000..09ca715
--- /dev/null
+++ b/shitoutcode/__main__.py
@@ -0,0 +1,54 @@
+import os
+import random
+import sys
+from shitoutcode import Env, LLMAPIException, SourceFile, gen_rand_srccode
+
+ARGV0 = "shitoutcode"
+
+def loadEnv_from_env () -> Env:
+    def wrap_default (val: str, t: type, dv = None):
+        if val is None:
+            return dv
+        return t(val)
+
+    ret = Env()
+    ret.model = wrap_default(os.getenv("LLM_MODEL"), str, "gpt-4o-mini")
+    ret.output_tokens_min = wrap_default(os.getenv("OUT_TOKENS_MIN"), int, ret.output_tokens_min)
+    ret.output_tokens_max = wrap_default(os.getenv("OUT_TOKENS_MAX"), int, ret.output_tokens_max)
+    ret.temp = wrap_default(os.getenv("LLM_TEMP"), float)
+    ret.seed = wrap_default(os.getenv("LLM_SEED"), int)
+    ret.max_tokens = wrap_default(os.getenv("MAX_TOKENS"), int, ret.max_tokens)
+
+    return ret
+
+def getPathLine (x: SourceFile) -> str:
+    if x.category:
+        return x.category + os.path.sep + x.name
+    return x.name
+
+def pickRandLang () -> str:
+    thelist = [
+        "C",
+        "C++",
+        "JavaScript",
+        "Java"
+    ]
+    r = random.randint(0, len(thelist) - 1)
+
+    return thelist[r]
+
+env = loadEnv_from_env()
+lang = sys.argv[1] if len(sys.argv) > 1 else pickRandLang()
+extra_prompt = sys.argv[2] if len(sys.argv) > 2 else None
+
+try:
+    result = gen_rand_srccode(lang, extra_prompt, env)
+    if isinstance(result, SourceFile):
+        print(getPathLine(result))
+        print()
+        print(result.contents)
+    else:
+        print(result)
+except LLMAPIException as e:
+    sys.stderr.write(ARGV0 + ": model gave up: " + str(e) + os.linesep)
+    exit(1)
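
For reference, a sketch of the output the module produces when the model answers through the function call, using a hand-built SourceFile with example values in place of a real API response:

    import os
    from shitoutcode import SourceFile

    sf = SourceFile()
    sf.name = "hello.c"         # example filename returned by the model
    sf.category = "greeting"    # example one-word category
    sf.contents = '#include <stdio.h>\nint main(void) { return 0; }\n'

    # same path logic as getPathLine above
    print(sf.category + os.path.sep + sf.name if sf.category else sf.name)  # greeting/hello.c on POSIX
    print()
    print(sf.contents)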