# gpt-3.5-turbo-16k-guidance-icl-wgsl.py
# In-context-learning demo: builds a token-budgeted cache of WGSL shader
# examples from cloned repositories and uses it as the system prompt for a
# guidance-driven gpt-3.5-turbo-16k chat session.
from pathlib import Path
import re
import tiktoken
import guidance
import os
import subprocess
# Model configuration: the OpenAI chat model driving the assistant and the
# matching tokenizer used to budget example tokens against its context window.
llm_string = "gpt-3.5-turbo-16k"
# Requires OPENAI_API_KEY to be present in the environment.
llm_model = guidance.llms.OpenAI(llm_string)
encoding = tiktoken.encoding_for_model(llm_string)
# Repositories mined below for example .wgsl shader files.
repos = ["https://github.com/webgpu/webgpu-samples", "https://github.com/gfx-rs/wgpu"]
# Shallow-clone each example repository, skipping any already present locally.
for repo_url in repos:
    # Directory name git will clone into: last path segment, minus a .git suffix.
    target = repo_url.split("/")[-1].split(".git")[0]
    if os.path.exists(target):
        continue
    print(f"Cloning {repo_url} into {target}")
    subprocess.run(
        ["git", "clone", "--depth", "1", "--single-branch", repo_url], check=True
    )
# Build (or reuse) a markdown cache of WGSL example files, packed smallest-first
# so as many complete examples as possible fit inside the model context budget.
suffix = ".wgsl"
path = "./"
cache_file = "cache.md"
cache = ""
if not Path(cache_file).exists():
    # (code, token_count) pairs for every shader file under `path`.
    # Fix: Path.read_text closes the handle (bare open(...).read() leaked one
    # per file) and pins the encoding instead of using the platform default.
    wgsl_files = []
    for file in Path(path).rglob(f"*{suffix}"):
        text = file.read_text(encoding="utf-8")
        text = re.sub(r"//.*", "", text)  # strip // line comments
        # Drop lines left blank (or whitespace-only) after comment removal.
        text = re.sub(r"^\s*\n", "", text, flags=re.MULTILINE)
        wgsl_files.append((text, len(encoding.encode(text))))
    # Smallest files first: maximizes the number of distinct examples included.
    wgsl_files.sort(key=lambda x: x[1])
    total_tokens = 0
    max_tokens = 14200  # headroom under the 16k context for the user query + answer
    with open(cache_file, "w", encoding="utf-8") as md_file:
        md_file.write(
            "Use the syntax and style of following WGSL WebGPU Shading Language examples delimited by triple backticks to respond to user inputs.\n\n"
        )
        for code, token_count in wgsl_files:
            if total_tokens + token_count > max_tokens:
                break
            md_file.write("Example WGSL WebGPU Shading Language Code:\n")
            md_file.write("```wgsl\n")
            md_file.write(code.strip() + "\n")
            md_file.write("```\n\n")
            total_tokens += token_count
# Read back the cache (freshly written above, or left over from a prior run).
# Fix: read via Path.read_text so the handle is closed and the encoding matches
# the writer's.
cache = Path(cache_file).read_text(encoding="utf-8")
# Chat program: the cached WGSL examples become the system prompt, the user's
# question goes into the user turn, and the assistant's reply is captured as
# 'answer' (deterministic: temperature 0, capped at 1024 tokens).
wgsl_bot = guidance(
    """
{{#system~}}
{{wgsl_cache}}
{{~/system}}
{{#user~}}
Respond to the following question according to the examples:
{{query}}
{{~/user}}
{{#assistant~}}
{{gen 'answer' temperature=0 max_tokens=1024}}
{{~/assistant}}
""",
    llm=llm_model,
)
user_query = input("Enter your query: ")
# Fall back to a canned question when the user enters nothing but whitespace.
if user_query.strip() == "":
    print("User query is empty. Using default query.\n")
    user_query = "Write basic pixel code"
result = wgsl_bot(wgsl_cache=cache, query=user_query)
print(result["answer"])