Skip to content

Commit be32b09

Browse files
authored
Merge pull request #18 from WhatTheFuzz/feature/background_thread
Implement query submission on BackgroundTaskThread
2 parents e771fd2 + 271f563 commit be32b09

File tree

4 files changed

+35
-17
lines changed

4 files changed

+35
-17
lines changed

README.md

Lines changed: 1 addition & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -82,12 +82,7 @@ for each model, as described in [OpenAI's documentation][tokens].
8282

8383
## Known Issues
8484

85-
The query does not use Python's [asyncio][asyncio] and thus blocks the main
86-
thread. You may be unable to interact with the Binary Ninja UI while the query
87-
is waiting to be resolved. In some cases, your operating system may detect that
88-
Binary Ninja has stopped responding and asks you to Force Quit it. I have not
89-
experienced any egregiously long hangs, however. This is documented in issue
90-
[#8][issue-8].
85+
Please submit an issue if you find something that isn't working properly.
9186

9287
## License
9388

@@ -97,6 +92,4 @@ This project is licensed under the [MIT license][license].
9792
[token]:https://beta.openai.com/account/api-keys
9893
[tokens]:https://beta.openai.com/docs/models/gpt-3
9994
[entry]:./src/entry.py
100-
[asyncio]:https://docs.python.org/3/library/asyncio.html
101-
[issue-8]:https://github.com/WhatTheFuzz/binaryninja-openai/issues/8
10295
[license]:./LICENSE

plugin.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,6 @@
2727
"openai"
2828
]
2929
},
30-
"version": "1.2.0",
30+
"version": "1.3.0",
3131
"minimumbinaryninjaversion": 3200
3232
}

src/agent.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,8 @@
1212
from binaryninja.settings import Settings
1313
from binaryninja import log
1414

15+
from . query import Query
16+
1517

1618
class Agent:
1719

@@ -142,11 +144,9 @@ def generate_query(self, function: Union[LowLevelILFunction,
142144
prompt += '\n'.join(self.instruction_list(function))
143145
return prompt
144146

145-
def send_query(self, query: str) -> str:
146-
'''Sends a query to the engine and returns the response.'''
147-
response: str = openai.Completion.create(
148-
model=self.model,
149-
prompt=query,
150-
max_tokens=self.get_token_count(),
151-
)
152-
return response.choices[0].text
147+
def send_query(self, query: str) -> None:
    '''Submit a prompt to OpenAI on a background thread.

    The request runs on a Binary Ninja ``BackgroundTaskThread`` so the
    UI is not blocked while waiting for the completion. The response is
    printed by the task when it arrives rather than returned here.

    :param query: the fully-formed prompt string to send to the model.
    '''
    # Bind the task to a distinct name; the original rebound the str
    # parameter ``query`` to a Query object, shadowing the argument
    # with a value of a different type.
    task = Query(query_string=query,
                 model=self.model,
                 max_token_count=self.get_token_count())
    task.start()

src/query.py

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
import openai
2+
from binaryninja.plugin import BackgroundTaskThread
3+
4+
5+
class Query(BackgroundTaskThread):
    '''Background task that submits a prompt to the OpenAI completion
    endpoint so the Binary Ninja UI stays responsive while waiting.'''

    def __init__(self, query_string: str, model: str,
                 max_token_count: int) -> None:
        '''Store the request parameters and set up the task thread.

        :param query_string: the fully-formed prompt to send.
        :param model: the OpenAI model identifier to query.
        :param max_token_count: cap on tokens for the completion.
        '''
        # can_cancel=False: an in-flight HTTP request to OpenAI cannot
        # be interrupted cleanly from this thread.
        BackgroundTaskThread.__init__(self,
                                      initial_progress_text="",
                                      can_cancel=False)
        self.query_string: str = query_string
        self.model: str = model
        self.max_token_count: int = max_token_count

    def run(self) -> None:
        '''Executed on the background thread; sends the query and
        prints the first completion's text when it arrives.'''
        self.progress = "Submitting query to OpenAI."

        # Completion.create returns an OpenAIObject, not a str — the
        # original ``response: str`` annotation was incorrect.
        response = openai.Completion.create(
            model=self.model,
            prompt=self.query_string,
            max_tokens=self.max_token_count,
        )
        # Notify the user.
        print(response.choices[0].text)

0 commit comments

Comments
 (0)