added more options, split very long responses
- added an option to change `max_tokens`
- added an option to show additional info
- responses are now broken up into chunks to avoid them getting cut off; they can still be cut off by the token limit - that will probably be fixed in the future (a rough sketch of the approach follows below)
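A rough, self-contained sketch of the chunking approach this commit takes, assuming the same regex pattern as the diff below; the `SplitIntoChunks` helper, the sample input, and the default chunk size of 1000 are illustrative, not part of the plugin API:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.RegularExpressions;

static class ChunkingSketch
{
    // Split a long response into pieces of at most chunkSize characters,
    // breaking on whitespace (or end of string) so words stay intact.
    // Note: '.' does not match '\n' by default, so line breaks also end a chunk.
    public static List<string> SplitIntoChunks(string text, int chunkSize = 1000)
    {
        var regex = new Regex(@".{1," + chunkSize + @"}(\s+|$)");
        return regex.Matches(text).Select(m => m.Value).ToList();
    }

    public static void Main()
    {
        // Roughly 2700 characters of filler text -> expect three chunks.
        var longText = string.Join(" ", Enumerable.Repeat("lorem ipsum dolor sit amet", 100));
        foreach (var chunk in SplitIntoChunks(longText))
            Console.WriteLine($"{chunk.Length} chars");
    }
}
```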
Eisenhuth committed Mar 24, 2023
1 parent 33494ee commit 9ec3d84
Showing 3 changed files with 27 additions and 8 deletions.
31 changes: 24 additions & 7 deletions dalamud-chatgpt/ChatGPTPlugin.cs
@@ -1,6 +1,8 @@
using System.Net.Http;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Dalamud.Game.Command;
using Dalamud.Game.Gui;
@@ -25,7 +27,9 @@ public class ChatGPTPlugin : IDalamudPlugin
[PluginService] private static CommandManager CommandManager { get; set; } = null!;

private string configKey;
private int configMaxTokens;
private bool configLineBreaks;
private bool configAdditionalInfo;

public ChatGPTPlugin([RequiredVersion("1.0")] DalamudPluginInterface dalamudPluginInterface, [RequiredVersion("1.0")] ChatGui chatGui, [RequiredVersion("1.0")] CommandManager commandManager)
{
@@ -68,7 +72,7 @@ private async Task SendPrompt(string input)
var client = new HttpClient();
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", configuration.ApiKey);

var requestBody = $"{{\"model\": \"{Configuration.Model}\", \"prompt\": \"{input}\", \"max_tokens\": 256}}";
var requestBody = $"{{\"model\": \"{Configuration.Model}\", \"prompt\": \"{input}\", \"max_tokens\": {configuration.MaxTokens}}}";
var content = new StringContent(requestBody, Encoding.UTF8, "application/json");

var response = await client.PostAsync(Configuration.Endpoint, content);
@@ -81,8 +85,17 @@ private async Task SendPrompt(string input)
{
if(configLineBreaks)
text = text.Replace("\r", "").Replace("\n", "");

chatGui.Print($"ChatGPT prompt: {input}{text}");

const int chunkSize = 1000;
var regex = new Regex(@".{1," + chunkSize + @"}(\s+|$)"); //jesus take the wheel
var chunks = regex.Matches(text).Select(match => match.Value);
chunks = chunks.ToList();

if(configAdditionalInfo)
chatGui.Print($"ChatGPT \nprompt: {input}\nmodel: {Configuration.Model}\nmax_tokens: {configMaxTokens}\nresponse length: {text.Length}\nchunks: {chunks.Count()}");

foreach (var chunk in chunks)
chatGui.Print($"ChatGPT: {chunk}");
}
}
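For reference, a small standalone illustration (hypothetical input, not part of the commit) of how the chunking regex above behaves: the greedy `.{1,1000}` backtracks until the next character is whitespace or end-of-string, so chunks break on word boundaries.

```csharp
using System;
using System.Linq;
using System.Text.RegularExpressions;

// Hypothetical input: 900 'a's, a space, then 300 'b's (1201 chars total).
var sample = new string('a', 900) + " " + new string('b', 300);
var parts = new Regex(@".{1,1000}(\s+|$)").Matches(sample)
    .Select(m => m.Value)
    .ToList();

Console.WriteLine(parts.Count);      // 2
Console.WriteLine(parts[0].Length);  // 901: the 900 'a's plus the trailing space
Console.WriteLine(parts[1].Length);  // 300: the 'b's, terminated by end-of-string
```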

@@ -96,7 +109,9 @@ private void DrawConfiguration()
ImGui.Begin($"{Name} Configuration", ref drawConfiguration);

ImGui.Separator();

ImGui.Checkbox("remove line breaks from responses", ref configLineBreaks);
ImGui.Checkbox("show additional info", ref configAdditionalInfo);
ImGui.InputInt("max_tokens", ref configMaxTokens);
ImGui.InputText("API Key", ref configKey, 60, ImGuiInputTextFlags.Password);

if (ImGui.Button("Get API Key"))
@@ -105,8 +120,6 @@ private void DrawConfiguration()
Util.OpenLink(apiKeysUrl);
}

ImGui.Checkbox("remove line breaks from responses", ref configLineBreaks);

ImGui.Separator();


@@ -129,13 +142,17 @@ private static void OpenConfig()
private void LoadConfiguration()
{
configKey = configuration.ApiKey;
configMaxTokens = configuration.MaxTokens != 0 ? configuration.MaxTokens : 256;
configLineBreaks = configuration.RemoveLineBreaks;
configAdditionalInfo = configuration.ShowAdditionalInfo;
}

private void SaveConfiguration()
{
configuration.ApiKey = configKey;
configuration.MaxTokens = configMaxTokens;
configuration.RemoveLineBreaks = configLineBreaks;
configuration.ShowAdditionalInfo = configAdditionalInfo;

PluginInterface.SavePluginConfig(configuration);
}
2 changes: 2 additions & 0 deletions dalamud-chatgpt/Configuration.cs
@@ -7,7 +7,9 @@ public class Configuration : IPluginConfiguration
{
public int Version { get; set; }
public bool RemoveLineBreaks { get; set; } = false;
public bool ShowAdditionalInfo { get; set; }
public string ApiKey { get; set; } = "";
public int MaxTokens { get; set; }
public const string Endpoint = "https://api.openai.com/v1/completions";
public const string Model = "text-davinci-003";
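Since the new `MaxTokens` property has no initializer, configurations saved before this change deserialize it as `0`; `LoadConfiguration` in ChatGPTPlugin.cs above falls back to 256 in that case. A minimal standalone illustration of that fallback (the helper name is illustrative):

```csharp
using System;

// Mirrors the fallback in LoadConfiguration: a stored value of 0
// (e.g. a config saved before MaxTokens existed) becomes the old default of 256.
static int ResolveMaxTokens(int storedMaxTokens) =>
    storedMaxTokens != 0 ? storedMaxTokens : 256;

Console.WriteLine(ResolveMaxTokens(0));    // 256
Console.WriteLine(ResolveMaxTokens(512));  // 512
```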

2 changes: 1 addition & 1 deletion dalamud-chatgpt/dalamud-chatgpt.csproj
@@ -10,7 +10,7 @@
<AppendTargetFrameworkToOutputPath>false</AppendTargetFrameworkToOutputPath>
<OutputPath>$(AppData)\Eisenhuth\DalamudDevPlugins\ChatGPTPlugin\</OutputPath>
<ProduceReferenceAssembly>false</ProduceReferenceAssembly>
<AssemblyVersion>1.0.3.0</AssemblyVersion>
<AssemblyVersion>1.0.4.0</AssemblyVersion>
<CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
<PackageProjectUrl>https://github.com/Eisenhuth/dalamud-chatgpt</PackageProjectUrl>
