diff --git a/src/Directory.Packages.props b/src/Directory.Packages.props
index 74b9a5d0..76d9d2b2 100644
--- a/src/Directory.Packages.props
+++ b/src/Directory.Packages.props
@@ -12,7 +12,7 @@
-
+
diff --git a/src/ResXManager.Translators/AzureOpenAITranslator.cs b/src/ResXManager.Translators/AzureOpenAITranslator.cs
index 041076e6..881f6d70 100644
--- a/src/ResXManager.Translators/AzureOpenAITranslator.cs
+++ b/src/ResXManager.Translators/AzureOpenAITranslator.cs
@@ -1,6 +1,6 @@
namespace ResXManager.Translators;
-using global::Microsoft.DeepDev;
+using global::Microsoft.ML.Tokenizers;
using Newtonsoft.Json;
using ResXManager.Infrastructure;
using System;
@@ -138,11 +138,11 @@ private sealed class ChatCompletionsResponse
private async Task TranslateUsingChatModel(ITranslationSession translationSession, HttpClient client)
{
- const string ApiVersion = "2023-05-15";
+ const string ApiVersion = "2024-06-01";
var endpointUri = new Uri($"/openai/deployments/{ModelDeploymentName}/chat/completions?api-version={ApiVersion}", UriKind.Relative);
- var tokenizer = await TokenizerBuilder.CreateByModelNameAsync(
+ var tokenizer = TiktokenTokenizer.CreateForModel(
ModelName ?? throw new InvalidOperationException("No model name provided in configuration!")
- ).ConfigureAwait(false);
+ );
var retries = 0;
@@ -200,7 +200,7 @@ private async Task TranslateUsingChatModel(ITranslationSession translationSessio
}
private IEnumerable<(ChatMessage message, ICollection<ITranslationItem> items)> PackChatModelMessagesIntoBatches(
- ITranslationSession translationSession, IEnumerable<ITranslationItem> items, CultureInfo targetCulture, ITokenizer tokenizer
+ ITranslationSession translationSession, IEnumerable<ITranslationItem> items, CultureInfo targetCulture, TiktokenTokenizer tokenizer
)
{
var batchItems = new List<ITranslationItem>();
@@ -209,7 +209,7 @@ private async Task TranslateUsingChatModel(ITranslationSession translationSessio
foreach (var item in items)
{
- var currentBatch = batchItems.Concat(new[] { item }).ToList();
+ var currentBatch = batchItems.Concat([item]).ToList();
var currentMessage = GenerateChatModelMessageForTranslations(translationSession, currentBatch, targetCulture);
if (currentMessage?.Content is null)
@@ -218,7 +218,7 @@ private async Task TranslateUsingChatModel(ITranslationSession translationSessio
continue;
}
- var tokens = tokenizer.Encode(currentMessage.Content, new List<string>()).Count;
+ var tokens = tokenizer.CountTokens(currentMessage.Content);
if (tokens > PromptTokens)
{
translationSession.AddMessage($"Prompt for resource would exceed {PromptTokens} tokens: {item.Source.Substring(0, 20)}...");
@@ -235,7 +235,7 @@ private async Task TranslateUsingChatModel(ITranslationSession translationSessio
{
yield return (batchMessage, batchItems);
- batchItems = new List();
+ batchItems = [];
batchTokens = 0;
}
@@ -414,11 +414,11 @@ private sealed class CompletionsResponse
private async Task TranslateUsingCompletionsModel(ITranslationSession translationSession, HttpClient client)
{
- const string ApiVersion = "2023-05-15";
+ const string ApiVersion = "2024-06-01";
var endpointUri = new Uri($"/openai/deployments/{ModelDeploymentName}/completions?api-version={ApiVersion}", UriKind.Relative);
- var tokenizer = await TokenizerBuilder.CreateByModelNameAsync(
+ var tokenizer = TiktokenTokenizer.CreateForModel(
ModelName ?? throw new InvalidOperationException("No model name provided in configuration!")
- ).ConfigureAwait(false);
+ );
var retries = 0;
@@ -467,7 +467,7 @@ private async Task TranslateUsingCompletionsModel(ITranslationSession translatio
}
}
- private IEnumerable<PromptList> PackCompletionModelPromptsIntoBatches(ITranslationSession translationSession, ITokenizer tokenizer)
+ private IEnumerable<PromptList> PackCompletionModelPromptsIntoBatches(ITranslationSession translationSession, TiktokenTokenizer tokenizer)
{
var batchItems = new PromptList();
var batchTokens = 0;
@@ -481,7 +481,7 @@ private IEnumerable PackCompletionModelPromptsIntoBatches(ITranslati
continue;
}
- var tokens = tokenizer.Encode(prompt, new List<string>()).Count;
+ var tokens = tokenizer.CountTokens(prompt);
if (tokens > PromptTokens)
{
@@ -499,7 +499,7 @@ private IEnumerable PackCompletionModelPromptsIntoBatches(ITranslati
{
yield return batchItems;
- batchItems = new PromptList();
+ batchItems = [];
batchTokens = 0;
}
@@ -634,12 +634,12 @@ public string? ModelName
private static IList<ICredentialItem> GetCredentials()
{
- return new ICredentialItem[]
- {
+ return
+ [
new CredentialItem("AuthenticationKey", "Key"),
new CredentialItem("Url", "Endpoint Url", false),
new CredentialItem("ModelDeploymentName", "Model Deployment Name", false),
new CredentialItem("ModelName", "Model Name", false),
- };
+ ];
}
}
diff --git a/src/ResXManager.Translators/FodyWeavers.xml b/src/ResXManager.Translators/FodyWeavers.xml
index 6c847fc3..c7599a7a 100644
--- a/src/ResXManager.Translators/FodyWeavers.xml
+++ b/src/ResXManager.Translators/FodyWeavers.xml
@@ -2,5 +2,5 @@
-
+
\ No newline at end of file
diff --git a/src/ResXManager.Translators/OpenAITranslator.cs b/src/ResXManager.Translators/OpenAITranslator.cs
index d223c453..688d68c0 100644
--- a/src/ResXManager.Translators/OpenAITranslator.cs
+++ b/src/ResXManager.Translators/OpenAITranslator.cs
@@ -1,6 +1,6 @@
namespace ResXManager.Translators;
-using global::Microsoft.DeepDev;
+using global::Microsoft.ML.Tokenizers;
using Newtonsoft.Json;
using ResXManager.Infrastructure;
using System;
@@ -145,9 +145,9 @@ private sealed class CompletionsResponse
private async Task TranslateUsingCompletionsModel(ITranslationSession translationSession, HttpClient client)
{
var endpointUri = new Uri($"/v1/chat/completions", UriKind.Relative);
- var tokenizer = await TokenizerBuilder.CreateByModelNameAsync(
+ var tokenizer = TiktokenTokenizer.CreateForModel(
ModelName ?? throw new InvalidOperationException("No model name provided in configuration!")
- ).ConfigureAwait(false);
+ );
var retries = 0;
@@ -200,7 +200,7 @@ private async Task TranslateUsingCompletionsModel(ITranslationSession translatio
}
}
- private IEnumerable<(ITranslationItem item, string prompt)> PackCompletionModelPrompts(ITranslationSession translationSession, ITokenizer tokenizer)
+ private IEnumerable<(ITranslationItem item, string prompt)> PackCompletionModelPrompts(ITranslationSession translationSession, TiktokenTokenizer tokenizer)
{
foreach (var item in translationSession.Items)
{
@@ -211,7 +211,7 @@ private async Task TranslateUsingCompletionsModel(ITranslationSession translatio
continue;
}
- var tokens = tokenizer.Encode(prompt, new List<string>()).Count;
+ var tokens = tokenizer.CountTokens(prompt);
if (tokens > PromptTokens)
{
@@ -328,10 +328,10 @@ public string? ModelName
private static IList<ICredentialItem> GetCredentials()
{
- return new ICredentialItem[]
- {
+ return
+ [
new CredentialItem("AuthenticationKey", "Key"),
new CredentialItem("ModelName", "Model Name", false),
- };
+ ];
}
}
diff --git a/src/ResXManager.Translators/ResXManager.Translators.csproj b/src/ResXManager.Translators/ResXManager.Translators.csproj
index fc3a3cd6..85b76b7d 100644
--- a/src/ResXManager.Translators/ResXManager.Translators.csproj
+++ b/src/ResXManager.Translators/ResXManager.Translators.csproj
@@ -52,7 +52,7 @@
- <PackageReference Include="Microsoft.DeepDev.TokenizerLib" />
+ <PackageReference Include="Microsoft.ML.Tokenizers" />