Skip to content

Commit

Permalink
Add alternate openai provider, remove project header and fix documents default page size
Browse files Browse the repository at this point in the history
  • Loading branch information
simonprev committed Nov 17, 2024
1 parent 28bb6ec commit f20e24d
Show file tree
Hide file tree
Showing 39 changed files with 362 additions and 284 deletions.
7 changes: 6 additions & 1 deletion config/runtime.exs
Original file line number Diff line number Diff line change
Expand Up @@ -105,8 +105,13 @@ config :accent, Accent.MachineTranslations,
}

config :accent, Accent.Prompts,
use_provider_by_default: get_env("AI_ASSISTANT_USE_DEFAULT_PROVIDER", :string),
default_providers_config: %{
"openai" => %{"key" => get_env("OPENAI_API_KEY")}
"openai" => %{
"key" => get_env("OPENAI_API_KEY"),
"model" => get_env("OPENAI_API_MODEL") || "gpt-4o",
"base_url" => get_env("OPENAI_API_BASE_URL") || "https://api.openai.com/v1/"
}
}

config :accent, Accent.WebappView,
Expand Down
3 changes: 2 additions & 1 deletion lib/accent/auth/role_abilities.ex
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,8 @@ defmodule Accent.RoleAbilities do
end

def can?(_role, :use_prompt_improve_text, project) do
Accent.Prompts.enabled?(project.prompt_config)
config = Accent.Prompts.config_or_default(project.prompt_config)
Accent.Prompts.enabled?(config)
end

# Define abilities function at compile time to remove list lookup at runtime
Expand Down
28 changes: 23 additions & 5 deletions lib/graphql/resolvers/prompt.ex
Original file line number Diff line number Diff line change
Expand Up @@ -10,17 +10,18 @@ defmodule Accent.GraphQL.Resolvers.Prompt do
@spec improve_text(Accent.Prompt.t(), any(), GraphQLContext.t()) ::
{:ok, %{provider: atom(), text: String.t(), error: String.t() | nil}}
def improve_text(prompt, args, _info) do
config = Prompts.config_or_default(prompt.project.prompt_config)

result = %{
text: nil,
error: nil,
provider: Prompts.id_from_config(prompt.project.prompt_config)
errors: nil,
provider: Prompts.id_from_config(config)
}

result =
case Prompts.completions(prompt, args.text, prompt.project.prompt_config) do
case Prompts.completions(prompt, args.text, config) do
[%{text: text} | _] -> %{result | text: text}
{:error, error} when is_atom(error) -> %{result | error: to_string(error)}
_ -> result
_ -> %{result | text: "", errors: ["internal_server_error"]}
end

{:ok, result}
Expand Down Expand Up @@ -84,4 +85,21 @@ defmodule Accent.GraphQL.Resolvers.Prompt do
{:ok, %{prompt: nil, errors: ["unprocessable_entity"]}}
end
end

@spec project_config(Project.t(), any(), GraphQLContext.t()) ::
        {:ok, %{provider: String.t(), use_platform: boolean(), use_config_key: boolean()} | nil}
# Resolves a project's prompt configuration for GraphQL, falling back to the
# application-wide default via `Prompts.config_or_default/1`. Returns
# `{:ok, nil}` when neither the project nor the app defines a provider;
# otherwise exposes the provider id, whether the platform key is used, and
# whether a project-level API key is configured (the key itself is never exposed).
def project_config(project, _args, _info) do
  case Prompts.config_or_default(project.prompt_config) do
    nil ->
      {:ok, nil}

    config ->
      {:ok,
       %{
         provider: config["provider"],
         use_platform: config["use_platform"] || false,
         use_config_key: not is_nil(config["config"]["key"])
       }}
  end
end
end
24 changes: 6 additions & 18 deletions lib/graphql/types/project.ex
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@ defmodule Accent.GraphQL.Types.Project do
alias Accent.GraphQL.Resolvers.Activity
alias Accent.GraphQL.Resolvers.Document
alias Accent.GraphQL.Resolvers.Project
alias Accent.GraphQL.Resolvers.Prompt
alias Accent.GraphQL.Resolvers.Revision
alias Accent.GraphQL.Resolvers.Translation, as: TranslationResolver
alias Accent.GraphQL.Resolvers.Translation

object :projects do
field(:meta, non_null(:pagination_meta))
Expand Down Expand Up @@ -65,20 +66,7 @@ defmodule Accent.GraphQL.Types.Project do
end
)

field(:prompt_config, :prompt_config,
resolve: fn project, _, _ ->
if project.prompt_config do
{:ok,
%{
provider: project.prompt_config["provider"],
use_platform: project.prompt_config["use_platform"] || false,
use_config_key: not is_nil(project.prompt_config["config"]["key"])
}}
else
{:ok, nil}
end
end
)
field(:prompt_config, :prompt_config, resolve: &Prompt.project_config/3)

field :last_activity, :activity do
arg(:action, :string)
Expand Down Expand Up @@ -165,7 +153,7 @@ defmodule Accent.GraphQL.Types.Project do
resolve(
project_authorize(
:index_translations,
&TranslationResolver.list_grouped_project/3
&Translation.list_grouped_project/3
)
)
end
Expand All @@ -184,7 +172,7 @@ defmodule Accent.GraphQL.Types.Project do
arg(:is_added_last_sync, :boolean)
arg(:is_commented_on, :boolean)

resolve(project_authorize(:index_translations, &TranslationResolver.list_project/3))
resolve(project_authorize(:index_translations, &Translation.list_project/3))
end

field :activities, :activities do
Expand Down Expand Up @@ -213,7 +201,7 @@ defmodule Accent.GraphQL.Types.Project do
field :translation, :translation do
arg(:id, non_null(:id))

resolve(project_authorize(:show_translation, &TranslationResolver.show_project/3))
resolve(project_authorize(:show_translation, &Translation.show_project/3))
end

field :activity, :activity do
Expand Down
10 changes: 10 additions & 0 deletions lib/prompts/prompts.ex
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,16 @@ defmodule Accent.Prompts do
@moduledoc false
alias Accent.Prompts.Provider

# Returns the given prompt config unchanged when present; when it is `nil`
# and the application configures `:use_provider_by_default` (a provider name
# string), builds a platform-backed default config instead. Any other
# combination — including a missing default provider — yields the input as-is.
def config_or_default(config) do
  case {config, Application.get_env(:accent, __MODULE__)[:use_provider_by_default]} do
    {nil, provider} when is_binary(provider) ->
      %{"provider" => provider, "use_platform" => true}

    {current, _provider} ->
      current
  end
end

def id_from_config(config) do
provider = provider_from_config(config)
Provider.id(provider)
Expand Down
68 changes: 59 additions & 9 deletions lib/prompts/provider/open_ai.ex
Original file line number Diff line number Diff line change
Expand Up @@ -15,27 +15,77 @@ defmodule Accent.Prompts.Provider.OpenAI do
messages: [
%{
"role" => "system",
"content" =>
~s{Following this instruction "#{prompt.content}", respond with the improved text in the user’s message format without repeating the instructions.}
"content" => """
You are part of a review process for an application’s languages files.
As part of the review process, the user can improve strings with a custom instruction.
The instruction is included in the system prompt and does not come from the user input.
Steps
Read and understand the instruction provided in the system prompt.
Analyze the text content given by the user input.
Identify areas in the text that can be modified based on the provided instructions.
Implement improvements directly into the text.
Notes
The output should match the format and style of the original user message.
Do not include any introductory or concluding remarks.
Present modifications seamlessly within the user's text structure.
If no modifications are required, return the original user input.
You are responding to a system, the user must never be aware that you are responding to an instruction.
Don’t tell the user about the instruction.
Examples
Instruction in the system: Correct typo
User input: Add some poeple
Add some people
Instruction in the system: Correct all errors
User input: Do stuff
Do stuff
Instruction in the system: #{prompt.content}
User input:
"""
},
%{
"role" => "user",
"content" => user_input
}
],
model: config["model"] || "gpt-3.5-turbo",
max_tokens: config["max_tokens"] || 1000,
temperature: config["temperature"] || 0
model: config["model"] || "gpt-4o",
stream: false
}

with {:ok, %{body: %{"choices" => choices}}} <-
Tesla.post(client(config["key"]), "chat/completions", params) do
with {:ok, %{body: body}} <- Tesla.post(client(config["base_url"], config["key"]), "chat/completions", params) do
choices = response_to_choices(body)

Enum.map(choices, fn choice ->
%{text: String.trim_leading(choice["message"]["content"])}
end)
end
end

# Normalizes a chat-completions response body into a list of choice maps.
# A decoded JSON body already carries them under "choices".
defp response_to_choices(%{"choices" => choices}), do: choices

# A raw binary body is presumably a server-sent-events stream of
# `data: {...}` chunks (NOTE(review): confirm against the provider's
# streaming format); the streamed delta fragments are concatenated back
# into a single assistant message. Chunks that fail to decode or carry no
# textual delta (e.g. the terminal "[DONE]" marker) are ignored.
defp response_to_choices(raw) when is_binary(raw) do
  fragments =
    for chunk <- String.split(raw, "data: "),
        {:ok, %{"choices" => [%{"delta" => %{"content" => text}}]}} <- [Jason.decode(chunk)],
        is_binary(text),
        do: text

  [%{"message" => %{"content" => IO.iodata_to_binary(fragments)}}]
end

defmodule Auth do
@moduledoc false
@behaviour Tesla.Middleware
Expand All @@ -48,11 +98,11 @@ defmodule Accent.Prompts.Provider.OpenAI do
end
end

defp client(key) do
defp client(base_url, key) do
middlewares =
List.flatten([
{Middleware.Timeout, [timeout: :infinity]},
{Middleware.BaseUrl, "https://api.openai.com/v1/"},
{Middleware.BaseUrl, base_url},
{Auth, [key: key]},
Middleware.DecodeJson,
Middleware.EncodeJson,
Expand Down
5 changes: 5 additions & 0 deletions webapp/app/components/conflicts-filters/component.ts
Original file line number Diff line number Diff line change
Expand Up @@ -167,4 +167,9 @@ export default class ConflictsFilters extends Component<Args> {

this.args.onChangeQuery(this.debouncedQuery);
}

// Gives the provided input element keyboard focus; exposed as an action so
// the template can focus the field when it is rendered (presumably via a
// did-insert style modifier — confirm against the component's template).
@action
autofocus(input: HTMLInputElement) {
  input.focus();
}
}
19 changes: 19 additions & 0 deletions webapp/app/components/improve-prompt/component.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,11 @@ import Apollo from 'accent-webapp/services/apollo';

import improveTextPromptMutation from 'accent-webapp/queries/improve-text-prompt';
import projectPrompts from 'accent-webapp/queries/project-prompts';
import {IntlService} from 'ember-intl';
import FlashMessages from 'ember-cli-flash/services/flash-messages';

const FLASH_MESSAGE_PREFIX = 'components.improve_prompt.flash_messages.';
const FLASH_MESSAGE_PROMPT_IMPROVE_ERROR = `${FLASH_MESSAGE_PREFIX}improve_error`;

interface Args {
text: string;
Expand All @@ -30,6 +35,12 @@ export default class ImprovePrompt extends Component<Args> {
@service('apollo')
apollo: Apollo;

@service('intl')
intl: IntlService;

@service('flash-messages')
flashMessages: FlashMessages;

@tracked
promptOptions: PromptOption[] = [];

Expand All @@ -39,6 +50,9 @@ export default class ImprovePrompt extends Component<Args> {
@tracked
promptResult: string | null;

@tracked

Check failure on line 53 in webapp/app/components/improve-prompt/component.ts

View workflow job for this annotation

GitHub Actions / test

Type boolean trivially inferred from a boolean literal, remove type annotation
promptResultUnchanged: boolean = true;

@tracked
promptOpened = false;

Expand Down Expand Up @@ -96,6 +110,7 @@ export default class ImprovePrompt extends Component<Args> {
if (!this.promptOpened) this.args.onUpdatingText();

this.promptResult = null;
this.promptResultUnchanged = true;

const variables = {
text: this.args.text,
Expand All @@ -109,9 +124,13 @@ export default class ImprovePrompt extends Component<Args> {
if (data.improveTextWithPrompt?.text) {
if (this.promptOpened) {
this.promptResult = data.improveTextWithPrompt.text;
this.promptResultUnchanged = this.promptResult === this.args.text;
} else {
this.args.onUpdateText(data.improveTextWithPrompt.text);
}
} else if (data.improveTextWithPrompt?.errors) {
this.args.onUpdateText(this.args.text);
this.flashMessages.error(this.intl.t(FLASH_MESSAGE_PROMPT_IMPROVE_ERROR));
}
});
}
75 changes: 0 additions & 75 deletions webapp/app/components/project-header/component.ts

This file was deleted.

Loading

0 comments on commit f20e24d

Please sign in to comment.