Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ Create a `.env` file and add OpenCommit config variables there like this:

```env
...
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek, aimlapi>
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek, aimlapi, minimax>
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
OCO_API_URL=<may be used to set proxy path to OpenAI api>
OCO_API_CUSTOM_HEADERS=<JSON string of custom HTTP headers to include in API requests>
Expand Down Expand Up @@ -235,6 +235,8 @@ oco config set OCO_AI_PROVIDER=azure OCO_API_KEY=<your_azure_api_key> OCO_API_UR
oco config set OCO_AI_PROVIDER=flowise OCO_API_KEY=<your_flowise_api_key> OCO_API_URL=<your_flowise_endpoint>

oco config set OCO_AI_PROVIDER=ollama OCO_API_KEY=<your_ollama_api_key> OCO_API_URL=<your_ollama_endpoint>

oco config set OCO_AI_PROVIDER=minimax OCO_API_KEY=<your_minimax_api_key>
```

### Locale configuration
Expand Down
16 changes: 12 additions & 4 deletions src/commands/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,8 @@ export const MODEL_LIST = {
],
deepseek: ['deepseek-chat', 'deepseek-reasoner'],

minimax: ['MiniMax-M2.7', 'MiniMax-M2.5', 'MiniMax-M2.5-highspeed'],

// AI/ML API available chat-completion models
// https://api.aimlapi.com/v1/models
aimlapi: [
Expand Down Expand Up @@ -593,6 +595,8 @@ const getDefaultModel = (provider: string | undefined): string => {
return MODEL_LIST.aimlapi[0];
case 'openrouter':
return MODEL_LIST.openrouter[0];
case 'minimax':
return MODEL_LIST.minimax[0];
default:
return MODEL_LIST.openai[0];
}
Expand Down Expand Up @@ -784,9 +788,10 @@ export const configValidators = {
'groq',
'deepseek',
'aimlapi',
'openrouter'
'openrouter',
'minimax'
].includes(value) || value.startsWith('ollama'),
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi', 'minimax' or 'openai' (default)`
);

return value;
Expand Down Expand Up @@ -844,7 +849,8 @@ export enum OCO_AI_PROVIDER_ENUM {
MLX = 'mlx',
DEEPSEEK = 'deepseek',
AIMLAPI = 'aimlapi',
OPENROUTER = 'openrouter'
OPENROUTER = 'openrouter',
MINIMAX = 'minimax'
}

export const PROVIDER_API_KEY_URLS: Record<string, string | null> = {
Expand All @@ -857,6 +863,7 @@ export const PROVIDER_API_KEY_URLS: Record<string, string | null> = {
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'https://openrouter.ai/keys',
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'https://aimlapi.com/app/keys',
[OCO_AI_PROVIDER_ENUM.AZURE]: 'https://portal.azure.com/',
[OCO_AI_PROVIDER_ENUM.MINIMAX]: 'https://platform.minimaxi.com/user-center/basic-information/interface-key',
[OCO_AI_PROVIDER_ENUM.OLLAMA]: null,
[OCO_AI_PROVIDER_ENUM.MLX]: null,
[OCO_AI_PROVIDER_ENUM.FLOWISE]: null,
Expand All @@ -871,7 +878,8 @@ export const RECOMMENDED_MODELS: Record<string, string> = {
[OCO_AI_PROVIDER_ENUM.MISTRAL]: 'mistral-small-latest',
[OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'deepseek-chat',
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'openai/gpt-4o-mini',
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'gpt-4o-mini'
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'gpt-4o-mini',
[OCO_AI_PROVIDER_ENUM.MINIMAX]: 'MiniMax-M2.7'
}

export type ConfigType = {
Expand Down
6 changes: 4 additions & 2 deletions src/commands/setup.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,8 @@ const PROVIDER_DISPLAY_NAMES: Record<string, string> = {
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'OpenRouter (Multiple providers)',
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'AI/ML API',
[OCO_AI_PROVIDER_ENUM.AZURE]: 'Azure OpenAI',
[OCO_AI_PROVIDER_ENUM.MLX]: 'MLX (Apple Silicon, local)'
[OCO_AI_PROVIDER_ENUM.MLX]: 'MLX (Apple Silicon, local)',
[OCO_AI_PROVIDER_ENUM.MINIMAX]: 'MiniMax (M2.7, M2.5, fast inference)'
};

const PRIMARY_PROVIDERS = [
Expand All @@ -48,7 +49,8 @@ const OTHER_PROVIDERS = [
OCO_AI_PROVIDER_ENUM.OPENROUTER,
OCO_AI_PROVIDER_ENUM.AIMLAPI,
OCO_AI_PROVIDER_ENUM.AZURE,
OCO_AI_PROVIDER_ENUM.MLX
OCO_AI_PROVIDER_ENUM.MLX,
OCO_AI_PROVIDER_ENUM.MINIMAX
];

const NO_API_KEY_PROVIDERS = [
Expand Down
49 changes: 49 additions & 0 deletions src/engine/minimax.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { normalizeEngineError } from '../utils/engineErrorHandler';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { OpenAiEngine, OpenAiConfig } from './openAi';

export interface MiniMaxConfig extends OpenAiConfig {}

export class MiniMaxEngine extends OpenAiEngine {
constructor(config: MiniMaxConfig) {
super({
baseURL: 'https://api.minimax.io/v1',
...config
});
}

public generateCommitMessage = async (
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
): Promise<string | null> => {
const params = {
model: this.config.model,
messages,
temperature: 0.01,
top_p: 0.1,
max_tokens: this.config.maxTokensOutput
};

try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content as string) + 4)
.reduce((a, b) => a + b, 0);

if (
REQUEST_TOKENS >
this.config.maxTokensInput - this.config.maxTokensOutput
)
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);

const completion = await this.client.chat.completions.create(params);

const message = completion.choices[0].message;
let content = message?.content;
return removeContentTags(content, 'think');
} catch (error) {
throw normalizeEngineError(error, 'minimax', this.config.model);
}
};
}
4 changes: 4 additions & 0 deletions src/utils/engine.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import { MLXEngine } from '../engine/mlx';
import { DeepseekEngine } from '../engine/deepseek';
import { AimlApiEngine } from '../engine/aimlapi';
import { OpenRouterEngine } from '../engine/openrouter';
import { MiniMaxEngine } from '../engine/minimax';

export function parseCustomHeaders(headers: any): Record<string, string> {
let parsedHeaders = {};
Expand Down Expand Up @@ -88,6 +89,9 @@ export function getEngine(): AiEngine {
case OCO_AI_PROVIDER_ENUM.OPENROUTER:
return new OpenRouterEngine(DEFAULT_CONFIG);

case OCO_AI_PROVIDER_ENUM.MINIMAX:
return new MiniMaxEngine(DEFAULT_CONFIG);

default:
return new OpenAiEngine(DEFAULT_CONFIG);
}
Expand Down
3 changes: 3 additions & 0 deletions src/utils/errors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ export const PROVIDER_BILLING_URLS: Record<string, string | null> = {
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'https://openrouter.ai/credits',
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'https://aimlapi.com/app/billing',
[OCO_AI_PROVIDER_ENUM.AZURE]: 'https://portal.azure.com/#view/Microsoft_Azure_CostManagement',
[OCO_AI_PROVIDER_ENUM.MINIMAX]: 'https://platform.minimaxi.com/user-center/basic-information',
[OCO_AI_PROVIDER_ENUM.OLLAMA]: null,
[OCO_AI_PROVIDER_ENUM.MLX]: null,
[OCO_AI_PROVIDER_ENUM.FLOWISE]: null,
Expand Down Expand Up @@ -202,6 +203,8 @@ export function getRecommendedModel(provider: string): string | null {
return 'openai/gpt-4o-mini';
case OCO_AI_PROVIDER_ENUM.AIMLAPI:
return 'gpt-4o-mini';
case OCO_AI_PROVIDER_ENUM.MINIMAX:
return 'MiniMax-M2.7';
default:
return null;
}
Expand Down
32 changes: 32 additions & 0 deletions src/utils/modelCache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -185,6 +185,30 @@ export async function fetchOpenRouterModels(apiKey: string): Promise<string[]> {
}
}

/**
 * Fetch the list of MiniMax model ids from the OpenAI-compatible
 * `/v1/models` endpoint. On any failure — network error, non-2xx status,
 * or an empty/unexpected payload — falls back to the static
 * MODEL_LIST.minimax defaults, so this never throws.
 */
export async function fetchMiniMaxModels(apiKey: string): Promise<string[]> {
  try {
    const res = await fetch('https://api.minimax.io/v1/models', {
      headers: {
        Authorization: `Bearer ${apiKey}`
      }
    });

    if (!res.ok) {
      return MODEL_LIST.minimax;
    }

    const payload = await res.json();

    // Keep only MiniMax-branded model ids, sorted alphabetically.
    const ids: string[] = [];
    for (const entry of payload.data ?? []) {
      const id = (entry as { id: string }).id;
      if (id.startsWith('MiniMax-')) {
        ids.push(id);
      }
    }
    ids.sort();

    return ids.length > 0 ? ids : MODEL_LIST.minimax;
  } catch {
    return MODEL_LIST.minimax;
  }
}

export async function fetchDeepSeekModels(apiKey: string): Promise<string[]> {
try {
const response = await fetch('https://api.deepseek.com/v1/models', {
Expand Down Expand Up @@ -273,6 +297,14 @@ export async function fetchModelsForProvider(
}
break;

case OCO_AI_PROVIDER_ENUM.MINIMAX:
if (apiKey) {
models = await fetchMiniMaxModels(apiKey);
} else {
models = MODEL_LIST.minimax;
}
break;

case OCO_AI_PROVIDER_ENUM.AIMLAPI:
models = MODEL_LIST.aimlapi;
break;
Expand Down
112 changes: 112 additions & 0 deletions test/unit/minimax-integration.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import { OpenAI } from 'openai';

// Mock @clack/prompts to prevent process.exit calls
// Mock @clack/prompts to prevent process.exit calls
jest.mock('@clack/prompts', () => ({
  intro: jest.fn(),
  outro: jest.fn()
}));

/**
 * Integration tests for MiniMax engine.
 * These tests verify the MiniMax API works correctly via OpenAI-compatible SDK.
 * This mirrors the exact behavior of MiniMaxEngine which extends OpenAiEngine.
 *
 * Run with: MINIMAX_API_KEY=<key> npm run test -- test/unit/minimax-integration.test.ts
 */
// Gate on the env var: without a key the whole suite is skipped (describe.skip),
// so CI without credentials stays green.
const MINIMAX_API_KEY = process.env.MINIMAX_API_KEY;
const describeIntegration = MINIMAX_API_KEY ? describe : describe.skip;

describeIntegration('MiniMax Integration (requires MINIMAX_API_KEY)', () => {
  let client: OpenAI;

  beforeAll(() => {
    // One shared client per suite; MiniMax speaks the OpenAI wire protocol,
    // so the stock SDK with a swapped baseURL is sufficient.
    client = new OpenAI({
      apiKey: MINIMAX_API_KEY!,
      baseURL: 'https://api.minimax.io/v1'
    });
  });

  // Happy path: a real diff should yield a non-empty commit message.
  // Sampling parameters mirror MiniMaxEngine.generateCommitMessage.
  it('should generate a commit message with M2.7', async () => {
    const completion = await client.chat.completions.create({
      model: 'MiniMax-M2.7',
      messages: [
        {
          role: 'system',
          content:
            'You are an expert at writing concise, meaningful git commit messages. Generate a conventional commit message for the provided code diff. Output only the commit message, nothing else.'
        },
        {
          role: 'user',
          content: `diff --git a/src/utils.ts b/src/utils.ts
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -10,6 +10,10 @@ export function formatDate(date: Date): string {
return date.toISOString();
}

+export function formatCurrency(amount: number, currency: string = 'USD'): string {
+ return new Intl.NumberFormat('en-US', { style: 'currency', currency }).format(amount);
+}
+
export function capitalize(str: string): string {`
        }
      ],
      temperature: 0.01,
      top_p: 0.1,
      max_tokens: 500
    });

    // Model output is nondeterministic text, so only assert presence/shape.
    const content = completion.choices[0].message?.content;
    expect(content).toBeDefined();
    expect(typeof content).toBe('string');
    expect(content!.length).toBeGreaterThan(0);
  }, 30000); // generous timeout: live network call

  // Same happy path against the high-speed model variant.
  it('should generate commit message with M2.5-highspeed', async () => {
    const completion = await client.chat.completions.create({
      model: 'MiniMax-M2.5-highspeed',
      messages: [
        {
          role: 'system',
          content:
            'You are an expert at writing concise git commit messages. Generate a conventional commit message. Output only the commit message.'
        },
        {
          role: 'user',
          content: `diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -1,3 +1,5 @@
# My Project

A simple project.
+
+## Installation`
        }
      ],
      temperature: 0.01,
      top_p: 0.1,
      max_tokens: 500
    });

    const content = completion.choices[0].message?.content;
    expect(content).toBeDefined();
    expect(typeof content).toBe('string');
    expect(content!.length).toBeGreaterThan(0);
  }, 30000);

  // Auth failure path: an invalid key must surface as a rejected promise
  // (the engine relies on this to normalize provider errors).
  it('should handle authentication error with invalid API key', async () => {
    const badClient = new OpenAI({
      apiKey: 'invalid-api-key',
      baseURL: 'https://api.minimax.io/v1'
    });

    await expect(
      badClient.chat.completions.create({
        model: 'MiniMax-M2.7',
        messages: [{ role: 'user', content: 'test' }],
        max_tokens: 10
      })
    ).rejects.toThrow();
  }, 30000);
});
Loading
Loading