Alibaba Cloud Model Studio で MiniMax モデルを呼び出します。
重要
このドキュメントは中国本土リージョンにのみ適用されます。モデルを使用するには、中国本土リージョンから API キーを取得してください。
モデル概要
MiniMax-M2.5 は、MiniMax シリーズの最新モデルです。コーディング、オフィス業務、テキスト要約に優れており、高速な出力を提供します。
モデル | コンテキストウィンドウ（トークン） | 最大入力（トークン） | 最大 CoT + 応答（トークン）
MiniMax-M2.5 | 196,608 | 196,601 | 32,768
`thinking_budget` パラメーターはサポートされていません。
思考モードのみがサポートされています。
これらのモデルはサードパーティサービスではありません。Model Studio サーバーにデプロイされています。
クイックスタート
前提条件:開始する前に、API キーを作成し、環境変数として設定してください。SDK を使用してモデルを呼び出す場合は、OpenAI または DashScope SDK をインストールしてください。
OpenAI 互換
Python
サンプルコード
# Stream a chat completion from MiniMax-M2.5 through the OpenAI-compatible
# endpoint, printing the chain-of-thought and the final answer as two
# separately labeled sections.
import os

from openai import OpenAI

client = OpenAI(
    # If you have not set the environment variable, replace this with your
    # Alibaba Cloud Model Studio API key: api_key="sk-xxx"
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
)

completion = client.chat.completions.create(
    model="MiniMax-M2.5",
    messages=[{"role": "user", "content": "Who are you?"}],
    stream=True,
)

reasoning_content = ""  # Full chain-of-thought
answer_content = ""  # Full response
is_answering = False  # Whether the answer section has started

print("\n" + "=" * 20 + "Chain of thought" + "=" * 20 + "\n")

for chunk in completion:
    if chunk.choices:
        delta = chunk.choices[0].delta
        # Collect chain-of-thought content only
        if hasattr(delta, "reasoning_content") and delta.reasoning_content is not None:
            if not is_answering:
                print(delta.reasoning_content, end="", flush=True)
            reasoning_content += delta.reasoning_content
        # Start the answer section once regular content arrives
        if hasattr(delta, "content") and delta.content:
            if not is_answering:
                print("\n" + "=" * 20 + "Full response" + "=" * 20 + "\n")
                is_answering = True
            print(delta.content, end="", flush=True)
            answer_content += delta.content
応答

====================Chain of thought====================
The user asked "Who are you?".
I should reply and introduce myself as an AI assistant.
====================Full response====================
Hello! I am MiniMax-M2.5, an AI assistant. I can help you answer questions, provide information, hold conversations, and more. How can I help you?

Node.js
サンプルコード
// Stream a chat completion from MiniMax-M2.5 through the OpenAI-compatible
// endpoint, printing the chain-of-thought and the final answer as two
// separately labeled sections.
import OpenAI from "openai";
import process from 'process';

// Initialize the OpenAI client
const openai = new OpenAI({
    // If you have not set the environment variable, replace this with your Alibaba Cloud Model Studio API key: apiKey: "sk-xxx"
    apiKey: process.env.DASHSCOPE_API_KEY,
    baseURL: 'https://dashscope.aliyuncs.com/compatible-mode/v1'
});

let reasoningContent = ''; // Full chain-of-thought
let answerContent = ''; // Full response
let isAnswering = false; // Whether the answer section has started

async function main() {
    const messages = [{ role: 'user', content: 'Who are you?' }];
    const stream = await openai.chat.completions.create({
        model: 'MiniMax-M2.5',
        messages,
        stream: true,
    });
    console.log('\n' + '='.repeat(20) + 'Chain of thought' + '='.repeat(20) + '\n');
    for await (const chunk of stream) {
        if (chunk.choices?.length) {
            const delta = chunk.choices[0].delta;
            // Collect chain-of-thought content only
            if (delta.reasoning_content !== undefined && delta.reasoning_content !== null) {
                if (!isAnswering) {
                    process.stdout.write(delta.reasoning_content);
                }
                reasoningContent += delta.reasoning_content;
            }
            // Start the answer section once regular content arrives
            if (delta.content !== undefined && delta.content) {
                if (!isAnswering) {
                    console.log('\n' + '='.repeat(20) + 'Full response' + '='.repeat(20) + '\n');
                    isAnswering = true;
                }
                process.stdout.write(delta.content);
                answerContent += delta.content;
            }
        }
    }
}

main();
応答

====================Chain of thought====================
The user asked "Who are you?".
I should reply and introduce myself as an AI assistant.
====================Full response====================
Hello! I am MiniMax-M2.5, an AI assistant. I can help you answer questions, provide information, hold conversations, and more. How can I help you?

HTTP
サンプルコード
curl
# Call MiniMax-M2.5 through the OpenAI-compatible chat completions endpoint.
# Requires the DASHSCOPE_API_KEY environment variable to be set.
curl -X POST https://dashscope.aliyuncs.com/compatible-mode/v1/chat/completions \
-H "Authorization: Bearer $DASHSCOPE_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"model": "MiniMax-M2.5",
"messages": [
{
"role": "user",
"content": "Who are you?"
}
]
}'

応答
{
"choices": [
{
"message": {
"content": "Hello! I am MiniMax-M2.5, an AI assistant developed by MiniMax. I can help you answer questions, provide information, hold conversations, and complete various text-related tasks. How can I help you?",
"reasoning_content": "The user asked \"Who are you?\".\n\nI should reply and introduce myself.",
"role": "assistant"
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"object": "chat.completion",
"usage": {
"prompt_tokens": 40,
"completion_tokens": 72,
"total_tokens": 112,
"completion_tokens_details": {
"reasoning_tokens": 26
},
"prompt_tokens_details": {
"cached_tokens": 0
}
},
"created": 1771944590,
"system_fingerprint": null,
"model": "MiniMax-M2.5",
"id": "chatcmpl-b1277a9c-52da-9de7-988a-d5c063d83xxx"
}

DashScope
Python
サンプルコード
# Stream a chat completion from MiniMax-M2.5 through the DashScope SDK,
# printing the chain-of-thought and the final answer as two separately
# labeled sections.
import os

from dashscope import Generation

# Build the request parameters
messages = [{"role": "user", "content": "Who are you?"}]

completion = Generation.call(
    # If you have not set the environment variable, replace this with your
    # Alibaba Cloud Model Studio API key: api_key="sk-xxx"
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    model="MiniMax-M2.5",
    messages=messages,
    result_format="message",  # Return results in the message format
    stream=True,  # Enable streaming output
    incremental_output=True,  # Enable incremental output
)

reasoning_content = ""  # Full chain-of-thought
answer_content = ""  # Full response
is_answering = False  # Whether the answer section has started

print("\n" + "=" * 20 + "Chain of thought" + "=" * 20 + "\n")

for chunk in completion:
    message = chunk.output.choices[0].message
    # Collect chain-of-thought content only
    if message.reasoning_content:
        if not is_answering:
            print(message.reasoning_content, end="", flush=True)
        reasoning_content += message.reasoning_content
    # Start the answer section once regular content arrives
    if message.content:
        if not is_answering:
            print("\n" + "=" * 20 + "Full response" + "=" * 20 + "\n")
            is_answering = True
        print(message.content, end="", flush=True)
        answer_content += message.content

# After the loop, reasoning_content and answer_content hold the complete
# text and can be processed further as needed:
# print(f"\n\nFull chain of thought:\n{reasoning_content}")
# print(f"\nFull response:\n{answer_content}")
応答

====================Chain of thought====================
The user asked "Who are you?".
I should reply and introduce myself as an AI assistant.
====================Full response====================
Hello! I am MiniMax-M2.5, an AI assistant. I can help you answer questions, provide information, hold conversations, and more. How can I help you?

Java
サンプルコード
// DashScope SDK バージョン >= 2.19.4
import com.alibaba.dashscope.aigc.generation.Generation;
import com.alibaba.dashscope.aigc.generation.GenerationParam;
import com.alibaba.dashscope.aigc.generation.GenerationResult;
import com.alibaba.dashscope.common.Message;
import com.alibaba.dashscope.common.Role;
import com.alibaba.dashscope.exception.ApiException;
import com.alibaba.dashscope.exception.InputRequiredException;
import com.alibaba.dashscope.exception.NoApiKeyException;
import io.reactivex.Flowable;
import java.lang.System;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Main {
private static final Logger logger = LoggerFactory.getLogger(Main.class);
private static StringBuilder reasoningContent = new StringBuilder();
private static StringBuilder finalContent = new StringBuilder();
private static boolean isFirstPrint = true;
private static void handleGenerationResult(GenerationResult message) {
String reasoning = message.getOutput().getChoices().get(0).getMessage().getReasoningContent();
String content = message.getOutput().getChoices().get(0).getMessage().getContent();
if (reasoning != null && !reasoning.isEmpty()) {
reasoningContent.append(reasoning);
if (isFirstPrint) {
System.out.println("====================Chain of thought====================");
isFirstPrint = false;
}
System.out.print(reasoning);
}
if (content != null && !content.isEmpty()) {
finalContent.append(content);
if (!isFirstPrint) {
System.out.println("\n====================Full response====================");
isFirstPrint = true;
}
System.out.print(content);
}
}
private static GenerationParam buildGenerationParam(Message userMsg) {
return GenerationParam.builder()
// 環境変数を設定していない場合は、この行を .apiKey("sk-xxx") に置き換えてください
.apiKey(System.getenv("DASHSCOPE_API_KEY"))
.model("MiniMax-M2.5")
.incrementalOutput(true)
.resultFormat("message")
.messages(Arrays.asList(userMsg))
.build();
}
public static void streamCallWithMessage(Generation gen, Message userMsg)
throws NoApiKeyException, ApiException, InputRequiredException {
GenerationParam param = buildGenerationParam(userMsg);
Flowable<GenerationResult> result = gen.streamCall(param);
result.blockingForEach(message -> handleGenerationResult(message));
}
public static void main(String[] args) {
try {
Generation gen = new Generation();
Message userMsg = Message.builder().role(Role.USER.getValue()).content("Who are you?").build();
streamCallWithMessage(gen, userMsg);
// 最終結果を出力
// if (reasoningContent.length() > 0) {
// System.out.println("\n====================Full response====================");
// System.out.println(finalContent.toString());
// }
} catch (ApiException | NoApiKeyException | InputRequiredException e) {
logger.error("An exception occurred: {}", e.getMessage());
}
System.exit(0);
}
}応答
応答

====================Chain of thought====================
The user asked "Who are you?".
I should reply and introduce myself as an AI assistant.
====================Full response====================
Hello! I am MiniMax-M2.5, an AI assistant. I can help you answer questions, provide information, hold conversations, and more. How can I help you?

HTTP
サンプルコード
curl
# Call MiniMax-M2.5 through the DashScope native text-generation endpoint.
# Requires the DASHSCOPE_API_KEY environment variable to be set.
curl -X POST "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation" \
-H "Authorization: Bearer $DASHSCOPE_API_KEY" \
-H "Content-Type: application/json" \
-d '{
"model": "MiniMax-M2.5",
"input":{
"messages":[
{
"role": "user",
"content": "Who are you?"
}
]
},
"parameters": {
"result_format": "message"
}
}'

応答
{
"output": {
"choices": [
{
"finish_reason": "stop",
"message": {
"content": "Hello! I am MiniMax-M2.5, an AI assistant developed by MiniMax. I can help you answer questions, provide information, hold conversations, and complete various text-related tasks. How can I help you?",
"reasoning_content": "The user asked \"Who are you?\".\n\nI should reply and introduce myself. I should state that I am MiniMax-M2.5, an AI assistant developed by MiniMax.",
"role": "assistant"
}
}
]
},
"usage": {
"input_tokens": 41,
"output_tokens": 79,
"output_tokens_details": {
"reasoning_tokens": 39
},
"prompt_tokens_details": {
"cached_tokens": 0
},
"total_tokens": 120
},
"request_id": "1bbd770e-564a-4601-83fc-3bf639423xxx"
}

モデルの機能
モデル | 備考
MiniMax-M2.5 | 暗黙的なキャッシュのみ。
MiniMax-M2.1 |
既定のパラメーター値
モデル | 温度 | top_p | presence_penalty |
MiniMax-M2.5 | 1.0 | 0.95 | 0.0 |
MiniMax-M2.1 | 1.0 | 0.95 | 0.0 |
エラーコード
モデルの呼び出しが失敗してエラーメッセージが返された場合、トラブルシューティングについてはエラーメッセージをご参照ください。