#!/usr/bin/env node
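// MCP server that exposes OpenAI Sora 2 video generation, job management,
// and local ffmpeg post-processing tools (merge, fade animation) over stdio.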
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
CallToolRequestSchema,
ListToolsRequestSchema,
Tool,
} from "@modelcontextprotocol/sdk/types.js";
import OpenAI from "openai";
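// Tool definitions advertised to MCP clients via the tools/list request.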
const SORA_TOOLS: Tool[] = [
{
name: "create_video",
description: "Generate a video using Sora 2 or Sora 2 Pro model. Creates a video from a text prompt with optional parameters for resolution, duration, and model selection.",
inputSchema: {
type: "object",
properties: {
prompt: {
type: "string",
description: "Text description of the video to generate",
},
model: {
type: "string",
enum: ["sora-2", "sora-2-pro"],
description: "Model to use for generation. sora-2 is faster and cheaper, sora-2-pro provides higher quality.",
default: "sora-2",
},
size: {
type: "string",
enum: ["1280x720", "720x1280", "1792x1024", "1024x1792"],
description: "Video resolution (width x height)",
default: "1280x720",
},
seconds: {
type: "string",
enum: ["4", "8", "12"],
description: "Video duration in seconds",
default: "8",
},
},
required: ["prompt"],
},
},
{
name: "get_video_status",
description: "Get the status and details of a video generation job. Returns information about whether the video is still processing, completed, or failed.",
inputSchema: {
type: "object",
properties: {
video_id: {
type: "string",
description: "The ID of the video generation job to check",
},
},
required: ["video_id"],
},
},
{
name: "list_videos",
description: "List all video generation jobs with optional filtering and pagination.",
inputSchema: {
type: "object",
properties: {
limit: {
type: "number",
description: "Maximum number of videos to return",
default: 20,
},
after: {
type: "string",
description: "Cursor for pagination - ID of the last video from the previous page",
},
},
},
},
{
name: "delete_video",
description: "Delete a video generation job and its associated data.",
inputSchema: {
type: "object",
properties: {
video_id: {
type: "string",
description: "The ID of the video to delete",
},
},
required: ["video_id"],
},
},
{
name: "merge_videos",
description: "Merge multiple video files into a single video. Videos are concatenated in the order provided.",
inputSchema: {
type: "object",
properties: {
video_urls: {
type: "array",
items: {
type: "string",
},
description: "Array of video URLs or local file paths to merge (in order)",
minItems: 2,
},
output_path: {
type: "string",
description: "Path where the merged video will be saved (default: merged_video.mp4)",
default: "merged_video.mp4",
},
},
required: ["video_urls"],
},
},
{
name: "create_fade_animation",
description: "Create a fade-in animation video from a static image. The image will fade from black to full visibility.",
inputSchema: {
type: "object",
properties: {
image_path: {
type: "string",
description: "Path or URL to the input image",
},
duration: {
type: "number",
description: "Duration of the fade animation in seconds (default: 3)",
default: 3,
},
output_path: {
type: "string",
description: "Path where the video will be saved (default: fade_animation.mp4)",
default: "fade_animation.mp4",
},
fade_type: {
type: "string",
enum: ["in", "out", "in-out"],
description: "Type of fade effect: 'in' (fade in), 'out' (fade out), 'in-out' (fade in then out)",
default: "in",
},
},
required: ["image_path"],
},
},
];
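// SoraServer wires the tool definitions above to their handlers. It requires the
// OPENAI_API_KEY environment variable, calls the /v1/videos endpoints directly via
// fetch, and logs to stderr because stdout is reserved for the stdio transport.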
class SoraServer {
private server: Server;
private openai: OpenAI;
constructor() {
console.error("[SORA MCP] Initializing Sora MCP server...");
const apiKey = process.env.OPENAI_API_KEY;
if (!apiKey) {
console.error("[SORA MCP] ERROR: OPENAI_API_KEY environment variable is missing!");
throw new Error("OPENAI_API_KEY environment variable is required");
}
console.error("[SORA MCP] API key found, length:", apiKey.length);
this.openai = new OpenAI({ apiKey });
console.error("[SORA MCP] OpenAI client initialized");
this.server = new Server(
{
name: "sora-2-mcp",
version: "1.0.0",
},
{
capabilities: {
tools: {},
},
}
);
console.error("[SORA MCP] MCP server created");
this.setupHandlers();
console.error("[SORA MCP] Handlers registered");
}
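// Registers the tools/list and tools/call handlers; tool errors are caught and
// returned as text content rather than surfaced as protocol-level failures.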
private setupHandlers(): void {
this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: SORA_TOOLS,
}));
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
const { name, arguments: args } = request.params;
console.error("[SORA MCP] Tool called:", name, "with args:", JSON.stringify(args));
try {
switch (name) {
case "create_video":
return await this.createVideo(args);
case "get_video_status":
return await this.getVideoStatus(args);
case "list_videos":
return await this.listVideos(args);
case "delete_video":
return await this.deleteVideo(args);
case "merge_videos":
return await this.mergeVideos(args);
case "create_fade_animation":
return await this.createFadeAnimation(args);
default:
console.error("[SORA MCP] Unknown tool requested:", name);
throw new Error(`Unknown tool: ${name}`);
}
} catch (error) {
console.error("[SORA MCP] Tool execution error:", error);
const errorMessage = error instanceof Error ? error.message : String(error);
const errorStack = error instanceof Error ? error.stack : undefined;
console.error("[SORA MCP] Error stack:", errorStack);
return {
content: [
{
type: "text",
text: `Error: ${errorMessage}`,
},
],
};
}
});
}
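// Starts an asynchronous video generation job by POSTing multipart form data
// to /v1/videos and returns the job id and initial status.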
private async createVideo(args: any) {
const { prompt, model = "sora-2", size = "1280x720", seconds = "8" } = args;
console.error("[SORA MCP] Creating video with params:", { prompt: prompt.substring(0, 100), model, size, seconds });
try {
// Use FormData instead of JSON as per OpenAI Sora API requirements
const formData = new FormData();
formData.append("prompt", prompt);
formData.append("model", model);
formData.append("size", size);
formData.append("seconds", seconds);
const response = await fetch("https://api.openai.com/v1/videos", {
method: "POST",
headers: {
"Authorization": `Bearer ${process.env.OPENAI_API_KEY}`,
// Note: don't set the Content-Type header manually; fetch sets the multipart boundary automatically for FormData bodies
},
body: formData,
});
console.error("[SORA MCP] API response status:", response.status);
console.error("[SORA MCP] API response headers:", JSON.stringify(Object.fromEntries(response.headers.entries())));
if (!response.ok) {
const errorText = await response.text();
console.error("[SORA MCP] API error response:", errorText);
throw new Error(`API request failed with status ${response.status}: ${errorText}`);
}
const videoData: any = await response.json();
console.error("[SORA MCP] Video data received:", JSON.stringify(videoData));
return {
content: [
{
type: "text",
text: JSON.stringify(
{
success: true,
video_id: videoData.id,
status: videoData.status,
model: videoData.model,
message: "Video generation started. Use get_video_status to check progress.",
data: videoData,
},
null,
2
),
},
],
};
} catch (error) {
console.error("[SORA MCP] Error in createVideo:", error);
console.error("[SORA MCP] Error details:", {
message: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
name: error instanceof Error ? error.name : undefined,
});
throw error;
}
}
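// Retrieves the current state of a video generation job from GET /v1/videos/{video_id}.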
private async getVideoStatus(args: any) {
const { video_id } = args;
console.error("[SORA MCP] Getting video status for:", video_id);
try {
const response = await fetch(`https://api.openai.com/v1/videos/${video_id}`, {
method: "GET",
headers: {
"Authorization": `Bearer ${process.env.OPENAI_API_KEY}`,
},
});
console.error("[SORA MCP] Status response:", response.status);
if (!response.ok) {
const errorText = await response.text();
console.error("[SORA MCP] API error response:", errorText);
throw new Error(`API request failed with status ${response.status}: ${errorText}`);
}
const videoData: any = await response.json();
console.error("[SORA MCP] Status data:", JSON.stringify(videoData));
return {
content: [
{
type: "text",
text: JSON.stringify(
{
success: true,
video_id: videoData.id,
status: videoData.status,
url: videoData.url,
data: videoData,
},
null,
2
),
},
],
};
} catch (error) {
console.error("[SORA MCP] Error in getVideoStatus:", error);
throw error;
}
}
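// Lists video generation jobs, with an optional limit and cursor-based pagination.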
private async listVideos(args: any) {
const { limit = 20, after } = args ?? {}; // args may be omitted since list_videos has no required fields
console.error("[SORA MCP] Listing videos with limit:", limit, "after:", after);
try {
const params = new URLSearchParams({ limit: limit.toString() });
if (after) {
params.append("after", after);
}
const url = `https://api.openai.com/v1/videos?${params.toString()}`;
const response = await fetch(url, {
method: "GET",
headers: {
"Authorization": `Bearer ${process.env.OPENAI_API_KEY}`,
},
});
console.error("[SORA MCP] List response status:", response.status);
if (!response.ok) {
const errorText = await response.text();
console.error("[SORA MCP] API error response:", errorText);
throw new Error(`API request failed with status ${response.status}: ${errorText}`);
}
const listData: any = await response.json();
console.error("[SORA MCP] List data:", JSON.stringify(listData));
return {
content: [
{
type: "text",
text: JSON.stringify(
{
success: true,
videos: listData.data,
has_more: listData.has_more,
},
null,
2
),
},
],
};
} catch (error) {
console.error("[SORA MCP] Error in listVideos:", error);
throw error;
}
}
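// Deletes a video generation job via DELETE /v1/videos/{video_id}.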
private async deleteVideo(args: any) {
const { video_id } = args;
console.error("[SORA MCP] Deleting video:", video_id);
try {
const response = await fetch(`https://api.openai.com/v1/videos/${video_id}`, {
method: "DELETE",
headers: {
"Authorization": `Bearer ${process.env.OPENAI_API_KEY}`,
},
});
console.error("[SORA MCP] Delete response status:", response.status);
if (!response.ok) {
const errorText = await response.text();
console.error("[SORA MCP] API error response:", errorText);
throw new Error(`API request failed with status ${response.status}: ${errorText}`);
}
console.error("[SORA MCP] Video deleted successfully");
return {
content: [
{
type: "text",
text: JSON.stringify(
{
success: true,
message: `Video ${video_id} deleted successfully`,
},
null,
2
),
},
],
};
} catch (error) {
console.error("[SORA MCP] Error in deleteVideo:", error);
throw error;
}
}
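// Concatenates the given videos with ffmpeg's concat demuxer without re-encoding,
// so all inputs should share the same codec and resolution. Remote URLs may
// additionally require ffmpeg's protocol whitelist to include https.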
private async mergeVideos(args: any) {
const { video_urls, output_path = "merged_video.mp4" } = args;
console.error("[SORA MCP] Merging videos:", video_urls.length, "files");
try {
const { exec } = await import("child_process");
const { promisify } = await import("util");
const execAsync = promisify(exec);
const fs = await import("fs");
const path = await import("path");
const os = await import("os");
// Write a temporary file list for ffmpeg's concat demuxer
const listPath = path.join(os.tmpdir(), `concat_list_${Date.now()}.txt`);
const fileList = video_urls.map((url: string) => `file '${url}'`).join("\n");
fs.writeFileSync(listPath, fileList);
try {
// Use ffmpeg to concatenate the videos without re-encoding
const command = `ffmpeg -f concat -safe 0 -i "${listPath}" -c copy "${output_path}"`;
console.error("[SORA MCP] Running ffmpeg command:", command);
const { stderr } = await execAsync(command);
console.error("[SORA MCP] ffmpeg output:", stderr);
} finally {
// Remove the temporary list file even if ffmpeg fails
fs.unlinkSync(listPath);
}
return {
content: [
{
type: "text",
text: JSON.stringify(
{
success: true,
output_path,
message: `Successfully merged ${video_urls.length} videos into ${output_path}`,
},
null,
2
),
},
],
};
} catch (error) {
console.error("[SORA MCP] Error in mergeVideos:", error);
throw new Error(
`Failed to merge videos. Make sure ffmpeg is installed and the video files exist. Error: ${
error instanceof Error ? error.message : String(error)
}`
);
}
}
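// Builds a short video from a still image by looping it and applying an ffmpeg
// fade filter, encoded with libx264 in yuv420p for broad player compatibility.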
private async createFadeAnimation(args: any) {
const {
image_path,
duration = 3,
output_path = "fade_animation.mp4",
fade_type = "in",
} = args;
console.error("[SORA MCP] Creating fade animation from image:", image_path);
try {
const { exec } = await import("child_process");
const { promisify } = await import("util");
const execAsync = promisify(exec);
// Build ffmpeg filter based on fade type
let fadeFilter: string;
if (fade_type === "out") {
fadeFilter = `fade=t=out:st=0:d=${duration}`;
} else if (fade_type === "in-out") {
const halfDuration = duration / 2;
fadeFilter = `fade=t=in:st=0:d=${halfDuration},fade=t=out:st=${halfDuration}:d=${halfDuration}`;
} else {
// "in" (the default) and any unrecognized value fall back to a fade-in
fadeFilter = `fade=t=in:st=0:d=${duration}`;
}
// Create video from image with fade effect
const command = `ffmpeg -loop 1 -i "${image_path}" -vf "${fadeFilter}" -c:v libx264 -t ${duration} -pix_fmt yuv420p "${output_path}"`;
console.error("[SORA MCP] Running ffmpeg command:", command);
const { stderr } = await execAsync(command);
console.error("[SORA MCP] ffmpeg output:", stderr);
return {
content: [
{
type: "text",
text: JSON.stringify(
{
success: true,
output_path,
duration,
fade_type,
message: `Successfully created ${fade_type} fade animation from ${image_path}`,
},
null,
2
),
},
],
};
} catch (error) {
console.error("[SORA MCP] Error in createFadeAnimation:", error);
throw new Error(
`Failed to create fade animation. Make sure ffmpeg is installed and the image file exists. Error: ${
error instanceof Error ? error.message : String(error)
}`
);
}
}
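// Connects the server to the stdio transport and starts serving requests.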
async run(): Promise<void> {
const transport = new StdioServerTransport();
await this.server.connect(transport);
console.error("Sora 2 MCP server running on stdio");
}
}
const server = new SoraServer();
server.run().catch(console.error);