Skip to main content
Glama
Jing-yilin

Reddit MCP Server

by Jing-yilin
index.ts (21.5 kB)
#!/usr/bin/env node
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
  Tool,
  CallToolResult,
  ErrorCode,
  McpError,
} from '@modelcontextprotocol/sdk/types.js';
import axios, { AxiosInstance, AxiosRequestConfig } from 'axios';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { encode } from '@toon-format/toon';
import * as fs from 'fs';
import * as path from 'path';

/**
 * Helpers that reduce Reddit's raw JSON listing payloads to the compact
 * records the tools expose. Each cleaner returns null for malformed input so
 * list cleaners can `.filter(Boolean)`.
 */
const DataCleaners = {
  /** Flatten one t3 (link) wrapper into a compact post record, or null. */
  cleanPost(raw: any): any {
    if (!raw?.data) return null;
    const d = raw.data;
    return {
      id: d.id,
      title: d.title,
      subreddit: d.subreddit,
      author: d.author,
      score: d.score,
      upvoteRatio: d.upvote_ratio,
      comments: d.num_comments,
      created: d.created_utc ? new Date(d.created_utc * 1000).toISOString() : null,
      url: d.url,
      permalink: `https://reddit.com${d.permalink}`,
      selftext: d.selftext || undefined,
      isVideo: d.is_video,
      isSelf: d.is_self,
      // 'self' / 'default' are placeholder markers, not real thumbnail URLs.
      thumbnail: d.thumbnail !== 'self' && d.thumbnail !== 'default' ? d.thumbnail : undefined,
      flair: d.link_flair_text || undefined,
      awards: d.total_awards_received,
      over18: d.over_18,
    };
  },

  /**
   * Recursively flatten one t1 (comment) wrapper into a compact record.
   *
   * @param raw      Raw listing child; anything that is not a t1 yields null.
   * @param depth    Current depth in the tree (0 for top-level comments).
   * @param maxDepth Deepest level whose replies are still descended into.
   *                 Defaults to Infinity, i.e. the full tree (the previous
   *                 behavior). FIX: this parameter is new — cleanCommentList
   *                 previously accepted maxDepth but never enforced it.
   */
  cleanComment(raw: any, depth: number = 0, maxDepth: number = Infinity): any {
    if (!raw?.data || raw.kind !== 't1') return null;
    const d = raw.data;
    const replies: any[] = [];
    // Only descend while below the cap; replies past maxDepth are dropped.
    if (depth < maxDepth && d.replies?.data?.children) {
      for (const child of d.replies.data.children) {
        const cleaned = DataCleaners.cleanComment(child, depth + 1, maxDepth);
        if (cleaned) replies.push(cleaned);
      }
    }
    return {
      id: d.id,
      author: d.author,
      body: d.body,
      score: d.score,
      created: d.created_utc ? new Date(d.created_utc * 1000).toISOString() : null,
      depth,
      // d.edited is either false or an edit timestamp; collapse to boolean.
      edited: d.edited ? true : false,
      replies: replies.length > 0 ? replies : undefined,
    };
  },

  /** Flatten a t2 (account) wrapper into a compact user record, or null. */
  cleanUser(raw: any): any {
    if (!raw?.data) return null;
    const d = raw.data;
    return {
      id: d.id,
      name: d.name,
      created: d.created_utc ? new Date(d.created_utc * 1000).toISOString() : null,
      linkKarma: d.link_karma,
      commentKarma: d.comment_karma,
      totalKarma: d.total_karma,
      verified: d.verified,
      hasVerifiedEmail: d.has_verified_email,
      premium: d.is_gold,
      isMod: d.is_mod,
      // Drop the cache-busting query string from the avatar URL.
      iconUrl: d.icon_img?.split('?')[0],
      description: d.subreddit?.public_description || undefined,
    };
  },

  /** Flatten a t5 (subreddit) wrapper into a compact record, or null. */
  cleanSubreddit(raw: any): any {
    if (!raw?.data) return null;
    const d = raw.data;
    return {
      id: d.id,
      name: d.display_name,
      title: d.title,
      description: d.public_description,
      subscribers: d.subscribers,
      activeUsers: d.accounts_active,
      created: d.created_utc ? new Date(d.created_utc * 1000).toISOString() : null,
      over18: d.over18,
      url: `https://reddit.com${d.url}`,
      iconUrl: d.icon_img?.split('?')[0] || d.community_icon?.split('?')[0],
    };
  },

  /** Clean every child of a post listing, dropping malformed entries. */
  cleanPostList(children: any[]): any[] {
    if (!Array.isArray(children)) return [];
    return children.map(DataCleaners.cleanPost).filter(Boolean);
  },

  /**
   * Clean every top-level comment of a comment listing.
   * Non-t1 children ("more" stubs etc.) are skipped; maxDepth caps how deep
   * each reply tree is kept (FIX: previously accepted but ignored).
   */
  cleanCommentList(children: any[], maxDepth: number = 3): any[] {
    if (!Array.isArray(children)) return [];
    return children
      .filter(c => c.kind === 't1')
      .map(c => DataCleaners.cleanComment(c, 0, maxDepth))
      .filter(Boolean);
  },
};

/**
 * MCP server exposing read-only Reddit tools over stdio. Talks to the public
 * www.reddit.com JSON endpoints (no OAuth), optionally through an HTTPS proxy,
 * cleans the payloads with DataCleaners, and returns TOON-encoded text.
 */
class RedditMCPServer {
  private server: Server;
  private apiClient: AxiosInstance;

  constructor() {
    this.server = new Server(
      {
        name: 'reddit-mcp-server',
        version: '1.0.0',
      },
      {
        capabilities: {
          tools: {},
        },
      }
    );

    const axiosConfig: AxiosRequestConfig = {
      baseURL: 'https://www.reddit.com',
      timeout: 30000,
      headers: {
        'User-Agent': 'Reddit-MCP-Server/1.0.0 (by /u/mcp-bot)',
        'Accept': 'application/json',
      },
    };

    // Route through a proxy when configured; axios's built-in proxy handling
    // is disabled because HttpsProxyAgent takes over the tunneling.
    const proxyUrl = process.env.PROXY_URL || process.env.HTTP_PROXY || process.env.HTTPS_PROXY;
    if (proxyUrl) {
      axiosConfig.httpsAgent = new HttpsProxyAgent(proxyUrl);
      axiosConfig.proxy = false;
    }

    this.apiClient = axios.create(axiosConfig);
    this.setupToolHandlers();
  }

  /** Register the tool list and the tool-call dispatcher on the MCP server. */
  private setupToolHandlers(): void {
    this.server.setRequestHandler(ListToolsRequestSchema, async () => {
      return {
        tools: [
          {
            name: 'get_subreddit_hot',
            description: 'Get hot posts from a subreddit. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                subreddit: { type: 'string', description: 'Subreddit name (without r/)' },
                limit: { type: 'integer', description: 'Number of posts (default: 10, max: 100)', default: 10 },
                after: { type: 'string', description: 'Pagination cursor (fullname of item)' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
                max_items: { type: 'integer', description: 'Maximum posts to return (default: 10)', default: 10 },
              },
              required: ['subreddit'],
            },
          } as Tool,
          {
            name: 'get_subreddit_new',
            description: 'Get new posts from a subreddit. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                subreddit: { type: 'string', description: 'Subreddit name (without r/)' },
                limit: { type: 'integer', description: 'Number of posts (default: 10, max: 100)', default: 10 },
                after: { type: 'string', description: 'Pagination cursor (fullname of item)' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
                max_items: { type: 'integer', description: 'Maximum posts to return (default: 10)', default: 10 },
              },
              required: ['subreddit'],
            },
          } as Tool,
          {
            name: 'get_subreddit_top',
            description: 'Get top posts from a subreddit. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                subreddit: { type: 'string', description: 'Subreddit name (without r/)' },
                time: { type: 'string', description: 'Time filter: hour, day, week, month, year, all', enum: ['hour', 'day', 'week', 'month', 'year', 'all'], default: 'day' },
                limit: { type: 'integer', description: 'Number of posts (default: 10, max: 100)', default: 10 },
                after: { type: 'string', description: 'Pagination cursor (fullname of item)' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
                max_items: { type: 'integer', description: 'Maximum posts to return (default: 10)', default: 10 },
              },
              required: ['subreddit'],
            },
          } as Tool,
          {
            name: 'get_post_content',
            description: 'Get post details with comments. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                post_id: { type: 'string', description: 'Reddit post ID (e.g., "abc123")' },
                subreddit: { type: 'string', description: 'Subreddit name (optional, improves lookup)' },
                comment_limit: { type: 'integer', description: 'Number of top-level comments (default: 20)', default: 20 },
                comment_depth: { type: 'integer', description: 'Max depth of comment tree (default: 3)', default: 3 },
                sort: { type: 'string', description: 'Comment sort: confidence, top, new, controversial, old, qa', enum: ['confidence', 'top', 'new', 'controversial', 'old', 'qa'], default: 'confidence' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
              },
              required: ['post_id'],
            },
          } as Tool,
          {
            name: 'search_posts',
            description: 'Search Reddit posts by query. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                query: { type: 'string', description: 'Search query' },
                subreddit: { type: 'string', description: 'Limit search to subreddit (optional)' },
                sort: { type: 'string', description: 'Sort by: relevance, hot, top, new, comments', enum: ['relevance', 'hot', 'top', 'new', 'comments'], default: 'relevance' },
                time: { type: 'string', description: 'Time filter: hour, day, week, month, year, all', enum: ['hour', 'day', 'week', 'month', 'year', 'all'], default: 'all' },
                limit: { type: 'integer', description: 'Number of results (default: 10, max: 100)', default: 10 },
                after: { type: 'string', description: 'Pagination cursor' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
                max_items: { type: 'integer', description: 'Maximum posts to return (default: 10)', default: 10 },
              },
              required: ['query'],
            },
          } as Tool,
          {
            name: 'get_user_info',
            description: 'Get Reddit user profile information. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                username: { type: 'string', description: 'Reddit username (without u/)' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
              },
              required: ['username'],
            },
          } as Tool,
          {
            name: 'get_user_posts',
            description: 'Get posts submitted by a user. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                username: { type: 'string', description: 'Reddit username (without u/)' },
                sort: { type: 'string', description: 'Sort by: hot, new, top, controversial', enum: ['hot', 'new', 'top', 'controversial'], default: 'new' },
                time: { type: 'string', description: 'Time filter for top/controversial: hour, day, week, month, year, all', enum: ['hour', 'day', 'week', 'month', 'year', 'all'], default: 'all' },
                limit: { type: 'integer', description: 'Number of posts (default: 10, max: 100)', default: 10 },
                after: { type: 'string', description: 'Pagination cursor' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
                max_items: { type: 'integer', description: 'Maximum posts to return (default: 10)', default: 10 },
              },
              required: ['username'],
            },
          } as Tool,
          {
            name: 'get_user_comments',
            description: 'Get comments made by a user. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                username: { type: 'string', description: 'Reddit username (without u/)' },
                sort: { type: 'string', description: 'Sort by: hot, new, top, controversial', enum: ['hot', 'new', 'top', 'controversial'], default: 'new' },
                time: { type: 'string', description: 'Time filter for top/controversial: hour, day, week, month, year, all', enum: ['hour', 'day', 'week', 'month', 'year', 'all'], default: 'all' },
                limit: { type: 'integer', description: 'Number of comments (default: 10, max: 100)', default: 10 },
                after: { type: 'string', description: 'Pagination cursor' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
                max_items: { type: 'integer', description: 'Maximum comments to return (default: 10)', default: 10 },
              },
              required: ['username'],
            },
          } as Tool,
          {
            name: 'get_subreddit_info',
            description: 'Get subreddit information. Returns cleaned data in TOON format.',
            inputSchema: {
              type: 'object',
              properties: {
                subreddit: { type: 'string', description: 'Subreddit name (without r/)' },
                save_dir: { type: 'string', description: 'Directory to save cleaned JSON data' },
              },
              required: ['subreddit'],
            },
          } as Tool,
        ],
      };
    });

    this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
      try {
        const { name, arguments: args } = request.params;
        if (!args) throw new McpError(ErrorCode.InvalidParams, 'Missing arguments');
        switch (name) {
          case 'get_subreddit_hot':
            return await this.getSubredditPosts(args as Record<string, any>, 'hot');
          case 'get_subreddit_new':
            return await this.getSubredditPosts(args as Record<string, any>, 'new');
          case 'get_subreddit_top':
            return await this.getSubredditPosts(args as Record<string, any>, 'top');
          case 'get_post_content':
            return await this.getPostContent(args as Record<string, any>);
          case 'search_posts':
            return await this.searchPosts(args as Record<string, any>);
          case 'get_user_info':
            return await this.getUserInfo(args as Record<string, any>);
          case 'get_user_posts':
            return await this.getUserPosts(args as Record<string, any>);
          case 'get_user_comments':
            return await this.getUserComments(args as Record<string, any>);
          case 'get_subreddit_info':
            return await this.getSubredditInfo(args as Record<string, any>);
          default:
            throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`);
        }
      } catch (error) {
        // McpErrors propagate unchanged; everything else is wrapped so the
        // client always receives a well-formed MCP error.
        if (error instanceof McpError) throw error;
        const message = error instanceof Error ? error.message : 'Unknown error occurred';
        throw new McpError(ErrorCode.InternalError, `Reddit API error: ${message}`);
      }
    });
  }

  /**
   * GET a Reddit endpoint and return the parsed body.
   * Axios failures are rewrapped with the HTTP status and Reddit's own
   * message when available; non-axios errors are rethrown as-is.
   */
  private async makeRequest(endpoint: string, params?: Record<string, any>): Promise<any> {
    try {
      const response = await this.apiClient.get(endpoint, { params });
      return response.data;
    } catch (error) {
      if (axios.isAxiosError(error)) {
        const statusCode = error.response?.status || 500;
        const errorMessage = error.response?.data?.message || error.response?.data?.error || error.message;
        throw new Error(`Reddit API error (${statusCode}): ${errorMessage}`);
      }
      throw error;
    }
  }

  /**
   * Write the cleaned payload as pretty-printed JSON under dir and return the
   * file path. Best-effort: on failure the error text is returned in place of
   * a path (the tool result still succeeds).
   */
  private saveData(data: any, dir: string, toolName: string): string {
    try {
      if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
      // Colons/dots are stripped from the ISO timestamp for filesystem safety.
      const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
      const filename = `${toolName}_${timestamp}.json`;
      const filepath = path.join(dir, filename);
      fs.writeFileSync(filepath, JSON.stringify(data, null, 2));
      return filepath;
    } catch (e) {
      return `Error saving: ${e}`;
    }
  }

  /**
   * Wrap cleaned data (plus an optional pagination cursor) in a TOON-encoded
   * text result, optionally persisting the JSON to disk first.
   */
  private formatResponse(
    cleanedData: any,
    options: { saveDir?: string; toolName?: string; pagination?: { after?: string; before?: string } }
  ): CallToolResult {
    const output: any = { data: cleanedData };
    if (options.pagination?.after) {
      output.pagination = { nextCursor: options.pagination.after };
    }
    let savedPath = '';
    if (options.saveDir && options.toolName) {
      savedPath = this.saveData(output, options.saveDir, options.toolName);
    }
    const toonString = encode(output);
    let text = toonString;
    if (savedPath) text += `\n\n[Cleaned data saved to: ${savedPath}]`;
    return { content: [{ type: 'text', text }] };
  }

  /**
   * Shared handler for the hot/new/top subreddit listings.
   * Over-fetches (limit, default 25) relative to max_items (default 10) so
   * the pagination cursor stays useful after slicing.
   */
  private async getSubredditPosts(args: Record<string, any>, sort: string): Promise<CallToolResult> {
    const params: Record<string, any> = {
      limit: Math.min(args.limit || 25, 100),
      raw_json: 1,
    };
    if (args.after) params.after = args.after;
    // The 't' (time window) filter only applies to the top listing.
    if (sort === 'top' && args.time) params.t = args.time;
    const data = await this.makeRequest(`/r/${args.subreddit}/${sort}.json`, params);
    const maxItems = args.max_items || 10;
    const cleaned = DataCleaners.cleanPostList(data?.data?.children || []).slice(0, maxItems);
    return this.formatResponse(cleaned, {
      saveDir: args.save_dir,
      toolName: `get_subreddit_${sort}`,
      pagination: { after: data?.data?.after },
    });
  }

  /** Fetch one post plus its comment tree (capped at comment_depth). */
  private async getPostContent(args: Record<string, any>): Promise<CallToolResult> {
    const params: Record<string, any> = {
      limit: args.comment_limit || 20,
      depth: args.comment_depth || 3,
      sort: args.sort || 'confidence',
      raw_json: 1,
    };
    let endpoint = `/comments/${args.post_id}.json`;
    if (args.subreddit) {
      endpoint = `/r/${args.subreddit}/comments/${args.post_id}.json`;
    }
    // Response is a two-element array: [0] the post listing, [1] the comments.
    const data = await this.makeRequest(endpoint, params);
    const postData = data?.[0]?.data?.children?.[0];
    const commentsData = data?.[1]?.data?.children || [];
    const post = DataCleaners.cleanPost(postData);
    const comments = DataCleaners.cleanCommentList(commentsData, args.comment_depth || 3);
    return this.formatResponse({ post, comments }, {
      saveDir: args.save_dir,
      toolName: 'get_post_content',
    });
  }

  /** Search posts site-wide, or within one subreddit when given. */
  private async searchPosts(args: Record<string, any>): Promise<CallToolResult> {
    const params: Record<string, any> = {
      q: args.query,
      sort: args.sort || 'relevance',
      t: args.time || 'all',
      limit: Math.min(args.limit || 25, 100),
      raw_json: 1,
      type: 'link',
    };
    // restrict_sr keeps subreddit-scoped searches inside that subreddit.
    if (args.subreddit) params.restrict_sr = 'on';
    if (args.after) params.after = args.after;
    let endpoint = '/search.json';
    if (args.subreddit) {
      endpoint = `/r/${args.subreddit}/search.json`;
    }
    const data = await this.makeRequest(endpoint, params);
    const maxItems = args.max_items || 10;
    const cleaned = DataCleaners.cleanPostList(data?.data?.children || []).slice(0, maxItems);
    return this.formatResponse(cleaned, {
      saveDir: args.save_dir,
      toolName: 'search_posts',
      pagination: { after: data?.data?.after },
    });
  }

  /** Fetch a user's public profile. */
  private async getUserInfo(args: Record<string, any>): Promise<CallToolResult> {
    const data = await this.makeRequest(`/user/${args.username}/about.json`, { raw_json: 1 });
    const cleaned = DataCleaners.cleanUser(data);
    return this.formatResponse(cleaned, {
      saveDir: args.save_dir,
      toolName: 'get_user_info',
    });
  }

  /** Fetch posts submitted by a user. */
  private async getUserPosts(args: Record<string, any>): Promise<CallToolResult> {
    const params: Record<string, any> = {
      sort: args.sort || 'new',
      limit: Math.min(args.limit || 25, 100),
      raw_json: 1,
      type: 'links',
    };
    if (args.time) params.t = args.time;
    if (args.after) params.after = args.after;
    const data = await this.makeRequest(`/user/${args.username}/submitted.json`, params);
    const maxItems = args.max_items || 10;
    const cleaned = DataCleaners.cleanPostList(data?.data?.children || []).slice(0, maxItems);
    return this.formatResponse(cleaned, {
      saveDir: args.save_dir,
      toolName: 'get_user_posts',
      pagination: { after: data?.data?.after },
    });
  }

  /**
   * Fetch comments made by a user. Comments from this listing carry the
   * parent post's title (link_title), so they are cleaned inline rather than
   * via DataCleaners.cleanComment.
   */
  private async getUserComments(args: Record<string, any>): Promise<CallToolResult> {
    const params: Record<string, any> = {
      sort: args.sort || 'new',
      limit: Math.min(args.limit || 25, 100),
      raw_json: 1,
      type: 'comments',
    };
    if (args.time) params.t = args.time;
    if (args.after) params.after = args.after;
    const data = await this.makeRequest(`/user/${args.username}/comments.json`, params);
    const maxItems = args.max_items || 10;
    const children = data?.data?.children || [];
    const cleaned = children
      .slice(0, maxItems)
      .map((c: any) => {
        if (c.kind !== 't1' || !c.data) return null;
        const d = c.data;
        return {
          id: d.id,
          subreddit: d.subreddit,
          postTitle: d.link_title,
          author: d.author,
          body: d.body,
          score: d.score,
          created: d.created_utc ? new Date(d.created_utc * 1000).toISOString() : null,
          permalink: `https://reddit.com${d.permalink}`,
        };
      })
      .filter(Boolean);
    return this.formatResponse(cleaned, {
      saveDir: args.save_dir,
      toolName: 'get_user_comments',
      pagination: { after: data?.data?.after },
    });
  }

  /** Fetch a subreddit's about page. */
  private async getSubredditInfo(args: Record<string, any>): Promise<CallToolResult> {
    const data = await this.makeRequest(`/r/${args.subreddit}/about.json`, { raw_json: 1 });
    const cleaned = DataCleaners.cleanSubreddit(data);
    return this.formatResponse(cleaned, {
      saveDir: args.save_dir,
      toolName: 'get_subreddit_info',
    });
  }

  /** Connect the server to stdio and start serving requests. */
  async run(): Promise<void> {
    const transport = new StdioServerTransport();
    await this.server.connect(transport);
  }
}

const server = new RedditMCPServer();
server.run().catch(console.error);

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Jing-yilin/reddit-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.