"""
MCP Server for Adult Entertainment Industry Statistics
This server provides tools to query statistics about the adult entertainment industry
using Google Trends API for real-time and historical search data.
Data Source:
- Google Trends API (pytrends) - Real-time search trend data from 2004 to present
"""
from typing import Any, Optional
import json
from datetime import datetime, timedelta
from mcp.server.fastmcp import FastMCP
import sys
from pytrends.request import TrendReq
import time
import pandas as pd
# Initialize FastMCP server
mcp = FastMCP("pornhub-mcp")

# Constants
# NOTE(review): USER_AGENT is defined but not referenced anywhere in this file —
# confirm whether an external caller uses it or it is dead.
USER_AGENT = "industry-stats-mcp/1.0"

# Cache for storing fetched data (to avoid excessive API calls)
# Maps "kw1,kw2_<timeframe>_<geo>" -> result dict produced by get_trends_data();
# entries carry a 'fetched_at' ISO timestamp and are treated as fresh for 1 hour.
TRENDS_CACHE = {}

# Initialize Google Trends client once at import time. On failure the server
# still starts; every tool then reports the API as unavailable.
pytrends = None
try:
    # hl: host language; tz: timezone offset in minutes (per pytrends API);
    # timeout: (connect, read) seconds; retries/backoff handle transient errors.
    pytrends = TrendReq(hl='en-US', tz=360, timeout=(10, 25), retries=2, backoff_factor=0.1)
    print("✅ Google Trends API initialized successfully", file=sys.stderr)
except Exception as e:
    print(f"⚠️ Warning: Could not initialize Google Trends: {e}", file=sys.stderr)
# ============================================================================
# HELPER FUNCTIONS - Google Trends API
# ============================================================================
def get_trends_data(keywords: list[str], timeframe: str = 'today 12-m', geo: str = 'US') -> dict:
    """
    Fetch Google Trends data for given keywords, with a 1-hour in-memory cache.

    Args:
        keywords: List of search terms to compare (max 5)
        timeframe: Time period (e.g., 'today 12-m', 'today 5-y', '2020-01-01 2024-12-31')
        geo: Geographic region (e.g., 'US', 'GB', '' for worldwide)
    Returns:
        Dictionary with trends data ('interest_over_time', 'interest_by_region',
        'related_queries', 'fetched_at'), or {"error": ...} on any failure.
    """
    if not pytrends:
        return {"error": "Google Trends API not available"}
    # Check cache
    cache_key = f"{','.join(keywords)}_{timeframe}_{geo}"
    if cache_key in TRENDS_CACHE:
        cached = TRENDS_CACHE[cache_key]
        # BUG FIX: use total_seconds() instead of .seconds — .seconds is only
        # the seconds *component* of the timedelta (it wraps every 24 hours),
        # so entries older than a day could be served as "fresh".
        age = int((datetime.now() - datetime.fromisoformat(cached['fetched_at'])).total_seconds())
        if age < 3600:  # Cache for 1 hour
            print(f"Using cached data (age: {age}s)", file=sys.stderr)
            return cached
    try:
        print(f"Fetching Google Trends: {keywords}, {timeframe}, {geo}", file=sys.stderr)
        # Build payload
        pytrends.build_payload(keywords, cat=0, timeframe=timeframe, geo=geo, gprop='')
        # Get interest over time
        interest_over_time_df = pytrends.interest_over_time()
        # Get interest by region (best-effort: some keywords/regions have none)
        try:
            interest_by_region_df = pytrends.interest_by_region(resolution='REGION', inc_low_vol=True, inc_geo_code=False)
        except Exception as e:
            print(f"Could not fetch regional data: {e}", file=sys.stderr)
            interest_by_region_df = pd.DataFrame()
        # Get related queries (best-effort as well)
        try:
            related_queries = pytrends.related_queries()
        except Exception as e:
            print(f"Could not fetch related queries: {e}", file=sys.stderr)
            related_queries = {}
        result = {
            "keywords": keywords,
            "timeframe": timeframe,
            "geo": geo,
            "interest_over_time": interest_over_time_df.to_dict() if not interest_over_time_df.empty else {},
            "interest_by_region": interest_by_region_df.to_dict() if not interest_by_region_df.empty else {},
            # NOTE(review): related_queries holds raw DataFrames (not dicts) —
            # format_related_queries relies on that; confirm before serializing.
            "related_queries": related_queries,
            "fetched_at": datetime.now().isoformat()
        }
        # Cache the result
        TRENDS_CACHE[cache_key] = result
        # Rate limiting: be polite to the unofficial Trends endpoint
        time.sleep(1)
        return result
    except Exception as e:
        print(f"Error fetching Google Trends data: {e}", file=sys.stderr)
        return {"error": str(e)}
def format_interest_over_time(data: dict) -> str:
    """
    Format interest-over-time trends data into a human-readable report.

    Args:
        data: Result dict from get_trends_data(); expects 'keywords', 'geo',
              'timeframe' and 'interest_over_time' (or an 'error' key).
    Returns:
        Multi-line report string, or an error / no-data message.
    """
    if "error" in data:
        return f"❌ Error: {data['error']}"
    if not data.get("interest_over_time"):
        return "No data available for the specified time period and keywords."
    lines = [
        "📊 Google Trends Analysis",
        f"Keywords: {', '.join(data['keywords'])}",
        f"Region: {data['geo'] if data['geo'] else 'Worldwide'}",
        f"Period: {data['timeframe']}",
        "=" * 60,
        ""
    ]
    # Calculate statistics for each keyword
    interest_data = data['interest_over_time']
    if interest_data:
        lines.append("📈 Search Interest Statistics (0-100 scale):")
        lines.append("")
        for keyword in data['keywords']:
            if keyword in interest_data:
                values = [v for v in interest_data[keyword].values() if isinstance(v, (int, float))]
                if values:
                    avg = sum(values) / len(values)
                    max_val = max(values)
                    min_val = min(values)
                    lines.append(f"'{keyword}':")
                    lines.append(f" Average: {avg:.1f}")
                    lines.append(f" Peak: {max_val}")
                    lines.append(f" Low: {min_val}")
                    # Trend direction: mean of last <=4 points vs first <=4 points
                    if len(values) >= 2:
                        recent = values[-4:]
                        older = values[:4]
                        recent_avg = sum(recent) / len(recent)
                        older_avg = sum(older) / len(older)
                        # BUG FIX: guard against ZeroDivisionError when the
                        # early-period average is 0 (common with leading zeros).
                        if older_avg == 0:
                            if recent_avg > 0:
                                lines.append(" Trend: 📈 Growing (from zero baseline)")
                            else:
                                lines.append(" Trend: ➡️ Stable")
                        elif recent_avg > older_avg * 1.1:
                            lines.append(f" Trend: 📈 Growing ({((recent_avg/older_avg - 1) * 100):.0f}%)")
                        elif recent_avg < older_avg * 0.9:
                            lines.append(f" Trend: 📉 Declining ({((1 - recent_avg/older_avg) * 100):.0f}%)")
                        else:
                            lines.append(" Trend: ➡️ Stable")
                    lines.append("")
    return "\n".join(lines)
def format_regional_interest(data: dict, top_n: int = 10) -> str:
    """
    Render per-keyword regional interest as a ranked, human-readable list.

    Args:
        data: Result dict from get_trends_data() (may carry an 'error' key).
        top_n: Number of highest-interest regions to show per keyword.
    Returns:
        Formatted report string, or an error / no-data message.
    """
    if "error" in data:
        return f"❌ Error: {data['error']}"
    if not data.get("interest_by_region"):
        return "No regional data available."
    out = ["🌎 Regional Interest", "=" * 60, ""]
    by_region = data['interest_by_region']
    for kw in data['keywords']:
        if kw not in by_region:
            continue
        out.append(f"Top regions for '{kw}':")
        # Keep only numeric, non-zero entries, then rank descending by score.
        ranked = sorted(
            ((name, score) for name, score in by_region[kw].items()
             if isinstance(score, (int, float)) and score > 0),
            key=lambda pair: pair[1],
            reverse=True,
        )[:top_n]
        if ranked:
            for rank, (name, score) in enumerate(ranked, 1):
                out.append(f" {rank}. {name}: {score}/100")
        else:
            out.append(" No regional data available")
        out.append("")
    return "\n".join(out)
def format_related_queries(data: dict) -> str:
    """
    Render the 'related queries' section (top + rising) for each keyword.

    Args:
        data: Result dict from get_trends_data(); 'related_queries' maps each
              keyword to {'top': DataFrame | None, 'rising': DataFrame | None}.
    Returns:
        Formatted section string, or "" when there is nothing to show.
    """
    related = data.get("related_queries")
    if "error" in data or not related:
        return ""
    out = ["", "🔍 Related & Trending Queries", "=" * 60, ""]
    for kw in data['keywords']:
        if kw not in related:
            continue
        out.append(f"Related to '{kw}':")
        # Top related queries (at most 5 rows shown)
        top_df = related[kw].get('top')
        if top_df is not None and not top_df.empty:
            out.append(" Top:")
            for row in top_df.head(5).itertuples(index=False):
                out.append(f" • {row.query} ({row.value})")
        # Rising queries — 'value' is either a percentage or the string 'Breakout'
        rising_df = related[kw].get('rising')
        if rising_df is not None and not rising_df.empty:
            out.append(" Rising:")
            for row in rising_df.head(5).itertuples(index=False):
                label = "🔥 Breakout" if row.value == 'Breakout' else f"+{row.value}%"
                out.append(f" • {row.query} ({label})")
        out.append("")
    return "\n".join(out)
# ============================================================================
# MCP TOOLS
# ============================================================================
@mcp.tool()
async def search_trends(
    keywords: list[str],
    timeframe: str = "today 12-m",
    region: str = "US"
) -> str:
    """
    Search Google Trends for any keywords (performers, platforms, categories, etc.).
    Args:
        keywords: List of search terms to analyze (max 5). Examples:
            ["Lana Rhoades", "Riley Reid"]
            ["pornhub", "onlyfans"]
            ["milf", "teen", "amateur"]
        timeframe: Time period. Options:
            'today 12-m' (past year, default)
            'today 3-m' (past 3 months)
            'today 5-y' (past 5 years)
            '2020-01-01 2024-12-31' (custom date range)
            Available back to 2004
        region: Geographic region code:
            'US' (USA, default)
            'GB' (UK)
            '' (Worldwide)
            Any ISO country code
    Returns:
        Complete Google Trends analysis with interest over time, regional data, and related queries.
    """
    # Guard clauses: API availability, then keyword-count limits.
    if not pytrends:
        return "❌ Google Trends API is not available. Please check configuration."
    if len(keywords) > 5:
        return "⚠️ Maximum 5 keywords allowed per query. Please reduce your list."
    if not keywords:
        return "⚠️ Please provide at least one keyword to search."
    # Fetch (possibly cached) trends data, then assemble the report sections.
    trends = get_trends_data(keywords, timeframe, region)
    sections = [
        format_interest_over_time(trends),
        "",
        format_regional_interest(trends, top_n=10),
        format_related_queries(trends),
        "",
        "📝 Notes:",
        "- Values are on a 0-100 scale where 100 = peak popularity for the time period",
        "- Data represents search interest, not absolute search volumes",
        f"- Data fetched: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
    ]
    return "\n".join(sections)
@mcp.tool()
async def compare_performers(
    performer_names: list[str],
    timeframe: str = "today 12-m",
    region: str = "US"
) -> str:
    """
    Compare Google search trends between different performers.
    Args:
        performer_names: List of performer names to compare (max 5)
            Example: ["Lana Rhoades", "Riley Reid", "Abella Danger"]
        timeframe: Time period (default: past 12 months)
            Use 'today 5-y' for 5 year comparison
            Use '2020-01-01 2024-12-31' for custom range
        region: Region code (default: US)
    Returns:
        Comparative analysis showing which performer has higher search interest.
    """
    count = len(performer_names)
    if count > 5:
        return "⚠️ Maximum 5 performers can be compared at once."
    if count < 2:
        return "⚠️ Please provide at least 2 performers to compare."
    # Thin wrapper: delegate to the generic trends tool.
    return await search_trends(performer_names, timeframe, region)
@mcp.tool()
async def compare_platforms(
    platform_names: list[str],
    timeframe: str = "today 12-m",
    region: str = "US"
) -> str:
    """
    Compare Google search trends between different adult platforms.
    Args:
        platform_names: List of platform names (max 5)
            Example: ["pornhub", "onlyfans", "xvideos"]
        timeframe: Time period (default: past 12 months)
        region: Region code (default: US)
    Returns:
        Comparative analysis showing platform popularity trends.
    """
    count = len(platform_names)
    if count > 5:
        return "⚠️ Maximum 5 platforms can be compared at once."
    if count < 2:
        return "⚠️ Please provide at least 2 platforms to compare."
    # Thin wrapper: delegate to the generic trends tool.
    return await search_trends(platform_names, timeframe, region)
@mcp.tool()
async def analyze_category_trends(
    categories: list[str],
    timeframe: str = "today 12-m",
    region: str = "US"
) -> str:
    """
    Analyze Google search trends for adult content categories.
    Args:
        categories: List of category keywords (max 5)
            Example: ["milf", "teen", "amateur", "lesbian", "hentai"]
        timeframe: Time period (default: past 12 months)
        region: Region code (default: US)
    Returns:
        Trend analysis showing category popularity and growth.
    """
    if len(categories) > 5:
        return "⚠️ Maximum 5 categories can be compared at once."
    # Unlike the comparison tools, a single category is allowed here.
    return await search_trends(categories, timeframe, region)
@mcp.tool()
async def historical_analysis(
    keyword: str,
    start_year: int = 2020,
    end_year: int = 2024,
    region: str = "US"
) -> str:
    """
    Analyze historical trends for a keyword across one or more years.
    Args:
        keyword: Single keyword to analyze (performer, platform, or category)
            Example: "Lana Rhoades", "onlyfans", "milf"
        start_year: Starting year (2004 or later)
        end_year: Ending year (default: 2024); may equal start_year for a
            single-year analysis
        region: Region code (default: US)
    Returns:
        Multi-year trend analysis showing growth, peaks, and patterns.
    """
    if start_year < 2004:
        return "⚠️ Google Trends data is only available from 2004 onwards."
    # GENERALIZED: a single-year range (start == end) produces the valid
    # timeframe "YYYY-01-01 YYYY-12-31", so only reject reversed ranges
    # (the old `>=` check wrongly refused single-year queries).
    if start_year > end_year:
        return "⚠️ Start year must be before end year."
    timeframe = f"{start_year}-01-01 {end_year}-12-31"
    result = await search_trends([keyword], timeframe, region)
    return f"📅 Historical Analysis: {start_year}-{end_year}\n\n{result}"
@mcp.tool()
async def trending_searches(
    base_keyword: str,
    timeframe: str = "today 12-m",
    region: str = "US"
) -> str:
    """
    Find trending and related searches for a keyword.
    Args:
        base_keyword: Main keyword to find related searches for
            Example: "pornhub", "onlyfans", or any performer name
        timeframe: Time period (default: past 12 months)
        region: Region code (default: US)
    Returns:
        List of related and rising search terms.
    """
    trends = get_trends_data([base_keyword], timeframe, region)
    if "error" in trends:
        return f"❌ Error: {trends['error']}"
    where = region if region else "Worldwide"
    header = [
        f"🔥 Trending Searches Related to '{base_keyword}'",
        f"Region: {where}",
        f"Period: {timeframe}",
        "=" * 60,
        "",
    ]
    body = [
        format_related_queries(trends),
        "",
        f"Data fetched: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
    ]
    return "\n".join(header + body)
# ============================================================================
# RUN SERVER
# ============================================================================
def main():
    """Entry point: log startup status to stderr, then serve MCP over stdio."""
    status = '✅ Available' if pytrends else '❌ Unavailable'
    print("🚀 Starting Pornhub MCP Server (Google Trends API Only)", file=sys.stderr)
    print(f"🔍 Google Trends API: {status}", file=sys.stderr)
    print("📅 Historical data available from 2004 to present", file=sys.stderr)
    mcp.run(transport="stdio")


if __name__ == "__main__":
    main()