"""Career domain read tools: search and fetch job listings."""
import logging
from typing import Optional
from ...models import JobListing, Application
from ...utils.storage import get_db_connection
from .scrapers.greenhouse import scrape_greenhouse_jobs
from .scrapers.lever import scrape_lever_jobs
logger = logging.getLogger(__name__)
async def search_greenhouse_boards(company_name: str) -> list[JobListing]:
"""
Search for jobs on a company's Greenhouse job board.
Args:
company_name: Name of the company to search
Returns:
List of job listings found
"""
logger.info(f"Searching Greenhouse boards for: {company_name}")
try:
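        # Delegate fetching and parsing to the Greenhouse scraper, which returns JobListing models.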
jobs = await scrape_greenhouse_jobs(company_name)
logger.info(f"Found {len(jobs)} jobs on Greenhouse for {company_name}")
return jobs
except Exception as e:
logger.error(f"Error searching Greenhouse for {company_name}: {e}", exc_info=True)
        raise


async def search_lever_boards(company_name: str) -> list[JobListing]:
"""
Search for jobs on a company's Lever job board.
Args:
company_name: Name of the company to search
Returns:
List of job listings found
"""
logger.info(f"Searching Lever boards for: {company_name}")
try:
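        # Same pattern as the Greenhouse search, delegated to the Lever scraper.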
jobs = await scrape_lever_jobs(company_name)
logger.info(f"Found {len(jobs)} jobs on Lever for {company_name}")
return jobs
except Exception as e:
logger.error(f"Error searching Lever for {company_name}: {e}", exc_info=True)
        raise


async def get_applications(status: Optional[str] = None) -> list[Application]:
"""
Get tracked job applications, optionally filtered by status.
Args:
status: Optional status filter (interested, applied, interviewing, offered, rejected)
Returns:
List of applications matching the filter
"""
logger.info(f"Getting applications with status: {status or 'all'}")
try:
async with get_db_connection() as db:
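            # Filter with a parameterized query when a status is given;
            # otherwise fetch every tracked application.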
if status:
cursor = await db.execute(
"""
SELECT id, job_url, company, title, location, applied_date,
status, notes, next_action
FROM applications
WHERE status = ?
ORDER BY applied_date DESC
""",
(status,)
)
else:
cursor = await db.execute(
"""
SELECT id, job_url, company, title, location, applied_date,
status, notes, next_action
FROM applications
ORDER BY applied_date DESC
"""
)
rows = await cursor.fetchall()
applications = []
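            # Column order mirrors the SELECT: id, job_url, company, title,
            # location, applied_date, status, notes, next_action.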
for row in rows:
job = JobListing(
title=row[3],
url=row[1],
company=row[2],
location=row[4],
posted_date=None,
source="tracked"
)
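                # Wrap the listing with its tracking metadata (status, notes, next action).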
app = Application(
id=row[0],
job=job,
applied_date=row[5],
status=row[6],
notes=row[7] or "",
next_action=row[8]
)
applications.append(app)
logger.info(f"Found {len(applications)} applications")
return applications
except Exception as e:
logger.error(f"Error getting applications: {e}", exc_info=True)
raise
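

# Example usage from an async caller elsewhere in the package (values are illustrative):
#
#     jobs = await search_greenhouse_boards("Acme")
#     active = await get_applications(status="interviewing")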